Example no. 1
 def setUp(self):
     super(S3HandlerTestURLEncodeDeletes, self).setUp()
     self.session = FakeSession()
     self.service = self.session.get_service('s3')
     self.endpoint = self.service.get_endpoint('us-east-1')
     params = {'region': 'us-east-1'}
     self.s3_handler = S3Handler(self.session, params)
     self.bucket = make_s3_files(self.session, key1='a+b/foo', key2=None)
Example no. 2
 def setUp(self):
     self.local_file = six.text_type(os.path.abspath('.') + os.sep +
                                     'some_directory' + os.sep + 'text1.txt')
     self.local_dir = six.text_type(os.path.abspath('.') + os.sep +
                                    'some_directory' + os.sep)
     self.session = FakeSession()
     self.service = self.session.get_service('s3')
     self.endpoint = self.service.get_endpoint('us-east-1')
     self.files = make_loc_files()
Example no. 3
 def setUp(self):
     super(S3HandlerTestDeleteList, self).setUp()
     self.session = FakeSession()
     self.service = self.session.get_service('s3')
     self.endpoint = self.service.get_endpoint('us-east-1')
     params = {'region': 'us-east-1'}
     self.s3_handler = S3Handler(self.session, params)
     self.bucket = make_s3_files(self.session)
     self.loc_files = make_loc_files()
Example no. 4
 def setUp(self):
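     # Replace os.environ with an empty dict so no ambient AWS configuration leaks into the test.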
     self.environ = {}
     self.environ_patch = patch('os.environ', self.environ)
     self.environ_patch.start()
     self.session = FakeSession()
     self.mock = MagicMock()
     self.mock.get_config = MagicMock(return_value={'region': None})
     self.loc_files = make_loc_files()
     self.bucket = make_s3_files(self.session)
Example no. 5
 def setUp(self):
     super(S3HandlerTestMvLocalS3, self).setUp()
     self.session = FakeSession()
     self.service = self.session.get_service('s3')
     self.endpoint = self.service.get_endpoint('us-east-1')
     params = {'region': 'us-east-1', 'acl': ['private'], 'quiet': True}
     self.s3_handler = S3Handler(self.session, params)
     self.bucket = create_bucket(self.session)
     self.loc_files = make_loc_files()
     self.s3_files = [self.bucket + '/text1.txt',
                      self.bucket + '/another_directory/text2.txt']
Example no. 6
 def setUp(self):
     super(S3HandlerExceptionSingleTaskTest, self).setUp()
     self.session = FakeSession(True, True)
     self.service = self.session.get_service('s3')
     self.endpoint = self.service.get_endpoint('us-east-1')
     params = {'region': 'us-east-1'}
     self.s3_handler = S3Handler(self.session, params)
     self.bucket = create_bucket(self.session)
     self.loc_files = make_loc_files()
     self.s3_files = [self.bucket + '/text1.txt',
                      self.bucket + '/another_directory/text2.txt']
Example no. 7
 def setUp(self):
     super(S3HandlerExceptionMultiTaskTest, self).setUp()
     self.session = FakeSession(True, True)
     self.service = self.session.get_service('s3')
     self.endpoint = self.service.get_endpoint('us-east-1')
     params = {'region': 'us-east-1', 'quiet': True}
     self.s3_handler_multi = S3Handler(self.session, params,
                                       multi_threshold=10, chunksize=2)
     self.bucket = create_bucket(self.session)
     self.loc_files = make_loc_files()
     self.s3_files = [self.bucket + '/text1.txt',
                      self.bucket + '/another_directory/text2.txt']
Example no. 8
 def setUp(self):
     super(S3HandlerTestMvS3Local, self).setUp()
     self.session = FakeSession()
     self.service = self.session.get_service('s3')
     self.endpoint = self.service.get_endpoint('us-east-1')
     params = {'region': 'us-east-1'}
     self.s3_handler = S3Handler(self.session, params)
     self.bucket = make_s3_files(self.session)
     self.s3_files = [self.bucket + '/text1.txt',
                      self.bucket + '/another_directory/text2.txt']
     directory1 = os.path.abspath('.') + os.sep + 'some_directory' + os.sep
     filename1 = directory1 + "text1.txt"
     directory2 = directory1 + 'another_directory' + os.sep
     filename2 = directory2 + "text2.txt"
     self.loc_files = [filename1, filename2]
Example no. 9
 def setUp(self):
     super(S3HandlerTestUpload, self).setUp()
     self.session = FakeSession()
     self.service = self.session.get_service('s3')
     self.endpoint = self.service.get_endpoint('us-east-1')
     params = {'region': 'us-east-1', 'acl': ['private'], 'quiet': True}
     self.s3_handler = S3Handler(self.session, params)
     self.s3_handler_multi = S3Handler(
         self.session, params=params,
         runtime_config=runtime_config(
             multipart_threshold=10, multipart_chunksize=2))
     self.bucket = create_bucket(self.session)
     self.loc_files = make_loc_files()
     self.s3_files = [self.bucket + '/text1.txt',
                      self.bucket + '/another_directory/text2.txt']
Example no. 10
 def setUp(self):
     super(S3HandlerTestDeleteList, self).setUp()
     self.session = FakeSession()
     params = {'region': 'us-east-1'}
     self.s3_handler = S3Handler(self.session, params)
     self.bucket = make_s3_files(self.session)
     self.loc_files = make_loc_files()
Example no. 11
 def setUp(self):
     super(S3HandlerTestBucket, self).setUp()
     self.session = FakeSession()
     self.service = self.session.get_service('s3')
     self.endpoint = self.service.get_endpoint('us-east-1')
     self.params = {'region': 'us-east-1'}
     self.bucket = None
Example no. 12
 def setUp(self):
     self.local_file = os.path.abspath('.') + os.sep + 'some_directory' \
         + os.sep + 'text1.txt'
     self.local_dir = os.path.abspath('.') + os.sep + 'some_directory' \
         + os.sep
     self.session = FakeSession()
     self.files = make_loc_files()
Example no. 13
 def setUp(self):
     self.local_file = os.path.abspath(".") + os.sep + "some_directory" + os.sep + "text1.txt"
     self.local_dir = os.path.abspath(".") + os.sep + "some_directory" + os.sep
     self.session = FakeSession()
     self.service = self.session.get_service("s3")
     self.endpoint = self.service.get_endpoint("us-east-1")
     self.files = make_loc_files()
Example no. 14
class S3HandlerTestURLEncodeDeletes(S3HandlerBaseTest):
    def setUp(self):
        super(S3HandlerTestURLEncodeDeletes, self).setUp()
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        params = {'region': 'us-east-1'}
        self.s3_handler = S3Handler(self.session, params)
        self.bucket = make_s3_files(self.session, key1='a+b/foo', key2=None)

    def tearDown(self):
        super(S3HandlerTestURLEncodeDeletes, self).tearDown()
        s3_cleanup(self.bucket, self.session)

    def test_s3_delete_url_encode(self):
        """
        Tests S3 deletes.  The files used are the same as those generated
        by filegenerators_test.py, including the created s3 file.
        """
        key = self.bucket + '/a+b/foo'
        tasks = [FileInfo(
            src=key, src_type='s3', dest_type='local',
            operation_name='delete', size=0,
            service=self.service, endpoint=self.endpoint)]
        self.assertEqual(len(list_contents(self.bucket, self.session)), 1)
        self.s3_handler.call(tasks)
        self.assertEqual(len(list_contents(self.bucket, self.session)), 0)
Example no. 15
 def setUp(self):
     self.session = FakeSession()
     self.bucket = make_s3_files(self.session)
     self.file1 = self.bucket + "/" + "text1.txt"
     self.file2 = self.bucket + "/" + "another_directory/text2.txt"
     self.service = self.session.get_service("s3")
     self.endpoint = self.service.get_endpoint("us-east-1")
Example no. 16
 def setUp(self):
     self.session = FakeSession()
     self.bucket = make_s3_files(self.session)
     self.file1 = self.bucket + '/' + 'text1.txt'
     self.file2 = self.bucket + '/' + 'another_directory/text2.txt'
     self.service = self.session.get_service('s3')
     self.endpoint = self.service.get_endpoint('us-east-1')
Example no. 17
    def setUp(self):
        super(S3HandlerTestDownload, self).setUp()
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        params = {'region': 'us-east-1'}
        self.s3_handler = S3Handler(self.session, params)
        self.s3_handler_multi = S3Handler(
            self.session, params,
            runtime_config=runtime_config(multipart_threshold=10,
                                          multipart_chunksize=2))
        self.bucket = make_s3_files(self.session)
        self.s3_files = [self.bucket + '/text1.txt',
                         self.bucket + '/another_directory/text2.txt']
        directory1 = os.path.abspath('.') + os.sep + 'some_directory' + os.sep
        filename1 = directory1 + "text1.txt"
        directory2 = directory1 + 'another_directory' + os.sep
        filename2 = directory2 + "text2.txt"
        self.loc_files = [filename1, filename2]

        self.fail_session = FakeSession(connection_error=True)
        self.fail_session.s3 = self.session.s3
        self.s3_handler_multi_except = S3Handler(
            self.fail_session, params,
            runtime_config=runtime_config(
                multipart_threshold=10,
                multipart_chunksize=2))
Example no. 18
class S3HandlerTestURLEncodeDeletes(S3HandlerBaseTest):
    def setUp(self):
        super(S3HandlerTestURLEncodeDeletes, self).setUp()
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        params = {'region': 'us-east-1'}
        self.s3_handler = S3Handler(self.session, params)
        self.bucket = make_s3_files(self.session, key1='a+b/foo', key2=None)

    def tearDown(self):
        super(S3HandlerTestURLEncodeDeletes, self).tearDown()
        s3_cleanup(self.bucket, self.session)

    def test_s3_delete_url_encode(self):
        """
        Tests S3 deletes.  The files used are the same as those generated
        by filegenerators_test.py, including the created s3 file.
        """
        key = self.bucket + '/a+b/foo'
        tasks = [
            FileInfo(src=key,
                     src_type='s3',
                     dest_type='local',
                     operation_name='delete',
                     size=0,
                     service=self.service,
                     endpoint=self.endpoint)
        ]
        self.assertEqual(len(list_contents(self.bucket, self.session)), 1)
        self.s3_handler.call(tasks)
        self.assertEqual(len(list_contents(self.bucket, self.session)), 0)
Example no. 19
 def setUp(self):
     super(S3HandlerTestURLEncodeDeletes, self).setUp()
     self.session = FakeSession()
     self.service = self.session.get_service('s3')
     self.endpoint = self.service.get_endpoint('us-east-1')
     params = {'region': 'us-east-1'}
     self.s3_handler = S3Handler(self.session, params)
     self.bucket = make_s3_files(self.session, key1='a+b/foo', key2=None)
Example no. 20
 def setUp(self):
     super(CommandArchitectureTest, self).setUp()
     self.session = FakeSession()
     self.bucket = make_s3_files(self.session)
     self.loc_files = make_loc_files()
     self.output = StringIO()
     self.saved_stdout = sys.stdout
     sys.stdout = self.output
Example no. 21
class TestThrowsWarning(unittest.TestCase):
    def setUp(self):
        self.files = FileCreator()
        self.root = self.files.rootdir
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')

    def tearDown(self):
        self.files.remove_all()

    def test_no_warning(self):
        file_gen = FileGenerator(self.service, self.endpoint, '', False)
        self.files.create_file("foo.txt", contents="foo")
        full_path = os.path.join(self.root, "foo.txt")
        return_val = file_gen.triggers_warning(full_path)
        self.assertFalse(return_val)
        self.assertTrue(file_gen.result_queue.empty())

    def test_no_exists(self):
        file_gen = FileGenerator(self.service, self.endpoint, '', False)
        filename = os.path.join(self.root, 'file')
        return_val = file_gen.triggers_warning(filename)
        self.assertTrue(return_val)
        warning_message = file_gen.result_queue.get()
        self.assertEqual(warning_message.message,
                         ("warning: Skipping file %s. File does not exist." %
                          filename))

    def test_no_read_access(self):
        file_gen = FileGenerator(self.service, self.endpoint, '', False)
        self.files.create_file("foo.txt", contents="foo")
        full_path = os.path.join(self.root, "foo.txt")
        open_function = 'awscli.customizations.s3.filegenerator._open'
        with mock.patch(open_function) as mock_class:
            mock_class.side_effect = OSError()
            return_val = file_gen.triggers_warning(full_path)
            self.assertTrue(return_val)
        warning_message = file_gen.result_queue.get()
        self.assertEqual(warning_message.message,
                         ("warning: Skipping file %s. File/Directory is "
                          "not readable." % full_path))

    @unittest.skipIf(platform.system() not in ['Darwin', 'Linux'],
                     'Special files only supported on mac/linux')
    def test_is_special_file_warning(self):
        file_gen = FileGenerator(self.service, self.endpoint, '', False)
        file_path = os.path.join(self.files.rootdir, 'foo')
        # Use a socket as the special file.
        sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        sock.bind(file_path)
        return_val = file_gen.triggers_warning(file_path)
        self.assertTrue(return_val)
        warning_message = file_gen.result_queue.get()
        self.assertEqual(warning_message.message,
                         ("warning: Skipping file %s. File is character "
                          "special device, block special device, FIFO, or "
                          "socket." % file_path))
Example no. 22
class TestThrowsWarning(unittest.TestCase):
    def setUp(self):
        self.files = FileCreator()
        self.root = self.files.rootdir
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')

    def tearDown(self):
        self.files.remove_all()

    def test_no_warning(self):
        file_gen = FileGenerator(self.service, self.endpoint, '', False)
        self.files.create_file("foo.txt", contents="foo")
        full_path = os.path.join(self.root, "foo.txt")
        return_val = file_gen.triggers_warning(full_path)
        self.assertFalse(return_val)
        self.assertTrue(file_gen.result_queue.empty())

    def test_no_exists(self):
        file_gen = FileGenerator(self.service, self.endpoint, '', False)
        filename = os.path.join(self.root, 'file')
        return_val = file_gen.triggers_warning(filename)
        self.assertTrue(return_val)
        warning_message = file_gen.result_queue.get()
        self.assertEqual(
            warning_message.message,
            ("warning: Skipping file %s. File does not exist." % filename))

    def test_no_read_access(self):
        file_gen = FileGenerator(self.service, self.endpoint, '', False)
        self.files.create_file("foo.txt", contents="foo")
        full_path = os.path.join(self.root, "foo.txt")
        open_function = 'awscli.customizations.s3.filegenerator._open'
        with mock.patch(open_function) as mock_class:
            mock_class.side_effect = OSError()
            return_val = file_gen.triggers_warning(full_path)
            self.assertTrue(return_val)
        warning_message = file_gen.result_queue.get()
        self.assertEqual(warning_message.message,
                         ("warning: Skipping file %s. File/Directory is "
                          "not readable." % full_path))

    @unittest.skipIf(platform.system() not in ['Darwin', 'Linux'],
                     'Special files only supported on mac/linux')
    def test_is_special_file_warning(self):
        file_gen = FileGenerator(self.service, self.endpoint, '', False)
        file_path = os.path.join(self.files.rootdir, 'foo')
        # Use a socket as the special file.
        sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        sock.bind(file_path)
        return_val = file_gen.triggers_warning(file_path)
        self.assertTrue(return_val)
        warning_message = file_gen.result_queue.get()
        self.assertEqual(warning_message.message,
                         ("warning: Skipping file %s. File is character "
                          "special device, block special device, FIFO, or "
                          "socket." % file_path))
Example no. 23
 def setUp(self):
     self.environ = {}
     self.environ_patch = patch('os.environ', self.environ)
     self.environ_patch.start()
     self.session = FakeSession()
     self.mock = MagicMock()
     self.mock.get_config = MagicMock(return_value={'region': None})
     self.loc_files = make_loc_files()
     self.bucket = make_s3_files(self.session)
Example no. 24
 def setUp(self):
     self.local_file = six.text_type(os.path.abspath('.') + os.sep +
                                     'some_directory' + os.sep + 'text1.txt')
     self.local_dir = six.text_type(os.path.abspath('.') + os.sep +
                                    'some_directory' + os.sep)
     self.session = FakeSession()
     self.service = self.session.get_service('s3')
     self.endpoint = self.service.get_endpoint('us-east-1')
     self.files = make_loc_files()
Example no. 25
 def setUp(self):
     super(S3HandlerTestDeleteList, self).setUp()
     self.session = FakeSession()
     self.service = self.session.get_service('s3')
     self.endpoint = self.service.get_endpoint('us-east-1')
     params = {'region': 'us-east-1'}
     self.s3_handler = S3Handler(self.session, params)
     self.bucket = make_s3_files(self.session)
     self.loc_files = make_loc_files()
Example no. 26
 def test_endpoint(self):
     session = FakeSession()
     endpoint = get_endpoint(session.service,
                             region='us-west-1',
                             endpoint_url='URL',
                             verify=True)
     self.assertEqual(endpoint.region_name, 'us-west-1')
     self.assertEqual(endpoint.endpoint_url, 'URL')
     self.assertTrue(endpoint.verify)
Example no. 27
class S3HandlerTestMvS3Local(S3HandlerBaseTest):
    """
    This class tests the ability to move s3 objects.  The move
    operation uses a download then delete.
    """
    def setUp(self):
        super(S3HandlerTestMvS3Local, self).setUp()
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        params = {'region': 'us-east-1'}
        self.s3_handler = S3Handler(self.session, params)
        self.bucket = make_s3_files(self.session)
        self.s3_files = [
            self.bucket + '/text1.txt',
            self.bucket + '/another_directory/text2.txt'
        ]
        directory1 = os.path.abspath('.') + os.sep + 'some_directory' + os.sep
        filename1 = directory1 + "text1.txt"
        directory2 = directory1 + 'another_directory' + os.sep
        filename2 = directory2 + "text2.txt"
        self.loc_files = [filename1, filename2]

    def tearDown(self):
        super(S3HandlerTestMvS3Local, self).tearDown()
        clean_loc_files(self.loc_files)
        s3_cleanup(self.bucket, self.session)

    def test_move(self):
        # Create file info objects to perform move.
        tasks = []
        time = datetime.datetime.now()
        for i in range(len(self.s3_files)):
            tasks.append(
                FileInfo(src=self.s3_files[i],
                         src_type='s3',
                         dest=self.loc_files[i],
                         dest_type='local',
                         last_update=time,
                         operation_name='move',
                         size=0,
                         service=self.service,
                         endpoint=self.endpoint))
        # Perform the move.
        self.s3_handler.call(tasks)
        # Confirm that the files now exist.
        for filename in self.loc_files:
            self.assertTrue(os.path.exists(filename))
        # Ensure the contents are as expected.
        with open(self.loc_files[0], 'rb') as filename:
            self.assertEqual(filename.read(), b'This is a test.')
        with open(self.loc_files[1], 'rb') as filename:
            self.assertEqual(filename.read(), b'This is another test.')
        # Ensure the moved objects are no longer in the bucket (one unrelated object remains).
        self.assertEqual(len(list_contents(self.bucket, self.session)), 1)
Example no. 28
class TestIgnoreFilesLocally(unittest.TestCase):
    """
    This class tests the ability to ignore particular files.  This includes
    skipping symlinks when desired.
    """
    def setUp(self):
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        self.files = FileCreator()

    def tearDown(self):
        self.files.remove_all()

    def test_warning(self):
        path = os.path.join(self.files.rootdir, 'badsymlink')
        os.symlink('non-existent-file', path)
        filegenerator = FileGenerator(self.service, self.endpoint,
                                      '', True)
        self.assertTrue(filegenerator.should_ignore_file(path))

    def test_skip_symlink(self):
        filename = 'foo.txt'
        self.files.create_file(os.path.join(self.files.rootdir,
                               filename),
                               contents='foo.txt contents')
        sym_path = os.path.join(self.files.rootdir, 'symlink')
        os.symlink(filename, sym_path)
        filegenerator = FileGenerator(self.service, self.endpoint,
                                      '', False)
        self.assertTrue(filegenerator.should_ignore_file(sym_path))

    def test_no_skip_symlink(self):
        filename = 'foo.txt'
        path = self.files.create_file(os.path.join(self.files.rootdir,
                                                   filename),
                                      contents='foo.txt contents')
        sym_path = os.path.join(self.files.rootdir, 'symlink')
        os.symlink(path, sym_path)
        filegenerator = FileGenerator(self.service, self.endpoint,
                                      '', True)
        self.assertFalse(filegenerator.should_ignore_file(sym_path))
        self.assertFalse(filegenerator.should_ignore_file(path))

    def test_no_skip_symlink_dir(self):
        filename = 'dir'
        path = os.path.join(self.files.rootdir, 'dir/')
        os.mkdir(path)
        sym_path = os.path.join(self.files.rootdir, 'symlink')
        os.symlink(path, sym_path)
        filegenerator = FileGenerator(self.service, self.endpoint,
                                      '', True)
        self.assertFalse(filegenerator.should_ignore_file(sym_path))
        self.assertFalse(filegenerator.should_ignore_file(path))
Example no. 29
class TestIgnoreFilesLocally(unittest.TestCase):
    """
    This class tests the ability to ignore particular files.  This includes
    skipping symlinks when desired.
    """
    def setUp(self):
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        self.files = FileCreator()

    def tearDown(self):
        self.files.remove_all()

    def test_bad_symlink(self):
        path = os.path.join(self.files.rootdir, 'badsymlink')
        os.symlink('non-existent-file', path)
        filegenerator = FileGenerator(self.service, self.endpoint,
                                      '', True)
        self.assertFalse(filegenerator.should_ignore_file(path))

    def test_skip_symlink(self):
        filename = 'foo.txt'
        self.files.create_file(os.path.join(self.files.rootdir,
                               filename),
                               contents='foo.txt contents')
        sym_path = os.path.join(self.files.rootdir, 'symlink')
        os.symlink(filename, sym_path)
        filegenerator = FileGenerator(self.service, self.endpoint,
                                      '', False)
        self.assertTrue(filegenerator.should_ignore_file(sym_path))

    def test_no_skip_symlink(self):
        filename = 'foo.txt'
        path = self.files.create_file(os.path.join(self.files.rootdir,
                                                   filename),
                                      contents='foo.txt contents')
        sym_path = os.path.join(self.files.rootdir, 'symlink')
        os.symlink(path, sym_path)
        filegenerator = FileGenerator(self.service, self.endpoint,
                                      '', True)
        self.assertFalse(filegenerator.should_ignore_file(sym_path))
        self.assertFalse(filegenerator.should_ignore_file(path))

    def test_no_skip_symlink_dir(self):
        filename = 'dir'
        path = os.path.join(self.files.rootdir, 'dir/')
        os.mkdir(path)
        sym_path = os.path.join(self.files.rootdir, 'symlink')
        os.symlink(path, sym_path)
        filegenerator = FileGenerator(self.service, self.endpoint,
                                      '', True)
        self.assertFalse(filegenerator.should_ignore_file(sym_path))
        self.assertFalse(filegenerator.should_ignore_file(path))
Example no. 30
 def setUp(self):
     super(S3HandlerTestMvLocalS3, self).setUp()
     self.session = FakeSession()
     params = {'region': 'us-east-1', 'acl': ['private']}
     self.s3_handler = S3Handler(self.session, params)
     self.bucket = create_bucket(self.session)
     self.loc_files = make_loc_files()
     self.s3_files = [
         self.bucket + '/text1.txt',
         self.bucket + '/another_directory/text2.txt'
     ]
Example no. 31
class S3HandlerTestMvLocalS3(S3HandlerBaseTest):
    """
    This class tests the ability to move s3 objects.  The move
    operation uses an upload then delete.
    """
    def setUp(self):
        super(S3HandlerTestMvLocalS3, self).setUp()
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        params = {'region': 'us-east-1', 'acl': ['private'], 'quiet': True}
        self.s3_handler = S3Handler(self.session, params)
        self.bucket = create_bucket(self.session)
        self.loc_files = make_loc_files()
        self.s3_files = [self.bucket + '/text1.txt',
                         self.bucket + '/another_directory/text2.txt']

    def tearDown(self):
        super(S3HandlerTestMvLocalS3, self).tearDown()
        clean_loc_files(self.loc_files)
        s3_cleanup(self.bucket, self.session)

    def test_move_unicode(self):
        self.bucket2 = make_s3_files(self.session, key1=u'\u2713')
        tasks = [FileInfo(
            src=self.bucket2 + '/' + u'\u2713',
            src_type='s3',
            dest=self.bucket + '/' + u'\u2713',
            dest_type='s3', operation_name='move',
            size=0,
            service=self.service,
            endpoint=self.endpoint,
        )]
        self.s3_handler.call(tasks)
        self.assertEqual(len(list_contents(self.bucket, self.session)), 1)

    def test_move(self):
        # Create file info objects to perform move.
        files = [self.loc_files[0], self.loc_files[1]]
        tasks = []
        for i in range(len(files)):
            tasks.append(FileInfo(
                src=self.loc_files[i], src_type='local',
                dest=self.s3_files[i], dest_type='s3',
                operation_name='move', size=0,
                service=self.service,
                endpoint=self.endpoint))
        # Perform the move.
        self.s3_handler.call(tasks)
        # Confirm the files were uploaded.
        self.assertEqual(len(list_contents(self.bucket, self.session)), 2)
        # Confirm local files do not exist.
        for filename in files:
            self.assertFalse(os.path.exists(filename))
Example no. 32
    def setUp(self):
        super(S3HandlerTestDownload, self).setUp()
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        params = {'region': 'us-east-1'}
        self.s3_handler = S3Handler(self.session, params)
        self.s3_handler_multi = S3Handler(self.session, params,
                                          multi_threshold=10, chunksize=2)
        self.bucket = make_s3_files(self.session)
        self.s3_files = [self.bucket + '/text1.txt',
                         self.bucket + '/another_directory/text2.txt']
        directory1 = os.path.abspath('.') + os.sep + 'some_directory' + os.sep
        filename1 = directory1 + "text1.txt"
        directory2 = directory1 + 'another_directory' + os.sep
        filename2 = directory2 + "text2.txt"
        self.loc_files = [filename1, filename2]

        self.fail_session = FakeSession(connection_error=True)
        self.fail_session.s3 = self.session.s3
        self.s3_handler_multi_except = S3Handler(self.fail_session, params,
                                                 multi_threshold=10,
                                                 chunksize=2)
Example no. 33
 def setUp(self):
     super(S3HandlerTestUpload, self).setUp()
     self.session = FakeSession()
     self.service = self.session.get_service('s3')
     self.endpoint = self.service.get_endpoint('us-east-1')
     params = {'region': 'us-east-1', 'acl': ['private']}
     self.s3_handler = S3Handler(self.session, params)
     self.s3_handler_multi = S3Handler(self.session, multi_threshold=10,
                                       chunksize=2,
                                       params=params)
     self.bucket = create_bucket(self.session)
     self.loc_files = make_loc_files()
     self.s3_files = [self.bucket + '/text1.txt',
                      self.bucket + '/another_directory/text2.txt']
Example no. 34
class S3HandlerTestMvS3Local(S3HandlerBaseTest):
    """
    This class tests the ability to move s3 objects.  The move
    operation uses a download then delete.
    """
    def setUp(self):
        super(S3HandlerTestMvS3Local, self).setUp()
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        params = {'region': 'us-east-1'}
        self.s3_handler = S3Handler(self.session, params)
        self.bucket = make_s3_files(self.session)
        self.s3_files = [self.bucket + '/text1.txt',
                         self.bucket + '/another_directory/text2.txt']
        directory1 = os.path.abspath('.') + os.sep + 'some_directory' + os.sep
        filename1 = directory1 + "text1.txt"
        directory2 = directory1 + 'another_directory' + os.sep
        filename2 = directory2 + "text2.txt"
        self.loc_files = [filename1, filename2]

    def tearDown(self):
        super(S3HandlerTestMvS3Local, self).tearDown()
        clean_loc_files(self.loc_files)
        s3_cleanup(self.bucket, self.session)

    def test_move(self):
        # Create file info objects to perform move.
        tasks = []
        time = datetime.datetime.now()
        for i in range(len(self.s3_files)):
            tasks.append(FileInfo(
                src=self.s3_files[i], src_type='s3',
                dest=self.loc_files[i], dest_type='local',
                last_update=time, operation_name='move',
                size=0,
                service=self.service,
                endpoint=self.endpoint))
        # Perform the move.
        self.s3_handler.call(tasks)
        # Confirm that the files now exist.
        for filename in self.loc_files:
            self.assertTrue(os.path.exists(filename))
        # Ensure the contents are as expected.
        with open(self.loc_files[0], 'rb') as filename:
            self.assertEqual(filename.read(), b'This is a test.')
        with open(self.loc_files[1], 'rb') as filename:
            self.assertEqual(filename.read(), b'This is another test.')
        # Ensure the moved objects are no longer in the bucket (one unrelated object remains).
        self.assertEqual(len(list_contents(self.bucket, self.session)), 1)
Example no. 35
class S3HandlerTestMvS3S3(S3HandlerBaseTest):
    """
    This class tests the ability to move s3 objects.  The move
    operation uses a copy then delete.
    """
    def setUp(self):
        super(S3HandlerTestMvS3S3, self).setUp()
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        params = {'region': 'us-east-1', 'acl': ['private']}
        self.s3_handler = S3Handler(self.session, params)
        self.bucket = make_s3_files(self.session)
        self.bucket2 = create_bucket(self.session)
        self.s3_files = [
            self.bucket + '/text1.txt',
            self.bucket + '/another_directory/text2.txt'
        ]
        self.s3_files2 = [
            self.bucket2 + '/text1.txt',
            self.bucket2 + '/another_directory/text2.txt'
        ]

    def tearDown(self):
        super(S3HandlerTestMvS3S3, self).tearDown()
        s3_cleanup(self.bucket, self.session)
        s3_cleanup(self.bucket2, self.session)

    def test_move(self):
        # Confirm there are no objects in the destination bucket.
        self.assertEqual(len(list_contents(self.bucket2, self.session)), 0)
        # Create file info objects to perform move.
        tasks = []
        for i in range(len(self.s3_files)):
            tasks.append(
                FileInfo(src=self.s3_files[i],
                         src_type='s3',
                         dest=self.s3_files2[i],
                         dest_type='s3',
                         operation_name='move',
                         size=0,
                         service=self.service,
                         endpoint=self.endpoint))
        # Perform the move.
        self.s3_handler.call(tasks)
        # Confirm the files were moved.  The original bucket had three
        # objects. Only two were moved.
        self.assertEqual(len(list_contents(self.bucket, self.session)), 1)
        self.assertEqual(len(list_contents(self.bucket2, self.session)), 2)
Example no. 36
 def setUp(self):
     self.session = FakeSession()
     self.service = self.session.get_service('s3')
     self.endpoint = self.service.get_endpoint('us-east-1')
     self.files = FileCreator()
     # List of local filenames.
     self.filenames = []
     self.root = self.files.rootdir
     self.bucket = 'bucket/'
     filename_1 = self.files.create_file('foo.txt',
                                         contents='foo.txt contents')
     self.filenames.append(filename_1)
     nested_dir = os.path.join(self.root, 'realfiles')
     os.mkdir(nested_dir)
     filename_2 = self.files.create_file(os.path.join(nested_dir,
                                                      'bar.txt'),
                                         contents='bar.txt contents')
     self.filenames.append(filename_2)
     # Names of symlinks.
     self.symlinks = []
     # Names of files if symlinks are followed.
     self.symlink_files = []
     # Create symlink to file foo.txt.
     symlink_1 = os.path.join(self.root, 'symlink_1')
     os.symlink(filename_1, symlink_1)
     self.symlinks.append(symlink_1)
     self.symlink_files.append(symlink_1)
     # Create a symlink to a file that does not exist.
     symlink_2 = os.path.join(self.root, 'symlink_2')
     os.symlink('non-existent-file', symlink_2)
     self.symlinks.append(symlink_2)
     # Create a symlink to directory realfiles
     symlink_3 = os.path.join(self.root, 'symlink_3')
     os.symlink(nested_dir, symlink_3)
     self.symlinks.append(symlink_3)
     self.symlink_files.append(os.path.join(symlink_3, 'bar.txt'))
Example no. 37
 def setUp(self):
     super(S3HandlerTestUpload, self).setUp()
     self.session = FakeSession()
     params = {'region': 'us-east-1', 'acl': ['private']}
     self.s3_handler = S3Handler(self.session, params)
     self.s3_handler_multi = S3Handler(self.session,
                                       multi_threshold=10,
                                       chunksize=2,
                                       params=params)
     self.bucket = create_bucket(self.session)
     self.loc_files = make_loc_files()
     self.s3_files = [
         self.bucket + '/text1.txt',
         self.bucket + '/another_directory/text2.txt'
     ]
Example no. 38
class S3HandlerExceptionMultiTaskTest(S3HandlerBaseTest):
    """
    This tests the ability to handle multipart upload exceptions.
    This includes a standard error stemming from an operation on
    a nonexistent bucket, a connection error, and an md5 error.
    """
    def setUp(self):
        super(S3HandlerExceptionMultiTaskTest, self).setUp()
        self.session = FakeSession(True, True)
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        params = {'region': 'us-east-1'}
        self.s3_handler_multi = S3Handler(self.session,
                                          params,
                                          multi_threshold=10,
                                          chunksize=2)
        self.bucket = create_bucket(self.session)
        self.loc_files = make_loc_files()
        self.s3_files = [
            self.bucket + '/text1.txt',
            self.bucket + '/another_directory/text2.txt'
        ]

    def tearDown(self):
        super(S3HandlerExceptionMultiTaskTest, self).tearDown()
        clean_loc_files(self.loc_files)
        s3_cleanup(self.bucket, self.session)

    def test_multi_upload(self):
        files = [self.loc_files[0], self.loc_files[1]]
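        # The second destination truncates the bucket name by one character,
        # so the upload targets a nonexistent bucket and fails.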
        fail_s3_files = [
            self.bucket + '/text1.txt',
            self.bucket[:-1] + '/another_directory/text2.txt'
        ]
        tasks = []
        for i in range(len(files)):
            tasks.append(
                FileInfo(src=self.loc_files[i],
                         dest=fail_s3_files[i],
                         size=15,
                         operation_name='upload',
                         service=self.service,
                         endpoint=self.endpoint))
        self.s3_handler_multi.call(tasks)
Example no. 39
class S3HandlerTestMvS3S3(S3HandlerBaseTest):
    """
    This class tests the ability to move s3 objects.  The move
    operation uses a copy then delete.
    """
    def setUp(self):
        super(S3HandlerTestMvS3S3, self).setUp()
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        params = {'region': 'us-east-1', 'acl': ['private']}
        self.s3_handler = S3Handler(self.session, params)
        self.bucket = make_s3_files(self.session)
        self.bucket2 = create_bucket(self.session)
        self.s3_files = [self.bucket + '/text1.txt',
                         self.bucket + '/another_directory/text2.txt']
        self.s3_files2 = [self.bucket2 + '/text1.txt',
                          self.bucket2 + '/another_directory/text2.txt']

    def tearDown(self):
        super(S3HandlerTestMvS3S3, self).tearDown()
        s3_cleanup(self.bucket, self.session)
        s3_cleanup(self.bucket2, self.session)

    def test_move(self):
        # Confirm there are no objects in the destination bucket.
        self.assertEqual(len(list_contents(self.bucket2, self.session)), 0)
        # Create file info objects to perform move.
        tasks = []
        for i in range(len(self.s3_files)):
            tasks.append(FileInfo(
                src=self.s3_files[i], src_type='s3',
                dest=self.s3_files2[i], dest_type='s3',
                operation_name='move', size=0,
                service=self.service,
                endpoint=self.endpoint))
        # Perform the move.
        self.s3_handler.call(tasks)
        # Confirm the files were moved.  The original bucket had three
        # objects. Only two were moved.
        self.assertEqual(len(list_contents(self.bucket, self.session)), 1)
        self.assertEqual(len(list_contents(self.bucket2, self.session)), 2)
Example no. 40
class S3HandlerTestBucket(S3HandlerBaseTest):
    """
    Test the ability to make a bucket then remove it.
    """
    def setUp(self):
        super(S3HandlerTestBucket, self).setUp()
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        params = {'region': 'us-east-1'}
        self.s3_handler = S3Handler(self.session, params)
        self.bucket = None

    def tearDown(self):
        super(S3HandlerTestBucket, self).tearDown()
        s3_cleanup(self.bucket, self.session)

    def test_bucket(self):
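        # Random digits on either side of 'mybucket' give the test bucket a quasi-unique name.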
        rand1 = random.randrange(5000)
        rand2 = random.randrange(5000)
        self.bucket = str(rand1) + 'mybucket' + str(rand2) + '/'
        orig_number_buckets = len(list_buckets(self.session))

        file_info = FileInfo(
            src=self.bucket,
            operation_name='make_bucket',
            size=0,
            service=self.service,
            endpoint=self.endpoint)
        self.s3_handler.call([file_info])
        number_buckets = len(list_buckets(self.session))
        self.assertEqual(orig_number_buckets + 1, number_buckets)

        file_info = FileInfo(
            src=self.bucket,
            operation_name='remove_bucket',
            size=0,
            service=self.service,
            endpoint=self.endpoint)
        self.s3_handler.call([file_info])
        number_buckets = len(list_buckets(self.session))
        self.assertEqual(orig_number_buckets, number_buckets)
Example no. 41
class S3HandlerExceptionSingleTaskTest(S3HandlerBaseTest):
    """
    This tests the ability to handle connection and md5 exceptions.
    The command used in this general test is a put command.
    """
    def setUp(self):
        super(S3HandlerExceptionSingleTaskTest, self).setUp()
        self.session = FakeSession(True, True)
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        params = {'region': 'us-east-1'}
        self.s3_handler = S3Handler(self.session, params)
        self.bucket = create_bucket(self.session)
        self.loc_files = make_loc_files()
        self.s3_files = [
            self.bucket + '/text1.txt',
            self.bucket + '/another_directory/text2.txt'
        ]

    def tearDown(self):
        super(S3HandlerExceptionSingleTaskTest, self).tearDown()
        clean_loc_files(self.loc_files)
        s3_cleanup(self.bucket, self.session)

    def test_upload(self):
        # Confirm there are no objects in the bucket.
        self.assertEqual(len(list_contents(self.bucket, self.session)), 0)
        # Create file info objects to perform upload.
        files = [self.loc_files[0], self.loc_files[1]]
        tasks = []
        for i in range(len(files)):
            tasks.append(
                FileInfo(src=self.loc_files[i],
                         dest=self.s3_files[i],
                         operation_name='upload',
                         size=0,
                         service=self.service,
                         endpoint=self.endpoint))
        # Perform the upload.
        self.s3_handler.call(tasks)
        # Confirm despite the exceptions, the files were uploaded.
        self.assertEqual(len(list_contents(self.bucket, self.session)), 2)
Example no. 42
class S3HandlerTestBucket(S3HandlerBaseTest):
    """
    Test the ability to make a bucket then remove it.
    """
    def setUp(self):
        super(S3HandlerTestBucket, self).setUp()
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        self.params = {'region': 'us-east-1'}
        self.bucket = None

    def tearDown(self):
        super(S3HandlerTestBucket, self).tearDown()
        s3_cleanup(self.bucket, self.session)

    def test_bucket(self):
        rand1 = random.randrange(5000)
        rand2 = random.randrange(5000)
        self.bucket = str(rand1) + 'mybucket' + str(rand2) + '/'
        orig_number_buckets = len(list_buckets(self.session))

        file_info = FileInfo(
            src=self.bucket,
            operation_name='make_bucket',
            size=0,
            service=self.service,
            endpoint=self.endpoint)
        S3Handler(self.session, self.params).call([file_info])
        number_buckets = len(list_buckets(self.session))
        self.assertEqual(orig_number_buckets + 1, number_buckets)

        file_info = FileInfo(
            src=self.bucket,
            operation_name='remove_bucket',
            size=0,
            service=self.service,
            endpoint=self.endpoint)
        S3Handler(self.session, self.params).call([file_info])
        number_buckets = len(list_buckets(self.session))
        self.assertEqual(orig_number_buckets, number_buckets)
Example no. 43
class S3HandlerExceptionMultiTaskTest(S3HandlerBaseTest):
    """
    This tests the ability to handle multipart upload exceptions.
    This includes a standard error stemming from an operation on
    a nonexistent bucket, a connection error, and an md5 error.
    """
    def setUp(self):
        super(S3HandlerExceptionMultiTaskTest, self).setUp()
        self.session = FakeSession(True, True)
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        params = {'region': 'us-east-1', 'quiet': True}
        self.s3_handler_multi = S3Handler(
            self.session, params,
            runtime_config=runtime_config(
                multipart_threshold=10, multipart_chunksize=2))
        self.bucket = create_bucket(self.session)
        self.loc_files = make_loc_files()
        self.s3_files = [self.bucket + '/text1.txt',
                         self.bucket + '/another_directory/text2.txt']

    def tearDown(self):
        super(S3HandlerExceptionMultiTaskTest, self).tearDown()
        clean_loc_files(self.loc_files)
        s3_cleanup(self.bucket, self.session)

    def test_multi_upload(self):
        files = [self.loc_files[0], self.loc_files[1]]
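        # The second destination truncates the bucket name by one character,
        # so the upload targets a nonexistent bucket and fails.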
        fail_s3_files = [self.bucket + '/text1.txt',
                         self.bucket[:-1] + '/another_directory/text2.txt']
        tasks = []
        for i in range(len(files)):
            tasks.append(FileInfo(
                src=self.loc_files[i],
                dest=fail_s3_files[i], size=15,
                operation_name='upload',
                service=self.service,
                endpoint=self.endpoint))
        self.s3_handler_multi.call(tasks)
Example no. 44
class S3HandlerExceptionSingleTaskTest(S3HandlerBaseTest):
    """
    This tests the ability to handle connection and md5 exceptions.
    The command used in this general test is a put command.
    """
    def setUp(self):
        super(S3HandlerExceptionSingleTaskTest, self).setUp()
        self.session = FakeSession(True, True)
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        params = {'region': 'us-east-1'}
        self.s3_handler = S3Handler(self.session, params)
        self.bucket = create_bucket(self.session)
        self.loc_files = make_loc_files()
        self.s3_files = [self.bucket + '/text1.txt',
                         self.bucket + '/another_directory/text2.txt']

    def tearDown(self):
        super(S3HandlerExceptionSingleTaskTest, self).tearDown()
        clean_loc_files(self.loc_files)
        s3_cleanup(self.bucket, self.session)

    def test_upload(self):
        # Confirm there are no objects in the bucket.
        self.assertEqual(len(list_contents(self.bucket, self.session)), 0)
        # Create file info objects to perform upload.
        files = [self.loc_files[0], self.loc_files[1]]
        tasks = []
        for i in range(len(files)):
            tasks.append(FileInfo(src=self.loc_files[i],
                                  dest=self.s3_files[i],
                                  operation_name='upload', size=0,
                                  service=self.service,
                                  endpoint=self.endpoint))
        # Perform the upload.
        self.s3_handler.call(tasks)
        # Confirm despite the exceptions, the files were uploaded.
        self.assertEqual(len(list_contents(self.bucket, self.session)), 2)
Example no. 45
 def setUp(self):
     self.session = FakeSession()
     self.service = self.session.get_service('s3')
     self.endpoint = self.service.get_endpoint('us-east-1')
     self.files = FileCreator()
     # List of local filenames.
     self.filenames = []
     self.root = self.files.rootdir
     self.bucket = 'bucket/'
     filename_1 = self.files.create_file('foo.txt',
                                         contents='foo.txt contents')
     self.filenames.append(filename_1)
     nested_dir = os.path.join(self.root, 'realfiles')
     os.mkdir(nested_dir)
     filename_2 = self.files.create_file(os.path.join(nested_dir,
                                                      'bar.txt'),
                                         contents='bar.txt contents')
     self.filenames.append(filename_2)
     # Names of symlinks.
     self.symlinks = []
     # Names of files if symlinks are followed.
     self.symlink_files = []
     # Create symlink to file foo.txt.
     symlink_1 = os.path.join(self.root, 'symlink_1')
     os.symlink(filename_1, symlink_1)
     self.symlinks.append(symlink_1)
     self.symlink_files.append(symlink_1)
     # Create a symlink to a file that does not exist.
     symlink_2 = os.path.join(self.root, 'symlink_2')
     os.symlink('non-existent-file', symlink_2)
     self.symlinks.append(symlink_2)
     # Create a symlink to directory realfiles
     symlink_3 = os.path.join(self.root, 'symlink_3')
     os.symlink(nested_dir, symlink_3)
     self.symlinks.append(symlink_3)
     self.symlink_files.append(os.path.join(symlink_3, 'bar.txt'))
Example no. 46
class CommandParametersTest(unittest.TestCase):
    def setUp(self):
        self.environ = {}
        self.environ_patch = patch('os.environ', self.environ)
        self.environ_patch.start()
        self.session = FakeSession()
        self.mock = MagicMock()
        self.mock.get_config = MagicMock(return_value={'region': None})
        self.loc_files = make_loc_files()
        self.bucket = make_s3_files(self.session)

    def tearDown(self):
        self.environ_patch.stop()
        clean_loc_files(self.loc_files)
        s3_cleanup(self.bucket, self.session)

    def test_add_paths(self):
        """
        This ensures that the paths are assigned properly in the
        class's parameters dictionary.
        """
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        path1 = [s3_file, 's3://some_file']
        path2 = [s3_file]
        cmd_params = CommandParameters(self.session, 'cp', {})
        cmd_params2 = CommandParameters(self.session, 'rm', {})
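        # With no parsed CLI arguments, check_region falls back to the region
        # in the session's config (compare ref_params below).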
        cmd_params.check_region([])
        cmd_params2.check_region([])
        cmd_params.add_paths(path1)
        cmd_params2.add_paths(path2)
        ref_params = {
            'dir_op': False,
            'src': s3_file,
            'dest': 's3://some_file',
            'region': self.session.get_config()['region'],
            'paths_type': 's3s3'
        }
        self.assertEqual(cmd_params.parameters, ref_params)
        ref_params2 = {
            'dir_op': False,
            'src': s3_file,
            'dest': s3_file,
            'region': self.session.get_config()['region'],
            'paths_type': 's3'
        }
        self.assertEqual(cmd_params2.parameters, ref_params2)

    def test_check_path_type_pass(self):
        """
        This tests the class's ability to determine whether the correct
        path types have been passed for a particular command.  It tests every
        possible combination that is correct for every command.
        """
        cmds = {
            'cp': ['locals3', 's3s3', 's3local'],
            'mv': ['locals3', 's3s3', 's3local'],
            'rm': ['s3'],
            'ls': ['s3'],
            'mb': ['s3'],
            'rb': ['s3'],
            'sync': ['locals3', 's3s3', 's3local']
        }
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        local_file = self.loc_files[0]

        combos = {
            's3s3': [s3_file, s3_file],
            's3local': [s3_file, local_file],
            'locals3': [local_file, s3_file],
            's3': [s3_file],
            'local': [local_file],
            'locallocal': [local_file, local_file]
        }

        for cmd in cmds.keys():
            cmd_param = CommandParameters(self.session, cmd, {})
            cmd_param.check_region([])
            correct_paths = cmds[cmd]
            for path_args in correct_paths:
                cmd_param.check_path_type(combos[path_args])

    def test_check_path_type_fail(self):
        """
        This tests the class's ability to determine whether the correct
        path types have been passed for a particular command.  It tests every
        possible combination that is incorrect for every command.
        """

        cmds = {
            'cp': ['local', 'locallocal', 's3'],
            'mv': ['local', 'locallocal', 's3'],
            'rm': ['local', 'locallocal', 's3s3', 'locals3', 's3local'],
            'ls': ['local', 'locallocal', 's3s3', 'locals3', 's3local'],
            'sync': ['local', 'locallocal', 's3'],
            'mb': ['local', 'locallocal', 's3s3', 'locals3', 's3local'],
            'rb': ['local', 'locallocal', 's3s3', 'locals3', 's3local']
        }
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        local_file = self.loc_files[0]

        combos = {
            's3s3': [s3_file, s3_file],
            's3local': [s3_file, local_file],
            'locals3': [local_file, s3_file],
            's3': [s3_file],
            'local': [local_file],
            'locallocal': [local_file, local_file]
        }

        for cmd in cmds.keys():
            cmd_param = CommandParameters(self.session, cmd, {})
            cmd_param.check_region([])
            wrong_paths = cmds[cmd]
            for path_args in wrong_paths:
                with self.assertRaises(TypeError):
                    cmd_param.check_path_type(combos[path_args])

    def test_check_src_path_pass(self):
        """
        This tests to see if all of the checks on the source path work.  It
        does so by testing if s3 objects and prefixes exist as well as
        local files and directories.  All of these should not throw an
        exception.
        """
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        s3_prefix = 's3://' + self.bucket
        local_dir = self.loc_files[3]

        # :var files: a list of tuples where the first element is a single
        #     element list of file paths. The second element is a boolean
        #     representing if the operation is a directory operation.
        files = [([s3_file], False), ([local_file], False),
                 ([s3_prefix], True), ([local_dir], True)]

        parameters = {}
        for filename in files:
            parameters['dir_op'] = filename[1]
            cmd_parameter = CommandParameters(self.session, 'put', parameters)
            cmd_parameter.check_region([])
            cmd_parameter.check_src_path(filename[0])

    def test_check_src_path_fail(self):
        """
        This tests to see if all of the checks on the source path work.  It
        does so by testing if s3 objects and prefixes do not exist as well
        as local files and directories.  All of these should throw an
        exception.
        """
        local_file = self.loc_files[0]
        local_dir = self.loc_files[3]
        fake_s3_file = 's3://' + self.bucket + '/' + 'text1.tx'
        fake_local_file = local_file[:-1]
        fake_s3_prefix = 's3://' + self.bucket + '/' + 'fake/'
        fake_local_dir = local_dir + os.sep + 'fake' + os.sep

        # :var files: a list of tuples where the first element is a single
        #     element list of file paths. The second element is a boolean
        #     representing if the operation is a directory operation.
        files = [([fake_s3_file], False), ([fake_local_file], False),
                 ([fake_s3_prefix], True), ([local_file], True),
                 ([local_dir], False), ([fake_s3_file + 'dag'], False)]

        parameters = {}
        for filename in files:
            parameters['dir_op'] = filename[1]
            cmd_parameter = CommandParameters(self.session, 'put', parameters)
            cmd_parameter.check_region([])
            with self.assertRaises(Exception):
                cmd_parameter.check_src_path(filename[0])

    def test_check_force(self):
        """
        This checks to make sure that the force parameter is exercised.
        If it is, the delete command will fail because the bucket is
        empty, and the resulting error will be caught.
        """
        cmd_params = CommandParameters(self.session, 'rb', {'force': True})
        cmd_params.parameters['src'] = 's3://mybucket'
        cmd_params.check_force(None, None)

    def test_region(self):
        """
        This tests the ability to specify the region, and that an error
        is thrown if a region is never specified via an environment
        variable, the config file, or a parsed global.
        """
        cmd_params = CommandParameters(self.session, 'mb', {})
        parser = argparse.ArgumentParser()
        parser.add_argument('--region', nargs=1)
        parser.add_argument('--test', action='store_true')
        parsed_args = parser.parse_args(['--region', 'eu-west-1'])
        cmd_params.check_region(parsed_args)
        self.assertEqual(cmd_params.parameters['region'][0], 'eu-west-1')

        cmd_params2 = CommandParameters(self.mock, 'mb', {})
        parsed_args2 = parser.parse_args(['--test'])
        with self.assertRaises(Exception):
            cmd_params2.check_region(parsed_args2)
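
For context, here is a minimal sketch of the kind of path-type check the two path-type tests above exercise, assuming a per-command table of accepted path-type sequences. The table, the cmd attribute, and the helper are illustrative assumptions, not the actual awscli implementation:

def check_path_type(self, paths):
    # Hypothetical sketch: map each command to the path-type
    # sequences it accepts, then verify the supplied paths match.
    templates = {
        'cp': [['s3', 's3'], ['local', 's3'], ['s3', 'local']],
        'mv': [['s3', 's3'], ['local', 's3'], ['s3', 'local']],
        'rm': [['s3']],
        'ls': [['s3']],
        'sync': [['s3', 's3'], ['local', 's3'], ['s3', 'local']],
        'mb': [['s3']],
        'rb': [['s3']],
    }
    # Classify each path: anything starting with 's3://' is an s3
    # path; everything else is treated as local.
    path_types = ['s3' if p.startswith('s3://') else 'local'
                  for p in paths]
    if path_types not in templates[self.cmd]:
        raise TypeError('Invalid path arguments for %s: %s'
                        % (self.cmd, ', '.join(paths)))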
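
Similarly, a hedged sketch of the local-file side of check_src_path, matching what the pass/fail source-path tests above pin down. The function is an assumption; the real check also covers s3 objects and prefixes:

import os

def check_local_src_path(path, dir_op):
    # Directory operations require an existing directory; single-file
    # operations require an existing plain file.  A file passed with
    # dir_op (or a directory without it) must raise.
    if dir_op:
        if not os.path.isdir(path):
            raise Exception('Directory does not exist: %s' % path)
    elif not os.path.isfile(path):
        raise Exception('File does not exist: %s' % path)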
Example n. 47
class TestStreams(S3HandlerBaseTest):
    def setUp(self):
        super(TestStreams, self).setUp()
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        self.params = {'is_stream': True, 'region': 'us-east-1'}

    def test_pull_from_stream(self):
        s3handler = S3StreamHandler(self.session, self.params, chunksize=2)
        input_to_stdin = b'This is a test'
        size = len(input_to_stdin)
        # Retrieve the entire string.
        with MockStdIn(input_to_stdin):
            payload, is_amount_requested = s3handler._pull_from_stream(size)
            data = payload.read()
            self.assertTrue(is_amount_requested)
            self.assertEqual(data, input_to_stdin)
        # Ensure the function exits when there is nothing to read.
        with MockStdIn():
            payload, is_amount_requested = s3handler._pull_from_stream(size)
            data = payload.read()
            self.assertFalse(is_amount_requested)
            self.assertEqual(data, b'')
        # Ensure the function does not grab too much out of stdin.
        with MockStdIn(input_to_stdin):
            payload, is_amount_requested = s3handler._pull_from_stream(size-2)
            data = payload.read()
            self.assertTrue(is_amount_requested)
            self.assertEqual(data, input_to_stdin[:-2])
            # Retrieve the rest of standard in.
            payload, is_amount_requested = s3handler._pull_from_stream(size)
            data = payload.read()
            self.assertFalse(is_amount_requested)
            self.assertEqual(data, input_to_stdin[-2:])

    def test_upload_stream_not_multipart_task(self):
        s3handler = S3StreamHandler(self.session, self.params)
        s3handler.executor = mock.Mock()
        fileinfos = [FileInfo('filename', operation_name='upload',
                              is_stream=True, size=0)]
        with MockStdIn(b'bar'):
            s3handler._enqueue_tasks(fileinfos)
        submitted_tasks = s3handler.executor.submit.call_args_list
        # No multipart upload should have been submitted.
        self.assertEqual(len(submitted_tasks), 1)
        self.assertEqual(submitted_tasks[0][0][0].payload.read(),
                         b'bar')

    def test_upload_stream_is_multipart_task(self):
        s3handler = S3StreamHandler(self.session, self.params,
                                    multi_threshold=1)
        s3handler.executor = mock.Mock()
        fileinfos = [FileInfo('filename', operation_name='upload',
                              is_stream=True, size=0)]
        with MockStdIn(b'bar'):
            s3handler._enqueue_tasks(fileinfos)
        submitted_tasks = s3handler.executor.submit.call_args_list
        # This should be a multipart upload so multiple tasks
        # should have been submitted.
        self.assertEqual(len(submitted_tasks), 4)
        self.assertEqual(submitted_tasks[1][0][0]._payload.read(),
                         b'b')
        self.assertEqual(submitted_tasks[2][0][0]._payload.read(),
                         b'ar')

    def test_upload_stream_with_expected_size(self):
        self.params['expected_size'] = 100000
        # With an expected size this large, the chunksize of 2 will have
        # to change.
        s3handler = S3StreamHandler(self.session, self.params, chunksize=2)
        s3handler.executor = mock.Mock()
        fileinfo = FileInfo('filename', operation_name='upload',
                            is_stream=True)
        with MockStdIn(b'bar'):
            s3handler._enqueue_multipart_upload_tasks(fileinfo, b'')
        submitted_tasks = s3handler.executor.submit.call_args_list
        # Determine what the chunksize was changed to from one of the
        # UploadPartTasks.
        changed_chunk_size = submitted_tasks[1][0][0]._chunk_size
        # The new chunksize should keep the total part count under 1000.
        self.assertTrue(100000/changed_chunk_size < 1000)

    def test_upload_stream_enqueue_upload_task(self):
        s3handler = S3StreamHandler(self.session, self.params)
        s3handler.executor = mock.Mock()
        fileinfo = FileInfo('filename', operation_name='upload',
                            is_stream=True)
        stdin_input = b'This is a test'
        with MockStdIn(stdin_input):
            num_parts = s3handler._enqueue_upload_tasks(None, 2, mock.Mock(),
                                                        fileinfo,
                                                        UploadPartTask)
        submitted_tasks = s3handler.executor.submit.call_args_list
        # Ensure the returned number of parts is correct.
        self.assertEqual(num_parts, len(submitted_tasks) + 1)
        # Ensure the number of tasks submitted is as expected.
        self.assertEqual(len(submitted_tasks), 8)
        index = 0
        for i in range(len(submitted_tasks)-1):
            self.assertEqual(submitted_tasks[i][0][0]._payload.read(),
                             stdin_input[index:index+2])
            index += 2
        # Ensure that the last part is an empty string as expected.
        self.assertEqual(submitted_tasks[7][0][0]._payload.read(), b'')

    def test_enqueue_upload_single_part_task_stream(self):
        """
        This test ensures that a payload gets attached to a task when
        it is submitted to the executor.
        """
        s3handler = S3StreamHandler(self.session, self.params)
        s3handler.executor = mock.Mock()
        mock_task_class = mock.Mock()
        s3handler._enqueue_upload_single_part_task(
            part_number=1, chunk_size=2, upload_context=None,
            filename=None, task_class=mock_task_class,
            payload=b'This is a test'
        )
        args, kwargs = mock_task_class.call_args
        self.assertIn('payload', kwargs.keys())
        self.assertEqual(kwargs['payload'], b'This is a test')

    def test_enqueue_multipart_download_stream(self):
        """
        This test ensures the right calls are made in ``_enqueue_tasks()``
        if the file should be a multipart download.
        """
        s3handler = S3StreamHandler(self.session, self.params,
                                    multi_threshold=5)
        s3handler.executor = mock.Mock()
        fileinfo = FileInfo('filename', operation_name='download',
                            is_stream=True)
        with mock.patch('awscli.customizations.s3.s3handler'
                        '.S3StreamHandler._enqueue_range_download_tasks') as \
                mock_enqueue_range_tasks:
            with mock.patch('awscli.customizations.s3.fileinfo.FileInfo'
                            '.set_size_from_s3') as mock_set_size_from_s3:
                # Set the file size to something larger than the multipart
                # threshold.
                fileinfo.size = 100
                # Run the main enqueue function.
                s3handler._enqueue_tasks([fileinfo])
                # Assert that the size of the ``FileInfo`` object was set
                # if we are downloading a stream.
                self.assertTrue(mock_set_size_from_s3.called)
                # Ensure that this download would have been a multipart
                # download.
                self.assertTrue(mock_enqueue_range_tasks.called)

    def test_enqueue_range_download_tasks_stream(self):
        s3handler = S3StreamHandler(self.session, self.params, chunksize=100)
        s3handler.executor = mock.Mock()
        fileinfo = FileInfo('filename', operation_name='download',
                            is_stream=True, size=100)
        s3handler._enqueue_range_download_tasks(fileinfo)
        # Ensure that no request was sent to make a file locally.
        submitted_tasks = s3handler.executor.submit.call_args_list
        self.assertNotEqual(type(submitted_tasks[0][0][0]),
                            CreateLocalFileTask)
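
The MockStdIn helper these stream tests rely on is not part of this listing; a guessed stand-in with the same contract (a context manager that swaps sys.stdin for an in-memory byte buffer) could look like this:

import sys
from io import BytesIO

class _StdinBuffer(BytesIO):
    # Subclass only so a 'buffer' attribute can be attached below,
    # letting byte reads work through sys.stdin.buffer as well.
    pass

class MockStdIn(object):
    def __init__(self, initial_input=b''):
        self._buffer = _StdinBuffer(initial_input)
        self._buffer.buffer = self._buffer

    def __enter__(self):
        self._real_stdin = sys.stdin
        sys.stdin = self._buffer
        return self._buffer

    def __exit__(self, exc_type, exc_value, traceback):
        sys.stdin = self._real_stdin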
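
A minimal sketch of the behavior test_pull_from_stream pins down, assuming _pull_from_stream reads at most the requested number of bytes from stdin and reports whether the full amount was available; the body below is an assumption, not the real implementation:

import sys
from io import BytesIO

def _pull_from_stream(self, amount_requested):
    # Read at most amount_requested bytes from stdin; the boolean
    # flags whether the full amount was read, i.e. whether more data
    # may still remain on the stream.
    stream = getattr(sys.stdin, 'buffer', sys.stdin)
    payload = stream.read(amount_requested)
    return BytesIO(payload), len(payload) == amount_requested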
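
And a hedged sketch of the chunksize adjustment that test_upload_stream_with_expected_size asserts: grow the chunk size until the expected upload needs strictly fewer than an assumed 1000-part S3 limit (the constant and helper names are assumptions):

MAX_PARTS = 1000  # assumed S3 multipart part-count limit

def adjust_chunk_size(chunk_size, expected_size):
    # Smallest chunk size keeping the part count strictly under
    # MAX_PARTS; keep the configured value if it is already large
    # enough.  E.g. 100000 // 1000 + 1 -> 101-byte chunks, so
    # 100000 / 101 < 1000 parts, as the test requires.
    return max(chunk_size, expected_size // MAX_PARTS + 1)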
Example n. 48
def setUp(self):
    self.session = FakeSession()
    module = 'awscli.customizations.s3.s3.CommandArchitecture'
    self.cmd_arc_patch = patch(module)
    self.cmd_arc_mock = self.cmd_arc_patch.start()
    self.cmd_arc_mock.run.return_value = "Passed"
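
This fixture only starts the patch; a complementary tearDown, plus an illustrative assertion against the canned return value, might look like the following (an assumption about how the surrounding test class is used, not code from the listing):

def tearDown(self):
    # Undo the patch started in setUp so later tests see the real
    # CommandArchitecture again.
    self.cmd_arc_patch.stop()

def test_run_returns_canned_value(self):
    # Illustrative only: the mocked class returns the value canned
    # in setUp without touching real S3 logic.
    self.assertEqual(self.cmd_arc_mock.run(), "Passed")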
Example n. 49
class S3HandlerTestDownload(S3HandlerBaseTest):
    """
    This class tests the ability to download s3 objects locally, as well
    as via multipart downloads.
    """
    def setUp(self):
        super(S3HandlerTestDownload, self).setUp()
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        params = {'region': 'us-east-1'}
        self.s3_handler = S3Handler(self.session, params)
        self.s3_handler_multi = S3Handler(self.session,
                                          params,
                                          multi_threshold=10,
                                          chunksize=2)
        self.bucket = make_s3_files(self.session)
        self.s3_files = [
            self.bucket + '/text1.txt',
            self.bucket + '/another_directory/text2.txt'
        ]
        directory1 = os.path.abspath('.') + os.sep + 'some_directory' + os.sep
        filename1 = directory1 + "text1.txt"
        directory2 = directory1 + 'another_directory' + os.sep
        filename2 = directory2 + "text2.txt"
        self.loc_files = [filename1, filename2]

        self.fail_session = FakeSession(connection_error=True)
        self.fail_session.s3 = self.session.s3
        self.s3_handler_multi_except = S3Handler(self.fail_session,
                                                 params,
                                                 multi_threshold=10,
                                                 chunksize=2)

    def tearDown(self):
        super(S3HandlerTestDownload, self).tearDown()
        clean_loc_files(self.loc_files)
        s3_cleanup(self.bucket, self.session)

    def test_download(self):
        # Confirm that the files do not exist.
        for filename in self.loc_files:
            self.assertFalse(os.path.exists(filename))
        # Create file info objects to perform download.
        tasks = []
        time = datetime.datetime.now()
        for i in range(len(self.s3_files)):
            tasks.append(
                FileInfo(src=self.s3_files[i],
                         src_type='s3',
                         dest=self.loc_files[i],
                         dest_type='local',
                         last_update=time,
                         operation_name='download',
                         size=0,
                         service=self.service,
                         endpoint=self.endpoint))
        # Perform the download.
        self.s3_handler.call(tasks)
        # Confirm that the files now exist.
        for filename in self.loc_files:
            self.assertTrue(os.path.exists(filename))
        # Ensure the contents are as expected.
        with open(self.loc_files[0], 'rb') as filename:
            self.assertEqual(filename.read(), b'This is a test.')
        with open(self.loc_files[1], 'rb') as filename:
            self.assertEqual(filename.read(), b'This is another test.')

    def test_multi_download(self):
        tasks = []
        time = datetime.datetime.now()
        for i in range(len(self.s3_files)):
            tasks.append(
                FileInfo(
                    src=self.s3_files[i],
                    src_type='s3',
                    dest=self.loc_files[i],
                    dest_type='local',
                    last_update=time,
                    operation_name='download',
                    size=15,
                    service=self.service,
                    endpoint=self.endpoint,
                ))
        # Perform the multipart download.
        self.s3_handler_multi.call(tasks)
        # Confirm that the files now exist.
        for filename in self.loc_files:
            self.assertTrue(os.path.exists(filename))
        # Ensure the contents are as expected.
        with open(self.loc_files[0], 'rb') as filename:
            self.assertEqual(filename.read(), b'This is a test.')
        with open(self.loc_files[1], 'rb') as filename:
            self.assertEqual(filename.read(), b'This is another test.')

    def test_multi_download_fail(self):
        """
        This test ensures that a multipart download can handle a
        standard error exception stemming from an operation
        being performed on a nonexistent bucket.  The existing file
        should be downloaded properly, but the other will not be.
        """
        tasks = []
        wrong_s3_files = [
            self.bucket + '/text1.txt',
            self.bucket[:-1] + '/another_directory/text2.txt'
        ]
        time = datetime.datetime.now()
        for i in range(len(self.s3_files)):
            tasks.append(
                FileInfo(src=wrong_s3_files[i],
                         src_type='s3',
                         dest=self.loc_files[i],
                         dest_type='local',
                         last_update=time,
                         operation_name='download',
                         size=15,
                         service=self.service,
                         endpoint=self.endpoint))
        # Perform the multipart download.
        self.s3_handler_multi.call(tasks)
        # Confirm that the files now exist.
        self.assertTrue(os.path.exists(self.loc_files[0]))
        # The second file should not exist.
        self.assertFalse(os.path.exists(self.loc_files[1]))
        # Ensure that contents are as expected.
        with open(self.loc_files[0], 'rb') as filename:
            self.assertEqual(filename.read(), b'This is a test.')
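
As a side note, the index-based loops this class uses to build tasks can be written more idiomatically with zip; an equivalent sketch for test_download:

tasks = [
    FileInfo(src=src, src_type='s3',
             dest=dest, dest_type='local',
             last_update=time, operation_name='download',
             size=0, service=self.service, endpoint=self.endpoint)
    for src, dest in zip(self.s3_files, self.loc_files)
]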
Example n. 50
def setUp(self):
    super(TestStreams, self).setUp()
    self.session = FakeSession()
    self.service = self.session.get_service('s3')
    self.endpoint = self.service.get_endpoint('us-east-1')
    self.params = {'is_stream': True, 'region': 'us-east-1'}
Example n. 51
class S3FileGeneratorTest(unittest.TestCase):
    def setUp(self):
        self.session = FakeSession()
        self.bucket = make_s3_files(self.session)
        self.file1 = self.bucket + "/" + "text1.txt"
        self.file2 = self.bucket + "/" + "another_directory/text2.txt"
        self.service = self.session.get_service("s3")
        self.endpoint = self.service.get_endpoint("us-east-1")

    def tearDown(self):
        s3_cleanup(self.bucket, self.session)

    def test_nonexist_s3_file(self):
        """
        This tests to make sure that files are not improperly yielded by
        ensuring the file prefix is exactly the same as the input.
        """
        input_s3_file = {
            "src": {"path": self.file1[:-1], "type": "s3"},
            "dest": {"path": "text1.txt", "type": "local"},
            "dir_op": False,
            "use_src_name": False,
        }
        params = {"region": "us-east-1"}
        files = FileGenerator(self.service, self.endpoint, "", params).call(input_s3_file)
        self.assertEqual(len(list(files)), 0)

    def test_s3_file(self):
        """
        Generate a single s3 file.
        Note: Size and last update are not tested because s3 generates them.
        """
        input_s3_file = {
            "src": {"path": self.file1, "type": "s3"},
            "dest": {"path": "text1.txt", "type": "local"},
            "dir_op": False,
            "use_src_name": False,
        }
        params = {"region": "us-east-1"}
        files = FileGenerator(self.service, self.endpoint, "", params).call(input_s3_file)
        result_list = []
        for filename in files:
            result_list.append(filename)
        file_info = FileInfo(
            src=self.file1,
            dest="text1.txt",
            compare_key="text1.txt",
            size=result_list[0].size,
            last_update=result_list[0].last_update,
            src_type="s3",
            dest_type="local",
            operation_name="",
            service=None,
            endpoint=None,
        )

        ref_list = [file_info]
        self.assertEqual(len(result_list), len(ref_list))
        for i in range(len(result_list)):
            compare_files(self, result_list[i], ref_list[i])

    def test_s3_directory(self):
        """
        Generates s3 files under a common prefix. It also ensures that
        zero-size files are ignored.
        Note: Size and last update are not tested because s3 generates them.
        """
        input_s3_file = {
            "src": {"path": self.bucket + "/", "type": "s3"},
            "dest": {"path": "", "type": "local"},
            "dir_op": True,
            "use_src_name": True,
        }
        params = {"region": "us-east-1"}
        files = FileGenerator(self.service, self.endpoint, "", params).call(input_s3_file)
        result_list = []
        for filename in files:
            result_list.append(filename)
        file_info = FileInfo(
            src=self.file2,
            dest="another_directory" + os.sep + "text2.txt",
            compare_key="another_directory/text2.txt",
            size=result_list[0].size,
            last_update=result_list[0].last_update,
            src_type="s3",
            dest_type="local",
            operation_name="",
            service=None,
            endpoint=None,
        )
        file_info2 = FileInfo(
            src=self.file1,
            dest="text1.txt",
            compare_key="text1.txt",
            size=result_list[1].size,
            last_update=result_list[1].last_update,
            src_type="s3",
            dest_type="local",
            operation_name="",
            service=None,
            endpoint=None,
        )

        ref_list = [file_info, file_info2]
        self.assertEqual(len(result_list), len(ref_list))
        for i in range(len(result_list)):
            compare_files(self, result_list[i], ref_list[i])

    def test_s3_delete_directory(self):
        """
        Generates s3 files under a common prefix. It also ensures that
        the directory key itself is included, because this is a delete
        command.
        Note: Size and last update are not tested because s3 generates them.
        """
        input_s3_file = {
            "src": {"path": self.bucket + "/", "type": "s3"},
            "dest": {"path": "", "type": "local"},
            "dir_op": True,
            "use_src_name": True,
        }
        params = {"region": "us-east-1"}
        files = FileGenerator(self.service, self.endpoint, "delete", params).call(input_s3_file)
        result_list = []
        for filename in files:
            result_list.append(filename)

        file_info1 = FileInfo(
            src=self.bucket + "/another_directory/",
            dest="another_directory" + os.sep,
            compare_key="another_directory/",
            size=result_list[0].size,
            last_update=result_list[0].last_update,
            src_type="s3",
            dest_type="local",
            operation_name="delete",
            service=None,
            endpoint=None,
        )
        file_info2 = FileInfo(
            src=self.file2,
            dest="another_directory" + os.sep + "text2.txt",
            compare_key="another_directory/text2.txt",
            size=result_list[1].size,
            last_update=result_list[1].last_update,
            src_type="s3",
            dest_type="local",
            operation_name="delete",
            service=None,
            endpoint=None,
        )
        file_info3 = FileInfo(
            src=self.file1,
            dest="text1.txt",
            compare_key="text1.txt",
            size=result_list[2].size,
            last_update=result_list[2].last_update,
            src_type="s3",
            dest_type="local",
            operation_name="delete",
            service=None,
            endpoint=None,
        )

        ref_list = [file_info1, file_info2, file_info3]
        self.assertEqual(len(result_list), len(ref_list))
        for i in range(len(result_list)):
            compare_files(self, result_list[i], ref_list[i])
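
The compare_files helper used above is defined elsewhere; a guessed stand-in that checks the fields these tests actually pin down (the exact field list is an assumption):

def compare_files(testcase, result_file, ref_file):
    # Compare the identifying fields of two FileInfo objects.  Size
    # and last_update are copied from the results into the references
    # above, so comparing them here is always consistent.
    testcase.assertEqual(result_file.src, ref_file.src)
    testcase.assertEqual(result_file.dest, ref_file.dest)
    testcase.assertEqual(result_file.compare_key, ref_file.compare_key)
    testcase.assertEqual(result_file.size, ref_file.size)
    testcase.assertEqual(result_file.last_update, ref_file.last_update)
    testcase.assertEqual(result_file.src_type, ref_file.src_type)
    testcase.assertEqual(result_file.dest_type, ref_file.dest_type)
    testcase.assertEqual(result_file.operation_name,
                         ref_file.operation_name)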
Example n. 52
class LocalFileGeneratorTest(unittest.TestCase):
    def setUp(self):
        self.local_file = os.path.abspath(".") + os.sep + "some_directory" + os.sep + "text1.txt"
        self.local_dir = os.path.abspath(".") + os.sep + "some_directory" + os.sep
        self.session = FakeSession()
        self.service = self.session.get_service("s3")
        self.endpoint = self.service.get_endpoint("us-east-1")
        self.files = make_loc_files()

    def tearDown(self):
        clean_loc_files(self.files)

    def test_local_file(self):
        """
        Generate a single local file.
        """
        input_local_file = {
            "src": {"path": self.local_file, "type": "local"},
            "dest": {"path": "bucket/text1.txt", "type": "s3"},
            "dir_op": False,
            "use_src_name": False,
        }
        params = {"region": "us-east-1"}
        files = FileGenerator(self.service, self.endpoint, "", params).call(input_local_file)
        result_list = []
        for filename in files:
            result_list.append(filename)
        size, last_update = get_file_stat(self.local_file)
        file_info = FileInfo(
            src=self.local_file,
            dest="bucket/text1.txt",
            compare_key="text1.txt",
            size=size,
            last_update=last_update,
            src_type="local",
            dest_type="s3",
            operation_name="",
            service=None,
            endpoint=None,
        )
        ref_list = [file_info]
        self.assertEqual(len(result_list), len(ref_list))
        for i in range(len(result_list)):
            compare_files(self, result_list[i], ref_list[i])

    def test_local_directory(self):
        """
        Generate an entire local directory.
        """
        input_local_dir = {
            "src": {"path": self.local_dir, "type": "local"},
            "dest": {"path": "bucket/", "type": "s3"},
            "dir_op": True,
            "use_src_name": True,
        }
        params = {"region": "us-east-1"}
        files = FileGenerator(self.service, self.endpoint, "", params).call(input_local_dir)
        result_list = []
        for filename in files:
            result_list.append(filename)
        size, last_update = get_file_stat(self.local_file)
        file_info = FileInfo(
            src=self.local_file,
            dest="bucket/text1.txt",
            compare_key="text1.txt",
            size=size,
            last_update=last_update,
            src_type="local",
            dest_type="s3",
            operation_name="",
            service=None,
            endpoint=None,
        )
        path = self.local_dir + "another_directory" + os.sep + "text2.txt"
        size, last_update = get_file_stat(path)
        file_info2 = FileInfo(
            src=path,
            dest="bucket/another_directory/text2.txt",
            compare_key="another_directory/text2.txt",
            size=size,
            last_update=last_update,
            src_type="local",
            dest_type="s3",
            operation_name="",
            service=None,
            endpoint=None,
        )
        ref_list = [file_info2, file_info]
        self.assertEqual(len(result_list), len(ref_list))
        for i in range(len(result_list)):
            compare_files(self, result_list[i], ref_list[i])
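
Worth noting for both generator test classes: FileGenerator.call returns a lazy generator of FileInfo objects, so the result_list accumulation loops above can be collapsed (a stylistic alternative, not a change in behavior):

params = {"region": "us-east-1"}
files = FileGenerator(self.service, self.endpoint, "", params).call(input_local_dir)
result_list = list(files)  # materialize the lazily-yielded FileInfo objects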
Example n. 53
class S3HandlerTestDeleteList(S3HandlerBaseTest):
    """
    This tests the ability to delete both files locally and in s3.
    """
    def setUp(self):
        super(S3HandlerTestDeleteList, self).setUp()
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        params = {'region': 'us-east-1'}
        self.s3_handler = S3Handler(self.session, params)
        self.bucket = make_s3_files(self.session)
        self.loc_files = make_loc_files()

    def tearDown(self):
        super(S3HandlerTestDeleteList, self).tearDown()
        clean_loc_files(self.loc_files)
        s3_cleanup(self.bucket, self.session)

    def test_loc_delete(self):
        """
        Test delete local file tasks.  The local files are the same as
        those generated by filegenerator_test.py.
        """
        files = [self.loc_files[0], self.loc_files[1]]
        tasks = []
        for filename in files:
            self.assertTrue(os.path.exists(filename))
            tasks.append(
                FileInfo(src=filename,
                         src_type='local',
                         dest_type='s3',
                         operation_name='delete',
                         size=0,
                         service=self.service,
                         endpoint=self.endpoint))
        self.s3_handler.call(tasks)
        for filename in files:
            self.assertFalse(os.path.exists(filename))

    def test_s3_delete(self):
        """
        Tests S3 deletes. The files used are the same as those generated
        by filegenerator_test.py.  This includes the created s3 file.
        """
        keys = [
            self.bucket + '/another_directory/text2.txt',
            self.bucket + '/text1.txt', self.bucket + '/another_directory/'
        ]
        tasks = []
        for key in keys:
            tasks.append(
                FileInfo(src=key,
                         src_type='s3',
                         dest_type='local',
                         operation_name='delete',
                         size=0,
                         service=self.service,
                         endpoint=self.endpoint))
        self.assertEqual(len(list_contents(self.bucket, self.session)), 3)
        self.s3_handler.call(tasks)
        self.assertEqual(len(list_contents(self.bucket, self.session)), 0)

    def test_list_objects(self):
        """
        Tests the ability to list objects, common prefixes, and buckets.
        If an error occurs, the test fails, as this is only a printing
        operation.
        """
        prefix_name = self.bucket + '/'
        file_info = FileInfo(src=prefix_name,
                             operation_name='list_objects',
                             size=0,
                             service=self.service,
                             endpoint=self.endpoint)
        params = {'region': 'us-east-1'}
        s3_handler = S3Handler(self.session, params)
        s3_handler.call([file_info])
        file_info = FileInfo(src='',
                             operation_name='list_objects',
                             size=0,
                             service=self.service,
                             endpoint=self.endpoint)
        s3_handler = S3Handler(self.session, params)
        s3_handler.call([file_info])
Example n. 54
class S3HandlerTestDownload(S3HandlerBaseTest):
    """
    This class tests the ability to download s3 objects locally, as well
    as via multipart downloads.
    """
    def setUp(self):
        super(S3HandlerTestDownload, self).setUp()
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        params = {'region': 'us-east-1'}
        self.s3_handler = S3Handler(self.session, params)
        self.s3_handler_multi = S3Handler(self.session, params,
                                          multi_threshold=10, chunksize=2)
        self.bucket = make_s3_files(self.session)
        self.s3_files = [self.bucket + '/text1.txt',
                         self.bucket + '/another_directory/text2.txt']
        directory1 = os.path.abspath('.') + os.sep + 'some_directory' + os.sep
        filename1 = directory1 + "text1.txt"
        directory2 = directory1 + 'another_directory' + os.sep
        filename2 = directory2 + "text2.txt"
        self.loc_files = [filename1, filename2]

        self.fail_session = FakeSession(connection_error=True)
        self.fail_session.s3 = self.session.s3
        self.s3_handler_multi_except = S3Handler(self.fail_session, params,
                                                 multi_threshold=10,
                                                 chunksize=2)

    def tearDown(self):
        super(S3HandlerTestDownload, self).tearDown()
        clean_loc_files(self.loc_files)
        s3_cleanup(self.bucket, self.session)

    def test_download(self):
        # Confirm that the files do not exist.
        for filename in self.loc_files:
            self.assertFalse(os.path.exists(filename))
        # Create file info objects to perform download.
        tasks = []
        time = datetime.datetime.now()
        for i in range(len(self.s3_files)):
            tasks.append(FileInfo(
                src=self.s3_files[i], src_type='s3',
                dest=self.loc_files[i], dest_type='local',
                last_update=time, operation_name='download',
                size=0,
                service=self.service,
                endpoint=self.endpoint))
        # Perform the download.
        self.s3_handler.call(tasks)
        # Confirm that the files now exist.
        for filename in self.loc_files:
            self.assertTrue(os.path.exists(filename))
        # Ensure the contents are as expected.
        with open(self.loc_files[0], 'rb') as filename:
            self.assertEqual(filename.read(), b'This is a test.')
        with open(self.loc_files[1], 'rb') as filename:
            self.assertEqual(filename.read(), b'This is another test.')

    def test_multi_download(self):
        tasks = []
        time = datetime.datetime.now()
        for i in range(len(self.s3_files)):
            tasks.append(FileInfo(
                src=self.s3_files[i], src_type='s3',
                dest=self.loc_files[i], dest_type='local',
                last_update=time, operation_name='download',
                size=15,
                service=self.service,
                endpoint=self.endpoint,
            ))
        # Perform the multipart download.
        self.s3_handler_multi.call(tasks)
        # Confirm that the files now exist.
        for filename in self.loc_files:
            self.assertTrue(os.path.exists(filename))
        # Ensure the contents are as expected.
        with open(self.loc_files[0], 'rb') as filename:
            self.assertEqual(filename.read(), b'This is a test.')
        with open(self.loc_files[1], 'rb') as filename:
            self.assertEqual(filename.read(), b'This is another test.')

    def test_multi_download_fail(self):
        """
        This test ensures that a multipart download can handle a
        standard error exception stemming from an operation
        being performed on a nonexistent bucket.  The existing file
        should be downloaded properly, but the other will not be.
        """
        tasks = []
        wrong_s3_files = [self.bucket + '/text1.txt',
                          self.bucket[:-1] + '/another_directory/text2.txt']
        time = datetime.datetime.now()
        for i in range(len(self.s3_files)):
            tasks.append(FileInfo(
                src=wrong_s3_files[i], src_type='s3',
                dest=self.loc_files[i], dest_type='local',
                last_update=time, operation_name='download',
                size=15,
                service=self.service,
                endpoint=self.endpoint
            ))
        # Perform the multipart download.
        self.s3_handler_multi.call(tasks)
        # Confirm that the files now exist.
        self.assertTrue(os.path.exists(self.loc_files[0]))
        # The second file should not exist.
        self.assertFalse(os.path.exists(self.loc_files[1]))
        # Ensure that contents are as expected.
        with open(self.loc_files[0], 'rb') as filename:
            self.assertEqual(filename.read(), b'This is a test.')
Example n. 55
class S3HandlerTestDeleteList(S3HandlerBaseTest):
    """
    This tests the ability to delete both files locally and in s3.
    """
    def setUp(self):
        super(S3HandlerTestDeleteList, self).setUp()
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        params = {'region': 'us-east-1'}
        self.s3_handler = S3Handler(self.session, params)
        self.bucket = make_s3_files(self.session)
        self.loc_files = make_loc_files()

    def tearDown(self):
        super(S3HandlerTestDeleteList, self).tearDown()
        clean_loc_files(self.loc_files)
        s3_cleanup(self.bucket, self.session)

    def test_loc_delete(self):
        """
        Test delete local file tasks.  The local files are the same as
        those generated by filegenerator_test.py.
        """
        files = [self.loc_files[0], self.loc_files[1]]
        tasks = []
        for filename in files:
            self.assertTrue(os.path.exists(filename))
            tasks.append(FileInfo(
                src=filename, src_type='local',
                dest_type='s3', operation_name='delete', size=0,
                service=self.service, endpoint=self.endpoint))
        self.s3_handler.call(tasks)
        for filename in files:
            self.assertFalse(os.path.exists(filename))

    def test_s3_delete(self):
        """
        Tests S3 deletes. The files used are the same as those generated
        by filegenerator_test.py.  This includes the created s3 file.
        """
        keys = [self.bucket + '/another_directory/text2.txt',
                self.bucket + '/text1.txt',
                self.bucket + '/another_directory/']
        tasks = []
        for key in keys:
            tasks.append(FileInfo(
                src=key, src_type='s3',
                dest_type='local', operation_name='delete',
                size=0,
                service=self.service,
                endpoint=self.endpoint))
        self.assertEqual(len(list_contents(self.bucket, self.session)), 3)
        self.s3_handler.call(tasks)
        self.assertEqual(len(list_contents(self.bucket, self.session)), 0)

    def test_list_objects(self):
        """
        Tests the ability to list objects, common prefixes, and buckets.
        If an error occurs, the test fails, as this is only a printing
        operation.
        """
        prefix_name = self.bucket + '/'
        file_info = FileInfo(
            src=prefix_name, operation_name='list_objects', size=0,
            service=self.service, endpoint=self.endpoint)
        params = {'region': 'us-east-1'}
        s3_handler = S3Handler(self.session, params)
        s3_handler.call([file_info])
        file_info = FileInfo(
            src='', operation_name='list_objects', size=0,
            service=self.service, endpoint=self.endpoint)
        s3_handler = S3Handler(self.session, params)
        s3_handler.call([file_info])