Example #1
 def test_run_sync(self):
     # This ensures that the architecture sets up correctly for a ``sync``
     # command.  It is just a dry run, but all of the components need
     # to be wired correctly for it to work.
     s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
     local_file = self.loc_files[0]
     s3_prefix = 's3://' + self.bucket + '/'
     local_dir = self.loc_files[3]
     rel_local_file = os.path.relpath(local_file)
     filters = [['--include', '*']]
     params = {'dir_op': True, 'dryrun': True, 'quiet': False,
               'src': local_dir, 'dest': s3_prefix, 'filters': filters,
               'paths_type': 'locals3', 'region': 'us-east-1',
               'endpoint_url': None, 'verify_ssl': None,
               'follow_symlinks': True, 'page_size': None,
               'is_stream': False, 'source_region': 'us-west-2'}
     self.parsed_responses = [
         {"CommonPrefixes": [], "Contents": [
             {"Key": "text1.txt", "Size": 100,
              "LastModified": "2014-01-09T20:45:49.000Z"}]},
         {"CommonPrefixes": [], "Contents": []}]
     cmd_arc = CommandArchitecture(self.session, 'sync', params)
     cmd_arc.create_instructions()
     cmd_arc.set_clients()
     self.patch_make_request()
     cmd_arc.run()
     output_str = "(dryrun) upload: %s to %s" % (rel_local_file, s3_file)
     self.assertIn(output_str, self.output.getvalue())
Example #2
    def test_create_instructions(self):
        """
        This test makes sure the instructions for any command are generated
        properly.
        """
        cmds = ['cp', 'mv', 'rm', 'sync', 'rb']

        instructions = {'cp': ['file_generator', 'file_info_builder',
                               's3_handler'],
                        'mv': ['file_generator', 'file_info_builder',
                               's3_handler'],
                        'rm': ['file_generator', 'file_info_builder',
                               's3_handler'],
                        'sync': ['file_generator', 'comparator',
                                 'file_info_builder', 's3_handler'],
                        'rb': ['s3_handler']}

        params = {'filters': True, 'region': 'us-east-1', 'endpoint_url': None,
                  'verify_ssl': None, 'is_stream': False}
        for cmd in cmds:
            cmd_arc = CommandArchitecture(self.session, cmd,
                                          {'region': 'us-east-1',
                                           'endpoint_url': None,
                                           'verify_ssl': None,
                                           'is_stream': False})
            cmd_arc.create_instructions()
            self.assertEqual(cmd_arc.instructions, instructions[cmd])

        # Test if there is a filter.
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.create_instructions()
        self.assertEqual(cmd_arc.instructions, ['file_generator', 'filters',
                                                'file_info_builder',
                                                's3_handler'])
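
A note on the expected pipelines above: the final assertion shows that a ``filters`` step is spliced in right after the file generator whenever the parsed parameters carry include/exclude filters. The following is only an illustrative sketch of that conditional assembly; ``build_instructions`` and its base table are hypothetical names, not the actual CommandArchitecture internals:

def build_instructions(cmd, params):
    # Base pipeline per command, copied from the expected values asserted
    # in the test above; ``rb`` only needs the S3 handler.
    base = {'cp': ['file_generator', 'file_info_builder', 's3_handler'],
            'mv': ['file_generator', 'file_info_builder', 's3_handler'],
            'rm': ['file_generator', 'file_info_builder', 's3_handler'],
            'sync': ['file_generator', 'comparator', 'file_info_builder',
                     's3_handler'],
            'rb': ['s3_handler']}
    steps = list(base[cmd])
    # With filters present, a ``filters`` step goes right after the
    # file generator, matching the test's final assertion.
    if params.get('filters'):
        steps.insert(1, 'filters')
    return steps


assert build_instructions('cp', {'filters': True}) == [
    'file_generator', 'filters', 'file_info_builder', 's3_handler']
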
Example #3
 def test_run_sync(self):
     # This ensures that the architecture sets up correctly for a ``sync``
     # command.  It is just a dry run, but all of the components need
     # to be wired correctly for it to work.
     s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
     local_file = self.loc_files[0]
     s3_prefix = 's3://' + self.bucket + '/'
     local_dir = self.loc_files[3]
     rel_local_file = os.path.relpath(local_file)
     filters = [['--include', '*']]
     params = {'dir_op': True, 'dryrun': True, 'quiet': False,
               'src': local_dir, 'dest': s3_prefix, 'filters': filters,
               'paths_type': 'locals3', 'region': 'us-east-1',
               'endpoint_url': None, 'verify_ssl': None,
               'follow_symlinks': True, 'page_size': None,
               'is_stream': False, 'source_region': 'us-west-2'}
     self.parsed_responses = [
         {"CommonPrefixes": [], "Contents": [
             {"Key": "text1.txt", "Size": 100,
              "LastModified": "2014-01-09T20:45:49.000Z"}]},
         {"CommonPrefixes": [], "Contents": []}]
     config = RuntimeConfig().build_config()
     cmd_arc = CommandArchitecture(self.session, 'sync', params, config)
     cmd_arc.create_instructions()
     cmd_arc.set_clients()
     self.patch_make_request()
     cmd_arc.run()
     output_str = "(dryrun) upload: %s to %s" % (rel_local_file, s3_file)
     self.assertIn(output_str, self.output.getvalue())
Example #4
 def test_run_sync(self):
     # This ensures that the architecture sets up correctly for a ``sync``
     # command.  It is just a dry run, but all of the components need
     # to be wired correctly for it to work.
     s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
     local_file = self.loc_files[0]
     s3_prefix = 's3://' + self.bucket + '/'
     local_dir = self.loc_files[3]
     rel_local_file = os.path.relpath(local_file)
     filters = [['--include', '*']]
     params = {
         'dir_op': True,
         'dryrun': True,
         'quiet': False,
         'src': local_dir,
         'dest': s3_prefix,
         'filters': filters,
         'paths_type': 'locals3',
         'region': 'us-east-1',
         'endpoint_url': None,
         'verify_ssl': None,
         'follow_symlinks': True
     }
     cmd_arc = CommandArchitecture(self.session, 'sync', params)
     cmd_arc.create_instructions()
     cmd_arc.run()
     output_str = "(dryrun) upload: %s to %s" % (rel_local_file, s3_file)
     self.assertIn(output_str, self.output.getvalue())
Example #5
 def test_error_on_same_line_as_status(self):
     s3_file = 's3://' + 'bucket-does-not-exist' + '/' + 'text1.txt'
     local_file = self.loc_files[0]
     rel_local_file = os.path.relpath(local_file)
     filters = [['--include', '*']]
     params = {'dir_op': False, 'dryrun': False, 'quiet': False,
               'src': local_file, 'dest': s3_file, 'filters': filters,
               'paths_type': 'locals3', 'region': 'us-east-1',
               'endpoint_url': None, 'verify_ssl': None,
               'follow_symlinks': True, 'page_size': None,
               'is_stream': False, 'source_region': None, 'metadata': None}
     self.http_response.status_code = 400
     self.parsed_responses = [{'Error': {
                               'Code': 'BucketNotExists',
                               'Message': 'Bucket does not exist'}}]
     cmd_arc = CommandArchitecture(
         self.session, 'cp', params, RuntimeConfig().build_config())
     cmd_arc.set_clients()
     cmd_arc.create_instructions()
     self.patch_make_request()
     cmd_arc.run()
     # Also, we need to verify that the error message is on the *same* line
     # as the upload failed line, to make it easier to track.
     output_str = (
         "upload failed: %s to %s An error" % (
             rel_local_file, s3_file))
     self.assertIn(output_str, self.err_output.getvalue())
Example #6
 def test_run_cp_put(self):
     # This ensures that the architecture sets up correctly for a ``cp`` put
     # command.  It is just a dry run, but all of the components need
     # to be wired correctly for it to work.
     s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
     local_file = self.loc_files[0]
     rel_local_file = self._get_file_path(local_file)
     filters = [['--include', '*']]
     params = {
         'dir_op': False,
         'dryrun': True,
         'quiet': False,
         'src': local_file,
         'dest': s3_file,
         'filters': filters,
         'paths_type': 'locals3',
         'region': 'us-east-1',
         'endpoint_url': None,
         'verify_ssl': None,
         'follow_symlinks': True,
         'page_size': None,
         'is_stream': False,
         'source_region': None,
         'metadata': None
     }
     config = RuntimeConfig().build_config()
     cmd_arc = CommandArchitecture(self.session, 'cp', params, config)
     cmd_arc.set_clients()
     cmd_arc.create_instructions()
     self.patch_make_request()
     cmd_arc.run()
     output_str = "(dryrun) upload: %s to %s" % (rel_local_file, s3_file)
     self.assertIn(output_str, self.output.getvalue())
Example #7
    def test_create_instructions(self):
        """
        This test makes sure the instructions for any command are generated
        properly.
        """
        cmds = ['cp', 'mv', 'rm', 'sync']

        instructions = {'cp': ['file_generator', 'file_info_builder',
                               's3_handler'],
                        'mv': ['file_generator', 'file_info_builder',
                               's3_handler'],
                        'rm': ['file_generator', 'file_info_builder',
                               's3_handler'],
                        'sync': ['file_generator', 'comparator',
                                 'file_info_builder', 's3_handler']}

        params = {'filters': True, 'region': 'us-east-1', 'endpoint_url': None,
                  'verify_ssl': None, 'is_stream': False}
        for cmd in cmds:
            cmd_arc = CommandArchitecture(self.session, cmd,
                                          {'region': 'us-east-1',
                                           'endpoint_url': None,
                                           'verify_ssl': None,
                                           'is_stream': False})
            cmd_arc.create_instructions()
            self.assertEqual(cmd_arc.instructions, instructions[cmd])

        # Test if there is a filter.
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.create_instructions()
        self.assertEqual(cmd_arc.instructions, ['file_generator', 'filters',
                                                'file_info_builder',
                                                's3_handler'])
Example #8
 def test_run_cp_copy(self):
     # This ensures that the architecture sets up correctly for a ``cp`` copy
     # command.  It is just a dry run, but all of the components need
     # to be wired correctly for it to work.
     s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
     filters = [['--include', '*']]
     params = {
         'dir_op': False,
         'dryrun': True,
         'quiet': False,
         'src': s3_file,
         'dest': s3_file,
         'filters': filters,
         'paths_type': 's3s3',
         'region': 'us-east-1',
         'endpoint_url': None,
         'verify_ssl': None,
         'follow_symlinks': True,
         'page_size': None,
         'is_stream': False
     }
     cmd_arc = CommandArchitecture(self.session, 'cp', params)
     cmd_arc.create_instructions()
     cmd_arc.run()
     output_str = "(dryrun) copy: %s to %s" % (s3_file, s3_file)
     self.assertIn(output_str, self.output.getvalue())
Example #9
 def test_error_on_same_line_as_status(self):
     s3_file = 's3://' + 'bucket-does-not-exist' + '/' + 'text1.txt'
     local_file = self.loc_files[0]
     rel_local_file = os.path.relpath(local_file)
     filters = [['--include', '*']]
     params = {
         'dir_op': False,
         'dryrun': False,
         'quiet': False,
         'src': local_file,
         'dest': s3_file,
         'filters': filters,
         'paths_type': 'locals3',
         'region': 'us-east-1',
         'endpoint_url': None,
         'verify_ssl': None,
         'follow_symlinks': True,
         'page_size': None,
         'is_stream': False
     }
     cmd_arc = CommandArchitecture(self.session, 'cp', params)
     cmd_arc.create_instructions()
     cmd_arc.run()
     # Also, we need to verify that the error message is on the *same* line
     # as the upload failed line, to make it easier to track.
     output_str = (
         "upload failed: %s to %s Error: Bucket does not exist\n" %
         (rel_local_file, s3_file))
     self.assertIn(output_str, self.err_output.getvalue())
Example #10
 def test_run_rb_nonzero_rc(self):
     # This ensures that the architecture sets up correctly for a ``rb``
     # command and that a failed remove_bucket results in a nonzero return
     # code, so all of the components need to be wired correctly.
     s3_prefix = 's3://' + self.bucket + '/'
     params = {
         'dir_op': True,
         'dryrun': False,
         'quiet': False,
         'src': s3_prefix,
         'dest': s3_prefix,
         'paths_type': 's3',
         'region': 'us-east-1',
         'endpoint_url': None,
         'verify_ssl': None,
         'follow_symlinks': True,
         'page_size': None,
         'is_stream': False
     }
     self.http_response.status_code = 400
     cmd_arc = CommandArchitecture(self.session, 'rb', params)
     cmd_arc.create_instructions()
     self.patch_make_request()
     rc = cmd_arc.run()
     output_str = "remove_bucket failed: %s" % s3_prefix
     self.assertIn(output_str, self.err_output.getvalue())
     self.assertEqual(rc, 1)
Example #11
 def test_run_remove(self):
     # This ensures that the architecture sets up correctly for a ``rm``
     # command.  It is just a dry run, but all of the components need
     # to be wired correctly for it to work.
     s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
     filters = [['--include', '*']]
     params = {
         'dir_op': False,
         'dryrun': True,
         'quiet': False,
         'src': s3_file,
         'dest': s3_file,
         'filters': filters,
         'paths_type': 's3',
         'region': 'us-east-1',
         'endpoint_url': None,
         'verify_ssl': None,
         'follow_symlinks': True,
         'page_size': None,
         'is_stream': False,
         'source_region': None
     }
     self.parsed_responses = [{
         "ETag": "abcd",
         "ContentLength": 100,
         "LastModified": "2014-01-09T20:45:49.000Z"
     }]
     cmd_arc = CommandArchitecture(self.session, 'rm', params)
     cmd_arc.set_clients()
     cmd_arc.create_instructions()
     self.patch_make_request()
     cmd_arc.run()
     output_str = "(dryrun) delete: %s" % s3_file
     self.assertIn(output_str, self.output.getvalue())
Example #12
 def test_run_mb(self):
     # This ensures that the architecture sets up correctly for a ``mb``
     # command.  It is just a dry run, but all of the components need
     # to be wired correctly for it to work.
     s3_prefix = 's3://' + self.bucket + '/'
     params = {'dir_op': True, 'dryrun': True, 'quiet': False,
               'src': s3_prefix, 'dest': s3_prefix, 'paths_type': 's3',
               'region': 'us-east-1', 'endpoint_url': None,
               'verify_ssl': None, 'follow_symlinks': True}
     cmd_arc = CommandArchitecture(self.session, 'mb', params)
     cmd_arc.create_instructions()
     cmd_arc.run()
     output_str = "(dryrun) make_bucket: %s" % s3_prefix
     self.assertIn(output_str, self.output.getvalue())
Example #13
 def test_run_mb(self):
     # This ensures that the architecture sets up correctly for a ``mb``
     # command.  It is just a dry run, but all of the components need
     # to be wired correctly for it to work.
     s3_prefix = 's3://' + self.bucket + '/'
     params = {'dir_op': True, 'dryrun': True, 'quiet': False,
               'src': s3_prefix, 'dest': s3_prefix, 'paths_type': 's3',
               'region': 'us-east-1', 'endpoint_url': None,
               'verify_ssl': None, 'follow_symlinks': True,
               'page_size': None, 'is_stream': False}
     cmd_arc = CommandArchitecture(self.session, 'mb', params)
     cmd_arc.create_instructions()
     cmd_arc.run()
     output_str = "(dryrun) make_bucket: %s" % s3_prefix
     self.assertIn(output_str, self.output.getvalue())
Example #14
 def test_run_remove(self):
     # This ensures that the architecture sets up correctly for a ``rm``
     # command.  It is just a dry run, but all of the components need
     # to be wired correctly for it to work.
     s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
     filters = [['--include', '*']]
     params = {'dir_op': False, 'dryrun': True, 'quiet': False,
               'src': s3_file, 'dest': s3_file, 'filters': filters,
               'paths_type': 's3', 'region': 'us-east-1',
               'endpoint_url': None, 'verify_ssl': None,
               'follow_symlinks': True, 'page_size': None}
     cmd_arc = CommandArchitecture(self.session, 'rm', params)
     cmd_arc.create_instructions()
     cmd_arc.run()
     output_str = "(dryrun) delete: %s" % s3_file
     self.assertIn(output_str, self.output.getvalue())
Example #15
 def test_run_rb_nonzero_rc(self):
     # This ensures that the architecture sets up correctly for a ``rb``
     # command and that a failed remove_bucket results in a nonzero return
     # code, so all of the components need to be wired correctly.
     s3_prefix = 's3://' + self.bucket + '/'
     params = {'dir_op': True, 'dryrun': False, 'quiet': False,
               'src': s3_prefix, 'dest': s3_prefix, 'paths_type': 's3',
               'region': 'us-east-1', 'endpoint_url': None,
               'verify_ssl': None, 'follow_symlinks': True,
               'page_size': None, 'is_stream': False}
     self.http_response.status_code = 400
     cmd_arc = CommandArchitecture(self.session, 'rb', params)
     cmd_arc.create_instructions()
     self.patch_make_request()
     rc = cmd_arc.run()
     output_str = "remove_bucket failed: %s" % s3_prefix
     self.assertIn(output_str, self.err_output.getvalue())
     self.assertEqual(rc, 1)
Example #16
 def test_run_cp_get(self):
     # This ensures that the architecture sets up correctly for a ``cp`` get
     # command.  It is just a dry run, but all of the components need
     # to be wired correctly for it to work.
     s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
     local_file = self.loc_files[0]
     rel_local_file = os.path.relpath(local_file)
     filters = [['--include', '*']]
     params = {'dir_op': False, 'dryrun': True, 'quiet': False,
               'src': s3_file, 'dest': local_file, 'filters': filters,
               'paths_type': 's3local', 'region': 'us-east-1',
               'endpoint_url': None, 'verify_ssl': None,
               'follow_symlinks': True, 'page_size': None}
     cmd_arc = CommandArchitecture(self.session, 'cp', params)
     cmd_arc.create_instructions()
     cmd_arc.run()
     output_str = "(dryrun) download: %s to %s" % (s3_file, rel_local_file)
     self.assertIn(output_str, self.output.getvalue())
Example #17
 def test_error_on_same_line_as_status(self):
     s3_file = 's3://' + 'bucket-does-not-exist' + '/' + 'text1.txt'
     local_file = self.loc_files[0]
     rel_local_file = os.path.relpath(local_file)
     filters = [['--include', '*']]
     params = {'dir_op': False, 'dryrun': False, 'quiet': False,
               'src': local_file, 'dest': s3_file, 'filters': filters,
               'paths_type': 'locals3', 'region': 'us-east-1',
               'endpoint_url': None, 'verify_ssl': None,
               'follow_symlinks': True, 'page_size': None}
     cmd_arc = CommandArchitecture(self.session, 'cp', params)
     cmd_arc.create_instructions()
     cmd_arc.run()
     # Also, we need to verify that the error message is on the *same* line
     # as the upload failed line, to make it easier to track.
     output_str = (
         "upload failed: %s to %s Error: Bucket does not exist\n" % (
             rel_local_file, s3_file))
     self.assertIn(output_str, self.output.getvalue())
Example #18
 def test_run_remove(self):
     # This ensures that the architecture sets up correctly for a ``rm``
     # command.  It is just a dry run, but all of the components need
     # to be wired correctly for it to work.
     s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
     filters = [['--include', '*']]
     params = {'dir_op': False, 'dryrun': True, 'quiet': False,
               'src': s3_file, 'dest': s3_file, 'filters': filters,
               'paths_type': 's3', 'region': 'us-east-1',
               'endpoint_url': None, 'verify_ssl': None,
               'follow_symlinks': True, 'page_size': None,
               'is_stream': False, 'source_region': None}
     self.parsed_responses = [{"ETag": "abcd", "ContentLength": 100,
                               "LastModified": "2014-01-09T20:45:49.000Z"}]
     cmd_arc = CommandArchitecture(self.session, 'rm', params)
     cmd_arc.set_clients()
     cmd_arc.create_instructions()
     self.patch_make_request()
     cmd_arc.run()
     output_str = "(dryrun) delete: %s" % s3_file
      self.assertIn(output_str, self.output.getvalue())

  def test_error_on_same_line_as_status(self):
     s3_file = 's3://' + 'bucket-does-not-exist' + '/' + 'text1.txt'
     local_file = self.loc_files[0]
     rel_local_file = os.path.relpath(local_file)
     filters = [['--include', '*']]
     params = {
         'dir_op': False,
         'dryrun': False,
         'quiet': False,
         'src': local_file,
         'dest': s3_file,
         'filters': filters,
         'paths_type': 'locals3',
         'region': 'us-east-1',
         'endpoint_url': None,
         'verify_ssl': None,
         'follow_symlinks': True,
         'page_size': None,
         'is_stream': False,
         'source_region': None,
         'metadata': None
     }
     self.http_response.status_code = 400
     self.parsed_responses = [{
         'Error': {
             'Code': 'BucketNotExists',
             'Message': 'Bucket does not exist'
         }
     }]
     cmd_arc = CommandArchitecture(self.session, 'cp', params,
                                   RuntimeConfig().build_config())
     cmd_arc.set_clients()
     cmd_arc.create_instructions()
     self.patch_make_request()
     cmd_arc.run()
     # Also, we need to verify that the error message is on the *same* line
     # as the upload failed line, to make it easier to track.
     output_str = ("upload failed: %s to %s An error" %
                   (rel_local_file, s3_file))
     self.assertIn(output_str, self.err_output.getvalue())
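
Taken together, the snippets follow one lifecycle: build the params dict, construct CommandArchitecture, wire the clients, generate the instructions, and run. Below is only a condensed sketch of that flow; ``session``, ``bucket``, and ``local_file`` stand in for the fixtures the tests provide, and the import path is an assumption based on the AWS CLI source layout:

# Sketch only: placeholders, not the real test fixtures; import path assumed.
from awscli.customizations.s3.subcommands import CommandArchitecture


def dryrun_upload(session, bucket, local_file):
    # Minimal parameter set for a single-file ``cp`` dry run, mirroring the
    # examples above.
    params = {'dir_op': False, 'dryrun': True, 'quiet': False,
              'src': local_file, 'dest': 's3://%s/text1.txt' % bucket,
              'filters': [['--include', '*']], 'paths_type': 'locals3',
              'region': 'us-east-1', 'endpoint_url': None,
              'verify_ssl': None, 'follow_symlinks': True,
              'page_size': None, 'is_stream': False,
              'source_region': None, 'metadata': None}
    arc = CommandArchitecture(session, 'cp', params)
    arc.set_clients()          # create the S3 client(s) from the session
    arc.create_instructions()  # assemble the instruction pipeline for ``cp``
    return arc.run()           # run() returns the command's return code
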