Code Example #1
File: test_s3.py Project: nealma/aws-cli
 def test_run_sync(self):
     """
     This ensures that the architecture sets up correctly for a ``sync``
     command.  It is just a dry run, but all of the components need
     to be wired correctly for it to work.
     """
     s3_file = "s3://" + self.bucket + "/" + "text1.txt"
     local_file = self.loc_files[0]
     s3_prefix = "s3://" + self.bucket + "/"
     local_dir = self.loc_files[3]
     rel_local_file = os.path.relpath(local_file)
     filters = [["--include", "*"]]
     params = {
         "dir_op": True,
         "dryrun": True,
         "quiet": False,
         "src": local_dir,
         "dest": s3_prefix,
         "filters": filters,
         "paths_type": "locals3",
         "region": "us-east-1",
     }
     cmd_arc = CommandArchitecture(self.session, "sync", params)
     cmd_arc.create_instructions()
     cmd_arc.run()
     output_str = "(dryrun) upload: %s to %s" % (rel_local_file, s3_file)
     self.assertIn(output_str, self.output.getvalue())
Code Example #2
File: test_s3.py Project: CitizenB/aws-cli
 def test_run_ls(self):
     # This ensures that the architecture sets up correctly for an ``ls``
     # command.  It is just a dry run, but all of the components need
     # to be wired correctly for it to work.
     s3_prefix = 's3://' + self.bucket + '/'
     params = {'dir_op': True, 'dryrun': True, 'quiet': False,
               'src': s3_prefix, 'dest': s3_prefix, 'paths_type': 's3',
               'region': 'us-east-1'}
     cmd_arc = CommandArchitecture(self.session, 'ls', params)
     cmd_arc.create_instructions()
     cmd_arc.run()
Code Example #3
File: test_s3.py Project: Gimpson/aws-cli
 def test_run_mb(self):
     # This ensures that the architecture sets up correctly for a ``mb``
     # command.  It is just a dry run, but all of the components need
     # to be wired correctly for it to work.
     s3_prefix = 's3://' + self.bucket + '/'
     params = {'dir_op': True, 'dryrun': True, 'quiet': False,
               'src': s3_prefix, 'dest': s3_prefix, 'paths_type': 's3',
               'region': 'us-east-1', 'endpoint_url': None}
     cmd_arc = CommandArchitecture(self.session, 'mb', params)
     cmd_arc.create_instructions()
     cmd_arc.run()
     output_str = "(dryrun) make_bucket: %s" % s3_prefix
     self.assertIn(output_str, self.output.getvalue())
Code Example #4
File: test_s3.py Project: danielgtaylor/aws-cli
 def test_run_remove(self):
     # This ensures that the architecture sets up correctly for a ``rm``
     # command.  It is just a dry run, but all of the components need
     # to be wired correctly for it to work.
     s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
     filters = [['--include', '*']]
     params = {'dir_op': False, 'dryrun': True, 'quiet': False,
               'src': s3_file, 'dest': s3_file, 'filters': filters,
               'paths_type': 's3', 'region': 'us-east-1'}
     cmd_arc = CommandArchitecture(self.session, 'rm', params)
     cmd_arc.create_instructions()
     cmd_arc.run()
     output_str = "(dryrun) delete: %s" % s3_file
     self.assertIn(output_str, self.output.getvalue())
Code Example #5
    def test_create_instructions(self):
        """
        This tests to make sure the instructions for any command are generated
        properly.
        """
        cmds = ['cp', 'mv', 'rm', 'sync', 'ls', 'mb', 'rb']

        instructions = {
            'cp': ['file_generator', 's3_handler'],
            'mv': ['file_generator', 's3_handler'],
            'rm': ['file_generator', 's3_handler'],
            'sync': ['file_generator', 'comparator', 's3_handler'],
            'ls': ['s3_handler'],
            'mb': ['s3_handler'],
            'rb': ['s3_handler']
        }

        params = {'filters': True}
        for cmd in cmds:
            cmd_arc = CommandArchitecture(self.session, cmd, {})
            cmd_arc.create_instructions()
            self.assertEqual(cmd_arc.instructions, instructions[cmd])

        # Test if there is a filter.
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.create_instructions()
        self.assertEqual(cmd_arc.instructions,
                         ['file_generator', 'filters', 's3_handler'])
Code Example #6
File: test_s3.py Project: danielgtaylor/aws-cli
 def test_run_rb_nonzero_rc(self):
     # This ensures that the architecture sets up correctly for a ``rb``
     # command.  It is not a dry run; the ``remove_bucket`` call is
     # expected to fail, so the command should report the failure and
     # return a nonzero rc.
     s3_prefix = 's3://' + self.bucket + '/'
     params = {'dir_op': True, 'dryrun': False, 'quiet': False,
               'src': s3_prefix, 'dest': s3_prefix, 'paths_type': 's3',
               'region': 'us-east-1'}
     cmd_arc = CommandArchitecture(self.session, 'rb', params)
     cmd_arc.create_instructions()
     rc = cmd_arc.run()
     output_str = "remove_bucket failed: %s" % s3_prefix
     self.assertIn(output_str, self.output.getvalue())
     self.assertEqual(rc, 1)
Code Example #7
File: test_s3.py Project: HackedByChinese/aws-cli
 def test_run_cp_copy(self):
     # This ensures that the architecture sets up correctly for a ``cp`` copy
     # command.  It is just a dry run, but all of the components need
     # to be wired correctly for it to work.
     s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
     filters = [['--include', '*']]
     params = {'dir_op': False, 'dryrun': True, 'quiet': False,
               'src': s3_file, 'dest': s3_file, 'filters': filters,
               'paths_type': 's3s3', 'region': 'us-east-1',
               'endpoint_url': None, 'verify_ssl': None}
     cmd_arc = CommandArchitecture(self.session, 'cp', params)
     cmd_arc.create_instructions()
     cmd_arc.run()
     output_str = "(dryrun) copy: %s to %s" % (s3_file, s3_file)
     self.assertIn(output_str, self.output.getvalue())
Code Example #8
File: test_s3.py Project: smikes/aws-cli
 def test_run_rb_nonzero_rc(self):
     # This ensures that the architecture sets up correctly for a ``rb``
     # command.  It is not a dry run; the ``remove_bucket`` call is
     # expected to fail, so the command should report the failure and
     # return a nonzero rc.
     s3_prefix = 's3://' + self.bucket + '/'
     params = {'dir_op': True, 'dryrun': False, 'quiet': False,
               'src': s3_prefix, 'dest': s3_prefix, 'paths_type': 's3',
               'region': 'us-east-1', 'endpoint_url': None,
               'verify_ssl': None}
     cmd_arc = CommandArchitecture(self.session, 'rb', params)
     cmd_arc.create_instructions()
     rc = cmd_arc.run()
     output_str = "remove_bucket failed: %s" % s3_prefix
     self.assertIn(output_str, self.output.getvalue())
     self.assertEqual(rc, 1)
Code Example #9
File: test_s3.py Project: danielgtaylor/aws-cli
 def test_run_cp_get(self):
     # This ensures that the architecture sets up correctly for a ``cp`` get
     # command.  It is just a dry run, but all of the components need
     # to be wired correctly for it to work.
     s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
     local_file = self.loc_files[0]
     rel_local_file = os.path.relpath(local_file)
     filters = [['--include', '*']]
     params = {'dir_op': False, 'dryrun': True, 'quiet': False,
               'src': s3_file, 'dest': local_file, 'filters': filters,
               'paths_type': 's3local', 'region': 'us-east-1'}
     cmd_arc = CommandArchitecture(self.session, 'cp', params)
     cmd_arc.create_instructions()
     cmd_arc.run()
     output_str = "(dryrun) download: %s to %s" % (s3_file, rel_local_file)
     self.assertIn(output_str, self.output.getvalue())
Code Example #10
File: test_s3.py Project: Gimpson/aws-cli
 def test_error_on_same_line_as_status(self):
     s3_file = 's3://' + 'bucket-does-not-exist' + '/' + 'text1.txt'
     local_file = self.loc_files[0]
     rel_local_file = os.path.relpath(local_file)
     filters = [['--include', '*']]
     params = {'dir_op': False, 'dryrun': False, 'quiet': False,
               'src': local_file, 'dest': s3_file, 'filters': filters,
               'paths_type': 'locals3', 'region': 'us-east-1',
               'endpoint_url': None}
     cmd_arc = CommandArchitecture(self.session, 'cp', params)
     cmd_arc.create_instructions()
     cmd_arc.run()
     # Also, we need to verify that the error message is on the *same* line
     # as the upload failed line, to make it easier to track.
     output_str = (
         "upload failed: %s to %s Error: Bucket does not exist\n" % (
             rel_local_file, s3_file))
     self.assertIn(output_str, self.output.getvalue())
Code Example #11
File: test_s3.py Project: HackedByChinese/aws-cli
 def test_run_sync(self):
     # This ensures that the architecture sets up correctly for a ``sync``
     # command.  It is just a dry run, but all of the components need
     # to be wired correctly for it to work.
     s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
     local_file = self.loc_files[0]
     s3_prefix = 's3://' + self.bucket + '/'
     local_dir = self.loc_files[3]
     rel_local_file = os.path.relpath(local_file)
     filters = [['--include', '*']]
     params = {'dir_op': True, 'dryrun': True, 'quiet': False,
               'src': local_dir, 'dest': s3_prefix, 'filters': filters,
               'paths_type': 'locals3', 'region': 'us-east-1',
               'endpoint_url': None, 'verify_ssl': None}
     cmd_arc = CommandArchitecture(self.session, 'sync', params)
     cmd_arc.create_instructions()
     cmd_arc.run()
     output_str = "(dryrun) upload: %s to %s" % (rel_local_file, s3_file)
     self.assertIn(output_str, self.output.getvalue())
Code Example #12
File: test_s3.py Project: nealma/aws-cli
 def test_run_ls(self):
     """
     This ensures that the architecture sets up correctly for an ``ls``
     command.  It is just a dry run, but all of the components need
     to be wired correctly for it to work.
     """
     s3_file = "s3://" + self.bucket + "/" + "text1.txt"
     s3_prefix = "s3://" + self.bucket + "/"
     params = {
         "dir_op": True,
         "dryrun": True,
         "quiet": False,
         "src": s3_prefix,
         "dest": s3_prefix,
         "paths_type": "s3",
         "region": "us-east-1",
     }
     cmd_arc = CommandArchitecture(self.session, "ls", params)
     cmd_arc.create_instructions()
     cmd_arc.run()
Code Example #13
File: test_s3.py Project: nealma/aws-cli
 def test_run_rb(self):
     """
     This ensures that the architecture sets up correctly for a ``rb``
     command.  It is just a dry run, but all of the components need
     to be wired correctly for it to work.
     """
     s3_prefix = "s3://" + self.bucket + "/"
     params = {
         "dir_op": True,
         "dryrun": True,
         "quiet": False,
         "src": s3_prefix,
         "dest": s3_prefix,
         "paths_type": "s3",
         "region": "us-east-1",
     }
     cmd_arc = CommandArchitecture(self.session, "rb", params)
     cmd_arc.create_instructions()
     cmd_arc.run()
     output_str = "(dryrun) remove_bucket: %s" % s3_prefix
     self.assertIn(output_str, self.output.getvalue())
Code Example #14
File: test_s3.py Project: ifa6/aws-cli
 def test_run_cp_get(self):
     # This ensures that the architecture sets up correctly for a ``cp`` get
     # command.  It is just a dry run, but all of the components need
     # to be wired correctly for it to work.
     s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
     local_file = self.loc_files[0]
     rel_local_file = os.path.relpath(local_file)
     filters = [['--include', '*']]
     params = {
         'dir_op': False,
         'dryrun': True,
         'quiet': False,
         'src': s3_file,
         'dest': local_file,
         'filters': filters,
         'paths_type': 's3local',
         'region': 'us-east-1',
         'endpoint_url': None,
         'verify_ssl': None,
         'follow_symlinks': True
     }
     cmd_arc = CommandArchitecture(self.session, 'cp', params)
     cmd_arc.create_instructions()
     cmd_arc.run()
     output_str = "(dryrun) download: %s to %s" % (s3_file, rel_local_file)
     self.assertIn(output_str, self.output.getvalue())
Code Example #15
File: test_s3.py Project: nealma/aws-cli
    def test_create_instructions(self):
        """
        This tests to make sure the instructions for any command are generated
        properly.
        """
        cmds = ["cp", "mv", "rm", "sync", "ls", "mb", "rb"]

        instructions = {
            "cp": ["file_generator", "s3_handler"],
            "mv": ["file_generator", "s3_handler"],
            "rm": ["file_generator", "s3_handler"],
            "sync": ["file_generator", "comparator", "s3_handler"],
            "ls": ["s3_handler"],
            "mb": ["s3_handler"],
            "rb": ["s3_handler"],
        }

        params = {"filters": True}
        for cmd in cmds:
            cmd_arc = CommandArchitecture(self.session, cmd, {})
            cmd_arc.create_instructions()
            self.assertEqual(cmd_arc.instructions, instructions[cmd])

        # Test if there is a filter.
        cmd_arc = CommandArchitecture(self.session, "cp", params)
        cmd_arc.create_instructions()
        self.assertEqual(cmd_arc.instructions, ["file_generator", "filters", "s3_handler"])
Code Example #16
File: test_s3.py Project: ifa6/aws-cli
 def test_error_on_same_line_as_status(self):
     s3_file = 's3://' + 'bucket-does-not-exist' + '/' + 'text1.txt'
     local_file = self.loc_files[0]
     rel_local_file = os.path.relpath(local_file)
     filters = [['--include', '*']]
     params = {
         'dir_op': False,
         'dryrun': False,
         'quiet': False,
         'src': local_file,
         'dest': s3_file,
         'filters': filters,
         'paths_type': 'locals3',
         'region': 'us-east-1',
         'endpoint_url': None,
         'verify_ssl': None,
         'follow_symlinks': True
     }
     cmd_arc = CommandArchitecture(self.session, 'cp', params)
     cmd_arc.create_instructions()
     cmd_arc.run()
     # Also, we need to verify that the error message is on the *same* line
     # as the upload failed line, to make it easier to track.
     output_str = (
         "upload failed: %s to %s Error: Bucket does not exist\n" %
         (rel_local_file, s3_file))
     self.assertIn(output_str, self.output.getvalue())
Code Example #17
File: test_s3.py Project: Gimpson/aws-cli
    def test_create_instructions(self):
        """
        This tests to make sure the instructions for any command are generated
        properly.
        """
        cmds = ['cp', 'mv', 'rm', 'sync', 'mb', 'rb']

        instructions = {'cp': ['file_generator', 's3_handler'],
                        'mv': ['file_generator', 's3_handler'],
                        'rm': ['file_generator', 's3_handler'],
                        'sync': ['file_generator', 'comparator', 's3_handler'],
                        'mb': ['s3_handler'],
                        'rb': ['s3_handler']}

        params = {'filters': True, 'region': 'us-east-1', 'endpoint_url': None}
        for cmd in cmds:
            cmd_arc = CommandArchitecture(self.session, cmd, {'region': 'us-east-1', 'endpoint_url': None})
            cmd_arc.create_instructions()
            self.assertEqual(cmd_arc.instructions, instructions[cmd])

        # Test if there is a filter.
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.create_instructions()
        self.assertEqual(cmd_arc.instructions, ['file_generator', 'filters',
                                                's3_handler'])
Code Example #18
 def test_run_sync(self):
     """
     This ensures that the architecture sets up correctly for a ``sync``
     command.  It is just a dry run, but all of the components need
     to be wired correctly for it to work.
     """
     s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
     local_file = self.loc_files[0]
     s3_prefix = 's3://' + self.bucket + '/'
     local_dir = self.loc_files[3]
     rel_local_file = os.path.relpath(local_file)
     filters = [['--include', '*']]
     params = {
         'dir_op': True,
         'dryrun': True,
         'quiet': False,
         'src': local_dir,
         'dest': s3_prefix,
         'filters': filters,
         'paths_type': 'locals3',
         'region': 'us-east-1'
     }
     cmd_arc = CommandArchitecture(self.session, 'sync', params)
     cmd_arc.create_instructions()
     cmd_arc.run()
     output_str = "(dryrun) upload: %s to %s" % (rel_local_file, s3_file)
     self.assertIn(output_str, self.output.getvalue())
Code Example #19
File: test_s3.py Project: nealma/aws-cli
 def test_run_remove(self):
     """
     This ensures that the architecture sets up correctly for a ``rm``
     command.  It is just a dry run, but all of the components need
     to be wired correctly for it to work.
     """
     s3_file = "s3://" + self.bucket + "/" + "text1.txt"
     filters = [["--include", "*"]]
     params = {
         "dir_op": False,
         "dryrun": True,
         "quiet": False,
         "src": s3_file,
         "dest": s3_file,
         "filters": filters,
         "paths_type": "s3",
         "region": "us-east-1",
     }
     cmd_arc = CommandArchitecture(self.session, "rm", params)
     cmd_arc.create_instructions()
     cmd_arc.run()
     output_str = "(dryrun) delete: %s" % s3_file
     self.assertIn(output_str, self.output.getvalue())
Code Example #20
File: test_s3.py Project: smikes/aws-cli
 def test_run_remove(self):
     # This ensures that the architecture sets up correctly for a ``rm``
     # command.  It is just a dry run, but all of the components need
     # to be wired correctly for it to work.
     s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
     filters = [['--include', '*']]
     params = {'dir_op': False, 'dryrun': True, 'quiet': False,
               'src': s3_file, 'dest': s3_file, 'filters': filters,
               'paths_type': 's3', 'region': 'us-east-1',
               'endpoint_url': None, 'verify_ssl': None}
     cmd_arc = CommandArchitecture(self.session, 'rm', params)
     cmd_arc.create_instructions()
     cmd_arc.run()
     output_str = "(dryrun) delete: %s" % s3_file
     self.assertIn(output_str, self.output.getvalue())
Code Example #21
 def test_run_ls(self):
     """
     This ensures that the architecture sets up correctly for an ``ls``
     command.  It is just a dry run, but all of the components need
     to be wired correctly for it to work.
     """
     s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
     s3_prefix = 's3://' + self.bucket + '/'
     params = {
         'dir_op': True,
         'dryrun': True,
         'quiet': False,
         'src': s3_prefix,
         'dest': s3_prefix,
         'paths_type': 's3',
         'region': 'us-east-1'
     }
     cmd_arc = CommandArchitecture(self.session, 'ls', params)
     cmd_arc.create_instructions()
     cmd_arc.run()
Code Example #22
 def test_run_rb(self):
     """
     This ensures that the architecture sets up correctly for a ``rb``
     command.  It is just a dry run, but all of the components need
     to be wired correctly for it to work.
     """
     s3_prefix = 's3://' + self.bucket + '/'
     params = {
         'dir_op': True,
         'dryrun': True,
         'quiet': False,
         'src': s3_prefix,
         'dest': s3_prefix,
         'paths_type': 's3',
         'region': 'us-east-1'
     }
     cmd_arc = CommandArchitecture(self.session, 'rb', params)
     cmd_arc.create_instructions()
     cmd_arc.run()
     output_str = "(dryrun) remove_bucket: %s" % s3_prefix
     self.assertIn(output_str, self.output.getvalue())
Code Example #23
 def test_run_mv(self):
     """
     This ensures that the architecture sets up correctly for a ``mv``
     command.  It is just a dry run, but all of the components need
     to be wired correctly for it to work.
     """
     s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
     filters = [['--include', '*']]
     params = {
         'dir_op': False,
         'dryrun': True,
         'quiet': False,
         'src': s3_file,
         'dest': s3_file,
         'filters': filters,
         'paths_type': 's3s3',
         'region': 'us-east-1'
     }
     cmd_arc = CommandArchitecture(self.session, 'mv', params)
     cmd_arc.create_instructions()
     cmd_arc.run()
     output_str = "(dryrun) move: %s to %s" % (s3_file, s3_file)
     self.assertIn(output_str, self.output.getvalue())
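Note: every example above relies on a test fixture that provides self.session, self.bucket, self.loc_files, and self.output, and that redirects stdout so the assertions can inspect the printed status lines. The real aws-cli test suite defines its own setUp(); the sketch below is only a minimal, hypothetical fixture with invented values (the mock session, bucket name, and local paths are assumptions for illustration), showing what those attributes are expected to hold.

import os
import sys
import unittest
from io import StringIO
from unittest import mock


class CommandArchitectureTest(unittest.TestCase):
    # A minimal sketch of the kind of fixture these tests assume; not the
    # helpers used by the actual aws-cli test suite.
    def setUp(self):
        self.session = mock.Mock()      # stand-in for a botocore session
        self.bucket = 'mybucket'        # hypothetical bucket name
        # Hypothetical local paths; index 0 is a file and index 3 is the
        # directory containing it, mirroring how the examples use
        # self.loc_files[0] and self.loc_files[3].
        base_dir = os.path.join('some_files', 'dir')
        self.loc_files = [
            os.path.join(base_dir, 'text1.txt'),
            os.path.join(base_dir, 'another_directory', 'text2.txt'),
            os.path.join(base_dir, 'another_directory'),
            base_dir,
        ]
        # Capture stdout so assertIn() can inspect the printed status lines.
        self.output = StringIO()
        self._saved_stdout = sys.stdout
        sys.stdout = self.output

    def tearDown(self):
        sys.stdout = self._saved_stdout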