Example 1
    def test_cp_to_and_from_s3(self):
        # This tests the ability to put a single file in s3,
        # verify the object's contents and content type,
        # and download the file locally under a new name.
        bucket_name = self.create_bucket()

        # copy file into bucket.
        foo_txt = self.files.create_file('foo.txt', 'this is foo.txt')
        p = aws('s3 cp %s s3://%s/foo.txt' % (foo_txt, bucket_name))
        self.assert_no_errors(p)

        # Make sure object is in bucket.
        self.assertTrue(self.key_exists(bucket_name, key_name='foo.txt'))
        self.assertEqual(
            self.get_key_contents(bucket_name, key_name='foo.txt'),
            'this is foo.txt')

        self.assertEqual(
            self.content_type_for_key(bucket_name, key_name='foo.txt'),
            'text/plain')

        # Make a new name for the file and copy it locally.
        full_path = self.files.full_path('bar.txt')
        p = aws('s3 cp s3://%s/foo.txt %s' % (bucket_name, full_path))
        self.assert_no_errors(p)

        with open(full_path, 'r') as f:
            self.assertEqual(f.read(), 'this is foo.txt')
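
A note on the harness: every snippet here calls an aws() helper from the aws-cli integration test support code, which runs the CLI in a subprocess and returns an object exposing rc, stdout, stderr, and parsed json. The sketch below is a minimal assumed reconstruction for readers who want to run these snippets standalone; the Result class and the env-merging behavior are illustrative, not the harness's actual implementation.

import json
import os
import shlex
import subprocess


class Result(object):
    # Illustrative stand-in for the harness's result object.
    def __init__(self, rc, stdout, stderr):
        self.rc = rc
        self.stdout = stdout
        self.stderr = stderr

    @property
    def json(self):
        # Commands that hit a JSON API get parsed on demand.
        return json.loads(self.stdout)


def aws(command, env_vars=None):
    # Run "aws <command>", merging any extra env vars (e.g. a custom
    # AWS_CONFIG_FILE) into the inherited environment.
    env = dict(os.environ)
    if env_vars:
        env.update(env_vars)
    process = subprocess.Popen(
        ['aws'] + shlex.split(command),
        stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env)
    stdout, stderr = process.communicate()
    return Result(process.returncode,
                  stdout.decode('utf-8'), stderr.decode('utf-8'))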
Example 2
 def test_exclude_filter_with_delete(self):
     # Test for: https://github.com/aws/aws-cli/issues/778
     bucket_name = self.create_bucket()
     first = self.files.create_file('foo.txt', 'contents')
     second = self.files.create_file('bar.py', 'contents')
     p = aws("s3 sync %s s3://%s/" % (self.files.rootdir, bucket_name))
     self.assert_no_errors(p)
     self.assertTrue(self.key_exists(bucket_name, key_name='bar.py'))
     os.remove(second)
     # We now have the same state as specified in the bug:
     # local           remote
     # -----           ------
     #
     # foo.txt         foo.txt
     #                 bar.py
     #
     # If we now run --exclude '*.py' --delete, then we should *not*
     # delete bar.py on the remote side.
     p = aws("s3 sync %s s3://%s/ --exclude '*.py' --delete" % (
         self.files.rootdir, bucket_name))
     self.assert_no_errors(p)
     self.assertTrue(
         self.key_exists(bucket_name, key_name='bar.py'),
         ("The --delete flag was not applied to the receiving "
          "end, the 'bar.py' file was deleted even though it was excluded."))
Example 3
    def test_sync_with_delete_option_with_same_prefix(self):
        # Test for issue 440 (https://github.com/aws/aws-cli/issues/440)
        # First, we need to create a directory structure that has a dir with
        # the same prefix as some of the files:
        #
        #  test/foo.txt
        #  test-123.txt
        #  test-321.txt
        #  test.txt
        bucket_name = self.create_bucket()
        # create test/foo.txt
        nested_dir = os.path.join(self.files.rootdir, 'test')
        os.mkdir(nested_dir)
        self.files.create_file(os.path.join(nested_dir, 'foo.txt'),
                               contents='foo.txt contents')
        # Then create test-123.txt, test-321.txt, test.txt.
        self.files.create_file('test-123.txt', 'test-123.txt contents')
        self.files.create_file('test-321.txt', 'test-321.txt contents')
        self.files.create_file('test.txt', 'test.txt contents')

        # Now sync this content up to s3.
        p = aws('s3 sync %s s3://%s/' % (self.files.rootdir, bucket_name))
        self.assert_no_errors(p)

        # Now here's the issue.  If we try to sync the contents down
        # with the --delete flag we should *not* see any output, the
        # sync operation should determine that nothing is different and
        # therefore do nothing.  We can just use --dryrun to show the issue.
        p = aws('s3 sync s3://%s/ %s --dryrun --delete' % (
            bucket_name, self.files.rootdir))
        # These assertion methods will give better error messages than just
        # checking if the output is empty.
        self.assertNotIn('download:', p.stdout)
        self.assertNotIn('delete:', p.stdout)
        self.assertEqual('', p.stdout)
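
Why same-prefix names are a hazard for sync: the comparison walks two sorted listings, and names sharing a prefix order on the punctuation that follows it ('-' is 45, '.' is 46, '/' is 47 in ASCII), so the flat test-*.txt files sort after each other but before the nested key, even though a plain directory listing puts the directory 'test' first. A quick illustration of the ordering a comparator has to get right (this shows the subtlety behind issue 440, not a claim about its exact root cause):

assert sorted(['test/foo.txt', 'test-123.txt', 'test-321.txt',
               'test.txt']) == \
    ['test-123.txt', 'test-321.txt', 'test.txt', 'test/foo.txt']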
Example 4
 def tearDown(self):
     if os.path.exists(self.filename1):
         os.remove(self.filename1)
     aws('s3 rb --force s3://%s' % self.bucket_name)
     aws('s3 rb --force s3://%s' % self.bucket_name2)
     if os.path.exists(self.filename2):
         os.remove(self.filename2)
Example 5
    def test_set_with_empty_config_file(self):
        with open(self.config_filename, 'w'):
            pass

        aws('configure set region us-west-1', env_vars=self.env_vars)
        self.assertEqual(
            '[default]\n'
            'region = us-west-1\n', self.get_config_file_contents())
Example 6
 def tearDown(self):
     aws('s3 rm --recursive s3://%s --quiet' % self.bucket_name)
     aws('s3 rb s3://%s' % self.bucket_name)
     if os.path.exists(self.path1):
         os.remove(self.path1)
     if os.path.exists(self.path2):
         os.remove(self.path2)
     if os.path.exists('some_dir'):
         os.rmdir('some_dir')
Example 7
    def test_mb_rb(self):
        p = aws('s3 mb s3://%s' % self.bucket_name)
        self.assert_no_errors(p)

        response = self.list_buckets()
        self.assertIn(self.bucket_name, [b['Name'] for b in response])

        p = aws('s3 rb s3://%s' % self.bucket_name)
        self.assert_no_errors(p)
Example 8
    def test_set_with_updating_value(self):
        self.set_config_file_contents(
            '[default]\n'
            'region = us-west-2\n')

        aws('configure set region us-west-1', env_vars=self.env_vars)
        self.assertEqual(
            '[default]\n'
            'region = us-west-1\n', self.get_config_file_contents())
Example 9
 def test_set_with_commented_out_field(self):
     self.set_config_file_contents(
         '#[preview]\n'
         ';cloudsearch = true\n')
     aws('configure set preview.cloudsearch true', env_vars=self.env_vars)
     self.assertEqual(
         '#[preview]\n'
         ';cloudsearch = true\n'
         '[preview]\n'
         'cloudsearch = true\n', self.get_config_file_contents())
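
The appended section makes sense once you note that both '#' and ';' are full-line comment prefixes in the INI dialect the CLI uses, so the existing lines are invisible to the parser and configure set has to write a fresh [preview] section. Python's configparser (a stand-in here for the CLI's own parser) behaves the same way:

import configparser

parser = configparser.RawConfigParser()
parser.read_string('#[preview]\n;cloudsearch = true\n')
# Both lines parse as comments, so no [preview] section is visible.
assert parser.sections() == []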
Example 10
 def test_mv_local_to_s3(self):
     bucket_name = self.create_bucket()
     full_path = self.files.create_file('foo.txt', 'this is foo.txt')
     aws('s3 mv %s s3://%s/foo.txt' % (full_path,
                                       bucket_name))
     # When we move an object, the local file is gone:
     self.assertTrue(not os.path.exists(full_path))
     # And now resides in s3.
     contents = self.get_key_contents(bucket_name, 'foo.txt')
     self.assertEqual(contents, 'this is foo.txt')
Example 11
    def test_basic_exclude_filter_for_single_file(self):
        full_path = self.files.create_file('foo.txt', 'this is foo.txt')
        # With no exclude we should upload the file.
        p = aws('s3 cp %s s3://random-bucket-name/ --dryrun' % full_path)
        self.assert_no_errors(p)
        self.assertIn('(dryrun) upload:', p.stdout)

        p2 = aws("s3 cp %s s3://random-bucket-name/ --dryrun --exclude '*'"
                 % full_path)
        self.assert_no_files_would_be_uploaded(p2)
Example 12
    def test_cp_s3_s3_multipart(self):
        from_bucket = self.create_bucket()
        to_bucket = self.create_bucket()
        file_contents = 'abcd' * (1024 * 1024 * 10)
        self.put_object(from_bucket, 'foo.txt', file_contents)

        aws('s3 cp s3://%s/foo.txt s3://%s/foo.txt' % (from_bucket, to_bucket))
        contents = self.get_key_contents(to_bucket, 'foo.txt')
        self.assertEqual(contents, file_contents)
        self.assertTrue(self.key_exists(from_bucket, key_name='foo.txt'))
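
For scale: the payload is 'abcd' repeated 1024 * 1024 * 10 times, i.e. 40 MiB, well above the CLI's default 8 MB multipart threshold, so this cp exercises the multipart copy path. A quick arithmetic check:

# 4 bytes * 10,485,760 repetitions = 40 MiB.
assert len('abcd' * (1024 * 1024 * 10)) == 40 * 1024 * 1024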
Example 13
    def test_mv_s3_to_s3(self):
        from_bucket = self.create_bucket()
        to_bucket = self.create_bucket()
        self.put_object(from_bucket, 'foo.txt', 'this is foo.txt')

        aws('s3 mv s3://%s/foo.txt s3://%s/foo.txt' % (from_bucket, to_bucket))
        contents = self.get_key_contents(to_bucket, 'foo.txt')
        self.assertEqual(contents, 'this is foo.txt')
        # And verify that the object no longer exists in the from_bucket.
        self.assertTrue(not self.key_exists(from_bucket, key_name='foo.txt'))
Example 14
    def test_json_param_parsing(self):
        # This is covered by unit tests in botocore, but this is a sanity
        # check that we get a json response from a json service.
        p = aws('swf list-domains --registration-status REGISTERED')
        self.assertEqual(p.rc, 0)
        self.assertIsInstance(p.json, dict)

        p = aws('dynamodb list-tables')
        self.assertEqual(p.rc, 0)
        self.assertIsInstance(p.json, dict)
Example 15
    def test_mv_s3_to_s3_multipart(self):
        from_bucket = self.create_bucket()
        to_bucket = self.create_bucket()
        file_contents = 'abcd' * (1024 * 1024 * 10)
        self.put_object(from_bucket, 'foo.txt', file_contents)

        aws('s3 mv s3://%s/foo.txt s3://%s/foo.txt' % (from_bucket, to_bucket))
        contents = self.get_key_contents(to_bucket, 'foo.txt')
        self.assertEqual(contents, file_contents)
        # And verify that the object no longer exists in the from_bucket.
        self.assertTrue(not self.key_exists(from_bucket, key_name='foo.txt'))
Example 16
 def test_mv_s3_to_local(self):
     bucket_name = self.create_bucket()
     self.put_object(bucket_name, 'foo.txt', 'this is foo.txt')
     full_path = self.files.full_path('foo.txt')
     self.assertTrue(self.key_exists(bucket_name, key_name='foo.txt'))
     aws('s3 mv s3://%s/foo.txt %s' % (bucket_name, full_path))
     self.assertTrue(os.path.exists(full_path))
     with open(full_path, 'r') as f:
         self.assertEqual(f.read(), 'this is foo.txt')
     # The s3 file should not be there anymore.
     self.assertTrue(not self.key_exists(bucket_name, key_name='foo.txt'))
Example 17
    def test_recur_cp(self):
        p = aws('s3 cp %s s3://%s --recursive --quiet' % ('some_dir',
                                                          self.bucket_name))
        self.assertEqual(p.rc, 0)
        p = aws('s3 cp s3://%s %s --recursive --quiet' % (self.bucket_name,
                                                          'some_dir'))
        self.assertEqual(p.rc, 0)
        with open(self.path1, 'rb') as file2:
            data = file2.read()

        # Ensure the contents are the same.
        self.assertEqual(data, b'This is a test.')
Example 18
 def test_upload_download_file_with_spaces(self):
     bucket_name = self.create_bucket()
     filename = self.files.create_file('with space.txt', 'contents')
     p = aws('s3 cp %s s3://%s/ --recursive' % (self.files.rootdir,
                                                bucket_name))
     self.assert_no_errors(p)
     os.remove(filename)
     # Now download the file back down locally.
     p = aws('s3 cp s3://%s/ %s --recursive' % (bucket_name,
                                                self.files.rootdir))
     self.assert_no_errors(p)
     self.assertEqual(os.listdir(self.files.rootdir)[0], 'with space.txt')
Example 19
    def test_fail_mb_rb(self):
        """
        Makes sure that mb and rb fail properly.

        Note: the bucket name "mybucket" is already taken, so this
        account can neither create it nor delete it.
        """
        bucket_name = "mybucket"
        p = aws('s3 mb s3://%s' % bucket_name)
        self.assertIn("BucketAlreadyExists", p.stdout)

        p = aws('s3 rb s3://%s' % bucket_name)
        self.assertIn("AccessDenied", p.stdout)
Example 20
    def test_cp(self):
        file_path1 = os.path.join('some_dir', self.filename1)
        file_path2 = os.path.join('some_dir', self.filename2)
        p = aws('s3 cp %s s3://%s --quiet' % (file_path1, self.bucket_name))
        self.assertEqual(p.rc, 0)
        s3_path = self.bucket_name + '/' + self.filename1
        p = aws('s3 cp s3://%s %s --quiet' % (s3_path, file_path2))
        self.assertEqual(p.rc, 0)
        with open(self.path2, 'rb') as file2:
            data = file2.read()

        # Ensure the contents are the same.
        self.assertEqual(data, b'This is a test.')
Example 21
    def test_dryrun(self):
        # Make a bucket.
        p = aws('s3 mb s3://%s' % self.bucket_name)
        self.assertEqual(p.rc, 0)

        # Copy file into bucket.
        p = aws('s3 cp %s s3://%s --dryrun' % (self.filename1,
                                               self.bucket_name))
        self.assertEqual(p.rc, 0)
        self.assertNotIn("Error:", p.stdout)
        self.assertNotIn("failed:", p.stdout)

        # Make sure the file is not in the bucket.
        p = aws('s3 ls s3://%s' % self.bucket_name)
        self.assertNotIn(self.filename1, p.stdout)
Example 22
 def test_sync_file_with_spaces(self):
     bucket_name = self.create_bucket()
     filename = self.files.create_file('with space.txt', 'contents')
     p = aws('s3 sync %s s3://%s/' % (self.files.rootdir,
                                      bucket_name))
     self.assert_no_errors(p)
     # Now syncing again should *not* trigger any uploads (i.e. we should
     # get nothing on stdout).
     p2 = aws('s3 sync %s s3://%s/' % (self.files.rootdir,
                                       bucket_name))
     self.assertEqual(p2.stdout, '')
     self.assertEqual(p2.stderr, '')
     self.assertEqual(p2.rc, 0)
Example 23
    def test_cp(self):
        bucket_name = self.create_bucket()
        local_example1_txt = self.files.create_file('êxample.txt', 'example1 contents')
        s3_example1_txt = 's3://%s/%s' % (bucket_name,
                                          os.path.basename(local_example1_txt))
        local_example2_txt = self.files.full_path('êxample2.txt')

        p = aws('s3 cp %s %s' % (local_example1_txt, s3_example1_txt))
        self.assert_no_errors(p)

        # Download the file to the second filename, êxample2.txt.
        p = aws('s3 cp %s %s --quiet' % (s3_example1_txt, local_example2_txt))
        self.assert_no_errors(p)
        with open(local_example2_txt, 'rb') as f:
            self.assertEqual(f.read(), b'example1 contents')
Example 24
    def test_recursive_cp(self):
        bucket_name = self.create_bucket()
        local_example1_txt = self.files.create_file('êxample1.txt', 'example1 contents')
        local_example2_txt = self.files.create_file('êxample2.txt', 'example2 contents')
        p = aws('s3 cp %s s3://%s --recursive --quiet' % (
            self.files.rootdir, bucket_name))
        self.assert_no_errors(p)

        os.remove(local_example1_txt)
        os.remove(local_example2_txt)

        p = aws('s3 cp s3://%s %s --recursive --quiet' % (
            bucket_name, self.files.rootdir))
        self.assert_no_errors(p)
        with open(local_example1_txt) as f:
            self.assertEqual(f.read(), 'example1 contents')
        with open(local_example2_txt) as f:
            self.assertEqual(f.read(), 'example2 contents')
Example 25
    def test_sync_to_nonexistent_bucket(self):
        self.files.create_file('foo.txt', 'foo contents')
        self.files.create_file('bar.txt', 'bar contents')

        # Sync the directory to a bucket that does not exist.
        p = aws('s3 sync %s s3://noexist-bkt-nme-1412' % (self.files.rootdir,))
        self.assertEqual(p.rc, 1)
Example 26
 def test_download_non_existent_key(self):
     p = aws('s3 cp s3://jasoidfjasdjfasdofijasdf/foo.txt foo.txt')
     self.assertEqual(p.rc, 1)
     expected_err_msg = (
         'A client error (NoSuchKey) occurred when calling the '
         'HeadObject operation: Key "foo.txt" does not exist')
     self.assertIn(expected_err_msg, p.stdout)
Example 27
 def test_cp_empty_file(self):
     bucket_name = self.create_bucket()
     foo_txt = self.files.create_file('foo.txt', contents='')
     p = aws('s3 cp %s s3://%s/' % (foo_txt, bucket_name))
     self.assertEqual(p.rc, 0)
     self.assertNotIn('failed', p.stderr)
     self.assertTrue(self.key_exists(bucket_name, 'foo.txt'))
Example 28
    def test_rm_with_newlines(self):
        bucket_name = self.create_bucket()

        # Note the carriage return in the key name.
        foo_txt = self.files.create_file('foo\r.txt', 'this is foo.txt')
        p = aws('s3 cp %s s3://%s/foo\r.txt' % (foo_txt, bucket_name))
        self.assert_no_errors(p)

        # Make sure object is in bucket.
        self.assertTrue(self.key_exists(bucket_name, key_name='foo\r.txt'))

        # Then delete the file.
        p = aws('s3 rm s3://%s/ --recursive' % (bucket_name,))
        self.assert_no_errors(p)

        # And verify it's gone.
        self.assertFalse(self.key_exists(bucket_name, key_name='foo\r.txt'))
Example 29
    def test_mv_s3_to_s3_multipart_recursive(self):
        from_bucket = self.create_bucket()
        to_bucket = self.create_bucket()

        large_file_contents = 'abcd' * (1024 * 1024 * 10)
        small_file_contents = 'small file contents'
        self.put_object(from_bucket, 'largefile', large_file_contents)
        self.put_object(from_bucket, 'smallfile', small_file_contents)

        p = aws('s3 mv s3://%s/ s3://%s/ --recursive' % (from_bucket,
                                                         to_bucket))
        self.assert_no_errors(p)
        # Nothing's in the from_bucket.
        self.assertTrue(not self.key_exists(from_bucket, key_name='largefile'))
        self.assertTrue(not self.key_exists(from_bucket, key_name='smallfile'))

        # And both files are in the to_bucket.
        self.assertTrue(self.key_exists(to_bucket, key_name='largefile'))
        self.assertTrue(self.key_exists(to_bucket, key_name='smallfile'))

        # And the contents are what we expect.
        self.assertEqual(self.get_key_contents(to_bucket, 'smallfile'),
                         small_file_contents)
        self.assertEqual(self.get_key_contents(to_bucket, 'largefile'),
                         large_file_contents)
Example 30
    def test_transfer_single_large_file(self):
        # 80MB will force a multipart upload.
        bucket_name = self.create_bucket()
        file_contents = 'abcdabcd' * (1024 * 1024 * 10)
        foo_txt = self.files.create_file('foo.txt', file_contents)
        full_command = 's3 mv %s s3://%s/foo.txt' % (foo_txt, bucket_name)
        p = aws(full_command, collect_memory=True)
        self.assert_no_errors(p)
        self.assert_max_memory_used(p, self.max_mem_allowed, full_command)

        # Verify downloading it back down obeys memory utilization.
        download_full_command = 's3 mv s3://%s/foo.txt %s' % (
            bucket_name, foo_txt)
        p = aws(download_full_command, collect_memory=True)
        self.assert_no_errors(p)
        self.assert_max_memory_used(p, self.max_mem_allowed, download_full_command)
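
assert_max_memory_used comes from the same test harness; below is a plausible sketch, assuming collect_memory=True makes aws() sample the child process's resident set size into p.memory (the attribute name and units are assumptions, not the harness's actual API):

def assert_max_memory_used(self, p, max_mem_allowed, full_command):
    # p.memory is assumed to hold RSS samples in bytes collected while
    # the command ran; fail with a readable message on a high peak.
    peak = max(p.memory)
    if peak > max_mem_allowed:
        self.fail(
            'Exceeded max memory allowed (%.1f MB) for command "%s": '
            'peak usage was %.1f MB' % (
                max_mem_allowed / 1024.0 / 1024.0, full_command,
                peak / 1024.0 / 1024.0))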
Example 31
 def test_set_with_profile(self):
     aws('configure set region us-west-1 --profile testing',
         env_vars=self.env_vars)
     self.assertEqual(
         '[profile testing]\n'
         'region = us-west-1\n', self.get_config_file_contents())
Example 32
 def test_param_with_bad_json(self):
     p = aws('ec2 describe-instances --filters '
             '\'{"Name": "bad-filter", "Values": ["i-123"]}\'')
     self.assertEqual(p.rc, 255)
     self.assertIn("The filter 'bad-filter' is invalid", p.stderr,
                   "stdout: %s, stderr: %s" % (p.stdout, p.stderr))
Example 33
 def test_help_usage_operation_level(self):
     p = aws('ec2 run-instances')
     self.assertIn(
         'usage: aws [options] <command> '
         '<subcommand> [parameters]', p.stderr)
Example 34
 def test_help_usage_service_level(self):
     p = aws('ec2')
     self.assertIn(
         'usage: aws [options] <command> '
         '<subcommand> [parameters]', p.stderr)
     self.assertIn('too few arguments', p.stderr)
Example 35
 def test_set_with_fq_double_dot(self):
     aws('configure set profile.testing.region us-west-2',
         env_vars=self.env_vars)
     self.assertEqual(
         '[profile testing]\n'
         'region = us-west-2\n', self.get_config_file_contents())
Example 36
 def test_top_level_options_debug(self):
     p = aws('ec2 describe-instances --debug')
     self.assertEqual(p.rc, 0)
     self.assertIn('DEBUG', p.stderr)
Example 37
 def test_set_with_fq_single_dot(self):
     aws('configure set preview.cloudsearch true', env_vars=self.env_vars)
     self.assertEqual(
         '[preview]\n'
         'cloudsearch = true\n', self.get_config_file_contents())
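
Examples 31, 35, and 37 together pin down how configure set maps dotted names onto config sections: a bare name lands in [default], profile.<name>.<key> becomes [profile <name>], and a single-dot name such as preview.cloudsearch becomes its own section. A hypothetical helper inferred from those expected file contents (not the CLI's actual code):

def section_and_key(dotted_name):
    parts = dotted_name.split('.')
    if len(parts) == 3 and parts[0] == 'profile':
        # profile.testing.region -> section [profile testing]
        return 'profile %s' % parts[1], parts[2]
    if len(parts) == 2:
        # preview.cloudsearch -> section [preview]
        return parts[0], parts[1]
    # Bare names go to the default profile section.
    return 'default', dotted_name


assert section_and_key('region') == ('default', 'region')
assert section_and_key('preview.cloudsearch') == ('preview', 'cloudsearch')
assert section_and_key('profile.testing.region') == ('profile testing',
                                                     'region')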
Example 38
 def test_make_requests_to_other_region(self):
     p = aws('ec2 describe-instances --region us-west-2')
     self.assertEqual(p.rc, 0)
     self.assertIn('Reservations', p.json)
Example 39
 def test_set_with_config_file_no_exist(self):
     aws('configure set region us-west-1', env_vars=self.env_vars)
     self.assertEqual(
         '[default]\n'
         'region = us-west-1\n', self.get_config_file_contents())
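
The configure examples also lean on two small harness helpers around the temporary config file. Assuming self.config_filename points at that file (with self.env_vars directing the CLI to it via AWS_CONFIG_FILE), they presumably reduce to something like:

def set_config_file_contents(self, contents):
    # Seed the temporary config file the CLI will read and rewrite.
    with open(self.config_filename, 'w') as f:
        f.write(contents)


def get_config_file_contents(self):
    # Read back whatever "aws configure set" wrote.
    with open(self.config_filename) as f:
        return f.read()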