Exemplo n.º 1
0
class TestIsSpecialFile(unittest.TestCase):
    """Tests that is_special_file() detects non-regular files.

    Character and block devices are simulated by patching the ``stat``
    predicates (creating real device nodes requires root); the FIFO and
    socket cases create real special files in a temp dir.
    """

    def setUp(self):
        self.files = FileCreator()
        self.filename = 'foo'

    def tearDown(self):
        self.files.remove_all()

    def test_is_character_device(self):
        file_path = os.path.join(self.files.rootdir, self.filename)
        self.files.create_file(self.filename, contents='')
        # Patch the stat predicate so a regular file reports as a char device.
        with mock.patch('stat.S_ISCHR') as mock_class:
            mock_class.return_value = True
            self.assertTrue(is_special_file(file_path))

    def test_is_block_device(self):
        file_path = os.path.join(self.files.rootdir, self.filename)
        self.files.create_file(self.filename, contents='')
        with mock.patch('stat.S_ISBLK') as mock_class:
            mock_class.return_value = True
            self.assertTrue(is_special_file(file_path))

    def test_is_fifo(self):
        file_path = os.path.join(self.files.rootdir, self.filename)
        mode = 0o600 | stat.S_IFIFO
        os.mknod(file_path, mode)
        self.assertTrue(is_special_file(file_path))

    def test_is_socket(self):
        file_path = os.path.join(self.files.rootdir, self.filename)
        sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        # Close the socket even if the assertion fails (fixes an fd leak).
        self.addCleanup(sock.close)
        sock.bind(file_path)
        self.assertTrue(is_special_file(file_path))
Exemplo n.º 2
0
class TestIsSpecialFile(unittest.TestCase):
    """Tests that is_special_file() detects non-regular files.

    Character and block devices are simulated by patching the ``stat``
    predicates (creating real device nodes requires root); the FIFO and
    socket cases create real special files in a temp dir.
    """

    def setUp(self):
        self.files = FileCreator()
        self.filename = 'foo'

    def tearDown(self):
        self.files.remove_all()

    def test_is_character_device(self):
        file_path = os.path.join(self.files.rootdir, self.filename)
        self.files.create_file(self.filename, contents='')
        # Patch the stat predicate so a regular file reports as a char device.
        with mock.patch('stat.S_ISCHR') as mock_class:
            mock_class.return_value = True
            self.assertTrue(is_special_file(file_path))

    def test_is_block_device(self):
        file_path = os.path.join(self.files.rootdir, self.filename)
        self.files.create_file(self.filename, contents='')
        with mock.patch('stat.S_ISBLK') as mock_class:
            mock_class.return_value = True
            self.assertTrue(is_special_file(file_path))

    def test_is_fifo(self):
        file_path = os.path.join(self.files.rootdir, self.filename)
        mode = 0o600 | stat.S_IFIFO
        os.mknod(file_path, mode)
        self.assertTrue(is_special_file(file_path))

    def test_is_socket(self):
        file_path = os.path.join(self.files.rootdir, self.filename)
        sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        # Close the socket even if the assertion fails (fixes an fd leak).
        self.addCleanup(sock.close)
        sock.bind(file_path)
        self.assertTrue(is_special_file(file_path))
Exemplo n.º 3
0
 def setUp(self):
     """Create fixture files and expected values for the deploy command."""
     super(TestDeployCommand, self).setUp()
     # setup required values
     files = FileCreator()
     # `files` is a local, so its temp dir was never removed after the
     # test ran; register the cleanup explicitly (fixes a tempdir leak).
     self.addCleanup(files.remove_all)
     self.task_def_file = files.create_file(
         'taskDef.json', json.dumps(self.TASK_DEFINITION_JSON), mode='w')
     self.appspec_file = files.create_file(
         'appspec.yaml', self.YAML_APPSPEC, mode='w')
     self.appspec_file_json = files.create_file(
         'appspec.json', self.JSON_APPSPEC, mode='w')
     self.service_name = 'serviceTest'
     self.service_arn = 'arn:aws:ecs:::service/serviceTest'
     # setup default optional values
     self.cluster_name = 'default'
     self.cluster_arn = 'arn:aws:ecs:::cluster/default'
     self.application_name = get_app_name(
         self.service_name, self.cluster_name, None)
     self.deployment_group_name = get_deploy_group_name(
         self.service_name, self.cluster_name, None)
     # setup test response resources
     self.missing_properties_appspec = files.create_file(
         'appspec_bad.yaml', self.BAD_APPSPEC, mode='w')
     # NOTE(review): the backslash before 'test:2' looks like it should be
     # a forward slash — confirm against the real task-definition ARN form.
     self.task_definition_arn = \
         'arn:aws:ecs::1234567890:task-definition\\test:2'
     self.deployment_id = 'd-1234567XX'
     self.mock_deployer = CodeDeployer(None, self.APPSPEC_DICT)
     self.mock_deployer.update_task_def_arn(self.task_definition_arn)
     self.expected_stdout = ("Successfully registered new ECS task "
                             "definition " + self.task_definition_arn + "\n"
                             "Successfully created deployment " +
                             self.deployment_id + "\n"
                             "Waiting for " + self.deployment_id +
                             " to succeed...\nSuccessfully deployed "
                             + self.task_definition_arn + " to service '"
                             + self.service_name + "'\n")
Exemplo n.º 4
0
class BaseSSOTest(BaseAWSCommandParamsTest):
    """Shared fixture for SSO tests: a temp AWS config file wired to a
    default profile containing SSO settings."""

    def setUp(self):
        super(BaseSSOTest, self).setUp()
        self.files = FileCreator()
        self.start_url = 'https://mysigin.com'
        self.sso_region = 'us-west-2'
        self.account = '012345678912'
        self.role_name = 'SSORole'
        self.config_file = self.files.full_path('config')
        self.environ['AWS_CONFIG_FILE'] = self.config_file
        self.set_config_file_content()
        self.access_token = 'foo.token.string'

    def tearDown(self):
        super(BaseSSOTest, self).tearDown()
        self.files.remove_all()

    def set_config_file_content(self, content=None):
        """Write *content* (or a default SSO profile) to the config file
        and rebuild the CLI driver so the new config is picked up."""
        if content is None:
            profile_lines = [
                '[default]',
                'sso_start_url=%s' % self.start_url,
                'sso_region=%s' % self.sso_region,
                'sso_role_name=%s' % self.role_name,
                'sso_account_id=%s' % self.account,
            ]
            content = '\n'.join(profile_lines) + '\n'
        self.files.create_file(self.config_file, content)
        # We need to recreate the driver (which includes its session) in order
        # for the config changes to be pulled in by the session.
        self.driver = create_clidriver()
Exemplo n.º 5
0
class TestQueryFileArgument(unittest.TestCase):
    """Tests for QueryOutFileArgument: saving a JMESPath query result from
    a response to a file with the requested permissions."""

    def setUp(self):
        self.files = FileCreator()

    def tearDown(self):
        self.files.remove_all()

    def test_proxies_to_super_ctor(self):
        session = mock.Mock()
        arg = QueryOutFileArgument(session, "foo", "bar.baz", "event", 0o600)
        self.assertEqual("foo", arg.name)
        self.assertEqual("bar.baz", arg.query)

    def test_adds_default_help_text(self):
        session = mock.Mock()
        arg = QueryOutFileArgument(session, "foo", "bar.baz", "event", 0o600)
        self.assertEqual(("Saves the command output contents of bar.baz " "to the given filename"), arg.documentation)

    def test_does_not_add_help_text_if_set(self):
        session = mock.Mock()
        arg = QueryOutFileArgument(session, "foo", "bar.baz", "event", 0o600, help_text="abc")
        self.assertEqual("abc", arg.documentation)

    def test_saves_query_to_file(self):
        outfile = self.files.create_file("not-empty-test", "")
        session = mock.Mock()
        arg = QueryOutFileArgument(session, "foo", "baz", "event", 0o600)
        arg.add_to_params({}, outfile)
        arg.save_query({"ResponseMetadata": {"HTTPStatusCode": 200}, "baz": "abc123"})
        with open(outfile) as fp:
            # assertEquals is a deprecated alias (removed in Python 3.12);
            # use assertEqual throughout.
            self.assertEqual("abc123", fp.read())
        self.assertEqual(1, session.register.call_count)
        session.register.assert_called_with("event", arg.save_query)

    def test_does_not_save_when_not_set(self):
        session = mock.Mock()
        QueryOutFileArgument(session, "foo", "baz", "event", 0o600)
        self.assertEqual(0, session.register.call_count)

    def test_saves_query_to_file_as_empty_string_when_none_result(self):
        outfile = self.files.create_file("none-test", "")
        session = mock.Mock()
        arg = QueryOutFileArgument(session, "foo", "baz", "event", 0o600)
        arg.add_to_params({}, outfile)
        arg.save_query({"ResponseMetadata": {"HTTPStatusCode": 200}})
        with open(outfile) as fp:
            self.assertEqual("", fp.read())

    @skip_if_windows("Test not valid on windows.")
    def test_permissions_on_created_file(self):
        outfile = self.files.create_file("not-empty-test", "")
        session = mock.Mock()
        arg = QueryOutFileArgument(session, "foo", "baz", "event", 0o600)
        arg.add_to_params({}, outfile)
        arg.save_query({"ResponseMetadata": {"HTTPStatusCode": 200}, "baz": "abc123"})
        with open(outfile) as fp:
            fp.read()
        # Mask off the permission bits (0xFFF == 0o7777) before comparing.
        self.assertEqual(os.stat(outfile).st_mode & 0xFFF, 0o600)
Exemplo n.º 6
0
class BaseHistoryCommandParamsTest(BaseAWSCommandParamsTest):
    """Base fixture for CLI history tests.

    Enables ``cli_history`` through a temporary config file, redirects the
    history database to a temporary file, and installs a clean global
    HistoryRecorder for each test.
    """

    def setUp(self):
        # NOTE(review): the clean recorder is created before super().setUp()
        # runs — presumably so objects built during the base setUp already
        # see the patched recorder; confirm before reordering.
        history_recorder = self._make_clean_history_recorder()
        super(BaseHistoryCommandParamsTest, self).setUp()
        self.history_recorder = history_recorder
        self.files = FileCreator()
        config_contents = ('[default]\n' 'cli_history = enabled')
        self.environ['AWS_CONFIG_FILE'] = self.files.create_file(
            'config', config_contents)
        self.environ['AWS_CLI_HISTORY_FILE'] = self.files.create_file(
            'history.db', '')
        self.driver = create_clidriver()
        # The run_cmd patches stdout with a StringIO object (similar to what
        # nose does). Therefore it will run into issues when
        # get_binary_stdout is called because it returns sys.stdout.buffer
        # for Py3 and StringIO does not have a buffer.
        self.binary_stdout_patch = mock.patch('awscli.utils.get_binary_stdout')
        mock_get_binary_stdout = self.binary_stdout_patch.start()
        self.binary_stdout = BytesIO()
        mock_get_binary_stdout.return_value = self.binary_stdout

    def _make_clean_history_recorder(self):
        """Create a fresh HistoryRecorder and patch it into every module
        that captured the global recorder at import time."""
        # This is to ensure that for each new test run the CLI is using
        # a brand new HistoryRecorder: the recorder is global, so previous
        # test runs could have injected handlers onto it since all of the
        # tests run in the same process.
        history_recorder = HistoryRecorder()

        # The HISTORY_RECORDER is instantiated on module import, before we
        # do any patching, which means we cannot simply patch
        # botocore.get_global_history_recorder: the objects are already
        # instantiated, so we have to individually patch each one of these.
        self._apply_history_recorder_patch('awscli.clidriver',
                                           history_recorder)
        self._apply_history_recorder_patch('awscli.customizations.history',
                                           history_recorder)
        return history_recorder

    def _apply_history_recorder_patch(self, module, history_recorder):
        """Patch ``<module>.HISTORY_RECORDER`` for the test's duration."""
        patch_history_recorder = mock.patch(module + '.HISTORY_RECORDER',
                                            history_recorder)
        patch_history_recorder.start()
        # addCleanup keeps the patch active for the whole test and undoes
        # it automatically, even if tearDown is skipped by an error.
        self.addCleanup(patch_history_recorder.stop)

    def _cleanup_db_connections(self):
        # Reaching into private data to close out the database connection.
        # Windows won't let us delete the tempdir until these connections are
        # closed in the tearDown step and we have no other way of forcing
        # them to close.
        handlers = self.history_recorder._handlers
        for handler in handlers:
            handler._writer.close()

    def tearDown(self):
        super(BaseHistoryCommandParamsTest, self).tearDown()
        self._cleanup_db_connections()
        self.files.remove_all()
        self.binary_stdout_patch.stop()
Exemplo n.º 7
0
class TestThrowsWarning(unittest.TestCase):
    """Tests that FileGenerator.triggers_warning() flags problem files and
    enqueues the matching warning message."""

    def setUp(self):
        self.files = FileCreator()
        self.root = self.files.rootdir
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')

    def tearDown(self):
        self.files.remove_all()

    def test_no_warning(self):
        # A plain readable file must not trigger a warning.
        file_gen = FileGenerator(self.service, self.endpoint, '', False)
        self.files.create_file("foo.txt", contents="foo")
        full_path = os.path.join(self.root, "foo.txt")
        return_val = file_gen.triggers_warning(full_path)
        self.assertFalse(return_val)
        self.assertTrue(file_gen.result_queue.empty())

    def test_no_exists(self):
        file_gen = FileGenerator(self.service, self.endpoint, '', False)
        filename = os.path.join(self.root, 'file')
        return_val = file_gen.triggers_warning(filename)
        self.assertTrue(return_val)
        warning_message = file_gen.result_queue.get()
        self.assertEqual(warning_message.message,
                         ("warning: Skipping file %s. File does not exist." %
                          filename))

    def test_no_read_access(self):
        file_gen = FileGenerator(self.service, self.endpoint, '', False)
        self.files.create_file("foo.txt", contents="foo")
        full_path = os.path.join(self.root, "foo.txt")
        open_function = 'awscli.customizations.s3.filegenerator._open'
        # Simulate an unreadable file by making the module's _open raise.
        with mock.patch(open_function) as mock_class:
            mock_class.side_effect = OSError()
            return_val = file_gen.triggers_warning(full_path)
            self.assertTrue(return_val)
        warning_message = file_gen.result_queue.get()
        self.assertEqual(warning_message.message,
                         ("warning: Skipping file %s. File/Directory is "
                          "not readable." % full_path))

    @unittest.skipIf(platform.system() not in ['Darwin', 'Linux'],
                     'Special files only supported on mac/linux')
    def test_is_special_file_warning(self):
        file_gen = FileGenerator(self.service, self.endpoint, '', False)
        file_path = os.path.join(self.files.rootdir, 'foo')
        # Use socket for special file.
        sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        # Close the socket even if an assertion fails (fixes an fd leak).
        self.addCleanup(sock.close)
        sock.bind(file_path)
        return_val = file_gen.triggers_warning(file_path)
        self.assertTrue(return_val)
        warning_message = file_gen.result_queue.get()
        self.assertEqual(warning_message.message,
                         ("warning: Skipping file %s. File is character "
                          "special device, block special device, FIFO, or "
                          "socket." % file_path))
Exemplo n.º 8
0
class TestThrowsWarning(unittest.TestCase):
    """Tests that FileGenerator.triggers_warning() flags problem files and
    enqueues the matching warning message."""

    def setUp(self):
        self.files = FileCreator()
        self.root = self.files.rootdir
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')

    def tearDown(self):
        self.files.remove_all()

    def test_no_warning(self):
        # A plain readable file must not trigger a warning.
        file_gen = FileGenerator(self.service, self.endpoint, '', False)
        self.files.create_file("foo.txt", contents="foo")
        full_path = os.path.join(self.root, "foo.txt")
        return_val = file_gen.triggers_warning(full_path)
        self.assertFalse(return_val)
        self.assertTrue(file_gen.result_queue.empty())

    def test_no_exists(self):
        file_gen = FileGenerator(self.service, self.endpoint, '', False)
        filename = os.path.join(self.root, 'file')
        return_val = file_gen.triggers_warning(filename)
        self.assertTrue(return_val)
        warning_message = file_gen.result_queue.get()
        self.assertEqual(
            warning_message.message,
            ("warning: Skipping file %s. File does not exist." % filename))

    def test_no_read_access(self):
        file_gen = FileGenerator(self.service, self.endpoint, '', False)
        self.files.create_file("foo.txt", contents="foo")
        full_path = os.path.join(self.root, "foo.txt")
        open_function = 'awscli.customizations.s3.filegenerator._open'
        # Simulate an unreadable file by making the module's _open raise.
        with mock.patch(open_function) as mock_class:
            mock_class.side_effect = OSError()
            return_val = file_gen.triggers_warning(full_path)
            self.assertTrue(return_val)
        warning_message = file_gen.result_queue.get()
        self.assertEqual(warning_message.message,
                         ("warning: Skipping file %s. File/Directory is "
                          "not readable." % full_path))

    @unittest.skipIf(platform.system() not in ['Darwin', 'Linux'],
                     'Special files only supported on mac/linux')
    def test_is_special_file_warning(self):
        file_gen = FileGenerator(self.service, self.endpoint, '', False)
        file_path = os.path.join(self.files.rootdir, 'foo')
        # Use socket for special file.
        sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        # Close the socket even if an assertion fails (fixes an fd leak).
        self.addCleanup(sock.close)
        sock.bind(file_path)
        return_val = file_gen.triggers_warning(file_path)
        self.assertTrue(return_val)
        warning_message = file_gen.result_queue.get()
        self.assertEqual(warning_message.message,
                         ("warning: Skipping file %s. File is character "
                          "special device, block special device, FIFO, or "
                          "socket." % file_path))
Exemplo n.º 9
0
class TestQueryFileArgument(unittest.TestCase):
    """Tests for QueryOutFileArgument: saving a JMESPath query result from
    a response to a file."""

    def setUp(self):
        self.files = FileCreator()

    def tearDown(self):
        self.files.remove_all()

    def test_proxies_to_super_ctor(self):
        session = mock.Mock()
        arg = QueryOutFileArgument(session, 'foo', 'bar.baz', 'event')
        self.assertEqual('foo', arg.name)
        self.assertEqual('bar.baz', arg.query)

    def test_adds_default_help_text(self):
        session = mock.Mock()
        arg = QueryOutFileArgument(session, 'foo', 'bar.baz', 'event')
        self.assertEqual(('Saves the command output contents of bar.baz '
                          'to the given filename'), arg.documentation)

    def test_does_not_add_help_text_if_set(self):
        session = mock.Mock()
        arg = QueryOutFileArgument(session,
                                   'foo',
                                   'bar.baz',
                                   'event',
                                   help_text='abc')
        self.assertEqual('abc', arg.documentation)

    def test_saves_query_to_file(self):
        outfile = self.files.create_file('not-empty-test', '')
        session = mock.Mock()
        arg = QueryOutFileArgument(session, 'foo', 'baz', 'event')
        arg.add_to_params({}, outfile)
        arg.save_query({
            'ResponseMetadata': {
                'HTTPStatusCode': 200
            },
            'baz': 'abc123'
        })
        with open(outfile) as fp:
            # assertEquals is a deprecated alias (removed in Python 3.12);
            # use assertEqual throughout.
            self.assertEqual('abc123', fp.read())
        self.assertEqual(1, session.register.call_count)
        session.register.assert_called_with('event', arg.save_query)

    def test_does_not_save_when_not_set(self):
        session = mock.Mock()
        QueryOutFileArgument(session, 'foo', 'baz', 'event')
        self.assertEqual(0, session.register.call_count)

    def test_saves_query_to_file_as_empty_string_when_none_result(self):
        outfile = self.files.create_file('none-test', '')
        session = mock.Mock()
        arg = QueryOutFileArgument(session, 'foo', 'baz', 'event')
        arg.add_to_params({}, outfile)
        arg.save_query({'ResponseMetadata': {'HTTPStatusCode': 200}})
        with open(outfile) as fp:
            self.assertEqual('', fp.read())
Exemplo n.º 10
0
class TestIgnoreFilesLocally(unittest.TestCase):
    """
    This class tests the ability to ignore particular files.  This includes
    skipping symlink when desired.
    """
    def setUp(self):
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        self.files = FileCreator()

    def tearDown(self):
        self.files.remove_all()

    def test_warning(self):
        # A dangling symlink is ignored even when symlinks are followed.
        path = os.path.join(self.files.rootdir, 'badsymlink')
        os.symlink('non-existent-file', path)
        filegenerator = FileGenerator(self.service, self.endpoint,
                                      '', True)
        self.assertTrue(filegenerator.should_ignore_file(path))

    def test_skip_symlink(self):
        # follow_symlinks=False: symlinks to real files are ignored.
        filename = 'foo.txt'
        self.files.create_file(os.path.join(self.files.rootdir,
                               filename),
                               contents='foo.txt contents')
        sym_path = os.path.join(self.files.rootdir, 'symlink')
        os.symlink(filename, sym_path)
        filegenerator = FileGenerator(self.service, self.endpoint,
                                      '', False)
        self.assertTrue(filegenerator.should_ignore_file(sym_path))

    def test_no_skip_symlink(self):
        # follow_symlinks=True: neither the link nor the target is ignored.
        filename = 'foo.txt'
        path = self.files.create_file(os.path.join(self.files.rootdir,
                                                   filename),
                                      contents='foo.txt contents')
        sym_path = os.path.join(self.files.rootdir, 'symlink')
        os.symlink(path, sym_path)
        filegenerator = FileGenerator(self.service, self.endpoint,
                                      '', True)
        self.assertFalse(filegenerator.should_ignore_file(sym_path))
        self.assertFalse(filegenerator.should_ignore_file(path))

    def test_no_skip_symlink_dir(self):
        # Symlinks to directories are followed too. (Removed an unused
        # `filename` local that was never referenced.)
        path = os.path.join(self.files.rootdir, 'dir/')
        os.mkdir(path)
        sym_path = os.path.join(self.files.rootdir, 'symlink')
        os.symlink(path, sym_path)
        filegenerator = FileGenerator(self.service, self.endpoint,
                                      '', True)
        self.assertFalse(filegenerator.should_ignore_file(sym_path))
        self.assertFalse(filegenerator.should_ignore_file(path))
Exemplo n.º 11
0
class TestIgnoreFilesLocally(unittest.TestCase):
    """
    This class tests the ability to ignore particular files.  This includes
    skipping symlink when desired.
    """
    def setUp(self):
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        self.files = FileCreator()

    def tearDown(self):
        self.files.remove_all()

    def test_bad_symlink(self):
        # A dangling symlink is not ignored when symlinks are followed.
        path = os.path.join(self.files.rootdir, 'badsymlink')
        os.symlink('non-existent-file', path)
        filegenerator = FileGenerator(self.service, self.endpoint,
                                      '', True)
        self.assertFalse(filegenerator.should_ignore_file(path))

    def test_skip_symlink(self):
        # follow_symlinks=False: symlinks to real files are ignored.
        filename = 'foo.txt'
        self.files.create_file(os.path.join(self.files.rootdir,
                               filename),
                               contents='foo.txt contents')
        sym_path = os.path.join(self.files.rootdir, 'symlink')
        os.symlink(filename, sym_path)
        filegenerator = FileGenerator(self.service, self.endpoint,
                                      '', False)
        self.assertTrue(filegenerator.should_ignore_file(sym_path))

    def test_no_skip_symlink(self):
        # follow_symlinks=True: neither the link nor the target is ignored.
        filename = 'foo.txt'
        path = self.files.create_file(os.path.join(self.files.rootdir,
                                                   filename),
                                      contents='foo.txt contents')
        sym_path = os.path.join(self.files.rootdir, 'symlink')
        os.symlink(path, sym_path)
        filegenerator = FileGenerator(self.service, self.endpoint,
                                      '', True)
        self.assertFalse(filegenerator.should_ignore_file(sym_path))
        self.assertFalse(filegenerator.should_ignore_file(path))

    def test_no_skip_symlink_dir(self):
        # Symlinks to directories are followed too. (Removed an unused
        # `filename` local that was never referenced.)
        path = os.path.join(self.files.rootdir, 'dir/')
        os.mkdir(path)
        sym_path = os.path.join(self.files.rootdir, 'symlink')
        os.symlink(path, sym_path)
        filegenerator = FileGenerator(self.service, self.endpoint,
                                      '', True)
        self.assertFalse(filegenerator.should_ignore_file(sym_path))
        self.assertFalse(filegenerator.should_ignore_file(path))
Exemplo n.º 12
0
class TestQueryFileArgument(unittest.TestCase):
    """Tests for QueryOutFileArgument: saving a JMESPath query result from
    a response to a file."""

    def setUp(self):
        self.files = FileCreator()

    def tearDown(self):
        self.files.remove_all()

    def test_proxies_to_super_ctor(self):
        session = mock.Mock()
        arg = QueryOutFileArgument(session, 'foo', 'bar.baz', 'event')
        self.assertEqual('foo', arg.name)
        self.assertEqual('bar.baz', arg.query)

    def test_adds_default_help_text(self):
        session = mock.Mock()
        arg = QueryOutFileArgument(session, 'foo', 'bar.baz', 'event')
        self.assertEqual(('Saves the command output contents of bar.baz '
                          'to the given filename'), arg.documentation)

    def test_does_not_add_help_text_if_set(self):
        session = mock.Mock()
        arg = QueryOutFileArgument(session, 'foo', 'bar.baz', 'event',
                                   help_text='abc')
        self.assertEqual('abc', arg.documentation)

    def test_saves_query_to_file(self):
        outfile = self.files.create_file('not-empty-test', '')
        session = mock.Mock()
        arg = QueryOutFileArgument(session, 'foo', 'baz', 'event')
        arg.add_to_params({}, outfile)
        arg.save_query({'ResponseMetadata': {'HTTPStatusCode': 200},
                        'baz': 'abc123'})
        with open(outfile) as fp:
            # assertEquals is a deprecated alias (removed in Python 3.12);
            # use assertEqual throughout.
            self.assertEqual('abc123', fp.read())
        self.assertEqual(1, session.register.call_count)
        session.register.assert_called_with('event', arg.save_query)

    def test_does_not_save_when_not_set(self):
        session = mock.Mock()
        QueryOutFileArgument(session, 'foo', 'baz', 'event')
        self.assertEqual(0, session.register.call_count)

    def test_saves_query_to_file_as_empty_string_when_none_result(self):
        outfile = self.files.create_file('none-test', '')
        session = mock.Mock()
        arg = QueryOutFileArgument(session, 'foo', 'baz', 'event')
        arg.add_to_params({}, outfile)
        arg.save_query({'ResponseMetadata': {'HTTPStatusCode': 200}})
        with open(outfile) as fp:
            self.assertEqual('', fp.read())
Exemplo n.º 13
0
class TestCLITimestampParser(BaseCLIWireResponseTest):
    """Tests the cli_timestamp_format setting against a canned codebuild
    wire response whose startTime is epoch 0."""

    def setUp(self):
        super(TestCLITimestampParser, self).setUp()
        self.files = FileCreator()
        wire_body = {'builds': [{'startTime': 0}]}
        self.wire_response = json.dumps(wire_body).encode('utf-8')
        self.command = ['codebuild', 'batch-get-builds', '--ids', 'foo']
        self.patch_send(content=self.wire_response)

    def tearDown(self):
        super(TestCLITimestampParser, self).tearDown()
        self.files.remove_all()

    def _start_time_from_cmd(self):
        """Run the canned command and pull startTime out of its JSON output."""
        stdout, _, _ = self.run_cmd(self.command)
        return json.loads(stdout)["builds"][0]["startTime"]

    def test_iso(self):
        self.environ['AWS_CONFIG_FILE'] = self.files.create_file(
            'iso',
            '[default]\ncli_timestamp_format = iso8601\n')
        self.driver = create_clidriver()
        expected_time = datetime.datetime.fromtimestamp(0).replace(
            tzinfo=tzlocal()).isoformat()
        self.assertEqual(expected_time, self._start_time_from_cmd())

    def test_none(self):
        self.environ['AWS_CONFIG_FILE'] = self.files.create_file(
            'none',
            '[default]\ncli_timestamp_format = none\n')
        self.driver = create_clidriver()
        self.assertEqual(0, self._start_time_from_cmd())

    def test_default(self):
        self.driver = create_clidriver()
        self.assertEqual(0, self._start_time_from_cmd())
Exemplo n.º 14
0
class TestZipDirectory(unittest.TestCase):
    """Tests zip_directory() by zipping a scratch directory tree and
    inspecting the resulting archive's entries."""

    def setUp(self):
        self.file_creator = FileCreator()
        self.zip_file = self.file_creator.create_file('build.zip', '')
        self._dir_root = 'mybuild'

    def tearDown(self):
        self.file_creator.remove_all()

    @property
    def dir_root(self):
        """Absolute path of the directory that gets zipped."""
        return self.file_creator.full_path(self._dir_root)

    def add_to_directory(self, filename):
        """Create *filename* with dummy contents under the directory root."""
        self.file_creator.create_file(
            os.path.join(self._dir_root, filename), 'Some contents')

    def assert_contents_of_zip_file(self, filenames):
        """Assert the archive holds exactly *filenames* (given sorted)."""
        archive = zipfile.ZipFile(self.zip_file, 'r', zipfile.ZIP_DEFLATED)
        with contextlib.closing(archive) as zf:
            # namelist() always uses forward slashes internally, so convert
            # entries to the platform separator before comparing on Windows.
            entries = zf.namelist()
            if os.sep == '\\':
                entries = [entry.replace('/', '\\') for entry in entries]
            self.assertEqual(sorted(entries), filenames)

    def test_single_file(self):
        self.add_to_directory('foo')
        zip_directory(self.zip_file, self.dir_root)
        self.assert_contents_of_zip_file(['foo'])

    def test_multiple_files(self):
        self.add_to_directory('foo')
        self.add_to_directory('bar')
        zip_directory(self.zip_file, self.dir_root)
        self.assert_contents_of_zip_file(['bar', 'foo'])

    def test_nested_file(self):
        nested = os.path.join('mydir', 'foo')
        self.add_to_directory(nested)
        zip_directory(self.zip_file, self.dir_root)
        self.assert_contents_of_zip_file([nested])
Exemplo n.º 15
0
class TestZipDirectory(unittest.TestCase):
    """Tests zip_directory() by zipping a scratch directory tree and
    inspecting the resulting archive's entries."""

    def setUp(self):
        self.file_creator = FileCreator()
        self.zip_file = self.file_creator.create_file('build.zip', '')
        self._dir_root = 'mybuild'

    def tearDown(self):
        self.file_creator.remove_all()

    @property
    def dir_root(self):
        """Absolute path of the directory that gets zipped."""
        return self.file_creator.full_path(self._dir_root)

    def add_to_directory(self, filename):
        """Create *filename* with dummy contents under the directory root."""
        self.file_creator.create_file(
            os.path.join(self._dir_root, filename), 'Some contents')

    def assert_contents_of_zip_file(self, filenames):
        """Assert the archive holds exactly *filenames* (given sorted)."""
        archive = zipfile.ZipFile(self.zip_file, 'r', zipfile.ZIP_DEFLATED)
        with contextlib.closing(archive) as zf:
            # namelist() always uses forward slashes internally, so convert
            # entries to the platform separator before comparing on Windows.
            entries = zf.namelist()
            if os.sep == '\\':
                entries = [entry.replace('/', '\\') for entry in entries]
            self.assertEqual(sorted(entries), filenames)

    def test_single_file(self):
        self.add_to_directory('foo')
        zip_directory(self.zip_file, self.dir_root)
        self.assert_contents_of_zip_file(['foo'])

    def test_multiple_files(self):
        self.add_to_directory('foo')
        self.add_to_directory('bar')
        zip_directory(self.zip_file, self.dir_root)
        self.assert_contents_of_zip_file(['bar', 'foo'])

    def test_nested_file(self):
        nested = os.path.join('mydir', 'foo')
        self.add_to_directory(nested)
        zip_directory(self.zip_file, self.dir_root)
        self.assert_contents_of_zip_file([nested])
Exemplo n.º 16
0
class TestCLITimestampParser(BaseCLIWireResponseTest):
    """Tests for the ``cli_timestamp_format`` config option.

    The faked wire response contains a single build with ``startTime`` of
    epoch 0; each test checks how that timestamp is rendered in the CLI
    output for a given configured format.
    """

    def setUp(self):
        super(TestCLITimestampParser, self).setUp()
        self.files = FileCreator()
        # Canned codebuild response with an epoch-0 timestamp.
        self.wire_response = json.dumps({
            'builds': [{
                'startTime': 0,
            }]
        }).encode('utf-8')
        self.command = ['codebuild', 'batch-get-builds', '--ids', 'foo']
        self.patch_send(content=self.wire_response)

    def tearDown(self):
        super(TestCLITimestampParser, self).tearDown()
        self.files.remove_all()

    def test_iso(self):
        # iso8601: the epoch value is rendered as a local-time ISO string.
        self.environ['AWS_CONFIG_FILE'] = self.files.create_file(
            'iso', '[default]\ncli_timestamp_format = iso8601\n')
        self.driver = create_clidriver()
        expected_time = datetime.datetime.fromtimestamp(0).replace(
            tzinfo=tzlocal()).isoformat()

        stdout, _, _ = self.run_cmd(self.command)
        json_response = json.loads(stdout)
        start_time = json_response["builds"][0]["startTime"]
        self.assertEqual(expected_time, start_time)

    def test_none(self):
        # wire: the timestamp is passed through unmodified (still 0).
        self.environ['AWS_CONFIG_FILE'] = self.files.create_file(
            'none', '[default]\ncli_timestamp_format = wire\n')
        self.driver = create_clidriver()
        expected_time = 0

        stdout, _, _ = self.run_cmd(self.command)
        json_response = json.loads(stdout)
        start_time = json_response["builds"][0]["startTime"]
        self.assertEqual(expected_time, start_time)

    def test_default(self):
        # No explicit config: output matches the iso8601 case — presumably
        # iso8601 is the default format; confirm against the CLI docs.
        self.driver = create_clidriver()
        expected_time = datetime.datetime.fromtimestamp(0).replace(
            tzinfo=tzlocal()).isoformat()

        stdout, _, _ = self.run_cmd(self.command)
        json_response = json.loads(stdout)
        start_time = json_response["builds"][0]["startTime"]
        self.assertEqual(expected_time, start_time)
Exemplo n.º 17
0
class TestAPIVersions(BaseAWSCommandParamsTest):
    """Tests for the ``api_versions`` config setting (pinning an older API)."""

    def setUp(self):
        super(TestAPIVersions, self).setUp()
        self.files = FileCreator()
        # We just pick ec2 because it is a service that actually has
        # multiple api versions.
        self.service_name = 'ec2'
        self.api_version = '2014-10-01'
        config_contents = ('[default]\n'
                           'api_versions =\n'
                           '    %s = %s\n' %
                           (self.service_name, self.api_version))
        self.environ['AWS_CONFIG_FILE'] = self.files.create_file(
            'myconfig', config_contents)
        self.driver = create_clidriver()

    def tearDown(self):
        super(TestAPIVersions, self).tearDown()
        self.files.remove_all()

    def test_command_send_correct_api_version(self):
        cmdline = 'ec2 describe-instances'
        self.run_cmd(cmdline)
        # Make sure that the correct api version is used for the client
        # by checking the version that was sent in the request.
        self.assertEqual(self.last_params['Version'], self.api_version)

    def test_command_interface_reflects_api_version(self):
        # Take an arbitrary command such as describe-nat-gateways that is not
        # in the 2014-10-01 EC2 API version and make sure its CLI command
        # interface is not available as well.
        cmdline = 'ec2 describe-nat-gateways'
        _, stderr, _ = self.run_cmd(cmdline, expected_rc=2)
        self.assertIn("Invalid choice: 'describe-nat-gateways'", stderr)
Exemplo n.º 18
0
class TestAPIVersions(BaseAWSCommandParamsTest):
    """Tests for the ``api_versions`` config setting (pinning an older API)."""

    def setUp(self):
        super(TestAPIVersions, self).setUp()
        self.files = FileCreator()
        # We just pick ec2 because it is a service that actually has
        # multiple api versions.
        self.service_name = 'ec2'
        self.api_version = '2014-10-01'
        config_contents = (
            '[default]\n'
            'api_versions =\n'
            '    %s = %s\n' % (self.service_name, self.api_version)
        )
        self.environ['AWS_CONFIG_FILE'] = self.files.create_file(
            'myconfig', config_contents)
        self.driver = create_clidriver()

    def tearDown(self):
        super(TestAPIVersions, self).tearDown()
        self.files.remove_all()

    def test_command_send_correct_api_version(self):
        cmdline = 'ec2 describe-instances'
        self.run_cmd(cmdline)
        # Make sure that the correct api version is used for the client
        # by checking the version that was sent in the request.
        self.assertEqual(self.last_params['Version'], self.api_version)

    def test_command_interface_reflects_api_version(self):
        # Take an arbitrary command such as describe-nat-gateways that is not
        # in the 2014-10-01 EC2 API version and make sure its CLI command
        # interface is not available as well.
        cmdline = 'ec2 describe-nat-gateways'
        _, stderr, _ = self.run_cmd(cmdline, expected_rc=2)
        self.assertIn("Invalid choice: 'describe-nat-gateways'", stderr)
Exemplo n.º 19
0
class TestArgsResolution(BaseAWSCommandParamsTest):
    """Tests for how global CLI arguments interact with environment vars."""

    def setUp(self):
        super(TestArgsResolution, self).setUp()
        self.files = FileCreator()
        # Config defines only profile "bar"; profile "foo" (set via
        # AWS_PROFILE in the tests) intentionally does not exist.
        config_contents = (
            '[profile bar]\n'
            'region = us-west-2\n'
        )
        self.environ['AWS_CONFIG_FILE'] = self.files.create_file(
            'myconfig', config_contents)
        self.driver = create_clidriver()

    def tearDown(self):
        super(TestArgsResolution, self).tearDown()
        self.files.remove_all()

    def test_profile_resolution_order(self):
        # The --profile command-line flag wins over the AWS_PROFILE env var.
        self.environ['AWS_PROFILE'] = 'foo'
        self.parsed_responses = [{"Reservations": []}]
        self.run_cmd('--profile bar ec2 describe-instances', expected_rc=0)
        self.assertEqual(self.driver.session.profile, 'bar')

    def test_can_get_version_with_non_existent_profile(self):
        self.environ['AWS_PROFILE'] = 'foo'
        # ProfileNotFound exception shouldn't be raised
        self.run_cmd('--version', expected_rc=0)
Exemplo n.º 20
0
class TestArgumentHelpers(unittest.TestCase):
    """Tests for resolve_given_outfile_path and is_parsed_result_successful."""

    def setUp(self):
        self.files = FileCreator()

    def tearDown(self):
        self.files.remove_all()

    def test_only_validates_filename_when_set(self):
        # A None outfile is passed through without any validation.
        resolve_given_outfile_path(None)

    def test_works_with_valid_filename(self):
        path = self.files.create_file('valid', '')
        resolved = resolve_given_outfile_path(path)
        self.assertEqual(path, resolved)

    def test_works_with_relative_filename(self):
        relative = '../valid'
        self.assertEqual(relative, resolve_given_outfile_path(relative))

    def test_raises_when_cannot_write_to_file(self):
        bogus = os.sep.join(['_path', 'not', '_exist_', 'file.xyz'])
        with self.assertRaises(ValueError):
            resolve_given_outfile_path(bogus)

    def test_checks_if_valid_result(self):
        # A 200 status code in the response metadata counts as success.
        response = {'ResponseMetadata': {'HTTPStatusCode': 200}}
        self.assertTrue(is_parsed_result_successful(response))

    def test_checks_if_invalid_result(self):
        # A 300 status code does not count as success.
        response = {'ResponseMetadata': {'HTTPStatusCode': 300}}
        self.assertFalse(is_parsed_result_successful(response))
Exemplo n.º 21
0
class TestSyncCommand(BaseAWSCommandParamsTest):
    """Tests for ``aws s3 sync`` parameter handling."""

    prefix = 's3 sync '

    def setUp(self):
        super(TestSyncCommand, self).setUp()
        self.files = FileCreator()

    def tearDown(self):
        super(TestSyncCommand, self).tearDown()
        self.files.remove_all()

    def test_website_redirect_ignore_paramfile(self):
        """--website-redirect takes a plain URL, not a paramfile to fetch."""
        # Created for its side effect: gives the sync a file to upload.
        full_path = self.files.create_file('foo.txt', 'mycontent')
        cmdline = '%s %s s3://bucket/key.txt --website-redirect %s' % \
            (self.prefix, self.files.rootdir, 'http://someserver')
        # Fake responses: an empty ListObjects, then a successful PutObject.
        self.parsed_responses = [{
            "CommonPrefixes": [],
            "Contents": []
        }, {
            'ETag': '"c8afdb36c52cf4727836669019e69222"'
        }]
        self.run_cmd(cmdline, expected_rc=0)

        # The only operations we should have called are ListObjects/PutObject.
        self.assertEqual(len(self.operations_called), 2,
                         self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'ListObjects')
        self.assertEqual(self.operations_called[1][0].name, 'PutObject')
        # Make sure that the specified web address is used as opposed to the
        # contents of the web address when uploading the object
        self.assertEqual(
            self.operations_called[1][1]['website_redirect_location'],
            'http://someserver')
Exemplo n.º 22
0
class TestArgumentHelpers(unittest.TestCase):
    """Tests for resolve_given_outfile_path and is_parsed_result_successful."""

    def setUp(self):
        self.files = FileCreator()

    def tearDown(self):
        self.files.remove_all()

    def test_only_validates_filename_when_set(self):
        # A None outfile should pass through without raising.
        resolve_given_outfile_path(None)

    def test_works_with_valid_filename(self):
        filename = self.files.create_file("valid", "")
        # assertEquals is a deprecated alias (removed in Python 3.12);
        # use assertEqual.
        self.assertEqual(filename, resolve_given_outfile_path(filename))

    def test_works_with_relative_filename(self):
        filename = "../valid"
        self.assertEqual(filename, resolve_given_outfile_path(filename))

    def test_raises_when_cannot_write_to_file(self):
        filename = os.sep.join(["_path", "not", "_exist_", "file.xyz"])
        with self.assertRaises(ValueError):
            resolve_given_outfile_path(filename)

    def test_checks_if_valid_result(self):
        # A 200 status code counts as success.
        result = {"ResponseMetadata": {"HTTPStatusCode": 200}}
        self.assertTrue(is_parsed_result_successful(result))

    def test_checks_if_invalid_result(self):
        # A 300 status code does not count as success.
        result = {"ResponseMetadata": {"HTTPStatusCode": 300}}
        self.assertFalse(is_parsed_result_successful(result))
Exemplo n.º 23
0
class TestShow(unittest.TestCase):
    def setUp(self):
        self.files = FileCreator()
        self.environ = os.environ.copy()
        self.environ['AWS_CONFIG_FILE'] = self.files.create_file(
            'config', ('[default]\n'
                       'cli_history = enabled'))
        self.environ['AWS_DEFAULT_PROFILE'] = 'default'
        self.environ['AWS_DEFAULT_REGION'] = 'us-west-2'
        self.environ['AWS_CLI_HISTORY_FILE'] = os.path.join(
            self.files.rootdir, 'history.db')

    def tearDown(self):
        self.files.remove_all()

    def remove_color(self, output):
        return re.compile(r'\x1b[^m]*m').sub('', output)

    def assert_contains_in_order(self, lines, contents):
        current_pos = 0
        prev_line = None
        for line in lines:
            self.assertIn(line, contents)
            new_pos = contents.find(line)
            if new_pos < current_pos:
                self.fail('Line: "%s" should have came after line: "%s"' %
                          (line, prev_line))
            prev_line = line
            current_pos = new_pos

    def test_show(self):
        # Make a call that does not require credentials just in case the
        # user was using the config file to provide credentials.
        cmd = 'sts assume-role-with-saml '
        cmd += '--role-arn  arn:aws:iam::...:invalid '
        cmd += '--principal-arn  arn:aws:iam::...:invalid  '
        cmd += '--saml-assertion fake-assertion'
        aws(cmd, env_vars=self.environ)
        # Now run the show command and make sure the general output is all
        # there.
        result = aws('history show', env_vars=self.environ)
        uncolored_content = self.remove_color(result.stdout)

        self.assert_contains_in_order([
            'AWS CLI command entered', 'with AWS CLI version: aws-cli/',
            "with arguments: ['sts', 'assume-role-with-saml',",
            '[0] API call made', 'to service: sts',
            'using operation: AssumeRoleWithSAML', 'with parameters: {',
            '    "PrincipalArn": "arn:aws:iam::...:invalid",',
            '    "RoleArn": "arn:aws:iam::...:invalid",',
            '    "SAMLAssertion": "fake-assertion"', '[0] HTTP request sent',
            'to URL: https://sts.amazonaws.com/', 'with method: POST',
            'with body: Action=AssumeRoleWithSAML&Version=2011-06-15',
            '[0] HTTP response received', 'with status code: 400',
            'with body: <?xml version="1.0" ?>', '[0] HTTP response parsed',
            'parsed to: {', '    "Error": {', 'AWS CLI command exited',
            'with return code: 255'
        ], uncolored_content)
Exemplo n.º 24
0
class TestParamFile(unittest.TestCase):
    """Tests for get_paramfile with the local file:// and fileb:// prefixes."""

    def setUp(self):
        self.files = FileCreator()

    def tearDown(self):
        self.files.remove_all()

    def get_paramfile(self, path):
        # Convenience wrapper supplying the local prefix map to the real
        # get_paramfile function.
        return get_paramfile(path, LOCAL_PREFIX_MAP.copy())

    def test_text_file(self):
        # file:// loads the file's contents as text.
        contents = 'This is a test'
        filename = self.files.create_file('foo', contents)
        prefixed_filename = 'file://' + filename
        data = self.get_paramfile(prefixed_filename)
        self.assertEqual(data, contents)
        self.assertIsInstance(data, six.string_types)

    def test_binary_file(self):
        # fileb:// loads the file's contents as bytes.
        contents = 'This is a test'
        filename = self.files.create_file('foo', contents)
        prefixed_filename = 'fileb://' + filename
        data = self.get_paramfile(prefixed_filename)
        self.assertEqual(data, b'This is a test')
        self.assertIsInstance(data, six.binary_type)

    @skip_if_windows('Binary content error only occurs '
                     'on non-Windows platforms.')
    def test_cannot_load_text_file(self):
        # Undecodable bytes behind file:// raise ResourceLoadingError.
        contents = b'\xbfX\xac\xbe'
        filename = self.files.create_file('foo', contents, mode='wb')
        prefixed_filename = 'file://' + filename
        with self.assertRaises(ResourceLoadingError):
            self.get_paramfile(prefixed_filename)

    def test_file_does_not_exist_raises_error(self):
        with self.assertRaises(ResourceLoadingError):
            self.get_paramfile('file://file/does/not/existsasdf.txt')

    def test_no_match_uris_returns_none(self):
        # Unknown prefixes are ignored; None is returned.
        self.assertIsNone(self.get_paramfile('foobar://somewhere.bar'))

    def test_non_string_type_returns_none(self):
        self.assertIsNone(self.get_paramfile(100))
Exemplo n.º 25
0
class TestAliasCommandInjector(unittest.TestCase):
    """Tests for AliasCommandInjector adding aliases to the command table."""

    def setUp(self):
        # Alias file starts with only the [toplevel] section header;
        # individual tests append their own alias entries.
        self.files = FileCreator()
        self.alias_file = self.files.create_file("alias", "[toplevel]\n")
        self.alias_loader = AliasLoader(self.alias_file)
        self.session = mock.Mock(spec=Session)
        self.alias_cmd_injector = AliasCommandInjector(self.session, self.alias_loader)
        self.command_table = {}
        self.parser = MainArgParser(
            command_table=self.command_table, version_string="version", description="description", argument_table={}
        )

    def tearDown(self):
        self.files.remove_all()

    def test_service_alias_command(self):
        # A plain alias value becomes a ServiceAliasCommand.
        with open(self.alias_file, "a+") as f:
            f.write("my-alias = my-alias-value\n")

        self.alias_cmd_injector.inject_aliases(self.command_table, self.parser)
        self.assertIn("my-alias", self.command_table)
        self.assertIsInstance(self.command_table["my-alias"], ServiceAliasCommand)

    def test_external_alias_command(self):
        # A value prefixed with "!" becomes an ExternalAliasCommand.
        with open(self.alias_file, "a+") as f:
            f.write("my-alias = !my-alias-value\n")

        self.alias_cmd_injector.inject_aliases(self.command_table, self.parser)
        self.assertIn("my-alias", self.command_table)
        self.assertIsInstance(self.command_table["my-alias"], ExternalAliasCommand)

    def test_clobbers_builtins(self):
        # An alias whose name matches a built-in command replaces it.
        builtin_cmd = mock.Mock(spec=CLICommand)
        self.command_table["builtin"] = builtin_cmd

        with open(self.alias_file, "a+") as f:
            f.write("builtin = my-alias-value\n")

        self.alias_cmd_injector.inject_aliases(self.command_table, self.parser)
        self.assertIn("builtin", self.command_table)
        self.assertIsInstance(self.command_table["builtin"], ServiceAliasCommand)

    def test_shadow_proxy_command(self):
        # An alias that maps a name to itself proxies to the original
        # built-in command.
        builtin_cmd = mock.Mock(spec=CLICommand)
        builtin_cmd.name = "builtin"
        self.command_table["builtin"] = builtin_cmd

        with open(self.alias_file, "a+") as f:
            f.write("builtin = builtin\n")

        self.alias_cmd_injector.inject_aliases(self.command_table, self.parser)

        self.command_table["builtin"]([], FakeParsedArgs(command="builtin"))
        # The builtin command should be passed to the alias
        # command when added to the table.
        builtin_cmd.assert_called_with([], FakeParsedArgs(command="builtin"))
Exemplo n.º 26
0
class TestPlugins(BaseCLIDriverTest):
    """Tests for loading legacy plugins via ``cli_legacy_plugin_path``."""

    def setUp(self):
        super(TestPlugins, self).setUp()
        self.files = FileCreator()
        # A fake site-packages directory the plugin module is copied into.
        self.plugins_site_packages = os.path.join(self.files.rootdir,
                                                  'site-packages')
        self.plugin_module_name = 'add_awscli_cmd_plugin'
        # The plugin source lives next to this test file.
        self.plugin_filename = os.path.join(os.path.dirname(__file__),
                                            self.plugin_module_name) + '.py'
        self.setup_plugin_site_packages()

    def setup_plugin_site_packages(self):
        # Copy the plugin module into the fake site-packages directory.
        os.makedirs(self.plugins_site_packages)
        shutil.copy(self.plugin_filename, self.plugins_site_packages)

    def tearDown(self):
        super(TestPlugins, self).tearDown()
        self.files.remove_all()

    def assert_plugin_loaded(self, clidriver):
        self.assertIn('plugin-test-cmd', clidriver.subcommand_table)

    def assert_plugin_not_loaded(self, clidriver):
        self.assertNotIn('plugin-test-cmd', clidriver.subcommand_table)

    def create_config(self, config_contents):
        # Write a config file and point AWS_CONFIG_FILE at it.
        config_file = self.files.create_file('config', config_contents)
        self.environ['AWS_CONFIG_FILE'] = config_file

    def test_plugins_loaded_from_specified_path(self):
        self.create_config(
            '[plugins]\n'
            'cli_legacy_plugin_path = %s\n'
            'myplugin = %s\n' %
            (self.plugins_site_packages, self.plugin_module_name))
        clidriver = create_clidriver()
        self.assert_plugin_loaded(clidriver)

    def test_plugins_are_not_loaded_when_path_specified(self):
        # Without cli_legacy_plugin_path the plugin module is not importable,
        # so the plugin must not be loaded.
        self.create_config('[plugins]\n'
                           'myplugin = %s\n' % self.plugin_module_name)
        clidriver = create_clidriver()
        self.assert_plugin_not_loaded(clidriver)

    def test_looks_in_all_specified_paths(self):
        # The plugin path accepts multiple os.pathsep-separated entries;
        # a nonexistent first entry must not prevent the search.
        nonexistent_dir = os.path.join(self.files.rootdir, 'no-exist')
        plugin_path = os.pathsep.join(
            [nonexistent_dir, self.plugins_site_packages])
        self.create_config('[plugins]\n'
                           'cli_legacy_plugin_path = %s\n'
                           'myplugin = %s\n' %
                           (plugin_path, self.plugin_module_name))
        clidriver = create_clidriver()
        self.assert_plugin_loaded(clidriver)
Exemplo n.º 27
0
class TestCreateFunction(BaseAWSCommandParamsTest):
    """Tests for ``aws lambda create-function`` argument handling."""

    prefix = 'lambda create-function'

    def setUp(self):
        super(TestCreateFunction, self).setUp()

        # Make a temporary file
        self.files = FileCreator()
        self.contents_of_file = 'myzipcontents'
        self.temp_file = self.files.create_file('foo', self.contents_of_file)

    def tearDown(self):
        super(TestCreateFunction, self).tearDown()
        self.files.remove_all()

    def test_create_function(self):
        # A literal --zip-file value is passed through as Code.ZipFile.
        cmdline = self.prefix
        cmdline += ' --function-name myfunction --runtime myruntime'
        cmdline += ' --role myrole --handler myhandler --zip-file myzip'
        result = {
            'FunctionName': 'myfunction',
            'Runtime': 'myruntime',
            'Role': 'myrole',
            'Handler': 'myhandler',
            'Code': {
                'ZipFile': 'myzip'
            }
        }
        self.assert_params_for_cmd(cmdline, result)

    def test_create_function_with_file(self):
        # A file:// --zip-file value is replaced by the file's contents.
        cmdline = self.prefix
        cmdline += ' --function-name myfunction --runtime myruntime'
        cmdline += ' --role myrole --handler myhandler'
        cmdline += ' --zip-file file://%s' % self.temp_file
        result = {
            'FunctionName': 'myfunction',
            'Runtime': 'myruntime',
            'Role': 'myrole',
            'Handler': 'myhandler',
            'Code': {
                'ZipFile': self.contents_of_file
            }
        }
        self.assert_params_for_cmd(cmdline, result)

    def test_create_function_code_argument_cause_error(self):
        # The raw --code argument is rejected in favor of --zip-file.
        cmdline = self.prefix
        cmdline += ' --function-name myfunction --runtime myruntime'
        cmdline += ' --role myrole --handler myhandler --zip-file myzip'
        cmdline += ' --code mycode'
        stdout, stderr, rc = self.run_cmd(cmdline, expected_rc=255)
        self.assertIn('Unknown options: --code', stderr)
Exemplo n.º 28
0
class BaseRekognitionTest(BaseAWSCommandParamsTest):
    """Shared fixture: a temp file plus its raw bytes for assertions."""

    def setUp(self):
        super(BaseRekognitionTest, self).setUp()
        self.files = FileCreator()
        self.temp_file = self.files.create_file('foo', 'mycontents')
        # Capture the exact bytes so tests can compare request payloads.
        with open(self.temp_file, mode='rb') as temp_fp:
            self.temp_file_bytes = temp_fp.read()

    def tearDown(self):
        super(BaseRekognitionTest, self).tearDown()
        self.files.remove_all()
Exemplo n.º 29
0
class TestIsReadable(unittest.TestCase):
    """is_readable should return False when a path cannot be read."""

    def setUp(self):
        self.files = FileCreator()
        self.filename = 'foo'
        self.full_path = os.path.join(self.files.rootdir, self.filename)

    def tearDown(self):
        self.files.remove_all()

    def test_unreadable_file(self):
        # Patch the module's _open so the read attempt raises OSError.
        self.files.create_file(self.filename, contents="foo")
        patched = 'awscli.customizations.s3.filegenerator._open'
        with mock.patch(patched) as mocked_open:
            mocked_open.side_effect = OSError()
            self.assertFalse(is_readable(self.full_path))

    def test_unreadable_directory(self):
        # Listing the directory raises OSError, so it is unreadable.
        os.mkdir(self.full_path)
        with mock.patch('os.listdir') as mocked_listdir:
            mocked_listdir.side_effect = OSError()
            self.assertFalse(is_readable(self.full_path))
Exemplo n.º 30
0
class TestIsReadable(unittest.TestCase):
    """is_readable should return False when a path cannot be read."""

    def setUp(self):
        self.files = FileCreator()
        self.filename = 'foo'
        self.full_path = os.path.join(self.files.rootdir, self.filename)

    def tearDown(self):
        self.files.remove_all()

    def test_unreadable_file(self):
        # Patch the module's _open so the read attempt raises OSError.
        self.files.create_file(self.filename, contents="foo")
        open_function = 'awscli.customizations.s3.filegenerator._open'
        with mock.patch(open_function) as mock_class:
            mock_class.side_effect = OSError()
            self.assertFalse(is_readable(self.full_path))

    def test_unreadable_directory(self):
        # Listing the directory raises OSError, so it is unreadable.
        os.mkdir(self.full_path)
        with mock.patch('os.listdir') as mock_class:
            mock_class.side_effect = OSError()
            self.assertFalse(is_readable(self.full_path))
Exemplo n.º 31
0
class BaseRekognitionTest(BaseAWSCommandParamsTest):
    """Shared fixture: a temp file plus its raw bytes for assertions."""

    def setUp(self):
        super(BaseRekognitionTest, self).setUp()
        self.files = FileCreator()
        self.temp_file = self.files.create_file(
            'foo', 'mycontents')
        # Capture the exact bytes so tests can compare request payloads.
        with open(self.temp_file, 'rb') as f:
            self.temp_file_bytes = f.read()

    def tearDown(self):
        super(BaseRekognitionTest, self).tearDown()
        self.files.remove_all()
Exemplo n.º 32
0
class TestParamFile(unittest.TestCase):
    """Tests for get_paramfile with file:// and fileb:// prefixes."""

    def setUp(self):
        self.files = FileCreator()

    def tearDown(self):
        self.files.remove_all()

    def test_text_file(self):
        # file:// loads the file's contents as text.
        contents = "This is a test"
        filename = self.files.create_file("foo", contents)
        prefixed_filename = "file://" + filename
        data = get_paramfile(prefixed_filename)
        self.assertEqual(data, contents)
        self.assertIsInstance(data, six.string_types)

    def test_binary_file(self):
        # fileb:// loads the file's contents as bytes.
        contents = "This is a test"
        filename = self.files.create_file("foo", contents)
        prefixed_filename = "fileb://" + filename
        data = get_paramfile(prefixed_filename)
        self.assertEqual(data, b"This is a test")
        self.assertIsInstance(data, six.binary_type)

    @skip_if_windows("Binary content error only occurs " "on non-Windows platforms.")
    def test_cannot_load_text_file(self):
        # Undecodable bytes behind file:// raise ResourceLoadingError.
        contents = b"\xbfX\xac\xbe"
        filename = self.files.create_file("foo", contents, mode="wb")
        prefixed_filename = "file://" + filename
        with self.assertRaises(ResourceLoadingError):
            get_paramfile(prefixed_filename)

    def test_file_does_not_exist_raises_error(self):
        with self.assertRaises(ResourceLoadingError):
            get_paramfile("file://file/does/not/existsasdf.txt")

    def test_no_match_uris_returns_none(self):
        # Unknown prefixes are ignored; None is returned.
        self.assertIsNone(get_paramfile("foobar://somewhere.bar"))

    def test_non_string_type_returns_none(self):
        self.assertIsNone(get_paramfile(100))
Exemplo n.º 33
0
 def setUp(self):
     """Build fixture files, expected names/ARNs, and expected stdout."""
     super(TestDeployCommand, self).setUp()
     # setup required values
     # NOTE(review): ``files`` is a local, never stored on ``self`` — the
     # created temp files are presumably cleaned up elsewhere; confirm.
     files = FileCreator()
     self.task_def_file = files.create_file('taskDef.json',
                                            json.dumps(
                                                self.TASK_DEFINITION_JSON),
                                            mode='w')
     self.appspec_file = files.create_file('appspec.yaml',
                                           self.YAML_APPSPEC,
                                           mode='w')
     self.appspec_file_json = files.create_file('appspec.json',
                                                self.JSON_APPSPEC,
                                                mode='w')
     self.service_name = 'serviceTest'
     self.service_arn = 'arn:aws:ecs:::service/serviceTest'
     # setup default optional values
     self.cluster_name = 'default'
     self.cluster_arn = 'arn:aws:ecs:::cluster/default'
     self.application_name = get_app_name(self.service_name,
                                          self.cluster_name, None)
     self.deployment_group_name = get_deploy_group_name(
         self.service_name, self.cluster_name, None)
     # setup test response resources
     self.missing_properties_appspec = files.create_file('appspec_bad.yaml',
                                                         self.BAD_APPSPEC,
                                                         mode='w')
     self.task_definition_arn = \
         'arn:aws:ecs::1234567890:task-definition\\test:2'
     self.deployment_id = 'd-1234567XX'
     self.mock_deployer = CodeDeployer(None, self.APPSPEC_DICT)
     self.mock_deployer.update_task_def_arn(self.task_definition_arn)
     # The full stdout expected from a successful deploy run.
     self.expected_stdout = ("Successfully registered new ECS task "
                             "definition " + self.task_definition_arn + "\n"
                             "Successfully created deployment " +
                             self.deployment_id + "\n"
                             "Waiting for " + self.deployment_id +
                             " to succeed...\nSuccessfully deployed " +
                             self.task_definition_arn + " to service '" +
                             self.service_name + "'\n")
Exemplo n.º 34
0
class TestDeployCommand(BaseAWSCommandParamsTest):
    """Tests for ``cloudformation deploy`` exit codes on empty changesets."""

    def setUp(self):
        super(TestDeployCommand, self).setUp()
        self.files = FileCreator()
        self.parsed_responses = [
            # First it checks to see if a stack with that name exists. So
            # we fake a response indicating that the stack exists and is in
            # an OK state.
            {
                'Stacks': {
                    'StackName': 'Stack',
                    'StackStatus': 'UPDATE_COMPLETE'
                }
            },
            # Now it creates a changeset, so we fake a response with an ID.
            {
                'Id': 'FakeChangeSetId'
            },
            # This fakes a failed response from the waiter because the
            # changeset was empty.
            {
                'StackName':
                'Stack',
                'Status':
                'FAILED',
                'StatusReason':
                ('The submitted information didn\'t contain changes. '
                 'Submit different information to create a change set.'),
                'ExecutionStatus':
                'UNAVAILABLE'
            },
        ]
        # The template is inspected before we make any of the calls so it
        # needs to have valid JSON content.
        path = self.files.create_file('template.json', '{}')
        self.command = ('cloudformation deploy --template-file %s '
                        '--stack-name Stack') % path

    def tearDown(self):
        self.files.remove_all()
        super(TestDeployCommand, self).tearDown()

    def test_does_return_zero_exit_code_on_empty_changeset_by_default(self):
        self.run_cmd(self.command, expected_rc=0)

    def test_does_return_zero_exit_code_on_empty_changeset(self):
        self.command += ' --no-fail-on-empty-changeset'
        self.run_cmd(self.command, expected_rc=0)

    def test_does_return_non_zero_exit_code_on_empty_changeset(self):
        # Opting in to --fail-on-empty-changeset turns the empty changeset
        # into a hard failure (rc 255).
        self.command += ' --fail-on-empty-changeset'
        self.run_cmd(self.command, expected_rc=255)
Exemplo n.º 35
0
class TestParamFile(unittest.TestCase):
    """get_paramfile should load file:// as text and fileb:// as bytes."""

    def setUp(self):
        self.files = FileCreator()

    def tearDown(self):
        self.files.remove_all()

    def test_text_file(self):
        body = 'This is a test'
        path = self.files.create_file('foo', body)
        loaded = get_paramfile('file://' + path)
        self.assertEqual(loaded, body)
        self.assertIsInstance(loaded, six.string_types)

    def test_binary_file(self):
        body = 'This is a test'
        path = self.files.create_file('foo', body)
        loaded = get_paramfile('fileb://' + path)
        self.assertEqual(loaded, b'This is a test')
        self.assertIsInstance(loaded, six.binary_type)
Exemplo n.º 36
0
class TestCreateFunction(BaseAWSCommandParamsTest):
    """Tests for ``aws lambda create-function`` argument handling."""

    prefix = 'lambda create-function'

    def setUp(self):
        super(TestCreateFunction, self).setUp()

        # Make a temporary file
        self.files = FileCreator()
        self.contents_of_file = 'myzipcontents'
        self.temp_file = self.files.create_file(
            'foo', self.contents_of_file)

    def tearDown(self):
        super(TestCreateFunction, self).tearDown()
        self.files.remove_all()

    def test_create_function(self):
        # A literal --zip-file value is passed through as Code.ZipFile.
        cmdline = self.prefix
        cmdline += ' --function-name myfunction --runtime myruntime'
        cmdline += ' --role myrole --handler myhandler --zip-file myzip'
        result = {
            'FunctionName': 'myfunction',
            'Runtime': 'myruntime',
            'Role': 'myrole',
            'Handler': 'myhandler',
            'Code': {'ZipFile': 'myzip'}
        }
        self.assert_params_for_cmd(cmdline, result)

    def test_create_function_with_file(self):
        # A file:// --zip-file value is replaced by the file's contents.
        cmdline = self.prefix
        cmdline += ' --function-name myfunction --runtime myruntime'
        cmdline += ' --role myrole --handler myhandler'
        cmdline += ' --zip-file file://%s' % self.temp_file
        result = {
            'FunctionName': 'myfunction',
            'Runtime': 'myruntime',
            'Role': 'myrole',
            'Handler': 'myhandler',
            'Code': {'ZipFile': self.contents_of_file}
        }
        self.assert_params_for_cmd(cmdline, result)

    def test_create_function_code_argument_cause_error(self):
        # The raw --code argument is rejected in favor of --zip-file.
        cmdline = self.prefix
        cmdline += ' --function-name myfunction --runtime myruntime'
        cmdline += ' --role myrole --handler myhandler --zip-file myzip'
        cmdline += ' --code mycode'
        stdout, stderr, rc = self.run_cmd(cmdline, expected_rc=255)
        self.assertIn('Unknown options: --code', stderr)
Exemplo n.º 37
0
class BaseLambdaTests(BaseAWSCommandParamsTest):
    """Shared fixture that builds a zip archive for lambda tests."""

    def setUp(self):
        super(BaseLambdaTests, self).setUp()
        self.files = FileCreator()
        self.temp_file = self.files.create_file('foo', 'mycontents')
        self.zip_file = os.path.join(self.files.rootdir, 'foo.zip')
        # Build the archive, then read the raw bytes back so tests can
        # assert against the exact payload.
        with closing(zipfile.ZipFile(self.zip_file, 'w')) as archive:
            archive.write(self.temp_file)
        with open(self.zip_file, 'rb') as zip_fp:
            self.zip_file_contents = zip_fp.read()

    def tearDown(self):
        super(BaseLambdaTests, self).tearDown()
        self.files.remove_all()
Exemplo n.º 38
0
class BaseLambdaTests(BaseAWSCommandParamsTest):
    """Base class providing a temp file zipped up for lambda tests."""

    def setUp(self):
        super(BaseLambdaTests, self).setUp()
        self.files = FileCreator()
        self.temp_file = self.files.create_file("foo", "mycontents")
        self.zip_file = os.path.join(self.files.rootdir, "foo.zip")
        # Write the temp file into a zip and capture the archive bytes.
        archive = zipfile.ZipFile(self.zip_file, "w")
        with closing(archive):
            archive.write(self.temp_file)
        with open(self.zip_file, "rb") as fileobj:
            self.zip_file_contents = fileobj.read()

    def tearDown(self):
        super(BaseLambdaTests, self).tearDown()
        self.files.remove_all()
Exemplo n.º 39
0
class TestAliases(BaseAWSHelpOutputTest):
    """Verify user-defined aliases are excluded from the main help."""

    def setUp(self):
        super(TestAliases, self).setUp()
        self.files = FileCreator()
        self.alias_file = self.files.create_file('alias', '[toplevel]\n')
        self.driver.alias_loader = AliasLoader(self.alias_file)

    def tearDown(self):
        super(TestAliases, self).tearDown()
        self.files.remove_all()

    def add_alias(self, alias_name, alias_value):
        # Append one "name = value" entry to the alias file.
        entry = '%s = %s\n' % (alias_name, alias_value)
        with open(self.alias_file, 'a+') as handle:
            handle.write(entry)

    def test_alias_not_in_main_help(self):
        # Register an alias, render top-level help, and confirm the
        # alias name does not appear in the output.
        self.add_alias('my-alias', 'ec2 describe-regions')
        self.driver.main(['help'])
        self.assert_not_contains('my-alias')
Exemplo n.º 40
0
class TestAliases(BaseAWSHelpOutputTest):
    """Tests that aliases stay hidden from the top-level help page."""

    def setUp(self):
        super(TestAliases, self).setUp()
        self.files = FileCreator()
        self.alias_file = self.files.create_file('alias', '[toplevel]\n')
        self.driver.alias_loader = AliasLoader(self.alias_file)

    def tearDown(self):
        super(TestAliases, self).tearDown()
        self.files.remove_all()

    def add_alias(self, alias_name, alias_value):
        # Write a single alias definition in "name = value" form.
        with open(self.alias_file, 'a+') as alias_fp:
            alias_fp.write('%s = %s\n' % (alias_name, alias_value))

    def test_alias_not_in_main_help(self):
        self.add_alias('my-alias', 'ec2 describe-regions')
        self.driver.main(['help'])
        # The alias must not be listed alongside real commands.
        self.assert_not_contains('my-alias')
Exemplo n.º 41
0
class TestDescribeVolumes(BaseAWSCommandParamsTest):
    """Tests for the MaxResults default behavior of ``ec2 describe-volumes``."""

    prefix = 'ec2 describe-volumes'

    def setUp(self):
        super(TestDescribeVolumes, self).setUp()
        self.file_creator = FileCreator()

    def tearDown(self):
        super(TestDescribeVolumes, self).tearDown()
        # Use FileCreator's own cleanup API (consistent with the other
        # test classes in this file) rather than reaching into its
        # rootdir with shutil.rmtree.
        self.file_creator.remove_all()

    def test_max_results_set_by_default(self):
        # With no narrowing arguments, MaxResults=1000 is injected.
        command = self.prefix
        params = {'MaxResults': 1000}
        self.assert_params_for_cmd(command, params)

    def test_max_results_not_set_with_volume_ids(self):
        # Explicit volume ids suppress the default MaxResults.
        command = self.prefix + ' --volume-ids id-volume'
        params = {'VolumeIds': ['id-volume']}
        self.assert_params_for_cmd(command, params)

    def test_max_results_not_set_with_filter(self):
        # Filters also suppress the default MaxResults.
        command = self.prefix + ' --filters Name=volume-id,Values=id-volume'
        params = {'Filters': [{'Name': 'volume-id', 'Values': ['id-volume']}]}
        self.assert_params_for_cmd(command, params)

    def test_max_results_not_overwritten(self):
        # An explicit --max-results (or --page-size) wins over the default.
        command = self.prefix + ' --max-results 5'
        params = {'MaxResults': 5}
        self.assert_params_for_cmd(command, params)

        command = self.prefix + ' --page-size 5'
        self.assert_params_for_cmd(command, params)

    def test_max_results_with_cli_input_json(self):
        # Parameters supplied via --cli-input-json pass through unchanged.
        params = {'VolumeIds': ['vol-12345']}
        file_path = self.file_creator.create_file(
            'params.json', json.dumps(params))

        command = self.prefix + ' --cli-input-json file://%s' % file_path
        self.assert_params_for_cmd(command, params)
Exemplo n.º 42
0
class TestTopicDocumentEventHandlerBase(unittest.TestCase):
    """Base fixture that points the topic tag DB at temporary files."""

    def setUp(self):
        self.session = mock.Mock()
        self.file_creator = FileCreator()

        self.tags_dict = {}

        # Write the JSON tag index in one step; the original created an
        # empty file and then re-opened it just to json.dump into it.
        self.json_index = self.file_creator.create_file(
            'index.json',
            json.dumps(self.tags_dict, indent=4, sort_keys=True))

        # Redirect TopicTagDB's class-level index/dir attributes at the
        # temporary fixtures for the duration of each test.
        self.index_patch = mock.patch('awscli.topictags.TopicTagDB.index_file',
                                      self.json_index)
        self.dir_patch = mock.patch('awscli.topictags.TopicTagDB.topic_dir',
                                    self.file_creator.rootdir)
        self.index_patch.start()
        self.dir_patch.start()

    def tearDown(self):
        # Stop the patches (reverse start order) before removing the
        # temporary files they point at.
        self.dir_patch.stop()
        self.index_patch.stop()
        self.file_creator.remove_all()
Exemplo n.º 43
0
class TestSyncCommand(BaseAWSCommandParamsTest):
    """Tests for the ``s3 sync`` command."""

    prefix = 's3 sync '

    def setUp(self):
        super(TestSyncCommand, self).setUp()
        self.files = FileCreator()

    def tearDown(self):
        super(TestSyncCommand, self).tearDown()
        self.files.remove_all()

    def test_website_redirect_ignore_paramfile(self):
        self.files.create_file('foo.txt', 'mycontent')
        redirect = 'http://someserver'
        cmdline = '%s %s s3://bucket/key.txt --website-redirect %s' % \
            (self.prefix, self.files.rootdir, redirect)
        self.parsed_responses = [
            {"CommonPrefixes": [], "Contents": []},
            {'ETag': '"c8afdb36c52cf4727836669019e69222"'}
        ]
        self.run_cmd(cmdline, expected_rc=0)

        # Exactly two operations: the listing followed by the upload.
        self.assertEqual(
            len(self.operations_called), 2, self.operations_called)
        operation_names = [op[0].name for op in self.operations_called]
        self.assertEqual(operation_names, ['ListObjects', 'PutObject'])
        # The literal URL must be sent, not the contents behind it.
        put_kwargs = self.operations_called[1][1]
        self.assertEqual(put_kwargs['website_redirect_location'], redirect)

    def test_no_recursive_option(self):
        # ``--recursive`` is invalid for sync; rc 2 == invalid parameter.
        cmdline = '. s3://mybucket --recursive'
        self.run_cmd(cmdline, expected_rc=2)
Exemplo n.º 44
0
class TestMvCommand(BaseAWSCommandParamsTest):
    """Tests for the ``s3 mv`` command."""

    prefix = 's3 mv '

    def setUp(self):
        super(TestMvCommand, self).setUp()
        self.files = FileCreator()

    def tearDown(self):
        super(TestMvCommand, self).tearDown()
        self.files.remove_all()

    def _assert_mv_onto_itself_fails(self, cmdline):
        # Expect rc 255 and the self-move error message on stderr.
        stderr = self.run_cmd(cmdline, expected_rc=255)[1]
        self.assertIn('Cannot mv a file onto itself', stderr)

    def test_cant_mv_object_onto_itself(self):
        self._assert_mv_onto_itself_fails(
            '%s s3://bucket/key s3://bucket/key' % self.prefix)

    def test_cant_mv_object_with_implied_name(self):
        # The "key" key name is implied by the trailing-slash destination.
        self._assert_mv_onto_itself_fails(
            '%s s3://bucket/key s3://bucket/' % self.prefix)

    def test_website_redirect_ignore_paramfile(self):
        source = self.files.create_file('foo.txt', 'mycontent')
        redirect = 'http://someserver'
        cmdline = '%s %s s3://bucket/key.txt --website-redirect %s' % \
            (self.prefix, source, redirect)
        self.parsed_responses = [{
            'ETag': '"c8afdb36c52cf4727836669019e69222"'
        }]
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(self.operations_called[0][0].name, 'PutObject')
        # The literal URL must be sent, not the contents behind it.
        self.assertEqual(
            self.operations_called[0][1]['website_redirect_location'],
            redirect)
Exemplo n.º 45
0
class TestMvCommand(BaseAWSCommandParamsTest):
    """Tests covering error and paramfile behavior of ``s3 mv``."""

    prefix = 's3 mv '

    def setUp(self):
        super(TestMvCommand, self).setUp()
        self.files = FileCreator()

    def tearDown(self):
        super(TestMvCommand, self).tearDown()
        self.files.remove_all()

    def test_cant_mv_object_onto_itself(self):
        # Moving a key onto the exact same key is rejected.
        cmdline = '%s s3://bucket/key s3://bucket/key' % self.prefix
        _, stderr, _ = self.run_cmd(cmdline, expected_rc=255)
        self.assertIn('Cannot mv a file onto itself', stderr)

    def test_cant_mv_object_with_implied_name(self):
        # The "key" key name is implied in the dst argument, so this is
        # also a move onto itself.
        cmdline = '%s s3://bucket/key s3://bucket/' % self.prefix
        _, stderr, _ = self.run_cmd(cmdline, expected_rc=255)
        self.assertIn('Cannot mv a file onto itself', stderr)

    def test_website_redirect_ignore_paramfile(self):
        local_file = self.files.create_file('foo.txt', 'mycontent')
        cmdline = '%s %s s3://bucket/key.txt --website-redirect %s' % \
            (self.prefix, local_file, 'http://someserver')
        self.parsed_responses = [{'ETag': '"c8afdb36c52cf4727836669019e69222"'}]
        self.run_cmd(cmdline, expected_rc=0)
        first_op, first_kwargs = self.operations_called[0]
        self.assertEqual(first_op.name, 'PutObject')
        # The specified web address is used as-is rather than being
        # fetched and replaced by its contents.
        self.assertEqual(
            first_kwargs['website_redirect_location'], 'http://someserver')
Exemplo n.º 46
0
class TestValidateDirectory(unittest.TestCase):
    """Exercise validate_directory() across several directory layouts."""

    def setUp(self):
        self.file_creator = FileCreator()
        self.dir_root = self.file_creator.rootdir

    def tearDown(self):
        self.file_creator.remove_all()

    def _make_empty_subdir(self, name):
        # Create (and return) an empty directory under the root.
        path = os.path.join(self.dir_root, name)
        os.makedirs(path)
        return path

    def test_directory_contains_single_file(self):
        self.file_creator.create_file("foo", "")
        self.assertTrue(validate_directory(self.dir_root))

    def test_directory_contains_file_and_empty_directory(self):
        self._make_empty_subdir("foo")
        self.file_creator.create_file("bar", "")
        self.assertTrue(validate_directory(self.dir_root))

    def test_nested_file(self):
        self.file_creator.create_file("mydir/bar", "")
        self.assertTrue(validate_directory(self.dir_root))

    def test_empty_directory(self):
        self.assertFalse(validate_directory(self.dir_root))

    def test_nonexistent_directory(self):
        missing = os.path.join(self.dir_root, "does_not_exist")
        self.assertFalse(validate_directory(missing))

    def test_nonprovided_directory(self):
        # An empty string is not a usable directory.
        self.assertFalse(validate_directory(""))

    def test_empty_nested_directory(self):
        # A directory containing only an empty subdirectory is invalid.
        self._make_empty_subdir("foo")
        self.assertFalse(validate_directory(self.dir_root))
Exemplo n.º 47
0
class TestValidateDirectory(unittest.TestCase):
    """validate_directory() should accept only directories holding files."""

    def setUp(self):
        self.file_creator = FileCreator()
        self.dir_root = self.file_creator.rootdir

    def tearDown(self):
        self.file_creator.remove_all()

    def test_directory_contains_single_file(self):
        # A single regular file makes the directory valid.
        self.file_creator.create_file('foo', '')
        self.assertTrue(validate_directory(self.dir_root))

    def test_directory_contains_file_and_empty_directory(self):
        # An empty subdirectory does not invalidate a dir with a file.
        os.makedirs(os.path.join(self.dir_root, 'foo'))
        self.file_creator.create_file('bar', '')
        self.assertTrue(validate_directory(self.dir_root))

    def test_nested_file(self):
        # Files nested below the root still count.
        self.file_creator.create_file('mydir/bar', '')
        self.assertTrue(validate_directory(self.dir_root))

    def test_empty_directory(self):
        self.assertFalse(validate_directory(self.dir_root))

    def test_nonexistent_directory(self):
        self.assertFalse(
            validate_directory(os.path.join(self.dir_root, 'does_not_exist')))

    def test_nonprovided_directory(self):
        self.assertFalse(validate_directory(''))

    def test_empty_nested_directory(self):
        # Only an empty subdirectory inside: still no files, so invalid.
        os.makedirs(os.path.join(self.dir_root, 'foo'))
        self.assertFalse(validate_directory(self.dir_root))
Exemplo n.º 48
0
class TestUploadBuild(BaseAWSCommandParamsTest):
    """Tests for the ``gamelift upload-build`` command.

    The same three-response stub (CreateBuild, RequestUploadCredentials,
    PutObject) and the same success-path assertions were copy-pasted in
    every test; they are factored into private helpers here.
    """

    prefix = 'gamelift upload-build'

    def setUp(self):
        super(TestUploadBuild, self).setUp()
        self.files = FileCreator()

    def tearDown(self):
        super(TestUploadBuild, self).tearDown()
        self.files.remove_all()

    def _stub_responses(self):
        # Responses for CreateBuild, RequestUploadCredentials and the
        # final S3 PutObject, in call order.
        return [{
            'Build': {
                'BuildId': 'myid'
            }
        }, {
            'StorageLocation': {
                'Bucket': 'mybucket',
                'Key': 'mykey'
            },
            'UploadCredentials': {
                'AccessKeyId': 'myaccesskey',
                'SecretAccessKey': 'mysecretkey',
                'SessionToken': 'mytoken'
            }
        }, {}]

    def _assert_successful_upload(self, stdout, expected_create_params):
        # First the build is created.
        self.assertEqual(len(self.operations_called), 3)
        self.assertEqual(self.operations_called[0][0].name, 'CreateBuild')
        self.assertEqual(self.operations_called[0][1],
                         expected_create_params)

        # Second the credentials are requested.
        self.assertEqual(self.operations_called[1][0].name,
                         'RequestUploadCredentials')
        self.assertEqual(self.operations_called[1][1], {'BuildId': 'myid'})

        # The build is then uploaded to S3.
        self.assertEqual(self.operations_called[2][0].name, 'PutObject')
        self.assertEqual(self.operations_called[2][1], {
            'Body': mock.ANY,
            'Bucket': 'mybucket',
            'Key': 'mykey'
        })

        # Check the user-facing output of the command.
        self.assertIn(
            'Successfully uploaded %s to AWS GameLift' % self.files.rootdir,
            stdout)
        self.assertIn('Build ID: myid', stdout)

    def _assert_upload_failure(self, stderr, build_root):
        # All invalid-build-root failures share this stderr message.
        self.assertIn(
            'Fail to upload %s. '
            'The build root directory is empty or does not exist.\n' %
            build_root, stderr)

    def test_upload_build(self):
        self.files.create_file('tmpfile', 'Some contents')
        cmdline = self.prefix
        cmdline += ' --name mybuild --build-version myversion'
        cmdline += ' --build-root %s' % self.files.rootdir

        self.parsed_responses = self._stub_responses()

        stdout, stderr, rc = self.run_cmd(cmdline, expected_rc=0)

        self._assert_successful_upload(stdout, {
            'Name': 'mybuild',
            'Version': 'myversion'
        })

    def test_upload_build_with_operating_system_param(self):
        self.files.create_file('tmpfile', 'Some contents')
        cmdline = self.prefix
        cmdline += ' --name mybuild --build-version myversion'
        cmdline += ' --build-root %s' % self.files.rootdir
        cmdline += ' --operating-system WINDOWS_2012'

        self.parsed_responses = self._stub_responses()

        stdout, stderr, rc = self.run_cmd(cmdline, expected_rc=0)

        # --operating-system is forwarded to CreateBuild.
        self._assert_successful_upload(stdout, {
            'Name': 'mybuild',
            'Version': 'myversion',
            'OperatingSystem': 'WINDOWS_2012'
        })

    def test_upload_build_with_empty_directory(self):
        cmdline = self.prefix
        cmdline += ' --name mybuild --build-version myversion'
        cmdline += ' --build-root %s' % self.files.rootdir

        self.parsed_responses = self._stub_responses()

        stdout, stderr, rc = self.run_cmd(cmdline, expected_rc=255)

        self._assert_upload_failure(stderr, self.files.rootdir)

    def test_upload_build_with_nonexistent_directory(self):
        dir_not_exist = os.path.join(self.files.rootdir, 'does_not_exist')

        cmdline = self.prefix
        cmdline += ' --name mybuild --build-version myversion'
        cmdline += ' --build-root %s' % dir_not_exist

        self.parsed_responses = self._stub_responses()

        stdout, stderr, rc = self.run_cmd(cmdline, expected_rc=255)

        self._assert_upload_failure(stderr, dir_not_exist)

    def test_upload_build_with_nonprovided_directory(self):
        cmdline = self.prefix
        cmdline += ' --name mybuild --build-version myversion'
        cmdline += ' --build-root %s' % '""'

        self.parsed_responses = self._stub_responses()

        stdout, stderr, rc = self.run_cmd(cmdline, expected_rc=255)

        self._assert_upload_failure(stderr, '""')
Exemplo n.º 49
0
class TestLocalDeleteRequestSubmitter(BaseTransferRequestSubmitterTest):
    """Tests for LocalDeleteRequestSubmitter (local-only ``delete``)."""

    def setUp(self):
        super(TestLocalDeleteRequestSubmitter, self).setUp()
        self.transfer_request_submitter = LocalDeleteRequestSubmitter(
            self.transfer_manager, self.result_queue, self.cli_params)
        self.file_creator = FileCreator()

    def tearDown(self):
        super(TestLocalDeleteRequestSubmitter, self).tearDown()
        self.file_creator.remove_all()

    def test_can_submit(self):
        # Accepts only local 'delete' operations.
        fileinfo = FileInfo(src=self.filename,
                            dest=None,
                            operation_name='delete',
                            src_type='local')
        self.assertTrue(self.transfer_request_submitter.can_submit(fileinfo))
        fileinfo.operation_name = 'foo'
        self.assertFalse(self.transfer_request_submitter.can_submit(fileinfo))

    def test_cannot_submit_remote_deletes(self):
        # An s3 src_type must be rejected even for 'delete'.
        fileinfo = FileInfo(src=self.filename,
                            dest=None,
                            operation_name='delete',
                            src_type='s3')
        self.assertFalse(self.transfer_request_submitter.can_submit(fileinfo))

    def test_submit(self):
        full_filename = self.file_creator.create_file(self.filename, 'content')
        fileinfo = FileInfo(src=full_filename,
                            dest=None,
                            operation_name='delete',
                            src_type='local')
        rval = self.transfer_request_submitter.submit(fileinfo)
        self.assertTrue(rval)

        # A QueuedResult is emitted first, with zero transfer size.
        queued_result = self.result_queue.get()
        self.assertIsInstance(queued_result, QueuedResult)
        self.assertEqual(queued_result.transfer_type, 'delete')
        self.assertTrue(queued_result.src.endswith(self.filename))
        self.assertIsNone(queued_result.dest)
        self.assertEqual(queued_result.total_transfer_size, 0)

        # Renamed from the misleading ``failure_result``: this queue
        # entry is the SuccessResult for the completed delete.
        success_result = self.result_queue.get()
        self.assertIsInstance(success_result, SuccessResult)
        self.assertEqual(success_result.transfer_type, 'delete')
        self.assertTrue(success_result.src.endswith(self.filename))
        self.assertIsNone(success_result.dest)

        # The file must actually be gone from disk.
        self.assertFalse(os.path.exists(full_filename))

    def test_submit_with_exception(self):
        fileinfo = FileInfo(src=self.filename,
                            dest=None,
                            operation_name='delete',
                            src_type='local')
        # The file was never created so it should trigger an exception
        # when it is attempted to be deleted in the submitter.
        rval = self.transfer_request_submitter.submit(fileinfo)
        self.assertTrue(rval)

        queued_result = self.result_queue.get()
        self.assertIsInstance(queued_result, QueuedResult)
        self.assertEqual(queued_result.transfer_type, 'delete')
        self.assertTrue(queued_result.src.endswith(self.filename))
        self.assertIsNone(queued_result.dest)
        self.assertEqual(queued_result.total_transfer_size, 0)

        failure_result = self.result_queue.get()
        self.assertIsInstance(failure_result, FailureResult)
        self.assertEqual(failure_result.transfer_type, 'delete')
        self.assertTrue(failure_result.src.endswith(self.filename))
        self.assertIsNone(failure_result.dest)

    def test_dry_run(self):
        # With dryrun enabled only a DryRunResult is queued; nothing is
        # deleted.
        self.cli_params['dryrun'] = True
        fileinfo = FileInfo(src=self.filename,
                            src_type='local',
                            dest=self.filename,
                            dest_type='local',
                            operation_name='delete')
        self.transfer_request_submitter.submit(fileinfo)

        result = self.result_queue.get()
        self.assertIsInstance(result, DryRunResult)
        self.assertEqual(result.transfer_type, 'delete')
        self.assertTrue(result.src.endswith(self.filename))
        self.assertIsNone(result.dest)
Exemplo n.º 50
0
class TestSymlinksIgnoreFiles(unittest.TestCase):
    """
    This class tests the ability to list out the correct local files
    depending on if symlinks are being followed.  Also tests to ensure
    broken symlinks fail.
    """
    def setUp(self):
        self.session = FakeSession()
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        self.files = FileCreator()
        # List of local filenames.
        self.filenames = []
        self.root = self.files.rootdir
        self.bucket = 'bucket/'
        filename_1 = self.files.create_file('foo.txt',
                                            contents='foo.txt contents')
        self.filenames.append(filename_1)
        nested_dir = os.path.join(self.root, 'realfiles')
        os.mkdir(nested_dir)
        filename_2 = self.files.create_file(os.path.join(nested_dir,
                                                         'bar.txt'),
                                            contents='bar.txt contents')
        self.filenames.append(filename_2)
        # Names of symlinks.
        self.symlinks = []
        # Names of files if symlinks are followed.
        self.symlink_files = []
        # Create symlink to file foo.txt.
        symlink_1 = os.path.join(self.root, 'symlink_1')
        os.symlink(filename_1, symlink_1)
        self.symlinks.append(symlink_1)
        self.symlink_files.append(symlink_1)
        # Create a symlink to a file that does not exist.
        symlink_2 = os.path.join(self.root, 'symlink_2')
        os.symlink('non-existent-file', symlink_2)
        self.symlinks.append(symlink_2)
        # Create a symlink to directory realfiles
        symlink_3 = os.path.join(self.root, 'symlink_3')
        os.symlink(nested_dir, symlink_3)
        self.symlinks.append(symlink_3)
        self.symlink_files.append(os.path.join(symlink_3, 'bar.txt'))

    def tearDown(self):
        self.files.remove_all()

    def _dir_op_input(self):
        # Shared FileGenerator input: recursive local -> s3 copy rooted
        # at the temp directory.
        abs_root = six.text_type(os.path.abspath(self.root) + os.sep)
        return {'src': {'path': abs_root,
                        'type': 'local'},
                'dest': {'path': self.bucket,
                         'type': 's3'},
                'dir_op': True, 'use_src_name': True}

    def _assert_sources(self, file_stats, expected_filenames):
        # Compare generated src paths against the sorted expected names.
        expected = sorted(expected_filenames)
        result_list = []
        for file_stat in file_stats:
            result_list.append(getattr(file_stat, 'src'))
        self.assertEqual(len(result_list), len(expected))
        # Just check to make sure the right local files are generated.
        for i in range(len(result_list)):
            filename = six.text_type(os.path.abspath(expected[i]))
            self.assertEqual(result_list[i], filename)

    def test_no_follow_symlink(self):
        # follow_symlinks=False: only the real files are listed.
        file_stats = FileGenerator(self.service, self.endpoint,
                                   '', False).call(self._dir_op_input())
        self._assert_sources(file_stats, self.filenames)

    def test_warn_bad_symlink(self):
        """
        This tests to make sure it fails when following bad symlinks.
        """
        # The original built and called a FileGenerator and then
        # immediately discarded the result by building a second one;
        # the dead first call has been removed.
        file_gen = FileGenerator(self.service, self.endpoint, '', True)
        file_stats = file_gen.call(self._dir_op_input())
        self._assert_sources(file_stats, self.filenames + self.symlink_files)
        # The broken symlink should have queued a warning.
        self.assertFalse(file_gen.result_queue.empty())

    def test_follow_symlink(self):
        # First remove the bad symlink.
        os.remove(os.path.join(self.root, 'symlink_2'))
        # follow_symlinks=True: symlinked files are listed as well.
        file_stats = FileGenerator(self.service, self.endpoint,
                                   '', True).call(self._dir_op_input())
        self._assert_sources(file_stats, self.filenames + self.symlink_files)
Exemplo n.º 51
0
class BaseHistoryCommandParamsTest(BaseAWSCommandParamsTest):
    """Base fixture for ``history`` command tests.

    Enables cli_history via a temp AWS config, points the history DB at a
    temp file, and swaps in a fresh global HistoryRecorder per test.  The
    statement order in setUp matters: the clean recorder must be patched
    in before super().setUp() runs.
    """

    def setUp(self):
        # Patch a brand-new recorder in FIRST, before the base setUp can
        # touch any CLI machinery that reads HISTORY_RECORDER.
        history_recorder = self._make_clean_history_recorder()
        super(BaseHistoryCommandParamsTest, self).setUp()
        self.history_recorder = history_recorder
        self.files = FileCreator()
        # Temp config enabling cli_history, and a temp history DB file;
        # both are routed in through environment variables.
        config_contents = (
            '[default]\n'
            'cli_history = enabled'
        )
        self.environ['AWS_CONFIG_FILE'] = self.files.create_file(
            'config', config_contents)
        self.environ['AWS_CLI_HISTORY_FILE'] = self.files.create_file(
            'history.db', '')
        self.driver = create_clidriver()
        # The run_cmd patches stdout with a StringIO object (similar to what
        # nose does). Therefore it will run into issues when
        # get_binary_stdout is called because it returns sys.stdout.buffer
        # for Py3 and StringIO does not have a buffer
        self.binary_stdout_patch = mock.patch(
            'awscli.utils.get_binary_stdout')
        mock_get_binary_stdout = self.binary_stdout_patch.start()
        self.binary_stdout = BytesIO()
        mock_get_binary_stdout.return_value = self.binary_stdout

    def _make_clean_history_recorder(self):
        """Build a fresh HistoryRecorder and patch it into every module
        that imported the global one at import time."""
        # This is to ensure that for each new test run the CLI is using
        # a brand new HistoryRecorder as this is global so previous test
        # runs could have injected handlers onto it as all of the tests
        # are ran in the same process.
        history_recorder = HistoryRecorder()

        # The HISTORY_RECORDER is instantiated on module import before we
        # doing any patching which means we cannot simply patch
        # botocore.get_global_history_recorder as the objects are already
        # instantiated as so we have to individually patch each one of these...
        self._apply_history_recorder_patch(
            'awscli.clidriver', history_recorder)
        self._apply_history_recorder_patch(
            'awscli.customizations.history', history_recorder)
        return history_recorder

    def _apply_history_recorder_patch(self, module, history_recorder):
        """Patch ``<module>.HISTORY_RECORDER`` and register the undo."""
        patch_history_recorder = mock.patch(
            module + '.HISTORY_RECORDER', history_recorder)
        patch_history_recorder.start()
        self.addCleanup(patch_history_recorder.stop)

    def _cleanup_db_connections(self):
        # Reaching into private data to close out the database connection.
        # Windows won't let us delete the tempdir until these connections are
        # closed in the tearDown step and we have no other way of forcing
        # them to close.
        handlers = self.history_recorder._handlers
        for handler in handlers:
            handler._writer.close()

    def tearDown(self):
        """Close DB writers before removing temp files (Windows-safe)."""
        super(BaseHistoryCommandParamsTest, self).tearDown()
        self._cleanup_db_connections()
        self.files.remove_all()
        self.binary_stdout_patch.stop()
Exemplo n.º 52
0
class TestSymlinksIgnoreFiles(unittest.TestCase):
    """
    This class tests the ability to list out the correct local files
    depending on if symlinks are being followed.  Also tests to ensure
    broken symlinks fail.
    """
    def setUp(self):
        self.client = None
        self.files = FileCreator()
        # List of local filenames.
        self.filenames = []
        self.root = self.files.rootdir
        self.bucket = 'bucket/'
        filename_1 = self.files.create_file('foo.txt',
                                            contents='foo.txt contents')
        self.filenames.append(filename_1)
        nested_dir = os.path.join(self.root, 'realfiles')
        os.mkdir(nested_dir)
        filename_2 = self.files.create_file(
            os.path.join(nested_dir, 'bar.txt'),
            contents='bar.txt contents')
        self.filenames.append(filename_2)
        # Names of symlinks.
        self.symlinks = []
        # Names of files if symlinks are followed.
        self.symlink_files = []
        # Create symlink to file foo.txt.
        symlink_1 = os.path.join(self.root, 'symlink_1')
        os.symlink(filename_1, symlink_1)
        self.symlinks.append(symlink_1)
        self.symlink_files.append(symlink_1)
        # Create a symlink to a file that does not exist.
        symlink_2 = os.path.join(self.root, 'symlink_2')
        os.symlink('non-existent-file', symlink_2)
        self.symlinks.append(symlink_2)
        # Create a symlink to directory realfiles
        symlink_3 = os.path.join(self.root, 'symlink_3')
        os.symlink(nested_dir, symlink_3)
        self.symlinks.append(symlink_3)
        self.symlink_files.append(os.path.join(symlink_3, 'bar.txt'))

    def tearDown(self):
        self.files.remove_all()

    def _local_dir_op(self):
        """Return the call_args dict for a dir_op sync of the temp root."""
        abs_root = six.text_type(os.path.abspath(self.root) + os.sep)
        return {
            'src': {
                'path': abs_root,
                'type': 'local'
            },
            'dest': {
                'path': self.bucket,
                'type': 's3'
            },
            'dir_op': True,
            'use_src_name': True
        }

    def _assert_generated_files(self, file_stats, expected_filenames):
        """Assert the generated src paths match the expected local files.

        Consumes ``file_stats`` (a generator) fully, which also lets
        callers inspect the generator's result queue afterwards.
        """
        expected = sorted(expected_filenames)
        result_list = [file_stat.src for file_stat in file_stats]
        self.assertEqual(len(result_list), len(expected))
        # Just check to make sure the right local files are generated.
        for result, filename in zip(result_list, expected):
            self.assertEqual(
                result, six.text_type(os.path.abspath(filename)))

    def test_no_follow_symlink(self):
        # With follow_symlinks=False only the real files are listed.
        file_stats = FileGenerator(self.client, '',
                                   False).call(self._local_dir_op())
        self._assert_generated_files(file_stats, self.filenames)

    def test_warn_bad_symlink(self):
        """
        This tests to make sure it fails when following bad symlinks.
        """
        file_gen = FileGenerator(self.client, '', True)
        file_stats = file_gen.call(self._local_dir_op())
        self._assert_generated_files(
            file_stats, self.filenames + self.symlink_files)
        # The broken symlink should have queued a warning.
        self.assertFalse(file_gen.result_queue.empty())

    def test_follow_symlink(self):
        # First remove the bad symlink.
        os.remove(os.path.join(self.root, 'symlink_2'))
        file_stats = FileGenerator(self.client, '',
                                   True).call(self._local_dir_op())
        self._assert_generated_files(
            file_stats, self.filenames + self.symlink_files)
Exemplo n.º 53
0
class TestShow(unittest.TestCase):
    """Tests for the ``aws history show`` command's output."""

    def setUp(self):
        self.files = FileCreator()
        config_path = self.files.create_file(
            'config', (
                '[default]\n'
                'cli_history = enabled'
            )
        )
        self.environ = os.environ.copy()
        self.environ.update({
            'AWS_CONFIG_FILE': config_path,
            'AWS_DEFAULT_PROFILE': 'default',
            'AWS_DEFAULT_REGION': 'us-west-2',
            'AWS_CLI_HISTORY_FILE': os.path.join(
                self.files.rootdir, 'history.db'),
        })

    def tearDown(self):
        self.files.remove_all()

    def remove_color(self, output):
        """Strip ANSI color escape sequences from *output*."""
        return re.sub(r'\x1b[^m]*m', '', output)

    def assert_contains_in_order(self, lines, contents):
        """Assert every line appears in contents, in the order given."""
        last_pos = 0
        last_line = None
        for line in lines:
            self.assertIn(line, contents)
            pos = contents.find(line)
            if pos < last_pos:
                self.fail('Line: "%s" should have came after line: "%s"' % (
                    line, last_line))
            last_line = line
            last_pos = pos

    def test_show(self):
        # Make a call that does not require credentials just in case the
        # user was using the config file to provide credentials.
        cmd = ('sts assume-role-with-saml '
               '--role-arn  arn:aws:iam::...:invalid '
               '--principal-arn  arn:aws:iam::...:invalid  '
               '--saml-assertion fake-assertion')
        aws(cmd, env_vars=self.environ)
        # Now run the show command and make sure the general output is all
        # there.
        result = aws('history show', env_vars=self.environ)
        uncolored_content = self.remove_color(result.stdout)

        self.assert_contains_in_order(
            [
                'AWS CLI command entered',
                'with AWS CLI version: aws-cli/',
                "with arguments: ['sts', 'assume-role-with-saml',",
                '[0] API call made',
                'to service: sts',
                'using operation: AssumeRoleWithSAML',
                'with parameters: {',
                '    "PrincipalArn": "arn:aws:iam::...:invalid",',
                '    "RoleArn": "arn:aws:iam::...:invalid",',
                '    "SAMLAssertion": "fake-assertion"',
                '[0] HTTP request sent',
                'to URL: https://sts.amazonaws.com/',
                'with method: POST',
                'with body: Action=AssumeRoleWithSAML&Version=2011-06-15',
                '[0] HTTP response received',
                'with status code: 400',
                'with body: <?xml version="1.0" ?>',
                '[0] HTTP response parsed',
                'parsed to: {',
                '    "Error": {',
                'AWS CLI command exited',
                'with return code: 255'
            ],
            uncolored_content
        )
Exemplo n.º 54
0
class TestSyncCommand(BaseAWSCommandParamsTest):
    """Tests for the requests made by the ``aws s3 sync`` command."""

    prefix = 's3 sync '

    def setUp(self):
        super(TestSyncCommand, self).setUp()
        self.files = FileCreator()

    def tearDown(self):
        super(TestSyncCommand, self).tearDown()
        self.files.remove_all()

    def test_website_redirect_ignore_paramfile(self):
        # The file only needs to exist under rootdir so the sync has
        # something to upload; its returned path is not used.
        self.files.create_file('foo.txt', 'mycontent')
        cmdline = '%s %s s3://bucket/key.txt --website-redirect %s' % \
            (self.prefix, self.files.rootdir, 'http://someserver')
        self.parsed_responses = [
            {"CommonPrefixes": [], "Contents": []},
            {'ETag': '"c8afdb36c52cf4727836669019e69222"'}
        ]
        self.run_cmd(cmdline, expected_rc=0)

        # The only operations we should have called are ListObjects/PutObject.
        self.assertEqual(len(self.operations_called), 2, self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'ListObjects')
        self.assertEqual(self.operations_called[1][0].name, 'PutObject')
        # Make sure that the specified web address is used as opposed to the
        # contents of the web address when uploading the object
        self.assertEqual(
            self.operations_called[1][1]['WebsiteRedirectLocation'],
            'http://someserver'
        )

    def test_no_recursive_option(self):
        cmdline = '. s3://mybucket --recursive'
        # Return code will be 2 for invalid parameter ``--recursive``
        self.run_cmd(cmdline, expected_rc=2)

    def test_sync_from_non_existant_directory(self):
        non_existant_directory = os.path.join(self.files.rootdir, 'fakedir')
        cmdline = '%s %s s3://bucket/' % (self.prefix, non_existant_directory)
        self.parsed_responses = [
            {"CommonPrefixes": [], "Contents": []}
        ]
        # A missing source directory is a hard error (rc 255).
        _, stderr, _ = self.run_cmd(cmdline, expected_rc=255)
        self.assertIn('does not exist', stderr)

    def test_sync_to_non_existant_directory(self):
        key = 'foo.txt'
        non_existant_directory = os.path.join(self.files.rootdir, 'fakedir')
        cmdline = '%s s3://bucket/ %s' % (self.prefix, non_existant_directory)
        self.parsed_responses = [
            {"CommonPrefixes": [], "Contents": [
                {"Key": key, "Size": 3,
                 "LastModified": "2014-01-09T20:45:49.000Z"}]},
            {'ETag': '"c8afdb36c52cf4727836669019e69222-"',
             'Body': six.BytesIO(b'foo')}
        ]
        self.run_cmd(cmdline, expected_rc=0)
        # A missing destination directory is created on download.
        self.assertTrue(
            os.path.exists(os.path.join(non_existant_directory, key)))
Exemplo n.º 55
0
class TestCPCommand(BaseAWSCommandParamsTest):
    """Tests for the requests made by the ``aws s3 cp`` command."""

    prefix = 's3 cp '

    def setUp(self):
        super(TestCPCommand, self).setUp()
        self.files = FileCreator()

    def tearDown(self):
        super(TestCPCommand, self).tearDown()
        self.files.remove_all()

    def _put_object_response(self):
        # Canned response for a single successful PutObject call.
        return [{'ETag': '"c8afdb36c52cf4727836669019e69222"'}]

    def test_operations_used_in_upload(self):
        local_file = self.files.create_file('foo.txt', 'mycontent')
        cmdline = '%s %s s3://bucket/key.txt' % (self.prefix, local_file)
        self.parsed_responses = self._put_object_response()
        self.run_cmd(cmdline, expected_rc=0)
        # A plain upload should issue exactly one PutObject call.
        self.assertEqual(len(self.operations_called), 1, self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'PutObject')

    def test_key_name_added_when_only_bucket_provided(self):
        local_file = self.files.create_file('foo.txt', 'mycontent')
        cmdline = '%s %s s3://bucket/' % (self.prefix, local_file)
        self.parsed_responses = self._put_object_response()
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 1, self.operations_called)
        operation, params = self.operations_called[0]
        self.assertEqual(operation.name, 'PutObject')
        # The source filename becomes the key when none was given.
        self.assertEqual(params['Key'], 'foo.txt')
        self.assertEqual(params['Bucket'], 'bucket')

    def test_trailing_slash_appended(self):
        local_file = self.files.create_file('foo.txt', 'mycontent')
        # s3://bucket (no trailing slash) must behave exactly like
        # s3://bucket/.
        cmdline = '%s %s s3://bucket' % (self.prefix, local_file)
        self.parsed_responses = self._put_object_response()
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 1, self.operations_called)
        operation, params = self.operations_called[0]
        self.assertEqual(operation.name, 'PutObject')
        self.assertEqual(params['Key'], 'foo.txt')
        self.assertEqual(params['Bucket'], 'bucket')

    def test_upload_grants(self):
        local_file = self.files.create_file('foo.txt', 'mycontent')
        cmdline = ('%s %s s3://bucket/key.txt --grants read=id=foo '
                   'full=id=bar readacl=id=biz writeacl=id=baz' %
                   (self.prefix, local_file))
        self.parsed_responses = self._put_object_response()
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 1,
                         self.operations_called)
        operation, params = self.operations_called[0]
        self.assertEqual(operation.name, 'PutObject')
        # Each --grants entry maps to its own PutObject parameter.
        self.assertDictEqual(
            params,
            {'Key': u'key.txt', 'Bucket': u'bucket', 'GrantRead': u'id=foo',
             'GrantFullControl': u'id=bar', 'GrantReadACP': u'id=biz',
             'GrantWriteACP': u'id=baz', 'ContentType': u'text/plain',
             'Body': mock.ANY}
        )

    def test_upload_expires(self):
        local_file = self.files.create_file('foo.txt', 'mycontent')
        cmdline = ('%s %s s3://bucket/key.txt --expires 90' %
                   (self.prefix, local_file))
        self.parsed_responses = self._put_object_response()
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 1,
                         self.operations_called)
        operation, params = self.operations_called[0]
        self.assertEqual(operation.name, 'PutObject')
        self.assertEqual(params['Key'], 'key.txt')
        self.assertEqual(params['Bucket'], 'bucket')
        # --expires is passed through verbatim.
        self.assertEqual(params['Expires'], '90')

    def test_operations_used_in_download_file(self):
        self.parsed_responses = [
            {"ContentLength": "100", "LastModified": "00:00:00Z"},
            {'ETag': '"foo-1"', 'Body': six.BytesIO(b'foo')},
        ]
        cmdline = '%s s3://bucket/key.txt %s' % (self.prefix,
                                                 self.files.rootdir)
        self.run_cmd(cmdline, expected_rc=0)
        # A single-object download is HeadObject followed by GetObject.
        self.assertEqual(len(self.operations_called), 2, self.operations_called)
        self.assertEqual(
            [op.name for op, _ in self.operations_called],
            ['HeadObject', 'GetObject'])

    def test_operations_used_in_recursive_download(self):
        self.parsed_responses = [
            {'ETag': '"foo-1"', 'Contents': [], 'CommonPrefixes': []},
        ]
        cmdline = '%s s3://bucket/key.txt %s --recursive' % (
            self.prefix, self.files.rootdir)
        self.run_cmd(cmdline, expected_rc=0)
        # ListObjects returned nothing to download, so it is the only
        # operation that was called.
        self.assertEqual(len(self.operations_called), 1, self.operations_called)
        self.assertEqual(self.operations_called[0][0].name, 'ListObjects')

    def test_website_redirect_ignore_paramfile(self):
        local_file = self.files.create_file('foo.txt', 'mycontent')
        cmdline = '%s %s s3://bucket/key.txt --website-redirect %s' % (
            self.prefix, local_file, 'http://someserver')
        self.parsed_responses = self._put_object_response()
        self.run_cmd(cmdline, expected_rc=0)
        # The URL itself must be used, not fetched as a paramfile.
        self.assertEqual(
            self.operations_called[0][1]['WebsiteRedirectLocation'],
            'http://someserver'
        )

    def test_metadata_directive_copy(self):
        self.parsed_responses = [
            {"ContentLength": "100", "LastModified": "00:00:00Z"},
            {'ETag': '"foo-1"'},
        ]
        cmdline = ('%s s3://bucket/key.txt s3://bucket/key2.txt'
                   ' --metadata-directive REPLACE' % self.prefix)
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 2,
                         self.operations_called)
        self.assertEqual(
            [op.name for op, _ in self.operations_called],
            ['HeadObject', 'CopyObject'])
        self.assertEqual(self.operations_called[1][1]['MetadataDirective'],
                         'REPLACE')

    def test_no_metadata_directive_for_non_copy(self):
        local_file = self.files.create_file('foo.txt', 'mycontent')
        cmdline = '%s %s s3://bucket --metadata-directive REPLACE' % (
            self.prefix, local_file)
        self.parsed_responses = self._put_object_response()
        self.run_cmd(cmdline, expected_rc=0)
        self.assertEqual(len(self.operations_called), 1,
                         self.operations_called)
        operation, params = self.operations_called[0]
        self.assertEqual(operation.name, 'PutObject')
        # MetadataDirective only applies to server-side copies.
        self.assertNotIn('MetadataDirective', params)

    def test_cp_succeeds_with_mimetype_errors(self):
        local_file = self.files.create_file('foo.txt', 'mycontent')
        cmdline = '%s %s s3://bucket/key.txt' % (self.prefix, local_file)
        self.parsed_responses = self._put_object_response()
        with mock.patch('mimetypes.guess_type') as mock_guess_type:
            # This should throw a UnicodeDecodeError.
            mock_guess_type.side_effect = lambda x: b'\xe2'.decode('ascii')
            self.run_cmd(cmdline, expected_rc=0)
        # Because of the decoding error the command should have succeeded
        # just that there was no content type added.
        self.assertNotIn('ContentType', self.last_kwargs)
Exemplo n.º 56
0
class TestUploadBuild(BaseAWSCommandParamsTest):
    """Tests for the ``gamelift upload-build`` customization."""

    prefix = 'gamelift upload-build'

    def setUp(self):
        super(TestUploadBuild, self).setUp()
        self.files = FileCreator()
        # Every scenario uses the same three canned responses:
        # CreateBuild, RequestUploadCredentials, and the S3 PutObject.
        self.parsed_responses = [
            {'Build': {'BuildId': 'myid'}},
            {'StorageLocation': {
                'Bucket': 'mybucket',
                'Key': 'mykey'},
             'UploadCredentials': {
                'AccessKeyId': 'myaccesskey',
                'SecretAccessKey': 'mysecretkey',
                'SessionToken': 'mytoken'}},
            {}
        ]

    def tearDown(self):
        super(TestUploadBuild, self).tearDown()
        self.files.remove_all()

    def _build_cmdline(self, build_root, extra=''):
        """Return the upload-build command line for *build_root*."""
        cmdline = self.prefix
        cmdline += ' --name mybuild --build-version myversion'
        cmdline += ' --build-root %s' % build_root
        cmdline += extra
        return cmdline

    def _assert_successful_upload(self, stdout, expected_create_build):
        """Assert the CreateBuild/RequestUploadCredentials/PutObject flow."""
        # First the build is created.
        self.assertEqual(len(self.operations_called), 3)
        self.assertEqual(self.operations_called[0][0].name, 'CreateBuild')
        self.assertEqual(self.operations_called[0][1], expected_create_build)

        # Second the credentials are requested.
        self.assertEqual(
            self.operations_called[1][0].name, 'RequestUploadCredentials')
        self.assertEqual(
            self.operations_called[1][1], {'BuildId': 'myid'})

        # The build is then uploaded to S3.
        self.assertEqual(self.operations_called[2][0].name, 'PutObject')
        self.assertEqual(
            self.operations_called[2][1],
            {'Body': mock.ANY, 'Bucket': 'mybucket', 'Key': 'mykey'}
        )

        # Check the output of the command.
        self.assertIn(
            'Successfully uploaded %s to AWS GameLift' % self.files.rootdir,
            stdout)
        self.assertIn('Build ID: myid', stdout)

    def _assert_upload_failure(self, stderr, build_root):
        """Assert the empty/missing build-root error message is reported."""
        self.assertIn(
            'Fail to upload %s. '
            'The build root directory is empty or does not exist.\n'
            % build_root,
            stderr)

    def test_upload_build(self):
        self.files.create_file('tmpfile', 'Some contents')
        cmdline = self._build_cmdline(self.files.rootdir)
        stdout, stderr, rc = self.run_cmd(cmdline, expected_rc=0)
        self._assert_successful_upload(
            stdout, {'Name': 'mybuild', 'Version': 'myversion'})

    def test_upload_build_with_operating_system_param(self):
        self.files.create_file('tmpfile', 'Some contents')
        cmdline = self._build_cmdline(
            self.files.rootdir, extra=' --operating-system WINDOWS_2012')
        stdout, stderr, rc = self.run_cmd(cmdline, expected_rc=0)
        # --operating-system is forwarded to CreateBuild.
        self._assert_successful_upload(
            stdout, {'Name': 'mybuild', 'Version': 'myversion',
                     'OperatingSystem': 'WINDOWS_2012'})

    def test_upload_build_with_empty_directory(self):
        cmdline = self._build_cmdline(self.files.rootdir)
        stdout, stderr, rc = self.run_cmd(cmdline, expected_rc=255)
        self._assert_upload_failure(stderr, self.files.rootdir)

    def test_upload_build_with_nonexistent_directory(self):
        dir_not_exist = os.path.join(self.files.rootdir, 'does_not_exist')
        cmdline = self._build_cmdline(dir_not_exist)
        stdout, stderr, rc = self.run_cmd(cmdline, expected_rc=255)
        self._assert_upload_failure(stderr, dir_not_exist)

    def test_upload_build_with_nonprovided_directory(self):
        cmdline = self._build_cmdline('""')
        stdout, stderr, rc = self.run_cmd(cmdline, expected_rc=255)
        self._assert_upload_failure(stderr, '""')