def setUp(self):
    """Set up twine-login fixtures: temp pypirc, fake token, stubbed subprocess."""
    super(TestCodeArtifactLogin, self).setUp()
    self.file_creator = FileCreator()
    self.test_pypi_rc_path = self.file_creator.full_path('pypirc')
    # Ensure the parent directory of the temp pypirc exists.
    if not os.path.isdir(os.path.dirname(self.test_pypi_rc_path)):
        os.makedirs(os.path.dirname(self.test_pypi_rc_path))
    self.domain = 'domain'
    self.domain_owner = 'domain-owner'
    self.repository = 'repository'
    self.auth_token = 'auth-token'
    self.namespace = 'namespace'
    self.duration = 3600
    # Token expiry one hour from now; also kept as datetime for assertions.
    self.expiration = time.time() + self.duration
    self.expiration_as_datetime = parse_timestamp(self.expiration)
    # Redirect TwineLogin's pypirc lookup to the temp file above.
    self.pypi_rc_path_patch = mock.patch(
        'awscli.customizations.codeartifact.login.TwineLogin'
        '.get_pypi_rc_path'
    )
    self.pypi_rc_path_mock = self.pypi_rc_path_patch.start()
    self.pypi_rc_path_mock.return_value = self.test_pypi_rc_path
    # Prevent real subprocess invocations during login.
    self.subprocess_patch = mock.patch('subprocess.check_call')
    self.subprocess_mock = self.subprocess_patch.start()
def setUp(self):
    """Stub platform, network, and privilege checks for Uninstall tests."""
    self.region = 'us-east-1'
    # Pretend we are on Ubuntu Linux.
    self.system_patcher = mock.patch('platform.system')
    self.system = self.system_patcher.start()
    self.system.return_value = 'Linux'
    self.linux_distribution_patcher = mock.patch(
        'awscli.compat.linux_distribution')
    self.linux_distribution = self.linux_distribution_patcher.start()
    self.linux_distribution.return_value = ('Ubuntu', '', '')
    # Simulate a non-EC2 host: the metadata probe times out.
    self.urlopen_patcher = mock.patch(
        'awscli.customizations.codedeploy.utils.urlopen'
    )
    self.urlopen = self.urlopen_patcher.start()
    self.urlopen.side_effect = timeout('Not EC2 instance')
    # Run "as root"; create=True because geteuid may not exist on Windows.
    self.geteuid_patcher = mock.patch('os.geteuid', create=True)
    self.geteuid = self.geteuid_patcher.start()
    self.geteuid.return_value = 0
    # Avoid real file deletion during uninstall.
    self.remove_patcher = mock.patch('os.remove')
    self.remove = self.remove_patcher.start()
    self.args = Namespace()
    self.globals = Namespace()
    self.globals.region = self.region
    self.session = mock.MagicMock()
    self.uninstall = Uninstall(self.session)
def setUp(self):
    """Set up codeartifact login fixtures incl. NuGet naming and CLI runner."""
    self.file_creator = FileCreator()
    self.test_pypi_rc_path = self.file_creator.full_path('pypirc')
    # Ensure the parent directory of the temp pypirc exists.
    if not os.path.isdir(os.path.dirname(self.test_pypi_rc_path)):
        os.makedirs(os.path.dirname(self.test_pypi_rc_path))
    self.domain = 'domain'
    self.domain_owner = 'domain-owner'
    self.repository = 'repository'
    self.auth_token = 'auth-token'
    self.namespace = 'namespace'
    # NuGet source naming conventions used by the login command.
    self.nuget_index_url_fmt = '{endpoint}v3/index.json'
    self.nuget_source_name = self.domain + '/' + self.repository
    self.duration = 3600
    # Token expiry one hour from now; also kept as datetime for assertions.
    self.expiration = time.time() + self.duration
    self.expiration_as_datetime = parse_timestamp(self.expiration)
    # Redirect TwineLogin's pypirc lookup to the temp file above.
    self.pypi_rc_path_patch = mock.patch(
        'awscli.customizations.codeartifact.login.TwineLogin'
        '.get_pypi_rc_path')
    self.pypi_rc_path_mock = self.pypi_rc_path_patch.start()
    self.pypi_rc_path_mock.return_value = self.test_pypi_rc_path
    # No real subprocesses: stub both check_call and check_output.
    self.subprocess_patch = mock.patch('subprocess.check_call')
    self.subprocess_mock = self.subprocess_patch.start()
    self.subprocess_check_output_patch = mock.patch(
        'subprocess.check_output')
    self.subprocess_check_out_mock = \
        self.subprocess_check_output_patch.start()
    self.cli_runner = CLIRunner()
def setUp(self):
    """Stub platform/network detection and params for s3-location tests."""
    self.iam_user_arn = 'arn:aws:iam::012345678912:user/AWS/CodeDeploy/foo'
    self.region = 'us-east-1'
    self.arg_name = 's3-location'
    self.bucket = 'bucket'
    self.key = 'key'
    # Pretend we are on Ubuntu Linux.
    self.system_patcher = mock.patch('platform.system')
    self.system = self.system_patcher.start()
    self.system.return_value = 'Linux'
    self.linux_distribution_patcher = mock.patch(
        'awscli.compat.linux_distribution')
    self.linux_distribution = self.linux_distribution_patcher.start()
    self.linux_distribution.return_value = ('Ubuntu', '', '')
    # Simulate a non-EC2 host: the metadata probe times out.
    self.urlopen_patcher = mock.patch(
        'awscli.customizations.codedeploy.utils.urlopen')
    self.urlopen = self.urlopen_patcher.start()
    self.urlopen.side_effect = timeout('Not EC2 instance')
    self.globals = mock.MagicMock()
    self.session = mock.MagicMock()
    self.params = Namespace()
    self.params.session = self.session
def setUp(self):
    """Capture stderr; prepare (but do not start) a stdout patch."""
    self.session = FakeSession()
    self.emitter = mock.Mock()
    self.emitter.emit.return_value = []
    self.stdout = six.StringIO()
    self.stderr = six.StringIO()
    self.stdout_patch = mock.patch('sys.stdout', self.stdout)
    # NOTE(review): the stdout patch is created but its start() is
    # commented out, so sys.stdout is never actually replaced — confirm
    # whether this is deliberate (and whether tearDown tries to stop it,
    # which would raise for an unstarted patcher).
    #self.stdout_patch.start()
    self.stderr_patch = mock.patch('sys.stderr', self.stderr)
    self.stderr_patch.start()
def setUp(self):
    """Stub subprocess, file I/O, env, and S3 for agent-install tests."""
    self.popen_patcher = mock.patch('subprocess.Popen')
    self.popen = self.popen_patcher.start()
    self.check_call_patcher = mock.patch('subprocess.check_call')
    self.check_call = self.check_call_patcher.start()
    # Patch builtin open as used inside the systems module
    # (create=True because 'open' is not an attribute there until patched).
    self.open_patcher = mock.patch(
        'awscli.customizations.codedeploy.systems.open',
        mock.mock_open(), create=True)
    self.open = self.open_patcher.start()
    self.environ_patcher = mock.patch('os.environ')
    self.environ = self.environ_patcher.start()
    self.environ.copy.return_value = dict()
    self.config_dir = '/etc/codedeploy-agent/conf'
    self.config_file = 'codedeploy.onpremises.yml'
    self.config_path = '{0}/{1}'.format(self.config_dir, self.config_file)
    self.installer = 'install'
    self.bucket = 'bucket'
    self.key = 'key'
    self.region = 'us-east-1'
    # Canned credentials the installer should export to the subprocess env.
    self.access_key_id = 'ACCESSKEYID'
    self.secret_access_key = 'SECRETACCESSKEY'
    self.session_token = 'SESSION_TOKEN'
    self.credentials = mock.MagicMock()
    self.credentials.access_key = self.access_key_id
    self.credentials.secret_key = self.secret_access_key
    self.credentials.token = self.session_token
    # Expected environment handed to the installer subprocess.
    self.environment = dict({
        'AWS_REGION': self.region,
        'AWS_ACCESS_KEY_ID': self.access_key_id,
        'AWS_SECRET_ACCESS_KEY': self.secret_access_key,
        'AWS_SESSION_TOKEN': self.session_token
    })
    # Fake S3 client that serves the install-script body.
    self.body = 'install-script'
    self.reader = mock.MagicMock()
    self.reader.read.return_value = self.body
    self.s3 = mock.MagicMock()
    self.s3.get_object.return_value = {'Body': self.reader}
    self.session = mock.MagicMock()
    self.session.create_client.return_value = self.s3
    self.session.get_credentials.return_value = self.credentials
    self.params = Namespace()
    self.params.session = self.session
    self.params.region = self.region
    self.params.bucket = self.bucket
    self.params.key = self.key
def setUp(self):
    """Point SSO logout's token and credential cache dirs at temp paths."""
    super(TestLogoutCommand, self).setUp()
    self.token_cache_dir = self.files.full_path('token-cache')
    self.token_cache_dir_patch = mock.patch(
        'awscli.customizations.sso.logout.SSO_TOKEN_DIR',
        self.token_cache_dir)
    self.token_cache_dir_patch.start()
    self.aws_creds_cache_dir = self.files.full_path('aws-creds-cache')
    self.aws_creds_cache_dir_patch = mock.patch(
        'awscli.customizations.sso.logout.AWS_CREDS_CACHE_DIR',
        self.aws_creds_cache_dir)
    self.aws_creds_cache_dir_patch.start()
def test_can_patch_env(self):
    """LD_LIBRARY_PATH must be scrubbed from the env the browser sees."""
    # The various edge case are tested in original_ld_library_path,
    # we're just checking that we're integrating everything together
    # correctly.
    fake_env = {'LD_LIBRARY_PATH': '/foo'}
    with mock.patch('os.environ', fake_env):
        with mock.patch('webbrowser.open_new_tab') as open_new_tab:
            seen_env = {}

            def record_environment(url):
                # Snapshot os.environ at the moment the browser opens.
                seen_env.update(os.environ)

            open_new_tab.side_effect = record_environment
            open_browser_with_original_ld_path('http://example.com')
    self.assertIsNone(seen_env.get('LD_LIBRARY_PATH'))
def setUp(self):
    """Patch the SSO token dir and the browser-open handler for login tests."""
    super(TestLoginCommand, self).setUp()
    self.token_cache_dir = self.files.full_path('token-cache')
    self.token_cache_dir_patch = mock.patch(
        'awscli.customizations.sso.utils.SSO_TOKEN_DIR',
        self.token_cache_dir
    )
    self.token_cache_dir_patch.start()
    # Replace the real handler so no browser is actually launched.
    self.open_browser_mock = mock.Mock(spec=OpenBrowserHandler)
    self.open_browser_patch = mock.patch(
        'awscli.customizations.sso.utils.OpenBrowserHandler',
        self.open_browser_mock,
    )
    self.open_browser_patch.start()
def test_rb_command_with_force_deletes_objects_in_bucket(self):
    """`rb --force` must first run `rm --recursive` against the bucket."""
    with mock.patch(self.cmd_name) as rm_command, \
            mock.patch(self.arch_name):
        # The patched RmCommand factory yields an instance whose
        # __call__ returns the command's RC; 0 signals success.
        rm_command.return_value.return_value = 0
        self.rb_command._run_main(
            self.parsed_args, parsed_globals=self.parsed_globals)
        # --force should translate into a recursive rm of the bucket.
        rm_command.return_value.assert_called_with(
            ['s3://mybucket/', '--recursive'], mock.ANY)
def setUp(self):
    """Stub filesystem, platform, and S3 pieces used by the Install command."""
    self.region = 'us-east-1'
    self.config_file = 'config-file'
    self.installer = 'install'
    self.bucket = 'aws-codedeploy-{0}'.format(self.region)
    self.key = 'latest/{0}'.format(self.installer)
    self.agent_installer = 's3://{0}/{1}'.format(self.bucket, self.key)
    # Pretend we are on Ubuntu Linux.
    self.system_patcher = mock.patch('platform.system')
    self.system = self.system_patcher.start()
    self.system.return_value = 'Linux'
    self.linux_distribution_patcher = mock.patch(
        'awscli.compat.linux_distribution')
    self.linux_distribution = self.linux_distribution_patcher.start()
    self.linux_distribution.return_value = ('Ubuntu', '', '')
    # Simulate a non-EC2 host: the metadata probe times out.
    self.urlopen_patcher = mock.patch(
        'awscli.customizations.codedeploy.utils.urlopen')
    self.urlopen = self.urlopen_patcher.start()
    self.urlopen.side_effect = timeout('Not EC2 instance')
    # Run "as root"; create=True because geteuid may not exist on Windows.
    self.geteuid_patcher = mock.patch('os.geteuid', create=True)
    self.geteuid = self.geteuid_patcher.start()
    self.geteuid.return_value = 0
    # No pre-existing config file and no real filesystem mutation.
    self.isfile_patcher = mock.patch('os.path.isfile')
    self.isfile = self.isfile_patcher.start()
    self.isfile.return_value = False
    self.makedirs_patcher = mock.patch('os.makedirs')
    self.makedirs = self.makedirs_patcher.start()
    self.copyfile_patcher = mock.patch('shutil.copyfile')
    self.copyfile = self.copyfile_patcher.start()
    self.open_patcher = mock.patch(
        'awscli.customizations.codedeploy.systems.open',
        mock.mock_open(), create=True)
    self.open = self.open_patcher.start()
    self.args = Namespace()
    self.args.override_config = False
    self.args.config_file = self.config_file
    self.args.agent_installer = None
    self.globals = Namespace()
    self.globals.region = self.region
    # Fake S3 client that serves the install-script body.
    self.body = 'install-script'
    self.reader = mock.MagicMock()
    self.reader.read.return_value = self.body
    self.s3 = mock.MagicMock()
    self.s3.get_object.return_value = {'Body': self.reader}
    self.session = mock.MagicMock()
    self.session.create_client.return_value = self.s3
    self.install = Install(self.session)
def setUp(self):
    """Stub botocore client creation and urlopen for get-game-session-log."""
    self.create_client_patch = mock.patch(
        "botocore.session.Session.create_client")
    self.mock_create_client = self.create_client_patch.start()
    self.session = get_session()
    self.client = mock.Mock()
    self.mock_create_client.return_value = self.client
    self.cmd = GetGameSessionLogCommand(self.session)
    self.contents = b"mycontents"
    self.file_creator = FileCreator()
    # The log download goes through urlopen; serve canned bytes instead.
    self.urlopen_patch = mock.patch(
        "awscli.customizations.gamelift.getlog.urlopen")
    self.urlopen_mock = self.urlopen_patch.start()
    self.urlopen_mock.return_value = six.BytesIO(self.contents)
def setUp(self):
    """Stub client creation (gamelift then s3) and S3Transfer uploads."""
    self.create_client_patch = mock.patch(
        "botocore.session.Session.create_client")
    self.mock_create_client = self.create_client_patch.start()
    self.session = get_session()
    self.gamelift_client = mock.Mock()
    self.s3_client = mock.Mock()
    # First create_client call returns gamelift, the second returns s3.
    self.mock_create_client.side_effect = [
        self.gamelift_client, self.s3_client]
    self.file_creator = FileCreator()
    # Avoid real uploads through S3Transfer.
    self.upload_file_patch = mock.patch(
        "awscli.customizations.gamelift.uploadbuild.S3Transfer.upload_file")
    self.upload_file_mock = self.upload_file_patch.start()
    self.cmd = UploadBuildCommand(self.session)
    self._setup_input_output()
def test_subscribe_when_bucket_needs_to_be_created(self):
    """A 404 from HeadBucket should be followed by a CreateBucket PUT."""
    # TODO: fix this patch when we have a better way to stub out responses
    with mock.patch('botocore.endpoint.Endpoint._send') as \
            http_session_send_patch:
        # Mock for HeadBucket request
        head_bucket_response = mock.Mock()
        head_bucket_response.status_code = 404
        head_bucket_response.content = b''
        head_bucket_response.headers = {}
        # Mock for CreateBucket request
        create_bucket_response = mock.Mock()
        create_bucket_response.status_code = 200
        create_bucket_response.content = b''
        create_bucket_response.headers = {}
        http_session_send_patch.side_effect = [
            head_bucket_response, create_bucket_response
        ]
        s3_client = self.driver.session.create_client('s3')
        bucket_helper = S3BucketHelper(s3_client)
        bucket_helper.prepare_bucket('mybucket')
        send_call_list = http_session_send_patch.call_args_list
        self.assertEqual(send_call_list[0][0][0].method, 'HEAD')
        # Since the HeadBucket fails with 404, the CreateBucket, which is
        # a PUT request, should be made next.
        self.assertEqual(send_call_list[1][0][0].method, 'PUT')
def test_multiple_configuration_recorders(self):
    """Renders one stanza per recorder: success, failure w/ error, stopped."""
    status = {
        'name': 'default',
        'recording': True,
        'lastStatus': 'SUCCESS'
    }
    self.recorder_status.append(status)
    status = {
        'name': 'default',
        'recording': True,
        'lastStatus': 'FAILURE',
        'lastErrorCode': '500',
        'lastErrorMessage': 'This is the error'
    }
    self.recorder_status.append(status)
    # A recorder that is simply switched off (no status fields).
    status = {'name': 'default', 'recording': False}
    self.recorder_status.append(status)
    expected_output = ('Configuration Recorders:\n\n'
                       'name: default\n'
                       'recorder: ON\n'
                       'last status: SUCCESS\n\n'
                       'name: default\n'
                       'recorder: ON\n'
                       'last status: FAILURE\n'
                       'error code: 500\n'
                       'message: This is the error\n\n'
                       'name: default\n'
                       'recorder: OFF\n\n'
                       'Delivery Channels:\n\n')
    with mock.patch('sys.stdout', six.StringIO()) as mock_stdout:
        self.cmd._run_main(self.parsed_args, self.parsed_globals)
    self.assertEqual(expected_output, mock_stdout.getvalue())
def test_error_message_when_directory_is_empty(self):
    """An empty build root should produce the upload-failure message."""
    expected_message = (
        "Fail to upload %s. "
        "The build root directory is empty or does not exist.\n"
        % (self.build_root)
    )
    with mock.patch("sys.stderr", six.StringIO()) as mock_stderr:
        self.cmd(self.args, self.global_args)
        self.assertEqual(mock_stderr.getvalue(), expected_message)
def test_throws_more_relevant_error_when_errno_1(self):
    """EPERM (errno 1) from os.utime should surface as SetFileUtimeError."""
    current_time = datetime.datetime.now(tzlocal())
    epoch_seconds = time.mktime(current_time.timetuple())
    with mock.patch('os.utime') as utime_mock:
        # errno 1 means the caller lacks permission on the file.
        utime_mock.side_effect = OSError(1, '')
        with self.assertRaises(SetFileUtimeError):
            set_file_utime('not_real_file', epoch_seconds)
def test_invalid_subcommand(self):
    """An unknown subcommand must exit and point the user at `aws help`."""
    with mock.patch('sys.stderr') as fake_stderr:
        with self.assertRaises(SystemExit):
            self.cmd(['no-exist-command'], None)
    # Reassemble everything written to stderr and look for the hint.
    written = ''.join(
        call[0][0] for call in fake_stderr.write.call_args_list)
    self.assertIn(HELP_BLURB, written)
def test_parse_verify_ssl_verify_turned_off(self):
    """--no-verify-ssl should leave verify_ssl False after resolution."""
    with mock.patch('os.environ', {}):
        fake_args = FakeParsedArgs(verify_ssl=False, ca_bundle=None)
        fake_session = FakeSession(
            session_vars={'ca_bundle': ('ca_bundle', 'AWS_CA_BUNDLE')})
        globalargs.resolve_verify_ssl(fake_args, fake_session)
        self.assertFalse(fake_args.verify_ssl)
def test_error_message_when_directory_is_empty(self):
    """An empty build root should print the upload-failure message."""
    with mock.patch('sys.stderr', six.StringIO()) as mock_stderr:
        self.cmd(self.args, self.global_args)
        self.assertEqual(
            mock_stderr.getvalue(),
            'Fail to upload %s. '
            'The build root directory is empty or does not exist.\n'
            % (self.build_root))
def run(self, cmdline):
    """Execute *cmdline* under the stubbed environment, capturing output."""
    with mock.patch('os.environ', self.env):
        with capture_output() as captured:
            result = self._do_run(cmdline)
            # Copy captured streams onto the result while the capture
            # context is still active.
            result.stdout = captured.stdout.getvalue()
            result.stderr = captured.stderr.getvalue()
            return result
def test_passes_through_other_os_errors(self):
    """Errnos other than 1 must propagate as the original OSError."""
    current_time = datetime.datetime.now(tzlocal())
    epoch_seconds = time.mktime(current_time.timetuple())
    with mock.patch('os.utime') as utime_mock:
        # errno 2 (ENOENT) is not special-cased; it should bubble up.
        utime_mock.side_effect = OSError(2, '')
        with self.assertRaises(OSError):
            set_file_utime('not_real_file', epoch_seconds)
def test_delivery_channel_mixed_multiple_delivery_info(self):
    """A failed stream delivery shows error details; others show status only."""
    name = 'default'
    success = {'lastStatus': 'SUCCESS'}
    failure = {
        'lastStatus': 'FAILURE',
        'lastErrorCode': '500',
        'lastErrorMessage': 'This is the error'
    }
    # Only the stream delivery fails; history and snapshot succeed.
    stream_delivery_status = failure
    history_delivery_status = success
    snapshot_delivery_status = success
    status = self._make_delivery_channel_status(
        name, stream_delivery_status=stream_delivery_status,
        history_delivery_status=history_delivery_status,
        snapshot_delivery_status=snapshot_delivery_status)
    self.channel_status.append(status)
    expected_output = ('Configuration Recorders:\n\n'
                       'Delivery Channels:\n\n'
                       'name: default\n'
                       'last stream delivery status: FAILURE\n'
                       'error code: 500\n'
                       'message: This is the error\n'
                       'last history delivery status: SUCCESS\n'
                       'last snapshot delivery status: SUCCESS\n\n')
    with mock.patch('sys.stdout', six.StringIO()) as mock_stdout:
        self.cmd._run_main(self.parsed_args, self.parsed_globals)
    self.assertEqual(expected_output, mock_stdout.getvalue())
def test_delivery_channel_success_multiple_delivery_info(self):
    """All three delivery types succeeding render status lines only."""
    name = 'default'
    success = {'lastStatus': 'SUCCESS'}
    stream_delivery_status = success
    history_delivery_status = success
    snapshot_delivery_status = success
    status = self._make_delivery_channel_status(
        name, stream_delivery_status=stream_delivery_status,
        history_delivery_status=history_delivery_status,
        snapshot_delivery_status=snapshot_delivery_status)
    self.channel_status.append(status)
    expected_output = ('Configuration Recorders:\n\n'
                       'Delivery Channels:\n\n'
                       'name: default\n'
                       'last stream delivery status: SUCCESS\n'
                       'last history delivery status: SUCCESS\n'
                       'last snapshot delivery status: SUCCESS\n\n')
    with mock.patch('sys.stdout', six.StringIO()) as mock_stdout:
        self.cmd._run_main(self.parsed_args, self.parsed_globals)
    self.assertEqual(expected_output, mock_stdout.getvalue())
def test_generate_json_skeleton_with_timestamp(self):
    """Timestamp members render as an epoch ISO-8601 placeholder."""
    parsed_args = mock.Mock()
    parsed_args.generate_cli_skeleton = 'input'
    # Minimal input shape containing a single timestamp member.
    input_shape = {
        'A': {
            'type': 'structure',
            'members': {
                'B': {
                    'type': 'timestamp'
                },
            }
        }
    }
    shape = DenormalizedStructureBuilder().with_members(
        input_shape).build_model()
    operation_model = mock.Mock(input_shape=shape)
    argument = GenerateCliSkeletonArgument(self.session, operation_model)
    with mock.patch('sys.stdout', six.StringIO()) as mock_stdout:
        rc = argument.generate_json_skeleton(
            call_parameters=None, parsed_args=parsed_args,
            parsed_globals=None)
        self.assertEqual(
            '{\n'
            '    "A": {\n'
            '        "B": "1970-01-01T00:00:00"\n'
            '    }\n'
            '}\n', mock_stdout.getvalue())
        # rc 0 means the skeleton was printed and the CLI should exit.
        self.assertEqual(rc, 0)
def test_tail_with_follow(self):
    """--follow keeps polling; a simulated Ctrl-C stops after two pages."""
    self.parsed_responses = [{
        "events": [{
            'eventId': 'event1',
            'logStreamName': self.stream_name,
            'message': self.message,
            'timestamp': self.response_log_timestamp,
            'ingestionTime': self.response_log_timestamp,
        }],
    }, {
        "events": [{
            'eventId': 'event2',
            'logStreamName': self.stream_name,
            'message': self.message,
            'timestamp': self.response_log_timestamp,
            'ingestionTime': self.response_log_timestamp,
        }],
    }]
    # The second sleep raises KeyboardInterrupt to break out of --follow.
    with mock.patch('time.sleep') as mock_sleep:
        mock_sleep.side_effect = [None, KeyboardInterrupt]
        stdout, _, _ = self.run_cmd(
            'logs tail %s --follow' % self.group_name)
        # Both pages were emitted, hence the formatted line appears twice.
        self.assertEqual(
            stdout,
            '%s %s %s' % (self.formatted_log_timestamp, self.stream_name,
                          self.message) * 2)
def setUp(self):
    """Stub the urllib3 transport so IMDS responses come from a local queue."""
    self._urllib3_patch = mock.patch(
        'botocore.httpsession.URLLib3Session.send')
    self._send = self._urllib3_patch.start()
    # Responses queued here are served one at a time by get_imds_response.
    self._imds_responses = []
    self._send.side_effect = self.get_imds_response
    self._region = 'us-mars-1a'
def setUp(self):
    """Stub botocore client creation and urlopen for get-game-session-log."""
    self.create_client_patch = mock.patch(
        'botocore.session.Session.create_client')
    self.mock_create_client = self.create_client_patch.start()
    self.session = get_session()
    self.client = mock.Mock()
    self.mock_create_client.return_value = self.client
    self.cmd = GetGameSessionLogCommand(self.session)
    self.contents = b'mycontents'
    self.file_creator = FileCreator()
    # The log download goes through urlopen; serve canned bytes instead.
    self.urlopen_patch = mock.patch(
        'awscli.customizations.gamelift.getlog.urlopen')
    self.urlopen_mock = self.urlopen_patch.start()
    self.urlopen_mock.return_value = six.BytesIO(self.contents)
def test_output_create_bucket(self):
    """Creating a missing bucket should announce the new bucket name."""
    bucket_with_prefix = 'MyBucket/MyPrefix'
    # HeadBucket reports the bucket as missing, triggering creation.
    self.s3_client.head_bucket.side_effect = self.bucket_no_exists_error
    self.s3_client._endpoint.region_name = 'us-east-1'
    with mock.patch('sys.stdout', StringIO()) as mock_stdout:
        self.helper.prepare_bucket(bucket_with_prefix)
    self.assertIn('Using new S3 bucket: MyBucket', mock_stdout.getvalue())
def test_parse_verify_ssl_default_value(self):
    """With no flag or env override, verify_ssl resolves to None."""
    with mock.patch('os.environ', {}):
        parsed_args = FakeParsedArgs(verify_ssl=True, ca_bundle=None)
        session_var_map = {'ca_bundle': ('ca_bundle', 'AWS_CA_BUNDLE')}
        session = FakeSession(session_vars=session_var_map)
        globalargs.resolve_verify_ssl(parsed_args, session)
        # None, so that botocore can apply its default logic.
        self.assertIsNone(parsed_args.verify_ssl)
def setUp(self):
    """Create an empty topic-tag JSON index and point TopicTagDB at it."""
    self.session = mock.Mock()
    self.file_creator = FileCreator()
    self.tags_dict = {}
    # Make a temporary json index to base information on
    self.json_index = self.file_creator.create_file('index.json', '')
    with open(self.json_index, 'w') as f:
        json.dump(self.tags_dict, f, indent=4, sort_keys=True)
    self.index_patch = mock.patch('awscli.topictags.TopicTagDB.index_file',
                                  self.json_index)
    self.dir_patch = mock.patch('awscli.topictags.TopicTagDB.topic_dir',
                                self.file_creator.rootdir)
    self.index_patch.start()
    self.dir_patch.start()
def test_tail_no_color_when_tty(self):
    """When is_a_tty() is True, the output carries ANSI color codes."""
    # NOTE(review): the method name says "no_color" but the expected
    # output below is colored (\x1b[32m / \x1b[36m) — confirm whether
    # the name or the expectation is the intended one.
    with mock.patch(
            'awscli.customizations.logs.tail.is_a_tty') as mock_is_a_tty:
        mock_is_a_tty.return_value = True
        stdout, _, _ = self.run_cmd('logs tail %s' % self.group_name)
        self.assertEqual(
            stdout,
            "\x1b[32m%s\x1b[0m \x1b[36m%s\x1b[0m %s" % (
                self.formatted_log_timestamp, self.stream_name,
                self.message))
def test_on_queued_failure_propogates_create_directory_error(self):
    """An OSError from makedirs surfaces as CreateDirectoryError."""
    # If makedirs() raises an OSError, we should propagate the exception
    # with a better worded CreateDirectoryError.  (The method name keeps
    # the historical "propogates" spelling; renaming would change the
    # test identifier.)
    with mock.patch('os.makedirs') as makedirs_patch:
        makedirs_patch.side_effect = OSError()
        with self.assertRaises(CreateDirectoryError):
            self.subscriber.on_queued(self.future)
    self.assertFalse(os.path.exists(self.directory_to_create))
def test_detects_if_history_exists(self):
    """`history show latest` must raise when no history DB file exists."""
    self.show_cmd = ShowCommand(self.session)
    self.parsed_args.command_id = 'latest'
    # Point the history env var at a path that was never created.
    db_filename = os.path.join(self.files.rootdir, 'name.db')
    with mock.patch('os.environ', {'AWS_CLI_HISTORY_FILE': db_filename}):
        # assertRaisesRegex: assertRaisesRegexp is a deprecated alias
        # that was removed in Python 3.12.
        with self.assertRaisesRegex(
                RuntimeError, 'Could not locate history'):
            self.show_cmd._run_main(self.parsed_args, self.parsed_globals)
def setUp(self):
    """Create a temp download target and stub urlopen with canned bytes."""
    super(TestGetGameSessionLog, self).setUp()
    self.files = FileCreator()
    self.filename = os.path.join(self.files.rootdir, 'myfile')
    # The log download goes through urlopen; serve canned bytes instead.
    self.urlopen_patch = mock.patch(
        'awscli.customizations.gamelift.getlog.urlopen')
    self.contents = b'My Contents'
    self.urlopen_mock = self.urlopen_patch.start()
    self.urlopen_mock.return_value = six.BytesIO(self.contents)
def test_error_message_when_directory_is_not_provided(self):
    """An empty --build-root value should print the upload-failure message."""
    self.args = ["--name", self.build_name,
                 "--build-version", self.build_version,
                 "--build-root", ""]
    with mock.patch("sys.stderr", six.StringIO()) as mock_stderr:
        self.cmd(self.args, self.global_args)
        self.assertEqual(
            mock_stderr.getvalue(),
            "Fail to upload %s. "
            "The build root directory is empty or does not exist.\n"
            % (""),
        )
def test_error_message_when_directory_does_not_exist(self):
    """A non-existent build root should print the upload-failure message."""
    dir_not_exist = os.path.join(self.build_root, "does_not_exist")
    self.args = ["--name", self.build_name,
                 "--build-version", self.build_version,
                 "--build-root", dir_not_exist]
    with mock.patch("sys.stderr", six.StringIO()) as mock_stderr:
        self.cmd(self.args, self.global_args)
        self.assertEqual(
            mock_stderr.getvalue(),
            "Fail to upload %s. "
            "The build root directory is empty or does not exist.\n"
            % (dir_not_exist),
        )
def test_errors_when_no_service_command(self):
    """An alias expanding to only a global param has no command: SystemExit."""
    alias_value = "--global-param=val"
    command_table = self.create_command_table(["myservice"])
    parser = self.create_parser(
        command_table, extra_params=["global-param"])
    alias_cmd = ServiceAliasCommand(
        self.alias_name, alias_value, self.session, command_table,
        parser)
    with self.assertRaises(SystemExit):
        # Even though we catch the system exit, a message will always
        # be forced to screen because it happened at system exit.
        # The patch is to ensure it does not get displayed by nosetests.
        with mock.patch("sys.stderr"):
            alias_cmd([], FakeParsedArgs(command=self.alias_name))
def test_error_message_when_directory_is_not_provided(self):
    """An empty --build-root value should print the upload-failure message."""
    self.args = [
        '--name', self.build_name, '--build-version',
        self.build_version, '--build-root', ''
    ]
    with mock.patch('sys.stderr', six.StringIO()) as mock_stderr:
        self.cmd(self.args, self.global_args)
        self.assertEqual(
            mock_stderr.getvalue(),
            'Fail to upload %s. '
            'The build root directory is empty or does not exist.\n'
            % ('')
        )
def test_error_message_when_directory_does_not_exist(self):
    """A non-existent build root should print the upload-failure message."""
    dir_not_exist = os.path.join(self.build_root, 'does_not_exist')
    self.args = [
        '--name', self.build_name, '--build-version',
        self.build_version, '--build-root', dir_not_exist
    ]
    with mock.patch('sys.stderr', six.StringIO()) as mock_stderr:
        self.cmd(self.args, self.global_args)
        self.assertEqual(
            mock_stderr.getvalue(),
            'Fail to upload %s. '
            'The build root directory is empty or does not exist.\n'
            % (dir_not_exist)
        )
def test_no_attach_history_handler_when_no_sqlite3(
        self, mock_recorder, mock_sqlite3):
    """With sqlite3 unavailable, warn on stderr and attach no handler."""
    mock_session = mock.Mock(Session)
    mock_session.get_scoped_config.return_value = {
        'cli_history': 'enabled'
    }
    parsed_args = argparse.Namespace()
    parsed_args.command = 's3'
    with mock.patch('sys.stderr', StringIO()) as mock_stderr:
        attach_history_handler(
            session=mock_session, parsed_args=parsed_args)
        self.assertIn(
            'enabled but sqlite3 is unavailable',
            mock_stderr.getvalue())
    # Neither a recorder handler nor a DB connection should be made.
    self.assertFalse(mock_recorder.add_handler.called)
    self.assertFalse(mock_sqlite3.connect.called)
def test_create_directory_no_exists(self, mock_recorder, mock_db_sqlite3,
                                    mock_sqlite3):
    """Attaching the handler creates missing parent dirs for the DB file."""
    mock_session = mock.Mock(Session)
    mock_session.get_scoped_config.return_value = {
        'cli_history': 'enabled'
    }
    parsed_args = argparse.Namespace()
    parsed_args.command = 's3'
    # Point the history file into a directory that does not yet exist.
    directory_to_create = os.path.join(self.files.rootdir, 'create-dir')
    db_filename = os.path.join(directory_to_create, 'name.db')
    with mock.patch('os.environ', {'AWS_CLI_HISTORY_FILE': db_filename}):
        attach_history_handler(
            session=mock_session, parsed_args=parsed_args)
        self.assertEqual(mock_recorder.add_handler.call_count, 1)
        # It should create any missing parent directories of the
        # file as well.
        self.assertTrue(os.path.exists(directory_to_create))
        self.assertTrue(mock_db_sqlite3.connect.called)
def setUp(self):
    """Enable cli_history via a temp config file and patch binary stdout."""
    history_recorder = self._make_clean_history_recorder()
    super(BaseHistoryCommandParamsTest, self).setUp()
    self.history_recorder = history_recorder
    self.files = FileCreator()
    config_contents = (
        '[default]\n'
        'cli_history = enabled'
    )
    self.environ['AWS_CONFIG_FILE'] = self.files.create_file(
        'config', config_contents)
    self.environ['AWS_CLI_HISTORY_FILE'] = self.files.create_file(
        'history.db', '')
    self.driver = create_clidriver()
    # The run_cmd patches stdout with a StringIO object (similar to what
    # nose does). Therefore it will run into issues when
    # get_binary_stdout is called because it returns sys.stdout.buffer
    # for Py3 and StringIO does not have a buffer
    self.binary_stdout_patch = mock.patch(
        'awscli.utils.get_binary_stdout')
    mock_get_binary_stdout = self.binary_stdout_patch.start()
    self.binary_stdout = BytesIO()
    mock_get_binary_stdout.return_value = self.binary_stdout
def get_presigned_url_for_cmd(self, cmdline):
    """Run *cmdline* with frozen time so the presigned URL is deterministic."""
    with mock.patch('time.time', FROZEN_TIME):
        # Freeze datetime.datetime.utcnow too; signing reads both clocks.
        with mock.patch('datetime.datetime') as d:
            d.utcnow = FROZEN_DATETIME
            stdout = self.assert_params_for_cmd(cmdline, None)[0].strip()
            return stdout
def _apply_history_recorder_patch(self, module, history_recorder): patch_history_recorder = mock.patch( module + '.HISTORY_RECORDER', history_recorder) patch_history_recorder.start() self.addCleanup(patch_history_recorder.stop)