def test_multi(self):
    """Interleave progress of two files through MultiFileProgressMeter.

    Drives 'foo' and 'bar' in lockstep with a fake clock and a fixed
    60-column terminal, then checks the emitted "end" lines and the
    estimated aggregate download rate.

    Fix: ``assertEquals`` is a deprecated alias removed in Python 3.12;
    use ``assertEqual``.
    """
    now = 1379406823.9
    fo = MockStdout()
    # The lambda reads `now` late, so bumping the local advances the
    # meter's notion of time.
    with mock.patch('dnf.cli.progress._term_width', return_value=60), \
            mock.patch('dnf.cli.progress.time', lambda: now):
        p = dnf.cli.progress.MultiFileProgressMeter(fo)
        p.start(2, 30)
        for i in range(11):
            p.progress('foo', 10.0, float(i))
            self.assertEqual(len(fo.out), i*2 + 1)
            if i == 10:
                p.end('foo', 10, None)
            now += 0.5
            p.progress('bar', 20.0, float(i*2))
            self.assertEqual(len(fo.out), i*2 + 2 + (i == 10 and 2))
            if i == 10:
                p.end('bar', 20, 'some error')
            now += 0.5
    # check "end" events
    self.assertEqual([o for o in fo.out if o.endswith('\n')], [
        '(1/2): foo 1.0 B/s | 10 B 00:10 \n',
        '[FAILED] bar: some error \n'])
    # verify we estimated a sane rate (should be around 3 B/s)
    self.assertTrue(2.0 < p.rate < 4.0)
def _verify_expected_endpoint_url(region, bucket, key, s3_config,
                                  is_secure=True,
                                  customer_provided_endpoint=None,
                                  expected_url=None):
    """Build an S3 client under controlled credentials/config, issue a
    PutObject against a stubbed HTTP layer, and assert the URL the
    request was actually sent to.
    """
    fake_response = mock.Mock(status_code=200, headers={}, content=b'')
    env = {}
    # Replace os.environ wholesale so no real credentials or config leak in.
    with mock.patch('os.environ', env):
        env['AWS_ACCESS_KEY_ID'] = 'access_key'
        env['AWS_SECRET_ACCESS_KEY'] = 'secret_key'
        env['AWS_CONFIG_FILE'] = 'no-exist-foo'
        session = create_session()
        session.config_filename = 'no-exist-foo'
        config = Config(s3=s3_config) if s3_config is not None else None
        s3 = session.create_client(
            's3', region_name=region, use_ssl=is_secure, config=config,
            endpoint_url=customer_provided_endpoint)
        with mock.patch('botocore.endpoint.Session.send') as mock_send:
            mock_send.return_value = fake_response
            s3.put_object(Bucket=bucket, Key=key, Body=b'bar')
            sent_request = mock_send.call_args[0][0]
            assert_equal(sent_request.url, expected_url)
def download(self, errors=None, err=None):
    """Utility: call Base.download_packages() and return the list of
    relative URLs it used.

    :param errors: optional list of error strings; one is popped per
        simulated download and attached to the target.
    :param err: unused. Was a mutable default (``err={}``) — a classic
        Python pitfall — and is never read; kept only for backward
        compatibility of the signature.
    """
    urls = []

    def dlp(targets, failfast):
        # Stand-in for librepo.download_packages: record the URL and
        # optionally inject a per-download error.
        target, = targets
        self.assertEqual(target.__class__.__name__, 'PackageTarget')
        self.assertTrue(failfast)
        urls.append(target.relative_url)
        failure = errors and errors.pop(0)
        if failure:
            # PackageTarget.err is not writable
            targets[0] = Bunch(cbdata=target.cbdata, err=failure)

    def lock_dir(_dir):
        return os.path.join(support.USER_RUNDIR, dnf.const.PROGRAM_NAME)

    with mock.patch('librepo.download_packages', dlp), \
            mock.patch('dnf.lock._fit_lock_dir', lock_dir):
        try:
            self.base.download_packages([self.pkg])
        except dnf.exceptions.DownloadError:
            # Expected when `errors` were injected; URLs were already
            # recorded by the dlp stub, so swallow and return them.
            pass
    return urls
def test_print_versions(self):
    """print_versions() for two packages produces the expected fixture text."""
    base = support.MockYumBase()
    with mock.patch('sys.stdout') as stdout, \
            mock.patch('dnf.sack.rpmdb_sack', return_value=base.sack):
        dnf.cli.cli.print_versions(['pepper', 'tour'], base)
    # Reassemble everything written to the mocked stdout.
    writes = [call[1][0] for call in stdout.method_calls
              if call[0] == 'write']
    self.assertEqual(''.join(writes), VERSIONS_OUTPUT)
def test_content_sha256_set_if_md5_is_unavailable(self):
    """When MD5 is unavailable, a real payload SHA-256 is sent and
    content-md5 is omitted."""
    with mock.patch('botocore.auth.MD5_AVAILABLE', False), \
            mock.patch('botocore.handlers.MD5_AVAILABLE', False):
        self.client.put_object(Bucket='foo', Key='bar', Body='baz')
    sent_headers = self.get_sent_headers()
    self.assertNotEqual(sent_headers['x-amz-content-sha256'],
                        'UNSIGNED-PAYLOAD')
    self.assertNotIn('content-md5', sent_headers)
def test_drpm_download(self):
    """Delta RPM is used only when it is small enough relative to the rpm.

    Fix: ``assertEquals`` is a deprecated alias removed in Python 3.12;
    use ``assertEqual``.
    """
    # the testing drpm is about 150% of the target..
    self.pkg.repo.deltarpm = 1
    dnf.drpm.APPLYDELTA = '/bin/true'
    # 150% > 50%: fall back to the full rpm.
    with mock.patch('dnf.drpm.MAX_PERCENTAGE', 50):
        self.assertEqual(self.download(), ['tour-5-1.noarch.rpm'])
    # 150% < 200%: the delta is acceptable.
    with mock.patch('dnf.drpm.MAX_PERCENTAGE', 200):
        self.assertEqual(self.download(), ['drpms/tour-5-1.noarch.drpm'])
def test_incompatible_openssl_version(self):
    """An outdated OpenSSL triggers UnsupportedTLSVersionWarning naming
    the affected service."""
    with mock.patch('ssl.OPENSSL_VERSION_INFO', new=(0, 9, 8, 11, 15)), \
            mock.patch('warnings.warn') as mock_warn:
        self.session.create_client('iot-data', 'us-east-1')
    warning_message, warning_type = mock_warn.call_args[0][:2]
    # We should say something specific about the service.
    self.assertIn('iot-data', warning_message)
    self.assertEqual(warning_type, UnsupportedTLSVersionWarning)
def setUp(self):
    """Stub requests.get/put/delete with one canned 200 JSON response."""
    self.url = URL
    self.redmine = Redmine(self.url)
    self.response = mock.Mock(status_code=200, json=json_response(response))
    for verb in ('get', 'put', 'delete'):
        patcher = mock.patch('requests.%s' % verb,
                             return_value=self.response)
        patcher.start()
        self.addCleanup(patcher.stop)
def setUp(self):
    """Patch the container_ship module's collaborators and expose the
    started mocks as self.mock_* attributes."""
    prefix = 'freight_forwarder.container_ship.'
    # (attribute suffix, patch target) pairs; attribute names are part of
    # the fixture's interface and must not change.
    targets = [
        ('urlparse', 'urlparse'),
        ('utils', 'utils'),
        ('urllib', 'urllib3'),
        ('docker_client', 'docker.Client'),
        ('image', 'Image'),
    ]
    for attr, target in targets:
        setattr(self, 'patch_' + attr, mock.patch(prefix + target))
    self.injector = InjectorFactory()
    for attr, _ in targets:
        setattr(self, 'mock_' + attr,
                getattr(self, 'patch_' + attr).start())
def setUp(self):
    """Stub requests.get/post/put with a shared mock response."""
    self.url = URL
    self.redmine = Redmine(self.url)
    self.response = mock.Mock()
    for verb in ("get", "post", "put"):
        patcher = mock.patch("requests.%s" % verb,
                             return_value=self.response)
        patcher.start()
        self.addCleanup(patcher.stop)
def test_setup_stdout(self):
    # No stdout output can be seen when sys.stdout is patched, debug msgs,
    # etc. included.
    cases = [
        (None, False),         # unknown encoding: setup fails
        ('UTF-8', True),       # UTF-8 works
        ('ISO-8859-2', False), # non-UTF-8 encoding: setup fails
    ]
    for encoding, expected in cases:
        with mock.patch('sys.stdout') as mock_stdout:
            mock_stdout.encoding = encoding
            retval = dnf.i18n.setup_stdout()
            self.assertEqual(bool(retval), expected)
def assert_func(self, func, args, expected_ret):
    """Run `func` against a mocked-open '/foo/bar' and verify both the
    return value and that the file was opened once in binary mode."""
    opener = mock.mock_open()
    opener.return_value = self.data
    with mock.patch('builtins.open', opener, create=True):
        result = func('/foo/bar', **args)
    opener.assert_called_once_with('/foo/bar', 'rb')
    self.assertEqual(expected_ret, result, str(func))
def test_warning_in_swarm_mode(self):
    """warn_for_swarm_mode() logs exactly one warning on an active
    swarm node."""
    client = mock.create_autospec(docker.APIClient)
    client.info.return_value = {'Swarm': {'LocalNodeState': 'active'}}
    with mock.patch('compose.cli.main.log') as fake_log:
        warn_for_swarm_mode(client)
    assert fake_log.warning.call_count == 1
def test_use_correct_docstring_writer(self):
    """Rendering a ClientMethodDocstring routes through
    document_model_driven_method."""
    target = 'botocore.docs.docstring.document_model_driven_method'
    with mock.patch(target) as mock_writer:
        str(ClientMethodDocstring())
    self.assertTrue(mock_writer.called)
def test_use_correct_docstring_writer(self):
    """Rendering a WaiterDocstring routes through document_wait_method."""
    target = 'botocore.docs.docstring.document_wait_method'
    with mock.patch(target) as mock_writer:
        str(WaiterDocstring())
    self.assertTrue(mock_writer.called)
def test_instantiation(self):
    """Constructing the dynamodb resource registers the four expected
    event handlers (params copy, condition/attr-value injectors)."""
    # Instantiate the class.
    dynamodb_class = type(
        'dynamodb', (DynamoDBHighLevelResource, ServiceResource),
        {'meta': self.meta})
    with mock.patch('boto3.dynamodb.transform.TransformationInjector') \
            as mock_injector:
        dynamodb_class(client=self.client)
        # It should have fired the following events upon instantiation.
        # Order matters: call_args_list is compared as an exact sequence.
        event_call_args = self.events.register.call_args_list
        self.assertEqual(
            event_call_args,
            [mock.call(
                'provide-client-params.dynamodb',
                copy_dynamodb_params,
                unique_id='dynamodb-create-params-copy'),
             mock.call(
                'before-parameter-build.dynamodb',
                mock_injector.return_value.inject_condition_expressions,
                unique_id='dynamodb-condition-expression'),
             mock.call(
                'before-parameter-build.dynamodb',
                mock_injector.return_value.inject_attribute_value_input,
                unique_id='dynamodb-attr-value-input'),
             mock.call(
                'after-call.dynamodb',
                mock_injector.return_value.inject_attribute_value_output,
                unique_id='dynamodb-attr-value-output')]
        )
def setUp(self):
    """Create an s3 client and patch the HTTP session's send()."""
    super(TestS3GetBucketLifecycle, self).setUp()
    self.region = 'us-west-2'
    self.client = self.session.create_client('s3', self.region)
    # Patch stays active for the whole test; tearDown elsewhere stops it.
    self.session_send_patch = mock.patch('botocore.endpoint.Session.send')
    self.http_session_send_mock = self.session_send_patch.start()
def test_with_host_option_shorthand_equal(self):
    """A leading '=' in the shorthand --host value is stripped."""
    with mock.patch('subprocess.call') as fake_call:
        call_docker(['ps'], {'--host': '=tcp://mydocker.net:2333'})
    expected = ['docker', '--host', 'tcp://mydocker.net:2333', 'ps']
    assert fake_call.call_args[0][0] == expected
def test_with_http_host(self):
    """An http:// host URL is rewritten to tcp:// for the docker CLI."""
    with mock.patch('subprocess.call') as fake_call:
        call_docker(['ps'], {'--host': 'http://mydocker.net:2333'})
    expected = ['docker', '--host', 'tcp://mydocker.net:2333', 'ps']
    assert fake_call.call_args[0][0] == expected
def setUp(self):
    """Patch requests.get to return one canned 200 JSON response."""
    self.url = URL
    self.redmine = Redmine(self.url)
    canned = mock.Mock(status_code=200)
    canned.json.return_value = response
    self.response = canned
    patcher = mock.patch('requests.get', return_value=canned)
    patcher.start()
    self.addCleanup(patcher.stop)
def test_retry_dns_error(self):
    """DNS resolution failures are retried like other download errors."""
    dns_failure = socket.gaierror(-5, 'No address associated with hostname')
    with mock.patch('socket.gethostbyname', side_effect=dns_failure):
        crawler = get_crawler(SimpleSpider)
        with LogCapture() as log:
            yield crawler.crawl("http://example.com/")
        self._assert_retried(log)
def test_emit_response_received(self):
    """A successful DescribeRegions call emits exactly one
    response-received event carrying the raw response dict and the
    parsed response."""
    recording_handler = RecordingHandler()
    self.client.meta.events.register(
        'response-received.ec2.DescribeRegions', recording_handler.record)
    with mock.patch(
            'botocore.httpsession.URLLib3Session.send') as mock_send:
        # Minimal well-formed EC2 XML body so parsing succeeds.
        response_body = (
            b'<?xml version="1.0" ?>'
            b'<DescribeRegionsResponse xmlns="">'
            b'</DescribeRegionsResponse>'
        )
        mock_send.return_value = mock.Mock(
            status_code=200, headers={}, content=response_body)
        self.client.describe_regions()
    self.assertEqual(
        recording_handler.recorded_events,
        [
            ('response-received.ec2.DescribeRegions', {
                'exception': None,
                'response_dict': {
                    'body': response_body,
                    'headers': {},
                    'context': mock.ANY,
                    'status_code': 200
                },
                'parsed_response': {
                    'ResponseMetadata': mock.ANY},
                'context': mock.ANY
            })
        ]
    )
def setUp(self):
    """Create an s3 client and patch the HTTP session's send()."""
    super(BaseS3OperationTest, self).setUp()
    self.region = 'us-west-2'
    self.client = self.session.create_client('s3', self.region)
    # Started here; stopped by the base class / tearDown elsewhere.
    self.session_send_patch = mock.patch('botocore.endpoint.Session.send')
    self.http_session_send_mock = self.session_send_patch.start()
def test_inspect_stack(self):
    """If inspect.stack() raises, the deprecation machinery warns about
    the undetectable parent module instead of crashing."""
    with mock.patch('inspect.stack', side_effect=IndexError):
        with warnings.catch_warnings(record=True) as caught:
            DeprecatedName = create_deprecated_class('DeprecatedName',
                                                     NewName)

            class SubClass(DeprecatedName):
                pass

    self.assertIn("Error detecting parent module", str(caught[0].message))
def test_handle(self):
    """task.handle() stores exactly one item for self.item.

    Fix: the original assigned lambdas directly to os.path.getsize and
    os.path.getmtime, leaking the stubs into every subsequently-run
    test; mock.patch restores the real functions on exit.
    """
    with mock.patch('os.path.getsize', lambda p: self.data["size"]), \
            mock.patch('os.path.getmtime', lambda p: 123412341234):
        opener = mock.mock_open()
        opener.return_value = io.BytesIO()
        with mock.patch("builtins.open", opener, create=True):
            self.task.handle()
    self.assertEqual(
        1,
        len(self.task.items_store.get_items_by_id(item_id=self.item.id)))
def assert_will_retry_n_times(self, method, num_retries):
    """Assert `method` is retried `num_retries` times before the final
    ClientError, i.e. num_retries + 1 HTTP sends in total.

    Fix: ``assertRaisesRegexp`` was deprecated since Python 3.2 and
    removed in 3.12; use ``assertRaisesRegex``.
    """
    num_responses = num_retries + 1
    with mock.patch('botocore.endpoint.Session.send') as mock_send:
        self.add_n_retryable_responses(mock_send, num_responses)
        with self.assertRaisesRegex(
                ClientError, 'reached max retries: %s' % num_retries):
            method()
        self.assertEqual(mock_send.call_count, num_responses)
def test_provided_endpoint_url_is_not_mutated(self):
    """An explicit endpoint_url appears unchanged in the request URL."""
    s3 = self.session.create_client('s3', endpoint_url='https://foo.com')
    with mock.patch('botocore.endpoint.Session.send') as mock_send:
        mock_send.return_value = self.http_response
        s3.put_object(Bucket='mybucket', Key='mykey', Body='mybody')
    sent_request = mock_send.call_args[0][0]
    self.assertEqual('https://foo.com/mybucket/mykey', sent_request.url)
def test_throttle(self):
    """A percentage throttle is applied against the configured bandwidth
    when the librepo handle is built (50% of 10M == 5M).

    Fix: ``assertEquals`` is a deprecated alias removed in Python 3.12;
    use ``assertEqual``.
    """
    self.repo.throttle = '50%'
    self.repo.bandwidth = '10M'
    self.assertEqual(self.repo.throttle, 0.5)
    self.assertEqual(self.repo.bandwidth, 10 << 20)
    opts = {}
    # Capture every setopt(key, value) into a plain dict.
    with mock.patch('librepo.Handle.setopt', opts.__setitem__):
        self.repo.get_handle()
    self.assertEqual(opts[librepo.LRO_MAXSPEED], 5 << 20)
def test_dispatch_generic_connection_error(self):
    """A generic ConnectionError is translated to
    errors.ConnectionErrorGeneric by friendly_error_message()."""
    silent_call = mock.patch(
        'compose.cli.command.call_silently', autospec=True,
        side_effect=[0, 1])
    with pytest.raises(errors.ConnectionErrorGeneric):
        with silent_call:
            with friendly_error_message():
                raise ConnectionError()
def test_default_endpoint_for_path_addressing(self):
    """Path addressing style places the bucket in the URL path."""
    s3 = self.session.create_client(
        's3', config=Config(s3={'addressing_style': 'path'}))
    with mock.patch('botocore.endpoint.Session.send') as mock_send:
        mock_send.return_value = self.http_response
        s3.put_object(Bucket='mybucket', Key='mykey', Body='mybody')
    sent_request = mock_send.call_args[0][0]
    self.assertEqual('https://s3.amazonaws.com/mybucket/mykey',
                     sent_request.url)
def test_download_file_proxies_to_transfer_object(self):
    """inject.download_file delegates to the context-managed
    S3Transfer.download_file with lower-cased kwargs."""
    with mock.patch('boto3.s3.inject.S3Transfer') as transfer:
        inject.download_file(
            mock.sentinel.CLIENT, Bucket='bucket', Key='key',
            Filename='filename')
    managed = transfer.return_value.__enter__.return_value
    managed.download_file.assert_called_with(
        bucket='bucket', key='key', filename='filename',
        extra_args=None, callback=None)
def test_catch_retryable_http_errors(self):
    """Connection errors against IMDS are retried (2 sends) and the
    fetcher then returns empty credentials."""
    with mock.patch('botocore.httpsession.URLLib3Session.send') as send_mock:
        fetcher = InstanceMetadataFetcher()
        send_mock.side_effect = ConnectionClosedError(endpoint_url="foo")
        creds = fetcher.retrieve_iam_role_credentials()
    self.assertEqual(send_mock.call_count, 2)
    base_url = fetcher.get_base_url()
    for call_instance in send_mock.call_args_list:
        self.assertTrue(call_instance[0][0].url.startswith(base_url))
    self.assertEqual(creds, {})
def test_json_encoding_override(self):
    """HTTPSession.json honors a manually-set response encoding when
    decoding the body."""
    json_text = u"{\"test\": \"Α and Ω\"}".encode("cp949")
    with patch('requests.Response.content',
               new_callable=PropertyMock) as mock_content:
        mock_content.return_value = json_text
        res = requests.Response()
        res.encoding = "cp949"
        decoded = HTTPSession.json(res)
        self.assertEqual(decoded, {u"test": u"\u0391 and \u03a9"})
def with_host(m: CephadmOrchestrator, name, addr='1.2.3.4', refresh_hosts=True):
    # type: (CephadmOrchestrator, str, str, bool) -> None
    # NOTE(review): the old type comment listed only two parameters; it is
    # updated to match the signature. This is a generator fixture —
    # presumably wrapped by contextlib.contextmanager at the call/decoration
    # site outside this view; confirm.
    # Adds host `name` (with DNS resolution stubbed to `addr`), optionally
    # refreshes hosts/daemons, yields to the caller, then removes the host.
    with mock.patch("cephadm.utils.resolve_ip", return_value=addr):
        wait(m, m.add_host(HostSpec(hostname=name)))
        if refresh_hosts:
            CephadmServe(m)._refresh_hosts_and_daemons()
        yield
        wait(m, m.remove_host(name))
def test_player_silent_audio_driver(player):
    """With no audio driver available, playback still reaches on_eos and
    on_player_eos."""
    with mock.patch('pyglet.media.player.get_audio_driver',
                    return_value=None):
        source = SilentTestSource(.1)
        player.queue(source)
        player.play()
        player.wait_for_all_events(1., 'on_eos', 'on_player_eos')
def test_provided_endpoint_url_for_path_addressing(self):
    """Path addressing with a custom endpoint keeps the bucket in the
    URL path."""
    s3 = self.session.create_client(
        's3', config=Config(s3={'addressing_style': 'path'}),
        endpoint_url='https://foo.amazonaws.com')
    with mock.patch('botocore.endpoint.Session.send') as mock_send:
        mock_send.return_value = self.http_response
        s3.put_object(Bucket='mybucket', Key='mykey', Body='mybody')
    sent_request = mock_send.call_args[0][0]
    self.assertEqual('https://foo.amazonaws.com/mybucket/mykey',
                     sent_request.url)
def setUp(self):
    """Patch Endpoint.make_request to return a canned 200 plus an empty
    parsed response, and build a dummy-credential session."""
    self.http_response = AWSResponse(None, 200, {}, None)
    self.parsed_response = {}
    self.make_request_patch = mock.patch(
        'botocore.endpoint.Endpoint.make_request')
    self.make_request_mock = self.make_request_patch.start()
    self.make_request_mock.return_value = (self.http_response,
                                           self.parsed_response)
    self.session = Session(
        aws_access_key_id='dummy',
        aws_secret_access_key='dummy',
        region_name='us-east-1',
    )
def test_custom_timeout_error(self):
    """Both read-timeout shapes produce exactly one error log citing the
    COMPOSE_HTTP_TIMEOUT value.

    Refactor: the two originally duplicated with-blocks differed only in
    the exception raised; iterate over factories instead.
    """
    os.environ['COMPOSE_HTTP_TIMEOUT'] = '123'
    client = docker_client(os.environ)
    timeout_factories = [
        # requests-level timeout wrapping urllib3's ReadTimeoutError
        lambda: errors.RequestsConnectionError(
            errors.ReadTimeoutError(None, None, None)),
        # bare requests ReadTimeout
        lambda: errors.ReadTimeout(),
    ]
    for make_timeout in timeout_factories:
        with mock.patch('compose.cli.errors.log') as fake_log:
            with pytest.raises(errors.ConnectionError):
                with errors.handle_connection_errors(client):
                    raise make_timeout()
        assert fake_log.error.call_count == 1
        assert '123' in fake_log.error.call_args[0][0]
def test_stale_connections(self, cephadm_module):
    """check_host() must recreate a dead remoto connection instead of
    reusing it (the old code reused it and blew up)."""
    class Connection(object):
        """
        A mocked connection class that only allows the use of the
        connection once. If you attempt to use it again via a _check,
        it'll explode (go boom!).

        The old code triggers the boom. The new code checks the
        has_connection and will recreate the connection.
        """
        # Single-use fuse: trips on the second _check against the same
        # Connection instance.
        fuse = False

        @staticmethod
        def has_connection():
            # Always report "down" so the new code path must reconnect.
            return False

        def import_module(self, *args, **kargs):
            return mock.Mock()

        @staticmethod
        def exit():
            pass

    def _check(conn, *args, **kargs):
        # First call arms the fuse; a second call on the same conn raises.
        if conn.fuse:
            raise Exception("boom: connection is dead")
        else:
            conn.fuse = True
        return '{}', None, 0

    # side_effect supplies a fresh Connection each time one is created.
    with mock.patch("remoto.Connection",
                    side_effect=[Connection(), Connection(), Connection()]):
        with mock.patch("remoto.process.check", _check):
            with with_host(cephadm_module, 'test', refresh_hosts=False):
                code, out, err = cephadm_module.check_host('test')
                # First should succeed.
                assert err is None
                # On second it should attempt to reuse the connection,
                # where the connection is "down" so will recreate the
                # connection. The old code will blow up here triggering
                # the BOOM!
                code, out, err = cephadm_module.check_host('test')
                assert err is None
def test_reference_help(self):
    """help() for a reference attribute contains the generated
    reference description."""
    sample_resource = self.resource.Sample('id')
    with mock.patch('sys.stdout', six.StringIO()) as mock_stdout:
        help(sample_resource.__class__.related_sample)
    reference_docstring = mock_stdout.getvalue()
    expected_lines = [
        " (:py:class:`Sample`) The related related_sample "
        "if set, otherwise ``None``.",
    ]
    self.assert_contains_lines_in_order(expected_lines,
                                        reference_docstring)
def test_us_gov_with_virtual_addressing(self):
    """GovCloud with virtual addressing puts the bucket in the hostname."""
    s3 = self.session.create_client(
        's3', region_name='us-gov-west-1',
        config=Config(s3={'addressing_style': 'virtual'}))
    with mock.patch('botocore.endpoint.Session.send') as mock_send:
        mock_send.return_value = self.http_response
        s3.put_object(Bucket='mybucket', Key='mykey', Body='mybody')
    sent_request = mock_send.call_args[0][0]
    self.assertEqual(
        'https://mybucket.s3-us-gov-west-1.amazonaws.com/mykey',
        sent_request.url)
def test_resize_handler_height_less(self):
    """When the terminal shrinks (25 -> 23 rows), the resize handler
    repositions the cursor and pads the stream with blank lines."""
    with mock.patch('%s.height' % TERMINAL,
                    new_callable=mock.PropertyMock) as mockheight:
        # First read (25) during Manager creation, second (23) in handler.
        mockheight.side_effect = [25, 23]
        manager = _manager.Manager(stream=self.tty.stdout,
                                   counter_class=MockCounter)
        counter3 = MockCounter(manager=manager)
        manager.counters[counter3] = 3
        manager.scroll_offset = 4
        with mock.patch('enlighten._manager.Manager._set_scroll_area') as ssa:
            manager._resize_handler()
        self.assertEqual(ssa.call_count, 1)
        self.assertEqual(manager.height, 23)
        # Cursor moved to the new scroll boundary...
        self.assertEqual(self.tty.stdread.readline(),
                         manager.term.move(19, 0) + '\n')
        # ...followed by the blank padding lines.
        for _ in range(5):
            self.assertEqual(self.tty.stdread.readline(), '\n')
        self.assertEqual(counter3.calls,
                         ['refresh(flush=False, elapsed=None)'])
def setUp(self):
    """s3control client with Endpoint._send patched to return an empty
    200 response."""
    super(S3ControlOperationTest, self).setUp()
    self.region = 'us-west-2'
    self.client = self.session.create_client('s3control', self.region)
    self.session_send_patch = mock.patch('botocore.endpoint.Endpoint._send')
    self.http_session_send_mock = self.session_send_patch.start()
    canned = mock.Mock(spec=AWSResponse)
    canned.status_code = 200
    canned.headers = {}
    canned.content = ''
    self.http_response = canned
    self.http_session_send_mock.return_value = canned
def test_resize_handler_height_only(self):
    """A height-only change does not rewrite the stream; height stays at
    the cached value because _set_scroll_area (which updates it) is
    mocked out."""
    with mock.patch('%s.height' % TERMINAL,
                    new_callable=mock.PropertyMock) as mockheight:
        # Sequence of heights the property reports across accesses.
        mockheight.side_effect = [25, 23, 28, 30, 30]
        manager = _manager.Manager(stream=self.tty.stdout,
                                   counter_class=MockCounter)
        counter3 = MockCounter(manager=manager)
        manager.counters[counter3] = 3
        manager.scroll_offset = 4
        with mock.patch('enlighten._manager.Manager._set_scroll_area') as ssa:
            manager._resize_handler()
        self.assertEqual(ssa.call_count, 1)
        # Height is set in _set_scroll_area which is mocked
        self.assertEqual(manager.height, 25)
        # Stream passes writes through untouched.
        self.tty.stdout.write('X\n')
        self.assertEqual(self.tty.stdread.readline(), 'X\n')
        self.assertEqual(counter3.calls,
                         ['refresh(flush=False, elapsed=None)'])
def with_host(m: CephadmOrchestrator, name, addr='1::4', refresh_hosts=True, rm_with_force=True):
    # Generator fixture: adds host `name` (DNS stubbed to `addr`),
    # optionally refreshes daemons and feeds agent metadata, yields to the
    # test body, then removes the host.
    # NOTE(review): presumably wrapped by contextlib.contextmanager at the
    # decoration site outside this view — confirm.
    with mock.patch("cephadm.utils.resolve_ip", return_value=addr):
        wait(m, m.add_host(HostSpec(hostname=name)))
        if refresh_hosts:
            CephadmServe(m)._refresh_hosts_and_daemons()
            receive_agent_metadata(m, name)
        yield
        wait(m, m.remove_host(name, force=rm_with_force))
def subject(self, **params):
    """Drive Twitch._check_for_rerun() against mocked stream metadata.

    Recognized params: offline (bool), stream_type (str), disable (bool).
    """
    offline = params.pop("offline", False)
    stream_type = params.pop("stream_type", "live")
    disable = params.pop("disable", True)
    with patch("streamlink.plugins.twitch.TwitchAPI.stream_metadata") as mock:
        mock.return_value = None if offline else {"type": stream_type}
        session = Streamlink()
        Twitch.bind(session, "tests.plugins.test_twitch")
        plugin = Twitch("https://www.twitch.tv/foo")
        plugin.options.set("disable-reruns", disable)
        return plugin._check_for_rerun()
def test_sub_resource_help(self):
    """help() on the sub-resource factory shows the generated
    constructor documentation in order."""
    with mock.patch('sys.stdout', six.StringIO()) as mock_stdout:
        help(self.resource.Sample)
    sub_resource_docstring = mock_stdout.getvalue()
    expected_lines = [
        ' Creates a Sample resource.::',
        " sample = myservice.Sample('name')",
        ' :type name: string',
        " :param name: The Sample's name identifier.",
        ' :rtype: :py:class:`MyService.Sample`',
        ' :returns: A Sample resource',
    ]
    self.assert_contains_lines_in_order(expected_lines,
                                        sub_resource_docstring)
def with_cephadm_module(module_options=None, store=None):
    """
    :param module_options: Set opts as if they were set before module.__init__ is called
    :param store: Set the store before module.__init__ is called
    """
    # Patch out everything that would talk to a real cluster or spawn
    # threads; the module is allocated with __new__ so options/store can
    # be seeded before __init__ runs.
    with mock.patch("cephadm.module.CephadmOrchestrator.get_ceph_option", get_ceph_option),\
            mock.patch("cephadm.services.osd.RemoveUtil._run_mon_cmd"), \
            mock.patch("cephadm.module.CephadmOrchestrator.get_osdmap"), \
            mock.patch("cephadm.services.osd.OSDService.get_osdspec_affinity", return_value='test_spec'), \
            mock.patch("cephadm.module.CephadmOrchestrator.remote"), \
            mock.patch("cephadm.agent.CephadmAgentHelpers._request_agent_acks"), \
            mock.patch("cephadm.agent.CephadmAgentHelpers._apply_agent", return_value=False), \
            mock.patch("cephadm.agent.CephadmAgentHelpers._agent_down", return_value=False), \
            mock.patch('cephadm.agent.CherryPyThread.run'):

        m = CephadmOrchestrator.__new__(CephadmOrchestrator)
        if module_options is not None:
            for k, v in module_options.items():
                m._ceph_set_module_option('cephadm', k, v)
        if store is None:
            store = {}
        # Provide minimal mon/mgr maps unless the caller supplied them.
        if '_ceph_get/mon_map' not in store:
            m.mock_store_set('_ceph_get', 'mon_map', {
                'modified': datetime_to_str(datetime_now()),
                'fsid': 'foobar',
            })
        if '_ceph_get/mgr_map' not in store:
            m.mock_store_set('_ceph_get', 'mgr_map', {
                'services': {
                    'dashboard': 'http://[::1]:8080',
                    'prometheus': 'http://[::1]:8081'
                },
                'modules': ['dashboard', 'prometheus'],
            })
        for k, v in store.items():
            m._ceph_set_store(k, v)

        m.__init__('cephadm', 0, 0)
        m._cluster_fsid = "fsid"
        m.event_loop = MockEventLoopThread()
        m.tkey = NamedTemporaryFile(prefix='test-cephadm-identity-')
        yield m
def test_resize(self):
    """
    Resize lock must be False for handler to run
    Terminal size is cached unless resize handler runs
    """
    manager = _manager.Manager(stream=self.tty.stdout,
                               counter_class=MockCounter)
    counter3 = MockCounter(manager=manager)
    manager.counters[counter3] = 3
    manager.scroll_offset = 4
    term = manager.term
    with mock.patch('%s.width' % TERMINAL,
                    new_callable=mock.PropertyMock) as mockwidth:
        mockwidth.return_value = 70
        # With the lock held, staging a resize is a no-op.
        manager.resize_lock = True
        with mock.patch('enlighten._manager.Manager._set_scroll_area') as ssa:
            manager._stage_resize()
        self.assertFalse(ssa.called)
        # Width still reports the cached pre-resize value.
        self.assertEqual(manager.width, 80)
        self.assertTrue(manager.resize_lock)
        self.tty.stdout.write(u'X\n')
        self.assertEqual(self.tty.stdread.readline(), 'X\n')
        self.assertEqual(counter3.calls, [])
        # Release the lock: the handler now runs and picks up 70 columns.
        manager.resize_lock = False
        with mock.patch('enlighten._manager.Manager._set_scroll_area') as ssa:
            manager._resize_handler()
        self.assertEqual(ssa.call_count, 1)
        self.assertEqual(manager.width, 70)
        self.assertFalse(manager.resize_lock)
        self.tty.stdout.write(u'X\n')
        self.assertEqual(self.tty.stdread.readline(),
                         term.move(21, 0) + term.clear_eos + 'X\n')
        self.assertEqual(counter3.calls,
                         ['refresh(flush=False, elapsed=None)'])
def with_cephadm_module(module_options=None, store=None):
    """
    :param module_options: Set opts as if they were set before module.__init__ is called
    :param store: Set the store before module.__init__ is called
    """
    # Patch out cluster communication; allocate with __new__ so options
    # and store can be seeded before __init__ runs.
    with mock.patch("cephadm.module.CephadmOrchestrator.get_ceph_option", get_ceph_option),\
            mock.patch("cephadm.module.CephadmOrchestrator.remote"), \
            mock.patch("cephadm.services.osd.RemoveUtil._run_mon_cmd"), \
            mock.patch("cephadm.module.CephadmOrchestrator.send_command"), \
            mock.patch("cephadm.module.CephadmOrchestrator.get_osdmap"), \
            mock.patch("cephadm.module.CephadmOrchestrator.mon_command", mon_command):

        m = CephadmOrchestrator.__new__(CephadmOrchestrator)
        if module_options is not None:
            for k, v in module_options.items():
                m._ceph_set_module_option('cephadm', k, v)
        if store is None:
            store = {}
        # Provide a minimal mon map unless the caller supplied one.
        if '_ceph_get/mon_map' not in store:
            store['_ceph_get/mon_map'] = {
                'modified': datetime.datetime.utcnow().strftime(CEPH_DATEFMT),
                'fsid': 'foobar',
            }
        for k, v in store.items():
            m._ceph_set_store(k, v)

        m.__init__('cephadm', 0, 0)
        m._cluster_fsid = "fsid"
        yield m
def test_proxy_ssl_context_uses_check_hostname(self):
    """The SSL context handed to the proxy manager keeps hostname
    checking enabled."""
    with mock.patch('botocore.httpsession.create_urllib3_context'):
        session = URLLib3Session(
            proxies={'https': 'https://proxy.com'},
            client_cert=('/some/cert', '/some/key'),
            proxies_config={'proxy_client_cert': "path/to/cert"},
        )
        self.request.url = 'https://example.com/'
        session.send(self.request.prepare())
    last_call = self.proxy_manager_fun.call_args[-1]
    self.assertIs(last_call['ssl_context'].check_hostname, True)
def test_write(self):
    """Manager.write positions the cursor and clears the line before
    emitting the message."""
    msg = 'test message'
    with mock.patch('enlighten._manager.Manager._set_scroll_area') as ssa:
        manager = _manager.Manager(stream=self.tty.stdout)
        term = manager.term
        manager.write(msg, position=3)
        self.tty.stdout.write('X\n')
        # Carriage return is getting converted to newline
        expected = term.move(22, 0) + '\r' + term.clear_eol + msg + 'X\n'
        self.assertEqual(self.tty.stdread.readline(), expected)
    self.assertEqual(ssa.call_count, 1)
def test_resize_threaded(self):
    """
    Test a resize event threading behavior
    """
    manager = _manager.Manager(stream=self.tty.stdout,
                               counter_class=MockCounter, threaded=True)
    counter3 = MockCounter(manager=manager)
    counter3.last_update = time.time()
    manager.counters[counter3] = 3
    manager.scroll_offset = 4
    term = manager.term

    # simulate resize
    manager._stage_resize()
    self.assertTrue(manager._resize)
    # Staging clears the counter's last_update timestamp.
    self.assertEqual(counter3.last_update, 0)

    with mock.patch('%s.width' % TERMINAL,
                    new_callable=mock.PropertyMock) as mockwidth:
        mockwidth.return_value = 70

        # resize doesn't happen until a write is called
        self.assertEqual(manager.width, 80)

        with mock.patch(
                'enlighten._manager.Manager._set_scroll_area') as ssa:
            manager.write()
        self.assertEqual(ssa.call_count, 1)
        self.assertEqual(manager.width, 70)
        self.tty.stdout.write(u'X\n')
        self.assertEqual(self.tty.stdread.readline(),
                         term.move(21, 0) + term.clear_eos + 'X\n')
        # Resize processing completed and released its flags.
        self.assertFalse(manager.resize_lock)
        self.assertFalse(manager._resize)
        self.assertEqual(counter3.calls,
                         ['refresh(flush=False, elapsed=None)'])
def test_advanced_tls_options(self):
    """All TLS flags are forwarded to the docker CLI in stable order."""
    tls_options = {
        '--tls': True,
        '--tlscacert': './ca.pem',
        '--tlscert': './cert.pem',
        '--tlskey': './key.pem',
    }
    with mock.patch('subprocess.call') as fake_call:
        call_docker(['ps'], tls_options)
    expected = [
        'docker', '--tls',
        '--tlscacert', './ca.pem',
        '--tlscert', './cert.pem',
        '--tlskey', './key.pem',
        'ps',
    ]
    assert fake_call.call_args[0][0] == expected
def test_collection_all_method_help(self):
    """help() on collection.all() includes request syntax and return
    documentation in order."""
    with mock.patch('sys.stdout', six.StringIO()) as mock_stdout:
        help(self.resource.samples.all)
    collection_method_docstring = mock_stdout.getvalue()
    expected_lines = [
        (' Creates an iterable of all Sample resources in the '
         'collection.'),
        ' **Request Syntax** ',
        ' ::',
        ' sample_iterator = myservice.samples.all()',
        ' :rtype: list(:py:class:`myservice.Sample`)',
        ' :returns: A list of Sample resources',
    ]
    self.assert_contains_lines_in_order(expected_lines,
                                        collection_method_docstring)
def test_load_help(self):
    """help() on resource.load documents the underlying client call."""
    sub_resource = self.resource.Sample('Id')
    with mock.patch('sys.stdout', six.StringIO()) as mock_stdout:
        help(sub_resource.load)
    load_docstring = mock_stdout.getvalue()
    expected_lines = [
        (' Calls :py:meth:`MyService.Client.sample_operation` to update '
         'the attributes of the Sample resource'),
        ' **Request Syntax** ',
        ' ::',
        ' sample.load()',
        ' :returns: None',
    ]
    self.assert_contains_lines_in_order(expected_lines, load_docstring)
def test_set_scroll_area(self):
    """_set_scroll_area installs the resize handler, moves the scroll
    offset, pads the stream, and registers the atexit hook exactly once."""
    manager = _manager.Manager(stream=self.tty.stdout,
                               counter_class=MockCounter)
    manager.counters['dummy'] = 3
    term = manager.term
    stdread = self.tty.stdread

    # Pristine state before the call.
    self.assertEqual(manager.scroll_offset, 1)
    self.assertFalse(manager.process_exit)
    self.assertNotEqual(signal.getsignal(signal.SIGWINCH),
                        manager._resize_handler)

    with mock.patch('enlighten._manager.atexit') as atexit:
        with mock.patch.object(term, 'change_scroll'):
            manager._set_scroll_area()
            self.assertEqual(term.change_scroll.call_count, 1)  # pylint: disable=no-member

    self.assertEqual(manager.scroll_offset, 4)
    self.assertEqual(signal.getsignal(signal.SIGWINCH),
                     manager._resize_handler)

    # Cursor repositioned, then blank padding lines emitted.
    self.assertEqual(stdread.readline(), term.move(24, 0) + '\n')
    self.assertEqual(stdread.readline(), '\n')
    self.assertEqual(stdread.readline(), '\n')

    self.assertTrue(manager.process_exit)
    atexit.register.assert_called_with(manager._at_exit)

    self.tty.stdout.write(u'X\n')
    self.assertEqual(stdread.readline(), term.move(21, 0) + 'X\n')

    # Run it again and make sure exit handling isn't reset
    with mock.patch('enlighten._manager.atexit') as atexit:
        with mock.patch.object(term, 'change_scroll'):
            manager._set_scroll_area(force=True)
            self.assertEqual(term.change_scroll.call_count, 1)  # pylint: disable=no-member

    self.assertFalse(atexit.register.called)
def test_stop_no_set_scroll(self):
    """
    set_scroll is False
    """
    with mock.patch('%s.reset' % TERMINAL) as reset:
        manager = _manager.Manager(stream=self.tty.stdout,
                                   counter_class=MockCounter,
                                   set_scroll=False)
        manager.counters[MockCounter(manager=manager)] = 3
        manager.counters[MockCounter(manager=manager)] = 4
        term = manager.term

        with mock.patch('enlighten._manager.atexit'):
            with mock.patch.object(term, 'change_scroll'):
                manager._set_scroll_area()

        self.assertEqual(manager.scroll_offset, 5)
        self.assertEqual(signal.getsignal(signal.SIGWINCH),
                         manager._resize_handler)
        self.assertTrue(manager.process_exit)

        # Stream empty
        self.tty.stdout.write(u'X\n')
        self.assertEqual(self.tty.stdread.readline(), 'X\n')

        manager.stop()

        # stop() restores the original SIGWINCH handler but, with
        # set_scroll=False, never calls term.reset.
        self.assertEqual(signal.getsignal(signal.SIGWINCH),
                         manager.sigwinch_orig)
        self.assertFalse(reset.called)
        self.tty.stdout.write(u'X\n')
        # Counter positions are cleared on stop.
        self.assertEqual(
            self.tty.stdread.readline(),
            term.move(23, 0) + term.clear_eol +
            term.move(24, 0) + term.clear_eol +
            term.move(25, 0) + 'X\n')
        self.assertFalse(manager.process_exit)
def test_instantiation(self):
    """Constructing the dynamodb resource registers both the transform
    injectors and the documentation-modification handlers, in order."""
    # Instantiate the class.
    dynamodb_class = type('dynamodb',
                          (DynamoDBHighLevelResource, ServiceResource),
                          {'meta': self.meta})
    with mock.patch('boto3_wasabi.dynamodb.transform.TransformationInjector') \
            as mock_injector:
        with mock.patch(
                'boto3_wasabi.dynamodb.transform.DocumentModifiedShape.'
                'replace_documentation_for_matching_shape') \
                as mock_modify_documentation_method:
            dynamodb_class(client=self.client)
            # It should have fired the following events upon instantiation.
            # Compared as an exact ordered sequence of register() calls.
            event_call_args = self.events.register.call_args_list
            self.assertEqual(event_call_args, [
                mock.call('provide-client-params.dynamodb',
                          copy_dynamodb_params,
                          unique_id='dynamodb-create-params-copy'),
                mock.call('before-parameter-build.dynamodb',
                          mock_injector.return_value.inject_condition_expressions,
                          unique_id='dynamodb-condition-expression'),
                mock.call('before-parameter-build.dynamodb',
                          mock_injector.return_value.inject_attribute_value_input,
                          unique_id='dynamodb-attr-value-input'),
                mock.call('after-call.dynamodb',
                          mock_injector.return_value.inject_attribute_value_output,
                          unique_id='dynamodb-attr-value-output'),
                mock.call('docs.*.dynamodb.*.complete-section',
                          mock_modify_documentation_method,
                          unique_id='dynamodb-attr-value-docs'),
                mock.call('docs.*.dynamodb.*.complete-section',
                          mock_modify_documentation_method,
                          unique_id='dynamodb-key-expression-docs'),
                mock.call('docs.*.dynamodb.*.complete-section',
                          mock_modify_documentation_method,
                          unique_id='dynamodb-cond-expression-docs')
            ])
def setUp(self):
    """Loader wired to a mock file loader, with version/service discovery
    stubbed and os.path.isdir forced to True."""
    super(TestMergeExtras, self).setUp()
    self.file_loader = mock.Mock()
    self.data_loader = Loader(
        extra_search_paths=['datapath'],
        file_loader=self.file_loader,
        include_default_search_paths=False)
    self.data_loader.determine_latest_version = mock.Mock(
        return_value='2015-03-01')
    self.data_loader.list_available_services = mock.Mock(
        return_value=['myservice'])
    self.isdir_patch = mock.patch('os.path.isdir',
                                  mock.Mock(return_value=True))
    self.isdir_patch.start()