def test_parse_observing_summary_dbase_file_mock(): """ Ensure that all required data are extracted from the RHESSI observing summary database file mocked in `hessi_data()` """ # We need to mock this test differently for <= 3.7.0 and below. if LooseVersion(platform.python_version()) <= LooseVersion("3.7.0"): mock_file = mock.mock_open() mock_file.return_value.__iter__.return_value = hessi_data().splitlines() else: mock_file = mock.mock_open(read_data=hessi_data()) dbase_data = {} with mock.patch('sunpy.instr.rhessi.open', mock_file, create=True): dbase_data = rhessi.parse_observing_summary_dbase_file(None) assert len(dbase_data.keys()) == 7 # verify each of the 7 fields assert dbase_data['filename'] == ['hsi_obssumm_19721101_139.fit', 'hsi_obssumm_19721102_144.fit'] assert dbase_data['orb_st'] == [7, 9] assert dbase_data['orb_end'] == [8, 10] assert dbase_data['start_time'] == [parse_time((1972, 11, 1, 0, 0)), parse_time((1972, 11, 2, 0, 0))] assert dbase_data['end_time'] == [parse_time((1972, 11, 2, 0, 0)), parse_time((1972, 11, 3, 0, 0))] assert dbase_data['status_flag'] == [3, 4] assert dbase_data['npackets'] == [2, 1]
def test_using_gzip_if_header_present_and_file_available(loop):
    """Serve the pre-compressed file when the client accepts gzip and it exists."""
    request = make_mocked_request(
        'GET', URL('http://python.org/logo.png'),
        headers={
            hdrs.ACCEPT_ENCODING: 'gzip'
        }
    )

    # Pre-compressed sibling file: reported present on disk.
    gz_filepath = mock.Mock()
    gz_filepath.open = mock.mock_open()
    gz_filepath.is_file.return_value = True
    gz_filepath.stat.return_value = mock.MagicMock()
    gz_filepath.stat.st_size = 1024

    filepath = mock.Mock()
    filepath.name = 'logo.png'
    filepath.open = mock.mock_open()
    filepath.with_name.return_value = gz_filepath

    file_sender = FileSender()
    file_sender._sendfile = make_mocked_coro(None)

    loop.run_until_complete(file_sender.send(request, filepath))

    # Only the gzipped variant may be opened, never the original.
    assert not filepath.open.called
    assert gz_filepath.open.called
def test_generate_command_uwsgi_service_option_nginx_conf_redhat(
        self, mock_os_path_isfile, mock_file, mock_env,
        mock_os_path_exists, mock_linux_distribution, mock_context):
    """On a redhat distribution the rendered context must use the 'http-' user prefix."""
    mock_args = mock.MagicMock()
    mock_args.type = GEN_UWSGI_SERVICE_OPTION
    mock_args.directory = None
    mock_os_path_isfile.return_value = False
    mock_env.side_effect = ['/foo/conda', 'conda_env']
    mock_os_path_exists.return_value = True
    mock_linux_distribution.return_value = ['redhat']

    # First open is for the Template, next two are for /etc/nginx/nginx.conf and /etc/passwd, and the final
    # open is to "write" out the resulting file. The middle two opens return information about a user, while
    # the first and last use MagicMock.
    handlers = (
        mock_file.return_value,
        mock.mock_open(read_data='user foo_user').return_value,
        mock.mock_open(
            read_data='foo_user:x:1000:1000:Foo User,,,:/foo/nginx:/bin/bash'
        ).return_value,
        mock_file.return_value
    )
    mock_file.side_effect = handlers

    generate_command(args=mock_args)

    mock_os_path_isfile.assert_called_once()
    mock_file.assert_called()
    mock_env.assert_any_call('CONDA_HOME')
    mock_env.assert_called_with('CONDA_ENV_NAME')
    mock_os_path_exists.assert_called_once_with('/etc/nginx/nginx.conf')
    # First positional arg of the first context.update() call is the template context.
    context = mock_context().update.call_args_list[0][0][0]
    self.assertEqual('http-', context['user_option_prefix'])
def test_UploadedImagesTracker(self, pMockForOpen, pMockForFlock):
    """End-to-end checks of UploadedImagesTracker against a mocked activity log.

    `pMockForOpen`/`pMockForFlock` come from decorator patches applied to the
    test (they keep open/flock mocked for the sections that do not patch
    them locally).
    """
    # Test assertion raised when flock fails.
    mocked_open = mock_open()
    with patch("imguploader.open", mocked_open, create=True):
        with patch("fcntl.flock", MagicMock(side_effect=IOError)):
            self.assertRaises(
                imguploader.UploadedImagesTrackerLockAcquiringFailed,
                imguploader.UploadedImagesTracker,
                "directory")

    # Test assertion raised when the activity log file is corrupted.
    mocked_open = mock_open(read_data="noooooooooooo")
    with patch("imguploader.open", mocked_open, create=True):
        with patch("fcntl.flock", MagicMock(return_value=None)):
            self.assertRaises(
                imguploader.UploadedImagesTrackerException,
                imguploader.UploadedImagesTracker,
                "directory")

    # Test proper construction of the UploadedImagesTracker.getImageList() returned list.
    mocked_open = mock_open(read_data="fileName<URLfull<URLthumb")
    with patch("imguploader.open", mocked_open, create=True):
        with patch("fcntl.flock", MagicMock(return_value=None)):
            entry = imguploader.UploadedImagesTracker("directory")
            assert(entry.getImageList()[0].getImageFileName() == "fileName")
            assert(entry.isImageAlreadyUploaded("fileName") == True)

    # Test for proper call to close() on the activity log file.
    lOpenMocked = mock_open(read_data="")
    with patch("imguploader.open", lOpenMocked, create=True):
        with imguploader.UploadedImagesTracker("directory"):
            pass
        lOpenMocked().close.assert_called_once_with()

    # Test for UploadedImagesTracker.addUploadedImage()
    uploadedImagesTracker = imguploader.UploadedImagesTracker("directory")
    uploadedImagesTracker.addUploadedImage("imageFileName", "FullURL", "ThumbURL")
    assert(uploadedImagesTracker._uploadedImages[0].getImageFileName() == "imageFileName")
    assert(uploadedImagesTracker._uploadedImages[0].getURLFullImage() == "FullURL")
    assert(uploadedImagesTracker._uploadedImages[0].getURLThumbImage() == "ThumbURL")
def test_saving_and_loading(hass):
    """Test that we're saving and loading correctly."""
    class TestFlow(data_entry_flow.FlowHandler):
        VERSION = 5

        @asyncio.coroutine
        def async_step_init(self, user_input=None):
            return self.async_create_entry(
                title='Test Title',
                data={
                    'token': 'abcd'
                }
            )

    with patch.dict(config_entries.HANDLERS, {'test': TestFlow}):
        yield from hass.config_entries.flow.async_init('test')

    class Test2Flow(data_entry_flow.FlowHandler):
        VERSION = 3

        @asyncio.coroutine
        def async_step_init(self, user_input=None):
            return self.async_create_entry(
                title='Test 2 Title',
                data={
                    'username': '******'
                }
            )

    json_path = 'homeassistant.util.json.open'

    with patch('homeassistant.config_entries.HANDLERS.get',
               return_value=Test2Flow), \
            patch.object(config_entries, 'SAVE_DELAY', 0):
        yield from hass.config_entries.flow.async_init('test')

    with patch(json_path, mock_open(), create=True) as mock_write:
        # To trigger the call_later
        yield from asyncio.sleep(0, loop=hass.loop)
        # To execute the save
        yield from hass.async_block_till_done()

    # Mock open calls are: open file, context enter, write, context leave
    written = mock_write.mock_calls[2][1][0]

    # Now load written data in new config manager
    manager = config_entries.ConfigEntries(hass, {})

    with patch('os.path.isfile', return_value=True), \
            patch(json_path, mock_open(read_data=written), create=True):
        yield from manager.async_load()

    # Ensure same order
    for orig, loaded in zip(hass.config_entries.async_entries(),
                            manager.async_entries()):
        assert orig.version == loaded.version
        assert orig.domain == loaded.domain
        assert orig.title == loaded.title
        assert orig.data == loaded.data
        assert orig.source == loaded.source
def test_loading_saving_data(hass, registry):
    """Test that we load/save data correctly."""
    orig_entry1 = registry.async_get_or_create('light', 'hue', '1234')
    orig_entry2 = registry.async_get_or_create('light', 'hue', '5678')

    assert len(registry.entities) == 2

    with patch(YAML__OPEN_PATH, mock_open(), create=True) as mock_write:
        yield from registry._async_save()

    # Mock open calls are: open file, context enter, write, context leave
    written = mock_write.mock_calls[2][1][0]

    # Now load written data in new registry
    registry2 = entity_registry.EntityRegistry(hass)

    with patch('os.path.isfile', return_value=True), \
            patch(YAML__OPEN_PATH, mock_open(read_data=written), create=True):
        yield from registry2._async_load()

    # Ensure same order
    assert list(registry.entities) == list(registry2.entities)

    # Re-creating the same entities must return the originally saved entries.
    new_entry1 = registry.async_get_or_create('light', 'hue', '1234')
    new_entry2 = registry.async_get_or_create('light', 'hue', '5678')

    assert orig_entry1 == new_entry1
    assert orig_entry2 == new_entry2
def test_all_node_names_missing(self):
    """Missing node names in corosync.conf must abort the cert sync with reports."""
    (self.config
        .corosync_conf.load(
            filename="corosync-no-node-names.conf",
            instead="corosync_conf.load"
        )
        # Cert and key are still read locally before node resolution fails.
        .fs.open(
            settings.pcsd_cert_location,
            mock.mock_open(read_data=self.pcsd_ssl_cert)(),
            name="fs.open.pcsd_ssl_cert"
        )
        .fs.open(
            settings.pcsd_key_location,
            mock.mock_open(read_data=self.pcsd_ssl_key)(),
            name="fs.open.pcsd_ssl_key"
        )
    )
    self.env_assist.assert_raise_library_error(
        lambda: pcsd.synchronize_ssl_certificate(self.env_assist.get_env()),
        []
    )
    self.env_assist.assert_reports(
        [
            fixture.warn(
                report_codes.COROSYNC_CONFIG_MISSING_NAMES_OF_NODES,
                fatal=False,
            ),
            fixture.error(
                report_codes.COROSYNC_CONFIG_NO_NODES_DEFINED,
            ),
        ]
    )
def test_explicit_mock(self):
    """mock_open() must configure a caller-supplied mock in place."""
    explicit = MagicMock()
    mock_open(explicit)
    target = '%s.open' % __name__
    with patch(target, explicit, create=True) as patched:
        # patch() hands back the very mock object we passed in.
        self.assertIs(patched, explicit)
        open('foo')
        explicit.assert_called_once_with('foo')
def test_load(self):
    """Load redirects from specified file or self.file_name."""
    rd1 = Redirect('/one', '/two', 302)
    rd2 = Redirect('/three', '/four', 302)
    rd3 = Redirect('/five', '/six', 302)
    rdf = RedirectsFile('/tmp/foo.json')
    rdf.add_redirect(rd1)
    rdf.add_redirect(rd2)
    rdf.add_redirect(rd3)

    # An explicit filename argument wins over self.file_name.
    json_file = StringIO()
    json_file.write(json.dumps([rd.to_JSON() for rd in rdf.redirects]))
    json_file.seek(0)
    rdf2 = RedirectsFile('/tmp/foo.json')
    m = mock.mock_open()
    with mock.patch('builtins.open', m, create=True):
        m.return_value = json_file
        rdf2.load('/tmp/bar.json')
        m.assert_called_with('/tmp/bar.json', 'r', encoding='utf-8')

    # Without an argument, self.file_name is used.
    json_file = StringIO()
    json_file.write(json.dumps([rd.to_JSON() for rd in rdf.redirects]))
    json_file.seek(0)
    rdf2 = RedirectsFile('/tmp/foo.json')
    m = mock.mock_open()
    with mock.patch('builtins.open', m, create=True):
        m.return_value = json_file
        rdf2.load()
        m.assert_called_with('/tmp/foo.json', 'r', encoding='utf-8')
def test_all_node_names_missing(self):
    """Booth config sync must fail when corosync.conf defines no named nodes."""
    auth_file = "auth.file"
    auth_file_path = os.path.join(settings.booth_config_dir, auth_file)
    config_content = "authfile={}".format(auth_file_path)
    auth_file_content = b"auth"
    (self.config
        .fs.open(
            self.config_path,
            mock.mock_open(read_data=config_content)(),
            name="open.conf"
        )
        # The referenced authfile is read in binary mode.
        .fs.open(
            auth_file_path,
            mock.mock_open(read_data=auth_file_content)(),
            mode="rb",
            name="open.authfile",
        )
        .corosync_conf.load(filename="corosync-no-node-names.conf")
    )
    self.env_assist.assert_raise_library_error(
        lambda: commands.config_sync(self.env_assist.get_env(), self.name),
        [
            fixture.error(
                report_codes.COROSYNC_CONFIG_NO_NODES_DEFINED,
            ),
        ]
    )
    self.env_assist.assert_reports([
        fixture.warn(
            report_codes.COROSYNC_CONFIG_MISSING_NAMES_OF_NODES,
            fatal=False,
        ),
    ])
def test_fail_some_nodes_unknown(self):
    """An unknown node must raise HOST_NOT_FOUND, forcible via skip-offline."""
    (self.config
        # First node is deliberately left out of the known-hosts list.
        .env.set_known_nodes(self.node_names[1:])
        .fs.open(
            settings.pcsd_cert_location,
            mock.mock_open(read_data=self.pcsd_ssl_cert)(),
            name="fs.open.pcsd_ssl_cert"
        )
        .fs.open(
            settings.pcsd_key_location,
            mock.mock_open(read_data=self.pcsd_ssl_key)(),
            name="fs.open.pcsd_ssl_key"
        )
    )
    self.env_assist.assert_raise_library_error(
        lambda: pcsd.synchronize_ssl_certificate(self.env_assist.get_env()),
        []
    )
    self.env_assist.assert_reports(
        [
            fixture.error(
                report_codes.HOST_NOT_FOUND,
                force_code=report_codes.SKIP_OFFLINE_NODES,
                host_list=[self.node_names[0]]
            ),
        ]
    )
def test_gzip_if_header_present_and_file_not_available(loop) -> None:
    """Fall back to the plain file when no .gz variant exists on disk."""
    request = make_mocked_request(
        'GET', 'http://python.org/logo.png',
        headers={
            hdrs.ACCEPT_ENCODING: 'gzip'
        }
    )

    # The gzipped sibling is reported missing.
    gz_filepath = mock.Mock()
    gz_filepath.open = mock.mock_open()
    gz_filepath.is_file.return_value = False

    filepath = mock.Mock()
    filepath.name = 'logo.png'
    filepath.open = mock.mock_open()
    filepath.with_name.return_value = gz_filepath
    filepath.stat.return_value = mock.MagicMock()
    filepath.stat.st_size = 1024

    file_sender = FileResponse(filepath)
    file_sender._sendfile = make_mocked_coro(None)

    loop.run_until_complete(file_sender.prepare(request))

    # The original file, not the gzip one, must be opened.
    assert filepath.open.called
    assert not gz_filepath.open.called
def test_success(self):
    """Cert and key are read locally and pushed to every node, with reports."""
    (self.config
        .fs.open(
            settings.pcsd_cert_location,
            mock.mock_open(read_data=self.pcsd_ssl_cert)(),
            name="fs.open.pcsd_ssl_cert"
        )
        .fs.open(
            settings.pcsd_key_location,
            mock.mock_open(read_data=self.pcsd_ssl_key)(),
            name="fs.open.pcsd_ssl_key"
        )
        .http.host.send_pcsd_cert(
            cert=self.pcsd_ssl_cert,
            key=self.pcsd_ssl_key,
            node_labels=self.node_names
        )
    )
    pcsd.synchronize_ssl_certificate(self.env_assist.get_env())
    self.env_assist.assert_reports(
        [
            fixture.info(
                report_codes.PCSD_SSL_CERT_AND_KEY_DISTRIBUTION_STARTED,
                node_name_list=self.node_names
            )
        ]
        +
        [
            # One success report per node.
            fixture.info(
                report_codes.PCSD_SSL_CERT_AND_KEY_SET_SUCCESS,
                node=node,
            ) for node in self.node_names
        ]
    )
def test_write_bed_file(self):
    """write_bed_file() must emit one BED line per SNV, honouring padding args."""
    v = VariantFileObject(Variant)
    m = mock.mock_open(read_data='\n'.join(self.vcf_content))
    # mock_open does not support line iteration out of the box; wire it up
    # manually so the VCF loader can iterate the handle.
    m.return_value.__iter__ = lambda self: self
    m.return_value.__next__ = lambda self: self.readline()
    with mock.patch(builtin_open, m):
        v.load_variant_file('test.vcf')
    v.drop_indels()

    fh = mock.mock_open()
    with mock.patch(builtin_open, fh):
        v.write_bed_file('output.bed')
        calls = mock.call
        calls_list = [calls('output.bed', 'w')]
        fh.assert_has_calls(calls_list, any_order=True)
        handle = fh()
        calls_list = [
            calls("1\t10\t10\t1;A;C\n",),
            calls("2\t10\t10\t2;A;C\n",)
        ]
        handle.write.assert_has_calls(calls_list)

        # With -3/+3 padding the interval is widened around each position.
        v.write_bed_file('output_with_penalty.bed', -3, 3)
        calls = mock.call
        calls_list = [calls('output_with_penalty.bed', 'w')]
        fh.assert_has_calls(calls_list, any_order=True)
        handle = fh()
        calls_list = [
            calls("1\t7\t13\t1;A;C\n",),
            calls("2\t7\t13\t2;A;C\n",)
        ]
        handle.write.assert_has_calls(calls_list)
def setUp(self):
    """Create an XrProfiler wired to a fully mocked xrandr helper."""
    xrhelper = MagicMock()
    xrhelper.get_current_setup = MagicMock(return_value=xr_settings)
    xrhelper.run_xrandr = MagicMock()
    self.xrhelper = xrhelper
    self.mo = mock_open()
    # init xrprofiler without init data
    with patch('xrprofiler.open', mock_open(), create=True):
        self.xrprofiler = xrprofiler.XrProfiler('test.yaml', xrhelper)
def setUp(self):
    """Build an LDAPController with fully mocked settings."""
    # NOTE(review): this mock_open() result is discarded and nothing is
    # patched with it — it looks like a leftover; confirm and remove.
    mock.mock_open(read_data='{}')
    settings = {
        'timer': mock.MagicMock(),
        'ldap_config_file': 'testdata/test-ldap-config.json',
        'statsd_factory': mock.MagicMock(),
        'statsd_host_factory': mock.MagicMock(),
    }
    self.ldap = controller.LDAPController(settings)
def test_fail_communication(self):
    """A failed send to one node is reported; the remaining nodes still succeed."""
    error = "an error"
    (self.config
        .fs.open(
            settings.pcsd_cert_location,
            mock.mock_open(read_data=self.pcsd_ssl_cert)(),
            name="fs.open.pcsd_ssl_cert"
        )
        .fs.open(
            settings.pcsd_key_location,
            mock.mock_open(read_data=self.pcsd_ssl_key)(),
            name="fs.open.pcsd_ssl_key"
        )
        .http.host.send_pcsd_cert(
            cert=self.pcsd_ssl_cert,
            key=self.pcsd_ssl_key,
            # First node answers HTTP 400; the rest respond normally.
            communication_list=[
                {
                    "label": self.node_names[0],
                    "response_code": 400,
                    "output": error,
                }
            ]
            +
            [
                dict(label=node) for node in self.node_names[1:]
            ]
        )
    )
    self.env_assist.assert_raise_library_error(
        lambda: pcsd.synchronize_ssl_certificate(self.env_assist.get_env()),
        []
    )
    self.env_assist.assert_reports(
        [
            fixture.info(
                report_codes.PCSD_SSL_CERT_AND_KEY_DISTRIBUTION_STARTED,
                node_name_list=self.node_names
            )
        ]
        +
        [
            fixture.info(
                report_codes.PCSD_SSL_CERT_AND_KEY_SET_SUCCESS,
                node=node,
            ) for node in self.node_names[1:]
        ]
        +
        [
            fixture.error(
                report_codes.NODE_COMMUNICATION_COMMAND_UNSUCCESSFUL,
                node=self.node_names[0],
                command="remote/set_certs",
                reason=error
            )
        ]
    )
def setUp(self):
    """Build an LDAPController with mocked settings and a stubbed os.stat."""
    # NOTE(review): this mock_open() result is discarded and nothing is
    # patched with it — it looks like a leftover; confirm and remove.
    mock.mock_open(read_data='{}')
    settings = {
        'timer': mock.MagicMock(),
        'ldap_config_file': 'testdata/test-ldap-config.json',
        'statsd_factory': mock.MagicMock(),
        'statsd_host_factory': mock.MagicMock(),
    }
    # os.stat is stubbed so the controller can "stat" the config file.
    with mock.patch('os.stat', return_value=MockStat()):
        self.ldap = controller.LDAPController(settings)
def test_proc(self):
    '''Test reading proc stats with mock data.'''
    # Skip silently when the mock library is unavailable.
    if mock is None:
        return
    mock_stat = mock.mock_open(read_data='22411 (cat) R 22301 22411 22301 34818 22411 4194304 82 0 0 0 0 0 0 0 20 0 1 0 709170 8155136 221 18446744073709551615 94052544688128 94052544719312 140729623469552 0 0 0 0 0 0 0 0 0 17 6 0 0 0 0 0 94052546816624 94052546818240 94052566347776 140729623473446 140729623473466 140729623473466 140729623478255 0')
    mock_status = mock.mock_open(read_data='Name: cat\n\nVmData: 2 kB\nMultiple colons: 1:1')
    with mock.patch('builtins.open', new_callable=mock.mock_open) as mock_file:
        # /proc/<pid>/stat is opened first, then /proc/<pid>/status.
        mock_file.side_effect = [mock_stat.return_value, mock_status.return_value]
        procinfo = process._ProcessMemoryInfoProc()
    self.assertTrue(procinfo.available)
    # vsz is field 23 of the stat line above.
    self.assertEqual(procinfo.vsz, 8155136)
    if sys.version_info >= (3, 4):
        # Python 3.3 doesn't support mock_open.readlines
        self.assertEqual(procinfo.data_segment, 2048)
def test_12_add__guess(self):
    """`add` with auto_slug should guess the slug, the user answering "1".

    Bug fix: the original created mock_open(read_data='1\n') but discarded
    the result and patched sys.stdin with the `mock_open` *function* itself,
    so the simulated answer never reached the code under test. Patch stdin
    with a configured file-like mock instead.
    """
    fake_stdin = mock_open(read_data='1\n')()
    with patch('sys.stdin', fake_stdin):
        self.action_add(namespace=self.local_namespace,
                        repository='git-repo',
                        alone=True,
                        tracking='github',
                        auto_slug=True,
                        remotes={
                            'origin': 'https://github.com/foo/bar',
                            'wootwoot': '[email protected]:w00t/w00t',
                            'duckling': 'ssh://github.com/duck/duck',
                        })
def test_file_reader_node(self):
    """Check that FileReader nodes are functional (config, meta and callable paths)."""
    reader = nodes.FileReader(filepath='/filepath', filename='badname')
    channel = FakeChannel(self.loop)
    reader.channel = channel
    msg1 = generate_msg()
    # An explicit filepath wins over the (bad) filename.
    with mock.patch("builtins.open", mock.mock_open(read_data="data")) as mock_file:
        result = self.loop.run_until_complete(reader.handle(msg1))
        mock_file.assert_called_once_with('/filepath', 'r')
        self.assertEqual(result.payload, "data", "FileReader not working")

    reader2 = nodes.FileReader()
    reader2.channel = channel
    msg2 = generate_msg()
    msg2.meta['filepath'] = '/filepath2'
    msg2.meta['filename'] = '/badpath'
    # Without config, the message meta filepath is used.
    with mock.patch("builtins.open", mock.mock_open(read_data="data2")) as mock_file:
        result = self.loop.run_until_complete(reader2.handle(msg2))
        mock_file.assert_called_once_with('/filepath2', 'r')
        self.assertEqual(result.payload, "data2", "FileReader not working with meta")

    reader3 = nodes.FileReader(filepath=tstfct, filename='badname')
    reader3.channel = channel
    msg3 = generate_msg()
    msg3.meta['filepath'] = '/badpath'
    msg3.meta['filename'] = 'badname2'
    # A callable filepath takes precedence over meta values.
    with mock.patch("builtins.open", mock.mock_open(read_data="data")) as mock_file:
        result = self.loop.run_until_complete(reader3.handle(msg3))
        mock_file.assert_called_once_with('/fctpath', 'r')

    reader4 = nodes.FileReader(filename=tstfct2)
    reader4.channel = channel
    msg4 = generate_msg()
    msg4.meta['filepath'] = '/filepath3/badname'
    msg4.meta['filename'] = 'badname'
    # A callable filename replaces the basename inside the meta filepath.
    with mock.patch("builtins.open", mock.mock_open(read_data="data")) as mock_file:
        result = self.loop.run_until_complete(reader4.handle(msg4))
        mock_file.assert_called_once_with('/filepath3/fctname', 'r')
def test_complete_subjob_parses_payload_and_stores_value_in_atom_objects(self):
    """complete_subjob() must read each atom's exit code from its artifact file."""
    fake_atom_exit_code = 777
    # Configure the test's pre-patched open mock (self.mock_open) in place.
    mock_open(mock=self.mock_open, read_data=str(fake_atom_exit_code))
    build = self._create_test_build(BuildStatus.BUILDING, num_subjobs=1,
                                    num_atoms_per_subjob=1)
    subjob = build.all_subjobs()[0]

    build.complete_subjob(subjob.subjob_id(), payload=self._FAKE_PAYLOAD)

    expected_payload_sys_path = join(Configuration['results_directory'], '1', 'artifact_0_0')
    self.mock_open.assert_called_once_with(
        join(expected_payload_sys_path, BuildArtifact.EXIT_CODE_FILE),
        'r',
    )
    self.assertEqual(subjob.atoms[0].exit_code, fake_atom_exit_code)
def test_set_glsl_es_version():
    """test.glsl_parser_test.GLSLParserTest: sets glsl_es_version"""
    rt = {'glsl_version': '3.00 es'}
    with mock.patch.object(glsl.GLSLParserTest, '_GLSLParserTest__parser',
                           mock.Mock(return_value=rt)):
        with mock.patch.object(glsl.GLSLParserTest,
                               '_GLSLParserTest__get_command',
                               return_value=['foo']):
            with mock.patch('framework.test.glsl_parser_test.open',
                            mock.mock_open(), create=True):
                # os.stat only needs to be a callable stub here; mock_open()
                # is used as a convenient do-nothing mock.
                with mock.patch('framework.test.glsl_parser_test.os.stat',
                                mock.mock_open()):
                    test = glsl.GLSLParserTest('foo')
    nt.eq_(test.glsl_es_version, 3.0)
def test_set_exclude_gl_required():
    """test.glsl_parser_test.GLSLParserTest: doesn't add excludes to gl_required"""
    # '!'-prefixed extensions are exclusions and must not end up in gl_required.
    rt = {'require_extensions': 'GL_ARB_foobar !GL_EXT_foobar'}
    with mock.patch.object(glsl.GLSLParserTest, '_GLSLParserTest__parser',
                           mock.Mock(return_value=rt)):
        with mock.patch.object(glsl.GLSLParserTest,
                               '_GLSLParserTest__get_command',
                               return_value=['foo']):
            with mock.patch('framework.test.glsl_parser_test.open',
                            mock.mock_open(), create=True):
                # os.stat only needs to be a callable stub here.
                with mock.patch('framework.test.glsl_parser_test.os.stat',
                                mock.mock_open()):
                    test = glsl.GLSLParserTest('foo')
    nt.eq_(test.gl_required, set(['GL_ARB_foobar']))
def test_read_json(self):
    """read_json() parses well-formed JSON and raises on missing/invalid input."""
    # Test read_json() function
    json_text = """{ "a" : 1, "b" : { "c" : 2, "d" : "donut", "e": 42 }, "f" : "fjord" }"""
    data = read_json("filename", opener=mock_open(read_data=json_text))
    self.assertEqual(data["a"], 1)
    self.assertTrue(isinstance(data["b"], dict))
    self.assertEqual(data["b"]["c"], 2)
    self.assertEqual(data["b"]["d"], "donut")
    self.assertEqual(data["b"]["e"], 42)
    self.assertEqual(data["f"], "fjord")

    # FileNotFound
    self.assertRaises(FileNotFoundError, read_json, "", opener=open)

    # Improperly formed JSON (ValueError)
    json_text = """error"""
    self.assertRaises(ValueError, read_json, "filename",
                      opener=mock_open(read_data=json_text))
async def test_ip_bans_file_creation(hass, aiohttp_client):
    """Testing if banned IP file created."""
    app = web.Application()
    app['hass'] = hass

    async def unauth_handler(request):
        """Return a mock web response."""
        raise HTTPUnauthorized

    app.router.add_get('/', unauth_handler)
    setup_bans(hass, app, 1)
    mock_real_ip(app)("200.201.202.204")

    with patch('homeassistant.components.http.ban.async_load_ip_bans_config',
               return_value=mock_coro([IpBan(banned_ip)
                                       for banned_ip in BANNED_IPS])):
        client = await aiohttp_client(app)

    m = mock_open()

    with patch('homeassistant.components.http.ban.open', m, create=True):
        resp = await client.get('/')
        assert resp.status == 401
        assert len(app[KEY_BANNED_IPS]) == len(BANNED_IPS)
        # First failure only counts towards the threshold; nothing written yet.
        assert m.call_count == 0

        resp = await client.get('/')
        assert resp.status == 401
        assert len(app[KEY_BANNED_IPS]) == len(BANNED_IPS) + 1
        # Second failure crosses the threshold: ban appended to the file.
        m.assert_called_once_with(hass.config.path(IP_BANS_FILE), 'a')

        resp = await client.get('/')
        assert resp.status == 403
        # Already banned: no additional write.
        assert m.call_count == 1
def test_block_file_handle(self):
    """Blocks built from a config file must behave like hand-built ones."""
    mock_lines = [
        'multiplier_block MultiplierBlock',
        'lowercase_converter_block LowerCaseConverterBlock',
        'uppercase_converter_block UpperCaseConverterBlock',
        'char_blocker CharBlock z']
    expected_blocks = [
        MultiplierBlock(),
        LowerCaseConverterBlock(),
        UpperCaseConverterBlock(),
        CharBlock('z')]
    test_chars = ['a', 'b', 'y', 'Y', 'z', 'Z', '#']
    mock_file = mock_open(read_data='\n'.join(mock_lines))
    with patch('builtins.open', mock_file):
        created_blocks = block_file_handle('mock.txt')
    # Two blocks are equivalent iff they map every probe char identically.
    block_behavior_same = lambda expected, created: (
        False not in map(
            lambda char: expected.process(char) == created.process(char),
            test_chars))
    self.assertTrue(
        False not in map(block_behavior_same, expected_blocks, created_blocks))
def test_config_google_home_entity_id_to_number():
    """Test config adheres to the type."""
    conf = Config(Mock(), {
        'type': 'google_home'
    })

    # Numbers file already maps '1' to an existing entity.
    mop = mock_open(read_data=json.dumps({'1': 'light.test2'}))
    handle = mop()

    with patch('homeassistant.components.emulated_hue.open', mop, create=True):
        number = conf.entity_id_to_number('light.test')
        assert number == '2'
        # The new mapping is persisted exactly once.
        assert handle.write.call_count == 1
        assert json.loads(handle.write.mock_calls[0][1][0]) == {
            '1': 'light.test2',
            '2': 'light.test',
        }

        # Asking again for a known entity must not write again.
        number = conf.entity_id_to_number('light.test')
        assert number == '2'
        assert handle.write.call_count == 1

        number = conf.entity_id_to_number('light.test2')
        assert number == '1'
        assert handle.write.call_count == 1

        entity_id = conf.number_to_entity_id('1')
        assert entity_id == 'light.test2'
def test_valid_degree_unit():
    """test valid degree unit."""
    question = '(c)elsius/(f)ahrenheit): '
    # (user input, expected persisted value); empty input defaults to celsius.
    data = (
        ('', 'celsius'),
        # lowercase
        ('c', 'celsius'),
        ('celsius', 'celsius'),
        ('f', 'fahrenheit'),
        ('fahrenheit', 'fahrenheit'),
        # uppercase
        ('C', 'celsius'),
        ('CELSIUS', 'celsius'),
        ('F', 'fahrenheit'),
        ('FAHRENHEIT', 'fahrenheit'),
    )
    for degree, expected_value in data:
        m = mock.mock_open()
        with mock.patch('melissa.profile_populator.raw_input') as mock_input, \
                mock.patch('melissa.profile_populator.open', m, create=True), \
                mock.patch('melissa.profile_populator.tts_local'), \
                mock.patch('melissa.profile_populator.json') as mock_json:
            side_effect = InputSideEffect(question, degree)
            mock_input.side_effect = side_effect.func
            profile_populator()
            # First positional arg of json.dump() is the profile dict.
            result_json = mock_json.dump.call_args[0][0]
            assert result_json['degrees'] == expected_value
def test_load_profile(skip_message, isfile_retval):
    """test load_profile func."""
    m_open = mock.mock_open()
    m_open_path = 'melissa.profile_loader.open'
    with mock.patch('melissa.profile_loader.profile_populator') as m_pp, \
            mock.patch('melissa.profile_loader.os') as m_os, \
            mock.patch('sys.stdout', new_callable=StringIO) as m_stdout, \
            mock.patch(m_open_path, m_open, create=True), \
            mock.patch('melissa.profile_loader.json') as m_json:
        m_os.path.isfile.return_value = isfile_retval
        # Imported inside the patch context so module-level lookups see mocks.
        from melissa.profile_loader import load_profile
        from melissa.utilities import json_decode as jd
        res = load_profile(skip_message=skip_message)
        # testing
        assert res == m_json.load.return_value
        m_os.path.isfile.assert_called_once_with('profile.json')
        if isfile_retval:
            # Existing profile: the populator must not run.
            m_pp.assert_not_called()
        else:
            m_pp.assert_called_once_with()
        if skip_message:
            assert m_stdout.getvalue() == ''
        else:
            assert "Loading profile data" in m_stdout.getvalue()
        m_open.assert_has_calls(
            [mock.call('profile.json'), mock.call().close()])
        m_json.load.assert_called_once_with(
            m_open.return_value, object_hook=jd.decode_dict)
def test_initialize(self, open, subprocess_call, isfile, which,
                    _get_input, get_yes_no_input) -> None:
    """Exercise `pyre init`: watchman setup, local config creation, conflicts."""
    get_yes_no_input.return_value = True
    arguments = mock_arguments()
    # pyre.py does not provide a Configuration instance to
    # Initialize - this test should do the same
    configuration = None

    def exists(path):
        if path.endswith(".watchmanconfig"):
            return False
        elif path.endswith(".pyre_configuration"):
            return False
        elif path.endswith(".pyre_configuration.local"):
            return False
        else:
            return True

    isfile.side_effect = exists

    # One for shutil.which("watchman"), another for shutil.which(BINARY_NAME).
    which.side_effect = [True, True]
    with patch.object(commands.Command, "_call_client"):
        initialize.Initialize(arguments, configuration,
                              AnalysisDirectory(".")).run()
        subprocess_call.assert_has_calls(
            [call(["watchman", "watch-project", "."])])
        open.assert_any_call(os.path.abspath(".watchmanconfig"), "w+")

    arguments.local = True

    def exists(path):
        return False

    isfile.side_effect = exists
    file = mock_open()
    with patch("builtins.open", file), patch.object(
            commands.Command, "_call_client"), patch.object(
            initialize.Initialize, "_get_local_configuration",
            return_value={}):
        initialize.Initialize(arguments, configuration,
                              AnalysisDirectory(".")).run()
        # An empty local configuration must be written out.
        file().write.assert_has_calls([call("{}"), call("\n")])

    def exists(path):
        if path.endswith(".pyre_configuration"):
            return True
        return False

    isfile.side_effect = exists
    # A pre-existing .pyre_configuration must abort initialization.
    with patch.object(commands.Command, "_call_client"):
        with self.assertRaises(EnvironmentException):
            initialize.Initialize(arguments, configuration,
                                  AnalysisDirectory(".")).run()

    with patch.object(commands.Command, "_call_client"), patch.object(
            sys, "argv", ["/tmp/pyre/bin/pyre"]):
        which.reset_mock()
        # watchman found; pyre.bin not on PATH; found next to argv[0].
        which.side_effect = [True, None, "/tmp/pyre/bin/pyre.bin"]
        initialize.Initialize(
            arguments, configuration, AnalysisDirectory(".")
        )._get_configuration()
        which.assert_has_calls([
            call("watchman"),
            call("pyre.bin"),
            call("/tmp/pyre/bin/pyre.bin")
        ])
def test_metadata_tags_tagstring(self):
    """A comma-separated `tag_string` is written out as newline-separated tags."""
    post_processor = self._create(
        {"mode": "tags"}, {"tag_string": "foo, bar, baz"})
    with patch("builtins.open", mock_open()) as mocked_file:
        post_processor.prepare(self.pathfmt)
        post_processor.run(self.pathfmt)
    self.assertEqual(self._output(mocked_file), "foo\nbar\nbaz\n")
def test_get_insert_statements_incorrect_not_enough_columns(self):
    """A CSV row with fewer values than columns must raise TypeError."""
    short_row_csv = "a,b\nThe Strokes,8/1/2019"
    with self.assertRaises(TypeError), mock.patch(
            'builtins.open', mock.mock_open(read_data=short_row_csv)):
        print(DBLoader("abc").get_insert_statements())
# limitations under the License. # # ------------------------------------------------------------------------------ """Test module for Registry publish methods.""" from pathlib import Path from unittest import TestCase, mock from unittest.mock import mock_open from aea.cli.registry.publish import _compress, publish_agent from aea.test_tools.test_cases import AEATestCase from tests.conftest import CUR_PATH from tests.test_cli.tools_for_testing import ContextMock @mock.patch("builtins.open", mock_open(read_data="test")) @mock.patch("aea.cli.registry.publish.shutil.copy") @mock.patch("aea.cli.registry.publish.try_to_load_agent_config") @mock.patch("aea.cli.registry.publish.check_is_author_logged_in") @mock.patch("aea.cli.registry.utils._rm_tarfiles") @mock.patch("aea.cli.registry.publish.os.getcwd", return_value="cwd") @mock.patch("aea.cli.registry.publish._compress") @mock.patch("aea.cli.registry.publish.request_api", return_value={"public_id": "public-id"}) class PublishAgentTestCase(TestCase): """Test case for publish_agent method.""" @mock.patch("aea.cli.registry.publish.is_readme_present", return_value=True) def test_publish_agent_positive(self, is_readme_present_mock, request_api_mock, *mocks): """Test for publish_agent positive result."""
def test_read_repeated_property(repeats, value):
    """The `N*V` shorthand in a grdecl keyword expands to N copies of V."""
    grdecl_text = f"PROP\n {repeats}*{value} /\n"
    expected = [("PROP", [str(value)] * repeats)]
    with patch("builtins.open", mock_open(read_data=grdecl_text)) as fake_file, \
            open_grdecl(fake_file, keywords=["PROP"]) as keyword_stream:
        assert list(keyword_stream) == expected
from homeassistant.components.notify import apns
from homeassistant.core import State

from tests.common import assert_setup_component, get_test_home_assistant

CONFIG = {
    notify.DOMAIN: {
        'platform': 'apns',
        'name': 'test_app',
        'topic': 'testapp.appname',
        'cert_file': 'test_app.pem'
    }
}


# Keep the apns module from touching the real cert file.
@patch('homeassistant.components.notify.apns.open', mock_open(), create=True)
class TestApns(unittest.TestCase):
    """Test the APNS component."""

    def setUp(self):  # pylint: disable=invalid-name
        """Set up things to be run when tests are started."""
        self.hass = get_test_home_assistant()

    def tearDown(self):  # pylint: disable=invalid-name
        """Stop everything that was started."""
        self.hass.stop()

    @patch('os.path.isfile', Mock(return_value=True))
    @patch('os.access', Mock(return_value=True))
    def _setup_notify(self):
        # Guard: the yaml loader must be mocked before setting up notify.
        assert isinstance(apns.load_yaml_config_file, Mock), \
            'Found unmocked load_yaml'
def setUp(self):
    """Run ler_arquivo() against a mocked file containing two '1' lines."""
    # NOTE(review): `mocked` (the patched open mock itself) is passed to
    # ler_arquivo() as if it were a path/stream — confirm that is intended.
    with patch('builtins.open', mock_open(read_data='1\n1\n')) as mocked:
        self.resultado = ler_arquivo(mocked)
def test_read_simple_property(file_data):
    """A plain PROP record yields its four values as strings."""
    fake_open = mock_open(read_data=file_data)
    with patch("builtins.open", fake_open) as file_mock, \
            open_grdecl(file_mock, keywords=["PROP"]) as keyword_stream:
        parsed = list(keyword_stream)
    assert parsed == [("PROP", ["1", "2", "3", "4"])]
class FTPTestCase(unittest.TestCase):
    """Tests for the FTP feeding service: an opt-in live connection test and
    move/error handling after ingestion against a FakeFTP double."""

    def test_it_can_connect(self):
        # Integration test: runs only when the FTP_URL env var is set.
        service = ftp.FTPFeedingService()
        if 'FTP_URL' not in os.environ:
            return
        config = service.config_from_url(os.environ['FTP_URL'])
        self.assertEqual('test', config['path'])
        self.assertEqual('localhost', config['host'])
        config['dest_path'] = tempfile.mkdtemp(prefix=PREFIX)
        provider = {'config': config}
        items = service._update(provider, {})
        self.assertEqual(266, len(items))
        provider['last_updated'] = utcnow()
        # A second update after last_updated is set should ingest nothing new.
        self.assertEqual(0, len(service._update(provider, {})))
        self.assertTrue(os.path.isdir(provider['config']['dest_path']))
        self.assertEqual(266, len(os.listdir(provider['config']['dest_path'])))

    def tearDown(self):
        # Remove any temp dirs created with the shared PREFIX.
        for folder in glob.glob('/tmp/%s*' % (PREFIX)):
            shutil.rmtree(folder)

    @mock.patch.object(ftp, 'ftp_connect', new_callable=FakeFTP)
    @mock.patch.object(ftp.FTPFeedingService, 'get_feed_parser', mock.Mock())
    @mock.patch('builtins.open', mock.mock_open())
    def test_move_ingested(self, ftp_connect):
        """Check that ingested file is moved if "move" is set

        feature requested in SDESK-468
        """
        provider = PROVIDER.copy()
        service = ftp.FTPFeedingService()
        service._update(provider, {})
        # FakeFTP is used as a context manager inside the service.
        mock_ftp = ftp_connect.return_value.__enter__.return_value
        mock_ftp.rename.assert_called_once_with('filename.xml', 'dest_move/filename.xml')

    @mock.patch.object(ftp, 'ftp_connect', new_callable=FakeFTP)
    @mock.patch.object(ftp.FTPFeedingService, 'get_feed_parser', mock.Mock())
    @mock.patch('builtins.open', mock.mock_open())
    def test_move_ingested_default(self, ftp_connect):
        """Check that ingested file is moved to default path if "move" is empty string

        feature requested in SDESK-1452
        """
        provider = PROVIDER.copy()
        provider['config']['move_path'] = ""
        service = ftp.FTPFeedingService()
        service._update(provider, {})
        mock_ftp = ftp_connect.return_value.__enter__.return_value
        dest_path = os.path.join(ftp.DEFAULT_SUCCESS_PATH, "filename.xml")
        mock_ftp.rename.assert_called_once_with('filename.xml', dest_path)

    @mock.patch.object(ftp, 'ftp_connect', new_callable=FakeFTP)
    @mock.patch.object(ftp.FTPFeedingService, 'get_feed_parser', mock.Mock())
    @mock.patch('builtins.open', mock.mock_open())
    def test_move_ingested_no_move(self, ftp_connect):
        """Check that ingested file is not moved if "move" is not set

        feature requested in SDESK-468
        """
        provider = PROVIDER.copy()
        provider['config']['move'] = False
        service = ftp.FTPFeedingService()
        service._update(provider, {})
        mock_ftp = ftp_connect.return_value.__enter__.return_value
        mock_ftp.rename.assert_not_called()

    @mock.patch.object(ftp, 'ftp_connect', new_callable=FakeFTP)
    @mock.patch.object(ftp.FTPFeedingService, 'get_feed_parser', _exception)
    @mock.patch('builtins.open', mock.mock_open())
    def test_move_error(self, ftp_connect):
        """Check that error on ingestion moves item if "move_path_error" is set

        feature requested in SDESK-1452
        """
        # _exception makes the parser raise, driving the error path.
        provider = PROVIDER.copy()
        service = ftp.FTPFeedingService()
        service._update(provider, {})
        mock_ftp = ftp_connect.return_value.__enter__.return_value
        mock_ftp.rename.assert_called_once_with('filename.xml', 'error/filename.xml')

    @mock.patch.object(ftp, 'ftp_connect', new_callable=FakeFTP)
    @mock.patch.object(ftp.FTPFeedingService, 'get_feed_parser', _exception)
    @mock.patch('builtins.open', mock.mock_open())
    def test_move_error_default(self, ftp_connect):
        """Check that error on ingestion use default path if "move_path_error" is empty string

        feature requested in SDESK-1452
        """
        provider = PROVIDER.copy()
        provider['config']['move_path_error'] = ""
        service = ftp.FTPFeedingService()
        service._update(provider, {})
        mock_ftp = ftp_connect.return_value.__enter__.return_value
        dest_path = os.path.join(ftp.DEFAULT_FAILURE_PATH, "filename.xml")
        mock_ftp.rename.assert_called_once_with('filename.xml', dest_path)
def test_returns_file_contents(self):
    """contents_of reads via codecs.open and returns the file's text."""
    fake_open = mock.mock_open(read_data='content')
    with mock.patch('codecs.open', fake_open):
        returned = cnfg.contents_of('/a/path')
        self.assertEqual(returned, 'content')
async def test_parse_overlapping_homekit_json(hass):
    """Test migrating .homekit/pairings.json files when hk- exists too."""
    accessory = Accessory.create_with_info(
        "TestDevice", "example.com", "Test", "0001", "0.1"
    )
    service = accessory.add_service(ServicesTypes.LIGHTBULB)
    on_char = service.add_char(CharacteristicsTypes.ON)
    on_char.value = 0

    accessories = Accessories()
    accessories.add_accessory(accessory)

    fake_controller = await setup_platform(hass)
    pairing = await fake_controller.add_paired_device(accessories)
    pairing.pairing_data = {"AccessoryPairingID": "00:00:00:00:00:00"}

    # os.listdir / os.path are replaced wholesale so the flow sees both the
    # legacy pairings.json and the newer hk- file.
    mock_listdir = mock.Mock()
    mock_listdir.return_value = ["hk-00:00:00:00:00:00", "pairings.json"]
    mock_path = mock.Mock()
    mock_path.exists.side_effect = [True, True]

    # First file to get loaded is .homekit/pairing.json
    read_data_1 = {"00:00:00:00:00:00": {"AccessoryPairingID": "00:00:00:00:00:00"}}
    mock_open_1 = mock.mock_open(read_data=json.dumps(read_data_1))

    # Second file to get loaded is .homekit/hk-00:00:00:00:00:00
    read_data_2 = {"AccessoryPairingID": "00:00:00:00:00:00"}
    mock_open_2 = mock.mock_open(read_data=json.dumps(read_data_2))

    # open() returns these handles in order, one per file read above.
    side_effects = [mock_open_1.return_value, mock_open_2.return_value]

    discovery_info = {
        "name": "TestDevice",
        "host": "127.0.0.1",
        "port": 8080,
        "properties": {"md": "TestDevice", "id": "00:00:00:00:00:00", "c#": 1, "sf": 0},
    }

    flow = _setup_flow_handler(hass)
    pairing_cls_imp = (
        "homeassistant.components.homekit_controller.config_flow.IpPairing"
    )
    with mock.patch(pairing_cls_imp) as pairing_cls:
        pairing_cls.return_value = pairing
        with mock.patch("builtins.open", side_effect=side_effects):
            with mock.patch("os.path", mock_path):
                with mock.patch("os.listdir", mock_listdir):
                    result = await flow.async_step_zeroconf(discovery_info)
        await hass.async_block_till_done()

    assert result["type"] == "create_entry"
    assert result["title"] == "TestDevice"
    assert result["data"]["AccessoryPairingID"] == "00:00:00:00:00:00"
    assert flow.context == {
        "hkid": "00:00:00:00:00:00",
        "title_placeholders": {"name": "TestDevice"},
        "unique_id": "00:00:00:00:00:00",
    }
def test_get_attribute_from_file(self, mock_find_pattern):
    """get_attribute_from_file reads the file and delegates to find_pattern.

    Fix: the ``as mock_open`` alias on the context manager was never used
    and shadowed ``unittest.mock.mock_open``; it has been removed.
    """
    mock_find_pattern.return_value = 'found_attr'
    with mock.patch('shaptools.netweaver.open',
                    mock.mock_open(read_data='filecontent')):
        attr = netweaver.NetweaverInstance.get_attribute_from_file('file', 'attr')
        mock_find_pattern.assert_called_once_with('attr', 'filecontent')
        self.assertEqual('found_attr', attr)
class TestTemperatureReadingManager(TestCase):
    """ Unit Tests for the TemperatureReadingManager Class """

    # Rows as the csv module would yield them: [timestamp, seq, sensor name,
    # three temperature samples, status].
    TEST_TEMP_READINGS = [[
        "2018-09-23 19:56:01.345", "1", "ABC Sensor Temp M301A",
        "20.152", "21.367", "22.005", "OK"
    ], [
        "2018-09-23 20:00:01.453", "2", "ABC Sensor Temp M301A",
        "100.000", "100.000", "100.000", "HIGH_TEMP"
    ], [
        "2018-09-23 20:04:02.001", "3", "ABC Sensor Temp M301A",
        "-50.000", "-50.000", "-50.000", "LOW_TEMP"
    ]]

    def logPoint(self):
        """ Display test specific information """
        currentTest = self.id().split('.')[-1]
        callingFunction = inspect.stack()[1][3]
        print('in %s - %s()' % (currentTest, callingFunction))

    @patch('builtins.open', mock_open(read_data='1'))
    def setUp(self, test_readings=TEST_TEMP_READINGS):
        """ Creates fixtures before each test """
        self.logPoint()
        # This mocks the csv reader to return our test readings
        csv.reader = MagicMock(return_value=test_readings)
        self.reading_manager = TemperatureReadingManager(
            "temp_testresults.csv")
        reading_datetime = datetime.datetime.strptime(
            "2018-09-23 19:56:01.345", "%Y-%m-%d %H:%M:%S.%f")
        self.reading = TemperatureReading(reading_datetime, 1,
                                          "ABC Sensor Temp M301A",
                                          20.152, 21.367, 22.005, "OK")
        self.reading_update = TemperatureReading(reading_datetime, 1,
                                                 "ABC Sensor Temp M301A",
                                                 20.152, 21.367, 22.005,
                                                 "UPDATED")

    def tearDown(self):
        """ Create a test fixture after each test method is run """
        # Fix: narrowed the bare ``except:`` so only filesystem errors
        # (e.g. file already removed) are ignored during cleanup.
        try:
            os.remove("temp_testresults.csv")
        except OSError:
            pass
        self.logPoint()

    def test_constructor_fail(self):
        """ 010A - Raises ValueError when filepath is empty """
        # Fix: the second constructor call was unreachable inside a single
        # assertRaises block; each invalid input now gets its own check.
        with self.assertRaises(ValueError):
            self.temp_sensor = TemperatureReadingManager("")
        with self.assertRaises(ValueError):
            self.temp_sensor = TemperatureReadingManager(None)

    def test_constructor_success(self):
        """ 010B - Creates a TemperatureReadingManager instance """
        self.assertIsInstance(
            self.reading_manager, TemperatureReadingManager,
            "Must create a temperature reading manager with valid attributes")

    def test_add_reading_list_success(self):
        """ 020A - Adds a TemperatureReading to a list of readings """
        self.setUp([])
        initial_len = len(self.reading_manager.get_all_readings())
        self.reading_manager.add_reading(self.reading)
        self.assertEqual(len(self.reading_manager.get_all_readings()),
                         initial_len + 1,
                         "Must add reading to the list of readings")

    def test_add_reading_file_success(self):
        """ 020B - Writes a TemperatureReading to a csv file """
        open("temp_testresults.csv", 'w').close()
        self.setUp([])
        with open("temp_testresults.csv") as f:
            initial_num_rows = sum(1 for line in f)
        self.reading_manager.add_reading(self.reading)
        with open("temp_testresults.csv") as f:
            final_num_rows = sum(1 for line in f)
        self.assertEqual(initial_num_rows + 1, final_num_rows,
                         "Must write reading to the csv file")

    def test_add_reading_seq_num_success(self):
        """ 020C - Creates seq num for new readings """
        self.setUp([])
        self.reading_manager.add_reading(self.reading)
        first_add_seq = self.reading_manager.get_all_readings(
        )[0].get_sequence_num()
        self.reading_manager.add_reading(self.reading)
        second_add_seq = self.reading_manager.get_all_readings(
        )[0].get_sequence_num()
        self.assertEqual(first_add_seq, second_add_seq - 1,
                         "Must create seq num for new readings")

    def test_add_reading_fail(self):
        """ 020D - Returns None if input is invalid """
        self.setUp([])
        self.assertEqual(self.reading_manager.add_reading(None), None,
                         "Must return none for invalid input")

    def test_update_reading_list_success(self):
        """ 030A - Updates TemperatureReading in a list of readings """
        # NOTE(review): the first call's result is discarded; the assertion
        # checks that a repeated update still reports one row updated.
        self.reading_manager.update_reading(self.reading_update)
        self.assertEqual(
            self.reading_manager.update_reading(self.reading_update), 1,
            "Must update reading in the list of readings")

    def test_update_reading_invalid_input_fail(self):
        """ 030B - Returns None if the input is invalid """
        self.assertEqual(self.reading_manager.update_reading(None), None,
                         "Must return None if the input is invalid")

    def test_update_reading_not_in_list_fail(self):
        """ 030C - Doesn't update reading if not in the list """
        reading_datetime = datetime.datetime.strptime(
            "2018-09-23 19:56:01.345", "%Y-%m-%d %H:%M:%S.%f")
        # Sequence number 0 does not exist in the fixture data.
        reading_update_fail = TemperatureReading(reading_datetime, 0,
                                                 "ABC Sensor Temp M301A",
                                                 20.152, 21.367, 22.005,
                                                 "UPDATED")
        self.assertEqual(
            self.reading_manager.update_reading(reading_update_fail), 0,
            "Must return 0 if seq num is not in the list")

    def test_delete_reading_list_success(self):
        """ 040A - Deletes a TemperatureReading from a list of readings """
        initial_len = len(self.reading_manager.get_all_readings())
        self.reading_manager.delete_reading(1)
        self.assertEqual(len(self.reading_manager.get_all_readings()),
                         initial_len - 1,
                         "Must delete reading from the list of readings")

    def test_delete_reading_file_success(self):
        """ 040B - Deletes a TemperatureReading from a csv file """
        initial_num_rows = len(self.reading_manager.get_all_readings())
        self.reading_manager.delete_reading(1)
        with open("temp_testresults.csv") as f:
            final_num_rows = sum(1 for line in f)
        self.assertEqual(initial_num_rows - 1, final_num_rows,
                         "Must delete reading from the csv file")

    def test_delete_reading_invalid_input_fail(self):
        """ 040C - Raises ValueError when input is invalid """
        with self.assertRaises(ValueError):
            self.reading_manager.delete_reading(None)

    def test_delete_reading_not_in_list_fail(self):
        """ 040D - Fails to delete reading if not in the list """
        initial_num_rows = len(self.reading_manager.get_all_readings())
        self.reading_manager.delete_reading(0)
        with open("temp_testresults.csv") as f:
            final_num_rows = sum(1 for line in f)
        self.assertEqual(initial_num_rows, final_num_rows,
                         "Must not delete reading from the csv file")

    def test_get_reading_success(self):
        """ 050A - Gets a TemperatureReading from a list of readings """
        test_reading = self.reading_manager.get_reading(1)
        self.assertEqual(self.reading_manager.get_all_readings()[0],
                         test_reading,
                         "Must get reading from the list of readings")

    def test_get_reading_invalid_input_fail(self):
        """ 050B - Fails to get a TemperatureReading from a list of readings """
        with self.assertRaises(ValueError):
            self.reading_manager.get_reading(None)

    def test_get_reading_not_in_list_fail(self):
        """ 050C - Fails to get a TemperatureReading from a list of readings """
        self.assertEqual(self.reading_manager.get_reading(0), None,
                         "Must return None if seq num is not in the list")

    def test_get_all_readings_success(self):
        """ 060A - Gets a TemperatureReading from a list of readings """
        test_readings = self.reading_manager.get_all_readings()
        self.assertEqual(test_readings,
                         self.reading_manager.get_all_readings(),
                         "Must get reading from the list of readings")

    def test_get_all_readings_empty_success(self):
        """ 060B - Returns empty list if the file is empty """
        self.setUp([])
        test_readings = self.reading_manager.get_all_readings()
        self.assertEqual(test_readings, [],
                         "Must return empty list if the file is empty")
) self.assertEqual(result.exit_code, 0) result = self.runner.invoke( cli, [*CLI_LOG_OPTION, "fingerprint", "skill", public_id], standalone_mode=False, ) self.assertEqual(result.exit_code, 0) def _raise_exception(*args, **kwargs): raise Exception() @mock.patch("aea.cli.fingerprint.Path.open", mock.mock_open()) class FingerprintItemTestCase(TestCase): """Test case for fingerprint_item method.""" @mock.patch("aea.cli.fingerprint.Path.exists", return_value=False) @mock.patch( "aea.cli.fingerprint.ConfigLoader.from_configuration_type", return_value=ConfigLoaderMock(), ) def test_fingerprint_item_package_not_found(self, *mocks): """Test for fingerprint_item package not found result.""" public_id = PublicIdMock() with self.assertRaises(ClickException) as cm: fingerprint_item(ContextMock(), "skill", public_id) self.assertIn("Package not found at path", cm.exception.message) @mock.patch("aea.cli.fingerprint.ConfigLoader.from_configuration_type",
def test_read_prop_raises_error_when_no_forwardslash(undelimited_file_data):
    """A keyword with no terminating '/' must raise ValueError on read."""
    fake_open = mock_open(read_data=undelimited_file_data)
    with patch("builtins.open", fake_open) as fake_file:
        with open_grdecl(fake_file, keywords=["PROP"]) as keyword_iter:
            with pytest.raises(ValueError):
                list(keyword_iter)
def test_client_token_priotizes_config_over_file(monkeypatch):
    """An explicit config token must win over a token stored on disk."""
    monkeypatch.setattr("os.path.exists", MagicMock(return_value=True))
    monkeypatch.setattr("builtins.open", mock_open(read_data="file-token"))
    with set_temporary_config({"cloud.auth_token": "config-token"}):
        assert Client().token == "config-token"
def rss_feed_content(blog_url, config, default_locale):
    # Fixture: render a single canned post through Nikola's RSS renderer and
    # yield the generated feed text with the XML encoding declaration removed.
    default_post = {
        "title": "post title",
        "slug": "awesome_article",
        "date": "2012-10-01 22:41",
        "author": None,
        "tags": "tags",
        "link": "link",
        "description": "description",
        "enclosure": "http://www.example.org/foo.mp3",
        "enclosure_length": "5",
    }
    meta_mock = mock.Mock(return_value=(defaultdict(str, default_post), None))
    with mock.patch("nikola.post.get_meta", meta_mock):
        with \
                mock.patch(
                    "nikola.nikola.utils.os.path.isdir",
                    mock.Mock(return_value=True)), \
                mock.patch(
                    "nikola.nikola.Post.text",
                    mock.Mock(return_value="some long text")
                ):
            with mock.patch("nikola.post.os.path.isfile",
                            mock.Mock(return_value=True)):
                example_post = Post(
                    "source.file",
                    config,
                    "blog_folder",
                    True,
                    {"en": ""},
                    "post.tmpl",
                    FakeCompiler(),
                )
            filename = "testfeed.rss"
            opener_mock = mock.mock_open()
            with mock.patch("nikola.nikola.io.open", opener_mock, create=True):
                Nikola().generic_rss_renderer(
                    default_locale,
                    "blog_title",
                    blog_url,
                    "blog_description",
                    [
                        example_post,
                    ],
                    filename,
                    True,
                    False,
                )
            opener_mock.assert_called_once_with(filename, "w+", encoding="utf-8")
            # Python 3 / unicode strings workaround
            # lxml will complain if the encoding is specified in the
            # xml when running with unicode strings.
            # We do not include this in our content.
            # mock_calls[2:-1] skips the open/__enter__ and __exit__ calls,
            # leaving the write() calls; the first positional arg is the text.
            file_content = [
                call[1][0] for call in opener_mock.mock_calls[2:-1]
            ][0]
            splitted_content = file_content.split("\n")
            # encoding_declaration = splitted_content[0]
            content_without_encoding_declaration = splitted_content[1:]
            yield "\n".join(content_without_encoding_declaration)
def test_open_patch_success(self):
    """open_file must open the path for reading and return the mocked data."""
    mocked_open = mock_open(read_data="data")
    with patch("builtins.open", mocked_open, create=True) as patched:
        handle = main.open_file(path_to_open)
        self.assertEqual(handle.read(), "data")
        patched.assert_called_with(path_to_open, "r")
async def test_camera_content_type(opp, opp_client):
    """Test local_file camera content_type."""
    # Four cameras differing only in file extension; the extension drives the
    # Content-Type of the proxied response.
    cam_config_jpg = {
        "name": "test_jpg",
        "platform": "local_file",
        "file_path": "/path/to/image.jpg",
    }
    cam_config_png = {
        "name": "test_png",
        "platform": "local_file",
        "file_path": "/path/to/image.png",
    }
    cam_config_svg = {
        "name": "test_svg",
        "platform": "local_file",
        "file_path": "/path/to/image.svg",
    }
    cam_config_noext = {
        "name": "test_no_ext",
        "platform": "local_file",
        "file_path": "/path/to/image",
    }

    await async_setup_component(
        opp,
        "camera",
        {
            "camera": [cam_config_jpg, cam_config_png, cam_config_svg,
                       cam_config_noext]
        },
    )
    await opp.async_block_till_done()

    client = await opp_client()

    image = "hello"
    # Every camera reads the same mocked file contents.
    m_open = mock.mock_open(read_data=image.encode())
    with mock.patch("openpeerpower.components.local_file.camera.open",
                    m_open, create=True):
        resp_1 = await client.get("/api/camera_proxy/camera.test_jpg")
        resp_2 = await client.get("/api/camera_proxy/camera.test_png")
        resp_3 = await client.get("/api/camera_proxy/camera.test_svg")
        resp_4 = await client.get("/api/camera_proxy/camera.test_no_ext")

        assert resp_1.status == 200
        assert resp_1.content_type == "image/jpeg"
        body = await resp_1.text()
        assert body == image

        assert resp_2.status == 200
        assert resp_2.content_type == "image/png"
        body = await resp_2.text()
        assert body == image

        assert resp_3.status == 200
        assert resp_3.content_type == "image/svg+xml"
        body = await resp_3.text()
        assert body == image

        # default mime type
        assert resp_4.status == 200
        assert resp_4.content_type == "image/jpeg"
        body = await resp_4.text()
        assert body == image
def test_client_token_initializes_from_file(monkeypatch):
    """With no config token set, the client falls back to the token file."""
    monkeypatch.setattr("os.path.exists", MagicMock(return_value=True))
    monkeypatch.setattr("builtins.open", mock_open(read_data="TOKEN"))
    with set_temporary_config({"cloud.auth_token": None}):
        assert Client().token == "TOKEN"
def test_file_config():
    """file_config opens the default path and parses empty JSON to {}."""
    from bigchaindb.config_utils import file_config, CONFIG_DEFAULT_PATH
    fake_open = mock_open(read_data='{}')
    with patch('builtins.open', fake_open) as patched:
        parsed = file_config()
        patched.assert_called_once_with(CONFIG_DEFAULT_PATH)
        assert parsed == {}
def test_read_string():
    """Quoted values keep embedded spaces; bare words pass through unquoted."""
    file_text = "PROP\n 'FOO BAR' FOO /\n"
    expected = [("PROP", ["FOO BAR", "FOO"])]
    with patch("builtins.open", mock_open(read_data=file_text)) as fake_file:
        with open_grdecl(fake_file, keywords=["PROP"]) as keyword_iter:
            assert list(keyword_iter) == expected
from unittest.mock import mock_open, patch

from pyramid.scripts.common import get_config_loader

from c2cwsgiutils import get_config_defaults


# The paste.deploy config file is mocked; VARIABLE comes from the (patched)
# process environment and must be interpolated into the ini value.
@patch(
    "paste.deploy.loadwsgi.open",
    mock_open(
        read_data="""
[app:main]
variable = %(VARIABLE)s
"""
    ),
)
@patch.dict("c2cwsgiutils.os.environ", {"VARIABLE": "value"})
def test_loader_success() -> None:
    # The c2c: scheme selects the c2cwsgiutils loader.
    loader = get_config_loader("c2c:///app/production.ini")
    assert 'c2cwsgiutils.loader.Loader(uri="c2c:///app/production.ini")' == repr(loader)
    assert "value" == loader._get_defaults()["VARIABLE"]  # pylint: disable=W0212
    assert "value" == loader.get_settings("app:main")["variable"]
def test_invalid_file_config():
    """Malformed JSON in the config file must raise ConfigurationError."""
    from bigchaindb.config_utils import file_config
    from bigchaindb.common import exceptions
    bad_json = '{_INVALID_JSON_}'
    with patch('builtins.open', mock_open(read_data=bad_json)):
        with pytest.raises(exceptions.ConfigurationError):
            file_config()
def test_with_failures(self):
    # Three SBD devices: /dev1 fails "list", /dev2 fails "dump", /dev3
    # succeeds fully. Each failure yields None in the result plus a warning.
    config_data = 'SBD_DEVICE="/dev1;/dev2;/dev3"\n'
    self.config.services.is_enabled("sbd")
    self.config.fs.exists(settings.sbd_config, return_value=True)
    self.config.fs.open(
        settings.sbd_config,
        mock.mock_open(read_data=config_data)()
    )
    # return_code=1 marks the call as failed for that device.
    self.config.runner.sbd.get_device_info("/dev1", stdout="1", return_code=1)
    self.config.runner.sbd.get_device_info("/dev2", stdout="2", name="list2")
    self.config.runner.sbd.get_device_dump("/dev2", stdout="4", return_code=1)
    self.config.runner.sbd.get_device_info("/dev3", stdout="5", name="list3")
    self.config.runner.sbd.get_device_dump("/dev3", stdout="6", name="dump2")
    expected_output = [
        {
            "device": "/dev1",
            "list": None,
            "dump": None,
        },
        {
            "device": "/dev2",
            "list": "2",
            "dump": None,
        },
        {
            "device": "/dev3",
            "list": "5",
            "dump": "6",
        },
    ]
    self.assertEqual(
        expected_output,
        cmd_sbd.get_local_devices_info(self.env_assist.get_env(), dump=True),
    )
    self.env_assist.assert_reports([
        (
            Severities.WARNING,
            reports.codes.SBD_DEVICE_LIST_ERROR,
            {
                "device": "/dev1",
                "reason": "1"
            },
        ),
        (
            Severities.WARNING,
            reports.codes.SBD_DEVICE_DUMP_ERROR,
            {
                "device": "/dev2",
                "reason": "4"
            },
        ),
    ])
@mock.patch("aea.cli.common.os.path.exists", return_value=False) @mock.patch("aea.cli.common.os.makedirs") class InitConfigFolderTestCase(TestCase): """Test case for _init_cli_config method.""" def test_init_cli_config_positive(self, makedirs_mock, exists_mock, dirname_mock): """Test for _init_cli_config method positive result.""" _init_cli_config() dirname_mock.assert_called_once() exists_mock.assert_called_once_with("dir-name") makedirs_mock.assert_called_once_with("dir-name") @mock.patch("aea.cli.common._get_or_create_cli_config") @mock.patch("aea.cli.common.yaml.dump") @mock.patch("builtins.open", mock.mock_open()) class UpdateCLIConfigTestCase(TestCase): """Test case for _update_cli_config method.""" def test_update_cli_config_positive(self, dump_mock, icf_mock): """Test for _update_cli_config method positive result.""" _update_cli_config({"some": "config"}) icf_mock.assert_called_once() dump_mock.assert_called_once() def _raise_yamlerror(*args): raise YAMLError() def _raise_file_not_found_error(*args): raise FileNotFoundError()
def test_open_maybe_zipped_normal_file_with_zip_in_name(self):
    """A '.zip' inside a directory name must not trigger archive handling."""
    path = '/path/to/fakearchive.zip.other/file.txt'
    fake_open = mock.mock_open(read_data="data")
    with mock.patch('builtins.open', fake_open) as patched:
        open_maybe_zipped(path)
        patched.assert_called_once_with(path, mode='r')
def test_ip_glob_format(self):
    """Individual IPs must be collapsed into CIDR/range glob notation.

    The target list is generated instead of hand-written: a sparse set of
    192.168.10.x hosts, one pre-aggregated /16, and two full /24 ranges
    (10.16.0.0-255 and 10.15.0.0-255), in the same order as before.
    """
    sparse_octets = [0, 1, 2, 3, 6, 7, 8, 9, 10, 11, 12, 14, 15]
    targets = ["192.168.10.%d" % octet for octet in sparse_octets]
    targets.append("172.16.0.0/16")
    targets.extend("10.16.0.%d" % octet for octet in range(256))
    targets.extend("10.15.0.%d" % octet for octet in range(256))

    reports_path = "fake/live-targets.work"
    expected_write = [
        call(reports_path, "wt"),
        call().write('10.15.0.0/24\n'),
        call().write('10.16.0.0/24\n'),
        call().write('192.168.10.0/30\n'),
        call().write('192.168.10.6-12\n'),
        call().write('192.168.10.14/31\n')
    ]
    mock_obj = mock_open()
    with patch('builtins.open', mock_obj) as mopen:
        target_optimize = TargetOptimization(reports_path)
        target_optimize.save(targets)
        mock_obj().writelines.assert_called_once()
        mopen.assert_has_calls(expected_write, any_order=True)
def test_reset_mock_on_mock_open_issue_18622(self): a = mock.mock_open() a.reset_mock()
def test_read_repeated_string_literal():
    """3*'INP ' must expand to three copies of the quoted string."""
    file_text = "PROP\n 3*'INP ' /\n"
    expected = [("PROP", ["INP "] * 3)]
    with patch("builtins.open", mock_open(read_data=file_text)) as fake_file:
        with open_grdecl(fake_file, keywords=["PROP"]) as keyword_iter:
            assert list(keyword_iter) == expected