async def test_reader_read_exception(ws_key, key_data, loop) -> None:
    hresp = mock.Mock()
    hresp.status = 101
    hresp.headers = {
        hdrs.UPGRADE: hdrs.WEBSOCKET,
        hdrs.CONNECTION: hdrs.UPGRADE,
        hdrs.SEC_WEBSOCKET_ACCEPT: ws_key,
    }
    with mock.patch('aiohttp.client.WebSocketWriter') as WebSocketWriter:
        with mock.patch('aiohttp.client.os') as m_os:
            with mock.patch('aiohttp.client.ClientSession.request') as m_req:
                m_os.urandom.return_value = key_data
                m_req.return_value = loop.create_future()
                m_req.return_value.set_result(hresp)
                writer = mock.Mock()
                WebSocketWriter.return_value = writer
                writer.close = make_mocked_coro()

                session = aiohttp.ClientSession(loop=loop)
                resp = await session.ws_connect('http://test.org')

                exc = ValueError()
                resp._reader.set_exception(exc)

                msg = await resp.receive()
                assert msg.type == aiohttp.WSMsgType.ERROR
                assert resp.exception() is exc

                await session.close()

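The make_mocked_coro helper used above (and again in test_ws_connect_deflate_per_message below) comes from aiohttp.test_utils; if that import is unavailable, a minimal equivalent can be sketched as:

from unittest import mock

def make_mocked_coro(return_value=None):
    # Minimal sketch of aiohttp.test_utils.make_mocked_coro (not the exact
    # aiohttp implementation): calling the returned mock produces an awaitable
    # that resolves to return_value, while call assertions still work.
    async def coro(*args, **kwargs):
        return return_value
    return mock.Mock(wraps=coro)
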
def test_log_message(self):
    # no subscription on level
    with mock.patch('logging.getLogger') as mocked:
        mocked_logger = mock.Mock(spec=logging.Logger)
        mocked.return_value = mocked_logger

        raw = [b'engine.1.INFO.EMA', b'test']
        self.watcher.log_message(raw)
        mocked_logger.log.assert_called_once_with(ema_logging.INFO,
                                                  '[engine.1] test')

    with mock.patch('logging.getLogger') as mocked:
        mocked_logger = mock.Mock(spec=logging.Logger)
        mocked.return_value = mocked_logger

        raw = [b'engine.1.DEBUG.EMA', b'test']
        self.watcher.log_message(raw)
        mocked_logger.log.assert_called_once_with(ema_logging.DEBUG,
                                                  '[engine.1] test')

    with mock.patch('logging.getLogger') as mocked:
        mocked_logger = mock.Mock(spec=logging.Logger)
        mocked.return_value = mocked_logger

        raw = [b'engine.1.DEBUG', b'test', b'more']
        self.watcher.log_message(raw)
        raw = [r.decode('utf-8') for r in raw]
        mocked_logger.error.assert_called_once_with(
            "Invalid log message: %s" % raw)

    with mock.patch('logging.getLogger') as mocked:
        mocked_logger = mock.Mock(spec=logging.Logger)
        mocked.return_value = mocked_logger

        raw = [b'engine1DEBUG', b'test']
        self.watcher.log_message(raw)
        raw = [r.decode('utf-8') for r in raw]
        mocked_logger.error.assert_called_once_with(
            "Invalid log message: %s" % raw)

async def test_setup_entry_successful(hass):
    """Test setup entry is successful."""
    entry = Mock()
    entry.data = {'host': '1.2.3.4', 'port': 80,
                  'api_key': '1234567890ABCDEF'}

    with patch.object(hass, 'async_create_task') as mock_add_job, \
            patch.object(hass, 'config_entries') as mock_config_entries, \
            patch('pydeconz.DeconzSession.async_get_state',
                  return_value=mock_coro(CONFIG)), \
            patch('pydeconz.DeconzSession.start', return_value=True), \
            patch('homeassistant.helpers.device_registry.async_get_registry',
                  return_value=mock_coro(Mock())):
        assert await deconz.async_setup_entry(hass, entry) is True

    assert hass.data[deconz.DOMAIN]
    assert hass.data[deconz.DATA_DECONZ_ID] == {}
    assert len(hass.data[deconz.DATA_DECONZ_UNSUB]) == 1
    assert len(mock_add_job.mock_calls) == 5
    assert len(mock_config_entries.async_forward_entry_setup.mock_calls) == 5
    assert mock_config_entries.async_forward_entry_setup.mock_calls[0][1] == \
        (entry, 'binary_sensor')
    assert mock_config_entries.async_forward_entry_setup.mock_calls[1][1] == \
        (entry, 'light')
    assert mock_config_entries.async_forward_entry_setup.mock_calls[2][1] == \
        (entry, 'scene')
    assert mock_config_entries.async_forward_entry_setup.mock_calls[3][1] == \
        (entry, 'sensor')
    assert mock_config_entries.async_forward_entry_setup.mock_calls[4][1] == \
        (entry, 'switch')

async def test_ws_connect_deflate_per_message(loop, ws_key, key_data) -> None:
    resp = mock.Mock()
    resp.status = 101
    resp.headers = {
        hdrs.UPGRADE: hdrs.WEBSOCKET,
        hdrs.CONNECTION: hdrs.UPGRADE,
        hdrs.SEC_WEBSOCKET_ACCEPT: ws_key,
        hdrs.SEC_WEBSOCKET_EXTENSIONS: 'permessage-deflate',
    }
    with mock.patch('aiohttp.client.WebSocketWriter') as WebSocketWriter:
        with mock.patch('aiohttp.client.os') as m_os:
            with mock.patch('aiohttp.client.ClientSession.request') as m_req:
                m_os.urandom.return_value = key_data
                m_req.return_value = loop.create_future()
                m_req.return_value.set_result(resp)
                writer = WebSocketWriter.return_value = mock.Mock()
                send = writer.send = make_mocked_coro()

                session = aiohttp.ClientSession(loop=loop)
                resp = await session.ws_connect('http://test.org')

                await resp.send_str('string', compress=-1)
                send.assert_called_with('string', binary=False, compress=-1)

                await resp.send_bytes(b'bytes', compress=15)
                send.assert_called_with(b'bytes', binary=True, compress=15)

                await resp.send_json([{}], compress=-9)
                send.assert_called_with('[{}]', binary=False, compress=-9)

                await session.close()

def setUp(self):
    self.patcher1 = patch("samireland.views.EditableText.objects.create")
    self.patcher2 = patch("samireland.views.EditableText.objects.get")
    self.mock_create = self.patcher1.start()
    self.mock_get = self.patcher2.start()
    self.mock_create.return_value = "EDTEXT"
    self.mock_get.side_effect = EditableText.DoesNotExist

def test__run_exc(worker, loop):
    with mock.patch('aiohttp.worker.os') as m_os:
        m_os.getpid.return_value = 1
        m_os.getppid.return_value = 1

        worker.servers = [mock.Mock()]
        worker.ppid = 1
        worker.alive = True
        worker.sockets = []
        worker.log = mock.Mock()
        worker.loop = mock.Mock()
        worker.notify = mock.Mock()

        with mock.patch('aiohttp.worker.asyncio.sleep') as m_sleep:
            slp = asyncio.Future(loop=loop)
            slp.set_exception(KeyboardInterrupt)
            m_sleep.return_value = slp

            worker.close = mock.Mock()
            worker.close.return_value = asyncio.Future(loop=loop)
            worker.close.return_value.set_result(1)

            loop.run_until_complete(worker._run())

        assert m_sleep.called
        assert worker.close.called

async def test_add_new_device(hass):
    """Test adding a new device generates a signal for platforms."""
    entry = Mock()
    entry.data = {'host': '1.2.3.4', 'port': 80,
                  'api_key': '1234567890ABCDEF',
                  'allow_clip_sensor': False}

    new_event = {
        "t": "event",
        "e": "added",
        "r": "sensors",
        "id": "1",
        "sensor": {
            "config": {
                "on": "True",
                "reachable": "True"
            },
            "name": "event",
            "state": {},
            "type": "ZHASwitch"
        }
    }

    with patch.object(deconz, 'async_dispatcher_send') as mock_dispatch_send, \
            patch('pydeconz.DeconzSession.async_get_state',
                  return_value=mock_coro(CONFIG)), \
            patch('pydeconz.DeconzSession.start', return_value=True):
        assert await deconz.async_setup_entry(hass, entry) is True
        hass.data[deconz.DOMAIN].async_event_handler(new_event)
        await hass.async_block_till_done()

    assert len(mock_dispatch_send.mock_calls) == 1
    assert len(mock_dispatch_send.mock_calls[0]) == 3

def test_handle_message(self):
    topic = "%s/client/%s" % (self.env.domain, self.env.uuid)
    with mock.patch.object(self.mqtt, "commandReceived") as m:
        mocked_handler.simulate_message(topic, "{test}")
        m.assert_called_with(topic, "{test}")

    e = EventMaker()
    msg = e.Event(e.Unknown)
    with mock.patch.object(self.mqtt.log, "debug") as m:
        # unhandled message
        mocked_handler.simulate_message(topic, etree.tostring(msg))
        assert m.called

    with mock.patch.object(self.mqtt.log, "error") as m:
        mocked_handler.simulate_message(topic, "<askkl><kmnkl&A&>")
        assert m.called

    msg = e.Event(e.ClientPoll())
    with mock.patch("gosa.client.mqtt_service.zope.event.notify") as m, \
            mock.patch("gosa.client.mqtt_service.random.randint",
                       return_value=0):
        mocked_handler.simulate_message(topic, etree.tostring(msg))
        args, kwargs = m.call_args
        assert isinstance(args[0], Resume)
        assert mocked_handler.send_event.called

def test_midnight_turnover_after_midnight_inside_period(self):
    """Test midnight turnover setting after midnight inside period."""
    test_time = self.hass.config.time_zone.localize(
        datetime(2019, 1, 10, 21, 0, 0)).astimezone(pytz.UTC)
    config = {
        'binary_sensor': [
            {
                'platform': 'tod',
                'name': 'Night',
                'after': '22:00',
                'before': '5:00'
            },
        ]
    }
    with patch('homeassistant.components.tod.binary_sensor.dt_util.utcnow',
               return_value=test_time):
        setup_component(self.hass, 'binary_sensor', config)

        state = self.hass.states.get('binary_sensor.night')
        assert state.state == STATE_OFF

    self.hass.block_till_done()

    with patch('homeassistant.components.tod.binary_sensor.dt_util.utcnow',
               return_value=test_time + timedelta(hours=1)):
        self.hass.bus.fire(ha.EVENT_TIME_CHANGED, {
            ha.ATTR_NOW: test_time + timedelta(hours=1)})
        self.hass.block_till_done()
        state = self.hass.states.get('binary_sensor.night')
        assert state.state == STATE_ON

def test_webhooks_when_update_task(settings):
    settings.WEBHOOKS_ENABLED = True
    project = f.ProjectFactory()
    f.WebhookFactory.create(project=project)
    f.WebhookFactory.create(project=project)

    obj = f.TaskFactory.create(project=project)

    with patch("taiga.webhooks.tasks._send_request") as send_request_mock:
        services.take_snapshot(obj, user=obj.owner)
        assert send_request_mock.call_count == 2

    obj.subject = "test webhook update"
    obj.save()

    with patch("taiga.webhooks.tasks._send_request") as send_request_mock:
        services.take_snapshot(obj, user=obj.owner, comment="test_comment")
        assert send_request_mock.call_count == 2

        (webhook_id, url, key, data) = send_request_mock.call_args[0]
        assert data["action"] == "change"
        assert data["type"] == "task"
        assert data["by"]["id"] == obj.owner.id
        assert "date" in data
        assert data["data"]["id"] == obj.id
        assert data["data"]["subject"] == obj.subject
        assert data["change"]["comment"] == "test_comment"
        assert data["change"]["diff"]["subject"]["to"] == data["data"]["subject"]
        assert data["change"]["diff"]["subject"]["from"] != data["data"]["subject"]

def test_entry_existence(self, capsys):
    """
    Test recognition of already inserted entries.
    """
    apath = os.path.abspath(os.getcwd())
    sqlpath = os.path.join(apath, "bbarchivist.db")
    with mock.patch('bbarchivist.iniconfig.config_homepath',
                    mock.MagicMock(return_value=apath)):
        try:
            cnxn = sqlite3.connect(sqlpath)
            with cnxn:
                crsr = cnxn.cursor()
                crsr.execute("DROP TABLE IF EXISTS Swrelease")
                reqid = "INTEGER PRIMARY KEY"
                reqs = "TEXT NOT NULL UNIQUE COLLATE NOCASE"
                reqs2 = "TEXT"
                table = "Swrelease(Id {0}, Os {1}, Software {1}, Available {2}, Date {2})".format(
                    *(reqid, reqs, reqs2))
                crsr.execute("CREATE TABLE IF NOT EXISTS " + table)
        except sqlite3.Error:
            assert False
        assert not bs.check_exists("70.OSVERSION", "80.SWVERSION")
        bs.insert("70.OSVERSION", "80.SWVERSION", "available")
        assert bs.check_exists("70.OSVERSION", "80.SWVERSION")
        with mock.patch("sqlite3.connect",
                        mock.MagicMock(side_effect=sqlite3.Error)):
            bs.check_exists("70.OSVERSION", "80.SWVERSION")
            assert "\n" in capsys.readouterr()[0]

def test_prettify(project_1_fixture):
    def colored_side_effect(v, c='', **kwargs):
        return '{}: {}{}'.format(c, v, kwargs)

    with patch('srmlf.project.colored', side_effect=colored_side_effect):
        with patch('prettytable.PrettyTable') as pt:
            with LocaleMock(('en_US', 'UTF-8'),
                            [locale.LC_TIME, locale.LC_MONETARY]):
                table = project_1_fixture.prettify()
                pt.assert_any_call(['red: Description{}', 'red: Date{}',
                                    'red: Alice{}', 'red: Bob{}'])
                assert isinstance(table, MagicMock)
                assert table.add_row.call_count == 4
                table.add_row.assert_any_call(
                    ['', ': TOTAL{\'attrs\': [\'bold\']}',
                     ': %s{\'attrs\': [\'bold\']}' % locale.currency(10.0),
                     ': %s{\'attrs\': [\'bold\']}' % locale.currency(5.0)
                     ])
                table.add_row.assert_any_call(
                    ['', '',
                     ': 66.67%{\'attrs\': [\'bold\']}',
                     ': 33.33%{\'attrs\': [\'bold\']}'
                     ])

def test_zwave_ready_wait(hass, mock_openzwave):
    """Test that zwave continues after waiting for network ready."""
    # Initialize zwave
    yield from async_setup_component(hass, 'zwave', {'zwave': {}})
    yield from hass.async_block_till_done()

    sleeps = []

    def utcnow():
        return datetime.fromtimestamp(len(sleeps))

    asyncio_sleep = asyncio.sleep

    @asyncio.coroutine
    def sleep(duration, loop=None):
        if duration > 0:
            sleeps.append(duration)
        yield from asyncio_sleep(0)

    with patch('homeassistant.components.zwave.dt_util.utcnow', new=utcnow):
        with patch('asyncio.sleep', new=sleep):
            with patch.object(zwave, '_LOGGER') as mock_logger:
                hass.data[DATA_NETWORK].state = MockNetwork.STATE_STARTED
                hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
                yield from hass.async_block_till_done()

                assert len(sleeps) == const.NETWORK_READY_WAIT_SECS
                assert mock_logger.warning.called
                assert len(mock_logger.warning.mock_calls) == 1
                assert mock_logger.warning.mock_calls[0][1][1] == \
                    const.NETWORK_READY_WAIT_SECS

def test_new_object_with_two_webhook(settings):
    settings.WEBHOOKS_ENABLED = True
    project = f.ProjectFactory()
    f.WebhookFactory.create(project=project)
    f.WebhookFactory.create(project=project)

    objects = [
        f.IssueFactory.create(project=project),
        f.TaskFactory.create(project=project),
        f.UserStoryFactory.create(project=project),
        f.WikiPageFactory.create(project=project)
    ]

    for obj in objects:
        with patch('taiga.webhooks.tasks.create_webhook') as create_webhook_mock:
            services.take_snapshot(obj, user=obj.owner, comment="test")
            assert create_webhook_mock.call_count == 2

    for obj in objects:
        with patch('taiga.webhooks.tasks.change_webhook') as change_webhook_mock:
            services.take_snapshot(obj, user=obj.owner, comment="test")
            assert change_webhook_mock.call_count == 2

    for obj in objects:
        with patch('taiga.webhooks.tasks.change_webhook') as change_webhook_mock:
            services.take_snapshot(obj, user=obj.owner)
            assert change_webhook_mock.call_count == 0

    for obj in objects:
        with patch('taiga.webhooks.tasks.delete_webhook') as delete_webhook_mock:
            services.take_snapshot(obj, user=obj.owner, comment="test",
                                   delete=True)
            assert delete_webhook_mock.call_count == 2

def test_blacklist(self):
    with patch('recommendation.mozlog.middleware.current_app') as app:
        mock_request = self._request(path=LOG_PATH_BLACKLIST[0])
        mock_response = self._response()
        with patch(REQUEST_PATH, mock_request):
            request_summary(mock_response)
            eq_(app.logger.info.call_count, 0)

def test_create_post(self):
    perm = Permission.objects.get(codename='add_milestone')
    self.admin.user_permissions.add(perm)
    with patch('traq.projects.forms.MilestoneForm.is_valid',
               return_value=True) as data:
        with patch('traq.projects.forms.MilestoneForm.save',
                   return_value=True):
            response = self.client.post(
                reverse('milestones-create', args=[self.project.pk,]),
                data=data)
            self.assertEqual(response.status_code, 302)

def test_edit_post(self):
    perm = Permission.objects.get(codename='change_milestone')
    self.admin.user_permissions.add(perm)
    with patch('traq.projects.forms.MilestoneForm.is_valid',
               return_value=True) as data:
        with patch('traq.projects.forms.MilestoneForm.save',
                   return_value=True):
            response = self.client.post(
                reverse('milestones-edit', args=[self.milestone.pk,]))
            # assertTrue(status_code, 302) would always pass (302 is treated
            # as the failure message); assertEqual checks the actual code.
            self.assertEqual(response.status_code, 302)

def test_submission_delete(submission_page, terminal, refresh_token):
    # Log in
    submission_page.config.refresh_token = refresh_token
    submission_page.oauth.authorize()

    # Can't delete the submission
    curses.flash.reset_mock()
    submission_page.controller.trigger('d')
    assert curses.flash.called

    # Move down to the first comment
    with mock.patch.object(submission_page, 'clear_input_queue'):
        submission_page.controller.trigger('j')

    # Try to delete the first comment - wrong author
    curses.flash.reset_mock()
    submission_page.controller.trigger('d')
    assert curses.flash.called

    # Spoof the author and try to delete again
    data = submission_page.content.get(submission_page.nav.absolute_index)
    data['author'] = submission_page.reddit.user.name
    with mock.patch('praw.objects.Comment.delete') as delete, \
            mock.patch.object(terminal.stdscr, 'getch') as getch, \
            mock.patch('time.sleep'):
        getch.return_value = ord('y')
        submission_page.controller.trigger('d')
        assert delete.called

def test_update_stale(self):
    """Test stalled update."""
    scanner = get_component(self.hass, 'device_tracker.test').SCANNER
    scanner.reset()
    scanner.come_home('DEV1')

    register_time = datetime(2015, 9, 15, 23, tzinfo=dt_util.UTC)
    scan_time = datetime(2015, 9, 15, 23, 1, tzinfo=dt_util.UTC)

    with patch('homeassistant.components.device_tracker.dt_util.utcnow',
               return_value=register_time):
        with assert_setup_component(1, device_tracker.DOMAIN):
            assert setup_component(self.hass, device_tracker.DOMAIN, {
                device_tracker.DOMAIN: {
                    CONF_PLATFORM: 'test',
                    device_tracker.CONF_CONSIDER_HOME: 59,
                }})
            self.hass.block_till_done()

    assert STATE_HOME == \
        self.hass.states.get('device_tracker.dev1').state

    scanner.leave_home('DEV1')

    with patch('homeassistant.components.device_tracker.dt_util.utcnow',
               return_value=scan_time):
        fire_time_changed(self.hass, scan_time)
        self.hass.block_till_done()

    assert STATE_NOT_HOME == \
        self.hass.states.get('device_tracker.dev1').state

def test_submission_save(submission_page, refresh_token):
    # Log in
    submission_page.config.refresh_token = refresh_token
    submission_page.oauth.authorize()

    # Test save on the submission
    with mock.patch('praw.objects.Submission.save') as save, \
            mock.patch('praw.objects.Submission.unsave') as unsave:
        data = submission_page.content.get(submission_page.nav.absolute_index)

        # Save
        submission_page.controller.trigger('w')
        assert save.called
        assert data['saved'] is True

        # Unsave
        submission_page.controller.trigger('w')
        assert unsave.called
        assert data['saved'] is False

        # Save - exception
        save.side_effect = KeyboardInterrupt
        submission_page.controller.trigger('w')
        assert data['saved'] is False

def test_submission_comment_save(submission_page, terminal, refresh_token):
    # Log in
    submission_page.config.refresh_token = refresh_token
    submission_page.oauth.authorize()

    # View a submission with the pager
    with mock.patch.object(terminal, 'open_pager'):
        submission_page.controller.trigger('l')
        assert terminal.open_pager.called

    # Move down to the first comment
    with mock.patch.object(submission_page, 'clear_input_queue'):
        submission_page.controller.trigger('j')
    data = submission_page.content.get(submission_page.nav.absolute_index)

    # Test save on the comment
    with mock.patch('praw.objects.Comment.save') as save, \
            mock.patch('praw.objects.Comment.unsave') as unsave:

        # Save
        submission_page.controller.trigger('w')
        assert save.called
        assert data['saved'] is True

        # Unsave
        submission_page.controller.trigger('w')
        assert unsave.called
        assert data['saved'] is False

        # Save - exception
        save.side_effect = KeyboardInterrupt
        submission_page.controller.trigger('w')
        assert data['saved'] is False

def test_random_song(self):
    with mock.patch("pubbot.squeezecenter.receivers.command") as command:
        with mock.patch("random.choice") as choice:
            choice.side_effect = lambda x: x[0]
            r = receivers.random_song(None, content='random')
            command.assert_called_with(
                "playlist loadtracks track.titlesearch=A")
            self.assertEqual(r['had_side_effect'], True)

def test_saving_and_loading(hass):
    """Test that we're saving and loading correctly."""
    class TestFlow(data_entry_flow.FlowHandler):
        VERSION = 5

        @asyncio.coroutine
        def async_step_init(self, user_input=None):
            return self.async_create_entry(
                title='Test Title',
                data={
                    'token': 'abcd'
                }
            )

    with patch.dict(config_entries.HANDLERS, {'test': TestFlow}):
        yield from hass.config_entries.flow.async_init('test')

    class Test2Flow(data_entry_flow.FlowHandler):
        VERSION = 3

        @asyncio.coroutine
        def async_step_init(self, user_input=None):
            return self.async_create_entry(
                title='Test 2 Title',
                data={
                    'username': '******'
                }
            )

    json_path = 'homeassistant.util.json.open'

    with patch('homeassistant.config_entries.HANDLERS.get',
               return_value=Test2Flow), \
            patch.object(config_entries, 'SAVE_DELAY', 0):
        yield from hass.config_entries.flow.async_init('test')

    with patch(json_path, mock_open(), create=True) as mock_write:
        # To trigger the call_later
        yield from asyncio.sleep(0, loop=hass.loop)
        # To execute the save
        yield from hass.async_block_till_done()

    # Mock open calls are: open file, context enter, write, context leave
    written = mock_write.mock_calls[2][1][0]

    # Now load written data in new config manager
    manager = config_entries.ConfigEntries(hass, {})

    with patch('os.path.isfile', return_value=True), \
            patch(json_path, mock_open(read_data=written), create=True):
        yield from manager.async_load()

    # Ensure same order
    for orig, loaded in zip(hass.config_entries.async_entries(),
                            manager.async_entries()):
        assert orig.version == loaded.version
        assert orig.domain == loaded.domain
        assert orig.title == loaded.title
        assert orig.data == loaded.data
        assert orig.source == loaded.source

def test_new_object_with_two_webhook_signals(settings):
    settings.WEBHOOKS_ENABLED = True
    project = f.ProjectFactory()
    f.WebhookFactory.create(project=project)
    f.WebhookFactory.create(project=project)

    objects = [
        f.IssueFactory.create(project=project),
        f.TaskFactory.create(project=project),
        f.UserStoryFactory.create(project=project),
        f.WikiPageFactory.create(project=project)
    ]

    response = Mock(status_code=200, headers={}, content="ok")
    response.elapsed.total_seconds.return_value = 100

    for obj in objects:
        with patch("taiga.webhooks.tasks.requests.Session.send",
                   return_value=response) as session_send_mock:
            services.take_snapshot(obj, user=obj.owner, comment="test")
            assert session_send_mock.call_count == 2

    for obj in objects:
        with patch("taiga.webhooks.tasks.requests.Session.send",
                   return_value=response) as session_send_mock:
            services.take_snapshot(obj, user=obj.owner, comment="test")
            assert session_send_mock.call_count == 2

    for obj in objects:
        with patch("taiga.webhooks.tasks.requests.Session.send",
                   return_value=response) as session_send_mock:
            services.take_snapshot(obj, user=obj.owner)
            assert session_send_mock.call_count == 0

    for obj in objects:
        with patch("taiga.webhooks.tasks.requests.Session.send",
                   return_value=response) as session_send_mock:
            services.take_snapshot(obj, user=obj.owner, comment="test",
                                   delete=True)
            assert session_send_mock.call_count == 2

def setUp(self):
    super().setUp()

    class Options:
        build_parameters = []
        kconfigfile = None
        kconfigflavour = None
        kdefconfig = []
        kconfigs = []
        build_attributes = []

    self.options = Options()
    self.project_options = snapcraft.ProjectOptions(
        target_deb_arch=self.deb_arch)

    patcher = mock.patch('snapcraft.internal.common.run')
    self.run_mock = patcher.start()
    self.addCleanup(patcher.stop)

    patcher = mock.patch('snapcraft.ProjectOptions.is_cross_compiling')
    patcher.start()
    self.addCleanup(patcher.stop)

    patcher = mock.patch.dict(os.environ, {})
    self.env_mock = patcher.start()
    self.addCleanup(patcher.stop)

def setUp(self):
    warnings.simplefilter("ignore", Warning)
    self.readListFromSettingsMock = patch(
        'ReText.window.readListFromSettings', return_value=[]).start()
    self.writeListToSettingsMock = patch(
        'ReText.window.writeListToSettings').start()
    self.writeToSettingsMock = patch(
        'ReText.window.writeToSettings').start()
    self.globalSettingsMock = patch(
        'ReText.window.globalSettings',
        MagicMock(**ReText.configOptions)).start()
    self.fileSystemWatcherMock = patch(
        'ReText.window.QFileSystemWatcher').start()

def test__common_submit(tmpdir, db):
    from Ganga.Core import BackendError
    j = Job()
    j.id = 0
    j.backend = db
    db._parent = j

    name = str(tmpdir.join('submit_script'))
    with open(name, 'w') as fd:
        fd.write(script_template.replace('###PARAMETRIC_INPUTDATA###',
                                         str([['a'], ['b']])))

    with patch('GangaDirac.Lib.Backends.DiracBase.execute',
               return_value={}):
        db.id = 1234
        db.actualCE = 'test'
        db.status = 'test'
        with pytest.raises(BackendError):
            db._common_submit(name)

        assert db.id is None, 'id not None'
        assert db.actualCE is None, 'actualCE not None'
        assert db.status is None, 'status not None'

    with patch('GangaDirac.Lib.Backends.DiracBase.execute',
               return_value={'OK': True, 'Value': 12345}) as execute:
        assert db._common_submit(name)

        execute.assert_called_once_with("execfile('%s')" % name)

        assert db.id == 12345, 'id not set'

    with patch('GangaDirac.Lib.Backends.DiracBase.execute',
               return_value={'OK': True, 'Value': [123, 456]}):
        with patch.object(db, '_setup_bulk_subjobs') as _setup_bulk_subjobs:
            db._common_submit(name)
        _setup_bulk_subjobs.assert_called_once_with([123, 456], name)

def setUp(self):
    super().setUp()

    class props:
        rosdistro = 'indigo'
        catkin_packages = ['my_package']
        source_space = 'src'
        source_subdir = None
        include_roscore = False

    self.properties = props()
    self.project_options = snapcraft.ProjectOptions()

    patcher = mock.patch('snapcraft.repo.Ubuntu')
    self.ubuntu_mock = patcher.start()
    self.addCleanup(patcher.stop)

    patcher = mock.patch(
        'snapcraft.plugins.catkin._find_system_dependencies')
    self.dependencies_mock = patcher.start()
    self.addCleanup(patcher.stop)

    patcher = mock.patch('snapcraft.plugins.catkin._Rosdep')
    self.rosdep_mock = patcher.start()
    self.addCleanup(patcher.stop)

def test_webhook_action_test(client, data):
    url1 = reverse('webhooks-test', kwargs={"pk": data.webhook1.pk})
    url2 = reverse('webhooks-test', kwargs={"pk": data.webhook2.pk})
    blocked_url = reverse('webhooks-test',
                          kwargs={"pk": data.blocked_webhook.pk})

    users = [
        None,
        data.registered_user,
        data.project_owner
    ]

    with mock.patch('taiga.webhooks.tasks._send_request') as _send_request_mock:
        _send_request_mock.return_value = data.webhooklog1
        results = helper_test_http_method(client, 'post', url1, None, users)
        assert results == [404, 404, 200]
        assert _send_request_mock.called is True

    with mock.patch('taiga.webhooks.tasks._send_request') as _send_request_mock:
        _send_request_mock.return_value = data.webhooklog1
        results = helper_test_http_method(client, 'post', url2, None, users)
        assert results == [404, 404, 404]
        assert _send_request_mock.called is False

    with mock.patch('taiga.webhooks.tasks._send_request') as _send_request_mock:
        _send_request_mock.return_value = data.webhooklog1
        results = helper_test_http_method(client, 'post', blocked_url, None,
                                          users)
        assert results == [404, 404, 451]
        assert _send_request_mock.called is False

def setUp(self):
    super().setUp()

    class Options:
        makefile = None
        make_parameters = []
        rust_features = []
        rust_revision = ""
        rust_channel = ""
        source_subdir = ""

    self.options = Options()
    self.project_options = snapcraft.ProjectOptions(
        target_deb_arch=self.deb_arch)

    patcher = mock.patch("snapcraft.internal.common.run")
    self.run_mock = patcher.start()
    self.addCleanup(patcher.stop)

    patcher = mock.patch("snapcraft.internal.common.run_output")
    patcher.start()
    self.addCleanup(patcher.stop)

    patcher = mock.patch("snapcraft.ProjectOptions.is_cross_compiling")
    patcher.start()
    self.addCleanup(patcher.stop)

    patcher = mock.patch.dict(os.environ, {})
    self.env_mock = patcher.start()
    self.addCleanup(patcher.stop)

def test_calls_collect_results(self):
    mommy.make(Course)
    with patch('evap.results.tools.collect_results') as mock:
        management.call_command('refresh_results_cache', stdout=StringIO())
        self.assertEqual(mock.call_count, Course.objects.count())

def test_is_subclass(self, parent_class_path, sub_class_path):
    with mock.patch("{}.__init__".format(parent_class_path)):
        parent_class_path = self.get_class_from_path(parent_class_path)
        sub_class_path = self.get_class_from_path(sub_class_path)
        self.assert_is_subclass(sub_class_path, parent_class_path)

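get_class_from_path is assumed to be a helper on the test class that resolves a dotted path to the class object it names; a hypothetical equivalent:

import importlib

def get_class_from_path(path):
    # Hypothetical helper (the real test-class method may differ): resolve a
    # dotted path such as "package.module.ClassName" to the class object.
    module_path, _, class_name = path.rpartition(".")
    return getattr(importlib.import_module(module_path), class_name)
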
def test_init(self, browser_mock):
    with patch('RatS.base.base_site.Site') as site_mock:
        parser = RatingsParser(site_mock, None)
        self.assertEqual(parser.site, site_mock)

def setUp(self):
    self.load_file_patch = patch('cloudinit.ssh_util.util.load_file')
    self.load_file = self.load_file_patch.start()
    self.isfile_patch = patch('cloudinit.ssh_util.os.path.isfile')
    self.isfile = self.isfile_patch.start()
    self.isfile.return_value = True

def test_affects_correct_addons(self):
    # *not considered* - Non auto-approved add-on
    addon_factory()

    # *not considered* - Non auto-approved add-on that has an
    # AutoApprovalSummary entry
    AutoApprovalSummary.objects.create(
        version=addon_factory().current_version,
        verdict=amo.NOT_AUTO_APPROVED
    )

    # *not considered* - Add-on with the current version not auto-approved
    extra_addon = addon_factory()
    AutoApprovalSummary.objects.create(
        version=extra_addon.current_version, verdict=amo.AUTO_APPROVED
    )
    extra_addon.current_version.update(created=self.days_ago(1))
    version_factory(addon=extra_addon)

    # *not considered* - current version is auto-approved but doesn't
    # have recent abuse reports or low ratings
    auto_approved_addon = addon_factory()
    AutoApprovalSummary.objects.create(
        version=auto_approved_addon.current_version,
        verdict=amo.AUTO_APPROVED
    )

    # *considered* - current version is auto-approved and
    # has a recent rating with rating <= 3
    auto_approved_addon1 = addon_factory()
    summary = AutoApprovalSummary.objects.create(
        version=auto_approved_addon1.current_version,
        verdict=amo.AUTO_APPROVED
    )
    Rating.objects.create(
        created=summary.modified + timedelta(days=3),
        addon=auto_approved_addon1,
        version=auto_approved_addon1.current_version,
        rating=2, body='Apocalypse', user=user_factory()
    )

    # *not considered* - current version is auto-approved but
    # has a recent rating with rating > 3
    auto_approved_addon2 = addon_factory()
    summary = AutoApprovalSummary.objects.create(
        version=auto_approved_addon2.current_version,
        verdict=amo.AUTO_APPROVED
    )
    Rating.objects.create(
        created=summary.modified + timedelta(days=3),
        addon=auto_approved_addon2,
        version=auto_approved_addon2.current_version,
        rating=4, body='Apocalypse', user=user_factory()
    )

    # *not considered* - current version is auto-approved but
    # has a recent rating with rating > 3
    auto_approved_addon3 = addon_factory()
    summary = AutoApprovalSummary.objects.create(
        version=auto_approved_addon3.current_version,
        verdict=amo.AUTO_APPROVED
    )
    Rating.objects.create(
        created=summary.modified + timedelta(days=3),
        addon=auto_approved_addon3,
        version=auto_approved_addon3.current_version,
        rating=4, body='Apocalypse', user=user_factory()
    )

    # *not considered* - current version is auto-approved but
    # has a low rating that isn't recent enough
    auto_approved_addon4 = addon_factory()
    summary = AutoApprovalSummary.objects.create(
        version=auto_approved_addon4.current_version,
        verdict=amo.AUTO_APPROVED
    )
    Rating.objects.create(
        created=summary.modified - timedelta(days=3),
        addon=auto_approved_addon4,
        version=auto_approved_addon4.current_version,
        rating=1, body='Apocalypse', user=user_factory()
    )

    # *considered* - current version is auto-approved and
    # has a recent abuse report
    auto_approved_addon5 = addon_factory()
    summary = AutoApprovalSummary.objects.create(
        version=auto_approved_addon5.current_version,
        verdict=amo.AUTO_APPROVED
    )
    AbuseReport.objects.create(
        addon=auto_approved_addon5,
        created=summary.modified + timedelta(days=3)
    )

    # *not considered* - current version is auto-approved but
    # has an abuse report that isn't recent enough
    auto_approved_addon6 = addon_factory()
    summary = AutoApprovalSummary.objects.create(
        version=auto_approved_addon6.current_version,
        verdict=amo.AUTO_APPROVED
    )
    AbuseReport.objects.create(
        addon=auto_approved_addon6,
        created=summary.modified - timedelta(days=3)
    )

    # *considered* - current version is auto-approved and
    # has an abuse report through its author that is recent enough
    author = user_factory()
    auto_approved_addon7 = addon_factory(users=[author])
    summary = AutoApprovalSummary.objects.create(
        version=auto_approved_addon7.current_version,
        verdict=amo.AUTO_APPROVED
    )
    AbuseReport.objects.create(
        user=author, created=summary.modified + timedelta(days=3)
    )

    # *not considered* - current version is auto-approved and
    # has an abuse report through its author that is recent enough
    # BUT the abuse report is deleted.
    author = user_factory()
    auto_approved_addon8 = addon_factory(users=[author])
    summary = AutoApprovalSummary.objects.create(
        version=auto_approved_addon8.current_version,
        verdict=amo.AUTO_APPROVED
    )
    AbuseReport.objects.create(
        user=author,
        state=AbuseReport.STATES.DELETED,
        created=summary.modified + timedelta(days=3)
    )

    # *not considered* - current version is auto-approved and
    # has a recent rating with rating <= 3
    # but the rating is deleted.
    auto_approved_addon9 = addon_factory()
    summary = AutoApprovalSummary.objects.create(
        version=auto_approved_addon9.current_version,
        verdict=amo.AUTO_APPROVED
    )
    Rating.objects.create(
        created=summary.modified + timedelta(days=3),
        addon=auto_approved_addon9,
        version=auto_approved_addon9.current_version,
        deleted=True, rating=2, body='Apocalypse', user=user_factory()
    )

    # *considered* - current version is auto-approved and
    # has an abuse report through its author that is recent enough.
    # Used to test that we only recalculate the weight for
    # the most recent version
    author = user_factory()
    auto_approved_addon8 = addon_factory(
        users=[author], version_kw={'version': '0.1'}
    )
    AutoApprovalSummary.objects.create(
        version=auto_approved_addon8.current_version,
        verdict=amo.AUTO_APPROVED
    )
    # Let's create a new `current_version` and summary
    current_version = version_factory(
        addon=auto_approved_addon8, version='0.2'
    )
    summary = AutoApprovalSummary.objects.create(
        version=current_version, verdict=amo.AUTO_APPROVED
    )
    AbuseReport.objects.create(
        user=author, created=summary.modified + timedelta(days=3)
    )

    mod = 'olympia.reviewers.tasks.AutoApprovalSummary.calculate_weight'
    with mock.patch(mod) as calc_weight_mock:
        with count_subtask_calls(
                process_addons.recalculate_post_review_weight) as calls:
            call_command(
                'process_addons',
                task='constantly_recalculate_post_review_weight'
            )

    assert len(calls) == 1
    assert calls[0]['kwargs']['args'] == [
        [
            auto_approved_addon1.pk,
            auto_approved_addon5.pk,
            auto_approved_addon7.pk,
            auto_approved_addon8.pk,
        ]
    ]

    # Only 4 calls for each add-on, doesn't consider the extra version
    # that got created for addon 8
    assert calc_weight_mock.call_count == 4

def test_dumpdata_called(self):
    with patch('evap.evaluation.management.commands.dump_testdata.call_command') as mock:
        management.call_command('dump_testdata')
        outfile_name = os.path.join(settings.BASE_DIR, "evaluation",
                                    "fixtures", "test_data.json")
        mock.assert_called_once_with("dumpdata", "auth.group", "evaluation",
                                     "rewards", "grades", indent=2,
                                     output=outfile_name)

def test_update_courses_called(self):
    with patch('evap.evaluation.models.Course.update_courses') as mock:
        management.call_command('update_course_states')
        self.assertEqual(mock.call_count, 1)

def test_assert_no_special_field():
    with patch("writer.core.write_request.is_reserved_field") as irf:
        irf.return_value = True
        with pytest.raises(InvalidFormat):
            assert_no_special_field(MagicMock())

def test_retry_deferred_cron_1(self):
    with patch('mailer.management.commands.retry_deferred.logging') as logging:
        call_command_with_cron_arg('retry_deferred', 1)
        logging.basicConfig.assert_called_with(level=logging.ERROR,
                                               format=ANY)

def test_calls_runserver(self):
    args = ["manage.py", "runserver", "0.0.0.0:8000"]
    with patch('django.core.management.execute_from_command_line') as mock:
        management.call_command('run', stdout=StringIO())
        mock.assert_called_once_with(args)

def test_send_mail_cron_1(self):
    with patch('mailer.management.commands.send_mail.logging') as logging:
        call_command_with_cron_arg('send_mail', 1)
        logging.basicConfig.assert_called_with(level=logging.ERROR,
                                               format=ANY)

def youtubekeyboard():
    kb = YouTubeKeyboard()
    with mock.patch("stbt.press", kb.press), \
            mock.patch("stbt.press_and_wait", kb.press_and_wait):
        yield kb

def test_wait_for_ready(self):
    """Test waiting for db when db is available"""
    with patch('django.db.utils.ConnectionHandler.__getitem__') as gi:
        gi.return_value = True
        call_command('wait_for_db')
        self.assertEqual(gi.call_count, 1)

def test_retry_deferred_no_cron(self):
    with patch('mailer.management.commands.retry_deferred.logging') as logging:
        call_command('retry_deferred')
        logging.basicConfig.assert_called_with(level=logging.DEBUG,
                                               format=ANY)

def test_start_run_with_parent_non_nested():
    with mock.patch("mlflow.tracking.fluent._active_run_stack",
                    [mock.Mock()]):
        with pytest.raises(Exception,
                           match=r"Run with UUID .+ is already active"):
            start_run()

def test_send_mail_no_cron(self):
    with patch('mailer.management.commands.send_mail.logging') as logging:
        call_command('send_mail')
        logging.basicConfig.assert_called_with(level=logging.DEBUG,
                                               format=ANY)

def empty_active_run_stack():
    with mock.patch("mlflow.tracking.fluent._active_run_stack", []):
        yield

def test_wait_for_db(self, ts):
    """Test waiting for db"""
    with patch('django.db.utils.ConnectionHandler.__getitem__') as gi:
        gi.side_effect = [OperationalError] * 5 + [True]
        call_command('wait_for_db')
        self.assertEqual(gi.call_count, 6)

async def test_perform_action_raise_exception_if_no_url_and_action_is_scan(
        self):
    with patch("vane.core.custom_event_loop", MagicMock()):
        with self.assertRaises(ValueError):
            self.vane.perform_action(action="scan")

def test_start_run_defaults_databricks_notebook(
    empty_active_run_stack,
):  # pylint: disable=unused-argument
    mock_experiment_id = mock.Mock()
    experiment_id_patch = mock.patch(
        "mlflow.tracking.fluent._get_experiment_id",
        return_value=mock_experiment_id
    )
    databricks_notebook_patch = mock.patch(
        "mlflow.utils.databricks_utils.is_in_databricks_notebook",
        return_value=True
    )
    mock_user = mock.Mock()
    user_patch = mock.patch(
        "mlflow.tracking.context.default_context._get_user",
        return_value=mock_user
    )
    mock_source_version = mock.Mock()
    source_version_patch = mock.patch(
        "mlflow.tracking.context.git_context._get_source_version",
        return_value=mock_source_version
    )
    mock_notebook_id = mock.Mock()
    notebook_id_patch = mock.patch(
        "mlflow.utils.databricks_utils.get_notebook_id",
        return_value=mock_notebook_id
    )
    mock_notebook_path = mock.Mock()
    notebook_path_patch = mock.patch(
        "mlflow.utils.databricks_utils.get_notebook_path",
        return_value=mock_notebook_path
    )
    mock_webapp_url = mock.Mock()
    webapp_url_patch = mock.patch(
        "mlflow.utils.databricks_utils.get_webapp_url",
        return_value=mock_webapp_url
    )
    workspace_info_patch = mock.patch(
        "mlflow.utils.databricks_utils.get_workspace_info_from_dbutils",
        return_value=("https://databricks.com", "123456"),
    )

    expected_tags = {
        mlflow_tags.MLFLOW_USER: mock_user,
        mlflow_tags.MLFLOW_SOURCE_NAME: mock_notebook_path,
        mlflow_tags.MLFLOW_SOURCE_TYPE: SourceType.to_string(SourceType.NOTEBOOK),
        mlflow_tags.MLFLOW_GIT_COMMIT: mock_source_version,
        mlflow_tags.MLFLOW_DATABRICKS_NOTEBOOK_ID: mock_notebook_id,
        mlflow_tags.MLFLOW_DATABRICKS_NOTEBOOK_PATH: mock_notebook_path,
        mlflow_tags.MLFLOW_DATABRICKS_WEBAPP_URL: mock_webapp_url,
        mlflow_tags.MLFLOW_DATABRICKS_WORKSPACE_URL: "https://databricks.com",
        mlflow_tags.MLFLOW_DATABRICKS_WORKSPACE_ID: "123456",
    }

    create_run_patch = mock.patch.object(MlflowClient, "create_run")

    with multi_context(
        experiment_id_patch,
        databricks_notebook_patch,
        user_patch,
        source_version_patch,
        notebook_id_patch,
        notebook_path_patch,
        webapp_url_patch,
        workspace_info_patch,
        create_run_patch,
    ):
        active_run = start_run()
        MlflowClient.create_run.assert_called_once_with(
            experiment_id=mock_experiment_id, tags=expected_tags
        )
        assert is_from_run(active_run, MlflowClient.create_run.return_value)

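multi_context above is an MLflow test helper that enters several context managers at once; an equivalent sketch using contextlib.ExitStack (assumed behavior, not necessarily MLflow's exact implementation):

import contextlib

@contextlib.contextmanager
def multi_context(*cms):
    # Sketch of the multi_context helper: enter all given context managers
    # together and exit them in reverse order when the block ends.
    with contextlib.ExitStack() as stack:
        yield tuple(stack.enter_context(cm) for cm in cms)
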
def test_add_new_document(self):
    self.assertNotIn('113', app.directories['3'])
    with patch('app.input', side_effect=['113', 'pas', 'Rick', '3']):
        app.add_new_doc()
    self.assertIn('113', app.directories['3'])

def test_split_fasta(self):
    m = mock.mock_open(read_data="\n".join(self.fasta_file_content))
    with mock.patch('builtins.open', m):
        results = split_fasta('random_fasta_file')
    self.assertDictEqual(results, self.fasta_sequences)

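The split_fasta under test is not shown in this section; a hypothetical implementation consistent with the mocked open above could look like this:

def split_fasta(path):
    # Hypothetical sketch of the function under test (the real implementation
    # may differ): read a FASTA file and return a {header: sequence} dict.
    sequences = {}
    header = None
    with open(path) as handle:
        for line in handle.read().splitlines():
            if line.startswith('>'):
                header = line[1:].strip()
                sequences[header] = ''
            elif header is not None:
                sequences[header] += line.strip()
    return sequences
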
def setUpClass(cls):
    super(CompletionSetUpMixin, cls).setUpClass()
    cls.waffle_patcher = mock.patch('completion.waffle.waffle')
    cls.mock_waffle = cls.waffle_patcher.start()
    cls.mock_waffle.return_value.is_enabled.return_value = \
        cls.COMPLETION_SWITCH_ENABLED

def test_remove_document(self):
    self.assertIn('10006', app.directories['2'])
    with patch('app.input', return_value='10006'):
        app.delete_doc()
    self.assertNotIn('10006', app.directories['2'])

def test_do_convert_data(self) -> None:
    mattermost_data_dir = self.fixture_file_name("", "mattermost_fixtures")
    output_dir = self.make_import_output_dir("mattermost")

    with patch("builtins.print") as mock_print, \
            self.assertLogs(level="WARNING") as warn_log:
        do_convert_data(
            mattermost_data_dir=mattermost_data_dir,
            output_dir=output_dir,
            masking_content=False,
        )
    self.assertEqual(
        mock_print.mock_calls,
        [
            call("Generating data for", "gryffindor"),
            call("Generating data for", "slytherin"),
        ],
    )
    self.assertEqual(
        warn_log.output,
        [
            "WARNING:root:Skipping importing huddles and PMs since there are multiple teams in the export",
            "WARNING:root:Skipping importing huddles and PMs since there are multiple teams in the export",
        ],
    )

    harry_team_output_dir = self.team_output_dir(output_dir, "gryffindor")

    self.assertEqual(
        os.path.exists(os.path.join(harry_team_output_dir, "avatars")), True)
    self.assertEqual(
        os.path.exists(os.path.join(harry_team_output_dir, "emoji")), True)
    self.assertEqual(
        os.path.exists(os.path.join(harry_team_output_dir,
                                    "attachment.json")), True)

    realm = self.read_file(harry_team_output_dir, "realm.json")

    self.assertEqual("Organization imported from Mattermost!",
                     realm["zerver_realm"][0]["description"])

    exported_user_ids = self.get_set(realm["zerver_userprofile"], "id")
    exported_user_full_names = self.get_set(realm["zerver_userprofile"],
                                            "full_name")
    self.assertEqual({"Harry Potter", "Ron Weasley", "Severus Snape"},
                     exported_user_full_names)

    exported_user_emails = self.get_set(realm["zerver_userprofile"], "email")
    self.assertEqual(
        {"*****@*****.**", "*****@*****.**", "*****@*****.**"},
        exported_user_emails)

    self.assertEqual(len(realm["zerver_stream"]), 3)
    exported_stream_names = self.get_set(realm["zerver_stream"], "name")
    self.assertEqual(
        exported_stream_names,
        {"Gryffindor common room", "Gryffindor quidditch team",
         "Dumbledores army"},
    )
    self.assertEqual(self.get_set(realm["zerver_stream"], "realm"),
                     {realm["zerver_realm"][0]["id"]})
    self.assertEqual(self.get_set(realm["zerver_stream"], "deactivated"),
                     {False})

    self.assertEqual(len(realm["zerver_defaultstream"]), 0)

    exported_recipient_ids = self.get_set(realm["zerver_recipient"], "id")
    self.assertEqual(len(exported_recipient_ids), 6)
    exported_recipient_types = self.get_set(realm["zerver_recipient"], "type")
    self.assertEqual(exported_recipient_types, {1, 2})
    exported_recipient_type_ids = self.get_set(realm["zerver_recipient"],
                                               "type_id")
    self.assertEqual(len(exported_recipient_type_ids), 3)

    exported_subscription_userprofile = self.get_set(
        realm["zerver_subscription"], "user_profile")
    self.assertEqual(len(exported_subscription_userprofile), 3)
    exported_subscription_recipients = self.get_set(
        realm["zerver_subscription"], "recipient")
    self.assertEqual(len(exported_subscription_recipients), 6)

    messages = self.read_file(harry_team_output_dir, "messages-000001.json")

    exported_messages_id = self.get_set(messages["zerver_message"], "id")
    self.assertIn(messages["zerver_message"][0]["sender"],
                  exported_user_ids)
    self.assertIn(messages["zerver_message"][0]["recipient"],
                  exported_recipient_ids)
    self.assertIn(messages["zerver_message"][0]["content"],
                  "harry joined the channel.\n\n")

    exported_usermessage_userprofiles = self.get_set(
        messages["zerver_usermessage"], "user_profile")
    self.assertEqual(len(exported_usermessage_userprofiles), 3)
    exported_usermessage_messages = self.get_set(
        messages["zerver_usermessage"], "message")
    self.assertEqual(exported_usermessage_messages, exported_messages_id)

    with self.assertLogs(level="INFO"):
        do_import_realm(
            import_dir=harry_team_output_dir,
            subdomain="gryffindor",
        )

    realm = get_realm("gryffindor")

    self.assertFalse(get_user("*****@*****.**", realm).is_mirror_dummy)
    self.assertFalse(get_user("*****@*****.**", realm).is_mirror_dummy)
    self.assertTrue(get_user("*****@*****.**", realm).is_mirror_dummy)

    messages = Message.objects.filter(sender__realm=realm)
    for message in messages:
        self.assertIsNotNone(message.rendered_content)

def test_util_context_success(self):
    with mock.patch("builtins.open",
                    mock.mock_open(read_data="hello world")) as mock_open:
        with util.FileContext("/some/path", "r") as result:
            self.assertEqual(result.contents, "hello world")

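util.FileContext is not shown in this section; a minimal context manager consistent with the test's assertions (a hypothetical sketch, not the real util module) could look like this:

class FileContext:
    # Hypothetical sketch consistent with the test above: open the file on
    # enter, expose its text as `.contents`, and close it before the body runs.
    def __init__(self, path, mode):
        self.path = path
        self.mode = mode

    def __enter__(self):
        with open(self.path, self.mode) as handle:
            self.contents = handle.read()
        return self

    def __exit__(self, exc_type, exc, tb):
        return False
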
def test_user_error(self):
    with mock.patch('builtins.open', mock_open_data(self.yaml_data)), \
            self.assertRaises(YamlParseError):  # noqa
        with load_file('file.yml', Loader=SafeLineLoader) as data:
            raise MarkedYAMLError('context', data.mark.start,
                                  'problem', data.marks['zoo'].start)

def test_do_convert_data_with_direct_messages(self) -> None:
    mattermost_data_dir = self.fixture_file_name("direct_channel",
                                                 "mattermost_fixtures")
    output_dir = self.make_import_output_dir("mattermost")

    with patch("builtins.print") as mock_print, \
            self.assertLogs(level="INFO"):
        do_convert_data(
            mattermost_data_dir=mattermost_data_dir,
            output_dir=output_dir,
            masking_content=False,
        )
    self.assertEqual(
        mock_print.mock_calls,
        [
            call("Generating data for", "gryffindor"),
        ],
    )

    harry_team_output_dir = self.team_output_dir(output_dir, "gryffindor")

    self.assertEqual(
        os.path.exists(os.path.join(harry_team_output_dir, "avatars")), True)
    self.assertEqual(
        os.path.exists(os.path.join(harry_team_output_dir, "emoji")), True)
    self.assertEqual(
        os.path.exists(os.path.join(harry_team_output_dir,
                                    "attachment.json")), True)

    realm = self.read_file(harry_team_output_dir, "realm.json")

    self.assertEqual("Organization imported from Mattermost!",
                     realm["zerver_realm"][0]["description"])

    exported_user_ids = self.get_set(realm["zerver_userprofile"], "id")
    exported_user_full_names = self.get_set(realm["zerver_userprofile"],
                                            "full_name")
    self.assertEqual(
        {"Harry Potter", "Ron Weasley", "Ginny Weasley", "Tom Riddle"},
        exported_user_full_names)

    exported_user_emails = self.get_set(realm["zerver_userprofile"], "email")
    self.assertEqual(
        {"*****@*****.**", "*****@*****.**", "*****@*****.**",
         "*****@*****.**"},
        exported_user_emails,
    )

    self.assertEqual(len(realm["zerver_stream"]), 3)
    exported_stream_names = self.get_set(realm["zerver_stream"], "name")
    self.assertEqual(
        exported_stream_names,
        {"Gryffindor common room", "Gryffindor quidditch team",
         "Dumbledores army"},
    )
    self.assertEqual(self.get_set(realm["zerver_stream"], "realm"),
                     {realm["zerver_realm"][0]["id"]})
    self.assertEqual(self.get_set(realm["zerver_stream"], "deactivated"),
                     {False})

    self.assertEqual(len(realm["zerver_defaultstream"]), 0)

    exported_recipient_ids = self.get_set(realm["zerver_recipient"], "id")
    self.assertEqual(len(exported_recipient_ids), 8)
    exported_recipient_types = self.get_set(realm["zerver_recipient"], "type")
    self.assertEqual(exported_recipient_types, {1, 2, 3})
    exported_recipient_type_ids = self.get_set(realm["zerver_recipient"],
                                               "type_id")
    self.assertEqual(len(exported_recipient_type_ids), 4)

    exported_subscription_userprofile = self.get_set(
        realm["zerver_subscription"], "user_profile")
    self.assertEqual(len(exported_subscription_userprofile), 4)
    exported_subscription_recipients = self.get_set(
        realm["zerver_subscription"], "recipient")
    self.assertEqual(len(exported_subscription_recipients), 8)

    messages = self.read_file(harry_team_output_dir, "messages-000001.json")

    exported_messages_id = self.get_set(messages["zerver_message"], "id")
    self.assertIn(messages["zerver_message"][0]["sender"],
                  exported_user_ids)
    self.assertIn(messages["zerver_message"][0]["recipient"],
                  exported_recipient_ids)
    self.assertIn(messages["zerver_message"][0]["content"],
                  "ron joined the channel.\n\n")

    exported_usermessage_userprofiles = self.get_set(
        messages["zerver_usermessage"], "user_profile")
    self.assertEqual(len(exported_usermessage_userprofiles), 3)
    exported_usermessage_messages = self.get_set(
        messages["zerver_usermessage"], "message")
    self.assertEqual(exported_usermessage_messages, exported_messages_id)

    with self.assertLogs(level="INFO"):
        do_import_realm(
            import_dir=harry_team_output_dir,
            subdomain="gryffindor",
        )

    realm = get_realm("gryffindor")

    messages = Message.objects.filter(sender__realm=realm)
    for message in messages:
        self.assertIsNotNone(message.rendered_content)
    self.assertEqual(len(messages), 11)

    stream_messages = messages.filter(
        recipient__type=Recipient.STREAM).order_by("date_sent")
    stream_recipients = stream_messages.values_list("recipient", flat=True)
    self.assertEqual(len(stream_messages), 4)
    self.assertEqual(len(set(stream_recipients)), 2)
    self.assertEqual(stream_messages[0].sender.email, "*****@*****.**")
    self.assertEqual(stream_messages[0].content,
                     "ron joined the channel.\n\n")

    huddle_messages = messages.filter(
        recipient__type=Recipient.HUDDLE).order_by("date_sent")
    huddle_recipients = huddle_messages.values_list("recipient", flat=True)
    self.assertEqual(len(huddle_messages), 3)
    self.assertEqual(len(set(huddle_recipients)), 1)
    self.assertEqual(huddle_messages[0].sender.email, "*****@*****.**")
    self.assertEqual(huddle_messages[0].content,
                     "Who is going to Hogsmeade this weekend?\n\n")

    personal_messages = messages.filter(
        recipient__type=Recipient.PERSONAL).order_by("date_sent")
    personal_recipients = personal_messages.values_list("recipient",
                                                        flat=True)
    self.assertEqual(len(personal_messages), 4)
    self.assertEqual(len(set(personal_recipients)), 3)
    self.assertEqual(personal_messages[0].sender.email, "*****@*****.**")
    self.assertEqual(personal_messages[0].content, "hey harry\n\n")

def test_parse_error(self):
    with mock.patch('builtins.open', mock_open_data('&')), \
            self.assertRaises(YamlParseError):  # noqa
        with load_file('file.yml'):
            pass

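mock_open_data, used in the two YAML tests above, is assumed to be a thin wrapper around mock.mock_open; a sketch of an equivalent helper:

from unittest import mock

def mock_open_data(data):
    # Assumed helper (not shown in this section): build a replacement for
    # builtins.open whose file handle returns `data` when read.
    return mock.mock_open(read_data=data)
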
def tagger():
    with mock.patch("fh_immuta_utils.tagging.Tagger.read_configs",
                    return_value=None):
        obj = tg.Tagger(config_root="")
        obj.tag_map_datadict = TAG_MAP
        obj.tag_map_datasource = DATA_SOURCE_TAGS
        return obj