def test_syncFromSynapse__folder_contains_one_file():
    folder = Folder(name="the folder", parent="whatever", id="syn123")
    file = File(name="a file", parent=folder, id="syn456")

    with patch.object(syn, "getChildren", return_value=[file]) as patch_syn_get_children,\
            patch.object(syn, "get", return_value=file):
        assert_equals([file], synapseutils.syncFromSynapse(syn, folder))
        # called_with() is not an assertion on a Mock; use assert_called_once_with
        patch_syn_get_children.assert_called_once_with(folder['id'])
def test_contextmanager(self):
    "Can we use it as a contextmanager?"
    with patch.object(self.lk, 'acquire') as Pac:
        with patch.object(self.lk, 'release') as Prel:
            with self.lk:
                # acquire is called on entry...
                Pac.assert_called_once_with()
            # ...and release on exit
            Prel.assert_called_once_with()
def test_SplitQueryFn_with_exception(self):
    """A test that verifies that no split is performed when failures occur."""
    with patch.object(helper, 'get_datastore',
                      return_value=self._mock_datastore):
        # Force SplitQueryFn to compute the number of query splits
        num_splits = 0
        expected_num_splits = 1
        entity_bytes = (expected_num_splits *
                        ReadFromDatastore._DEFAULT_BUNDLE_SIZE_BYTES)
        with patch.object(ReadFromDatastore, 'get_estimated_size_bytes',
                          return_value=entity_bytes):
            with patch.object(query_splitter, 'get_splits',
                              side_effect=ValueError("Testing query split error")):
                split_query_fn = ReadFromDatastore.SplitQueryFn(
                    self._PROJECT, self._query, None, num_splits)
                split_query_fn.start_bundle()
                returned_split_queries = []
                for split_query in split_query_fn.process(self._query):
                    returned_split_queries.append(split_query)

                self.assertEqual(len(returned_split_queries),
                                 expected_num_splits)
                self.assertEqual(returned_split_queries[0][1], self._query)
                self.assertEqual(
                    0, len(self._mock_datastore.run_query.call_args_list))
                self.verify_unique_keys(returned_split_queries)
def test_readManifestFile__synapseStore_values_are_set():
    project_id = "syn123"
    header = 'path\tparent\tsynapseStore\n'
    path1 = os.path.abspath(os.path.expanduser('~/file1.txt'))
    path2 = 'http://www.synapse.org'
    path3 = os.path.abspath(os.path.expanduser('~/file3.txt'))
    path4 = 'http://www.github.com'
    path5 = os.path.abspath(os.path.expanduser('~/file5.txt'))
    path6 = 'http://www.checkoutmymixtapefam.com/fire.mp3'

    row1 = '%s\t%s\tTrue\n' % (path1, project_id)
    row2 = '%s\t%s\tTrue\n' % (path2, project_id)
    row3 = '%s\t%s\tFalse\n' % (path3, project_id)
    row4 = '%s\t%s\tFalse\n' % (path4, project_id)
    row5 = '%s\t%s\t""\n' % (path5, project_id)
    row6 = '%s\t%s\t""\n' % (path6, project_id)

    expected_synapseStore = {
        str(path1): True,
        str(path2): False,
        str(path3): False,
        str(path4): False,
        str(path5): True,
        str(path6): False
    }

    manifest = StringIO(header + row1 + row2 + row3 + row4 + row5 + row6)
    # mock isfile values for: file1.txt, file3.txt, file5.txt
    with patch.object(syn, "get", return_value=Project()),\
            patch.object(os.path, "isfile", return_value=True):
        manifest_dataframe = synapseutils.sync.readManifestFile(syn, manifest)
        actual_synapseStore = (
            manifest_dataframe.set_index('path')['synapseStore'].to_dict())
        assert_dict_equal(expected_synapseStore, actual_synapseStore)
def test_SplitQueryFn_without_num_splits(self):
    with patch.object(helper, 'get_datastore',
                      return_value=self._mock_datastore):
        # Force SplitQueryFn to compute the number of query splits
        num_splits = 0
        expected_num_splits = 23
        entity_bytes = (expected_num_splits *
                        ReadFromDatastore._DEFAULT_BUNDLE_SIZE_BYTES)
        with patch.object(ReadFromDatastore, 'get_estimated_size_bytes',
                          return_value=entity_bytes):

            def fake_get_splits(datastore, query, num_splits, partition=None):
                return self.split_query(query, num_splits)

            with patch.object(query_splitter, 'get_splits',
                              side_effect=fake_get_splits):
                split_query_fn = ReadFromDatastore.SplitQueryFn(
                    self._PROJECT, self._query, None, num_splits)
                split_query_fn.start_bundle()
                returned_split_queries = []
                for split_query in split_query_fn.process(self._query):
                    returned_split_queries.append(split_query)

                self.assertEqual(len(returned_split_queries),
                                 expected_num_splits)
                self.assertEqual(
                    0, len(self._mock_datastore.run_query.call_args_list))
                self.verify_unique_keys(returned_split_queries)
def _runTransientErrorRetryTest(self, numErrors):
    for errorCode in _ALL_RETRIABLE_ERROR_CODES:
        clientSideEffects = [sqlalchemy.exc.OperationalError(
            orig=MySQLdb.OperationalError(errorCode),
            statement="err", params=None)] * numErrors + [DEFAULT]
        serverSideEffects = [sqlalchemy.exc.InternalError(
            orig=MySQLdb.InternalError(errorCode),
            statement="err", params=None)] * numErrors + [DEFAULT]

        if 3000 > errorCode >= 2000:
            # The error is client side. Return one operationalError, then pass
            with patch.object(Engine, "execute", spec_set=Engine.execute,
                              side_effect=clientSideEffects) as mockExecute:
                retryOnTransientErrors(mockExecute)(Mock())
                self.assertEqual(mockExecute.call_count, numErrors + 1)
        elif errorCode >= 1000:
            # The error is server side. Return one internalError, then pass
            with patch.object(Engine, "execute", spec_set=Engine.execute,
                              side_effect=serverSideEffects) as mockExecute:
                retryOnTransientErrors(mockExecute)(Mock())
                self.assertEqual(mockExecute.call_count, numErrors + 1)
        else:
            self.fail("Error code is neither client nor server: %s"
                      % errorCode)
def test_graceful_stop_on_one_container_error(runner_factory, rabbit_config):
    runner = runner_factory(rabbit_config, ExampleService, SecondService)
    runner.start()

    container = get_container(runner, ExampleService)
    second_container = get_container(runner, SecondService)
    original_stop = second_container.stop

    with patch.object(second_container, 'stop', autospec=True,
                      wraps=original_stop) as stop:
        rpc_consumer = get_extension(container, RpcConsumer)
        with patch.object(rpc_consumer, 'handle_result',
                          autospec=True) as handle_result:
            exception = Exception("error")
            handle_result.side_effect = exception

            # use a standalone rpc proxy to call exampleservice.task()
            with ServiceRpcProxy("exampleservice", rabbit_config) as proxy:
                # proxy.task() will hang forever because it generates an error
                # in the remote container (so never receives a response).
                proxy.task.call_async()

            # verify that the error bubbles up to runner.wait()
            with pytest.raises(Exception) as exc_info:
                runner.wait()
            assert exc_info.value == exception

            # Check that the second service was stopped due to the first
            # service being killed
            stop.assert_called_once_with()
def test_run():
    obj = IsClusterManaged()
    assert obj.parameters is not None
    obj.parameters = maps.NamedDict()
    obj.parameters["TendrlContext.integration_id"] = "test_uuid"
    setattr(__builtin__, "NS", maps.NamedDict())
    setattr(NS, "tendrl", maps.NamedDict())
    setattr(NS.tendrl, "objects", maps.NamedDict())
    NS.tendrl.objects.Cluster = Cluster

    with patch.object(NS.tendrl.objects.Cluster, 'load',
                      load_managed_cluster):
        ret_val = obj.run()
        assert ret_val is True

    with patch.object(NS.tendrl.objects.Cluster, 'load',
                      load_unmanaged_cluster):
        ret_val = obj.run()
        assert ret_val is False

    with patch.object(NS.tendrl.objects.Cluster, 'load',
                      load_cluster_failed):
        ret_val = obj.run()
        assert ret_val is True
def test_perfscale_longrun_perf(self):
    client = Mock()
    pprof_collector = Mock()
    new_client = Mock()
    new_models = [Mock(), Mock()]
    args = argparse.Namespace(run_length=1)
    with patch.object(pl, 'until_timeout', autospec=True, return_value=[1]):
        with patch.object(pl, 'action_rest', autospec=True):
            with patch.object(pl, 'action_create', autospec=True,
                              return_value=new_client) as m_ac:
                with patch.object(pl, 'action_busy', autospec=True,
                                  return_value=new_models) as m_ab:
                    with patch.object(pl, 'action_cleanup',
                                      autospec=True) as m_acu:
                        pl.perfscale_longrun_perf(
                            client, pprof_collector, args)
    m_ac.assert_called_once_with(client)
    m_ab.assert_called_once_with(new_client, ['dummy-sink'])
    m_acu.assert_called_once_with(new_client, new_models)
def test_cache_unavailable(self):
    self.manager.set('foo', 'bar')
    self.store.flush_local_cache()

    with patch.object(self.store.cache, 'get', side_effect=Exception()):
        assert self.manager.get('foo') == 'bar'
        self.store.flush_local_cache()

    with patch.object(self.store.cache, 'set', side_effect=Exception()):
        assert self.manager.get('foo') == 'bar'
        self.store.flush_local_cache()

    # we should still be able to write a new value
    self.manager.set('foo', 'baz')
    self.store.flush_local_cache()

    # the cache should be incorrect now, but sync_options will eventually
    # correct the state
    assert self.manager.get('foo') == 'bar'
    self.store.flush_local_cache()

    # when the cache poofs, the db will return the most accurate answer
    with patch.object(self.store.cache, 'get', side_effect=Exception()):
        assert self.manager.get('foo') == 'baz'
        self.store.flush_local_cache()

    with patch.object(self.store.cache, 'set', side_effect=Exception()):
        assert self.manager.get('foo') == 'baz'
        self.store.flush_local_cache()
def test_get_listed_users(self):
    usr1 = models.CassandraUser(self._get_random_name(1025))
    usr2 = models.CassandraUser(self._get_random_name(1025))
    usr3 = models.CassandraUser(self._get_random_name(1025))
    db1 = models.CassandraSchema('db1')
    db2 = models.CassandraSchema('db2')
    usr1.databases.append(db1.serialize())
    usr3.databases.append(db2.serialize())

    rv_1 = NonCallableMagicMock()
    rv_1.configure_mock(name=usr1.name, super=False)
    rv_2 = NonCallableMagicMock()
    rv_2.configure_mock(name=usr2.name, super=False)
    rv_3 = NonCallableMagicMock()
    rv_3.configure_mock(name=usr3.name, super=True)

    with patch.object(self.conn, 'execute',
                      return_value=iter([rv_1, rv_2, rv_3])):
        with patch.object(
                self.admin, '_get_acl',
                return_value={usr1.name: {db1.name: {'SELECT'}, db2.name: {}},
                              usr3.name: {db2.name: {'SELECT'}}}):
            usrs = self.manager.list_users(self.context)
            self.conn.execute.assert_has_calls([
                call(self.__LIST_USR_FORMAT),
            ], any_order=True)
            self.assertIn(usr1.serialize(), usrs[0])
            self.assertIn(usr2.serialize(), usrs[0])
            self.assertIn(usr3.serialize(), usrs[0])
def test_calls_provided_test(self):
    client = fake_juju_client()
    with temp_dir() as juju_home:
        client.env.juju_home = juju_home
        bs_manager = make_bootstrap_manager(client)
        bs_manager.log_dir = os.path.join(juju_home, 'log-dir')
        os.mkdir(bs_manager.log_dir)

        timing = gpr.TimingData(datetime.utcnow(), datetime.utcnow())
        deploy_details = gpr.DeployDetails('test', dict(), timing)
        noop_test = Mock(return_value=deploy_details)
        pprof_collector = Mock()

        with patch.object(gpr, 'dump_performance_metrics_logs',
                          autospec=True):
            with patch.object(gpr, 'generate_reports', autospec=True):
                with patch.object(
                        gpr, 'PPROFCollector', autospec=True) as p_pc:
                    p_pc.return_value = pprof_collector
                    gpr.run_perfscale_test(
                        noop_test, bs_manager, get_default_args())

        noop_test.assert_called_once_with(
            client, pprof_collector, get_default_args())
def test_loop_body(self):
    # Shifting the schedule must be handled.
    self.rf_sensor._started = True

    with patch.object(TDMA_Scheduler, "shift") as shift_mock:
        with patch.object(TDMA_Scheduler, "update") as update_mock:
            with patch.object(RF_Sensor_Physical_Texas_Instruments,
                              "_receive") as receive_mock:
                try:
                    self.rf_sensor._loop_body()
                except DisabledException:
                    pass

                receive_mock.assert_called_once_with()
                self.assertEqual(shift_mock.call_count, 1)
                self.assertEqual(update_mock.call_count, 1)
                self.assertNotEqual(self.rf_sensor._polling_time, 0.0)

    # Regular updates must be handled.
    self.rf_sensor._started = False

    with patch.object(RF_Sensor_Physical_Texas_Instruments,
                      "_receive") as receive_mock:
        # The receive method must be called.
        self.rf_sensor._loop_body()
        receive_mock.assert_called_once_with()
def test_create_report_graph_returns_base_file_path(self):
    """The returned filepath should just be the basename."""
    generator = Mock()
    start = 0000
    end = 9999
    file_list = ['example.rrd']
    rrd_dir = '/foo'
    output_file = '/bar/test.png'
    output_file_base = 'test.png'
    graph_period = '0'
    with patch.object(gpr.os, 'listdir', autospec=True,
                      return_value=file_list) as m_list:
        with patch.object(gpr, 'get_duration_points', autospec=True,
                          return_value=(start, end)) as m_gdp:
            self.assertEqual(
                output_file_base,
                gpr.create_report_graph(
                    rrd_dir, output_file, generator, graph_period))
    m_gdp.assert_called_once_with('/foo/example.rrd', graph_period)
    m_list.assert_called_once_with(rrd_dir)
    generator.assert_called_once_with(start, end, rrd_dir, output_file)
def test_returns_ordered_dictionary_of_details(self):
    """Must be ordered on the event_range."""
    # Use an ordered dict here so we can check the returned order has
    # changed later on.
    first = '2016-10-16 20:28:06 - 2016-10-16 20:29:44'
    second = '2016-10-16 20:30:13 - 2016-10-16 20:32:21'
    fake_data = OrderedDict()
    fake_data[second] = {'20:30:13 - 20:30:33': 'Log message second'}
    fake_data[first] = {'20:28:06 - 20:28:26': 'Log message'}
    name_lookup = {first: 'First', second: 'Second'}
    with patch.object(gpr, '_get_chunked_log', return_value=fake_data,
                      autospec=True):
        with patch.object(gpr, '_get_log_name_lookup_table',
                          return_value=name_lookup, autospec=True):
            details = gpr.breakdown_log_by_events_timeframe(
                '/tmp', 'boostrap', 'cleanup', [])
    self.assertIsInstance(details, OrderedDict)
    items = details.items()
    self.assertEqual(items[0][0], first)
    self.assertEqual(items[1][0], second)
def test_that_a_template_can_be_rendered_correctly(self, mock_render):
    with nested(
        patch.object(self.near, 'extract_latlon'),
        patch.object(self.near, 'extract_page'),
        patch.object(self.near, 'extract_sort'),
        patch.object(self.near, 'paginated_results')
    ) as (mock_latlon, mock_page, mock_sort, mock_results):
        request = rf.get('/')
        mock_paginated_results = Mock()
        mock_latlon.return_value = ('lat', 'lon')
        mock_page.return_value = 1
        mock_sort.return_value = ('sort', 'order')
        mock_results.return_value = mock_paginated_results
        mock_response = Mock()
        mock_render.return_value = mock_response

        self.near.render(request, 'template-path', 999)

        mock_latlon.assert_called_with(request)
        mock_page.assert_called_with(request)
        mock_results.assert_called_with('lat', 'lon', 'order', 999, 1)
        mock_render.assert_called_with(request, 'template-path', {
            'results': mock_paginated_results,
            'latitude': 'lat',
            'longitude': 'lon',
            'sort': 'sort',
        })
def test_setup_for_ha_enabled(self):
    client = fake_juju_client()
    client.bootstrap()
    client.enable_ha()
    admin_client = client.get_controller_client()
    with patch.object(
            gpr, '_setup_system_monitoring', autospec=True) as m_ssm:
        with patch.object(
                gpr, '_enable_monitoring', autospec=True) as m_em:
            self.assertListEqual(
                ['0', '1', '2'],
                gpr.setup_system_monitoring(admin_client))
    self.assertListEqual(
        m_ssm.call_args_list,
        [call(admin_client, '0'),
         call(admin_client, '1'),
         call(admin_client, '2')])
    self.assertListEqual(
        m_em.call_args_list,
        [call(admin_client, '0'),
         call(admin_client, '1'),
         call(admin_client, '2')])
def test_build_circuit_timeout_after_progress(self):
    """
    Similar to above but we timeout after Tor has ack'd our
    circuit-creation attempt, but before reaching BUILT.
    """
    class FakeRouter:
        def __init__(self, i):
            self.id_hex = i
            self.flags = []

    class FakeCircuit(Circuit):
        def close(self):
            return defer.succeed(None)

    path = []
    for x in range(3):
        path.append(FakeRouter("$%040d" % x))

    def fake_queue(cmd):
        self.assertTrue(cmd.startswith('EXTENDCIRCUIT 0'))
        return defer.succeed("EXTENDED 1234")

    queue_command = patch.object(self.protocol, 'queue_command', fake_queue)
    circuit_factory = patch.object(self.state, 'circuit_factory', FakeCircuit)
    with queue_command, circuit_factory:
        timeout = 10
        clock = task.Clock()

        d = build_timeout_circuit(self.state, clock, path, timeout,
                                  using_guards=False)
        clock.advance(timeout + 1)

        def check_for_timeout_error(f):
            self.assertTrue(isinstance(f.type(), CircuitBuildTimedOutError))
        d.addErrback(check_for_timeout_error)
    return d
def test_check_disk_space_for_deployment(self):
    min_size = 100000

    volumes_metadata = self.env.get_default_volumes_metadata()
    volumes_roles_mapping = volumes_metadata['volumes_roles_mapping']

    for role, space_info in volumes_roles_mapping.iteritems():
        node, min_installation_size = self.\
            create_node_and_calculate_min_size(
                role, space_info, volumes_metadata)

        self.update_node_with_single_disk(node, min_size)
        vm = node.volume_manager
        with patch.object(vm, '_VolumeManager'
                              '__calc_minimal_installation_size',
                          return_value=min_size):
            vm.check_disk_space_for_deployment()

        self.update_node_with_single_disk(node, min_size - 1)
        vm = node.volume_manager
        with patch.object(vm, '_VolumeManager'
                              '__calc_minimal_installation_size',
                          return_value=min_size):
            self.assertRaises(
                errors.NotEnoughFreeSpace,
                vm.check_disk_space_for_deployment)
def test_probe_building_disabled(self, recwarn):
    """Test that build methods are not called and that a warning is raised
    if probe building is disabled.
    """
    # Create test network
    with nengo.Network() as network:
        a = nengo.Ensemble(100, 2)
        p_a = nengo.Probe(a, label="Output")

    # Create a model
    model = Model()

    # Dummy neurons builder
    ens_build = mock.Mock(name="ensemble builder")

    # Define a probe build function
    build_ens_probe = mock.Mock()

    # Build the model
    probe_builders = {nengo.Ensemble: build_ens_probe}
    with patch.object(model, "builders", new={nengo.Ensemble: ens_build}),\
            patch.object(model, "probe_builders", new=probe_builders):
        model.build(network, build_probes=False)

    # Assert the probes were NOT built
    assert p_a not in model.seeds
    assert build_ens_probe.call_count == 0

    # And that a warning was raised
    w = recwarn.pop()
    assert "Probes" in str(w.message)
    assert "disabled" in str(w.message)
def test_backup_incremental_metadata(self):
    with patch.object(backupagent, 'get_storage_strategy',
                      return_value=MockSwift):
        MockStorage.save_metadata = Mock()
        with patch.object(MockSwift, 'load_metadata',
                          return_value={'lsn': '54321'}):
            meta = {
                'lsn': '12345',
                'parent_location': 'fake',
                'parent_checksum': 'md5',
            }
            mysql_impl.InnoBackupExIncremental.metadata = MagicMock(
                return_value=meta)
            mysql_impl.InnoBackupExIncremental.run = MagicMock(
                return_value=True)
            mysql_impl.InnoBackupExIncremental.__exit__ = MagicMock(
                return_value=True)

            agent = backupagent.BackupAgent()

            bkup_info = {'id': '123',
                         'location': 'fake-location',
                         'type': 'InnoBackupEx',
                         'checksum': 'fake-checksum',
                         'parent': {'location': 'fake', 'checksum': 'md5'}}

            agent.execute_backup(TroveContext(), bkup_info, '/var/lib/mysql')

            # called_once_with() would return a new mock and always pass;
            # assert_called_once_with is the real assertion
            MockStorage.save_metadata.assert_called_once_with(ANY, meta)
def test_to_swift_req_subrequest_proxy_access_log(self):
    container = 'bucket'
    obj = 'obj'
    method = 'GET'

    # force_swift_request_proxy_log is True
    req = Request.blank('/%s/%s' % (container, obj),
                        environ={'REQUEST_METHOD': method,
                                 'swift.proxy_access_log_made': True},
                        headers={'Authorization': 'AWS test:tester:hmac',
                                 'Date': self.get_date_header()})
    with patch.object(Request, 'get_response') as m_swift_resp, \
            patch.object(Request, 'remote_user', 'authorized'):
        m_swift_resp.return_value = FakeSwiftResponse()
        s3_req = S3AclRequest(
            req.environ, MagicMock(), force_request_log=True)
        sw_req = s3_req.to_swift_req(method, container, obj)
        self.assertFalse(sw_req.environ['swift.proxy_access_log_made'])

    # force_swift_request_proxy_log is False
    req = Request.blank('/%s/%s' % (container, obj),
                        environ={'REQUEST_METHOD': method,
                                 'swift.proxy_access_log_made': True},
                        headers={'Authorization': 'AWS test:tester:hmac',
                                 'Date': self.get_date_header()})
    with patch.object(Request, 'get_response') as m_swift_resp, \
            patch.object(Request, 'remote_user', 'authorized'):
        m_swift_resp.return_value = FakeSwiftResponse()
        s3_req = S3AclRequest(
            req.environ, MagicMock(), force_request_log=False)
        sw_req = s3_req.to_swift_req(method, container, obj)
        self.assertTrue(sw_req.environ['swift.proxy_access_log_made'])
def max_header_length_test():
    message = create.from_string(LONG_HEADER)
    # this used to fail because it exceeded the max recursion depth
    ok_(message.headers.getraw('subject').encode("utf-8") in
        message.to_string())

    unicode_subject = (u"Это сообщение с длинным сабжектом "
                       u"специально чтобы проверить кодировки")
    ascii_subject = "This is simple ascii subject"

    with patch.object(
            headers.encoding, 'MAX_HEADER_LENGTH', len(ascii_subject) + 1):
        eq_(Header(ascii_subject.encode("ascii"), "ascii",
                   header_name="Subject"),
            encode_unstructured("Subject", ascii_subject))

    with patch.object(
            headers.encoding, 'MAX_HEADER_LENGTH', len(unicode_subject) + 1):
        eq_(Header(unicode_subject.encode("utf-8"), "utf-8",
                   header_name="Subject"),
            encode_unstructured("Subject", unicode_subject))

    with patch.object(headers.encoding, 'MAX_HEADER_LENGTH', 1):
        eq_(ascii_subject.encode("utf-8"),
            encode_unstructured("Subject", ascii_subject))
        eq_(unicode_subject.encode("utf-8"),
            encode_unstructured("Subject", unicode_subject))
def test_corrupted_read_writes_new(library):
    with ArcticTransaction(library, symbol, 'u1', 'l1') as mt:
        mt.write(symbol, ts1)

    res = library.read(symbol)
    assert res.version == 1

    with ArcticTransaction(library, symbol, 'u1', 'l2') as mt:
        mt.write(symbol, ts2)

    res = library.read(symbol)
    assert res.version == 2

    with patch.object(library, 'read') as l:
        l.side_effect = OperationFailure('some failure')
        with ArcticTransaction(library, symbol, 'u1', 'l2') as mt:
            mt.write(symbol, ts3, metadata={'a': 1, 'b': 2})

    res = library.read(symbol)
    # Corrupted data still increments on write to next version correctly
    # with new data
    assert res.version == 3
    assert_frame_equal(ts3, library.read(symbol, 3).data)
    assert res.metadata == {'a': 1, 'b': 2}

    with patch.object(library, 'read') as l:
        l.side_effect = OperationFailure('some failure')
        with ArcticTransaction(library, symbol, 'u1', 'l2') as mt:
            mt.write(symbol, ts3, metadata={'a': 1, 'b': 2})

    res = library.read(symbol)
    # Corrupted data still increments to next version correctly with
    # ts & metadata unchanged
    assert res.version == 4
    assert_frame_equal(ts3, library.read(symbol, 4).data)
    assert res.metadata == {'a': 1, 'b': 2}
def test_predict_1(self):
    with patch.object(self.xgb._model, "predict_proba", return_value=IV):
        with patch.object(self.xgb, "_clf", None):
            ov = np.zeros((1, 5))
            self.xgb._predict({}, ov, 0)
            assert np.allclose(IV, ov)
def test_connect_subscribes_if_subscription_is_set(self):
    with patch.object(self.external_queue, 'close'), \
            patch.object(self.external_queue, '_subscribe') as mock_subscribe:
        self.external_queue.subscription = 'routing_key'
        self.external_queue._connect()
    self.assertEqual(mock_subscribe.call_count, 1)
def test_watchers_are_finished(self, *args):
    """
    Test for asserting that watchers are closed in LibevConnection

    This test simulates a process termination without calling
    cluster.shutdown(), which would trigger
    LibevConnection._libevloop._cleanup. It will check the watchers
    have been closed. Finally it will restore the LibevConnection
    reactor so it doesn't affect the rest of the tests.

    @since 3.10
    @jira_ticket PYTHON-747
    @expected_result the watchers are closed

    @test_category connection
    """
    with patch.object(LibevConnection._libevloop, "_thread"), \
            patch.object(LibevConnection._libevloop, "notify"):

        self.make_connection()

        # We have to make a copy because the connections shouldn't
        # be alive when we verify them
        live_connections = set(LibevConnection._libevloop._live_conns)

        # This simulates the process ending without cluster.shutdown()
        # being called, then with atexit _cleanup for libevreactor would
        # be called
        libev__cleanup(weakref.ref(LibevConnection._libevloop))
        for conn in live_connections:
            for watcher in (conn._write_watcher, conn._read_watcher):
                self.assertTrue(watcher.stop.mock_calls)

    LibevConnection._libevloop._shutdown = False
def test_bookmarklet(self):
    """
    Does api/bookmarklet fetch, save, and return a response for the recipe?
    """
    fromTest = fromdir(__file__)
    loc = fromTest('recipe_page_source.html')
    pageSource = open(loc).read()

    pGet = patch.object(treq, 'get',
                        return_value=defer.succeed(None), autospec=True)
    pTreqContent = patch.object(treq, 'content',
                                return_value=defer.succeed(pageSource),
                                autospec=True)

    with pGet, pTreqContent:
        # normal bookmarklet use
        u = self._users()[0]
        req = self.requestJSON([], session_user=u)
        req.args['uri'] = ['http://www.foodandwine.com/recipes/poutine-style-twice-baked-potatoes']
        ret = yield self.handler('bookmarklet', req)
        self.assertEqual(len(recipe.Recipe.objects()), 1)
        expectedResults = (
            '{"status": "ok", '
            '"recipes": [{"name": "Delicious Meatless Meatballs", '
            '"urlKey": "weirdo-gmail-com-delicious-meatless-meatballs-"}], '
            '"message": ""}')
        assert ret == expectedResults

        # not signed in to noms; bookmarklet use should not be allowed
        req = self.requestJSON([])
        req.args['uri'] = ['http://www.foodandwine.com/recipes/poutine-style-twice-baked-potatoes']
        ret = yield self.handler('bookmarklet', req)
        expectedResults = (
            '{"status": "error", "recipes": [], '
            '"message": "User was not logged in."}')
        assert ret == expectedResults
def test_validate_instance_flavors(self, create_nove_cli_mock):
    patch.object(create_nove_cli_mock.return_value, 'flavors',
                 new_callable=PropertyMock(return_value=Mock()))
    mock_flv = create_nove_cli_mock.return_value.flavors.get.return_value
    mock_flv.ephemeral = 0

    test_instances = [{'flavor_id': 1, 'volume_size': 10},
                      {'flavor_id': 1, 'volume_size': 1.5,
                       'region_name': 'home'},
                      {'flavor_id': 2, 'volume_size': 3,
                       'region_name': 'work'}]
    models.validate_instance_flavors(Mock(), test_instances, True, True)
    create_nove_cli_mock.assert_has_calls([call(ANY, None),
                                           call(ANY, 'home'),
                                           call(ANY, 'work')])

    self.assertRaises(exception.LocalStorageNotSpecified,
                      models.validate_instance_flavors,
                      Mock(), test_instances, False, True)

    mock_flv.ephemeral = 1
    models.validate_instance_flavors(Mock(), test_instances, False, True)
def test_make_connection_no_signal(self, source_getter, sink_getter):
    """Test that building a connection adds no signal to the model when
    no source or sink is returned.
    """
    model = Model()

    class A(object):
        pass

    # Create a connection from a to b
    connection = mock.Mock()
    connection.pre_obj = A()
    connection.post_obj = A()

    # Create a mock network
    network = mock.Mock()
    network.seed = None
    network.connections = [connection]
    network.ensembles = []
    network.nodes = []
    network.networks = []
    network.probes = []

    # Patch the getters, add a null builder
    with patch.object(model, "source_getters", {A: source_getter}), \
            patch.object(model, "sink_getters", {A: sink_getter}), \
            patch.object(model, "connection_parameter_builders",
                         {A: mock.Mock()}):
        # Build the network
        model.build(network)

    # Assert that no signal exists
    assert connection not in model.connections_signals
def test_handle_list(self):
    processor = CommandListProcessor()
    with patch.object(processor, 'run_command') as run_command:
        kwargs = {'a': 1, 'b': 2}
        processor.handle_list(['a.txt', 'b.js', 'c.css'], **kwargs)
        run_command.assert_called_with('a.txt b.js c.css', **kwargs)
def test_yehua_file_passed_in_command_line():
    args = ["yehua", "/tmp/yehua.yml"]
    with patch("yehua.main.Project") as mocked_project:
        with patch.object(sys, "argv", args):
            main()
            mocked_project.assert_called()
def test_main_dash_h():
    args = ["yehua", "-h"]
    with patch("sys.stdout", new_callable=StringIO) as out:
        with patch.object(sys, "argv", args):
            main()
            eq_(out.getvalue(), HELP_TEXT)
def test_assess_training_example_database_error(self):
    training_api.get_training_example(self.submission_uuid, RUBRIC, EXAMPLES)
    with patch.object(StudentTrainingWorkflow.objects, 'get') as mock_db:
        mock_db.side_effect = DatabaseError("Kaboom!")
        with self.assertRaises(StudentTrainingInternalError):
            training_api.assess_training_example(
                self.submission_uuid, EXAMPLES[0]['options_selected'])
def test_close_closes_the_client_connection(self):
    with patch.object(self.session, 'close') as close_mock:
        response = self.sendRequest(u'close')
        # the garbled "assertEqualResponsecalled_once_with" was two
        # statements fused together; split them back apart
        close_mock.assert_called_once_with()
        self.assertEqualResponse(u'OK')
def test_parse_syntax_only_false():
    # syntax + validation
    valid_tld_list = [i + '@ai' for i in valid_localparts()]
    valid_domain_list = [i + '@mailgun.org' for i in valid_localparts()]
    valid_subdomain_list = [
        i + '@fakecompany.mailgun.org' for i in valid_localparts()]

    invalid_mx_list = [i + '@example.com' for i in valid_localparts(True)]
    invalid_tld_list = [i + '@com' for i in invalid_localparts(True)]
    invalid_domain_list = [
        i + '@example.com' for i in invalid_localparts(True)]
    invalid_subdomain_list = [
        i + '@sub.example.com' for i in invalid_localparts(True)]

    all_valid_list = valid_tld_list + valid_domain_list + \
        valid_subdomain_list
    all_invalid_list = invalid_domain_list + invalid_subdomain_list + \
        invalid_tld_list + invalid_mx_list
    all_list = all_valid_list + all_invalid_list

    # all valid
    with patch.object(address, 'mail_exchanger_lookup') as mock_method:
        mock_method.side_effect = mock_exchanger_lookup

        parse, unpar = address.validate_list(
            ', '.join(valid_tld_list), as_tuple=True)
        assert_equal(parse, valid_tld_list)
        assert_equal(unpar, [])

        parse, unpar = address.validate_list(
            ', '.join(valid_domain_list), as_tuple=True)
        assert_equal(parse, valid_domain_list)
        assert_equal(unpar, [])

        parse, unpar = address.validate_list(
            ', '.join(valid_subdomain_list), as_tuple=True)
        assert_equal(parse, valid_subdomain_list)
        assert_equal(unpar, [])

        # all invalid
        parse, unpar = address.validate_list(invalid_mx_list, as_tuple=True)
        assert_equal(parse, [])
        assert_equal(unpar, invalid_mx_list)

        parse, unpar = address.validate_list(invalid_tld_list, as_tuple=True)
        assert_equal(parse, [])
        assert_equal(unpar, invalid_tld_list)

        parse, unpar = address.validate_list(
            invalid_domain_list, as_tuple=True)
        assert_equal(parse, [])
        assert_equal(unpar, invalid_domain_list)

        parse, unpar = address.validate_list(
            invalid_subdomain_list, as_tuple=True)
        assert_equal(parse, [])
        assert_equal(unpar, invalid_subdomain_list)

        parse, unpar = address.validate_list(all_list, as_tuple=True)
        assert_equal(parse, all_valid_list)
        assert_equal(unpar, all_invalid_list)
def test_handle_file(self):
    processor = CommandFileProcessor()
    with patch.object(processor, 'run_command') as run_command:
        kwargs = {'a': 1, 'b': 2}
        processor.handle_file('a.txt', **kwargs)
        run_command.assert_called_with('a.txt', **kwargs)
def _assert_template_for_offset(self, offset, message_count):  # lint-amnesty, pylint: disable=missing-function-docstring
    current_day, offset, target_day, upgrade_deadline = self._get_dates(offset)  # lint-amnesty, pylint: disable=unused-variable

    user = UserFactory.create()
    for course_index in range(message_count):
        self._schedule_factory(
            offset=offset,
            enrollment__user=user,
            enrollment__course__id=CourseKey.from_string(
                'edX/toy/course{}'.format(course_index)))

    patch_policies(self, [StubPolicy([ChannelType.PUSH])])

    mock_channel = Mock(
        channel_type=ChannelType.EMAIL,
        action_links=[],
        tracker_image_sources=[],
    )

    channel_map = ChannelMap([
        ['sailthru', mock_channel],
    ])

    sent_messages = []
    with self.settings(TEMPLATES=self._get_template_overrides()):
        with patch.object(self.task, 'async_send_task') as mock_schedule_send:
            mock_schedule_send.apply_async = \
                lambda args, *_a, **_kw: sent_messages.append(args)

            num_expected_queries = NUM_QUERIES_FIRST_MATCH
            if self.queries_deadline_for_each_course:
                # one query per course for opt-out and one for course modes
                num_expected_queries += (message_count * 2) - 1
            else:
                num_expected_queries += 1

            with self.assertNumQueries(num_expected_queries,
                                       table_blacklist=WAFFLE_TABLES):
                self.task().apply(kwargs=dict(
                    site_id=self.site_config.site.id,
                    target_day_str=serialize(target_day),
                    day_offset=offset,
                    bin_num=self._calculate_bin_for_user(user),
                ))

        num_expected_messages = (
            1 if self.consolidates_emails_for_learner else message_count)
        assert len(sent_messages) == num_expected_messages

        with self.assertNumQueries(NUM_QUERIES_PER_MESSAGE_DELIVERY):
            with patch(
                'openedx.core.djangoapps.schedules.tasks.segment.track'
            ) as mock_segment_track:
                with patch('edx_ace.channel.channels',
                           return_value=channel_map):
                    self.deliver_task(*sent_messages[0])
                    assert mock_segment_track.call_count == 1

        assert mock_channel.deliver.call_count == 1
        for (_name, (_msg, email), _kwargs) in mock_channel.deliver.mock_calls:
            for template in attr.astuple(email):
                assert 'TEMPLATE WARNING' not in template
                assert '{{' not in template
                assert '}}' not in template

    return mock_channel.deliver.mock_calls
def testGetConfig(self):
    """Test that the config Object is returned"""
    with patch.object(requests, 'get') as mockMethod:
        mockMethod.return_value.status_code = 200
        mockMethod.return_value.text = """
<nutcallstatus>
  <!--this is similar to all.xml, but with more values-->
  <!--all temperatures are displayed in tenths F, regardless of setting of unit-->
  <!--all temperatures sent by browser to unit should be in F. you can send-->
  <!--tenths F with a decimal place, ex: 123.5-->
  <COOK>
    <COOK_NAME>Big Green Egg</COOK_NAME>
    <COOK_TEMP>3220</COOK_TEMP>
    <COOK_SET>4000</COOK_SET>
    <COOK_STATUS>0</COOK_STATUS>
  </COOK>
  <FOOD1>
    <FOOD1_NAME>Chicken Quarters</FOOD1_NAME>
    <FOOD1_TEMP>1493</FOOD1_TEMP>
    <FOOD1_SET>1750</FOOD1_SET>
    <FOOD1_STATUS>0</FOOD1_STATUS>
  </FOOD1>
  <FOOD2>
    <FOOD2_NAME>Food2</FOOD2_NAME>
    <FOOD2_TEMP>OPEN</FOOD2_TEMP>
    <FOOD2_SET>1000</FOOD2_SET>
    <FOOD2_STATUS>4</FOOD2_STATUS>
  </FOOD2>
  <FOOD3>
    <FOOD3_NAME>Food3</FOOD3_NAME>
    <FOOD3_TEMP>OPEN</FOOD3_TEMP>
    <FOOD3_SET>1000</FOOD3_SET>
    <FOOD3_STATUS>4</FOOD3_STATUS>
  </FOOD3>
  <OUTPUT_PERCENT>100</OUTPUT_PERCENT>
  <TIMER_CURR>00:00:00</TIMER_CURR>
  <TIMER_STATUS>0</TIMER_STATUS>
  <SYSTEM>
    <MENU_SCROLLING>1</MENU_SCROLLING>
    <LCD_BACKLIGHT>47</LCD_BACKLIGHT>
    <LCD_CONTRAST>10</LCD_CONTRAST>
    <DEG_UNITS>1</DEG_UNITS>
    <ALARM_BEEPS>0</ALARM_BEEPS>
    <KEY_BEEPS>0</KEY_BEEPS>
  </SYSTEM>
  <CONTROL>
    <TIMEOUT_ACTION>0</TIMEOUT_ACTION>
    <COOKHOLD>2000</COOKHOLD>
    <ALARMDEV>500</ALARMDEV>
    <COOK_RAMP>0</COOK_RAMP>
    <OPENDETECT>1</OPENDETECT>
    <CYCTIME>6</CYCTIME>
    <PROPBAND>500</PROPBAND>
  </CONTROL>
  <WIFI>
    <IP>10.0.1.30</IP>
    <NM>255.255.255.0</NM>
    <GW>10.0.1.1</GW>
    <DNS>10.0.1.1</DNS>
    <WIFIMODE>0</WIFIMODE>
    <DHCP>0</DHCP>
    <SSID>Wireless Network</SSID>
    <WIFI_ENC>6</WIFI_ENC>
    <WIFI_KEY>SsecretKey</WIFI_KEY>
    <HTTP_PORT>80</HTTP_PORT>
  </WIFI>
  <SMTP>
    <SMTP_HOST>smtp.hostname.com</SMTP_HOST>
    <SMTP_PORT>0</SMTP_PORT>
    <SMTP_USER></SMTP_USER>
    <SMTP_PWD></SMTP_PWD>
    <SMTP_TO>[email protected]</SMTP_TO>
    <SMTP_FROM>[email protected]</SMTP_FROM>
    <SMTP_SUBJ>Temperature Controller Status E-Mail</SMTP_SUBJ>
    <SMTP_ALERT>0</SMTP_ALERT>
  </SMTP>
</nutcallstatus>
"""
        configObj = CyberQInterface("127.0.0.1").getConfig()
        self.assertEqual(configObj.CONTROL.OPENDETECT, 1)
def test_handle_file(self):
    processor = BaseFileProcessor()
    with patch.object(processor, 'handle_file') as handle_file:
        kwargs = {'a': 1, 'b': 2}
        processor.handle_list(['/path/to/file.txt'], **kwargs)
        handle_file.assert_called_with('/path/to/file.txt', **kwargs)
def _run_with_config(self, cfg):
    with patch.object(utils, 'parse_openstack_ini', return_value=cfg):
        if hasattr(self.func, 'im_func'):
            # Python 2: unbound methods expose the function as im_func
            return self.func.im_func(self.conf)
        else:
            # Python 3: plain functions expose __func__
            return self.func.__func__(self.conf)
def mock_dvcignore(dvcignore_path, patterns):
    tree = MagicMock()
    with patch.object(tree, "open",
                      mock_open(read_data="\n".join(patterns))):
        ignore_patterns = DvcIgnorePatterns(dvcignore_path, tree)

    return ignore_patterns
def test_parameter_passing(self):
    with patch.object(SelfRegistrationInvitation, 'initiate_workflow',
                      return_value=([], [], [])) as init:
        response = self.make_api_post(
            self.domain1,
            'admin@reg-api-test-1',
            'admin@reg-api-test-1-password',
            {
                'app_id': '123',
                'users': [{'phone_number': '999123'}],
            },
        )
        self.assertEqual(response.status_code, 200)
        init.assert_called_once_with(
            self.domain1.name,
            [SelfRegistrationUserInfo('999123')],
            app_id='123',
            custom_first_message=None,
            android_only=False,
            require_email=False,
        )

    with patch.object(SelfRegistrationInvitation, 'initiate_workflow',
                      return_value=([], [], [])) as init:
        response = self.make_api_post(
            self.domain1,
            'admin@reg-api-test-1',
            'admin@reg-api-test-1-password',
            {
                'app_id': '123',
                'users': [{'phone_number': '999123',
                           'custom_user_data': {'abc': 'def'}}],
            },
        )
        self.assertEqual(response.status_code, 200)
        init.assert_called_once_with(
            self.domain1.name,
            [SelfRegistrationUserInfo('999123', {'abc': 'def'})],
            app_id='123',
            custom_first_message=None,
            android_only=False,
            require_email=False,
        )

    with patch.object(SelfRegistrationInvitation, 'initiate_workflow',
                      return_value=([], [], [])) as init:
        response = self.make_api_post(
            self.domain1,
            'admin@reg-api-test-1',
            'admin@reg-api-test-1-password',
            {
                'app_id': '123',
                'users': [{'phone_number': '999123'}],
                'android_only': True,
            },
        )
        self.assertEqual(response.status_code, 200)
        init.assert_called_once_with(
            self.domain1.name,
            [SelfRegistrationUserInfo('999123')],
            app_id='123',
            custom_first_message=None,
            android_only=True,
            require_email=False,
        )

    with patch.object(SelfRegistrationInvitation, 'initiate_workflow',
                      return_value=([], [], [])) as init:
        response = self.make_api_post(
            self.domain1,
            'admin@reg-api-test-1',
            'admin@reg-api-test-1-password',
            {
                'app_id': '123',
                'users': [{'phone_number': '999123'}],
                'require_email': True,
            },
        )
        self.assertEqual(response.status_code, 200)
        init.assert_called_once_with(
            self.domain1.name,
            [SelfRegistrationUserInfo('999123')],
            app_id='123',
            custom_first_message=None,
            android_only=False,
            require_email=True,
        )

    with patch.object(SelfRegistrationInvitation, 'initiate_workflow',
                      return_value=([], [], [])) as init:
        response = self.make_api_post(
            self.domain1,
            'admin@reg-api-test-1',
            'admin@reg-api-test-1-password',
            {
                'app_id': '123',
                'users': [{'phone_number': '999123'}],
                'custom_registration_message': 'Hello',
            },
        )
        self.assertEqual(response.status_code, 200)
        init.assert_called_once_with(
            self.domain1.name,
            [SelfRegistrationUserInfo('999123')],
            app_id='123',
            custom_first_message='Hello',
            android_only=False,
            require_email=False,
        )
def test_custom_firefox_profile(self):
    patch_object = patch.object(webdriver, 'FirefoxProfile').start()
    self.addCleanup(patch.stopall)
    browser_kwargs_tuple = bok_choy.browser._local_browser_class('firefox')  # pylint: disable=protected-access
    assert 'firefox_profile' in browser_kwargs_tuple[2]
    patch_object.assert_called_with('/foo/path')
def test_appiter_close_is_called(self):
    app = self.make_test_app()
    with patch.object(AppIter, 'close') as close:
        app.get('/')
        self.assertEqual(close.call_count, 1)
def test_no_group(self):
    with patch.object(pwd, 'getpwnam', return_value=self.pwd_root):
        with patch.object(grp, 'getgrnam', side_effect=KeyError()):
            res = test_neutron.config_permission(self.conf)
            self.assertEqual(res.result, Result.SKIP)
def YouCompleteMe_OnPeriodicTick_ServerNotReady_test(ycm, *args):
    with patch.object(ycm, 'IsServerAlive', return_value=True):
        with patch.object(ycm, 'IsServerReady', return_value=False):
            assert_that(ycm.OnPeriodicTick(), equal_to(True))
def test_android_registration_from_invite(self):
    self.domain_obj.sms_mobile_worker_registration_enabled = True
    self.domain_obj.enable_registration_welcome_sms_for_mobile_worker = True
    self.domain_obj.save()

    user_data = {'abc': 'def'}

    # Initiate Registration Workflow
    SelfRegistrationInvitation.initiate_workflow(
        self.domain,
        [SelfRegistrationUserInfo('999123', user_data)],
        app_id=self.app_id,
    )

    self.assertRegistrationInvitation(
        phone_number='999123',
        app_id=self.app_id,
        phone_type=None,
        android_only=False,
        require_email=False,
        custom_user_data=user_data,
        status=SelfRegistrationInvitation.STATUS_PENDING,
    )
    self.assertLastOutgoingSMS(
        '+999123', [_MESSAGES[MSG_MOBILE_WORKER_INVITATION_START]])

    # Choose phone type 'android'
    with patch('corehq.apps.sms.models.SelfRegistrationInvitation.odk_url') as mock_odk_url, \
            patch.object(SelfRegistrationInvitation, 'get_user_registration_url',
                         return_value=DUMMY_REGISTRATION_URL), \
            patch.object(SelfRegistrationInvitation, 'get_app_info_url',
                         return_value=DUMMY_APP_INFO_URL):
        mock_odk_url.__get__ = Mock(return_value=DUMMY_APP_ODK_URL)
        incoming('+999123', '1', self.backend.hq_api_id)

    self.assertRegistrationInvitation(
        phone_number='999123',
        app_id=self.app_id,
        phone_type=SelfRegistrationInvitation.PHONE_TYPE_ANDROID,
        android_only=False,
        require_email=False,
        custom_user_data=user_data,
        status=SelfRegistrationInvitation.STATUS_PENDING,
    )
    self.assertLastOutgoingSMS('+999123', [
        _MESSAGES[MSG_MOBILE_WORKER_ANDROID_INVITATION].format(
            DUMMY_REGISTRATION_URL),
        '[commcare app - do not delete] {}'.format(DUMMY_APP_INFO_URL),
    ])

    invite = self._get_sms_registration_invitation()
    c = Client()
    response = c.post(
        '/a/{}/settings/users/commcare/register/{}/'.format(
            self.domain, invite.token),
        {
            'username': '******',
            'password': '******',
            'password2': 'abc',
            'email': '*****@*****.**',
        })
    self.assertEqual(response.status_code, 200)

    user = CommCareUser.get_by_username(
        format_username('new_user', self.domain))
    self.assertIsNotNone(user)
    self.assertEqual(user.user_data, user_data)
    self.assertEqual(user.email, '*****@*****.**')
    self.assertEqual(PhoneNumber.by_phone('999123').owner_id, user.get_id)

    self.assertRegistrationInvitation(
        status=SelfRegistrationInvitation.STATUS_REGISTERED,
    )
def test_extraction_with_single_result(self):
    # type: () -> None
    with patch.object(SQLAlchemyExtractor,
                      '_get_connection') as mock_connection:
        connection = MagicMock()
        mock_connection.return_value = connection
        sql_execute = MagicMock()
        connection.execute = sql_execute
        table = {'schema_name': 'test_schema',
                 'name': 'test_table',
                 'description': '',
                 'cluster': self.conf['extractor.athena_metadata.{}'.format(
                     AthenaMetadataExtractor.CATALOG_KEY)],
                 }

        sql_execute.return_value = [
            self._union(
                {'col_name': 'col_id1',
                 'col_type': 'bigint',
                 'col_description': 'description of id1',
                 'col_sort_order': 0,
                 'extras': None}, table),
            self._union(
                {'col_name': 'col_id2',
                 'col_type': 'bigint',
                 'col_description': 'description of id2',
                 'col_sort_order': 1,
                 'extras': None}, table),
            self._union(
                {'col_name': 'is_active',
                 'col_type': 'boolean',
                 'col_description': None,
                 'col_sort_order': 2,
                 'extras': None}, table),
            self._union(
                {'col_name': 'source',
                 'col_type': 'varchar',
                 'col_description': 'description of source',
                 'col_sort_order': 3,
                 'extras': None}, table),
            self._union(
                {'col_name': 'etl_created_at',
                 'col_type': 'timestamp',
                 'col_description': None,
                 'col_sort_order': 4,
                 'extras': 'partition key'}, table),
            self._union(
                {'col_name': 'ds',
                 'col_type': 'varchar',
                 'col_description': None,
                 'col_sort_order': 5,
                 'extras': None}, table)
        ]

        extractor = AthenaMetadataExtractor()
        extractor.init(self.conf)
        actual = extractor.extract()
        expected = TableMetadata(
            'athena',
            self.conf['extractor.athena_metadata.{}'.format(
                AthenaMetadataExtractor.CATALOG_KEY)],
            'test_schema', 'test_table', '',
            [ColumnMetadata('col_id1', 'description of id1', 'bigint', 0),
             ColumnMetadata('col_id2', 'description of id2', 'bigint', 1),
             ColumnMetadata('is_active', None, 'boolean', 2),
             ColumnMetadata('source', 'description of source', 'varchar', 3),
             ColumnMetadata('etl_created_at', 'partition key',
                            'timestamp', 4),
             ColumnMetadata('ds', None, 'varchar', 5)])

        self.assertEqual(expected.__repr__(), actual.__repr__())
        self.assertIsNone(extractor.extract())
def test_no_need_admin(self):
    """ need_admin is False if 1+ admins """
    access = IMutableAccessBackend(None)
    with patch.object(access, 'user_data') as user_data:
        user_data.return_value = [{'admin': False}, {'admin': True}]
        self.assertFalse(access.need_admin())
def test_oembed_invalid_request(self):
    config = {'side_effect': URLError('foo')}
    with patch.object(django.utils.six.moves.urllib.request, 'urlopen',
                      **config):
        self.assertRaises(EmbedNotFoundException, tuiuiu_oembed,
                          "http://www.youtube.com/watch/")
def test_extraction_with_single_result(self):
    # type: () -> None
    with patch.object(SQLAlchemyExtractor,
                      '_get_connection') as mock_connection:
        connection = MagicMock()
        mock_connection.return_value = connection
        sql_execute = MagicMock()
        connection.execute = sql_execute
        table = {
            'schema_name': 'test_schema',
            'name': 'test_table',
            'description': 'a table for testing',
            'cluster': self.conf['extractor.mssql_metadata.{}'.format(
                MSSQLMetadataExtractor.CLUSTER_KEY)]
        }

        sql_execute.return_value = [
            self._union(
                {'col_name': 'col_id1',
                 'col_type': 'bigint',
                 'col_description': 'description of id1',
                 'col_sort_order': 0}, table),
            self._union(
                {'col_name': 'col_id2',
                 'col_type': 'bigint',
                 'col_description': 'description of id2',
                 'col_sort_order': 1}, table),
            self._union(
                {'col_name': 'is_active',
                 'col_type': 'boolean',
                 'col_description': None,
                 'col_sort_order': 2}, table),
            self._union(
                {'col_name': 'source',
                 'col_type': 'varchar',
                 'col_description': 'description of source',
                 'col_sort_order': 3}, table),
            self._union(
                {'col_name': 'etl_created_at',
                 'col_type': 'timestamp',
                 'col_description': 'description of etl_created_at',
                 'col_sort_order': 4}, table),
            self._union(
                {'col_name': 'ds',
                 'col_type': 'varchar',
                 'col_description': None,
                 'col_sort_order': 5}, table)
        ]

        extractor = MSSQLMetadataExtractor()
        extractor.init(self.conf)
        actual = extractor.extract()
        expected = TableMetadata(
            'mssql', 'MY_CLUSTER', 'test_schema', 'test_table',
            'a table for testing',
            [ColumnMetadata('col_id1', 'description of id1', 'bigint', 0),
             ColumnMetadata('col_id2', 'description of id2', 'bigint', 1),
             ColumnMetadata('is_active', None, 'boolean', 2),
             ColumnMetadata('source', 'description of source', 'varchar', 3),
             ColumnMetadata('etl_created_at',
                            'description of etl_created_at', 'timestamp', 4),
             ColumnMetadata('ds', None, 'varchar', 5)],
            tags='test_schema')

        self.assertEqual(expected.__repr__(), actual.__repr__())
        self.assertIsNone(extractor.extract())
def test_returns_stripped_value(self):
    response = StringIO.StringIO('simple server.\n')
    with patch.object(amm, 'urlopen', autospec=True,
                      return_value=response):
        self.assertEqual(
            amm.get_server_response('192.168.1.2'), 'simple server.')
def _section_send_email(course, access):
    """ Provide data for the corresponding bulk email section """
    course_key = course.id

    # Monkey-patch applicable_aside_types to return no asides for the
    # duration of this render
    with patch.object(course.runtime, 'applicable_aside_types',
                      null_applicable_aside_types):
        # This HtmlBlock is only being used to generate a nice text editor.
        html_module = HtmlBlock(
            course.system,
            DictFieldData({'data': ''}),
            ScopeIds(None, None, None,
                     course_key.make_usage_key('html', 'fake')))
        fragment = course.system.render(html_module, 'studio_view')
    fragment = wrap_xblock(
        'LmsRuntime', html_module, 'studio_view', fragment, None,
        extra_data={"course-id": six.text_type(course_key)},
        usage_id_serializer=lambda usage_id: quote_slashes(
            six.text_type(usage_id)),
        # Generate a new request_token here at random, because this module
        # isn't connected to any other xblock rendering.
        request_token=uuid.uuid1().hex)

    cohorts = []
    if is_course_cohorted(course_key):
        cohorts = get_course_cohorts(course)

    course_modes = []
    if not VerifiedTrackCohortedCourse.is_verified_track_cohort_enabled(
            course_key):
        course_modes = CourseMode.modes_for_course(
            course_key, include_expired=True, only_selectable=False)

    email_editor = fragment.content
    section_data = {
        'section_key': 'send_email',
        'section_display_name': _('Email'),
        'access': access,
        'send_email': reverse(
            'send_email', kwargs={'course_id': six.text_type(course_key)}),
        'editor': email_editor,
        'cohorts': cohorts,
        'course_modes': course_modes,
        'default_cohort_name': DEFAULT_COHORT_NAME,
        'list_instructor_tasks_url': reverse(
            'list_instructor_tasks',
            kwargs={'course_id': six.text_type(course_key)}),
        'email_background_tasks_url': reverse(
            'list_background_email_tasks',
            kwargs={'course_id': six.text_type(course_key)}),
        'email_content_history_url': reverse(
            'list_email_content',
            kwargs={'course_id': six.text_type(course_key)}),
    }
    return section_data
def test_record_crash(runner, has_crashed, mock_marionette):
    with patch.object(runner, "marionette", mock_marionette):
        assert runner.record_crash() == has_crashed
        _check_crash_counts(has_crashed, runner, runner.marionette)
def patch_amm(target):
    return patch.object(amm, target, autospec=True)
def test_assess_create_budget(self):
    with patch.object(self.fake_client, 'get_juju_output'):
        with patch("assess_budget.create_budget"):
            assess_create_budget(self.fake_client, self.budget_name,
                                 self.budget_value, self.budget_limit)
def test_uses_protocol_and_ipaddress(self):
    with patch.object(amm, 'urlopen', autospec=True) as m_uopen:
        amm.get_server_response('192.168.1.2')
        m_uopen.assert_called_once_with('http://192.168.1.2')
def test_assess_list_budgets(self):
    with patch.object(self.fake_client, 'get_juju_output'):
        with patch("assess_budget.json.loads",
                   return_value=self.fake_budgets_json):
            assess_list_budgets(self.fake_client, self.fake_budget_json)
def _raw_moban(self, args, folder, expected, output):
    os.chdir(os.path.join("docs", folder))
    with patch.object(sys, "argv", args):
        main()
        _verify_content(output, expected)
    os.unlink(output)