def test_different_api_class(self):
    """A dotted-path ``bowab.api_class`` setting resolves to that class."""
    mock_registry = Mock()
    mock_registry.settings = {
        'bowab.api_class': 'sixfeetup.bowab.tests.subscribers.Dummy',
    }
    resolved = get_api_class(mock_registry)
    self.assertEqual(resolved, Dummy)
def mock_sublime_view(regions=None, substr=lambda sel: None):
    """Build a fake Sublime view for unit tests.

    Settings report ``tab_size=4``, the selection yields *regions*, text
    reads go through *substr*, and ``replace`` is a plain Mock.
    """
    fake_view = Mock()
    fake_view.settings = Mock(
        return_value=mock_sublime_settings({'tab_size': 4}))
    fake_view.sel = Mock(return_value=regions)
    fake_view.substr = substr
    fake_view.replace = Mock()
    return fake_view
def test_settings(self, mock_current_registry):
    """settings() pulls a named value out of the current registry."""
    fake_username = "******"
    fake_registry = Mock()
    fake_registry.settings = {'cloud_username': fake_username}
    mock_current_registry.return_value = fake_registry
    self.assertEqual(settings('cloud_username'), fake_username)
def test_unconfigured_init(self):
    """Missing DATADOG_API_KEY (settings *and* env) raises NotConfigured."""
    fake_crawler = Mock()
    # Scrub the key from the environment so only the settings matter.
    os.environ.pop('DATADOG_API_KEY', None)
    fake_crawler.settings = Settings(
        {'DATADOG_APP_KEY': 'azertyuiop123456789'})
    with self.assertRaises(NotConfigured):
        self.extension = DatadogExtension.from_crawler(fake_crawler)
def test_configured_init(self, handler):
    """A configured LOGENTRIES_TOKEN builds the extension and wires the
    log handler with that token."""
    token = 'azertyuiop123456789'
    crawler = Mock()
    crawler.settings = {'LOGENTRIES_TOKEN': token, }
    extension = LogentriesExtension.from_crawler(crawler)
    handler.assert_called_once_with(token)
    # NOTE(review): the bare attribute access below has no effect; an
    # assertion on `extension.handler` was presumably intended -- confirm.
    extension.handler
def test_unconfigured_init(self):
    """A None DATADOG_API_KEY must make from_crawler raise NotConfigured."""
    fake_crawler = Mock()
    fake_crawler.settings = {
        'DATADOG_API_KEY': None,
        'DATADOG_APP_KEY': 'azertyuiop123456789'
    }
    with self.assertRaises(NotConfigured):
        self.extension = DatadogExtension.from_crawler(fake_crawler)
def test_get_all_jobs_when_no_url_set(self):
    """With no Jenkins URL configured, get_all_jobs reports through
    send_feedback (here rigged to raise TestException so we can stop)."""
    fake_workflow = Mock(Workflow)
    fake_workflow.settings = {}
    iface = JenkinsInterface(fake_workflow)
    fake_workflow.send_feedback.side_effect = TestException()
    try:
        iface.get_all_jobs()
    except TestException:
        pass
    fake_workflow.send_feedback.assert_called_once()
def mock_sublime_view(regions=None, substr=lambda sel: None):
    """Return a stub Sublime view: tab_size=4 settings, *regions* as the
    selection, and text access delegated to *substr*."""
    stub = Mock()
    stub.settings = Mock(return_value=mock_sublime_settings({'tab_size': 4}))
    stub.sel = Mock(return_value=regions)
    stub.substr = substr
    stub.replace = Mock()
    return stub
def test_request_timing_slow_request(self, mock_log, mock_time):
    """With a zero slow-request threshold every request is warned about,
    and the debug channel stays quiet."""
    mock_time.time = mock_timer()
    fake_request = Mock(method='GET', url='http://example.com')
    fake_registry = Mock()
    fake_registry.settings = {'request_timing.slow_request_threshold': 0}

    def passthrough(request):
        return request

    tween = tweens.request_timing(passthrough, fake_registry)
    tween(fake_request)
    mock_log.warning.assert_called_once_with(
        'GET (http://example.com) request took 1 seconds')
    assert not mock_log.debug.called
def _generate_event(rendering_val=None, registry=None):
    """Build a BeforeRender event carrying a DummyRequest.

    Omitted arguments default to a fresh empty dict / a Mock registry
    with empty settings (mutable defaults avoided on purpose).
    """
    rendering = {} if rendering_val is None else rendering_val
    if registry is None:
        registry = Mock()
        registry.settings = {}
    event = BeforeRender({}, rendering_val=rendering)
    event.rendering_val = rendering
    event['request'] = testing.DummyRequest()
    event['request'].registry = registry
    return event
def test_celeryd(self, workercommand):
    """Smoke-test the celeryd ``main`` entry point with a mocked worker
    command."""
    from pyramid_celery.commands.celeryd import main
    worker = Mock()
    run = Mock()
    worker.run = run
    workercommand.return_value = worker
    settings = {'CELERY_ALWAYS_EAGER': True}
    registry = Mock()
    registry.settings = settings
    # NOTE(review): `registry`/`settings` are built but never wired into
    # main(), and nothing is asserted afterwards -- looks incomplete;
    # compare the fuller test_celeryd variant in this file. Confirm intent.
    main()
def test_celery(self):
    """Celery reads its configuration from registry.settings and keeps
    the env it was constructed with."""
    from pyramid_celery import Celery
    config = {"CELERY_ALWAYS_EAGER": True}
    fake_registry = Mock()
    fake_registry.settings = config
    environment = {"registry": fake_registry}
    celery_app = Celery(environment)
    loaded = celery_app.loader.read_configuration()
    pickle_args = celery_app.__reduce_args__()
    assert pickle_args[0] == environment
    assert config == loaded
    assert celery_app.env == environment
def test_celery(self):
    """Settings strings are coerced to typed Celery values on load
    (bools, ints, tuples, dicts, floats, lists; unknown keys pass
    through unchanged)."""
    from pyramid_celery import Celery
    settings = {
        'CELERY_ALWAYS_EAGER': 'true',
        'CELERYD_CONCURRENCY': '1',
        # NOTE(review): this literal looks garbled/redacted -- it mixes
        # what appear to be BROKER_URL and ADMINS fragments, and the
        # assertions below reference 'ADMINS' and
        # 'BROKER_TRANSPORT_OPTIONS' keys that this dict never defines.
        # Confirm against the original fixture before relying on it.
        'BROKER_URL': '"redis:://*****:*****@bar"), ("Baz Qux", "baz@qux"))',
        'CELERYD_ETA_SCHEDULER_PRECISION': '0.1',
        'CASSANDRA_SERVERS': '["foo", "bar"]',
        'CELERY_ANNOTATIONS': '[1, 2, 3]',  # any
        'CELERY_ROUTERS': 'some.string',  # also any
        'SOME_KEY': 'SOME VALUE',
        'CELERY_IMPORTS': '("myapp.tasks", )'
    }
    registry = Mock()
    registry.settings = settings
    env = { 'registry': registry }
    celery = Celery(env)
    new_settings = celery.loader.read_configuration()
    reduced_args = celery.__reduce_args__()
    assert reduced_args[0] == env
    assert settings == new_settings
    assert celery.env == env
    # Check conversions
    assert new_settings['CELERY_ALWAYS_EAGER'] == True
    assert new_settings['CELERYD_CONCURRENCY'] == 1
    assert new_settings['ADMINS'] == (
        ("Foo Bar", "foo@bar"),
        ("Baz Qux", "baz@qux")
    )
    assert new_settings['BROKER_TRANSPORT_OPTIONS'] == {"foo": "bar"}
    # Float compared by bracketing to dodge exact-equality issues.
    assert new_settings['CELERYD_ETA_SCHEDULER_PRECISION'] > 0.09
    assert new_settings['CELERYD_ETA_SCHEDULER_PRECISION'] < 0.11
    assert new_settings['CASSANDRA_SERVERS'] == ["foo", "bar"]
    assert new_settings['CELERY_ANNOTATIONS'] == [1, 2, 3]
    assert new_settings['CELERY_ROUTERS'] == 'some.string'
    assert new_settings['SOME_KEY'] == settings['SOME_KEY']
    assert new_settings['CELERY_IMPORTS'] == ("myapp.tasks", )
def test_configured_init(self):
    """With all Datadog settings supplied, from_crawler must not raise.

    Fixed: the original used a bare ``except:``, which also swallows
    SystemExit/KeyboardInterrupt and discards the exception, making
    failures undiagnosable. Now catches Exception, keeps the instance,
    and reports it in the assertion message -- consistent with the
    sibling Settings-based variant of this test.
    """
    dd_api_key = 'azertyuiop123456789'
    dd_app_key = 'azertyuiop123456789'
    scrapy_id = '000'
    crawler = Mock()
    crawler.settings = {
        'DATADOG_API_KEY': dd_api_key,
        'DATADOG_APP_KEY': dd_app_key,
        'SCRAPY_PROJECT_ID': scrapy_id
    }
    raised = None
    try:
        DatadogExtension.from_crawler(crawler)
    except Exception as e:  # noqa -- any failure means misconfiguration
        raised = e
    self.assertIsNone(raised, 'Exception raised: {}'.format(raised))
def test_configured_init(self):
    """from_crawler succeeds when every Datadog setting is supplied."""
    api_key = 'azertyuiop123456789'
    app_key = 'azertyuiop123456789'
    project_id = '000'
    fake_crawler = Mock()
    fake_crawler.settings = Settings({
        'DATADOG_API_KEY': api_key,
        'DATADOG_APP_KEY': app_key,
        'SCRAPY_PROJECT_ID': project_id,
    })
    caught = None
    try:
        DatadogExtension.from_crawler(fake_crawler)
    except Exception as e:  # noqa
        caught = e
    self.assertIsNone(caught, 'Exception raised: {}'.format(caught))
def apprequest(dbsession, _registry):
    """Assemble a DummyRequest wired with caches, a stub user, and the
    given database session, for exercising ringo views in tests."""
    from ringo.lib.cache import Cache
    req = testing.DummyRequest()
    req.cache_item_modul = Cache()
    req.cache_item_list = Cache()
    stub_user = Mock()
    stub_user.news = []
    stub_user.settings = {'searches': {'foo': 'bar'}}
    req.user = stub_user
    req.accept_language = Mock(return_value="en")
    # Identity translation and a fixed CSRF token keep assertions simple.
    req.translate = lambda x: x
    req.db = dbsession
    req.context = Mock()
    req.session.get_csrf_token = lambda: "xxx"
    return req
def test_celery_quoted_values(self):
    """Surrounding quotes in ini values are stripped by the loader."""
    from pyramid_celery import Celery
    raw_settings = {
        'BROKER_URL': '"redis://localhost:6379/0"',
        'BROKER_TRANSPORT_OPTIONS': '{"foo": "bar"}',
    }
    fake_registry = Mock()
    fake_registry.settings = raw_settings
    environment = {'registry': fake_registry}
    parsed = Celery(environment).loader.read_configuration()
    assert parsed['BROKER_URL'] == 'redis://localhost:6379/0'
def test_from_crawler_not_configured(self):
    """When the pipeline constructor raises NotConfigured, from_crawler
    propagates it -- after the driver was built with the connection
    settings and the constructor was invoked once."""
    crawler = Mock()
    for conn_sett, table_name, insert_options in self.init_mocks_iter():
        crawler.settings = self.get_pipeline_settings(
            conn_sett, table_name, insert_options
        )
        with self.pipeline_cls_patcher as pipeline_cls, \
                self.driver_patcher as driver_klass:
            pipeline_cls.side_effect = NotConfigured
            with self.assertRaises(NotConfigured):
                RethinkDBPipeline.from_crawler(crawler)
            driver_klass.assert_called_once_with(conn_sett)
            pipeline_cls.assert_called_once_with(
                driver_klass.return_value, table_name, insert_options
            )
def test_celeryd(self, bootstrap, workercommand, celery):
    """main() bootstraps the ini file, builds a worker command bound to
    the app, and runs the worker exactly once."""
    from pyramid_celery.celeryd import main
    fake_worker = Mock()
    fake_run = Mock()
    fake_worker.run = fake_run
    workercommand.return_value = fake_worker
    fake_registry = Mock()
    fake_registry.settings = {"CELERY_ALWAYS_EAGER": True}
    environment = {"registry": fake_registry}
    bootstrap.return_value = environment
    main(["", "config.ini"])
    workercommand.assert_called_with(app=celery(environment))
    bootstrap.assert_called_with("config.ini")
    fake_run.assert_called_once_with()
def test_from_crawler_configured(self):
    """When the pipeline constructor succeeds (returns None as __init__
    does), from_crawler yields a RethinkDBPipeline built from the driver
    and the per-combination settings."""
    crawler = Mock()
    for conn_sett, table_name, insert_options in self.init_mocks_iter():
        crawler.settings = self.get_pipeline_settings(
            conn_sett, table_name, insert_options
        )
        with self.pipeline_cls_patcher as pipeline_cls, \
                self.driver_patcher as driver_klass:
            pipeline_cls.return_value = None
            pipeline = RethinkDBPipeline.from_crawler(crawler)
            self.assertIsInstance(pipeline, RethinkDBPipeline)
            driver_klass.assert_called_once_with(conn_sett)
            pipeline_cls.assert_called_once_with(
                driver_klass.return_value, table_name, insert_options
            )
def test_no_api_class_defined(self):
    """Without a ``bowab.api_class`` setting the default TemplateAPI is
    returned."""
    bare_registry = Mock()
    bare_registry.settings = {}
    self.assertEqual(get_api_class(bare_registry), TemplateAPI)
def test_mapping_added():
    """setup_handlers registers exactly one handler set for any host."""
    fake_app = Mock()
    fake_app.settings = {'base_url': 'nb_base_url'}
    setup_handlers(fake_app)
    fake_app.add_handlers.assert_called_once_with(".*", ANY)
def test_unconfigured_init(self):
    """A None LOGENTRIES_TOKEN must make from_crawler raise NotConfigured."""
    bare_crawler = Mock()
    bare_crawler.settings = {'LOGENTRIES_TOKEN': None}
    with self.assertRaises(NotConfigured):
        self.extension = LogentriesExtension.from_crawler(bare_crawler)
# Smoke-test InstallationProcess.create_pacman_conf_file without running the
# installer's full __init__ (which is mocked away below).
logging.basicConfig(level=10)  # 10 == logging.DEBUG
logger = logging.getLogger(__name__)
# Make the project root and its src/ directory importable; this must happen
# before the `installation_process` import below.
parentdir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(parentdir)
sys.path.append(os.path.join(parentdir, 'src'))
logger.debug("parentdir: %s" % parentdir)
from installation_process import InstallationProcess
# set needed config options
settings = dict()
settings['data'] = os.path.join(parentdir, 'data')
# create mock object to test just one method and not the __init__
mobject = Mock(InstallationProcess)
mobject.settings = settings
mobject.dest_dir = '/tmp'
mobject.arch = 'x86_64'
# Bind the real (unmocked) write_file onto the mock instance so the method
# under test can call it as a bound method.
mobject.write_file = InstallationProcess.write_file.__get__(mobject)
InstallationProcess.create_pacman_conf_file(mobject)
logger.debug('Done')
# The call above should have produced a pacman.conf under dest_dir.
assert os.path.isfile('/tmp/pacman.conf')