def test_list_under_parent(self):
    """Establish that listing with a parent specified works."""
    with mock.patch(
            'tower_cli.models.base.ResourceMethods.list') as mock_list:
        with mock.patch(
                'tower_cli.resources.group.Resource.lookup_with_inventory'):
            self.gr.list(parent="foo_group")
            mock_list.assert_called_once_with()
def test_post_save_hook_was_connected(self):
    with mock.patch('django.db.models.signals.post_save') as mocked_post_save, \
            mock.patch('django.db.models.signals.pre_delete'):
        caching_framework.register(TestModel)
        mocked_post_save.connect.assert_called_once_with(
            mock.ANY,
            sender=TestModel,
            weak=False,
            dispatch_uid='%s_cache_object' % TestModel._meta.db_table)
def test_associate(self):
    """Establish that associate commands work."""
    with mock.patch(
            'tower_cli.models.base.ResourceMethods._assoc') as mock_assoc:
        with mock.patch(
                'tower_cli.resources.group.Resource.lookup_with_inventory'
                ) as mock_lookup:
            mock_lookup.return_value = {'id': 1}
            self.gr.associate(group=1, parent=2)
            mock_assoc.assert_called_once_with('children', 1, 1)
def test_role_write_user_exists_FOF(self):
    """Simulate granting user permission where they already have it."""
    with mock.patch(
            'tower_cli.models.base.ResourceMethods.read') as mock_read:
        mock_read.return_value = {'results': [copy(example_role_data)],
                                  'count': 1}
        with mock.patch('tower_cli.api.Client.post'):
            with self.assertRaises(exc.NotFound):
                self.res.role_write(user=2, inventory=3, type='admin',
                                    fail_on_found=True)
def test_role_grant_user(self):
    """Simulate granting user permission."""
    with mock.patch(
            'tower_cli.models.base.ResourceMethods.read') as mock_read:
        mock_read.return_value = {
            'results': [copy(example_role_data)], 'count': 0}
        with mock.patch('tower_cli.api.Client.post') as mock_post:
            self.res.role_write(user=2, inventory=3, type='admin')
            mock_post.assert_called_once_with(
                'users/2/roles/', data={'id': 1})
def test_reading_invalid_token_from_server(self):
    self.expires += timedelta(hours=-1)
    expires = self.expires.strftime(TOWER_DATETIME_FMT)
    with mock.patch('six.moves.builtins.open',
                    new_callable=mock.mock_open()):
        with mock.patch('tower_cli.api.json.load',
                        return_value={'token': 'foobar',
                                      'expires': expires}):
            with client.test_mode as t:
                with self.assertRaises(exc.AuthError):
                    t.register('/authtoken/', json.dumps({}),
                               status_code=200, method='OPTIONS')
                    t.register('/authtoken/',
                               json.dumps({'invalid': 'invalid'}),
                               status_code=200, method='POST')
                    self.auth(self.req)
def test_reading_invalid_token(self):
    self.expires += timedelta(hours=1)
    expires = self.expires.strftime(TOWER_DATETIME_FMT)
    with mock.patch('six.moves.builtins.open',
                    new_callable=mock.mock_open()):
        with mock.patch('tower_cli.api.json.load', return_value="invalid"):
            with client.test_mode as t:
                t.register('/authtoken/', json.dumps({}),
                           status_code=200, method='OPTIONS')
                t.register('/authtoken/',
                           json.dumps({'token': 'barfoo',
                                       'expires': expires}),
                           status_code=200, method='POST')
                self.auth(self.req)
                self.assertEqual(self.req.headers['Authorization'],
                                 'Token barfoo')
def setUp(self):
    super(MockServiceWithConfigTestCase, self).setUp()
    self.environ = {}
    self.config = {}
    self.config_patch = mock.patch('boto.provider.config.get',
                                   self.get_config)
    self.has_config_patch = mock.patch('boto.provider.config.has_option',
                                       self.has_config)
    self.environ_patch = mock.patch('os.environ', self.environ)
    self.config_patch.start()
    self.has_config_patch.start()
    self.environ_patch.start()
def test_router_urls_with_custom_lookup_field(self):
    """Establish that a router with a viewset attached gets expected URLs."""
    # Create a model and viewset with a special lookup field.
    class PhonyModelIII(models.Model):
        class Meta:
            app_label = 'tests'

    class PhonyViewSetIII(viewsets.ModelViewSet):
        model = PhonyModelIII
        lookup_field = 'foo'

        @base_action(set(['POST']))
        def special(self, request):
            pass

    # Create the router and register our viewset.
    with mock.patch('drf_toolbox.routers.ModelSerializer'):
        router = routers.Router()
        router.register('phony', PhonyViewSetIII)

    # Attempt to establish that we got back what we expected.
    for urlpattern in router.urls:
        pattern = urlpattern.regex.pattern
        base_regex = routers.base_regex
        if '<foo>' in pattern:
            self.assertIn('(?P<foo>%s)' % base_regex.pattern, pattern)
        if '<format>' in urlpattern.regex.pattern:
            self.assertFalse(pattern.endswith(r'/\.(?P<format>[a-z]+)$'))
def test_router_urls_uuid(self):
    """Establish that a router with a viewset attached gets the
    expected URLs.
    """
    # Create a model and viewset with at least one special method.
    class PhonyModelII(models.Model):
        id = models.UUIDField(auto_add=True, primary_key=True)

        class Meta:
            app_label = 'tests'

    class PhonyViewSetII(viewsets.ModelViewSet):
        model = PhonyModelII

        @base_action(set(['POST']))
        def special(self, request):
            pass

    # Create the router and register our viewset.
    with mock.patch('drf_toolbox.routers.ModelSerializer'):
        router = routers.Router()
        router.register('phony', PhonyViewSetII)

    # Attempt to establish that we got back what we expected.
    for urlpattern in router.urls:
        pattern = urlpattern.regex.pattern
        uuid_regex = routers.uuid_regex
        if '<pk>' in pattern:
            self.assertIn('(?P<pk>%s)' % uuid_regex.pattern, pattern)
        if '<format>' in urlpattern.regex.pattern:
            self.assertFalse(pattern.endswith(r'/\.(?P<format>[a-z]+)$'))
def test_router_urls_using_serializer_class_only(self):
    """Establish that a router with a viewset attached gets the expected
    URLs, even if the viewset uses a serializer class instead of a model.

    See #2: https://github.com/feedmagnet/drf-toolbox/issues/2
    """
    # Create a model, serializer class, and viewset.
    # The viewset should reference the serializer class only.
    class PhonyModelV(models.Model):
        class Meta:
            app_label = 'tests'

    class PhonySerializerV(serializers.ModelSerializer):
        class Meta:
            model = PhonyModelV

    class PhonyViewSetV(viewsets.ModelViewSet):
        serializer_class = PhonySerializerV

    # Create the router and register our viewset.
    with mock.patch('drf_toolbox.routers.ModelSerializer'):
        router = routers.Router()
        router.register('phony', PhonyViewSetV)

    # Attempt to establish that we got back what we expected.
    for urlpattern in router.urls:
        pattern = urlpattern.regex.pattern
        integer_regex = routers.integer_regex
        if '<pk>' in pattern:
            self.assertIn('(?P<pk>%s)' % integer_regex.pattern, pattern)
        if '<format>' in urlpattern.regex.pattern:
            self.assertFalse(pattern.endswith(r'/\.(?P<format>[a-z]+)$'))
def test_get_job_validate_checksum_success(self):
    response = GlacierResponse(mock.Mock(), None)
    response['TreeHash'] = 'tree_hash'
    self.api.get_job_output.return_value = response
    with mock.patch('boto.glacier.job.tree_hash_from_str') as t:
        t.return_value = 'tree_hash'
        self.job.get_output(byte_range=(1, 1024), validate_checksum=True)
def setUp(self):
    with mock.patch(
        GCS_STRING.format('GoogleCloudBaseHook.__init__'),
        new=mock_base_gcp_hook_default_project_id,
    ):
        self.gcs_hook = gcs_hook.GoogleCloudStorageHook(
            google_cloud_storage_conn_id='test')
def setUp(self, _orig_class=orig_class):
    _orig_class.setUp(self)

    def find_near_matches_dropin(subsequence, sequence, *args, **kwargs):
        if isinstance(sequence, (tuple, list)):
            self.skipTest('skipping word-list tests with find_near_matches_in_file')

        try:
            from Bio.Seq import Seq
        except ImportError:
            pass
        else:
            if isinstance(sequence, Seq):
                self.skipTest('skipping BioPython Seq tests with find_near_matches_in_file')

        tempfilepath = tempfile.mktemp()
        if isinstance(sequence, text_type):
            f = io.open(tempfilepath, 'w+', encoding='utf-8')
        else:
            f = open(tempfilepath, 'w+b')
        try:
            f.write(sequence)
            f.seek(0)
            return find_near_matches_in_file(subsequence, f, *args, **kwargs)
        finally:
            f.close()
            os.remove(tempfilepath)

    patcher = mock.patch(
        'tests.test_find_near_matches.find_near_matches',
        find_near_matches_dropin)
    self.addCleanup(patcher.stop)
    patcher.start()
def test_keyring_is_used(self):
    self.config = {
        'Credentials': {
            'aws_access_key_id': 'cfg_access_key',
            'keyring': 'test',
        }
    }
    import sys
    try:
        import keyring
        imported = True
    except ImportError:
        sys.modules['keyring'] = keyring = type(mock)('keyring', '')
        imported = False
    try:
        with mock.patch('keyring.get_password', create=True):
            keyring.get_password.side_effect = (
                lambda kr, login: kr + login + 'pw')
            p = provider.Provider('aws')
            self.assertEqual(p.access_key, 'cfg_access_key')
            self.assertEqual(p.secret_key, 'testcfg_access_keypw')
            self.assertIsNone(p.security_token)
    finally:
        if not imported:
            del sys.modules['keyring']
def test_monitoring(self):
    """Establish that if the first status call returns a pending job,
    and the second a success, that both calls are made, and a success
    finally returned.
    """
    # Set up our data object.
    data = {'elapsed': 1335024000.0, 'failed': False, 'status': 'pending'}

    # Register the initial request's response.
    with client.test_mode as t:
        t.register_json('/jobs/42/', copy(data))

        # Create a way to assign a successful data object to the request.
        def assign_success(*args):
            t.clear()
            t.register_json('/jobs/42/', dict(data, status='successful'))

        # Make the successful state assignment occur when time.sleep()
        # is called between requests.
        with mock.patch.object(time, 'sleep') as sleep:
            sleep.side_effect = assign_success
            with mock.patch.object(click, 'secho') as secho:
                with mock.patch('tower_cli.models.base.is_tty') as tty:
                    tty.return_value = True
                    self.res.monitor(42, min_interval=0.21)
                    self.assertTrue(secho.call_count >= 100)

        # We should have gotten two requests total, to the same URL.
        self.assertEqual(len(t.requests), 2)
        self.assertEqual(t.requests[0].url, t.requests[1].url)
def test_write_global_setting_deprecated(self):
    """Establish that if we attempt to write a valid setting, that
    the parser's write method is run.
    """
    # Invoke the command, but trap the file-write at the end
    # so we don't plow over real things.
    mock_open = mock.mock_open()
    warning_text = 'The `--global` option is deprecated and will be '\
                   'removed. Use `--scope=global` to get the same effect.'
    with mock.patch('tower_cli.cli.misc.open', mock_open, create=True):
        with mock.patch.object(os.path, 'isdir') as isdir:
            with mock.patch.object(os, 'chmod'):
                with mock.patch.object(warnings, 'warn') as warn:
                    isdir.return_value = True
                    result = self.runner.invoke(
                        config, ['username', 'meagan', '--global'],
                    )
                    warn.assert_called_once_with(warning_text,
                                                 DeprecationWarning)
                    self.assertEqual(warn.mock_calls[0][1][1],
                                     DeprecationWarning)
                    isdir.assert_called_once_with('/etc/tower/')

    # Ensure that the command completed successfully.
    self.assertEqual(result.exit_code, 0)
    self.assertEqual('Configuration updated successfully.',
                     result.output.strip())

    # Ensure that the output seems to be correct.
    self.assertIn(mock.call('/etc/tower/tower_cli.cfg', 'w'),
                  mock_open.mock_calls)
    self.assertIn(mock.call().write('username = meagan\n'),
                  mock_open.mock_calls)
def test_grant_user_role(self):
    """Assure that super method is called granting role"""
    with mock.patch(
            'tower_cli.resources.role.Resource.role_write') as mock_write:
        kwargs = dict(user=1, type='read', project=3)
        self.res.grant(**kwargs)
        mock_write.assert_called_once_with(fail_on_found=False, **kwargs)
def test_list_user(self):
    """Assure that super method is called with right parameters"""
    with mock.patch(
            'tower_cli.models.base.ResourceMethods.list') as mock_list:
        mock_list.return_value = {'results': [example_role_data]}
        self.res.list(user=1)
        mock_list.assert_called_once_with(members__in=1)
def test_write_global_setting(self):
    """Establish that if we attempt to write a valid setting, that
    the parser's write method is run.
    """
    # Invoke the command, but trap the file-write at the end
    # so we don't plow over real things.
    mock_open = mock.mock_open()
    with mock.patch('tower_cli.commands.config.open', mock_open,
                    create=True):
        with mock.patch.object(os.path, 'isdir') as isdir:
            isdir.return_value = True
            result = self.runner.invoke(
                config, ['username', 'luke', '--scope=global'],
            )
            isdir.assert_called_once_with('/etc/awx/')

    # Ensure that the command completed successfully.
    self.assertEqual(result.exit_code, 0)
    self.assertEqual(result.output.strip(),
                     'Configuration updated successfully.')

    # Ensure that the output seems to be correct.
    self.assertIn(mock.call('/etc/awx/tower_cli.cfg', 'w'),
                  mock_open.mock_calls)
    self.assertIn(mock.call().write('username = luke\n'),
                  mock_open.mock_calls)
def test_router_urls_with_custom_lookup_regex(self):
    """Establish that a router with a viewset attached gets expected URLs
    when the viewset has a custom regex.
    """
    # Create a model and viewset with a special lookup field.
    class PhonyModelIV(models.Model):
        class Meta:
            app_label = 'tests'

    class PhonyViewSetIV(viewsets.ModelViewSet):
        model = PhonyModelIV
        lookup_regex = '[0123456789]+'

        @base_action({'POST'})
        def special(self, request):
            pass

    # Create the router and register our viewset.
    with mock.patch('drf_toolbox.routers.ModelSerializer'):
        router = routers.Router()
        router.register('phony', PhonyViewSetIV)

    # Attempt to establish that we got back what we expected.
    for urlpattern in router.urls:
        pattern = urlpattern.regex.pattern
        if '<pk>' in pattern:
            self.assertIn('(?P<pk>[0123456789]+)', pattern)
        if '<format>' in urlpattern.regex.pattern:
            self.assertFalse(pattern.endswith(r'/\.(?P<format>[a-z]+)$'))
def test_personal_access_token(self):
    """Establish that if `tower-cli login` is called with a username and
    password, we obtain and write an oauth token to the config file.
    """
    # Invoke the command.
    mock_open = mock.mock_open()
    with mock.patch('tower_cli.cli.misc.open', mock_open, create=True):
        with mock.patch.object(os, 'chmod'):
            with client.test_mode as t:
                # You have to modify this internal private registry to
                # register a URL endpoint that _doesn't_ have the version
                # prefix.
                prefix = Client().get_prefix(include_version=False)
                t._registry[URL(prefix + 'o/', method='HEAD')] = Resp(
                    ''.encode('utf-8'), 200, {}
                )
                t.register('/users/bob/personal_tokens/',
                           json.dumps({'token': 'abc123'}),
                           status_code=201, method='POST')
                result = self.runner.invoke(
                    login, ['bob', '--password', 'secret',
                            '--scope', 'read']
                )
                # Ensure that we got a zero exit status.
                self.assertEqual(result.exit_code, 0)
                assert json.loads(t.requests[-1].body)['scope'] == 'read'

    # Ensure that the output seems to be correct.
    self.assertIn(mock.call(os.path.expanduser('~/.tower_cli.cfg'), 'w'),
                  mock_open.mock_calls)
    self.assertIn(mock.call().write('oauth_token = abc123\n'),
                  mock_open.mock_calls)
def test_concurrent_upload_file(self):
    v = vault.Vault(None, None)
    with mock.patch("boto.glacier.vault.ConcurrentUploader") as c:
        c.return_value.upload.return_value = "archive_id"
        archive_id = v.concurrent_create_archive_from_file(
            "filename", "my description")
        c.return_value.upload.assert_called_with("filename",
                                                 "my description")
        self.assertEqual(archive_id, "archive_id")
def test_monitoring_not_tty(self):
    """Establish that the monitor command prints more useful output
    for logging if not connected to a tty.
    """
    # Set up our data object.
    data = {'elapsed': 1335024000.0, 'failed': False, 'status': 'pending'}

    # Register the initial request's response.
    with client.test_mode as t:
        t.register_json('/jobs/42/', copy(data))

        # Create a way to assign a successful data object to the request.
        def assign_success(*args):
            t.clear()
            t.register_json('/jobs/42/', dict(data, status='successful'))

        # Make the successful state assignment occur when time.sleep()
        # is called between requests.
        with mock.patch.object(time, 'sleep') as sleep:
            sleep.side_effect = assign_success
            with mock.patch.object(click, 'echo') as echo:
                with mock.patch('tower_cli.resources.job.is_tty') as tty:
                    tty.return_value = False
                    result = self.res.monitor(42, min_interval=0.21)
                    self.assertTrue(echo.call_count >= 1)

        # We should have gotten two requests total, to the same URL.
        self.assertEqual(len(t.requests), 2)
        self.assertEqual(t.requests[0].url, t.requests[1].url)
def test_create_without_special_fields(self):
    """Establish that a create without user, team, or credential works."""
    with mock.patch(
            'tower_cli.models.base.Resource.create') as mock_create:
        cred_res = tower_cli.get_resource('credential')
        cred_res.create(name="foobar")
        mock_create.assert_called_once_with(name="foobar")
def test_hooks_header_from_extra_is_overridden(self, m):
    with mock.patch(
        'airflow.hooks.base_hook.BaseHook.get_connection',
        side_effect=get_airflow_connection
    ):
        conn = self.get_hook.get_conn(headers={"bareer": "newT0k3n"})
        self.assertEqual(conn.headers.get('bareer'), 'newT0k3n')
def test_execute_job_polling_loop(self):
    jenkins_mock = mock.Mock(spec=jenkins.Jenkins, auth='secret')
    jenkins_mock.get_job_info.return_value = {'nextBuildNumber': '1'}
    jenkins_mock.get_build_info.side_effect = [
        {'result': None},
        {'result': 'SUCCESS',
         'url': 'http://aaa.fake-url.com/congratulation/its-a-job'}]
    jenkins_mock.build_job_url.return_value = \
        'http://www.jenkins.url/somewhere/in/the/universe'

    hook_mock = mock.Mock(spec=JenkinsHook)
    hook_mock.get_jenkins_server.return_value = jenkins_mock

    the_parameters = {'a_param': 'blip', 'another_param': '42'}

    with mock.patch.object(JenkinsJobTriggerOperator,
                           "get_hook") as get_hook_mocked, \
            mock.patch('airflow.contrib.operators.jenkins_job_trigger_operator'
                       '.jenkins_request_with_headers') as mock_make_request:
        mock_make_request.side_effect = [
            {'body': '',
             'headers': {'Location': 'http://what-a-strange.url/18'}},
            {'body': '{"executable":{"number":"1"}}', 'headers': {}}]
        get_hook_mocked.return_value = hook_mock
        operator = JenkinsJobTriggerOperator(
            dag=None,
            task_id="operator_test",
            job_name="a_job_on_jenkins",
            # The hook is mocked, this connection won't be used.
            jenkins_connection_id="fake_jenkins_connection",
            parameters=the_parameters,
            sleep_time=1)
        operator.execute(None)
        self.assertEqual(jenkins_mock.get_build_info.call_count, 2)
def test_write_local_setting(self):
    """Establish that if we attempt to write a valid setting locally,
    that the correct parser's write method is run.
    """
    # Invoke the command, but trap the file-write at the end
    # so we don't plow over real things.
    mock_open = mock.mock_open()
    with mock.patch('tower_cli.cli.misc.open', mock_open, create=True):
        with mock.patch.object(os, 'chmod') as chmod:
            result = self.runner.invoke(
                config, ['username', 'meagan', '--scope=local'],
            )
            filename = ".tower_cli.cfg"
            chmod.assert_called_once_with(filename, int('0600', 8))

    # Ensure that the command completed successfully.
    self.assertEqual(result.exit_code, 0)
    self.assertEqual(result.output.strip(),
                     'Configuration updated successfully.')

    # Ensure that the output seems to be correct.
    self.assertIn(mock.call('.tower_cli.cfg', 'w'), mock_open.mock_calls)
    self.assertIn(mock.call().write('username = meagan\n'),
                  mock_open.mock_calls)
def test_concurrent_upload_forwards_kwargs(self):
    v = vault.Vault(None, None)
    with mock.patch("boto.glacier.vault.ConcurrentUploader") as c:
        c.return_value.upload.return_value = "archive_id"
        archive_id = v.concurrent_create_archive_from_file(
            "filename", "my description",
            num_threads=10, part_size=1024 * 1024 * 1024 * 8)
        c.assert_called_with(None, None, num_threads=10,
                             part_size=1024 * 1024 * 1024 * 8)
def test_role_write_user_exists(self):
    """Simulate granting user permission where they already have it."""
    with mock.patch(
            'tower_cli.models.base.ResourceMethods.read') as mock_read:
        mock_read.return_value = {'results': [copy(example_role_data)],
                                  'count': 1}
        r = self.res.role_write(user=2, inventory=3, type='admin')
        self.assertEqual(r['user'], 2)
def setUp(self):
    with mock.patch(BASE_STRING.format('GoogleCloudBaseHook.__init__'),
                    new=mock_init):
        self.dataproc_hook = DataProcHook()
def setUp(self):
    with mock.patch('airflow.gcp.hooks.base.GoogleCloudBaseHook.__init__',
                    new=mock_base_gcp_hook_no_default_project_id):
        self.spanner_hook_no_default_project_id = CloudSpannerHook(
            gcp_conn_id='test')
def setUp(self): with mock.patch( "airflow.gcp.hooks.video_intelligence.CloudVideoIntelligenceHook.__init__", new=mock_base_gcp_hook_default_project_id, ): self.hook = CloudVideoIntelligenceHook(gcp_conn_id="test")
def test_ready_prefix_on_cmdline_zombie(self):
    self.child.cmdline.return_value = []
    self.process.children.return_value = [self.child]
    with mock.patch('psutil.Process', return_value=self.process):
        self.assertEqual(self.monitor._get_num_ready_workers_running(), 0)
def test_backfill(self, mock_run):
    cli.backfill(self.parser.parse_args([
        'dags', 'backfill', 'example_bash_operator',
        '-s', DEFAULT_DATE.isoformat()]))

    mock_run.assert_called_once_with(
        start_date=DEFAULT_DATE,
        end_date=DEFAULT_DATE,
        conf=None,
        delay_on_limit_secs=1.0,
        donot_pickle=False,
        ignore_first_depends_on_past=False,
        ignore_task_deps=False,
        local=False,
        mark_success=False,
        pool=None,
        rerun_failed_tasks=False,
        run_backwards=False,
        verbose=False,
    )
    mock_run.reset_mock()
    dag = self.dagbag.get_dag('example_bash_operator')

    with mock.patch('sys.stdout', new_callable=io.StringIO) as mock_stdout:
        cli.backfill(self.parser.parse_args([
            'dags', 'backfill', 'example_bash_operator', '-t', 'runme_0',
            '--dry_run', '-s', DEFAULT_DATE.isoformat()]), dag=dag)

    mock_stdout.seek(0, 0)
    self.assertListEqual(
        [
            "Dry run of DAG example_bash_operator on {}\n".format(
                DEFAULT_DATE.isoformat()),
            "Task runme_0\n",
        ],
        mock_stdout.readlines()
    )

    mock_run.assert_not_called()  # Dry run shouldn't run the backfill

    cli.backfill(self.parser.parse_args([
        'dags', 'backfill', 'example_bash_operator', '--dry_run',
        '-s', DEFAULT_DATE.isoformat()]), dag=dag)

    mock_run.assert_not_called()  # Dry run shouldn't run the backfill

    cli.backfill(self.parser.parse_args([
        'dags', 'backfill', 'example_bash_operator', '-l',
        '-s', DEFAULT_DATE.isoformat()]), dag=dag)

    mock_run.assert_called_once_with(
        start_date=DEFAULT_DATE,
        end_date=DEFAULT_DATE,
        conf=None,
        delay_on_limit_secs=1.0,
        donot_pickle=False,
        ignore_first_depends_on_past=False,
        ignore_task_deps=False,
        local=True,
        mark_success=False,
        pool=None,
        rerun_failed_tasks=False,
        run_backwards=False,
        verbose=False,
    )
    mock_run.reset_mock()
def setUp(self):
    with mock.patch(BASE_STRING.format('GoogleCloudBaseHook.__init__'),
                    new=mock_init):
        self.dataflow_hook = DataFlowHook(gcp_conn_id='test')
def setUp(self):
    with mock.patch(
        'airflow.gcp.hooks.translate.CloudTranslateHook.__init__',
        new=mock_base_gcp_hook_default_project_id,
    ):
        self.hook = CloudTranslateHook(gcp_conn_id='test')
def setUp(self):
    with mock.patch(
            'airflow.contrib.hooks.gcp_api_base_hook.GoogleCloudBaseHook.__init__',
            new=mock_base_gcp_hook_default_project_id):
        self.gcf_function_hook = GcfHook(gcp_conn_id='test',
                                         api_version='v1')
def setUp(self): with mock.patch( "airflow.gcp.hooks.base.CloudBaseHook.__init__", new=mock_base_gcp_hook_no_default_project_id, ): self.hook = CloudTasksHook(gcp_conn_id="test")
def test_no_key_provided(self):
    with mock.patch('airflow.contrib.hooks.bigquery_hook.read_gbq',
                    new=lambda *args, **kwargs: kwargs['private_key']):
        self.assertEqual(self.instance.get_pandas_df('select 1'), None)
def setUp(self):
    self.retry_url_patch = mock.patch('boto.utils.retry_url')
    boto.utils.retry_url = self.retry_url_patch.start()
def setUp(self):
    self.urlopen_patch = mock.patch('boto.compat.urllib.request.urlopen')
    self.opener_patch = mock.patch(
        'boto.compat.urllib.request.build_opener')
    self.urlopen = self.urlopen_patch.start()
    self.opener = self.opener_patch.start()
def test_submit(self, job_mock):
    # Note: return_value belongs to mock.patch(), not to str.format(),
    # where it would be silently ignored.
    with mock.patch(DATAPROC_STRING.format('DataProcHook.get_conn'),
                    return_value=None):
        self.dataproc_hook.submit(GCP_PROJECT_ID_HOOK_UNIT_TEST, JOB)
        job_mock.assert_called_once_with(mock.ANY,
                                         GCP_PROJECT_ID_HOOK_UNIT_TEST,
                                         JOB, GCP_REGION,
                                         job_error_states=mock.ANY,
                                         num_retries=mock.ANY)
def setUp(self): with mock.patch(BASE_STRING.format("CloudBaseHook.__init__"), new=mock_init): self.hook = DataprocHook(gcp_conn_id="test")
def test_ready_prefix_on_cmdline_dead_process(self):
    self.child.cmdline.side_effect = psutil.NoSuchProcess(11347)
    self.process.children.return_value = [self.child]
    with mock.patch('psutil.Process', return_value=self.process):
        self.assertEqual(self.monitor._get_num_ready_workers_running(), 0)
def setUp(self):
    with mock.patch(
        'airflow.gcp.hooks.base.GoogleCloudBaseHook.__init__',
        new=mock_base_gcp_hook_no_default_project_id,
    ):
        self.gct_hook = GCPTransferServiceHook(gcp_conn_id='test')
def setUp(self):
    with mock.patch('airflow.gcp.hooks.base.GoogleCloudBaseHook.__init__',
                    new=mock_base_gcp_hook_no_default_project_id):
        self.gcf_function_hook_no_project_id = CloudFunctionsHook(
            gcp_conn_id='test', api_version='v1')
def setUp(self): with mock.patch(GCS_STRING.format("GoogleCloudBaseHook.__init__"), new=mock_base_gcp_hook_default_project_id): self.gcs_hook = gcs.GoogleCloudStorageHook( google_cloud_storage_conn_id="test")
def setUp(self):
    with mock.patch(BASE_STRING.format('GoogleCloudBaseHook.__init__')):
        self.gcs_hook = gcs_hook.GoogleCloudStorageHook(
            google_cloud_storage_conn_id='test')
def setUp(self):
    with mock.patch(BASE_STRING.format('GoogleCloudBaseHook.__init__'),
                    new=mock_init):
        self.pubsub_hook = PubSubHook(gcp_conn_id='test')
def setUp(self):
    with mock.patch(
        'airflow.contrib.hooks.gcp_api_base_hook.GoogleCloudBaseHook.__init__',
        new=mock_base_gcp_hook_default_project_id,
    ):
        self.instance = hook.GoogleCloudBaseHook(
            gcp_conn_id="google-cloud-default")
def setUp(self): with mock.patch( "airflow.contrib.hooks.gcp_api_base_hook.GoogleCloudBaseHook.__init__", new=mock_base_gcp_hook_no_default_project_id, ): self.hook = CloudDLPHook(gcp_conn_id="test")
def setUp(self):
    with mock.patch(
            'airflow.contrib.hooks.gcp_api_base_hook.GoogleCloudBaseHook.__init__',
            new=mock_base_gcp_hook_no_default_project_id):
        self.gce_hook_no_project_id = GceHook(gcp_conn_id='test')
def mock_CoreAgentManager():
    # Always mock out the actual CoreAgentManager class for these tests,
    # to keep them quick.
    path = "scout_apm.core.cli.core_agent_manager.CoreAgentManager"
    with mock.patch(path) as mock_obj:
        yield mock_obj
def setUp(self):
    with mock.patch(
        MODULE_NAME + '.GoogleCloudBaseHook.__init__',
        new=mock_base_gcp_hook_default_project_id,
    ):
        self.instance = hook.GoogleCloudBaseHook(
            gcp_conn_id="google-cloud-default")
def setUp(self):
    with mock.patch(
        'airflow.contrib.hooks.gcp_vision_hook.CloudVisionHook.__init__',
        new=mock_base_gcp_hook_default_project_id,
    ):
        self.hook = CloudVisionHook(gcp_conn_id='test')
def setUp(self):
    self.channel_mock = mock.patch('grpc.Channel').start()
def setUp(self):
    with mock.patch(
            'airflow.contrib.hooks.gcp_api_base_hook.GoogleCloudBaseHook.__init__',
            new=mock_base_gcp_hook_default_project_id):
        self.hook = GSheetsHook(gcp_conn_id=GCP_CONN_ID,
                                spreadsheet_id=SPREADHSEET_ID)
def test_cli_show_config_should_display_key(self):
    temp_stdout = StringIO()
    with mock.patch("sys.stdout", temp_stdout):
        cli.config(self.parser.parse_args(['config', '--color=off']))
    self.assertIn('[core]', temp_stdout.getvalue())
    self.assertIn('testkey = test_value', temp_stdout.getvalue())