def test_on_snapshot_loaded(self):
    """A snapshot load should trigger cleanup with only this host's endpoints."""
    good_ep = Mock()
    good_ep.key = ("/calico/v1/host/hostname/workload/"
                   "orch/wlid/endpoint/epid")
    good_ep.value = ENDPOINT_STR
    # Endpoint with a truncated (unparseable) value; must not reach cleanup.
    truncated_ep = Mock()
    truncated_ep.key = ("/calico/v1/host/hostname/workload/"
                        "orch/wlid/endpoint/epid2")
    truncated_ep.value = ENDPOINT_STR[:10]
    # Endpoint that belongs to a different host; must not reach cleanup.
    foreign_ep = Mock()
    foreign_ep.key = ("/calico/v1/host/other/workload/"
                      "orch/wlid/endpoint/epid")
    foreign_ep.value = ENDPOINT_STR
    ready_flag = Mock()
    ready_flag.key = ("/calico/v1/Ready")
    ready_flag.value = "true"
    snapshot = Mock()
    snapshot.children = [good_ep, truncated_ep, foreign_ep, ready_flag]
    with patch.object(self.watcher, "clean_up_endpoint_statuses") as m_clean:
        self.watcher._on_snapshot_loaded(snapshot)
    # Cleanup should only get the endpoints on our host.
    m_clean.assert_called_once_with(
        set([EndpointId("hostname", "orch", "wlid", "epid")]))
def test_download_folder(makedirs):
    """download_folder should fetch each object, with or without a trailing slash."""
    boto_mock = Mock(name="boto_session")
    boto_mock.client("sts").get_caller_identity.return_value = {"Account": "123"}
    session = sagemaker.Session(boto_session=boto_mock, sagemaker_client=Mock())

    # Two fake S3 listing entries under the prefix.
    s3_files = []
    for key in ("prefix/train/train_data.csv", "prefix/train/validation_data.csv"):
        entry = Mock()
        entry.bucket_name.return_value = BUCKET_NAME
        entry.key = key
        s3_files.append(entry)
    boto_mock.resource("s3").Bucket(BUCKET_NAME).objects.filter.return_value = s3_files

    obj_mock = Mock()
    boto_mock.resource("s3").Object.return_value = obj_mock

    # all the S3 mocks are set, the test itself begins now.
    sagemaker.utils.download_folder(BUCKET_NAME, "/prefix", "/tmp", session)
    obj_mock.download_file.assert_called()
    expected_calls = [
        call(os.path.join("/tmp", "train/train_data.csv")),
        call(os.path.join("/tmp", "train/validation_data.csv")),
    ]
    obj_mock.download_file.assert_has_calls(expected_calls)

    obj_mock.reset_mock()
    # Testing with a trailing slash for the prefix.
    sagemaker.utils.download_folder(BUCKET_NAME, "/prefix/", "/tmp", session)
    obj_mock.download_file.assert_called()
    obj_mock.download_file.assert_has_calls(expected_calls)
def test_clean_up_endpoint_status(self): self.m_config.REPORT_ENDPOINT_STATUS = True ep_id = WloadEndpointId("foo", "openstack", "workloadid", "endpointid") empty_dir = Mock() empty_dir.key = ("/calico/felix/v1/host/foo/workload/" "openstack/foobar") empty_dir.dir = True missing_ep = Mock() missing_ep.key = ("/calico/felix/v1/host/foo/workload/" "openstack/aworkload/endpoint/anendpoint") self.m_client.read.return_value.leaves = [ empty_dir, missing_ep, ] with patch.object(self.rep, "_mark_endpoint_dirty") as m_mark: self.rep.clean_up_endpoint_statuses(async=True) self.step_actor(self.rep) # Missing endpoint should have been marked for cleanup. m_mark.assert_called_once_with( WloadEndpointId("foo", "openstack", "aworkload", "anendpoint") )
def test_cron_status_multiple_jobs(self):
    """`cron show` should print only the requested job when two cron jobs exist."""
    (mock_api, mock_scheduler_proxy) = self.create_mock_api()
    with contextlib.nested(
        patch('time.sleep'),
        patch('apache.aurora.client.api.SchedulerProxy', return_value=mock_scheduler_proxy),
        patch('apache.aurora.client.factory.CLUSTERS', new=self.TEST_CLUSTERS),
        patch('apache.aurora.client.cli.context.AuroraCommandContext.print_out')) as (
            _, _, _, mock_print):
        response = self.create_simple_success_response()
        response.result = Mock()
        response.result.getJobsResult = Mock()
        # Two cron jobs for the same role; only "hello" is being asked about.
        configs = []
        for job_name in ("hello2", "hello"):
            mock_job = Mock()
            mock_job.cronSchedule = "* * * * *"
            mock_job.key = Mock()
            mock_job.key.environment = "test"
            mock_job.key.name = job_name
            mock_job.key.role = "bozo"
            configs.append(mock_job)
        response.result.getJobsResult.configs = configs
        mock_scheduler_proxy.getJobs.return_value = response
        cmd = AuroraCommandLine()
        result = cmd.execute(['cron', 'show', 'west/bozo/test/hello'])
        assert result == EXIT_OK
        mock_scheduler_proxy.getJobs.assert_called_once_with("bozo")
        mock_print.assert_called_with("west/bozo/test/hello\t * * * * *")
def test_download_folder(makedirs):
    """download_folder should download every object under the prefix."""
    boto_mock = Mock(name='boto_session')
    boto_mock.client('sts').get_caller_identity.return_value = {'Account': '123'}
    session = sagemaker.Session(boto_session=boto_mock, sagemaker_client=Mock())

    train_data = Mock()
    train_data.bucket_name.return_value = BUCKET_NAME
    train_data.key = 'prefix/train/train_data.csv'
    validation_data = Mock()
    validation_data.bucket_name.return_value = BUCKET_NAME
    validation_data.key = 'prefix/train/validation_data.csv'
    boto_mock.resource('s3').Bucket(BUCKET_NAME).objects.filter.return_value = [
        train_data, validation_data]

    obj_mock = Mock()
    boto_mock.resource('s3').Object.return_value = obj_mock

    # all the S3 mocks are set, the test itself begins now.
    sagemaker.utils.download_folder(BUCKET_NAME, '/prefix', '/tmp', session)
    obj_mock.download_file.assert_called()
    expected = [call(os.path.join('/tmp', 'train/train_data.csv')),
                call(os.path.join('/tmp', 'train/validation_data.csv'))]
    obj_mock.download_file.assert_has_calls(expected)

    obj_mock.reset_mock()
    # Testing with a trailing slash for the prefix.
    sagemaker.utils.download_folder(BUCKET_NAME, '/prefix/', '/tmp', session)
    obj_mock.download_file.assert_called()
    obj_mock.download_file.assert_has_calls(expected)
def test_clean_up_endpoint_status(self): self.m_config.REPORT_ENDPOINT_STATUS = True ep_id = EndpointId("hostname", "openstack", "workloadid", "endpointid") empty_dir = Mock() empty_dir.key = ("/calico/felix/v1/host/hostname/workload/" "openstack/foobar") empty_dir.dir = True missing_ep = Mock() missing_ep.key = ("/calico/felix/v1/host/hostname/workload/" "openstack/aworkload/endpoint/anendpoint") self.client.read.return_value.leaves = [ empty_dir, missing_ep, ] self.watcher.clean_up_endpoint_statuses(set([ep_id])) # Missing endpoint should have been marked for cleanup. self.m_status_rep.mark_endpoint_dirty.assert_called_once_with( EndpointId("hostname", "openstack", "aworkload", "anendpoint"), async=True )
def test_download_folder(makedirs):
    """download_folder should skip "folder" keys and download each real file."""
    boto_mock = MagicMock(name="boto_session")
    session = sagemaker.Session(boto_session=boto_mock, sagemaker_client=MagicMock())
    s3_mock = boto_mock.resource("s3")
    obj_mock = Mock()
    s3_mock.Object.return_value = obj_mock

    def fake_download(path):
        # Mock the S3 object to raise an error when the input to download_file
        # is a "folder"
        if path not in ("/tmp/", os.path.join("/tmp", "prefix")):
            return Mock()
        raise botocore.exceptions.ClientError(
            error_response={
                "Error": {
                    "Code": "404",
                    "Message": "Not Found"
                }
            },
            operation_name="HeadObject",
        )

    obj_mock.download_file.side_effect = fake_download

    s3_files = []
    for key in ("prefix/train/train_data.csv", "prefix/train/validation_data.csv"):
        entry = Mock()
        entry.bucket_name.return_value = BUCKET_NAME
        entry.key = key
        s3_files.append(entry)
    s3_mock.Bucket(BUCKET_NAME).objects.filter.return_value = s3_files

    # all the S3 mocks are set, the test itself begins now.
    sagemaker.utils.download_folder(BUCKET_NAME, "/prefix", "/tmp", session)
    obj_mock.download_file.assert_called()
    expected = [
        call(os.path.join("/tmp", "train", "train_data.csv")),
        call(os.path.join("/tmp", "train", "validation_data.csv")),
    ]
    obj_mock.download_file.assert_has_calls(expected)
    assert s3_mock.Object.call_count == 3

    s3_mock.reset_mock()
    obj_mock.reset_mock()
    # Test with a trailing slash for the prefix.
    sagemaker.utils.download_folder(BUCKET_NAME, "/prefix/", "/tmp", session)
    obj_mock.download_file.assert_called()
    obj_mock.download_file.assert_has_calls(expected)
    assert s3_mock.Object.call_count == 2
def test_fan_out(self, mock_issue_init, mock_context, mock_get_by_id):
    """Ensure that an Issue is created for the exception and tasks are
    inserted for each stack frame.
    """
    mock_repo = Mock()
    mock_repo.key = Mock()
    mock_get_by_id.return_value = mock_repo

    context = Mock()
    context.insert_success = 2
    mock_context.new.return_value.__enter__.return_value = context

    issue_key = Mock()
    issue_key.id.return_value = '123'
    mock_issue = Mock()
    mock_issue.key = issue_key
    mock_issue_init.return_value = mock_issue

    project_id = 'abc'
    timestamp = time.time()
    exception = 'ValueError'
    message = 'oh snap!'
    frame1 = ('foo.py', 24, 'foobar', 'return bar()')
    frame2 = ('bar.py', 120, 'baz', 'raise ValueError')
    frames = [frame1, frame2]
    stacktrace = 'stacktrace'

    report.process_exception({
        'project_id': project_id,
        'timestamp': timestamp,
        'exception': exception,
        'message': message,
        'frames': frames,
        'stacktrace': stacktrace
    })

    mock_get_by_id.assert_called_once_with(project_id)
    mock_issue_init.assert_called_once_with(
        repo=mock_repo.key,
        timestamp=datetime.fromtimestamp(timestamp),
        exception=exception,
        message=message,
        frames=frames,
        stacktrace=stacktrace,
        contacts=[])
    mock_issue.put.assert_called_once_with()

    # One notify task per stack frame, in order.
    expected = [
        call(target=report.notify,
             args=(project_id, issue_key.id.return_value, timestamp,
                   frame[0], frame[1], frame[2], frame[3], stacktrace))
        for frame in frames
    ]
    self.assertEqual(expected, context.add.call_args_list)
def test_should_return_number_of_cores_when_in_resources(self):
    """get_number_of_cores should read the `numCpuCores` licensable resource.

    Fix: replaced the deprecated `assertEquals` alias with `assertEqual`.
    """
    resource_1 = Mock()
    resource_1.key = "weLoveCamelCase"
    # Only this resource carries the core count the wrapper should return.
    resource_2 = Mock()
    resource_2.key = "numCpuCores"
    resource_2.value = 42
    resource_3 = Mock()
    resource_3.key = "someOtherKey"
    resources = [resource_1, resource_2, resource_3]
    self.raw_esx.licensableResource.resource = resources
    self.assertEqual(self.wrapped_esx.get_number_of_cores(), 42)
def test_unpinned_requirement(self):
    """Commit message for an unpinned requirement proposes pinning to latest."""
    req = Mock()
    req.is_pinned = False
    req.key = "django"
    req.latest_version_within_specs = "1.10"
    self.assertEqual(
        Update.get_commit_message(req),
        "Pin django to latest version 1.10")
def test_issue_to_changelog(self):
    """issue_to_changelog should emit a Created entry plus each status change.

    Fix: replaced the deprecated `assertEquals` alias with `assertEqual`.
    """
    issue = Mock()
    issue.key = 'some key'
    issue.fields = Mock()
    issue.fields.created = '1986-07-21T00:00:00Z'
    issue.changelog = Mock()
    item1 = Mock()
    item1.field = 'status'
    item1.toString = 'To Do'
    history1 = Mock()
    history1.created = '2017-01-01T00:00:00Z'
    history1.items = [item1]
    # item2 is not a status change; it must be ignored by the conversion.
    item2 = Mock()
    item2.field = 'something else'
    item3 = Mock()
    item3.field = 'status'
    item3.toString = 'In Progress'
    history2 = Mock()
    history2.created = '2017-02-02T00:00:00Z'
    history2.items = [item2, item3]
    issue.changelog.histories = [history1, history2]
    self.assertEqual(
        issue_to_changelog(issue),
        {
            'key': 'some key',
            'changelog': [(u'Created', parse_date('1986-07-21T00:00:00Z')),
                          (u'To Do', parse_date('2017-01-01T00:00:00Z')),
                          (u'In Progress', parse_date('2017-02-02T00:00:00Z'))]
        })
def fetch_access_token(self, *args, **kwargs):
    """Return a mock access token carrying this object's key and secret.

    Raises `self.error` (when set) to simulate an OAuth failure.
    """
    if self.error:
        raise self.error('')
    token = Mock(['key', 'secret'])
    token.key = self.key
    token.secret = self.secret
    return token
def test_exec_config_with_set_workspaces(self):
    """Test exec config with set workspaces subcommand."""
    args = Mock()
    args.value = False
    args.key = "workspaces"
    args.action = "set"
    # Setting "workspaces" to False should make execute report failure.
    self.assertFalse(self.subcommand.execute(args))
def is_attribute_unchanged_data(value):
    """Setting an attribute to its current data value must not mark it dirty."""
    mock = Mock(Resource)
    # Bypass Resource's __setattr__ to install the internal state directly.
    for attr, val in (('PROPERTIES', {'key': object}),
                      ('_data', {'key': value}),
                      ('_dirty', dict())):
        object.__setattr__(mock, attr, val)
    mock.key = value
    assert 'key' not in mock._dirty
def create_mock_jobs(cls):
    """Build one mock job per name under the test role and environment."""
    def _job(name):
        # Each job only needs a JobKey for the tests that consume it.
        job = Mock()
        job.key = JobKey(role=cls.TEST_ROLE, environment=cls.TEST_ENV, name=name)
        return job
    return [_job(name) for name in ['foo', 'bar', 'baz']]
def assert_handled(self, key, exp_handler=SAME_AS_KEY, **exp_captures):
    """Fire a mock etcd event at the dispatcher and verify handler routing.

    :param key: etcd key to put on the synthetic event.
    :param exp_handler: handler expected to fire; SAME_AS_KEY means the
        handler registered under `key` itself; None means no handler.
    :param exp_captures: captured path segments the handler should receive.
    """
    if exp_handler is SAME_AS_KEY:
        exp_handler = key
    if isinstance(exp_handler, types.StringTypes):
        # Handler table keys are stored without surrounding slashes.
        exp_handler = exp_handler.strip("/")
    m_response = Mock(spec=etcd.EtcdResult)
    m_response.key = key
    m_response.action = self.action
    self.dispatcher.handle_event(m_response)
    # All handlers for the expected action except the target must stay idle.
    exp_handlers = self.handlers[self.expected_handlers]
    for handler_key, handler in exp_handlers.iteritems():
        assert isinstance(handler, Mock)
        if handler_key == exp_handler:
            continue
        self.assertFalse(handler.called,
                         "Unexpected set handler %s was called for "
                         "key %s" % (handler_key, key))
    # No handler for the opposite action may fire at all.
    unexp_handlers = self.handlers[self.unexpected_handlers]
    for handler_key, handler in unexp_handlers.iteritems():
        assert isinstance(handler, Mock)
        self.assertFalse(handler.called,
                         "Unexpected del handler %s was called for "
                         "key %s" % (handler_key, key))
    if exp_handler is not None:
        exp_handlers[exp_handler].assert_called_once_with(
            m_response, **exp_captures)
def test_request_token_fake(self):
    """Try with a phony consumer key"""
    consumer = Mock()
    consumer.key = "yer"
    consumer.secret = "mom"
    response = client.get("oauth.request_token", consumer)
    eq_(response.content, "Invalid consumer.")
def create_mock_scheduled_tasks(cls):
    """Return three RUNNING mock scheduled tasks named foo, bar and baz."""
    jobs = []
    for name in ['foo', 'bar', 'baz']:
        job = Mock()
        job.key = JobKey(role=cls.TEST_ROLE, environment=cls.TEST_ENV, name=name)
        job.failure_count = 0
        job.status = ScheduleStatus.RUNNING
        # Assigned task with a fully populated task config.
        assigned = Mock(spec=AssignedTask)
        assigned.slaveHost = 'slavehost'
        assigned.instanceId = 4237894
        assigned.assignedPorts = None
        config = Mock(spec=TaskConfig)
        config.maxTaskFailures = 1
        config.executorConfig = Mock(spec=ExecutorConfig)
        config.executorConfig.data = Mock()
        config.metadata = []
        config.owner = Identity(role='bozo')
        config.environment = 'test'
        config.jobName = 'woops'
        config.numCpus = 2
        config.ramMb = 2
        config.diskMb = 2
        assigned.task = config
        job.assignedTask = assigned
        # One RUNNING task event on each job.
        event = Mock(spec=TaskEvent)
        event.timestamp = 28234726395
        event.status = ScheduleStatus.RUNNING
        event.message = "Hi there"
        job.taskEvents = [event]
        jobs.append(job)
    return jobs
def create_getjobs_response(cls):
    """Build a success response whose getJobsResult contains two mock jobs."""
    result = cls.create_simple_success_response()
    result.result = Mock()
    result.result.getJobsResult = Mock()
    configs = []
    for role, env, name in (('RoleA', 'test', 'hithere'),
                            ('bozo', 'test', 'hello')):
        job = Mock()
        job.key = Mock()
        job.key.role = role
        job.key.environment = env
        job.key.name = name
        configs.append(job)
    result.result.getJobsResult.configs = configs
    return result
def check_attach_ethernet_card(self, settings):
    """Drive devices.attach_ethernet_card through its failure paths.

    Checks, in order: unknown network name, missing VM id with a
    distributed network, missing VM id with a simple network, and a
    device-add failure once a VM id is present.
    """
    _ctx = self._gen_relation_ctx()
    conn_mock = Mock()
    smart_connect = MagicMock(return_value=conn_mock)
    with patch("vsphere_plugin_common.SmartConnectNoSSL", smart_connect):
        with patch("vsphere_plugin_common.Disconnect", Mock()):
            # use unexisted network
            _ctx.source.instance.runtime_properties.update(settings)
            network = None
            with patch(
                    "vsphere_plugin_common.VsphereClient._get_obj_by_name",
                    MagicMock(return_value=network)):
                with self.assertRaises(NonRecoverableError) as e:
                    devices.attach_ethernet_card(ctx=_ctx)
                self.assertEqual(e.exception.message,
                                 "Network Cloudify could not be found")
            # without vm-id / distributed
            _ctx.source.instance.runtime_properties[
                'switch_distributed'] = True
            network = Mock()
            network.obj = network
            network.config.distributedVirtualSwitch.uuid = "aa-bb-vv"
            network.key = "121"
            with patch(
                    "vsphere_plugin_common.VsphereClient._get_obj_by_name",
                    MagicMock(return_value=network)):
                with self.assertRaises(NonRecoverableError) as e:
                    devices.attach_ethernet_card(ctx=_ctx)
                self.assertEqual(e.exception.message, "VM is not defined")
            # without vm-id / simple network
            _ctx.source.instance.runtime_properties[
                'switch_distributed'] = False
            network = vim.Network("Cloudify")
            network.obj = network
            with patch(
                    "vsphere_plugin_common.VsphereClient._get_obj_by_name",
                    MagicMock(return_value=network)):
                with self.assertRaises(NonRecoverableError) as e:
                    devices.attach_ethernet_card(ctx=_ctx)
                self.assertEqual(e.exception.message, "VM is not defined")
            # issues with add device
            _ctx.target.instance.runtime_properties[
                'vsphere_server_id'] = "vm-101"
            network = vim.Network("Cloudify")
            network.obj = network
            vm = self._get_vm()
            with patch(
                    "vsphere_plugin_common.VsphereClient._get_obj_by_id",
                    MagicMock(return_value=vm)):
                with patch(
                        "vsphere_plugin_common.VsphereClient._get_obj_by_name",
                        MagicMock(return_value=network)):
                    with self.assertRaises(NonRecoverableError) as e:
                        devices.attach_ethernet_card(ctx=_ctx)
                    self.assertEqual(
                        e.exception.message,
                        "Have not found key for new added device")
def test_filter_nonmatching_file_rules(self):
    """When the file evaluator rejects a file, line evaluators must not run.

    Fixes: removed the unused local `alert` and replaced the deprecated
    `assertEquals` alias with `assertEqual`.
    """
    file_evaluator = Mock()
    file_evaluator.key = "file"
    file_evaluator.matches = Mock(return_value=False)
    line_evaluator = Mock()
    line_evaluator.key = "line"
    line_evaluator.matches = Mock(return_value=False)
    rule = Mock()
    rule.name = "test"
    rule.evaluators = [file_evaluator, line_evaluator]
    code_checker = CodeChecker([], [rule])
    code_checker.check(self.code, {"filename": "macbeth.txt"})
    # File evaluator runs once; its rejection short-circuits line checks.
    self.assertEqual(1, file_evaluator.matches.call_count)
    self.assertEqual(0, line_evaluator.matches.call_count)
def test_repo_groups(self):
    """Rules excluded for a repo's group must produce no alerts.

    Fix: replaced the deprecated `assertEquals` alias with `assertEqual`.
    """
    line_evaluator = Mock()
    line_evaluator.key = "line"
    line_evaluator.matches = Mock(return_value=True)
    rule = Mock()
    rule.name = "os_code_exec::python"
    rule.evaluators = [line_evaluator]
    junk_repo = Mock()
    junk_repo.name = 'junk'
    local_repo = Mock()
    local_repo.name = 'tooling'
    repo_groups = {
        'skipped_repos': ['junk'],
        'local_repos': ['tooling']
    }
    # skipped_repos drop every rule; local_repos drop os_code_exec:: rules.
    rules_to_groups = {
        'skipped_repos': [{'except': '.*'}],
        'local_repos': [
            {'match': '.*'},
            {'except': 'os_code_exec::.*'}
        ]
    }
    code_checker = CodeChecker(context_processors=[],
                               rules=[rule],
                               repo_groups=repo_groups,
                               rules_to_groups=rules_to_groups)
    check_context = {"filename": "macbeth.txt"}
    self.assertEqual(
        code_checker.check(lines=self.code, context=check_context,
                           repo=junk_repo),
        [])
    self.assertEqual(
        code_checker.check(lines=self.code, context=check_context,
                           repo=local_repo),
        [])
def test_pinned_requirement(self):
    """Commit message for a pinned requirement proposes an update."""
    req = Mock()
    req.is_pinned = True
    req.key = "django"
    req.version = "1.0"
    req.latest_version_within_specs = "1.10"
    self.assertEqual(
        Update.get_commit_message(req),
        "Update django from 1.0 to 1.10")
def test_requirement_pinned(self):
    """Branch name for a pinned requirement encodes old and new versions."""
    req = Mock()
    req.is_pinned = True
    req.key = "django"
    req.version = "1.0"
    req.latest_version_within_specs = "1.10"
    self.assertEqual(
        SequentialUpdate.get_branch(req),
        "pyup-update-django-1.0-to-1.10")
def test_unpinned_requirement(self):
    """Commit message for an unpinned requirement uses the configured template."""
    req = Mock()
    req.is_pinned = False
    req.key = "django"
    req.latest_version_within_specs = "1.10"
    self.assertEqual(
        self.update.get_commit_message(req),
        "Version 1.10 for django is fix now.")
def test_request_token_fake(self):
    """Try with a phony consumer key"""
    consumer = Mock()
    consumer.key = 'yer'
    consumer.secret = 'mom'
    response = oclient.get('oauth.request_token', consumer, callback=True)
    eq_(response.content, 'Invalid Consumer.')
def test_request_token_fake(self):
    """Try with a phony consumer key"""
    consumer = Mock()
    consumer.secret = 'mom'
    consumer.key = 'yer'
    response = client.get('oauth.request_token', consumer)
    eq_(response.content, 'Invalid consumer.')
def assert_handled(self, key, exp_handler=SAME_AS_KEY, **exp_captures):
    """Fire a mock etcd event at the dispatcher and verify handler routing.

    :param key: etcd key to put on the synthetic event.
    :param exp_handler: handler expected to fire; SAME_AS_KEY means the
        handler registered under `key` itself; None means no handler.
    :param exp_captures: captured path segments the handler should receive.
    """
    if exp_handler is SAME_AS_KEY:
        exp_handler = key
    if isinstance(exp_handler, types.StringTypes):
        # Handler table keys are stored without surrounding slashes.
        exp_handler = exp_handler.strip("/")
    m_response = Mock(spec=etcd.EtcdResult)
    m_response.key = key
    m_response.action = self.action
    self.dispatcher.handle_event(m_response)
    # All handlers for the expected action except the target must stay idle.
    exp_handlers = self.handlers[self.expected_handlers]
    for handler_key, handler in exp_handlers.iteritems():
        assert isinstance(handler, Mock)
        if handler_key == exp_handler:
            continue
        self.assertFalse(
            handler.called,
            "Unexpected set handler %s was called for "
            "key %s" % (handler_key, key))
    # No handler for the opposite action may fire at all.
    unexp_handlers = self.handlers[self.unexpected_handlers]
    for handler_key, handler in unexp_handlers.iteritems():
        assert isinstance(handler, Mock)
        self.assertFalse(
            handler.called,
            "Unexpected del handler %s was called for "
            "key %s" % (handler_key, key))
    if exp_handler is not None:
        exp_handlers[exp_handler].assert_called_once_with(
            m_response, **exp_captures)
def test_request_token_fake(self):
    """A valid key with the wrong secret should be rejected with a 401."""
    consumer = Mock()
    consumer.key = self.access.key
    consumer.secret = 'mom'
    res = self.auth.is_authenticated(self.call(client=OAuthClient(consumer)))
    eq_(res.status_code, 401)
    eq_(json.loads(res.content)['reason'], errors['headers'])
def create_mock_scheduled_tasks(cls):
    """Return three RUNNING mock scheduled tasks keyed by the test role/env."""
    jobs = []
    for name in ['foo', 'bar', 'baz']:
        job_key = JobKey(role=cls.TEST_ROLE, environment=cls.TEST_ENV,
                         name=name)
        job = Mock()
        job.key = job_key
        job.failure_count = 0
        job.status = ScheduleStatus.RUNNING
        assigned = Mock(spec=AssignedTask)
        assigned.taskId = 1287391823
        assigned.slaveHost = 'slavehost'
        assigned.instanceId = 4237894
        assigned.assignedPorts = {}
        config = Mock(spec=TaskConfig)
        config.executorConfig = Mock()
        config.maxTaskFailures = 1
        config.metadata = []
        # The task config carries the same job key and identity fields.
        config.job = job_key
        config.owner = Identity(role=cls.TEST_ROLE)
        config.environment = cls.TEST_ENV
        config.jobName = name
        config.numCpus = 2
        config.ramMb = 2
        config.diskMb = 2
        assigned.task = config
        job.assignedTask = assigned
        event = Mock(spec=TaskEvent)
        event.timestamp = 28234726395
        event.status = ScheduleStatus.RUNNING
        event.message = "Hi there"
        job.taskEvents = [event]
        jobs.append(job)
    return jobs
def create_mock_scheduled_tasks(cls):
    """Return three RUNNING mock scheduled tasks named foo, bar and baz."""
    jobs = []
    for name in ['foo', 'bar', 'baz']:
        job = Mock()
        job.key = JobKey(role=cls.TEST_ROLE, environment=cls.TEST_ENV, name=name)
        job.failure_count = 0
        job.status = ScheduleStatus.RUNNING
        assigned = Mock(spec=AssignedTask)
        assigned.slaveHost = 'slavehost'
        assigned.instanceId = 4237894
        assigned.assignedPorts = None
        config = Mock(spec=TaskConfig)
        config.maxTaskFailures = 1
        config.packagesDEPRECATED = []
        config.owner = Identity(role='mchucarroll')
        config.environment = 'test'
        config.jobName = 'woops'
        config.numCpus = 2
        config.ramMb = 2
        config.diskMb = 2
        assigned.task = config
        job.assignedTask = assigned
        event = Mock(spec=TaskEvent)
        event.timestamp = 28234726395
        event.status = ScheduleStatus.RUNNING
        event.message = "Hi there"
        job.taskEvents = [event]
        jobs.append(job)
    return jobs
def test_no_hunk(self, mock_query):
    """Ensure that when no CommitHunk is found, None is returned."""
    # Each query step returns the next mock in the chain; the final get()
    # yields None to simulate no matching hunk.
    q1, q2, q3, q4 = Mock(), Mock(), Mock(), Mock()
    q1.filter.return_value = q2
    q2.filter.return_value = q3
    q3.order.return_value = q4
    q4.get.return_value = None
    mock_query.return_value = q1
    repo = Mock()
    repo.key = Mock()
    filename = 'foo'
    line_number = 5
    name, email, user = blame.blame(repo, filename, line_number)
    for value in (name, email, user):
        self.assertIsNone(value)
    mock_query.assert_called_once_with(ancestor=repo.key)
    q1.filter.assert_called_once_with(CommitHunk.filename == filename)
    q2.filter.assert_called_once_with(CommitHunk.lines == line_number)
    q3.order.assert_called_once_with(-CommitHunk.timestamp)
    q4.get.assert_called_once_with()
def test_request_token_fake(self):
    """A bad secret must fail authentication and leave the thread unpinned."""
    consumer = Mock()
    consumer.key = self.access.key
    consumer.secret = 'mom'
    request = Request(self.call(client=OAuthClient(consumer)))
    ok_(not self.auth.authenticate(request))
    ok_(not this_thread_is_pinned())
def test_request_token_fake(self):
    """A consumer with the wrong secret should receive a 401."""
    consumer = Mock()
    consumer.key = self.accepted_consumer.key
    consumer.secret = 'mom'
    self.client = OAuthClient(consumer)
    res = self.client.get(self.url)
    eq_(res.status_code, 401)
def test_filter_nonmatching_file_rules(self):
    """When the file evaluator rejects a path, line evaluators must not run.

    Fixes: removed the unused local `alert` and replaced the deprecated
    `assertEquals` alias with `assertEqual`.
    """
    file_evaluator = Mock()
    file_evaluator.key = "file"
    file_evaluator.matches = Mock(return_value=False)
    line_evaluator = Mock()
    line_evaluator.key = "line"
    line_evaluator.matches = Mock(return_value=False)
    rule = Mock()
    rule.name = "test"
    rule.evaluators = [file_evaluator, line_evaluator]
    code_checker = CodeChecker([], [rule])
    code_checker.check(self.code, {"filename": "foo/macbeth.txt"})
    # File evaluator runs once; its rejection short-circuits line checks.
    self.assertEqual(1, file_evaluator.matches.call_count)
    self.assertEqual(0, line_evaluator.matches.call_count)
def create_mock_scheduled_tasks(cls):
    """Return three RUNNING mock scheduled tasks named foo, bar and baz."""
    jobs = []
    for name in ["foo", "bar", "baz"]:
        job = Mock()
        job.key = JobKey(role=cls.TEST_ROLE, environment=cls.TEST_ENV, name=name)
        job.failure_count = 0
        job.status = ScheduleStatus.RUNNING
        assigned = Mock(spec=AssignedTask)
        assigned.slaveHost = "slavehost"
        assigned.instanceId = 4237894
        assigned.assignedPorts = None
        config = Mock(spec=TaskConfig)
        config.maxTaskFailures = 1
        config.executorConfig = Mock(spec=ExecutorConfig)
        config.executorConfig.data = Mock()
        config.metadata = []
        config.owner = Identity(role="mchucarroll")
        config.environment = "test"
        config.jobName = "woops"
        config.numCpus = 2
        config.ramMb = 2
        config.diskMb = 2
        assigned.task = config
        job.assignedTask = assigned
        event = Mock(spec=TaskEvent)
        event.timestamp = 28234726395
        event.status = ScheduleStatus.RUNNING
        event.message = "Hi there"
        job.taskEvents = [event]
        jobs.append(job)
    return jobs
def mock_read_4_endpoints(path, recursive):
    """Fake etcd read returning two hosts' config plus four endpoints."""
    assert path == ALL_ENDPOINTS_PATH
    assert recursive
    specs = [
        (CALICO_V_PATH + "/host/TEST_HOST/bird_ip",
         "192.168.1.1"),
        (CALICO_V_PATH + "/host/TEST_HOST/bird6_ip",
         "fd80::4"),
        (CALICO_V_PATH + "/host/TEST_HOST/config/marker",
         "created"),
        (CALICO_V_PATH + "/host/TEST_HOST/workload/docker/1234/endpoint/567890abcdef",
         EP_56.to_json()),
        (CALICO_V_PATH + "/host/TEST_HOST/workload/docker/5678/endpoint/90abcdef1234",
         EP_90.to_json()),
        (CALICO_V_PATH + "/host/TEST_HOST2/bird_ip",
         "192.168.1.2"),
        (CALICO_V_PATH + "/host/TEST_HOST2/bird6_ip",
         "fd80::3"),
        (CALICO_V_PATH + "/host/TEST_HOST2/config/marker",
         "created"),
        (CALICO_V_PATH + "/host/TEST_HOST2/workload/docker/1234/endpoint/7890abcdef12",
         EP_78.to_json()),
        (CALICO_V_PATH + "/host/TEST_HOST2/workload/docker/5678/endpoint/1234567890ab",
         EP_12.to_json()),
    ]
    leaves = []
    for key, value in specs:
        leaf = Mock(spec=EtcdResult)
        leaf.key = key
        leaf.value = value
        leaves.append(leaf)
    result = Mock(spec=EtcdResult)
    # Callers consume the leaves as an iterator.
    result.leaves = iter(leaves)
    return result
def test_cover_no_match(self):
    """An unknown action on an unmatched key must trigger no handlers."""
    event = Mock(spec=etcd.EtcdResult)
    event.key = "/a"
    event.action = "unknown"
    self.dispatcher.handle_event(event)
    for handlers in self.handlers.itervalues():
        for key, handler in handlers.iteritems():
            self.assertFalse(handler.called,
                             msg="Unexpected handler called: %s" % key)
def test_exec_config_with_set(self):
    """Test exec config with set subcommand."""
    args = Mock()
    args.value = "/tmp/yoda.log"
    args.key = "logfile"
    args.action = "set"
    self.subcommand.execute(args)
    # The new value must be written through to the config store.
    self.assertEqual("/tmp/yoda.log", self.config["logfile"])
def create_getjobs_response(cls):
    """Build an OK Response whose getJobsResult holds two spec'd mock jobs."""
    result = Mock(spec=Response)
    result.responseCode = ResponseCode.OK
    result.result = Mock(spec=Result)
    result.result.getJobsResult = Mock(spec=GetJobsResult)
    configs = []
    for role, env, name in (('RoleA', 'test', 'hithere'),
                            ('bozo', 'test', 'hello')):
        job = Mock(spec=JobConfiguration)
        job.key = Mock(spec=JobKey)
        job.key.role = role
        job.key.environment = env
        job.key.name = name
        configs.append(job)
    result.result.getJobsResult.configs = configs
    return result
def test_request_token_fake(self):
    """A valid key with a bad secret should get a 401 with a header error."""
    consumer = Mock()
    consumer.key = self.access.key
    consumer.secret = 'mom'
    self.client = OAuthClient(consumer)
    res = self.client.get(self.url)
    eq_(res.status_code, 401)
    eq_(json.loads(res.content)['reason'], errors['headers'])
def test_pinned_requirement(self):
    """Commit message for a pinned requirement uses the configured template."""
    req = Mock()
    req.is_pinned = True
    req.key = "django"
    req.version = "1.0"
    req.latest_version_within_specs = "1.10"
    self.assertEqual(
        self.update.get_commit_message(req),
        "1.0 is old, update django to 1.10.")
def test_get_stack_parameters_dict_returns_proper_dict(self, _, get_stack_mock):
    """Stack parameters should be converted into a key -> value dict."""
    cfn = CloudFormation()
    parameters = []
    for key, value in (("myKey1", "myValue1"), ("myKey2", "myValue2")):
        param = Mock()
        param.key = key
        param.value = value
        parameters.append(param)
    stack_mock = Mock()
    stack_mock.parameters = parameters
    get_stack_mock.return_value = stack_mock
    result = cfn.get_stack_parameters_dict('foo')
    self.assertDictEqual({'myKey1': 'myValue1', 'myKey2': 'myValue2'}, result)
def test_requirement_pinned(self):
    """Branch name for a pinned requirement encodes old and new versions."""
    req = Mock()
    req.is_pinned = True
    req.key = "django"
    req.version = "1.0"
    req.latest_version_within_specs = "1.10"
    self.assertEqual(
        SequentialUpdate.get_branch(req),
        "update-django-1.0-to-1.10")
def dispatch(self, key, action, value=None):
    """
    Send an EtcdResult to the watcher's dispatcher.
    """
    event = Mock(spec=EtcdResult)
    event.key = key
    event.value = value
    event.action = action
    self.watcher.dispatcher.handle_event(event)
def test_add_with_empty(self):
    """Adding an unpinned requirement creates a single "django-pin" entry."""
    update = Update([], self.config)
    req = Mock()
    req.is_pinned = False
    req.key = "django"
    req.latest_version_within_specs = "1.10"
    req_file = Mock()
    update.add(req, req_file)
    self.assertEqual("django-pin" in update, True)
    self.assertEqual(len(update["django-pin"]), 1)
def test_flatten_issue_with_expand(self):
    """Subtasks should be expanded via get_issue and appended after the parent.

    Fix: replaced the deprecated `assertEquals` alias with `assertEqual`.
    """
    issue = Mock()
    issue.fields = Mock()
    issue1 = Mock()
    issue1.key = 'issue1'
    issue.fields.subtasks = [issue1]
    get_issue = Mock(return_value='expanded_issue1')
    self.assertEqual(
        flatten_issue(get_issue, issue, expand_subtasks=True),
        [issue, 'expanded_issue1'])
    # Subtasks are fetched by key with the changelog expanded.
    get_issue.assert_called_with('issue1', expand='changelog')
def test_evaluators_called(self):
    """The line evaluator should be invoked once for every line of code.

    Fixes: removed the unused local `alert` and replaced the deprecated
    `assertEquals` alias with `assertEqual`.
    """
    evaluator = Mock()
    evaluator.key = "line"
    evaluator.matches = Mock(return_value=False)
    rule = Mock()
    rule.name = "test"
    rule.evaluators = [evaluator]
    code_checker = CodeChecker([], [rule])
    code_checker.check(self.code, {"filename": "macbeth.txt"})
    self.assertEqual(len(self.code), evaluator.matches.call_count)
def test_rename_to(self):
    """rename_to should copy to the new key, delete the old, and return the target."""
    new_loc = Mock(name='new_loc', spec=S3FileUrl)
    new_loc.bucket = 'bucket'
    new_loc.key = 'newfile'
    new_loc.url = 's3://bucket/newfile'
    result = self.s3_file_url.rename_to(new_loc)
    s3_object = self.mock_s3_resource.Object.return_value
    s3_object.copy_from.assert_called_with(CopySource={
        'Bucket': 'bucket',
        'Key': 'topdir/bottomdir/file'
    })
    s3_object.delete.assert_called_with()
    self.assertEqual(result, new_loc)
def _create_getjobs_response(cls):
    """Build a success response containing one cron job (bozo/test/hello)."""
    response = cls.create_simple_success_response()
    response.result = Mock()
    response.result.getJobsResult = Mock()
    job = Mock()
    job.cronSchedule = "* * * * *"
    job.key = Mock()
    job.key.role = "bozo"
    job.key.environment = "test"
    job.key.name = "hello"
    response.result.getJobsResult.configs = [job]
    return response
def test_get_stack_parameters_dict_returns_proper_dict(
        self, _, get_stack_mock):
    """Stack parameters should map one-to-one into the returned dict."""
    cfn = CloudFormation()
    param_a = Mock()
    param_a.key = "myKey1"
    param_a.value = "myValue1"
    param_b = Mock()
    param_b.key = "myKey2"
    param_b.value = "myValue2"
    stack = Mock()
    stack.parameters = [param_a, param_b]
    get_stack_mock.return_value = stack
    self.assertDictEqual(
        {'myKey1': 'myValue1', 'myKey2': 'myValue2'},
        cfn.get_stack_parameters_dict('foo'))