def _shelve_offload(self, clean_shutdown=True):
    """Helper: shelve-offload an instance and assert the resulting state.

    Verifies that the driver is powered off (with the configured
    timeout/retry values iff clean_shutdown), the network is cleaned up
    on the host, notifications are emitted, and the instance ends up
    SHELVED_OFFLOADED with no task state.
    """
    host = "fake-mini"
    instance = self._create_fake_instance_obj(params={"host": host})
    instance.task_state = task_states.SHELVING
    instance.save()
    cur_time = timeutils.utcnow()
    timeutils.set_time_override(cur_time)
    # Restore the real clock after the test; without this the override
    # leaks into every later test in the process.
    self.addCleanup(timeutils.clear_time_override)
    self.mox.StubOutWithMock(self.compute, "_notify_about_instance_usage")
    self.mox.StubOutWithMock(self.compute.driver, "power_off")
    self.mox.StubOutWithMock(self.compute, "_get_power_state")
    self.mox.StubOutWithMock(self.compute.network_api,
                             "cleanup_instance_network_on_host")
    self.compute._notify_about_instance_usage(self.context, instance,
                                              "shelve_offload.start")
    if clean_shutdown:
        # A clean shutdown uses the configured timeout and retry interval.
        self.compute.driver.power_off(instance,
                                      CONF.shutdown_timeout,
                                      self.compute.SHUTDOWN_RETRY_INTERVAL)
    else:
        # An unclean shutdown powers off immediately.
        self.compute.driver.power_off(instance, 0, 0)
    self.compute.network_api.cleanup_instance_network_on_host(
        self.context, instance, instance.host)
    self.compute._get_power_state(self.context, instance).AndReturn(123)
    self.compute._notify_about_instance_usage(self.context, instance,
                                              "shelve_offload.end")
    self.mox.ReplayAll()

    with mock.patch.object(instance, "save"):
        self.compute.shelve_offload_instance(self.context, instance,
                                             clean_shutdown=clean_shutdown)
    self.assertEqual(vm_states.SHELVED_OFFLOADED, instance.vm_state)
    self.assertIsNone(instance.task_state)
def test_multiple_signals_same_id_timeout(self, mock_name, mock_swift):
    """Duplicate signals sharing one id count once, so the stack times out."""
    st = create_stack(swiftsignal_template)
    handle = st['test_wait_condition_handle']

    mock_swift_object = mock.Mock()
    mock_swift.return_value = mock_swift_object
    mock_swift_object.url = "http://fake-host.com:8080/v1/AUTH_1234"
    mock_swift_object.head_account.return_value = {
        'x-account-meta-temp-url-key': '123456'
    }
    obj_name = "%s-%s-abcdefghijkl" % (st.name, handle.name)
    mock_name.return_value = obj_name
    mock_swift_object.get_container.return_value = cont_index(obj_name, 2)
    # Every stored signal carries the same id, so only one is counted.
    mock_swift_object.get_object.return_value = (obj_header,
                                                 json.dumps({'id': 1}))

    time_now = timeutils.utcnow()
    time_series = [datetime.timedelta(0, t) + time_now
                   for t in six.moves.xrange(1, 100)]
    timeutils.set_time_override(time_series)
    self.addCleanup(timeutils.clear_time_override)

    st.create()
    self.assertIn("SwiftSignalTimeout: resources.test_wait_condition: "
                  "1 of 2 received - Signal 1 received", st.status_reason)
    wc = st['test_wait_condition']
    self.assertEqual("SwiftSignalTimeout: resources.test_wait_condition: "
                     "1 of 2 received - Signal 1 received",
                     wc.status_reason)
def test_pack_action_finish(self):
    """pack_action_finish fills the request id, instance uuid and finish time."""
    timeutils.set_time_override(override_time=NOW)
    # Restore the real clock so the override cannot leak into other tests.
    self.addCleanup(timeutils.clear_time_override)
    values = instance_action.InstanceAction.pack_action_finish(
        self.context, 'fake-uuid')
    self.assertEqual(values['request_id'], self.context.request_id)
    self.assertEqual(values['instance_uuid'], 'fake-uuid')
    self.assertEqual(values['finish_time'].replace(tzinfo=None), NOW)
def test_finish(self, mock_start, mock_finish):
    """InstanceAction.finish passes the expected packed dicts to the DB layer."""
    timeutils.set_time_override(override_time=NOW)
    # Restore the real clock so the override cannot leak into other tests.
    self.addCleanup(timeutils.clear_time_override)
    expected_packed_action_start = {
        'request_id': self.context.request_id,
        'user_id': self.context.user_id,
        'project_id': self.context.project_id,
        'instance_uuid': 'fake-uuid',
        'action': 'fake-action',
        'start_time': self.context.timestamp,
    }
    expected_packed_action_finish = {
        'request_id': self.context.request_id,
        'instance_uuid': 'fake-uuid',
        'finish_time': NOW,
    }
    mock_start.return_value = fake_action
    mock_finish.return_value = fake_action
    action = instance_action.InstanceAction.action_start(
        self.context, 'fake-uuid', 'fake-action')
    action.finish()
    mock_start.assert_called_once_with(self.context,
                                       expected_packed_action_start)
    mock_finish.assert_called_once_with(self.context,
                                        expected_packed_action_finish)
    self.compare_obj(action, fake_action)
def setUp(self):
    """Set up fake network info, a stub notifier, flags and a frozen clock."""
    super(NotificationsTestCase, self).setUp()
    self.fixture = self.useFixture(o_fixture.ClearRequestContext())

    self.net_info = fake_network.fake_get_instance_nw_info(self, 1, 1)

    def fake_get_nw_info(cls, ctxt, instance):
        # All notification lookups must be made with an admin context.
        self.assertTrue(ctxt.is_admin)
        return self.net_info

    self.stub_out('nova.network.api.API.get_instance_nw_info',
                  fake_get_nw_info)
    fake_notifier.stub_notifier(self)
    self.addCleanup(fake_notifier.reset)

    self.flags(host='testhost')
    self.flags(notify_on_state_change="vm_and_task_state",
               group='notifications')
    self.flags(api_servers=['http://localhost:9292'], group='glance')

    self.user_id = 'fake'
    self.project_id = 'fake'
    self.context = context.RequestContext(self.user_id, self.project_id)

    self.fake_time = datetime.datetime(2017, 2, 2, 16, 45, 0)
    timeutils.set_time_override(self.fake_time)
    # Restore the real clock after each test; otherwise the override
    # leaks into every later test in the process.
    self.addCleanup(timeutils.clear_time_override)
    self.instance = self._wrapped_create()

    self.decorated_function_called = False
def test_evaluate_ceilometer_controlled(self):
    """No actions result while a watch rule is CEILOMETER_CONTROLLED."""
    rule = {'EvaluationPeriods': '1',
            'MetricName': 'test_metric',
            'Period': '300',
            'Statistic': 'Maximum',
            'ComparisonOperator': 'GreaterThanOrEqualToThreshold',
            'Threshold': '30'}

    now = timeutils.utcnow()
    timeutils.set_time_override(now)
    self.addCleanup(timeutils.clear_time_override)

    # Now data breaches Threshold, but we're suspended
    last = now - datetime.timedelta(seconds=300)
    data = WatchData(35, now - datetime.timedelta(seconds=150))
    self.wr = watchrule.WatchRule(context=self.ctx,
                                  watch_name="testwatch",
                                  rule=rule,
                                  watch_data=[data],
                                  stack_id=self.stack_id,
                                  last_evaluated=last)
    self.wr.state_set(self.wr.CEILOMETER_CONTROLLED)

    actions = self.wr.evaluate()
    self.assertEqual(self.wr.CEILOMETER_CONTROLLED, self.wr.state)
    self.assertEqual([], actions)
def test_evaluate_suspend(self):
    """A SUSPENDED watch rule yields no actions even when data breaches."""
    # Setup
    rule = {'EvaluationPeriods': '1',
            'MetricName': 'test_metric',
            'Period': '300',
            'Statistic': 'Maximum',
            'ComparisonOperator': 'GreaterThanOrEqualToThreshold',
            'Threshold': '30'}

    now = timeutils.utcnow()
    timeutils.set_time_override(now)
    self.addCleanup(timeutils.clear_time_override)

    last = now - datetime.timedelta(seconds=300)
    data = WatchData(35, now - datetime.timedelta(seconds=150))
    wr = watchrule.WatchRule(context=self.ctx,
                             watch_name="testwatch",
                             rule=rule,
                             watch_data=[data],
                             stack_id=self.stack_id,
                             last_evaluated=last)
    wr.state_set(wr.SUSPENDED)

    # Test
    actions = wr.evaluate()
    self.assertEqual(wr.SUSPENDED, wr.state)
    self.assertEqual([], actions)
def test_trusted_filter_update_cache(self, req_mock):
    """The attestation cache is re-fetched once the auth timeout expires."""
    oat_data = {"hosts": [{"host_name": "node1",
                           "trust_lvl": "untrusted",
                           "vtime": utils.isotime()}]}
    req_mock.return_value = requests.codes.OK, oat_data
    extra_specs = {'trust:trusted_host': 'untrusted'}
    filter_properties = {'context': mock.sentinel.ctx,
                         'instance_type': {'memory_mb': 1024,
                                           'extra_specs': extra_specs}}
    host = fakes.FakeHostState('host1', 'node1', {})

    self.filt_cls.host_passes(host, filter_properties)  # Fill the caches

    # A second call within the timeout must be served from the cache.
    req_mock.reset_mock()
    self.filt_cls.host_passes(host, filter_properties)
    self.assertFalse(req_mock.called)

    req_mock.reset_mock()

    timeutils.set_time_override(timeutils.utcnow())
    # Use addCleanup rather than a trailing clear_time_override() call:
    # the trailing call was skipped whenever an assertion failed, leaking
    # the fake clock into later tests.
    self.addCleanup(timeutils.clear_time_override)
    timeutils.advance_time_seconds(
        CONF.trusted_computing.attestation_auth_timeout + 80)
    self.filt_cls.host_passes(host, filter_properties)
    self.assertTrue(req_mock.called)
def setUp(self):
    """Create three servers across two projects inside a frozen hour."""
    super(SimpleTenantUsageV240Test, self).setUp()
    self.api.microversion = self.microversion
    self.project_id_0 = astb.PROJECT_ID
    self.project_id_1 = '0000000e737461636b20342065000000'
    started = timeutils.utcnow()
    now = started + datetime.timedelta(hours=1)

    timeutils.set_time_override(started)
    # Restore the real clock after each test; otherwise the override
    # leaks into every later test in the process.
    self.addCleanup(timeutils.clear_time_override)
    with mock.patch('oslo_utils.uuidutils.generate_uuid') as mock_uuids:
        # make uuids incrementing, so that sort order is deterministic
        uuid_format = '1f1deceb-17b5-4c04-84c7-e0d4499c8f%02d'
        mock_uuids.side_effect = [uuid_format % x for x in range(100)]
        self.project_id = self.project_id_0
        self.instance1_uuid = self._post_server(name='instance-1')
        self.instance2_uuid = self._post_server(name='instance-2')
        self.project_id = self.project_id_1
        self.instance3_uuid = self._post_server(name='instance-3')
    timeutils.set_time_override(now)
    self.query = {
        'start': str(started),
        'end': str(now),
        'limit': '1',
        'marker': self.instance1_uuid,
    }
def test_timeout(self):
    """With no signals arriving, the wait condition fails with a timeout."""
    self.stack = self.create_stack()

    # Avoid the stack create exercising the timeout code at the same time
    self.m.StubOutWithMock(self.stack, 'timeout_secs')
    self.stack.timeout_secs().MultipleTimes().AndReturn(None)

    now = timeutils.utcnow()
    periods = [0, 0.001, 0.1, 4.1, 5.1]
    periods.extend(range(10, 100, 5))
    fake_clock = [now + datetime.timedelta(0, t) for t in periods]
    timeutils.set_time_override(fake_clock)
    self.addCleanup(timeutils.clear_time_override)
    h_wch.HeatWaitConditionHandle.get_status(
    ).MultipleTimes().AndReturn([])

    self.m.ReplayAll()
    self.stack.create()

    rsrc = self.stack['wait_condition']
    self.assertEqual((rsrc.CREATE, rsrc.FAILED), rsrc.state)
    reason = rsrc.status_reason
    self.assertTrue(reason.startswith('WaitConditionTimeout:'))
    self.m.VerifyAll()
def test_negative_multiplier(self):
    """A negative multiplier reverses ordering: longest runtime weighs lowest."""
    self.flags(preemptible_duration_weight_multiplier=-1.0)
    hostinfo_list = self._get_all_hosts()

    # host1: 0, running time 01:00
    # host2: 1, running time 00:59
    # host3: 2, running time 00:58
    # host4: 3, running time 00:57
    # So the ordering should be host1, host4, host3, host2 but
    # order is reversed, so host2 should win
    now = datetime.datetime(2015, 11, 5, 11, 00)
    timeutils.set_time_override(now)
    # Restore the real clock so the override cannot leak into other tests.
    self.addCleanup(timeutils.clear_time_override)
    weighed_hosts = self._get_weighed_hosts(hostinfo_list)
    self.assertEqual(0.0, weighed_hosts[0].weight)
    self.assertEqual('host2', weighed_hosts[0].obj.host)
    self.assertEqual('host3', weighed_hosts[1].obj.host)
    self.assertEqual('host4', weighed_hosts[2].obj.host)
    # and host1 lose
    self.assertEqual(-1.0, weighed_hosts[3].weight)
    self.assertEqual('host1', weighed_hosts[3].obj.host)
def test_authorize_object_already_created(self, mock_create):
    """A second authorize() on an already-authorized token object raises."""
    # the expires time is calculated from the current time and
    # a ttl value in the object. Fix the current time so we can
    # test expires is calculated correctly as expected
    self.addCleanup(timeutils.clear_time_override)
    timeutils.set_time_override()

    ttl = 10
    expires = timeutils.utcnow_ts() + ttl
    db_dict = copy.deepcopy(fakes.fake_token_dict)
    db_dict['expires'] = expires
    mock_create.return_value = db_dict

    obj = token_obj.ConsoleAuthToken(
        context=self.context,
        console_type=fakes.fake_token_dict['console_type'],
        host=fakes.fake_token_dict['host'],
        port=fakes.fake_token_dict['port'],
        internal_access_path=fakes.fake_token_dict['internal_access_path'],
        instance_uuid=fakes.fake_token_dict['instance_uuid'],
        access_url_base=fakes.fake_token_dict['access_url_base'],
    )
    obj.authorize(100)
    self.assertRaises(exception.ObjectActionError, obj.authorize, 100)
def test_timeout(self):
    """Stack create fails the wait condition with WaitConditionTimeout."""
    self.stack = self.create_stack()

    # Avoid the stack create exercising the timeout code at the same time
    self.m.StubOutWithMock(self.stack, "timeout_secs")
    self.stack.timeout_secs().MultipleTimes().AndReturn(None)

    now = timeutils.utcnow()
    fake_clock = [now + datetime.timedelta(0, t)
                  for t in (0, 0.001, 0.1, 4.1, 5.1)]
    timeutils.set_time_override(fake_clock)
    self.addCleanup(timeutils.clear_time_override)

    aws_wch.WaitConditionHandle.get_status().MultipleTimes().AndReturn([])

    self.m.ReplayAll()
    self.stack.create()

    rsrc = self.stack["WaitForTheHandle"]
    self.assertEqual((rsrc.CREATE, rsrc.FAILED), rsrc.state)
    reason = rsrc.status_reason
    self.assertTrue(reason.startswith("WaitConditionTimeout:"))
    self.m.VerifyAll()
def test_trusted_filter_update_cache_timezone(self, req_mock):
    """Cache expiry math is correct across attestation server timezones."""
    oat_data = {"hosts": [{"host_name": "node1",
                           "trust_lvl": "untrusted",
                           "vtime": "2012-09-09T05:10:40-04:00"}]}
    req_mock.return_value = requests.codes.OK, oat_data
    extra_specs = {'trust:trusted_host': 'untrusted'}
    spec_obj = objects.RequestSpec(
        context=mock.sentinel.ctx,
        flavor=objects.Flavor(memory_mb=1024,
                              extra_specs=extra_specs))
    host = fakes.FakeHostState('host1', 'node1', {})

    timeutils.set_time_override(
        timeutils.normalize_time(
            timeutils.parse_isotime("2012-09-09T09:10:40Z")))
    # Use addCleanup rather than a trailing clear_time_override() call:
    # the trailing call was skipped whenever an assertion failed, leaking
    # the fake clock into later tests.
    self.addCleanup(timeutils.clear_time_override)

    self.filt_cls.host_passes(host, spec_obj)  # Fill the caches

    # A second call within the timeout must be served from the cache.
    req_mock.reset_mock()
    self.filt_cls.host_passes(host, spec_obj)
    self.assertFalse(req_mock.called)

    req_mock.reset_mock()
    timeutils.advance_time_seconds(
        CONF.trusted_computing.attestation_auth_timeout - 10)
    self.filt_cls.host_passes(host, spec_obj)
    self.assertFalse(req_mock.called)
def test_update_timeout(self):
    """Updating Count to 5 while no signals arrive fails with a timeout."""
    self.stack = self.create_stack()
    rsrc = self.stack['WaitForTheHandle']

    now = timeutils.utcnow()
    fake_clock = [now + datetime.timedelta(0, t)
                  for t in (0, 0.001, 0.1, 4.1, 5.1)]
    timeutils.set_time_override(fake_clock)
    self.addCleanup(timeutils.clear_time_override)
    m_gs = self.patchobject(aws_wch.WaitConditionHandle, 'get_status',
                            return_value=[])

    uprops = copy.copy(rsrc.properties.data)
    uprops['Count'] = '5'
    update_snippet = rsrc_defn.ResourceDefinition(rsrc.name, rsrc.type(),
                                                  uprops)
    updater = scheduler.TaskRunner(rsrc.update, update_snippet)
    ex = self.assertRaises(exception.ResourceFailure, updater)
    self.assertEqual("WaitConditionTimeout: resources.WaitForTheHandle: "
                     "0 of 5 received", six.text_type(ex))
    self.assertEqual(5, rsrc.properties['Count'])
    self.assertEqual(2, m_gs.call_count)
def test_transient_cluster_terminate(self, terminate_cluster,
                                     use_os_admin_auth_token):
    """Only the transient cluster whose jobs all finished is terminated."""
    timeutils.set_time_override(datetime.datetime(2005, 2, 1, 0, 0))
    # Restore the real clock so the override cannot leak into other tests.
    self.addCleanup(timeutils.clear_time_override)

    ctx = context.ctx()
    job = self.api.job_create(ctx, te.SAMPLE_JOB)
    ds = self.api.data_source_create(ctx, te.SAMPLE_DATA_SOURCE)

    self._make_cluster('1')
    self._make_cluster('2')

    # Cluster 1's only job has ended; cluster 2 still has running jobs.
    self._create_job_execution({"end_time": timeutils.utcnow(),
                                "id": 1,
                                "cluster_id": "1"},
                               job, ds, ds)
    self._create_job_execution({"end_time": None,
                                "id": 2,
                                "cluster_id": "2"},
                               job, ds, ds)
    self._create_job_execution({"end_time": None,
                                "id": 3,
                                "cluster_id": "2"},
                               job, ds, ds)

    timeutils.set_time_override(datetime.datetime(2005, 2, 1, 0, 1))

    p._make_periodic_tasks().terminate_unneeded_transient_clusters(None)
    self.assertEqual(1, terminate_cluster.call_count)
    terminate_cluster.assert_has_calls([mock.call(u'1')])
    self.assertEqual(1, use_os_admin_auth_token.call_count)
def test_update_timeout(self):
    """After a successful create, an update receiving no signals times out."""
    self.stack = self.create_stack()
    self.m.ReplayAll()
    self.stack.create()

    rsrc = self.stack['WaitForTheHandle']
    self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)
    self.m.VerifyAll()
    self.m.UnsetStubs()

    now = timeutils.utcnow()
    fake_clock = [now + datetime.timedelta(0, t)
                  for t in (0, 0.001, 0.1, 4.1, 5.1)]
    timeutils.set_time_override(fake_clock)
    self.addCleanup(timeutils.clear_time_override)
    self.m.StubOutWithMock(aws_wch.WaitConditionHandle, 'get_status')
    aws_wch.WaitConditionHandle.get_status().MultipleTimes().AndReturn([])

    self.m.ReplayAll()

    uprops = copy.copy(rsrc.properties.data)
    uprops['Count'] = '5'
    update_snippet = rsrc_defn.ResourceDefinition(rsrc.name, rsrc.type(),
                                                  uprops)
    updater = scheduler.TaskRunner(rsrc.update, update_snippet)
    ex = self.assertRaises(exception.ResourceFailure, updater)
    self.assertEqual("WaitConditionTimeout: resources.WaitForTheHandle: "
                     "0 of 5 received", six.text_type(ex))
    self.assertEqual(5, rsrc.properties['Count'])
    self.m.VerifyAll()
def _force_reclaim(self):
    """Advance the clock so queued deletes look old enough to reclaim."""
    # Make sure that compute manager thinks the instance is
    # old enough to be expired.  Note we move the clock *forward* one
    # hour, which makes the instance's deletion time appear in the past.
    one_hour_ahead = timeutils.utcnow() + datetime.timedelta(hours=1)
    timeutils.set_time_override(override_time=one_hour_ahead)
    self.addCleanup(timeutils.clear_time_override)

    ctxt = context.get_admin_context()
    self.compute._reclaim_queued_deletes(ctxt)
def test_utcnow(self):
    """utcnow() honours the override while set and reverts once cleared."""
    timeutils.set_time_override(mock.sentinel.utcnow)
    self.assertEqual(timeutils.utcnow(), mock.sentinel.utcnow)

    timeutils.clear_time_override()
    # assertNotEqual reports both values on failure, unlike the original
    # assertFalse(a == b) which only prints "True is not false".
    self.assertNotEqual(timeutils.utcnow(), mock.sentinel.utcnow)
    self.assertTrue(timeutils.utcnow())
def test_action_finish_no_result(self, mock_finish):
    """action_finish with want_result=False returns None."""
    timeutils.set_time_override(override_time=NOW)
    # Restore the real clock so the override cannot leak into other tests.
    self.addCleanup(timeutils.clear_time_override)
    test_class = instance_action.InstanceAction
    expected_packed_values = test_class.pack_action_finish(self.context,
                                                           "fake-uuid")
    mock_finish.return_value = fake_action
    action = instance_action.InstanceAction.action_finish(
        self.context, "fake-uuid", want_result=False)
    mock_finish.assert_called_once_with(self.context,
                                        expected_packed_values)
    self.assertIsNone(action)
def test_not_transient_cluster_does_not_terminate(self, terminate_cluster):
    """Non-transient clusters are never terminated by the periodic task."""
    timeutils.set_time_override(datetime.datetime(2005, 2, 1, 0, 0))
    # Restore the real clock so the override cannot leak into other tests.
    self.addCleanup(timeutils.clear_time_override)
    self._make_cluster('1', is_transient=False)

    timeutils.set_time_override(datetime.datetime(2005, 2, 1, 0, 1))

    p._make_periodic_tasks().terminate_unneeded_transient_clusters(None)
    self.assertEqual(0, terminate_cluster.call_count)
def _setup_action_mocks(self, mock_get_resource, now, action_expected=True):
    """Setup stubs for the action tests."""
    timeutils.set_time_override(now)
    self.addCleanup(timeutils.clear_time_override)

    if action_expected:
        dummy_action = DummyAction()
        mock_get_resource.return_value = dummy_action
def test_evaluate(self):
    """WatchRule.evaluate transitions NODATA -> NORMAL -> ALARM."""
    # Setup
    rule = {'EvaluationPeriods': '1',
            'MetricName': 'test_metric',
            'Period': '300',
            'Statistic': 'Maximum',
            'ComparisonOperator': 'GreaterThanOrEqualToThreshold',
            'Threshold': '30'}

    now = timeutils.utcnow()
    timeutils.set_time_override(now)
    self.addCleanup(timeutils.clear_time_override)

    # Test 1 - It's not time to evaluate, so should stay NODATA
    last = now - datetime.timedelta(seconds=299)
    data = WatchData(25, now - datetime.timedelta(seconds=150))
    wr = watchrule.WatchRule(context=self.ctx,
                             watch_name="testwatch",
                             rule=rule,
                             watch_data=[data],
                             stack_id=self.stack_id,
                             last_evaluated=last)

    actions = wr.evaluate()
    self.assertEqual('NODATA', wr.state)
    self.assertEqual([], actions)

    # Test 2 - now - last == Period, so should set NORMAL
    last = now - datetime.timedelta(seconds=300)
    data = WatchData(25, now - datetime.timedelta(seconds=150))
    wr = watchrule.WatchRule(context=self.ctx,
                             watch_name="testwatch",
                             rule=rule,
                             watch_data=[data],
                             stack_id=self.stack_id,
                             last_evaluated=last)

    actions = wr.evaluate()
    self.assertEqual('NORMAL', wr.state)
    self.assertEqual(now, wr.last_evaluated)
    self.assertEqual([], actions)

    # Test 3 - Now data breaches Threshold, so should set ALARM
    last = now - datetime.timedelta(seconds=300)
    data = WatchData(35, now - datetime.timedelta(seconds=150))
    wr = watchrule.WatchRule(context=self.ctx,
                             watch_name="testwatch",
                             rule=rule,
                             watch_data=[data],
                             stack_id=self.stack_id,
                             last_evaluated=last)

    actions = wr.evaluate()
    self.assertEqual('ALARM', wr.state)
    self.assertEqual(now, wr.last_evaluated)
    self.assertEqual([], actions)
def test_show(self):
    """GET server detail includes launched_at/terminated_at usage fields."""
    url = self._prefix + ('/servers/%s' % UUID3)
    res = self._make_request(url)
    self.assertEqual(res.status_int, 200)

    now = timeutils.utcnow()
    timeutils.set_time_override(now)
    # The override was never cleared before; make sure it cannot leak
    # into tests that run after this one.
    self.addCleanup(timeutils.clear_time_override)

    self.assertServerUsage(self._get_server(res.body),
                           launched_at=DATE1,
                           terminated_at=DATE2)
def test_transient_cluster_not_killed_too_early(self, terminate_cluster):
    """A transient cluster younger than the kill threshold survives."""
    timeutils.set_time_override(datetime.datetime(2005, 2, 1, second=0))
    # Restore the real clock so the override cannot leak into other tests.
    self.addCleanup(timeutils.clear_time_override)
    self._make_cluster('1')

    timeutils.set_time_override(datetime.datetime(2005, 2, 1, second=20))

    p._make_periodic_tasks().terminate_unneeded_transient_clusters(None)
    self.assertEqual(0, terminate_cluster.call_count)
def test_event_start_no_result(self, mock_start):
    """event_start with want_result=False returns None."""
    timeutils.set_time_override(override_time=NOW)
    # Restore the real clock so the override cannot leak into other tests.
    self.addCleanup(timeutils.clear_time_override)
    test_class = instance_action.InstanceActionEvent
    expected_packed_values = test_class.pack_action_event_start(
        self.context, "fake-uuid", "fake-event")
    mock_start.return_value = fake_event
    event = instance_action.InstanceActionEvent.event_start(
        self.context, "fake-uuid", "fake-event", want_result=False)
    mock_start.assert_called_once_with(self.context,
                                       expected_packed_values)
    self.assertIsNone(event)
def setUp(self):
    """Freeze the clock at a fixed point so audit periods are deterministic."""
    super(AuditPeriodTest, self).setUp()
    # a fairly random time to test with
    self.test_time = datetime.datetime(second=23,
                                       minute=12,
                                       hour=8,
                                       day=5,
                                       month=3,
                                       year=2012)
    timeutils.set_time_override(override_time=self.test_time)
    # Restore the real clock after each test; otherwise the override
    # leaks into every later test in the process.
    self.addCleanup(timeutils.clear_time_override)
def setUp(self):
    """Configure mute-weigher flags and freeze 'now' for the fake cells."""
    super(MuteWeigherTestClass, self).setUp()
    self.flags(mute_weight_multiplier=-10.0, mute_child_interval=100,
               mute_weight_value=1000.0, group='cells')

    self.now = timeutils.utcnow()
    timeutils.set_time_override(self.now)
    # Restore the real clock after each test; otherwise the override
    # leaks into every later test in the process.
    self.addCleanup(timeutils.clear_time_override)

    self.cells = _get_fake_cells()
    # All cells were last seen "now", i.e. none is muted yet.
    for cell in self.cells:
        cell.last_seen = self.now
def test_set_time_override_using_default(self):
    """set_time_override() with no argument pins time to the current moment."""
    now = timeutils.utcnow_ts()

    # NOTE(kgriffs): Normally it's bad form to sleep in a unit test,
    # but this is the only way to test that set_time_override defaults
    # to setting the override to the current time.
    time.sleep(1)

    timeutils.set_time_override()
    # Restore the real clock so the override cannot leak into other tests.
    self.addCleanup(timeutils.clear_time_override)
    overriden_now = timeutils.utcnow_ts()
    self.assertThat(now, matchers.LessThan(overriden_now))
def test_age_cached_images(self):
    """_age_cached_images stamps, keeps or deletes images by timestamp age."""
    def fake_get_ds_browser(ds_ref):
        return 'fake-ds-browser'

    def fake_get_timestamp(ds_browser, ds_path):
        self._get_timestamp_called += 1
        path = str(ds_path)
        if path == '[fake-ds] fake-path/fake-image-1':
            # No time stamp exists
            return
        if path == '[fake-ds] fake-path/fake-image-2':
            # Timestamp that will be valid => no deletion
            return 'ts-2012-11-22-10-00-00'
        if path == '[fake-ds] fake-path/fake-image-3':
            # Timestamp that will be invalid => deletion
            return 'ts-2012-11-20-12-00-00'
        self.fail()

    def fake_mkdir(session, ts_path, dc_ref):
        self.assertEqual(
            '[fake-ds] fake-path/fake-image-1/ts-2012-11-22-12-00-00',
            str(ts_path))

    def fake_file_delete(session, ds_path, dc_ref):
        self.assertEqual('[fake-ds] fake-path/fake-image-3', str(ds_path))

    def fake_timestamp_cleanup(dc_ref, ds_browser, ds_path):
        self.assertEqual('[fake-ds] fake-path/fake-image-4', str(ds_path))

    with contextlib.nested(
            mock.patch.object(self._imagecache, '_get_ds_browser',
                              fake_get_ds_browser),
            mock.patch.object(self._imagecache, '_get_timestamp',
                              fake_get_timestamp),
            mock.patch.object(ds_util, 'mkdir', fake_mkdir),
            mock.patch.object(ds_util, 'file_delete', fake_file_delete),
            mock.patch.object(self._imagecache, 'timestamp_cleanup',
                              fake_timestamp_cleanup),
    ) as (_get_ds_browser, _get_timestamp, _mkdir, _file_delete,
          _timestamp_cleanup):
        timeutils.set_time_override(override_time=self._time)
        # Restore the real clock so the override cannot leak into other
        # tests.
        self.addCleanup(timeutils.clear_time_override)
        datastore = ds_obj.Datastore(name='ds', ref='fake-ds-ref')
        dc_info = vmops.DcInfo(ref='dc_ref', name='name',
                               vmFolder='vmFolder')
        self._get_timestamp_called = 0
        self._imagecache.originals = set(['fake-image-1', 'fake-image-2',
                                          'fake-image-3', 'fake-image-4'])
        self._imagecache.used_images = set(['fake-image-4'])
        self._imagecache._age_cached_images(
            'fake-context', datastore, dc_info,
            ds_obj.DatastorePath('fake-ds', 'fake-path'))
        self.assertEqual(3, self._get_timestamp_called)
def setUp(self):
    """setUp method for simple tenant usage."""
    super(SimpleTenantUsageSampleJsonTest, self).setUp()
    started = timeutils.utcnow()
    now = started + datetime.timedelta(hours=1)

    timeutils.set_time_override(started)
    # Restore the real clock after each test; otherwise the override
    # leaks into every later test in the process.
    self.addCleanup(timeutils.clear_time_override)
    self._post_server()
    timeutils.set_time_override(now)
    self.query = {
        'start': str(started),
        'end': str(now)
    }
def test_action_finish(self, mock_finish):
    """action_finish with want_result=True returns the finished action."""
    timeutils.set_time_override(override_time=NOW)
    # Restore the real clock so the override cannot leak into other tests.
    self.addCleanup(timeutils.clear_time_override)
    test_class = instance_action.InstanceAction
    expected_packed_values = test_class.pack_action_finish(
        self.context, 'fake-uuid')
    mock_finish.return_value = fake_action
    action = instance_action.InstanceAction.action_finish(
        self.context, 'fake-uuid', want_result=True)
    mock_finish.assert_called_once_with(self.context,
                                        expected_packed_values)
    self.compare_obj(action, fake_action)
def test_event_finish_no_result(self, mock_finish):
    """event_finish with want_result=False returns None."""
    timeutils.set_time_override(override_time=NOW)
    # Restore the real clock so the override cannot leak into other tests.
    self.addCleanup(timeutils.clear_time_override)
    test_class = instance_action.InstanceActionEvent
    expected_packed_values = test_class.pack_action_event_finish(
        self.context, 'fake-uuid', 'fake-event')
    # With the clock frozen, utcnow() here equals NOW.
    expected_packed_values['finish_time'] = timeutils.utcnow()
    mock_finish.return_value = fake_event
    event = instance_action.InstanceActionEvent.event_finish(
        self.context, 'fake-uuid', 'fake-event', want_result=False)
    mock_finish.assert_called_once_with(self.context,
                                        expected_packed_values)
    self.assertIsNone(event)
def setUp(self):
    """Configure mute-weigher flags and freeze 'now' for the fake cells."""
    super(MuteWeigherTestClass, self).setUp()
    self.flags(mute_weight_multiplier=-10.0, mute_child_interval=100,
               group='cells')

    self.now = timeutils.utcnow()
    timeutils.set_time_override(self.now)
    # Restore the real clock after each test; otherwise the override
    # leaks into every later test in the process.
    self.addCleanup(timeutils.clear_time_override)

    self.cells = _get_fake_cells()
    # All cells were last seen "now", i.e. none is muted yet.
    for cell in self.cells:
        cell.last_seen = self.now
def _test_host_status_unknown_only(self, func_name, *args):
    """Check host_status visibility when policy restricts it to UNKNOWN-only."""
    admin_func = getattr(self.admin_api, func_name)
    func = getattr(self.api, func_name)

    # Run the operation as admin and extract the server from the response.
    server = self._get_server(admin_func(*args))
    # We need to wait for ACTIVE if this was a post rebuild server action,
    # else a subsequent rebuild request will fail with a 409 in the API.
    self._wait_for_state_change(server, 'ACTIVE')
    # Verify admin can see the host status UP.
    self.assertEqual('UP', server['host_status'])

    # Get server as normal non-admin user.
    server = self._get_server(func(*args))
    self._wait_for_state_change(server, 'ACTIVE')
    # Verify non-admin do not receive the host_status field because it is
    # not UNKNOWN.
    self.assertNotIn('host_status', server)

    # Stop the compute service to trigger UNKNOWN host_status.
    self.compute.stop()
    # Advance time by 30 minutes so nova considers service as down.
    minutes_from_now = timeutils.utcnow() + datetime.timedelta(minutes=30)
    timeutils.set_time_override(override_time=minutes_from_now)
    self.addCleanup(timeutils.clear_time_override)

    # Run the operation as admin and extract the server from the response.
    server = self._get_server(admin_func(*args))
    # Verify admin can see the host status UNKNOWN.
    self.assertEqual('UNKNOWN', server['host_status'])

    # Now that the compute service is down, the rebuild will not ever
    # complete. But we're only interested in what would be returned from
    # the API post rebuild action, so reset the state to ACTIVE to allow
    # the next rebuild request to go through without a 409 error.
    self._set_server_state_active(server)

    # Run the operation as a normal non-admin user and extract the server
    # from the response.
    server = self._get_server(func(*args))
    # Verify non-admin can see the host status UNKNOWN too.
    self.assertEqual('UNKNOWN', server['host_status'])
    self._set_server_state_active(server)

    # Now, adjust the policy to make it so only admin are allowed to see
    # UNKNOWN host status only.
    self.policy.set_rules(
        {self.host_status_unknown_only_rule: 'rule:admin_api'},
        overwrite=False)

    # Run the operation as a normal non-admin user and extract the server
    # from the response.
    server = self._get_server(func(*args))
    # Verify non-admin do not receive the host_status field.
    self.assertNotIn('host_status', server)
    self._set_server_state_active(server)

    # Verify that admin will not receive the host_status field if the
    # API microversion < 2.16.
    with utils.temporary_mutation(self.admin_api, microversion='2.15'):
        server = self._get_server(admin_func(*args))
        self.assertNotIn('host_status', server)
def test_active_cluster_not_killed_as_inactive(self, terminate_cluster):
    """An active cluster is not cleaned up by the incomplete-cluster task."""
    self.override_config('cleanup_time_for_incomplete_clusters', 1)
    timeutils.set_time_override(datetime.datetime(2005, 2, 1, second=0))
    # Restore the real clock so the override cannot leak into other tests.
    self.addCleanup(timeutils.clear_time_override)
    self._make_cluster('1')

    timeutils.set_time_override(
        datetime.datetime(2005, 2, 1, hour=1, second=10))
    p._make_periodic_tasks().terminate_incomplete_clusters(None)
    self.assertEqual(0, terminate_cluster.call_count)
def _action_set_stubs(self, now, action_expected=True):
    """Freeze the clock and stub resource_by_refid for the action tests."""
    timeutils.set_time_override(now)
    self.addCleanup(timeutils.clear_time_override)

    if action_expected:
        dummy_action = DummyAction()
        self.m.StubOutWithMock(parser.Stack, 'resource_by_refid')
        parser.Stack.resource_by_refid(
            mox.IgnoreArg()).MultipleTimes().AndReturn(dummy_action)

    self.m.ReplayAll()
def _test_authorize(self, console_type, mock_create):
    """authorize() builds the expected DB record, token and access URL."""
    # the expires time is calculated from the current time and
    # a ttl value in the object. Fix the current time so we can
    # test expires is calculated correctly as expected
    self.addCleanup(timeutils.clear_time_override)
    timeutils.set_time_override()
    ttl = 10
    expires = timeutils.utcnow_ts() + ttl

    # What the stubbed DB layer hands back.
    db_dict = copy.deepcopy(fakes.fake_token_dict)
    db_dict['expires'] = expires
    db_dict['console_type'] = console_type
    mock_create.return_value = db_dict

    # What we expect to be passed to the DB create call.
    create_dict = copy.deepcopy(fakes.fake_token_dict)
    create_dict['expires'] = expires
    create_dict['console_type'] = console_type
    del create_dict['id']
    del create_dict['created_at']
    del create_dict['updated_at']

    # What the object should look like afterwards.
    expected = copy.deepcopy(fakes.fake_token_dict)
    del expected['token_hash']
    del expected['expires']
    expected['token'] = fakes.fake_token
    expected['console_type'] = console_type

    obj = token_obj.ConsoleAuthToken(
        context=self.context,
        console_type=console_type,
        host=fakes.fake_token_dict['host'],
        port=fakes.fake_token_dict['port'],
        internal_access_path=fakes.fake_token_dict['internal_access_path'],
        instance_uuid=fakes.fake_token_dict['instance_uuid'],
        access_url_base=fakes.fake_token_dict['access_url_base'],
    )
    with mock.patch('uuid.uuid4', return_value=fakes.fake_token):
        token = obj.authorize(ttl)

    mock_create.assert_called_once_with(self.context, create_dict)
    self.assertEqual(token, fakes.fake_token)
    self.compare_obj(obj, expected)

    url = obj.access_url
    if console_type != 'novnc':
        expected_url = '%s?token=%s' % (
            fakes.fake_token_dict['access_url_base'],
            fakes.fake_token)
    else:
        path = urlparse.urlencode({'path': '?token=%s' % fakes.fake_token})
        expected_url = '%s?%s' % (
            fakes.fake_token_dict['access_url_base'],
            path)
    self.assertEqual(expected_url, url)
def test_incomplete_cluster_killed_in_time(self, terminate_cluster):
    """An incomplete cluster past the cleanup window gets terminated."""
    self.override_config('cleanup_time_for_incomplete_clusters', 1)
    timeutils.set_time_override(datetime.datetime(2005, 2, 1, second=0))
    # Restore the real clock so the override cannot leak into other tests.
    self.addCleanup(timeutils.clear_time_override)
    self._make_cluster('1', status='Pending')

    timeutils.set_time_override(
        datetime.datetime(2005, 2, 1, hour=1, second=10))
    p._make_periodic_tasks().terminate_incomplete_clusters(None)
    self.assertEqual(terminate_cluster.call_count, 1)
    terminate_cluster.assert_has_calls([mock.call(u'1')])
def test_shelved_poll_not_timedout(self):
    """Instances shelved less than shelved_offload_time ago are not offloaded."""
    instance = self._create_fake_instance_obj()
    sys_meta = instance.system_metadata
    shelved_time = timeutils.utcnow()
    timeutils.set_time_override(shelved_time)
    # Restore the real clock so the override cannot leak into other tests.
    self.addCleanup(timeutils.clear_time_override)
    # One second short of the offload deadline.
    timeutils.advance_time_seconds(CONF.shelved_offload_time - 1)
    sys_meta['shelved_at'] = timeutils.strtime(at=shelved_time)
    db.instance_update_and_get_original(self.context, instance['uuid'],
                                        {'vm_state': vm_states.SHELVED,
                                         'system_metadata': sys_meta})

    # destroy is stubbed with no expected calls: it must NOT be invoked.
    self.mox.StubOutWithMock(self.compute.driver, 'destroy')
    self.mox.ReplayAll()
    self.compute._poll_shelved_instances(self.context)
def test_transient_cluster_killed_in_time(self, terminate_cluster,
                                          use_os_admin_auth_token):
    """A transient cluster past the kill threshold gets terminated."""
    timeutils.set_time_override(datetime.datetime(2005, 2, 1, second=0))
    # Restore the real clock so the override cannot leak into other tests.
    self.addCleanup(timeutils.clear_time_override)
    self._make_cluster('1')

    timeutils.set_time_override(datetime.datetime(2005, 2, 1, second=40))

    p._make_periodic_tasks().terminate_unneeded_transient_clusters(None)
    self.assertEqual(1, terminate_cluster.call_count)
    terminate_cluster.assert_has_calls([mock.call(u'1')])
    self.assertEqual(1, use_os_admin_auth_token.call_count)
def test_incomplete_cluster_not_killed_too_early(self, terminate_cluster):
    """An incomplete cluster still inside the cleanup window is left alone."""
    self.override_config('cleanup_time_for_incomplete_clusters', 1)
    timeutils.set_time_override(datetime.datetime(2005, 2, 1, second=0))
    # Restore the real clock so the override cannot leak into other tests.
    self.addCleanup(timeutils.clear_time_override)
    self._make_cluster('1', status='Pending')

    timeutils.set_time_override(
        datetime.datetime(2005, 2, 1, minute=59, second=50))
    p._make_periodic_tasks().terminate_incomplete_clusters(None)
    self.assertEqual(terminate_cluster.call_count, 0)
def test_scaling_not_in_progress(self):
    """Scaling is allowed once the cooldown end time has passed."""
    awhile_after = timeutils.utcnow() + datetime.timedelta(seconds=60)
    previous_meta = {
        'cooldown_end': {
            awhile_after.isoformat(): 'ChangeInCapacity : 1'
        },
        'scaling_in_progress': False
    }
    timeutils.set_time_override()
    # Use addCleanup rather than a trailing clear_time_override() call:
    # the trailing call was skipped whenever the assertion failed,
    # leaking the fake clock into later tests.
    self.addCleanup(timeutils.clear_time_override)
    timeutils.advance_time_seconds(100)
    self.patchobject(self.group, 'metadata_get',
                     return_value=previous_meta)
    self.assertIsNone(self.group._check_scaling_allowed(60))
def test_on_wait_task_expired(self):
    """_on_wait purges expired requests from the executor cache."""
    now = timeutils.utcnow()
    self.request_inst_mock.expired = True
    self.request_inst_mock.created_on = now

    timeutils.set_time_override(now)
    self.addCleanup(timeutils.clear_time_override)
    timeutils.advance_time_seconds(120)

    ex = self.executor()
    ex._requests_cache[self.task_uuid] = self.request_inst_mock
    self.assertEqual(len(ex._requests_cache), 1)
    ex._on_wait()
    self.assertEqual(len(ex._requests_cache), 0)
def test_utcnow_ts(self):
    """utcnow_ts() uses time.time() unless a time override is active."""
    skynet_self_aware_ts = 872835240
    skynet_dt = datetime.datetime.utcfromtimestamp(skynet_self_aware_ts)
    self.assertEqual(self.skynet_self_aware_time, skynet_dt)

    # NOTE(kgriffs): timeutils.utcnow_ts() uses time.time()
    # IFF time override is not set.
    with mock.patch('time.time') as time_mock:
        time_mock.return_value = skynet_self_aware_ts
        ts = timeutils.utcnow_ts()
        self.assertEqual(ts, skynet_self_aware_ts)

    timeutils.set_time_override(skynet_dt)
    # Restore the real clock so the override cannot leak into other tests.
    self.addCleanup(timeutils.clear_time_override)
    ts = timeutils.utcnow_ts()
    self.assertEqual(ts, skynet_self_aware_ts)
def test_incomplete_cluster_killed_in_time(self, terminate_cluster,
                                           use_os_admin_auth_token):
    """An incomplete cluster past the cleanup window IS terminated.

    Counterpart to the not-killed-too-early test: jumping to 1h10s
    (past the 1-unit cleanup window) must terminate the spawning
    cluster exactly once, using admin credentials.
    """
    self.override_config('cleanup_time_for_incomplete_clusters', 1)
    # Spawn the cluster at a fixed instant.
    timeutils.set_time_override(datetime.datetime(2005, 2, 1, second=0))
    self._make_cluster('1', c_u.CLUSTER_STATUS_SPAWNING)
    # 10 seconds past the cleanup threshold.
    timeutils.set_time_override(
        datetime.datetime(2005, 2, 1, hour=1, second=10))
    p._make_periodic_tasks().terminate_incomplete_clusters(None)
    self.assertEqual(1, terminate_cluster.call_count)
    terminate_cluster.assert_has_calls([mock.call(u'1')])
    self.assertEqual(1, use_os_admin_auth_token.call_count)
def setUp(self):
    """Set up an admin context, frozen clock, and stubbed DB for the
    instance usage audit log API tests."""
    super(InstanceUsageAuditLogTestV21, self).setUp()
    self.context = context.get_admin_context()
    # Freeze "now" so audit periods computed by the controller are
    # deterministic. NOTE(review): no clear_time_override — presumably
    # the base class resets it; verify.
    timeutils.set_time_override(datetime.datetime(2012, 7, 5, 10, 0, 0))
    self._set_up_controller()
    self.host_api = self.controller.host_api

    def fake_service_get_all(context, disabled):
        # The controller is expected to ask for ALL services
        # (disabled=None), not a filtered subset.
        self.assertIsNone(disabled)
        return TEST_COMPUTE_SERVICES

    # Stub out the audit-period helper and the DB accessors the
    # controller reads from.
    self.stubs.Set(utils, 'last_completed_audit_period',
                   fake_last_completed_audit_period)
    self.stubs.Set(db, 'service_get_all', fake_service_get_all)
    self.stubs.Set(db, 'task_log_get_all', fake_task_log_get_all)
    self.req = fakes.HTTPRequest.blank('')
def test_event_finish_with_failure(self, mock_finish, mock_tb):
    """event_finish_with_failure packs the expected values and returns
    the finished event when want_result=True and exc_tb is a string."""
    # Freeze time so finish_time is predictable.
    # NOTE(review): no clear_time_override — presumably the base class
    # resets it; verify.
    timeutils.set_time_override(override_time=NOW)
    test_class = instance_action.InstanceActionEvent
    # Build the packed dict the same way the code under test should.
    expected_packed_values = test_class.pack_action_event_finish(
        self.context, 'fake-uuid', 'fake-event', 'val', 'fake-tb')
    expected_packed_values['finish_time'] = timeutils.utcnow()
    mock_finish.return_value = fake_event
    event = test_class.event_finish_with_failure(
        self.context, 'fake-uuid', 'fake-event', 'val', 'fake-tb',
        want_result=True)
    mock_finish.assert_called_once_with(self.context,
                                        expected_packed_values)
    self.compare_obj(event, fake_event)
def test_event_finish_with_failure_legacy_unicode(self, mock_finish):
    # Tests that traceback.format_tb is not called when exc_tb is unicode.
    # (Python 2 only: relies on the `unicode` builtin.)
    # NOTE(review): no clear_time_override — presumably the base class
    # resets it; verify.
    timeutils.set_time_override(override_time=NOW)
    test_class = instance_action.InstanceActionEvent
    expected_packed_values = test_class.pack_action_event_finish(
        self.context, 'fake-uuid', 'fake-event', 'val',
        unicode('fake-tb'))
    expected_packed_values['finish_time'] = timeutils.utcnow()
    mock_finish.return_value = fake_event
    # A unicode traceback must be passed through unchanged.
    event = test_class.event_finish_with_failure(
        self.context, 'fake-uuid', 'fake-event', exc_val='val',
        exc_tb=unicode('fake-tb'), want_result=True)
    mock_finish.assert_called_once_with(self.context,
                                        expected_packed_values)
    self.compare_obj(event, fake_event)
def test_shelved_poll_not_timedout(self, mock_older):
    """A shelved instance inside the offload window is left alone."""
    mock_older.return_value = False
    self.flags(shelved_offload_time=1)
    # Freeze the clock at shelve time, then advance to just BEFORE the
    # offload deadline.
    freeze_time = timeutils.utcnow()
    timeutils.set_time_override(freeze_time)
    timeutils.advance_time_seconds(CONF.shelved_offload_time - 1)
    # Build a shelved instance hosted on this compute.
    instance = self._create_fake_instance_obj()
    instance.vm_state = vm_states.SHELVED
    instance.task_state = None
    instance.host = self.compute.host
    instance.system_metadata['shelved_at'] = freeze_time.isoformat()
    instance.save()
    with mock.patch.object(self.compute,
                           'shelve_offload_instance') as offload_mock:
        self.compute._poll_shelved_instances(self.context)
        # Not yet timed out: no offload, but the age check ran.
        self.assertFalse(offload_mock.called)
        self.assertTrue(mock_older.called)
def test_url_paths(self):
    """POSTing several paths yields one signed path per resource."""
    # Freeze time so the generated signature/expiry are deterministic.
    timeutils.set_time_override()
    self.addCleanup(timeutils.clear_time_override)
    payload = {
        'methods': ['GET', 'POST'],
        'paths': ['messages', 'subscriptions'],
    }
    resp = self.simulate_post(self.signed_url_prefix,
                              body=jsonutils.dumps(payload))
    self.assertEqual(falcon.HTTP_200, self.srmock.status)
    body = jsonutils.loads(resp[0])
    expected_paths = [
        '/v2/queues/shared_queue/messages',
        '/v2/queues/shared_queue/subscriptions',
    ]
    self.assertEqual(expected_paths, body['paths'])
def test_shelved_poll_timedout(self):
    """A shelved instance past the offload window gets destroyed.

    Advances overridden time past shelved_offload_time and checks that
    _poll_shelved_instances drives driver.destroy for the instance.
    """
    instance = self._create_fake_instance_obj()
    sys_meta = instance.system_metadata
    # Freeze "now" at shelve time, then step past the offload deadline.
    # NOTE(review): no clear_time_override — presumably the base class
    # resets it; verify.
    shelved_time = timeutils.utcnow()
    timeutils.set_time_override(shelved_time)
    timeutils.advance_time_seconds(CONF.shelved_offload_time + 1)
    sys_meta['shelved_at'] = timeutils.strtime(at=shelved_time)
    # Persist the shelved state directly in the DB.
    (old, instance) = db.instance_update_and_get_original(
        self.context, instance['uuid'],
        {'vm_state': vm_states.SHELVED, 'system_metadata': sys_meta})

    def fake_destroy(inst, nw_info, bdm):
        # NOTE(alaski) There are too many differences between an instance
        # as returned by instance_update_and_get_original and
        # instance_get_all_by_filters so just compare the uuid.
        self.assertEqual(instance['uuid'], inst['uuid'])

    self.stubs.Set(self.compute.driver, 'destroy', fake_destroy)
    self.compute._poll_shelved_instances(self.context)
def test_FnGetAtt_alarm_url(self, mock_get):
    """FnGetAtt('AlarmUrl') builds a correctly signed CFN signal URL.

    Time is frozen so the Timestamp and HMAC Signature query parameters
    are deterministic and can be compared exactly.
    """
    now = datetime.datetime(2012, 11, 29, 13, 49, 37)
    timeutils.set_time_override(now)
    self.addCleanup(timeutils.clear_time_override)

    # Setup
    stack_id = stack_name = 'FnGetAtt-alarm-url'
    stack = self._create_stack(TEMPLATE_CFN_SIGNAL,
                               stack_name=stack_name,
                               stack_id=stack_id)
    mock_get.return_value = 'http://server.test:8000/v1'
    rsrc = stack['signal_handler']
    self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)

    # Test
    url = rsrc.FnGetAtt('AlarmUrl')

    # Verify
    # url parameters come in unexpected order, so the conversion has to be
    # done for comparison
    expected_url_path = "".join([
        'http://server.test:8000/v1/signal/',
        'arn%3Aopenstack%3Aheat%3A%3Atest_tenant%3Astacks/',
        'FnGetAtt-alarm-url/FnGetAtt-alarm-url/resources/',
        'signal_handler'])
    expected_url_params = {
        'Timestamp': ['2012-11-29T13:49:37Z'],
        'SignatureMethod': ['HmacSHA256'],
        'AWSAccessKeyId': ['4567'],
        'SignatureVersion': ['2'],
        'Signature': ['JWGilkQ4gHS+Y4+zhL41xSAC7+cUCwDsaIxq9xPYPKE=']}
    url_path, url_params = url.split('?', 1)
    # parse_qs makes the comparison order-independent.
    url_params = urlparse.parse_qs(url_params)
    self.assertEqual(expected_url_path, url_path)
    self.assertEqual(expected_url_params, url_params)
    mock_get.assert_called_once_with()
def test_event_finish_with_failure_legacy(self, mock_finish, mock_tb):
    # Tests that exc_tb is serialized when it's not a string type.
    mock_tb.return_value = 'fake-tb'
    # Freeze time so finish_time is predictable.
    # NOTE(review): no clear_time_override — presumably the base class
    # resets it; verify.
    timeutils.set_time_override(override_time=NOW)
    test_class = instance_action.InstanceActionEvent
    expected_packed_values = test_class.pack_action_event_finish(
        self.context, 'fake-uuid', 'fake-event', 'val', 'fake-tb')
    expected_packed_values['finish_time'] = timeutils.utcnow()
    mock_finish.return_value = fake_event
    # Pass a non-string traceback; it must go through the (mocked)
    # formatter exactly once.
    fake_tb = mock.sentinel.fake_tb
    event = test_class.event_finish_with_failure(
        self.context, 'fake-uuid', 'fake-event', exc_val='val',
        exc_tb=fake_tb, want_result=True)
    mock_finish.assert_called_once_with(self.context,
                                        expected_packed_values)
    self.compare_obj(event, fake_event)
    mock_tb.assert_called_once_with(fake_tb)
def test_url_generation(self):
    """A signed-URL POST returns all expected fields with a 1-day expiry."""
    # Freeze time so the computed expiry matches the response exactly.
    timeutils.set_time_override()
    self.addCleanup(timeutils.clear_time_override)
    data = {'methods': ['GET', 'POST']}
    response = self.simulate_post(self.signed_url_prefix,
                                  body=jsonutils.dumps(data))
    self.assertEqual(falcon.HTTP_200, self.srmock.status)
    content = jsonutils.loads(response[0])
    # Default expiry is one day from (frozen, tz-aware) now.
    expires = timeutils.utcnow(True) + datetime.timedelta(days=1)
    expires_str = expires.strftime(urls._DATE_FORMAT)
    for field in ['signature', 'project', 'methods', 'paths', 'expires']:
        self.assertIn(field, content)
    self.assertEqual(expires_str, content['expires'])
    self.assertEqual(data['methods'], content['methods'])
    self.assertEqual(['/v2/queues/shared_queue/messages'],
                     content['paths'])
def test_event_finish_with_failure_no_result(self, mock_finish, mock_tb):
    # Tests that traceback.format_tb is not called when exc_tb is a str
    # and want_result is False, so no event should come back.
    mock_tb.return_value = 'fake-tb'
    # Freeze time so finish_time is predictable.
    # NOTE(review): no clear_time_override — presumably the base class
    # resets it; verify.
    timeutils.set_time_override(override_time=NOW)
    test_class = instance_action.InstanceActionEvent
    expected_packed_values = test_class.pack_action_event_finish(
        self.context, 'fake-uuid', 'fake-event', 'val', 'fake-tb')
    expected_packed_values['finish_time'] = timeutils.utcnow()
    mock_finish.return_value = fake_event
    event = test_class.event_finish_with_failure(
        self.context, 'fake-uuid', 'fake-event', 'val', 'fake-tb',
        want_result=False)
    mock_finish.assert_called_once_with(self.context,
                                        expected_packed_values)
    # want_result=False: nothing is returned and the traceback
    # formatter is never invoked for a str traceback.
    self.assertIsNone(event)
    self.assertFalse(mock_tb.called)
def test_create(self):
    """message_api.create stores a record expiring message_ttl from now."""
    CONF.set_override('message_ttl', 300)
    # Freeze the clock so expires_at can be computed exactly.
    timeutils.set_time_override()
    self.addCleanup(timeutils.clear_time_override)
    ttl = datetime.timedelta(seconds=300)
    expected_expires_at = timeutils.utcnow() + ttl
    expected_message_record = {
        'project_id': 'fakeproject',
        'request_id': 'fakerequestid',
        'resource_type': 'fake_resource_type',
        'resource_uuid': None,
        'event_id': defined_messages.UNABLE_TO_ALLOCATE,
        'message_level': 'ERROR',
        'expires_at': expected_expires_at,
    }
    self.message_api.create(self.ctxt,
                            defined_messages.UNABLE_TO_ALLOCATE,
                            "fakeproject",
                            resource_type="fake_resource_type")
    # The DB layer must receive exactly the record built above.
    self.message_api.db.message_create.assert_called_once_with(
        self.ctxt, expected_message_record)
def setUp(self):
    """Boot three servers at a frozen start time, then move the clock
    one hour forward and prepare the paginated usage query."""
    super(SimpleTenantUsageV240Test, self).setUp()
    self.api.microversion = self.microversion
    started = timeutils.utcnow()
    now = started + datetime.timedelta(hours=1)
    # All three servers are created at the same frozen instant.
    # NOTE(review): no clear_time_override — presumably the base class
    # resets it; verify.
    timeutils.set_time_override(started)
    with mock.patch('oslo_utils.uuidutils.generate_uuid') as mock_uuids:
        # make uuids incrementing, so that sort order is deterministic
        uuid_format = '1f1deceb-17b5-4c04-84c7-e0d4499c8f%02d'
        mock_uuids.side_effect = [uuid_format % x for x in range(100)]
        self.instance1_uuid = self._post_server(name='instance-1')
        self.instance2_uuid = self._post_server(name='instance-2')
        self.instance3_uuid = self._post_server(name='instance-3')
    # Jump one hour ahead so the usage window [started, now] has data.
    timeutils.set_time_override(now)
    # limit/marker exercise pagination starting after instance 1.
    self.query = {
        'start': str(started),
        'end': str(now),
        'limit': '1',
        'marker': self.instance1_uuid,
    }
def test_shelved_poll_timedout(self):
    """A shelved instance past the offload window is offloaded."""
    self.flags(shelved_offload_time=1)
    # Freeze the clock at shelve time, then step well past the deadline.
    freeze_time = timeutils.utcnow()
    timeutils.set_time_override(freeze_time)
    timeutils.advance_time_seconds(CONF.shelved_offload_time + 10)
    # Build a shelved instance hosted on this compute.
    instance = self._create_fake_instance_obj()
    instance.vm_state = vm_states.SHELVED
    instance.task_state = None
    instance.host = self.compute.host
    instance.system_metadata['shelved_at'] = freeze_time.isoformat()
    instance.save()

    offloaded_uuids = []

    def record_offload(context, instance, **kwargs):
        # Capture which instance the poller tried to offload.
        offloaded_uuids.append(instance.uuid)

    with mock.patch.object(self.compute,
                           'shelve_offload_instance') as offload_mock:
        offload_mock.side_effect = record_offload
        self.compute._poll_shelved_instances(self.context)
        self.assertTrue(offload_mock.called)
        self.assertEqual(instance.uuid, offloaded_uuids[0])
def setUp(self):
    """Configure notification flags, a frozen clock, and one test
    instance for the notifications test case."""
    super(NotificationsTestCase, self).setUp()
    self.fixture = self.useFixture(o_fixture.ClearRequestContext())
    self.net_info = fake_network.fake_get_instance_nw_info(self)
    self.notifier = self.useFixture(fixtures.NotificationFixture(self))
    self.flags(host='testhost')
    # Emit notifications on both VM-state and task-state transitions.
    self.flags(notify_on_state_change="vm_and_task_state",
               group='notifications')
    self.flags(api_servers=['http://localhost:9292'], group='glance')
    self.user_id = 'fake'
    self.project_id = 'fake'
    self.context = context.RequestContext(self.user_id, self.project_id)
    # Freeze time so notification timestamps are deterministic.
    # NOTE(review): no clear_time_override — presumably the base class
    # resets it; verify.
    self.fake_time = datetime.datetime(2017, 2, 2, 16, 45, 0)
    timeutils.set_time_override(self.fake_time)
    self.instance = self._wrapped_create()
    # Flag flipped by decorated-function tests to prove the wrapper ran.
    self.decorated_function_called = False