def test_dont_enable_lan_channel_if_already_enabled(self): """Test that Lan_Channel doesn't get enabled if disabled.""" # Mock the response of the BMC response = ("Section Lan_Channel\n" " Volatile_Access_Mode Always_Available\n" " Non_Volatile_Access_Mode Always_Available\n" "EndSection") self.patch(maas_ipmi_autodetect, 'bmc_get').return_value = response # Mock the function 'bmc_set' bmc_set_mock = self.patch(maas_ipmi_autodetect, 'bmc_set') # Call the function set_ipmi_lan_channel_settings() # Check that the 'bmc_set' mock function (bmc_set_mock) was not called. self.assertThat(bmc_set_mock, MockNotCalled())
def test_on_listen_for_dismissal_for_other_user_does_nothing(self):
    """A dismissal naming a different user yields None and does not
    delegate to the parent handler's on_listen."""
    super_on_listen = self.patch(Handler, "on_listen")
    super_on_listen.return_value = sentinel.on_listen
    user = factory.make_User()
    handler = NotificationHandler(user, {}, None)
    notification = factory.make_Notification(user=user)
    # A dismissal notification from the database FOR ANOTHER USER.
    # Format is "<notification-id>:<user-id>"; the random user id is
    # (almost certainly) not this handler's user.
    dismissal = "%d:%d" % (notification.id, random.randrange(1, 99999))
    self.assertThat(
        handler.on_listen(
            "notificationdismissal", sentinel.action, dismissal),
        Is(None),
    )
    # The parent class's on_listen was never consulted.
    self.assertThat(super_on_listen, MockNotCalled())
def test__doesnt_update_bios_boot_method_when_same(self):
    """get_config() must not call Node.save() when the reported BIOS
    boot method matches what the node already records."""
    rack = factory.make_RackController()
    ip_local = factory.make_ip_address()
    ip_remote = factory.make_ip_address()
    # The node already records 'uefi' as its BIOS boot method.
    node = self.make_node(bios_boot_method='uefi')
    boot_nic = node.get_boot_interface()
    node.boot_interface = boot_nic
    node.boot_cluster_ip = ip_local
    node.save()
    # Patch save *after* the setup save above, so only calls made by
    # get_config() are counted.
    save_mock = self.patch(Node, 'save')
    get_config(
        rack.system_id, ip_local, ip_remote,
        mac=boot_nic.mac_address, bios_boot_method="uefi")
    self.assertThat(save_mock, MockNotCalled())
def test_open_for_update_uses_backend_as_context_manager(self):
    """open_for_update() delegates to the backend's own open_for_update()
    context manager and exits it when the outer context exits."""
    config_file = self.make_file()
    backend = self.patch(ExampleConfiguration, "backend")
    with ExampleConfiguration.open_for_update(config_file) as config:
        # The backend was opened using open_for_update() too.
        self.assertThat(
            backend.open_for_update, MockCalledOnceWith(config_file))
        # The object returned from backend.open_for_update() has been used
        # as the context manager, providing `config`.
        backend_ctx = backend.open_for_update.return_value
        self.assertThat(
            config.store, Is(backend_ctx.__enter__.return_value))
        # We're within the context, as expected.
        self.assertThat(backend_ctx.__exit__, MockNotCalled())
    # The backend context has also been exited (cleanly: no exception
    # info was passed to __exit__).
    self.assertThat(
        backend_ctx.__exit__, MockCalledOnceWith(None, None, None))
def test_logs_when_sending_event_errors(self):
    """If sending the TFTP node event raises, the failure is logged
    (with traceback) instead of propagating."""
    send_event = self.patch(tftp_module, "send_node_event_ip_address")
    send_event.side_effect = factory.make_exception()
    clock = Clock()
    log_request(sentinel.filename, clock)
    # The event send is deferred; nothing has happened yet.
    self.assertThat(send_event, MockNotCalled())
    with TwistedLoggerFixture() as logger:
        # Advancing the fake reactor runs the deferred send, which fails.
        clock.advance(0.0)
    self.assertDocTestMatches(
        """\
        Logging TFTP request failed.
        Traceback (most recent call last):
        ...
        maastesting.factory.TestException#...
        """,
        logger.output,
    )
def test__power_off_already_off(self):
    """power_off() still sets PXE boot but skips the power call when the
    queried power state is already "off"."""
    driver = RedfishPowerDriver()
    context = make_context()
    url = driver.get_url(context)
    headers = driver.make_auth_headers(**context)
    node_id = b"1"
    mock_redfish_request = self.patch(driver, "redfish_request")
    mock_redfish_request.return_value = (SAMPLE_JSON_SYSTEMS, None)
    mock_set_pxe_boot = self.patch(driver, "set_pxe_boot")
    # Report the machine as already powered off.
    mock_power_query = self.patch(driver, "power_query")
    mock_power_query.return_value = "off"
    mock_power = self.patch(driver, "power")
    yield driver.power_off(node_id, context)
    # PXE boot is always (re)configured...
    self.assertThat(
        mock_set_pxe_boot, MockCalledOnceWith(url, node_id, headers))
    # ...but no power action is issued for an already-off machine.
    self.assertThat(mock_power, MockNotCalled())
def test_update_authorisation_token_name_not_found(self):
    """Renaming a token that does not exist for the logged-in user
    returns 404 Not Found and records no audit event."""
    audit_event_mock = self.patch(account_module, "create_audit_event")
    payload = {
        "op": "update_token_name",
        "token": "no-such-token",
        "name": "test_name",
    }
    response = self.client.post(reverse("account_handler"), payload)
    self.assertEqual(http.client.NOT_FOUND, response.status_code)
    self.assertThat(audit_event_mock, MockNotCalled())
def test_status_erasure_failure_does_not_populate_tags(self):
    """A curtin 'finish'/FAILURE message during disk erasing fails the
    node without triggering tag population."""
    populate_tags_for_single_node = self.patch(
        api, "populate_tags_for_single_node")
    node = factory.make_Node(
        interface=True, status=NODE_STATUS.DISK_ERASING)
    payload = {
        'event_type': 'finish',
        'result': 'FAILURE',
        'origin': 'curtin',
        'name': 'cmd-erase',
        'description': 'Erasing disk',
        'timestamp': datetime.utcnow(),
    }
    self.processMessage(node, payload)
    # The node transitions to FAILED_DISK_ERASING...
    self.assertEqual(
        NODE_STATUS.FAILED_DISK_ERASING, reload_object(node).status)
    # ...and tag population was never kicked off.
    self.assertThat(populate_tags_for_single_node, MockNotCalled())
def test_ignores_generate_directives_for_v6_dynamic_ranges(self):
    """Writing a reverse zone config does not emit $GENERATE directives
    for IPv6 dynamic ranges."""
    patch_dns_config_path(self)
    domain = factory.make_string()
    network = IPNetwork("192.168.0.1/22")
    # The dynamic range is IPv6 (a /64), unlike the zone's IPv4 network.
    dynamic_network = IPNetwork("%s/64" % factory.make_ipv6_address())
    dns_zone_config = DNSReverseZoneConfig(
        domain,
        serial=random.randint(1, 100),
        network=network,
        dynamic_ranges=[
            IPRange(dynamic_network.first, dynamic_network.last)
        ],
    )
    get_generate_directives = self.patch(
        dns_zone_config, "get_GENERATE_directives")
    dns_zone_config.write_config()
    # No $GENERATE directives were requested for the v6 range.
    self.assertThat(get_generate_directives, MockNotCalled())
def test_queueMessages_handled_invalid_nodekey_with_instant_msg(self):
    """A message arriving with a token key that no longer exists is
    dropped: _processMessage is never invoked."""
    worker = StatusWorkerService(sentinel.dbtasks)
    mock_processMessage = self.patch(worker, "_processMessage")
    contents = b'These are the contents of the file.'
    encoded_content = encode_as_base64(bz2.compress(contents))
    message = self.make_message()
    # Attach a file so this counts as an "instant" message.
    message['files'] = [{
        "path": "sample.txt",
        "encoding": "uuencode",
        "compression": "bzip2",
        "content": encoded_content
    }]
    nodes_with_tokens = yield deferToDatabase(self.make_nodes_with_tokens)
    node, token = nodes_with_tokens[0]
    # Delete the token so token.key below no longer maps to a node.
    yield deferToDatabase(token.delete)
    yield worker.queueMessage(token.key, message)
    self.assertThat(mock_processMessage, MockNotCalled())
def test__skips_those_that_have_not_expired(self):
    """Nodes whose status_expires lies in the future are left untouched
    and nothing is logged."""
    maaslog = self.patch(status_monitor.maaslog, 'info')
    self.useFixture(SignalsDisabled("power"))
    current_time = now()
    # Expiry is one minute in the future, so no node should be failed.
    expired_time = current_time + timedelta(minutes=1)
    nodes = [
        factory.make_Node(status=status, status_expires=expired_time)
        for status in NODE_FAILURE_MONITORED_STATUS_TRANSITIONS.keys()
    ]
    mark_nodes_failed_after_expiring(current_time, 20)
    failed_statuses = [
        reload_object(node).status for node in nodes
    ]
    # Every node keeps its original (monitored) status...
    self.assertItemsEqual(
        NODE_FAILURE_MONITORED_STATUS_TRANSITIONS.keys(),
        failed_statuses)
    # ...and maaslog.info was never called.
    self.assertThat(maaslog, MockNotCalled())
def test_DELETE_force_not_required_for_pod_region_rack(self):
    """Deleting a region+rack controller that hosts a pod succeeds with
    force=true without asynchronously deleting the pod."""
    self.become_admin()
    vlan = factory.make_VLAN()
    factory.make_Subnet(vlan=vlan)
    rack = factory.make_RegionRackController(vlan=vlan)
    # Attach a pod to the controller via one of its IP addresses.
    ip = factory.make_StaticIPAddress(
        interface=rack.interface_set.first())
    factory.make_Pod(ip_address=ip)
    mock_async_delete = self.patch(Pod, "async_delete")
    response = self.client.delete(
        self.get_rack_uri(rack),
        QUERY_STRING=urlencode({
            'force': 'true'
        }, doseq=True))
    self.assertEqual(
        http.client.NO_CONTENT, response.status_code,
        explain_unexpected_response(http.client.NO_CONTENT, response))
    # The pod itself is not deleted as part of the controller delete.
    self.assertThat(mock_async_delete, MockNotCalled())
def test_mark_nodes_failed_after_missing_timeout_heartbeat(self):
    """A node whose script set has not pinged within node_timeout is
    failed, its pending scripts time out, and it is stopped unless SSH
    is enabled.

    Uses ``assertEqual`` rather than the deprecated ``assertEquals``
    alias.
    """
    node, script_set = self.make_node()
    current_time = now()
    node_timeout = Config.objects.get_config("node_timeout")
    # Last heartbeat is just past the allowed window.
    script_set.last_ping = current_time - timedelta(
        minutes=(node_timeout + 1)
    )
    script_set.save()
    script_results = [
        factory.make_ScriptResult(
            script_set=script_set, status=SCRIPT_STATUS.PENDING
        )
        for _ in range(3)
    ]
    mark_nodes_failed_after_missing_script_timeout(
        current_time, node_timeout
    )
    node = reload_object(node)
    self.assertEqual(self.failed_status, node.status)
    self.assertEqual(
        "Node has not been heard from for the last %s minutes"
        % node_timeout,
        node.error_description,
    )
    self.assertIn(
        call(
            "%s: Has not been heard from for the last %s minutes"
            % (node.hostname, node_timeout)
        ),
        self.maaslog.call_args_list,
    )
    if node.enable_ssh:
        # SSH-enabled nodes are left running.
        self.assertThat(self.mock_stop, MockNotCalled())
    else:
        self.assertThat(self.mock_stop, MockCalledOnce())
        self.assertIn(
            call("%s: Stopped because SSH is disabled" % node.hostname),
            self.maaslog.call_args_list,
        )
    # All pending script results are marked as timed out.
    for script_result in script_results:
        self.assertEqual(
            SCRIPT_STATUS.TIMEDOUT, reload_object(script_result).status
        )
def test_wait_raises_exception_when_time_has_run_out(self):
    """wait() retries the lock until the timeout elapses, then raises
    NotAvailable without ever unlocking."""
    clock = self.patch(internet, "reactor", Clock())
    sleep = self.patch(fs_module, "sleep")
    # Advancing the fake clock stands in for actually sleeping.
    sleep.side_effect = clock.advance
    lock = self.make_lock()
    do_lock = self.patch(lock._fslock, "lock")
    do_unlock = self.patch(lock._fslock, "unlock")
    # The lock can never be acquired.
    do_lock.return_value = False
    with ExpectedException(self.locktype.NotAvailable):
        with lock.wait(0.2):
            pass
    # 0.2s budget: three lock attempts with two 0.1s sleeps in between.
    self.assertThat(do_lock, MockCallsMatch(call(), call(), call()))
    self.assertThat(sleep, MockCallsMatch(call(0.1), call(0.1)))
    # The lock was never taken, so it must never be released.
    self.assertThat(do_unlock, MockNotCalled())
def test_rejects_file_mode_with_high_bits_set(self):
    """A mode with bits above the permission bits (>= 0o1000) makes the
    script exit non-zero with a usage error, writing nothing."""
    filename = random.choice(list(self.script.WRITABLE_FILES))
    mode = random.randint(0o1000, 0o7777)  # Inclusive of endpoints.
    args = self.script.arg_parser.parse_args([filename, oct(mode)])
    with CaptureStandardIO() as stdio:
        error = self.assertRaises(
            SystemExit, self.script.main, args, io.BytesIO()
        )
    self.assertThat(error.code, GreaterThan(0))
    # No write is attempted for an invalid mode.
    self.assertThat(self.script.atomic_write, MockNotCalled())
    # Nothing on stdout; the usage error goes to stderr.
    self.assertThat(stdio.getOutput(), Equals(""))
    self.assertThat(
        stdio.getError(),
        DocTestMatches(
            "usage: ... Given file mode 0o... is not permitted; "
            "only permission bits may be set."
        ),
    )
def test_rejects_file_name_not_on_allowed_list(self):
    """A filename outside the allowed list makes the script exit
    non-zero with a usage error, writing nothing."""
    filename = factory.make_name("/some/where", sep="/")
    mode = random.randint(0o000, 0o777)  # Inclusive of endpoints.
    args = self.script.arg_parser.parse_args([filename, oct(mode)])
    with CaptureStandardIO() as stdio:
        error = self.assertRaises(
            SystemExit, self.script.main, args, io.BytesIO()
        )
    self.assertThat(error.code, GreaterThan(0))
    # No write is attempted for a disallowed filename.
    self.assertThat(self.script.atomic_write, MockNotCalled())
    # Nothing on stdout; the usage error goes to stderr.
    self.assertThat(stdio.getOutput(), Equals(""))
    self.assertThat(
        stdio.getError(),
        DocTestMatches(
            "usage: ... Given filename ... is not in the "
            "allowed list. Choose from: ..."
        ),
    )
def test_pod_DELETE_delete_without_force(self):
    """Deleting a rack controller that hosts a pod and provides DHCP
    without force is rejected with 400, and the pod is untouched."""
    self.become_admin()
    test_vlan = factory.make_VLAN()
    factory.make_Subnet(vlan=test_vlan)
    controller = factory.make_RackController(vlan=test_vlan)
    static_ip = factory.make_StaticIPAddress(
        interface=controller.interface_set.first())
    factory.make_Pod(ip_address=static_ip)
    # Make the controller the VLAN's DHCP provider.
    test_vlan.dhcp_on = True
    test_vlan.primary_rack = controller
    test_vlan.save()
    delete_mock = self.patch(Pod, "async_delete")
    response = self.client.delete(self.get_rack_uri(controller))
    expected = http.client.BAD_REQUEST
    self.assertEqual(
        expected,
        response.status_code,
        explain_unexpected_response(expected, response),
    )
    self.assertThat(delete_mock, MockNotCalled())
def test_redfish_request_raises_error_on_response_code_above_400(self):
    """An HTTP 400 response makes redfish_request raise PowerActionError
    without reading the response body."""
    driver = RedfishPowerDriver()
    context = make_context()
    url = driver.get_url(context)
    uri = join(url, b"redfish/v1/Systems")
    headers = driver.make_auth_headers(**context)
    mock_agent = self.patch(redfish_module, "Agent")
    mock_agent.return_value.request = Mock()
    # Fake a response object carrying a BAD_REQUEST status code.
    expected_headers = Mock()
    expected_headers.code = HTTPStatus.BAD_REQUEST
    expected_headers.headers = "Testing Headers"
    mock_agent.return_value.request.return_value = succeed(
        expected_headers)
    mock_readBody = self.patch(redfish_module, "readBody")
    with ExpectedException(PowerActionError):
        yield driver.redfish_request(b"GET", uri, headers)
    # The body is never consumed when the status indicates an error.
    self.assertThat(mock_readBody, MockNotCalled())
def test__sends_multicast_beacons_by_default(self):
    """By default the command sends multicast beacons on all test
    interfaces; the single-beacon path is not used."""
    self.run_command()
    # The protocol is created listening on all addresses ("::") with an
    # ephemeral port, processing incoming packets.
    self.assertThat(
        self.protocol_mock,
        MockCalledOnceWith(
            ANY,
            debug=True,
            interface="::",
            port=0,
            process_incoming=True,
            interfaces=TEST_INTERFACES,
        ),
    )
    # send_beacon is not used by default...
    self.assertThat(self.fake_protocol.send_beacon, MockNotCalled())
    # ...multicast beacons go out on the test interfaces instead.
    self.assertThat(
        self.fake_protocol.send_multicast_beacons,
        MockCalledOnceWith(TEST_INTERFACES, verbose=False),
    )
def test_is_silent_and_does_nothing_when_region_is_not_available(self):
    """With no region connection, _tryUpdate logs nothing and never
    reaches the NTP service's own _tryUpdate."""
    # Patch the logger in the clusterservice so no log messages are printed
    # because the tests run in debug mode.
    self.patch(common.log, "debug")
    self.useFixture(MAASRootFixture())
    ntp = external.RackNTP()
    # StubClusterClientService stands in for a region that is not
    # available.
    service = make_startable_RackExternalService(
        self, StubClusterClientService(), reactor, [("NTP", ntp)])
    self.patch_autospec(ntp, "_tryUpdate")
    yield service.startService()
    self.addCleanup((yield service.stopService))
    with TwistedLoggerFixture() as logger:
        yield service._tryUpdate()
    # Nothing was logged and the NTP update was never attempted.
    self.assertThat(logger.output, Equals(""))
    self.assertThat(ntp._tryUpdate, MockNotCalled())
def test__disable_disables_all_signals(self):
    """disable() disconnects every registered signal without connecting
    any of them."""
    manager = SignalsManager()
    signals = [self.make_Signal(), self.make_Signal()]
    for signal in signals:
        manager.add(signal)
    manager.disable()
    # For each signal: connect was never called, and disconnect was
    # called exactly once with no arguments.
    self.assertThat(
        signals,
        AllMatch(
            MatchesAll(
                AfterPreprocessing(
                    (lambda signal: signal.connect),
                    MockNotCalled()),
                AfterPreprocessing(
                    (lambda signal: signal.disconnect),
                    MockCalledOnceWith(),
                ),
            )),
    )
def test_POST_create_returns_machine_with_matching_power_parameters(self):
    """Enlisting with power parameters that match an existing NEW
    machine's BMC returns that machine instead of creating a new one."""
    mock_create_machine = self.patch(machines_module, "create_machine")
    hostname = factory.make_name("hostname")
    architecture = make_usable_architecture(self)
    power_type = "ipmi"
    power_parameters = {
        "power_address": factory.make_ip_address(),
        "power_user": factory.make_name("power-user"),
        "power_pass": factory.make_name("power-pass"),
        "power_driver": "LAN_2_0",
        "mac_address": "",
        "power_boot_type": "auto",
    }
    # An existing NEW machine already holds these power parameters.
    machine = factory.make_Machine(
        hostname=hostname,
        status=NODE_STATUS.NEW,
        architecture="",
        power_type=power_type,
        power_parameters=power_parameters,
    )
    # Simulate creating the MAAS IPMI user: the enlisting request sends
    # different credentials than the ones stored on the BMC.
    power_parameters["power_user"] = "******"
    power_parameters["power_pass"] = factory.make_name("power-pass")
    response = self.client.post(
        reverse("machines_handler"),
        {
            "hostname": "maas-enlistment",
            "architecture": architecture,
            "power_type": power_type,
            "mac_addresses": factory.make_mac_address(),
            "power_parameters": json.dumps(power_parameters),
        },
    )
    self.assertEqual(http.client.OK, response.status_code)
    machine = reload_object(machine)
    # The existing machine keeps its identity...
    self.assertEqual(hostname, machine.hostname)
    self.assertEqual(architecture, machine.architecture)
    self.assertDictContainsSubset(
        machine.bmc.power_parameters, power_parameters
    )
    # ...and no new machine was created.
    self.assertThat(mock_create_machine, MockNotCalled())
    self.assertEqual(
        machine.system_id, json_load_bytes(response.content)["system_id"]
    )
def test_registers_and_unregisters_listener(self):
    """startService registers the 'config' listener; stopService
    unregisters it."""
    mock_listener = Mock()
    register = mock_listener.register = Mock()
    unregister = mock_listener.unregister = Mock()
    clock = Clock()
    run = self.patch(ActiveDiscoveryService, "run")
    service = ActiveDiscoveryService(clock, mock_listener)
    # Make sure the service doesn't actually do anything.
    service.startService()
    # The timer service is wired to the patched run at CHECK_INTERVAL.
    self.assertThat(service, MatchesStructure.byEquality(
        call=(run, (), {}),
        step=active_discovery.CHECK_INTERVAL,
        clock=clock))
    self.assertThat(register, MockCalledOnceWith(
        'config', service.refreshDiscoveryConfig))
    # Not unregistered until the service actually stops.
    self.assertThat(unregister, MockNotCalled())
    service.stopService()
    self.assertThat(unregister, MockCalledOnceWith(
        'config', service.refreshDiscoveryConfig))
def test_sends_event_later(self):
    """log_request defers sending the node event to the reactor instead
    of sending it synchronously."""
    send_event = self.patch(tftp_module, "send_node_event_ip_address")
    ip = factory.make_ip_address()
    self.patch(tftp_module.tftp, "get_remote_address").return_value = (
        ip,
        sentinel.port,
    )
    clock = Clock()
    log_request(sentinel.filename, clock)
    # Not sent yet...
    self.assertThat(send_event, MockNotCalled())
    # ...but sent once the (fake) reactor gets a chance to run.
    clock.advance(0.0)
    self.assertThat(
        send_event,
        MockCalledOnceWith(
            ip_address=ip,
            description=sentinel.filename,
            event_type=EVENT_TYPES.NODE_TFTP_REQUEST,
        ),
    )
def test_returns_false_when_starting_test_fails(self):
    """execute_smartctl returns False and never waits for a selftest
    that failed to start."""
    self.mock_check_smart_support.return_value = (None, [])
    # Starting the selftest fails with either a timeout or a non-zero
    # exit from smartctl.
    self.mock_run_smartctl_selftest.side_effect = random.choice(
        [
            TimeoutExpired("smartctl", 60),
            CalledProcessError(42, "smartctl"),
        ]
    )
    self.assertFalse(
        smartctl.execute_smartctl(self.blockdevice, self.test)
    )
    self.assertThat(
        self.mock_run_smartctl_selftest,
        MockCalledOnceWith(self.blockdevice, self.test),
    )
    # No point waiting for a selftest that never started.
    self.assertThat(self.mock_wait_smartctl_selftest, MockNotCalled())
    self.assertThat(
        self.mock_check_smartctl, MockCalledOnceWith(self.blockdevice)
    )
def test__populates_if_tag_is_defined(self):
    """_populate_nodes_later() schedules population of a defined tag via
    post_commit_do."""
    post_commit_do = self.patch(tag_module, "post_commit_do")
    tag = Tag(name=factory.make_name("tag"), definition="//foo")
    # populate=False keeps save() itself from scheduling anything.
    tag.save(populate=False)
    self.assertTrue(tag.is_defined)
    self.assertThat(post_commit_do, MockNotCalled())
    tag._populate_nodes_later()
    # Population is deferred: callLater(0) -> deferToDatabase ->
    # populate_tags(tag), all scheduled after commit.
    self.assertThat(
        post_commit_do,
        MockCalledOnceWith(
            reactor.callLater,
            0,
            deferToDatabase,
            populate_tags.populate_tags,
            tag,
        ),
    )
def test_probe_and_enlist_recs_probes_and_enlists_no_commission(self):
    """Probing a RECS box creates the discovered node but does not
    commission it."""
    user = factory.make_name('user')
    ip, port, username, password, node_id, context = self.make_context()
    domain = factory.make_name('domain')
    macs = [factory.make_mac_address() for _ in range(3)]
    mock_get_nodes = self.patch(RECSAPI, "get_nodes")
    # One node with an 'arm' architecture reported by the RECS API.
    mock_get_nodes.return_value = {node_id: {'macs': macs, 'arch': 'arm'}}
    self.patch(RECSAPI, "set_boot_source")
    mock_create_node = self.patch(recs_module, "create_node")
    mock_create_node.side_effect = asynchronous(lambda *args: node_id)
    mock_commission_node = self.patch(recs_module, "commission_node")
    yield deferToThread(
        probe_and_enlist_recs, user, ip, int(port),
        username, password, False, domain)
    # The reported 'arm' arch maps to 'armhf' for node creation.
    self.expectThat(
        mock_create_node,
        MockCalledOnceWith(macs, 'armhf', 'recs_box', context, domain))
    # Commissioning was never started.
    self.expectThat(mock_commission_node, MockNotCalled())
def test_mark_nodes_failed_after_builtin_commiss_script_overrun(self):
    """A RUNNING builtin commissioning script past its timeout fails the
    node; other script results keep or gain the right statuses.

    Uses ``assertEqual`` rather than the deprecated ``assertEquals``
    alias, and avoids naming a local ``now`` (which would shadow the
    ``now()`` helper used elsewhere in this module).
    """
    user = factory.make_admin()
    node = factory.make_Node(status=NODE_STATUS.COMMISSIONING, owner=user)
    script_set = ScriptSet.objects.create_commissioning_script_set(node)
    node.current_commissioning_script_set = script_set
    node.save()
    current_time = datetime.now()
    # The heartbeat itself is fresh; only the running script overruns.
    script_set.last_ping = current_time
    script_set.save()
    pending_script_results = list(script_set.scriptresult_set.all())
    passed_script_result = pending_script_results.pop()
    passed_script_result.status = SCRIPT_STATUS.PASSED
    passed_script_result.save()
    failed_script_result = pending_script_results.pop()
    failed_script_result.status = SCRIPT_STATUS.FAILED
    failed_script_result.save()
    # This one has been running for 10 minutes — past its timeout.
    running_script_result = pending_script_results.pop()
    running_script_result.status = SCRIPT_STATUS.RUNNING
    running_script_result.started = current_time - timedelta(minutes=10)
    running_script_result.save()
    mark_nodes_failed_after_missing_script_timeout()
    node = reload_object(node)
    self.assertEqual(NODE_STATUS.FAILED_COMMISSIONING, node.status)
    self.assertEqual(
        "%s has run past it's timeout(%s)"
        % (running_script_result.name,
           str(NODE_INFO_SCRIPTS[running_script_result.name]['timeout'])),
        node.error_description)
    if node.enable_ssh:
        # SSH-enabled nodes are left running.
        self.assertThat(self.mock_stop, MockNotCalled())
    else:
        self.assertThat(self.mock_stop, MockCalledOnce())
    self.assertEqual(
        SCRIPT_STATUS.PASSED,
        reload_object(passed_script_result).status)
    self.assertEqual(
        SCRIPT_STATUS.FAILED,
        reload_object(failed_script_result).status)
    self.assertEqual(
        SCRIPT_STATUS.TIMEDOUT,
        reload_object(running_script_result).status)
    # Remaining pending scripts are aborted along with the node.
    for script_result in pending_script_results:
        self.assertEqual(
            SCRIPT_STATUS.ABORTED,
            reload_object(script_result).status)
def test_run_scripts_from_metadata_doesnt_run_tests_on_commiss_fail(self):
    """When commissioning scripts fail, only the commissioning set is
    run and a FAILED status is reported instead of a signal."""
    scripts_dir = self.useFixture(TempDirectory()).path
    fail_count = random.randint(1, 100)
    # run_scripts reports this many failures.
    self.mock_run_scripts.return_value = fail_count
    index_json = self.make_index_json(scripts_dir)
    # Don't need to give the url, creds, or out_dir as we're not running
    # the scripts and sending the results.
    run_scripts_from_metadata(None, None, scripts_dir, None)
    # Only the commissioning scripts were run...
    self.assertThat(
        self.mock_run_scripts,
        MockCalledOnceWith(
            None, None, scripts_dir, None,
            index_json['commissioning_scripts'], True))
    # ...no signal is sent; a FAILED message is reported instead.
    self.assertThat(self.mock_signal, MockNotCalled())
    self.assertThat(self.mock_output_and_send, MockCalledOnceWith(
        '%s commissioning scripts failed to run' % fail_count,
        True, None, None, 'FAILED'))
def test_changing_status_of_node_emits_event(self):
    """Saving a node with a changed status schedules a power-state
    update as a post-commit task."""
    self.patch_autospec(power, "update_power_state_of_node_soon")
    old_status = NODE_STATUS.COMMISSIONING
    node = factory.make_Node(status=old_status, power_type="virsh")
    node.status = get_failed_status(old_status)
    with post_commit_hooks:
        node.save()
        # update_power_state_of_node_soon is registered as a post-commit
        # task, so it's not called immediately.
        self.expectThat(
            power.update_power_state_of_node_soon, MockNotCalled())
        # Once post-commit hooks have been fired, then it's called.
        post_commit_hooks.fire()
        self.assertThat(
            power.update_power_state_of_node_soon,
            MockCalledOnceWith(node.system_id),
        )