def test_power_issues_power_reset(self):
    """power() POSTs a ResetType payload to the node's power-control URI."""
    driver = RedfishPowerDriver()
    context = make_context()
    power_change = factory.make_name('power_change')
    url = driver.get_url(context)
    headers = driver.make_auth_headers(**context)
    node_id = context.get('node_id').encode('utf-8')
    # Pin FileBodyProducer so the exact payload object can be asserted on.
    mock_file_body_producer = self.patch(
        redfish_module, 'FileBodyProducer')
    payload = FileBodyProducer(
        BytesIO(
            json.dumps(
                {'ResetType': "%s" % power_change}).encode('utf-8')))
    mock_file_body_producer.return_value = payload
    mock_redfish_request = self.patch(driver, 'redfish_request')
    expected_uri = join(url, REDFISH_POWER_CONTROL_ENDPOINT % node_id)
    yield driver.power(power_change, url, node_id, headers)
    self.assertThat(
        mock_redfish_request,
        MockCalledOnceWith(b"POST", expected_uri, headers, payload))
def test_power_query_off(self):
    """power_query() reports "off" when the response matches power_off_regex."""
    mock_webhook_request = self.patch(self.webhook, "_webhook_request")
    mock_webhook_request.return_value = succeed(b"{'status': 'stopped'}")
    system_id = factory.make_name("system_id")
    power_query_uri = factory.make_url()
    context = {
        "power_query_uri": power_query_uri,
        "power_off_regex": r"status.*\:.*stopped",
    }
    status = yield self.webhook.power_query(system_id, context)
    self.assertEqual("off", status)
    self.assertThat(
        mock_webhook_request,
        MockCalledOnceWith(
            b"GET",
            power_query_uri.encode(),
            self.webhook._make_auth_headers(system_id, context),
            False,
        ),
    )
def test__allows_setting_testing_scripts_to_none(self):
    """CommissionForm accepts the literal 'none' to skip testing scripts."""
    node = factory.make_Node(
        status=NODE_STATUS.READY, power_state=POWER_STATE.OFF)
    mock_start_commissioning = self.patch_autospec(
        node, 'start_commissioning')
    user = factory.make_admin()
    form = CommissionForm(
        instance=node, user=user, data={'testing_scripts': 'none'})
    self.assertTrue(form.is_valid(), form.errors)
    node = form.save()
    self.assertIsNotNone(node)
    self.assertThat(
        mock_start_commissioning,
        MockCalledOnceWith(
            user, enable_ssh=False, skip_networking=False,
            skip_storage=False, commissioning_scripts=[],
            testing_scripts=['none']))
def test__calls_start_commissioning_with_options(self):
    """CommissionForm forwards all options and script lists to
    node.start_commissioning()."""
    node = factory.make_Node(
        status=NODE_STATUS.READY, power_state=POWER_STATE.OFF)
    user = factory.make_admin()
    commissioning_scripts = [
        factory.make_Script(script_type=SCRIPT_TYPE.COMMISSIONING).name
        for _ in range(3)
    ]
    testing_scripts = [
        factory.make_Script(script_type=SCRIPT_TYPE.TESTING).name
        for _ in range(3)
    ]
    mock_start_commissioning = self.patch_autospec(
        node, 'start_commissioning')
    form = CommissionForm(instance=node, user=user, data={
        'enable_ssh': True,
        'skip_networking': True,
        'skip_storage': True,
        'commissioning_scripts': ','.join(commissioning_scripts),
        'testing_scripts': ','.join(testing_scripts),
    })
    self.assertTrue(form.is_valid(), form.errors)
    node = form.save()
    self.assertIsNotNone(node)
    self.assertThat(
        mock_start_commissioning,
        MockCalledOnceWith(
            user, enable_ssh=True, skip_networking=True,
            skip_storage=True,
            commissioning_scripts=commissioning_scripts,
            testing_scripts=testing_scripts))
def test_put_upload_sends_content_type_and_length_headers(self):
    """put_upload() issues a PUT with octet-stream type and exact length."""
    response = httplib2.Response({"status": 200})
    mock_request = self.patch(boot_resources_create, "http_request")
    mock_request.return_value = (response, b"")
    action = self.make_boot_resources_create_action()
    self.patch(action, "sign")
    data = factory.make_bytes()
    action.put_upload("http://example.com", data)
    expected_headers = {
        "Content-Type": "application/octet-stream",
        "Content-Length": "%s" % len(data),
    }
    self.assertThat(
        mock_request,
        MockCalledOnceWith(
            "http://example.com",
            "PUT",
            body=ANY,
            headers=expected_headers,
            insecure=False,
        ),
    )
def test_amttool_query_state_queries_unknown(self):
    """amttool_query_state() raises PowerActionError on an unknown state."""
    amt_power_driver = AMTPowerDriver()
    ip_address = factory.make_ipv4_address()
    power_pass = factory.make_name("power_pass")
    amt_power_driver.ip_address = factory.make_name("ip_address")
    _issue_amttool_command_mock = self.patch(
        amt_power_driver, "_issue_amttool_command")
    # An unrecognised power state string in the amttool output.
    _issue_amttool_command_mock.return_value = AMTTOOL_OUTPUT % (
        b"",
        b"error",
    )
    self.assertRaises(
        PowerActionError,
        amt_power_driver.amttool_query_state,
        ip_address,
        power_pass,
    )
    self.assertThat(
        _issue_amttool_command_mock,
        MockCalledOnceWith("info", ip_address, power_pass),
    )
def test_allowed_for_user_resource_ids(self):
    """allowed_for_user() converts returned resource id strings to ints."""
    response = mock.MagicMock(status_code=200)
    response.json.return_value = {"admin": ["1", "2", "3"]}
    self.mock_request.return_value = response
    user = factory.make_name("user")
    self.assertEqual(
        {"admin": [1, 2, 3]},
        self.client.allowed_for_user("maas", user, "admin"),
    )
    self.assertThat(
        self.mock_request,
        MockCalledOnceWith(
            "GET",
            "https://rbac.example.com/api/"
            "service/v1/resources/maas/"
            "allowed-for-user?u={}&p=admin".format(user),
            auth=mock.ANY,
            cookies=mock.ANY,
            json=None,
        ),
    )
def test_issue_amttool_command_calls__run(self):
    """_issue_amttool_command() builds the amttool argv and returns
    _run()'s output."""
    amt_power_driver = AMTPowerDriver()
    ip_address = factory.make_ipv4_address()
    power_pass = factory.make_name("power_pass")
    amttool_boot_mode = factory.make_name("amttool_boot_mode")
    stdin = factory.make_name("stdin").encode("utf-8")
    cmd = choice(["power-cycle", "powerup"])
    expected_command = "amttool", ip_address, cmd, amttool_boot_mode
    _run_mock = self.patch(amt_power_driver, "_run")
    _run_mock.return_value = b"output"
    result = amt_power_driver._issue_amttool_command(
        cmd,
        ip_address,
        power_pass,
        amttool_boot_mode=amttool_boot_mode,
        stdin=stdin,
    )
    self.expectThat(
        _run_mock,
        MockCalledOnceWith(expected_command, power_pass, stdin=stdin))
    self.expectThat(result, Equals(b"output"))
def test_import_ssh_keys_creates_keys_keysource_and_audit_event(self):
    """The import op stores the fetched key, its keysource, and an
    AUDIT event."""
    protocol = random.choice(
        [KEYS_PROTOCOL_TYPE.LP, KEYS_PROTOCOL_TYPE.GH])
    auth_id = factory.make_name('auth_id')
    ks = "%s:%s" % (protocol, auth_id)
    key_string = get_data('data/test_rsa0.pub')
    mock_get_protocol_keys = self.patch(
        keysource_module, 'get_protocol_keys')
    mock_get_protocol_keys.return_value = [key_string]
    response = self.client.post(
        reverse('sshkeys_handler'),
        data=dict(op='import', keysource=ks))
    added_key = get_one(SSHKey.objects.filter(user=self.user))
    self.assertEqual(key_string, added_key.key)
    self.assertEqual(ks, str(added_key.keysource))
    self.assertEqual(http.client.OK, response.status_code, response)
    self.assertThat(
        mock_get_protocol_keys, MockCalledOnceWith(protocol, auth_id))
    event = Event.objects.get(type__level=AUDIT)
    self.assertIsNotNone(event)
    self.assertEqual(event.description, "Imported SSH keys.")
def test_calls_change_power_state_later(self):
    """maybe_change_power_state() schedules change_power_state on the
    reactor with the original arguments."""
    self.patch_methods_using_rpc()
    system_id = factory.make_name('system_id')
    hostname = factory.make_name('hostname')
    power_driver = random.choice([
        driver for _, driver in PowerDriverRegistry if driver.queryable
    ])
    power_change = random.choice(['on', 'off', 'cycle'])
    context = {
        factory.make_name('context-key'): factory.make_name('context-val')
    }
    yield power.maybe_change_power_state(
        system_id, hostname, power_driver.name, power_change, context)
    self.assertThat(
        power.change_power_state,
        MockCalledOnceWith(
            system_id, hostname, power_driver.name, power_change,
            context, power.reactor))
def test_processMessages_calls_loseConnection_if_type_not_request(self):
    """A non-REQUEST message makes processMessages() drop the connection
    with a protocol error and stop processing further messages."""
    protocol, factory = self.make_protocol()
    protocol.user = maas_factory.make_User()
    mock_loseConnection = self.patch_autospec(protocol, "loseConnection")
    self.patch_autospec(
        protocol, "handleRequest").return_value = NOT_DONE_YET
    messages = [
        {"type": MSG_TYPE.RESPONSE, "request_id": 1},
        {"type": MSG_TYPE.REQUEST, "request_id": 2},
    ]
    protocol.messages = deque(messages)
    # Only the offending (first) message is consumed.
    self.expectThat([messages[0]], Equals(protocol.processMessages()))
    self.expectThat(
        mock_loseConnection,
        MockCalledOnceWith(
            STATUSES.PROTOCOL_ERROR, "Invalid message type."))
def test_POST_new_creates_tag(self):
    """POSTing to the tags handler creates the tag and populates nodes."""
    self.patch_autospec(Tag, "populate_nodes")
    self.become_admin()
    name = factory.make_string()
    definition = "//node"
    comment = factory.make_string()
    response = self.client.post(
        reverse("tags_handler"),
        {
            "name": name,
            "comment": comment,
            "definition": definition,
        },
    )
    self.assertEqual(http.client.OK, response.status_code)
    parsed_result = json.loads(
        response.content.decode(settings.DEFAULT_CHARSET))
    self.assertEqual(name, parsed_result["name"])
    self.assertEqual(comment, parsed_result["comment"])
    self.assertEqual(definition, parsed_result["definition"])
    self.assertTrue(Tag.objects.filter(name=name).exists())
    self.assertThat(Tag.populate_nodes, MockCalledOnceWith(ANY))
def test_Release_enters_disk_erasing(self):
    """Release with erase=True moves the node into DISK_ERASING and
    restarts it with power cycling allowed."""
    user = factory.make_User()
    params = dict(
        power_address=factory.make_ipv4_address(),
        power_user=factory.make_string(),
        power_pass=factory.make_string())
    node = factory.make_Node(
        interface=True, status=self.actionable_status,
        power_type='ipmi', power_state=POWER_STATE.OFF,
        owner=user, power_parameters=params)
    old_status = node.status
    node_start = self.patch_autospec(node, '_start')
    node_start.return_value = None
    with post_commit_hooks:
        Release(node, user).execute(erase=True)
    self.expectThat(node.status, Equals(NODE_STATUS.DISK_ERASING))
    self.assertThat(
        node_start,
        MockCalledOnceWith(
            user, user_data=ANY, old_status=old_status,
            allow_power_cycle=True))
def test_store_result_runs_builtin_commissioning_hooks(self):
    """store_result() invokes the NODE_INFO_SCRIPTS hook registered for
    the script's name."""
    script_set = factory.make_ScriptSet(
        result_type=RESULT_TYPE.COMMISSIONING)
    script_result = factory.make_ScriptResult(
        script_set=script_set, status=SCRIPT_STATUS.RUNNING)
    exit_status = random.randint(0, 255)
    stdout = factory.make_name("stdout").encode()
    mock_hook = MagicMock()
    # Register a temporary hook under this script's name; remove it on
    # cleanup so other tests are unaffected.
    scriptresult_module.NODE_INFO_SCRIPTS[script_result.name] = {
        "hook": mock_hook
    }
    self.addCleanup(
        scriptresult_module.NODE_INFO_SCRIPTS.pop, script_result.name)
    script_result.store_result(exit_status, stdout=stdout)
    self.assertThat(
        mock_hook,
        MockCalledOnceWith(
            node=script_set.node, output=stdout,
            exit_status=exit_status),
    )
def test_rbacSync_syncs_on_full_change(self):
    """A full-change marker makes _rbacSync() push all resource pools,
    clear pending syncs, and record the RBAC sync id."""
    _, resources = self.make_resource_pools()
    RBACSync.objects.clear("resource-pool")
    RBACSync.objects.clear("")
    RBACSync.objects.create(
        resource_type="", resource_name="", source="test")
    rbac_client = MagicMock()
    rbac_client.update_resources.return_value = "x-y-z"
    service = RegionControllerService(sentinel.listener)
    self.patch(service, "_getRBACClient").return_value = rbac_client
    self.assertEqual([], service._rbacSync())
    self.assertThat(
        rbac_client.update_resources,
        MockCalledOnceWith("resource-pool", updates=resources),
    )
    self.assertFalse(RBACSync.objects.exists())
    last_sync = RBACLastSync.objects.get()
    self.assertEqual(last_sync.resource_type, "resource-pool")
    self.assertEqual(last_sync.sync_id, "x-y-z")
def test_calls_atomic_write(self):
    """sudo_write_file() shells out via sudo to the maas-write-file
    helper with the target path and mode, feeding contents on stdin.

    NOTE(review): the test name mentions atomic_write but the assertion
    checks the sudo Popen invocation — confirm whether the name is stale.
    """
    patch_popen(self)
    patch_sudo(self)
    patch_dev(self, False)
    path = os.path.join(self.make_dir(), factory.make_name("file"))
    contents = factory.make_bytes()
    sudo_write_file(path, contents)
    self.assertThat(
        fs_module.Popen,
        MockCalledOnceWith(
            [
                "sudo",
                "-n",
                get_library_script_path("maas-write-file"),
                path,
                "0644",
            ],
            stdin=PIPE,
        ),
    )
def test__calls_change_power_state_with_timeout(self):
    """maybe_change_power_state() wraps change_power_state in
    deferWithTimeout using CHANGE_POWER_STATE_TIMEOUT."""
    self.patch_methods_using_rpc()
    defer_with_timeout = self.patch(power, 'deferWithTimeout')
    system_id = factory.make_name('system_id')
    hostname = factory.make_name('hostname')
    power_driver = random.choice(
        [driver for _, driver in PowerDriverRegistry if driver.queryable])
    power_change = random.choice(['on', 'off', 'cycle'])
    context = {
        factory.make_name('context-key'): factory.make_name('context-val')
    }
    yield power.maybe_change_power_state(
        system_id, hostname, power_driver.name, power_change, context)
    self.assertThat(
        defer_with_timeout,
        MockCalledOnceWith(
            power.CHANGE_POWER_STATE_TIMEOUT,
            power.change_power_state, system_id, hostname,
            power_driver.name, power_change, context, power.reactor))
def test_powers_single_node(self):
    """The power function sends one RPC command carrying the node's
    effective power info."""
    node = factory.make_Node()
    client = Mock()
    wait_for_reactor(self.power_func)(
        client,
        node.system_id,
        node.hostname,
        node.get_effective_power_info(),
    )
    power_info = node.get_effective_power_info()
    self.assertThat(
        client,
        MockCalledOnceWith(
            self.command,
            system_id=node.system_id,
            hostname=node.hostname,
            power_type=power_info.power_type,
            context=power_info.power_parameters,
        ),
    )
def test_get_config_returns_config_and_calls_with_params(self):
    """DHCPState.get_config() delegates to dhcp.get_config with sorted
    hosts/snippets and returns (config, space-joined interface names)."""
    mock_get_config = self.patch_autospec(dhcp, 'get_config')
    mock_get_config.return_value = sentinel.config
    (omapi_key, failover_peers, shared_networks, hosts, interfaces,
     global_dhcp_snippets) = self.make_args()
    state = dhcp.DHCPState(
        omapi_key, failover_peers, shared_networks, hosts, interfaces,
        global_dhcp_snippets)
    server = Mock()
    self.assertEqual(
        (sentinel.config, " ".join(state.interfaces)),
        state.get_config(server))
    self.assertThat(
        mock_get_config,
        MockCalledOnceWith(
            server.template_basename,
            omapi_key=omapi_key,
            ipv6=ANY,
            failover_peers=state.failover_peers,
            shared_networks=state.shared_networks,
            hosts=sorted(state.hosts.values(), key=itemgetter("host")),
            global_dhcp_snippets=sorted(
                global_dhcp_snippets, key=itemgetter("name"))))
def test_reports_services_to_region(self):
    """In production mode the monitor reports ALWAYS_RUNNING_SERVICES
    plus any registered service's state via the UpdateServices RPC."""
    # Pretend we're in a production environment.
    self.patch(sms, "is_dev_environment").return_value = False
    protocol, connecting = self.patch_rpc_methods()
    self.addCleanup((yield connecting))

    class ExampleService(AlwaysOnService):
        name = service_name = snap_service_name = (
            factory.make_name("service"))

    service = ExampleService()
    # Inveigle this new service into the service monitor; remove it on
    # cleanup so other tests see the original registry.
    self.addCleanup(service_monitor._services.pop, service.name)
    service_monitor._services[service.name] = service
    state = ServiceState(SERVICE_STATE.ON, "running")
    mock_ensureServices = self.patch(service_monitor, "ensureServices")
    mock_ensureServices.return_value = succeed({service.name: state})
    client = getRegionClient()
    rpc_service = Mock()
    rpc_service.getClientNow.return_value = succeed(client)
    monitor_service = sms.ServiceMonitorService(rpc_service, Clock())
    yield monitor_service.startService()
    yield monitor_service.stopService()
    expected_services = list(monitor_service.ALWAYS_RUNNING_SERVICES)
    expected_services.append({
        "name": service.name,
        "status": "running",
        "status_info": "",
    })
    self.assertThat(
        protocol.UpdateServices,
        MockCalledOnceWith(
            protocol,
            system_id=client.localIdent,
            services=expected_services))
def test_call_custom_timeout(self):
    """Passing _timeout makes the client wrap callRemote in
    deferWithTimeout with that timeout."""
    conn, client = self.make_connection_and_client()
    timeout = random.randint(10, 20)
    self.patch_autospec(common, "deferWithTimeout")
    common.deferWithTimeout.return_value = sentinel.response
    response = client(
        sentinel.command,
        _timeout=timeout,
        foo=sentinel.foo,
        bar=sentinel.bar,
    )
    self.assertThat(response, Is(sentinel.response))
    self.assertThat(
        common.deferWithTimeout,
        MockCalledOnceWith(
            timeout,
            conn.callRemote,
            sentinel.command,
            foo=sentinel.foo,
            bar=sentinel.bar,
        ),
    )
def test_DELETE_calls_async_delete(self):
    """DELETE on a pod triggers async_delete and waits up to 7 minutes."""
    pod = factory.make_Pod()
    # Attach machines of every creation type to the pod.
    for creation_type in (
            NODE_CREATION_TYPE.PRE_EXISTING,
            NODE_CREATION_TYPE.MANUAL,
            NODE_CREATION_TYPE.DYNAMIC):
        for _ in range(3):
            factory.make_Machine(bmc=pod, creation_type=creation_type)
    mock_eventual = MagicMock()
    mock_async_delete = self.patch(Pod, "async_delete")
    mock_async_delete.return_value = mock_eventual
    response = self.client.delete(get_pod_uri(pod))
    self.assertEqual(
        http.client.NO_CONTENT, response.status_code, response.content
    )
    self.assertThat(mock_eventual.wait, MockCalledOnceWith(60 * 7))
def test_delete_deletes_bcache(self):
    """DELETE removes the bcache group and records an audit event."""
    mock_create_audit_event = self.patch(
        bcache_module, "create_audit_event")
    self.become_admin()
    node = factory.make_Node(status=NODE_STATUS.READY)
    bcache = factory.make_FilesystemGroup(
        node=node, group_type=FILESYSTEM_GROUP_TYPE.BCACHE)
    uri = get_bcache_device_uri(bcache)
    response = self.client.delete(uri)
    self.assertEqual(
        http.client.NO_CONTENT, response.status_code, response.content)
    self.assertIsNone(reload_object(bcache))
    self.assertThat(
        mock_create_audit_event,
        MockCalledOnceWith(
            EVENT_TYPES.NODE,
            ENDPOINT.API,
            ANY,
            node.system_id,
            "Deleted bcache.",
        ),
    )
def test_create(self):
    """POST creates a cache set on the given device and audits it."""
    mock_create_audit_event = self.patch(
        bcache_cacheset_module, "create_audit_event")
    self.become_admin()
    node = factory.make_Node(status=NODE_STATUS.READY)
    cache_device = factory.make_PhysicalBlockDevice(node=node)
    uri = get_bcache_cache_sets_uri(node)
    response = self.client.post(uri, {"cache_device": cache_device.id})
    self.assertEqual(
        http.client.OK, response.status_code, response.content)
    parsed_device = json_load_bytes(response.content)
    self.assertEqual(cache_device.id, parsed_device["cache_device"]["id"])
    self.assertThat(
        mock_create_audit_event,
        MockCalledOnceWith(
            EVENT_TYPES.NODE,
            ENDPOINT.API,
            ANY,
            node.system_id,
            "Created bcache cache set.",
        ),
    )
def test__calls_and_returns_correctly(self):
    """compose_machine() issues ComposeMachine and unpacks the result
    into (machine, hints)."""
    pod = factory.make_Pod()
    client = Mock()
    client.return_value = succeed({
        'machine': sentinel.machine,
        'hints': sentinel.hints,
    })
    machine, hints = wait_for_reactor(compose_machine)(
        client, pod.power_type, pod.power_parameters,
        sentinel.request, pod.id, pod.name)
    self.assertThat(
        client,
        MockCalledOnceWith(
            ComposeMachine,
            type=pod.power_type,
            context=pod.power_parameters,
            request=sentinel.request,
            pod_id=pod.id,
            name=pod.name))
    self.assertEqual(sentinel.machine, machine)
    self.assertEqual(sentinel.hints, hints)
def test_exits_gracefully_if_cant_report_foreign_dhcp_server(self):
    """If the region lacks ReportForeignDHCPServer, the probe service
    logs an error instead of crashing."""
    clock = Clock()
    interface_name = factory.make_name("eth")
    interfaces = {
        interface_name: {
            "enabled": True,
            "links": [{"address": "10.0.0.1/24"}],
        }
    }
    maaslog = self.patch(dhcp_probe_service, 'maaslog')
    deferToThread = self.patch(dhcp_probe_service, 'deferToThread')
    deferToThread.side_effect = [defer.succeed(interfaces)]
    probe_interface = self.patch(dhcp_probe_service, 'probe_interface')
    probe_interface.return_value = ['192.168.0.100']
    protocol, connecting = self.patch_rpc_methods()
    self.addCleanup((yield connecting))
    # Simulate a region that does not implement the RPC method.
    del protocol._commandDispatch[
        region.ReportForeignDHCPServer.commandName]
    rpc_service = Mock()
    rpc_service.getClientNow.return_value = defer.succeed(
        getRegionClient())
    service = DHCPProbeService(rpc_service, clock)
    yield service.startService()
    yield service.stopService()
    self.assertThat(
        maaslog.error,
        MockCalledOnceWith(
            "Unable to inform region of DHCP server: the region "
            "does not yet support the ReportForeignDHCPServer RPC "
            "method."))
def test_create(self):
    """Tests Bcache device creation.

    POST creates a bcache device over the backing device / cache set and
    records an audit event; the virtual device inherits the backing size.
    """
    mock_create_audit_event = self.patch(
        bcache_module, "create_audit_event")
    self.become_admin()
    node = factory.make_Node(status=NODE_STATUS.READY)
    backing_size = 10 * 1000**4
    cache_set = factory.make_CacheSet(node=node)
    backing_device = factory.make_PhysicalBlockDevice(
        node=node, size=backing_size)
    uuid = str(uuid4())
    uri = get_bcache_devices_uri(node)
    response = self.client.post(
        uri,
        {
            "name": "bcache0",
            "uuid": uuid,
            "cache_mode": CACHE_MODE_TYPE.WRITEBACK,
            "cache_set": cache_set.id,
            "backing_device": backing_device.id,
        },
    )
    self.assertEqual(
        http.client.OK, response.status_code, response.content)
    parsed_device = json_load_bytes(response.content)
    self.assertEqual(backing_size, parsed_device["virtual_device"]["size"])
    # Fixed: assertItemsEqual compares strings as unordered character
    # multisets (so e.g. "0bcache" would have passed); use strict
    # equality for scalar string fields.
    self.assertEqual("bcache0", parsed_device["name"])
    self.assertEqual(uuid, parsed_device["uuid"])
    self.assertThat(
        mock_create_audit_event,
        MockCalledOnceWith(
            EVENT_TYPES.NODE,
            ENDPOINT.API,
            ANY,
            node.system_id,
            "Created bcache.",
        ),
    )
def test__calls_are_made_to_all_clusters(self):
    """populate_tags() evaluates the tag on every rack controller, each
    with that rack's own credentials."""
    rpc_fixture = self.prepare_live_rpc()
    rack_controllers = [factory.make_RackController() for _ in range(3)]
    protocols = []
    rack_creds = []
    for rack in rack_controllers:
        tokens = list(get_auth_tokens(rack.owner))
        # Use the latest token, creating one if none exist.
        if len(tokens) > 0:
            token = tokens[-1]
        else:
            token = create_auth_token(rack.owner)
        creds = convert_tuple_to_string(get_creds_tuple(token))
        rack_creds.append(creds)
        protocol = rpc_fixture.makeCluster(rack, EvaluateTag)
        protocol.EvaluateTag.side_effect = always_succeed_with({})
        protocols.append(protocol)
    tag = factory.make_Tag(populate=False)
    [d] = populate_tags(tag)
    # `d` is a testing-only convenience. We must wait for it to fire,
    # and we must do that from the reactor thread.
    wait_for_populate = asynchronous(lambda: d)
    wait_for_populate().wait(10)
    for rack, protocol, creds in zip(
            rack_controllers, protocols, rack_creds):
        self.expectThat(
            protocol.EvaluateTag,
            MockCalledOnceWith(
                protocol, tag_name=tag.name,
                tag_definition=tag.definition,
                system_id=rack.system_id, tag_nsmap=ANY,
                credentials=creds, nodes=ANY))
def test_class_start_testing_with_storage_param(self):
    """TestForm passes any valid storage identifier form through to
    start_testing() as the script's storage parameter."""
    node = factory.make_Node(
        status=NODE_STATUS.DEPLOYED, with_boot_disk=False,
        interface=True)
    bd = factory.make_PhysicalBlockDevice(node=node)
    user = factory.make_admin()
    script = factory.make_Script(
        script_type=SCRIPT_TYPE.TESTING,
        parameters={"storage": {"type": "storage"}},
    )
    mock_start_testing = self.patch_autospec(node, "start_testing")
    # Any of the accepted block-device identifiers should work.
    # (Renamed from `input`, which shadowed the builtin.)
    storage_param = random.choice([
        str(bd.id),
        bd.name,
        bd.model,
        bd.serial,
        "%s:%s" % (bd.model, bd.serial),
    ] + bd.tags)
    form = TestForm(
        instance=node,
        user=user,
        data={"testing_scripts": script.name, "storage": storage_param},
    )
    self.assertTrue(form.is_valid(), form.errors)
    node = form.save()
    self.assertIsNotNone(node)
    self.assertThat(
        mock_start_testing,
        MockCalledOnceWith(
            user, False, [script.name],
            {script.name: {"storage": storage_param}}),
    )
def test__render_POST_queue_messages(self):
    """render_POST queues the decoded message with its token, returns
    NOT_DONE_YET, and responds 204 No Content."""
    status_worker = Mock()
    status_worker.queueMessage = Mock()
    status_worker.queueMessage.return_value = succeed(None)
    resource = StatusHandlerResource(status_worker)
    message = {
        'event_type': (
            factory.make_name('type') + '/' +
            factory.make_name('sub_type')),
        'origin': factory.make_name('origin'),
        'name': factory.make_name('name'),
        'description': factory.make_name('description'),
    }
    token = factory.make_name('token')
    request = self.make_request(
        content=json.dumps(message).encode('ascii'), token=token)
    output = resource.render_POST(request)
    # Fixed: use assertEqual — assertEquals is a deprecated alias
    # removed in Python 3.12.
    self.assertEqual(NOT_DONE_YET, output)
    self.assertEqual(204, request.responseCode)
    self.assertThat(
        status_worker.queueMessage, MockCalledOnceWith(token, message))