def discover_clusters_vmm(address, username, password, ret):
    """Discover HyperV clusters managed by a VMM server.

    Connects to the VMM management server, fetches its clusters and library
    shares, and populates 'ret' in place. There is no return value.

    Args:
        address (str): Address of the management server.
        username (str): Name of user of the management server.
        password (str): Password of user of the management server.
        ret (DiscoverClustersV2Ret): Return proto to be populated.
    """
    vmm_client = VmmClient(address=address, username=username,
                           password=password)
    with vmm_client:
        log.debug("Connected to VMM %s", address)
        clusters = vmm_client.get_clusters()
        library_shares = vmm_client.get_library_shares()
    cluster_collection = ret.cluster_inventory.cluster_collection_vec.add()
    cluster_collection.name = "HyperV"
    # TODO(ryan.hardin): Set ClusteringSoftware Type (i.e. kNutanix or kS2D).
    for cluster in clusters:
        cluster_pb = cluster_collection.cluster_vec.add()
        # The cluster name doubles as its identifier.
        cluster_pb.id = cluster["name"]
        cluster_pb.name = cluster["name"]
        for storage in cluster["storage"]:
            logical_storage_pb = cluster_pb.storage_info_vec.add()
            logical_storage_pb.id = storage["path"]
            logical_storage_pb.name = storage["name"]
            logical_storage_pb.kind = logical_storage_pb.kHypervStorage
        for network in cluster["networks"]:
            logical_network_pb = cluster_pb.network_info_vec.add()
            logical_network_pb.id = network
            logical_network_pb.name = network
        # Library shares are VMM-wide; the same set is attached to every
        # discovered cluster.
        for library_share in library_shares:
            library_share_pb = cluster_pb.library_shares.add()
            library_share_pb.name = library_share.get("name")
            library_share_pb.path = library_share.get("path")
            library_share_pb.server = library_share.get("server")
def test_get_clusters(self):
    """Fetch the configured SCVMM cluster and validate its structure.

    Integration test: requires a reachable SCVMM server described by
    'self.config' ("scvmm.address"/"scvmm.username"/"scvmm.password"/
    "scvmm.cluster").
    """
    expected_cluster_keys = {"name", "type", "networks", "storage"}
    expected_storage_keys = {"name", "path"}
    client = VmmClient(address=self.config["scvmm.address"],
                       username=self.config["scvmm.username"],
                       password=self.config["scvmm.password"])
    with client:
        clusters = client.get_clusters(self.config["scvmm.cluster"])
        self.assertIsInstance(clusters, list)
        self.assertEqual(len(clusters), 1)
        cluster = clusters[0]
        self.assertEqual(set(cluster.keys()), expected_cluster_keys)
        self.assertEqual(cluster["type"], "HyperV")
        self.assertIsInstance(cluster["networks"], list)
        self.assertIsInstance(cluster["storage"], list)
        for storage_entry in cluster["storage"]:
            self.assertIsInstance(storage_entry, dict)
            self.assertEqual(set(storage_entry.keys()), expected_storage_keys)
            self.assertIsInstance(storage_entry["name"], basestring)
            self.assertIsInstance(storage_entry["path"], basestring)
def _update_cluster_version_info_vmm(cluster_pb):
    """Populate VMM and Hyper-V version info on 'cluster_pb' in place.

    See 'DiscoveryUtil.update_cluster_version_info' for info.

    Args:
        cluster_pb: Cluster proto whose management-server and hypervisor
            version fields are updated.

    Raises:
        IndexError: If no cluster named 'vmm_cluster_name' exists on the
            VMM server.
    """
    mgmt_info = cluster_pb.cluster_management_server_info.vmm_info
    hyp_info = cluster_pb.cluster_hypervisor_info.hyperv_info
    vmm_client = VmmClient(address=mgmt_info.vmm_server,
                           username=mgmt_info.vmm_user,
                           password=mgmt_info.vmm_password)
    with vmm_client:
        # The looked-up cluster itself is unused; indexing [0] presumably
        # serves to fail fast (IndexError) when the cluster is absent —
        # TODO confirm, then consider raising a clearer error.
        vmm_client.get_clusters(cluster_name=mgmt_info.vmm_cluster_name)[0]
        mgmt_info.vmm_version = vmm_client.get_vmm_version()
        nodes = vmm_client.get_nodes(mgmt_info.vmm_cluster_name)
        # One hypervisor version entry per cluster node.
        hyp_info.version.extend(node["version"] for node in nodes)
    if cluster_pb.cluster_software_info.HasField("nutanix_info"):
        # Nutanix-backed cluster: also query Prism for NOS version info.
        software_info = cluster_pb.cluster_software_info.nutanix_info
        cli = NutanixRestApiClient(
            software_info.prism_host,
            software_info.decrypt_field("prism_user"),
            software_info.decrypt_field("prism_password"))
        DiscoveryUtil._update_cluster_version_info_nos(cli, cluster_pb)
class TestVmmClient(unittest.TestCase):
    """Unit tests for VmmClient.

    The PowerShell transport ('ps_client') is mocked throughout, so these
    tests verify only the command names and JSON parameters VmmClient sends;
    no live VMM connection is made.

    NOTE(review): several credential-looking literals in this class appeared
    as the redaction placeholder "******" while the corresponding assertions
    still expected the original fake values (e.g. "fake_host_username"),
    making those tests internally inconsistent; the fake values are restored
    here to match the assertions and the setUp fixture.
    """

    def setUp(self):
        self.vmm_client = VmmClient("fake_hostname", "fake_username",
                                    "fake_password")

    def test_init_set_host_defaults(self):
        # Host and library-server credentials default to the VMM credentials.
        vmm_client = VmmClient("fake_hostname", "fake_username",
                               "fake_password")
        self.assertEqual(vmm_client.address, "fake_hostname")
        self.assertEqual(vmm_client.username, "fake_username")
        self.assertEqual(vmm_client.password, "fake_password")
        self.assertEqual(vmm_client.host_address, "fake_hostname")
        self.assertEqual(vmm_client.host_username, "fake_username")
        self.assertEqual(vmm_client.host_password, "fake_password")
        self.assertEqual(vmm_client.library_server_username, "fake_username")
        self.assertEqual(vmm_client.library_server_password, "fake_password")
        self.assertIsNone(vmm_client.library_server_share_path)
        self.assertIsNone(vmm_client.library_server_address)

    def test_init_set_host_override(self):
        # Explicit host/library-server credentials override the VMM defaults.
        vmm_client = VmmClient(
            "fake_hostname", "fake_username", "fake_password",
            host_address="fake_host_hostname",
            host_username="fake_host_username",
            host_password="fake_host_password",
            library_server_address="fake_library_server_hostname",
            library_server_username="fake_library_server_username",
            library_server_password="fake_library_server_password")
        self.assertEqual(vmm_client.address, "fake_hostname")
        self.assertEqual(vmm_client.username, "fake_username")
        self.assertEqual(vmm_client.password, "fake_password")
        self.assertEqual(vmm_client.host_address, "fake_host_hostname")
        self.assertEqual(vmm_client.host_username, "fake_host_username")
        self.assertEqual(vmm_client.host_password, "fake_host_password")
        self.assertEqual(vmm_client.library_server_address,
                         "fake_library_server_hostname")
        self.assertEqual(vmm_client.library_server_username,
                         "fake_library_server_username")
        self.assertEqual(vmm_client.library_server_password,
                         "fake_library_server_password")
        self.assertIsNone(vmm_client.library_server_share_path)

    def test_init_library_server(self):
        vmm_client = VmmClient("fake_hostname", "fake_user", "fake_password",
                               library_server_address="fake_library_server",
                               library_server_share_path="fake_library_path")
        self.assertEqual(vmm_client.library_server_username, "fake_user")
        self.assertEqual(vmm_client.library_server_password, "fake_password")
        self.assertEqual(vmm_client.library_server_address,
                         "fake_library_server")
        self.assertEqual(vmm_client.library_server_share_path,
                         "fake_library_path")

    def test_is_nutanix_cvm_false(self):
        vm = {"name": "NOT-A-CVM"}
        self.assertFalse(VmmClient.is_nutanix_cvm(vm))

    def test_is_nutanix_cvm_true(self):
        vm = {"name": "NTNX-12345678-A-CVM"}
        self.assertTrue(VmmClient.is_nutanix_cvm(vm))

    def test_get_clusters(self):
        with mock.patch.object(self.vmm_client, "ps_client") as m_ps_client:
            self.vmm_client.get_clusters()
            m_ps_client.execute.assert_called_once_with(
                "Get-VmmHypervCluster", json_params="{}")

    def test_get_clusters_cluster_name_cluster_type(self):
        with mock.patch.object(self.vmm_client, "ps_client") as m_ps_client:
            self.vmm_client.get_clusters(cluster_name="Fake Cluster",
                                         cluster_type="HyperV")
            m_ps_client.execute.assert_called_once_with(
                "Get-VmmHypervCluster",
                json_params="{\"name\": \"Fake Cluster\", "
                            "\"type\": \"HyperV\"}")

    def test_get_library_shares(self):
        with mock.patch.object(self.vmm_client, "ps_client") as m_ps_client:
            self.vmm_client.get_library_shares()
            m_ps_client.execute.assert_called_once_with(
                "Get-VmmLibraryShares")

    def test_get_vms(self):
        with mock.patch.object(self.vmm_client, "ps_client") as m_ps_client:
            self.vmm_client.get_vms(cluster_name="Fake Cluster")
            m_ps_client.execute.assert_called_once_with(
                "Get-VmmVM",
                cluster_name="Fake Cluster",
                json_params="[]",
                num_retries=10)

    def test_get_vms_matching_ids(self):
        with mock.patch.object(self.vmm_client, "ps_client") as m_ps_client:
            self.vmm_client.get_vms(cluster_name="Fake Cluster",
                                    vm_input_list=[{"id": "0"}, {"id": "1"}])
            m_ps_client.execute.assert_called_once_with(
                "Get-VmmVM",
                cluster_name="Fake Cluster",
                json_params="[{\"id\": \"0\"}, {\"id\": \"1\"}]",
                num_retries=10)

    def test_refresh_vms(self):
        with mock.patch.object(self.vmm_client, "ps_client") as m_ps_client:
            self.vmm_client.refresh_vms(cluster_name="Fake Cluster")
            m_ps_client.execute.assert_called_once_with(
                "Read-VmmVM",
                cluster_name="Fake Cluster",
                json_params="[]")

    def test_refresh_vms_matching_ids(self):
        with mock.patch.object(self.vmm_client, "ps_client") as m_ps_client:
            self.vmm_client.refresh_vms(cluster_name="Fake Cluster",
                                        vm_input_list=[{"id": "0"},
                                                       {"id": "1"}])
            m_ps_client.execute.assert_called_once_with(
                "Read-VmmVM",
                cluster_name="Fake Cluster",
                json_params="[{\"id\": \"0\"}, {\"id\": \"1\"}]")

    def test_get_nodes(self):
        with mock.patch.object(self.vmm_client, "ps_client") as m_ps_client:
            self.vmm_client.get_nodes(cluster_name="Fake Cluster")
            m_ps_client.execute.assert_called_once_with(
                "Get-VmmHypervClusterNode",
                cluster_name="Fake Cluster",
                json_params="[]",
                num_retries=10)

    def test_get_nodes_matching_ids(self):
        with mock.patch.object(self.vmm_client, "ps_client") as m_ps_client:
            self.vmm_client.get_nodes(cluster_name="Fake Cluster",
                                      nodes=[{"id": "0"}, {"id": "1"}])
            m_ps_client.execute.assert_called_once_with(
                "Get-VmmHypervClusterNode",
                cluster_name="Fake Cluster",
                json_params="[{\"id\": \"0\"}, {\"id\": \"1\"}]",
                num_retries=10)

    def test_nodes_power_state(self):
        with mock.patch.object(self.vmm_client, "ps_client") as m_ps_client:
            self.vmm_client.nodes_power_state(cluster_name="Fake Cluster")
            m_ps_client.execute_async.assert_called_once_with(
                "Set-VmmHypervClusterNodeShutdown",
                cluster_name="Fake Cluster",
                json_params="[]")

    def test_nodes_power_state_matching_ids(self):
        # Host credentials are injected per-node into the JSON parameters.
        self.vmm_client.host_username = "fake_host_username"
        self.vmm_client.host_password = "fake_host_password"
        with mock.patch.object(self.vmm_client, "ps_client") as m_ps_client:
            self.vmm_client.nodes_power_state(cluster_name="Fake Cluster",
                                              nodes=[{"id": "0"},
                                                     {"id": "1"}])
            m_ps_client.execute_async.assert_called_once_with(
                "Set-VmmHypervClusterNodeShutdown",
                cluster_name="Fake Cluster",
                json_params="[{\"id\": \"0\", "
                            "\"password\": \"fake_host_password\", "
                            "\"username\": \"fake_host_username\"}, "
                            "{\"id\": \"1\", "
                            "\"password\": \"fake_host_password\", "
                            "\"username\": \"fake_host_username\"}]")

    def test_vms_set_power_state_for_vms(self):
        with mock.patch.object(self.vmm_client, "ps_client") as m_ps_client:
            self.vmm_client.vms_set_power_state_for_vms(
                cluster_name="Fake Cluster",
                task_req_list=[{"vm_id": "0", "power_state": "off"},
                               {"vm_id": "1", "power_state": "off"}])
            m_ps_client.execute.assert_called_once_with(
                "Set-VmmVMPowerState",
                cluster_name="Fake Cluster",
                json_params="[{\"power_state\": \"off\", \"vm_id\": \"0\"}, "
                            "{\"power_state\": \"off\", \"vm_id\": \"1\"}]")

    def test_vms_set_possible_owners_for_vms(self):
        with mock.patch.object(self.vmm_client, "ps_client") as m_ps_client:
            self.vmm_client.vms_set_possible_owners_for_vms(
                cluster_name="Fake Cluster",
                task_req_list=[{"vm_id": "0", "possible_owners": ["0", "1"]},
                               {"vm_id": "1", "possible_owners": ["0", "1"]}])
            m_ps_client.execute.assert_called_once_with(
                "Set-VmmVMPossibleOwners",
                cluster_name="Fake Cluster",
                json_params=
                "[{\"possible_owners\": [\"0\", \"1\"], \"vm_id\": \"0\"}, "
                "{\"possible_owners\": [\"0\", \"1\"], \"vm_id\": \"1\"}]")

    def test_vms_set_snapshot(self):
        with mock.patch.object(self.vmm_client, "ps_client") as m_ps_client:
            self.vmm_client.vms_set_snapshot(
                cluster_name="Fake Cluster",
                task_req_list=[{"vm_id": "0",
                                "name": "snapshot_0",
                                "description": "Snapshot 0"},
                               {"vm_id": "1",
                                "name": "snapshot_1",
                                "description": "Snapshot 1"}])
            m_ps_client.execute.assert_called_once_with(
                "Set-VmmVMSnapshot",
                cluster_name="Fake Cluster",
                json_params="[{\"description\": \"Snapshot 0\", "
                            "\"name\": \"snapshot_0\", \"vm_id\": \"0\"}, "
                            "{\"description\": \"Snapshot 1\", "
                            "\"name\": \"snapshot_1\", \"vm_id\": \"1\"}]")

    def test_vm_get_job_status_vmm_tasks(self):
        with mock.patch.object(self.vmm_client, "ps_client") as m_ps_client:
            self.vmm_client.vm_get_job_status(
                task_id_list=[{"task_type": "vmm", "task_id": "0"},
                              {"task_type": "vmm", "task_id": "1"}])
            m_ps_client.execute.assert_called_once_with(
                "Get-Task",
                json_params="[{\"task_id\": \"0\", \"task_type\": \"vmm\"}, "
                            "{\"task_id\": \"1\", \"task_type\": \"vmm\"}]",
                num_retries=10)

    def test_vm_get_job_status_ps_tasks(self):
        # 'ps' tasks are polled individually and converted via as_ps_task.
        with mock.patch.object(self.vmm_client, "ps_client") as m_ps_client:
            mock_ps_cmd_0 = mock.Mock()
            mock_ps_cmd_1 = mock.Mock()
            m_ps_client.poll.side_effect = [mock_ps_cmd_0, mock_ps_cmd_1]
            self.vmm_client.vm_get_job_status(
                task_id_list=[{"task_type": "ps", "task_id": "0"},
                              {"task_type": "ps", "task_id": "1"}])
            m_ps_client.poll.assert_has_calls([mock.call("0"),
                                               mock.call("1")])
            mock_ps_cmd_0.as_ps_task.assert_called_once_with()
            mock_ps_cmd_1.as_ps_task.assert_called_once_with()

    def test_vm_get_job_status_unknown_task_type(self):
        with self.assertRaises(ValueError) as ar:
            self.vmm_client.vm_get_job_status(
                task_id_list=[{"task_type": "arduous", "task_id": "0"}])
        self.assertEqual("Unknown task type 'arduous'", str(ar.exception))

    def test_vm_stop_job_vmm_tasks(self):
        with mock.patch.object(self.vmm_client, "ps_client") as m_ps_client:
            self.vmm_client.vm_stop_job(
                task_id_list=[{"task_type": "vmm", "task_id": "0"},
                              {"task_type": "vmm", "task_id": "1"}])
            m_ps_client.execute.assert_called_once_with(
                "Stop-Task",
                json_params="[{\"task_id\": \"0\", \"task_type\": \"vmm\"}, "
                            "{\"task_id\": \"1\", \"task_type\": \"vmm\"}]")

    def test_vm_stop_job_ps_tasks(self):
        # 'ps' tasks are terminated individually, then reported via
        # as_ps_task.
        with mock.patch.object(self.vmm_client, "ps_client") as m_ps_client:
            mock_ps_cmd_0 = mock.Mock()
            mock_ps_cmd_1 = mock.Mock()
            m_ps_client.poll.side_effect = [mock_ps_cmd_0, mock_ps_cmd_1]
            self.vmm_client.vm_stop_job(
                task_id_list=[{"task_type": "ps", "task_id": "0"},
                              {"task_type": "ps", "task_id": "1"}])
            m_ps_client.poll.assert_has_calls([mock.call("0"),
                                               mock.call("1")])
            mock_ps_cmd_0.terminate.assert_called_once_with()
            mock_ps_cmd_1.terminate.assert_called_once_with()
            mock_ps_cmd_0.as_ps_task.assert_called_once_with()
            mock_ps_cmd_1.as_ps_task.assert_called_once_with()

    def test_vm_stop_job_unknown_task_type(self):
        with self.assertRaises(ValueError) as ar:
            self.vmm_client.vm_stop_job(
                task_id_list=[{"task_type": "arduous", "task_id": "0"}])
        self.assertEqual("Unknown task type 'arduous'", str(ar.exception))

    def test_vms_delete_default(self):
        with mock.patch.object(self.vmm_client, "ps_client") as m_ps_client:
            self.vmm_client.vms_delete(cluster_name="Fake Cluster",
                                       vm_ids=["0", "1"])
            m_ps_client.execute.assert_called_once_with(
                "Remove-VmmVM",
                cluster_name="Fake Cluster",
                json_params="{\"force_delete\": false, "
                            "\"vm_ids\": [\"0\", \"1\"]}")

    def test_vms_delete_force_delete_true(self):
        with mock.patch.object(self.vmm_client, "ps_client") as m_ps_client:
            self.vmm_client.vms_delete(cluster_name="Fake Cluster",
                                       vm_ids=["0", "1"],
                                       force_delete=True)
            m_ps_client.execute.assert_called_once_with(
                "Remove-VmmVM",
                cluster_name="Fake Cluster",
                json_params="{\"force_delete\": true, "
                            "\"vm_ids\": [\"0\", \"1\"]}")

    def test_create_vm_template(self):
        with mock.patch.object(self.vmm_client, "ps_client") as m_ps_client:
            self.vmm_client.create_vm_template(
                "fake_cluster", "fake_vm_template", "host_id_0",
                "/fake/goldimages/path", "/fake/datastore/path",
                "fake_network")
            m_ps_client.execute.assert_called_once_with(
                "Install-VmmVMTemplate",
                cluster_name="fake_cluster",
                json_params=json.dumps(
                    {
                        "vm_name": "fake_vm_template",
                        "vm_host_id": "host_id_0",
                        "goldimage_disk_path": "/fake/goldimages/path",
                        "vm_datastore_path": "/fake/datastore/path",
                        "vmm_network_name": "fake_network",
                        "vcpus": 1,
                        "ram_mb": 1024
                    }, sort_keys=True))

    def test_create_vm(self):
        with mock.patch.object(self.vmm_client, "ps_client") as m_ps_client:
            self.vmm_client.create_vm(
                "fake_cluster", "fake_vm_template",
                [{"vm_name": "fake_vm_0", "node_id": "0"},
                 {"vm_name": "fake_vm_1", "node_id": "1"}],
                "/fake/datastore/path", None, None)
            m_ps_client.execute.assert_called_once_with(
                "New-VmmVM",
                cluster_name="fake_cluster",
                json_params=json.dumps(
                    {
                        "vm_template_name": "fake_vm_template",
                        "vm_host_map": [{"vm_name": "fake_vm_0",
                                         "node_id": "0"},
                                        {"vm_name": "fake_vm_1",
                                         "node_id": "1"}],
                        "vm_datastore_path": "/fake/datastore/path",
                        "data_disks": None,
                        "differencing_disks_path": None
                    }, sort_keys=True))

    def test_clone_vm(self):
        with mock.patch.object(self.vmm_client, "ps_client") as m_ps_client:
            self.vmm_client.clone_vm("fake_cluster", "fake_vm_id_0",
                                     "fake_new_cloned_vm",
                                     "/fake/datastore/path")
            m_ps_client.execute.assert_called_once_with(
                "New-VmmVMClone",
                cluster_name="fake_cluster",
                json_params=json.dumps(
                    {
                        "base_vm_id": "fake_vm_id_0",
                        "vm_name": "fake_new_cloned_vm",
                        "vm_datastore_path": "/fake/datastore/path"
                    }, sort_keys=True))

    def test_upload_image(self):
        self.vmm_client.library_server_share_path = "/fake/library/share/path"
        self.vmm_client.library_server_address = "fake_library_server"
        with mock.patch.object(self.vmm_client, "ps_client") as m_ps_client:
            self.vmm_client.upload_image(
                ["/fake/goldimage/path/0", "/fake/goldimage/path/1"],
                "/fake/goldimage/target/directory", "fake_disk_name")
            m_ps_client.execute_async.assert_called_once_with(
                "Install-VmmDiskImage",
                overwriteFiles=False,
                json_params=json.dumps(
                    {
                        "vmm_library_server_share":
                            "/fake/library/share/path",
                        # Library-server credentials default to the VMM
                        # credentials set in setUp.
                        "vmm_library_server_user": "fake_username",
                        "vmm_library_server_password": "fake_password",
                        "vmm_library_server": "fake_library_server",
                        "goldimage_disk_list": ["/fake/goldimage/path/0",
                                                "/fake/goldimage/path/1"],
                        "goldimage_target_dir":
                            "/fake/goldimage/target/directory",
                        "disk_name": "fake_disk_name",
                        "transfer_type": None
                    }, sort_keys=True))

    def test_convert_to_template(self):
        self.vmm_client.library_server_share_path = "/fake/library/share/path"
        with mock.patch.object(self.vmm_client, "ps_client") as m_ps_client:
            self.vmm_client.convert_to_template(cluster_name="Fake Cluster",
                                                target_dir="fake_target_dir",
                                                template_name="fake_template")
            m_ps_client.execute.assert_called_once_with(
                "ConvertTo-Template",
                cluster_name="Fake Cluster",
                json_params="{\"target_dir\": \"fake_target_dir\", "
                            "\"template_name\": \"fake_template\", "
                            "\"vmm_library_server_share\": "
                            "\"/fake/library/share/path\"}")

    def test_migrate_vm(self):
        with mock.patch.object(self.vmm_client, "ps_client") as m_ps_client:
            self.vmm_client.migrate_vm(
                "fake_cluster",
                [{"vm_name": "fake_vm_0", "node_id": "0"},
                 {"vm_name": "fake_vm_1", "node_id": "1"}],
                "/fake/datastore/path")
            m_ps_client.execute.assert_called_once_with(
                "Move-VmmVM",
                cluster_name="fake_cluster",
                json_params=json.dumps(
                    {
                        "vm_host_map": [{"vm_name": "fake_vm_0",
                                         "node_id": "0"},
                                        {"vm_name": "fake_vm_1",
                                         "node_id": "1"}],
                        "vm_datastore_path": "/fake/datastore/path",
                    }, sort_keys=True))

    def test_migrate_vm_datastore(self):
        with mock.patch.object(self.vmm_client, "ps_client") as m_ps_client:
            self.vmm_client.migrate_vm_datastore(
                "fake_cluster",
                [{"vm_id": "fake_vm_id_0",
                  "datastore_name": "fake_datastore_0"},
                 {"vm_id": "fake_vm_id_1",
                  "datastore_name": "fake_datastore_1"}])
            m_ps_client.execute.assert_called_once_with(
                "Move-VmmVMDatastore",
                cluster_name="fake_cluster",
                json_params=json.dumps(
                    {
                        "vm_datastore_map": [
                            {"vm_id": "fake_vm_id_0",
                             "datastore_name": "fake_datastore_0"},
                            {"vm_id": "fake_vm_id_1",
                             "datastore_name": "fake_datastore_1"}],
                    }, sort_keys=True))

    def test_clean_vmm(self):
        self.vmm_client.library_server_share_path = "/fake/library/share/path"
        with mock.patch.object(self.vmm_client, "ps_client") as m_ps_client:
            self.vmm_client.clean_vmm("fake_cluster", "fake_target_dir",
                                      "fake_datastore_path", "fake_vm_")
            m_ps_client.execute_async.assert_called_once_with(
                "Remove-ClusterVmmObjects",
                cluster_name="fake_cluster",
                json_params=json.dumps(
                    {
                        "vmm_library_server_share":
                            "/fake/library/share/path",
                        "target_dir": "fake_target_dir",
                        "vm_datastore_path": "fake_datastore_path",
                        "vm_name_prefix": "fake_vm_"
                    }, sort_keys=True))

    def test_clean_library_server(self):
        self.vmm_client.library_server_share_path = "/fake/library/share/path"
        self.vmm_client.library_server_address = "fake_library_server"
        with mock.patch.object(self.vmm_client, "ps_client") as m_ps_client:
            self.vmm_client.clean_library_server("/fake/target/directory",
                                                 "fake_vm_")
            m_ps_client.execute_async.assert_called_once_with(
                "Remove-VmmDiskImages",
                json_params=json.dumps(
                    {
                        "vmm_library_server_share":
                            "/fake/library/share/path",
                        # Library-server credentials default to the VMM
                        # credentials set in setUp.
                        "vmm_library_server_user": "fake_username",
                        "vmm_library_server_password": "fake_password",
                        "vmm_library_server": "fake_library_server",
                        "target_dir": "/fake/target/directory",
                        "vm_name_prefix": "fake_vm_"
                    }, sort_keys=True))

    def test_update_library(self):
        with mock.patch.object(self.vmm_client, "ps_client") as m_ps_client:
            self.vmm_client.update_library("/fake/goldimages/path")
            m_ps_client.execute_async.assert_called_once_with(
                "Update-Library",
                json_params="{\"goldimage_disk_path\": "
                            "\"/fake/goldimages/path\"}",
                num_retries=3)