def test_get_all_clusters_valid(self, http_client):
    """
    Get all clusters.

    This testcase checks that get_all_clusters() returns a ModelList
    containing exactly one cluster and that to_json_dict() serializes
    it to the expected plain-dict form.
    """
    http_client_mock = MagicMock()
    http_client.return_value = http_client_mock
    mocked_code = "200"
    mocked_content = "text/plain"
    expected_output = {
        'items': [{
            'cluster_name': u'test1',
            'version': u'HDP-1.2.1'
        }]
    }
    # Read the canned REST response; use a context manager so the
    # fixture file handle is closed (the original open(...).read()
    # leaked the file object).
    with open('json/get_all_clusters.json', 'r') as fixture:
        mocked_response = fixture.read()
    # invoke() returns a (body, status_code, content_type) triple.
    http_client_mock.invoke.return_value = mocked_response, mocked_code, mocked_content
    client = AmbariClient("localhost", 8080, "admin", "admin", version=1,
                          client=http_client_mock)
    all_clusters = client.get_all_clusters()
    self.assertEqual(len(all_clusters), 1,
                     "There should be a cluster from the response")
    self.assertEqual(all_clusters.to_json_dict(), expected_output,
                     "to_json_dict should convert ModelList")
def main(): """ This method has few examples on how to use the ambari_client api """ path = os.getcwd() ; print path sys.path.append(path) logging.basicConfig(filename="ambari_client.log", level=logging.DEBUG ,filemode="w") logging.info("Program started") client = AmbariClient("localhost", 8080, "admin","admin",version=1) all_clusters = client.get_all_clusters() print all_clusters print all_clusters.to_json_dict() print"\n" cluster = client.get_cluster('test1') print cluster print"\n" # serviceList = cluster.get_all_services() # print serviceList # print"\n" # # # for service in serviceList: # print str(service.service_name)+" = "+str(service.state) # print"\n" ganglia = cluster.get_service("GANGLIA") print ganglia.state print"\n" # cluster_ref = ganglia.clusterRef # print cluster_ref.cluster_name # print"\n" ganglia.stop()
def test_get_all_clusters_valid(self, http_client):
    """
    Get all clusters.

    This testcase checks that get_all_clusters() returns a ModelList
    of one cluster and that to_json_dict() converts it correctly.
    """
    http_client_mock = MagicMock()
    http_client.return_value = http_client_mock
    mocked_code = "200"
    mocked_content = "text/plain"
    expected_output = {'items': [{'cluster_name': u'test1',
                                  'version': u'HDP-1.2.1'}]}
    # Load the canned REST response with a context manager so the
    # fixture file is closed (open(...).read() leaked the handle).
    with open('json/get_all_clusters.json', 'r') as fixture:
        mocked_response = fixture.read()
    # invoke() yields a (body, status_code, content_type) triple.
    http_client_mock.invoke.return_value = mocked_response, mocked_code, mocked_content
    client = AmbariClient("localhost", 8080, "admin", "admin", version=1,
                          client=http_client_mock)
    all_clusters = client.get_all_clusters()
    self.assertEqual(len(all_clusters), 1,
                     "There should be a cluster from the response")
    self.assertEqual(all_clusters.to_json_dict(), expected_output,
                     "to_json_dict should convert ModelList")
def main(): path = os.getcwd() ; print path sys.path.append(path) logging.basicConfig(filename="ambari_api.log", level=logging.DEBUG , filemode="w") logging.info("Program started") client = AmbariClient("localhost", 8080, "admin", "admin", version=1) print client.version print client.host_url print"\n" ############################### # cluster creation ############################### # 1) create cluster cluster = client.create_cluster("test33", "HDP-1.3.0") print cluster cluster = client.get_cluster('test33') print cluster print cluster.to_json_dict() print"\n" # 2) create services services_list = ["HDFS", "MAPREDUCE", "NAGIOS", "GANGLIA"] s2 = cluster.create_services(services_list) print s2 s2 = cluster.create_service("ZOOKEEPER") print s2 # 3) create global config s3 = cluster.add_config("global", "version1" , {}) print s3 s3 = cluster.add_config("core-site", "version1" , {}) print s3 s3 = cluster.add_config("hdfs-site", "version1" , {}) print s3 s3 = cluster.add_config("mapred-site", "version1" , {}) print s3 # s3 = cluster.add_config("hbase-site", "version1" , {}) # print s3 # s3 = cluster.add_config("oozie-site", "version1" , {}) # print s3 # s3 = cluster.add_config("hive-site", "version1" , {}) # print s3 # s3 = cluster.add_config("webhcat-site", "version1" , {}) # print s3 # hdfs_components = client.get_components("1.3.0", "HDFS") # print hdfs_components # mr_components = client.get_components("1.3.0", "MAPREDUCE") # print mr_components # ganglia_components = client.get_components("1.3.0", "GANGLIA") # print ganglia_components # nagios_components = client.get_components("1.3.0", "NAGIOS") # print nagios_components # 4) add service components s2 = cluster.create_service_components("1.3.0", "HDFS") print s2 s2 = cluster.create_service_components("1.3.0", "MAPREDUCE") print s2 s2 = cluster.create_service_components("1.3.0", "GANGLIA") print s2 s2 = cluster.create_service_components("1.3.0", "NAGIOS") print s2 all_hosts = client.get_all_hosts() h_l = 
[x.host_name for x in all_hosts] print h_l # 5) add hosts s3 = cluster.create_hosts(h_l) print s3 print"\n" # 6) add hosts roles host1 = cluster.get_host('r01wn01') print host1 s4 = host1.assign_role("NAMENODE") print s4 print"\n" # 7) add hosts roles s4 = cluster.start_all_services() print s4 print"\n" all_clusters = client.get_all_clusters() print all_clusters.to_json_dict() print all_clusters print"\n" all_hosts = client.get_all_hosts() print all_hosts print all_hosts.to_json_dict() print"\n" serviceList = cluster.get_all_services() print serviceList print serviceList.to_json_dict() print"\n"
def main(): path = os.getcwd() print path sys.path.append(path) logging.basicConfig(filename="ambari_api.log", level=logging.DEBUG, filemode="w") logging.info("Program started") client = AmbariClient("localhost", 8080, "admin", "admin", version=1) print client.version print client.host_url print "\n" ############################### # cluster creation ############################### # 1) create cluster cluster = client.create_cluster("test33", "HDP-1.3.0") print cluster cluster = client.get_cluster("test33") print cluster print cluster.to_json_dict() print "\n" # 2) create services services_list = ["HDFS", "MAPREDUCE", "NAGIOS", "GANGLIA"] s2 = cluster.create_services(services_list) print s2 s2 = cluster.create_service("ZOOKEEPER") print s2 # 3) create global config s3 = cluster.add_config("global", "version1", {}) print s3 s3 = cluster.add_config("core-site", "version1", {}) print s3 s3 = cluster.add_config("hdfs-site", "version1", {}) print s3 s3 = cluster.add_config("mapred-site", "version1", {}) print s3 # s3 = cluster.add_config("hbase-site", "version1" , {}) # print s3 # s3 = cluster.add_config("oozie-site", "version1" , {}) # print s3 # s3 = cluster.add_config("hive-site", "version1" , {}) # print s3 # s3 = cluster.add_config("webhcat-site", "version1" , {}) # print s3 # hdfs_components = client.get_components("1.3.0", "HDFS") # print hdfs_components # mr_components = client.get_components("1.3.0", "MAPREDUCE") # print mr_components # ganglia_components = client.get_components("1.3.0", "GANGLIA") # print ganglia_components # nagios_components = client.get_components("1.3.0", "NAGIOS") # print nagios_components # 4) add service components s2 = cluster.create_service_components("1.3.0", "HDFS") print s2 s2 = cluster.create_service_components("1.3.0", "MAPREDUCE") print s2 s2 = cluster.create_service_components("1.3.0", "GANGLIA") print s2 s2 = cluster.create_service_components("1.3.0", "NAGIOS") print s2 all_hosts = client.get_all_hosts() h_l = [x.host_name for 
x in all_hosts] print h_l # 5) add hosts s3 = cluster.create_hosts(h_l) print s3 print "\n" # 6) add hosts roles host1 = cluster.get_host("r01wn01") print host1 s4 = host1.assign_role("NAMENODE") print s4 print "\n" # 7) add hosts roles s4 = cluster.start_all_services() print s4 print "\n" all_clusters = client.get_all_clusters() print all_clusters.to_json_dict() print all_clusters print "\n" all_hosts = client.get_all_hosts() print all_hosts print all_hosts.to_json_dict() print "\n" serviceList = cluster.get_all_services() print serviceList print serviceList.to_json_dict() print "\n"
def main(): path = os.getcwd() print path sys.path.append(path) logging.basicConfig(filename="ambari_api.log", level=logging.DEBUG, filemode="w") logging.info("Program started") client = AmbariClient("localhost", 8080, "admin", "admin", version=1) print client.version print client.host_url print "\n" ###################################### # High level ###################################### all_clusters = client.get_all_clusters() print all_clusters.to_json_dict() print all_clusters all_hosts = client.get_all_hosts() print all_hosts print all_hosts.to_json_dict() print "\n" ###################################### # going into a specific cluster ###################################### cluster = client.get_cluster('test46') print cluster print cluster.to_json_dict() print "\n" clusters_hosts = cluster.get_all_hosts() print clusters_hosts.to_json_dict() print clusters_hosts print "\n" host1 = cluster.get_host('r01wn01') print host1 print host1.clusterRef.cluster_name print host1.to_json_dict() print "\n" print "==================== host components ====================\n" host1_comp = host1.get_host_components() print host1_comp print host1_comp.to_json_dict() print "\n" nn = host1.get_host_component("NAMENODE") print nn print nn.to_json_dict() print nn.clusterRef.cluster_name metric_json = nn.get_metrics() print metric_json["metrics"]["cpu"] print "\n" serviceList = cluster.get_all_services() print serviceList print serviceList.to_json_dict() print "\n" ganglia = cluster.get_service("GANGLIA") print ganglia print ganglia.to_json_dict() print "\n" print "==================== service components ====================\n" ganglia_comps = ganglia.get_service_components() print ganglia_comps print ganglia_comps.to_json_dict() print "\n" ganglia_comp1 = ganglia.get_service_component('GANGLIA_MONITOR') print ganglia_comp1 print ganglia_comp1.to_json_dict() print ganglia_comp1.clusterRef.cluster_name print "\n" mr = cluster.get_service("MAPREDUCE") print mr print mr.to_json_dict() 
print "\n" mr_comp1 = mr.get_service_component('TASKTRACKER') print mr_comp1 print mr_comp1.to_json_dict() print mr_comp1.clusterRef.cluster_name metric_json = mr_comp1.get_metrics() print metric_json["metrics"]["cpu"] print "\n" ###################################### # configurations ###################################### hdfs_config = cluster.get_hdfs_site_config() print hdfs_config print hdfs_config.properties global_config = cluster.get_global_config() core_config = cluster.get_core_site_config() mapred_config = cluster.get_mapred_site_config() print global_config print core_config print mapred_config print global_config.clusterRef.cluster_name print core_config.clusterRef.cluster_name print mapred_config.clusterRef.cluster_name hdfs_config.properties["dfs.replication.max"] = 51 #print hdfs_config.properties hdfs_config1 = cluster.update_hdfs_site_config(hdfs_config) print hdfs_config1.properties
def main(): path = os.getcwd() ; print path sys.path.append(path) logging.basicConfig(filename="ambari_api.log", level=logging.DEBUG , filemode="w") logging.info("Program started") client = AmbariClient("localhost", 8080, "admin", "admin", version=1) print client.version print client.host_url print"\n" ###################################### # High level ###################################### all_clusters = client.get_all_clusters() print all_clusters.to_json_dict() print all_clusters print"\n" all_hosts = client.get_all_hosts() print all_hosts print all_hosts.to_json_dict() print"\n" ###################################### # going into a specific cluster ###################################### cluster = client.get_cluster('test46') print cluster print cluster.to_json_dict() print"\n" clusters_hosts = cluster.get_all_hosts() print clusters_hosts.to_json_dict() print clusters_hosts print"\n" #host1 = cluster.get_host('r01wn01') host1 = cluster.get_host('r01hn01') print host1 print host1.clusterRef.cluster_name print host1.to_json_dict() print"\n" host1_comp = host1.get_host_components() print host1_comp print host1_comp.to_json_dict() print"\n" nn = host1.get_host_component("NAMENODE") print nn print nn.to_json_dict() print nn.clusterRef.cluster_name print"\n" serviceList = cluster.get_all_services() print serviceList print serviceList.to_json_dict() print"\n" ganglia = cluster.get_service("GANGLIA") print ganglia print ganglia.to_json_dict() print"\n" ganglia_comps = ganglia.get_service_components() print ganglia_comps print ganglia_comps.to_json_dict() print"\n" ganglia_comp1 = ganglia.get_service_component('GANGLIA_MONITOR') print ganglia_comp1 print ganglia_comp1.to_json_dict() print ganglia_comp1.clusterRef.cluster_name print"\n" s = client.get_config("1.3.0", "HDFS") print s print"\n" s = client.get_components("1.3.0", "HDFS") print s
def main(): path = os.getcwd() ; print path sys.path.append(path) logging.basicConfig(filename="ambari_api.log", level=logging.DEBUG , filemode="w") logging.info("Program started") client = AmbariClient("localhost", 8080, "admin", "admin", version=1) print client.version print client.host_url print"\n" ###################################### # High level ###################################### all_clusters = client.get_all_clusters() print all_clusters.to_json_dict() print all_clusters all_hosts = client.get_all_hosts() print all_hosts print all_hosts.to_json_dict() print"\n" ###################################### # going into a specific cluster ###################################### cluster = client.get_cluster('test46') print cluster print cluster.to_json_dict() print"\n" clusters_hosts = cluster.get_all_hosts() print clusters_hosts.to_json_dict() print clusters_hosts print"\n" host1 = cluster.get_host('r01wn01') print host1 print host1.clusterRef.cluster_name print host1.to_json_dict() print"\n" print "==================== host components ====================\n" host1_comp = host1.get_host_components() print host1_comp print host1_comp.to_json_dict() print"\n" nn = host1.get_host_component("NAMENODE") print nn print nn.to_json_dict() print nn.clusterRef.cluster_name metric_json = nn.get_metrics() print metric_json["metrics"]["cpu"] print"\n" serviceList = cluster.get_all_services() print serviceList print serviceList.to_json_dict() print"\n" ganglia = cluster.get_service("GANGLIA") print ganglia print ganglia.to_json_dict() print"\n" print "==================== service components ====================\n" ganglia_comps = ganglia.get_service_components() print ganglia_comps print ganglia_comps.to_json_dict() print"\n" ganglia_comp1 = ganglia.get_service_component('GANGLIA_MONITOR') print ganglia_comp1 print ganglia_comp1.to_json_dict() print ganglia_comp1.clusterRef.cluster_name print"\n" mr = cluster.get_service("MAPREDUCE") print mr print mr.to_json_dict() 
print"\n" mr_comp1 = mr.get_service_component('TASKTRACKER') print mr_comp1 print mr_comp1.to_json_dict() print mr_comp1.clusterRef.cluster_name metric_json = mr_comp1.get_metrics() print metric_json["metrics"]["cpu"] print"\n" ###################################### # configurations ###################################### hdfs_config = cluster.get_hdfs_site_config() print hdfs_config print hdfs_config.properties global_config = cluster.get_global_config() core_config = cluster.get_core_site_config() mapred_config = cluster.get_mapred_site_config() print global_config print core_config print mapred_config print global_config.clusterRef.cluster_name print core_config.clusterRef.cluster_name print mapred_config.clusterRef.cluster_name hdfs_config.properties["dfs.replication.max"] = 51 #print hdfs_config.properties hdfs_config1 = cluster.update_hdfs_site_config(hdfs_config) print hdfs_config1.properties
def main(): path = os.getcwd() print path sys.path.append(path) logging.basicConfig(filename="ambari_api.log", level=logging.DEBUG, filemode="w") logging.info("Program started") client = AmbariClient("localhost", 8080, "admin", "admin", version=1) print client.version print client.host_url print "\n" ###################################### # High level ###################################### all_clusters = client.get_all_clusters() print all_clusters.to_json_dict() print all_clusters print "\n" all_hosts = client.get_all_hosts() print all_hosts print all_hosts.to_json_dict() print "\n" ###################################### # going into a specific cluster ###################################### cluster = client.get_cluster('test46') print cluster print cluster.to_json_dict() print "\n" clusters_hosts = cluster.get_all_hosts() print clusters_hosts.to_json_dict() print clusters_hosts print "\n" #host1 = cluster.get_host('r01wn01') host1 = cluster.get_host('r01hn01') print host1 print host1.clusterRef.cluster_name print host1.to_json_dict() print "\n" host1_comp = host1.get_host_components() print host1_comp print host1_comp.to_json_dict() print "\n" nn = host1.get_host_component("NAMENODE") print nn print nn.to_json_dict() print nn.clusterRef.cluster_name print "\n" serviceList = cluster.get_all_services() print serviceList print serviceList.to_json_dict() print "\n" ganglia = cluster.get_service("GANGLIA") print ganglia print ganglia.to_json_dict() print "\n" ganglia_comps = ganglia.get_service_components() print ganglia_comps print ganglia_comps.to_json_dict() print "\n" ganglia_comp1 = ganglia.get_service_component('GANGLIA_MONITOR') print ganglia_comp1 print ganglia_comp1.to_json_dict() print ganglia_comp1.clusterRef.cluster_name print "\n" s = client.get_config("1.3.0", "HDFS") print s print "\n" s = client.get_components("1.3.0", "HDFS") print s