def test_get_cluster_valid(self, http_client):
    """
    Get a single cluster.
    This testcase checks if get_cluster returns a ClusterModel.
    """
    http_client_mock = MagicMock()
    http_client.returned_obj = http_client_mock
    mocked_code = "200"
    mocked_content = "text/plain"

    linestring = open('json/get_cluster.json', 'r').read()
    mocked_response = linestring
    expected_dict_output = {'cluster_name': u'test1', 'version': u'HDP-1.2.1'}

    http_client_mock.invoke.return_value = mocked_response, mocked_code, mocked_content
    client = AmbariClient("localhost", 8080, "admin", "admin", version=1,
                          client=http_client_mock)
    cluster = client.get_cluster('test1')

    self.assertEqual(cluster.cluster_name, "test1",
                     "cluster_name should be test1")
    self.assertEqual(cluster.to_json_dict(), expected_dict_output,
                     "to_json_dict should convert ClusterModel")

def test_get_cluster_services_valid(self, http_client):
    """
    Get all services of a cluster.
    This testcase checks if get_all_services returns a ModelList of services.
    """
    http_client_mock = MagicMock()
    http_client.returned_obj = http_client_mock
    mocked_code = "200"
    mocked_content = "text/plain"
    expected_dict_output = {'cluster_name': u'test1', 'version': u'HDP-1.2.1'}

    http_client_mock.invoke.side_effect = http_client_invoke_side_effects
    client = AmbariClient("localhost", 8080, "admin", "admin", version=1,
                          client=http_client_mock)
    cluster = client.get_cluster('test1')
    serviceList = cluster.get_all_services()

    self.assertEqual(cluster.cluster_name, "test1",
                     "cluster_name should be test1")
    self.assertEqual(cluster.to_json_dict(), expected_dict_output,
                     "to_json_dict should convert ClusterModel")
    self.assertEqual(len(serviceList), 3,
                     "there should be 3 services in the response")

def create_component(self, http_client_mock=MagicMock()):
    """
    Helper: builds an AmbariClient against the mocked HTTP client and returns
    the DATANODE host component of host 'myhost' in cluster 'test1'.
    """
    http_client_mock.invoke.side_effect = HttpClientInvoker.http_client_invoke_side_effects
    client = AmbariClient("localhost", 8080, "admin", "admin", version=1,
                          client=http_client_mock)
    cluster = client.get_cluster('test1')
    host = cluster.get_host('myhost')
    component = host.get_host_component("DATANODE")
    return component

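# A minimal usage sketch for the create_component helper above. The test name
# is illustrative; it assumes the mocked responses populate clusterRef on the
# host-component model the same way they do for the service model in the tests
# below.
def test_get_host_component_valid(self, http_client):
    """
    The helper should return the mocked DATANODE host component of 'myhost'.
    """
    component = self.create_component()
    self.assertIsNotNone(component, "the DATANODE host component should be found")
    # assumption: clusterRef is filled in for host components as well
    self.assertEqual(component.clusterRef.cluster_name, "test1",
                     "clusterRef should point back to the owning cluster")
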
def test_exceptions(self, http_client):
    """
    Test exceptions from ambari_client.core.errors
    """
    http_client_mock = MagicMock()
    http_client.returned_obj = http_client_mock
    mocked_code = "200"
    mocked_content = "text/plain"

    http_client_mock.invoke.side_effect = http_client_invoke_side_effects
    client = AmbariClient("localhost", 8080, "admin", "admin", version=1,
                          client=http_client_mock)
    cluster = client.get_cluster('test1')

    try:
        cluster.delete_host('deleted_nonexistant_cluster')
        self.fail('Exception should have been thrown!')
    except BadRequest, ex:
        self.assertEquals(
            str(ex),
            'exception: 400. Attempted to add unknown hosts to a cluster. '
            'These hosts have not been registered with the server: dev05')

def create_cluster(self, http_client_mock=MagicMock()):
    """
    Helper: builds an AmbariClient against the mocked HTTP client and returns
    the 'test1' cluster.
    """
    http_client_mock.invoke.side_effect = HttpClientInvoker.http_client_invoke_side_effects
    client = AmbariClient("localhost", 8080, "admin", "admin", version=1,
                          client=http_client_mock)
    return client.get_cluster('test1')

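# A minimal usage sketch for the create_cluster helper above. The test name is
# illustrative; only the cluster_name field, which the other tests in this
# section also assert on, is checked here.
def test_create_cluster_helper(self, http_client):
    """
    The helper should return the mocked 'test1' cluster.
    """
    cluster = self.create_cluster()
    self.assertEqual(cluster.cluster_name, "test1",
                     "cluster_name should be test1")
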
def main():
    """
    This method has a few examples of how to use the ambari_client api.
    """
    path = os.getcwd()
    print path
    sys.path.append(path)

    logging.basicConfig(filename="ambari_client.log", level=logging.DEBUG, filemode="w")
    logging.info("Program started")

    client = AmbariClient("localhost", 8080, "admin", "admin", version=1)

    all_clusters = client.get_all_clusters()
    print all_clusters
    print all_clusters.to_json_dict()
    print "\n"

    cluster = client.get_cluster('test1')
    print cluster
    print "\n"

    # serviceList = cluster.get_all_services()
    # print serviceList
    # print "\n"
    #
    # for service in serviceList:
    #     print str(service.service_name) + " = " + str(service.state)
    # print "\n"

    ganglia = cluster.get_service("GANGLIA")
    print ganglia.state
    print "\n"

    # cluster_ref = ganglia.clusterRef
    # print cluster_ref.cluster_name
    # print "\n"

    ganglia.stop()

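# A small follow-up sketch for the example above. Stopping a service in Ambari
# is handled asynchronously on the server, so the state read right after
# ganglia.stop() may still be "STARTED"; re-fetching the service shows the
# current value. Only calls already used in these examples appear here.
def check_ganglia_state():
    client = AmbariClient("localhost", 8080, "admin", "admin", version=1)
    cluster = client.get_cluster('test1')
    ganglia = cluster.get_service("GANGLIA")
    print ganglia.state
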
def test_get_cluster_service_valid(self, http_client):
    """
    Get a service of a cluster.
    This testcase checks if get_service returns a ServiceModel.
    """
    http_client_mock = MagicMock()
    http_client.returned_obj = http_client_mock
    mocked_code = "200"
    mocked_content = "text/plain"
    expected_dict_output = {'cluster_name': u'test1', 'version': u'HDP-1.2.1'}

    http_client_mock.invoke.side_effect = http_client_invoke_side_effects
    client = AmbariClient("localhost", 8080, "admin", "admin", version=1,
                          client=http_client_mock)
    cluster = client.get_cluster('test1')
    serviceList = cluster.get_all_services()
    ganglia = cluster.get_service("GANGLIA")

    self.assertEqual(cluster.cluster_name, "test1",
                     "cluster_name should be test1")
    self.assertEqual(cluster.to_json_dict(), expected_dict_output,
                     "to_json_dict should convert ClusterModel")
    self.assertEqual(len(serviceList), 3,
                     "there should be 3 services in the response")
    self.assertEqual(str(ganglia.state), "STARTED",
                     "the GANGLIA service state should be fetched as STARTED")
    self.assertEqual(ganglia.clusterRef.cluster_name, cluster.cluster_name,
                     "the clusterRef value for the service should be fetched")

def main():
    path = os.getcwd()
    print path
    sys.path.append(path)

    logging.basicConfig(filename="ambari_api.log", level=logging.DEBUG, filemode="w")
    logging.info("Program started")

    client = AmbariClient("localhost", 8080, "admin", "admin", version=1)
    print client.version
    print client.host_url
    print "\n"

    ######################################
    # High level
    ######################################
    all_clusters = client.get_all_clusters()
    print all_clusters.to_json_dict()
    print all_clusters

    all_hosts = client.get_all_hosts()
    print all_hosts
    print all_hosts.to_json_dict()
    print "\n"

    ######################################
    # going into a specific cluster
    ######################################
    cluster = client.get_cluster('test46')
    print cluster
    print cluster.to_json_dict()
    print "\n"

    clusters_hosts = cluster.get_all_hosts()
    print clusters_hosts.to_json_dict()
    print clusters_hosts
    print "\n"

    host1 = cluster.get_host('r01wn01')
    print host1
    print host1.clusterRef.cluster_name
    print host1.to_json_dict()
    print "\n"

    print "==================== host components ====================\n"
    host1_comp = host1.get_host_components()
    print host1_comp
    print host1_comp.to_json_dict()
    print "\n"

    nn = host1.get_host_component("NAMENODE")
    print nn
    print nn.to_json_dict()
    print nn.clusterRef.cluster_name
    metric_json = nn.get_metrics()
    print metric_json["metrics"]["cpu"]
    print "\n"

    serviceList = cluster.get_all_services()
    print serviceList
    print serviceList.to_json_dict()
    print "\n"

    ganglia = cluster.get_service("GANGLIA")
    print ganglia
    print ganglia.to_json_dict()
    print "\n"

    print "==================== service components ====================\n"
    ganglia_comps = ganglia.get_service_components()
    print ganglia_comps
    print ganglia_comps.to_json_dict()
    print "\n"

    ganglia_comp1 = ganglia.get_service_component('GANGLIA_MONITOR')
    print ganglia_comp1
    print ganglia_comp1.to_json_dict()
    print ganglia_comp1.clusterRef.cluster_name
    print "\n"

    mr = cluster.get_service("MAPREDUCE")
    print mr
    print mr.to_json_dict()
    print "\n"

    mr_comp1 = mr.get_service_component('TASKTRACKER')
    print mr_comp1
    print mr_comp1.to_json_dict()
    print mr_comp1.clusterRef.cluster_name
    metric_json = mr_comp1.get_metrics()
    print metric_json["metrics"]["cpu"]
    print "\n"

    ######################################
    # configurations
    ######################################
    hdfs_config = cluster.get_hdfs_site_config()
    print hdfs_config
    print hdfs_config.properties

    global_config = cluster.get_global_config()
    core_config = cluster.get_core_site_config()
    mapred_config = cluster.get_mapred_site_config()
    print global_config
    print core_config
    print mapred_config
    print global_config.clusterRef.cluster_name
    print core_config.clusterRef.cluster_name
    print mapred_config.clusterRef.cluster_name

    hdfs_config.properties["dfs.replication.max"] = 51
    # print hdfs_config.properties
    hdfs_config1 = cluster.update_hdfs_site_config(hdfs_config)
    print hdfs_config1.properties

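# A hedged sketch of undoing the dfs.replication.max change made at the end of
# the example above, using only calls already demonstrated there
# (get_hdfs_site_config / update_hdfs_site_config). The caller is expected to
# have saved the original value before the update; nothing here is an official
# rollback API.
def revert_hdfs_replication_max(cluster, original_value):
    hdfs_config = cluster.get_hdfs_site_config()
    hdfs_config.properties["dfs.replication.max"] = original_value
    updated = cluster.update_hdfs_site_config(hdfs_config)
    print updated.properties["dfs.replication.max"]
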
def main():
    path = os.getcwd()
    print path
    sys.path.append(path)

    logging.basicConfig(filename="ambari_api.log", level=logging.DEBUG, filemode="w")
    logging.info("Program started")

    client = AmbariClient("localhost", 8080, "admin", "admin", version=1)
    print client.version
    print client.host_url
    print "\n"

    ######################################
    # High level
    ######################################
    all_clusters = client.get_all_clusters()
    print all_clusters.to_json_dict()
    print all_clusters
    print "\n"

    all_hosts = client.get_all_hosts()
    print all_hosts
    print all_hosts.to_json_dict()
    print "\n"

    ######################################
    # going into a specific cluster
    ######################################
    cluster = client.get_cluster('test46')
    print cluster
    print cluster.to_json_dict()
    print "\n"

    clusters_hosts = cluster.get_all_hosts()
    print clusters_hosts.to_json_dict()
    print clusters_hosts
    print "\n"

    # host1 = cluster.get_host('r01wn01')
    host1 = cluster.get_host('r01hn01')
    print host1
    print host1.clusterRef.cluster_name
    print host1.to_json_dict()
    print "\n"

    host1_comp = host1.get_host_components()
    print host1_comp
    print host1_comp.to_json_dict()
    print "\n"

    nn = host1.get_host_component("NAMENODE")
    print nn
    print nn.to_json_dict()
    print nn.clusterRef.cluster_name
    print "\n"

    serviceList = cluster.get_all_services()
    print serviceList
    print serviceList.to_json_dict()
    print "\n"

    ganglia = cluster.get_service("GANGLIA")
    print ganglia
    print ganglia.to_json_dict()
    print "\n"

    ganglia_comps = ganglia.get_service_components()
    print ganglia_comps
    print ganglia_comps.to_json_dict()
    print "\n"

    ganglia_comp1 = ganglia.get_service_component('GANGLIA_MONITOR')
    print ganglia_comp1
    print ganglia_comp1.to_json_dict()
    print ganglia_comp1.clusterRef.cluster_name
    print "\n"

    s = client.get_config("1.3.0", "HDFS")
    print s
    print "\n"

    s = client.get_components("1.3.0", "HDFS")
    print s

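# A short sketch that prints the state of every service in a cluster. The
# service_name/state attributes are the same ones used by the commented-out
# loop in the first example of this section; the cluster name 'test46' follows
# the examples above and is only illustrative.
def print_service_states():
    client = AmbariClient("localhost", 8080, "admin", "admin", version=1)
    cluster = client.get_cluster('test46')
    for service in cluster.get_all_services():
        print str(service.service_name) + " = " + str(service.state)
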
def main():
    path = os.getcwd()
    print path
    sys.path.append(path)

    logging.basicConfig(filename="ambari_api.log", level=logging.DEBUG, filemode="w")
    logging.info("Program started")

    client = AmbariClient("localhost", 8080, "admin", "admin", version=1)
    print client.version
    print client.host_url
    print "\n"

    # s = client.get_config("1.3.0", "HDFS")
    # print s
    #
    # s = client.get_components("1.3.0", "HDFS")
    # print s
    #
    # mycluster = client.create_cluster("test46", "HDP-1.3.0")
    # print mycluster

    mycluster = client.get_cluster('test46')
    print mycluster
    print mycluster.to_json_dict()
    print "\n"

    # services_list = ["HDFS", "MAPREDUCE", "NAGIOS", "GANGLIA"]
    # s2 = mycluster.create_services(services_list)
    # print s2
    #
    # propr_dict = {"dfs_name_dir":"/data/1/hadoop/hdfs/namenode,/data/2/hadoop/hdfs/namenode,/data/3/hadoop/hdfs/namenode,/data/4/hadoop/hdfs/namenode,/data/5/hadoop/hdfs/namenode,/data/6/hadoop/hdfs/namenode,/data/7/hadoop/hdfs/namenode,/data/8/hadoop/hdfs/namenode", "namenode_heapsize":"1024m", "namenode_opt_newsize":"200m", "fs_checkpoint_dir":"/data/1/hadoop/hdfs/namesecondary", "dfs_data_dir":"/data/1/hadoop/hdfs/data,/data/2/hadoop/hdfs/data,/data/3/hadoop/hdfs/data,/data/4/hadoop/hdfs/data,/data/5/hadoop/hdfs/data,/data/6/hadoop/hdfs/data,/data/7/hadoop/hdfs/data,/data/8/hadoop/hdfs/data,/data/9/hadoop/hdfs/data,/data/10/hadoop/hdfs/data", "dtnode_heapsize":"1024m", "dfs_datanode_failed_volume_tolerated":"0", "dfs_webhdfs_enabled":"true", "hadoop_heapsize":"1024", "datanode_du_reserved":"0", "fs_checkpoint_period":"21600", "fs_checkpoint_size":"67108864", "hdfs_log_dir_prefix":"/var/log/hadoop", "hadoop_pid_dir_prefix":"/var/run/hadoop", "namenode_opt_maxnewsize":"200m", "dfs_exclude":"dfs.exclude", "dfs_include":"dfs.include", "dfs_replication":"3", "dfs_block_local_path_access_user":"******", "dfs_datanode_data_dir_perm":"750", "security_enabled":"false", "namenode_formatted_mark_dir":"/var/run/hadoop/hdfs/namenode/formatted/", "hcat_conf_dir":"", "jtnode_opt_newsize":"200m", "jtnode_opt_maxnewsize":"200m", "jtnode_heapsize":"1024m", "mapred_local_dir":"/data/1/hadoop/mapred,/data/2/hadoop/mapred,/data/3/hadoop/mapred,/data/4/hadoop/mapred,/data/5/hadoop/mapred,/data/6/hadoop/mapred,/data/7/hadoop/mapred,/data/8/hadoop/mapred,/data/9/hadoop/mapred,/data/10/hadoop/mapred", "mapred_map_tasks_max":"4", "mapred_red_tasks_max":"2", "mapred_child_java_opts_sz":"768", "scheduler_name":"org.apache.hadoop.mapred.CapacityTaskScheduler", "mapred_cluster_map_mem_mb":"1536", "mapred_cluster_red_mem_mb":"2048", "mapred_cluster_max_map_mem_mb":"6144", "mapred_cluster_max_red_mem_mb":"4096", "mapred_job_map_mem_mb":"1536", "mapred_job_red_mem_mb":"2048", "io_sort_mb":"200", "io_sort_spill_percent":"0.9", "mapreduce_userlog_retainhours":"24", "maxtasks_per_job":"-1", "lzo_enabled":"true", "snappy_enabled":"true", "rca_enabled":"true", "mapred_system_dir":"/mapred/system", "mapred_hosts_exclude":"mapred.exclude", "mapred_hosts_include":"mapred.include", "mapred_jobstatus_dir":"file:////mapred/jobstatus", "nagios_web_login":"******", "nagios_web_password":"******", "nagios_contact":"*****@*****.**", "nagios_group":"nagios", "hbase_conf_dir":"/etc/hbase", "proxyuser_group":"users", "dfs_datanode_address":"50010", "dfs_datanode_http_address":"50075", "gpl_artifacts_download_url":"", "apache_artifacts_download_url":"", "ganglia_runtime_dir":"/var/run/ganglia/hdp", "java64_home":"/usr/jdk/jdk1.6.0_31", "run_dir":"/var/run/hadoop", "hadoop_conf_dir":"/etc/hadoop", "hdfs_user":"******", "mapred_user":"******", "hbase_user":"******", "hive_user":"******", "hcat_user":"******", "webhcat_user":"******", "oozie_user":"******", "zk_user":"******", "gmetad_user":"******", "gmond_user":"******", "nagios_user":"******", "smokeuser":"******", "user_group":"hadoop", "rrdcached_base_dir":"/var/lib/ganglia/rrds"}
    # print propr_dict
    # s3 = mycluster.add_config("global", "version1", propr_dict)
    # print s3
    #
    # s2 = mycluster.create_service_components("1.3.0", "HDFS")
    # print s2
    # s2 = mycluster.create_service_components("1.3.0", "MAPREDUCE")
    # print s2
    # s2 = mycluster.create_service_components("1.3.0", "GANGLIA")
    # print s2
    # s2 = mycluster.create_service_components("1.3.0", "NAGIOS")
    # print s2
    #
    # h_l = ['apspal44-83', 'apspal44-84', 'apspal44-85', 'apspal44-86',
    #        'apspal44-87', 'apspal44-88', 'apspal44-89', 'r01hn01',
    #        'r01wn01', 'r01wn02', 'r01wn03']
    # print h_l
    # s3 = mycluster.create_hosts(h_l)
    # print s3
    # print "\n"
    #
    # # 6) add hosts roles
    # host1 = mycluster.get_host('r01hn01')
    # print host1
    # s4 = host1.assign_role("NAMENODE")
    # print s4
    # print "\n"
    #
    # s4 = mycluster.install_all_services()
    # print s4
    # print "\n"
    # s4 = mycluster.start_all_services(run_smoke_test=True)
    # print s4
    # print "\n"
    #
    # s4 = mycluster.stop_all_services()
    # print s4
    # print "\n"
    # s2 = mycluster.create_service("ZOOKEEPER")
    # print s2
    # s2 = mycluster.create_service_components("1.3.0", "ZOOKEEPER")
    # print s2
    #
    # host1 = mycluster.get_host('r01wn01')
    # print host1
    # s4 = host1.assign_role("ZOOKEEPER_SERVER")
    # print s4
    # host1 = mycluster.get_host('r01wn02')
    # print host1
    # s4 = host1.assign_role("ZOOKEEPER_SERVER")
    # print s4
    # host1 = mycluster.get_host('r01wn03')
    # print host1
    # s4 = host1.assign_role("ZOOKEEPER_SERVER")
    # print s4
    # host1 = mycluster.get_host('r01wn03')
    # print host1
    # s4 = host1.assign_role("ZOOKEEPER_CLIENT")
    # print s4

    zk = mycluster.get_service("ZOOKEEPER")
    print zk
    s = zk.stop()
    print s

def create_service(self, http_client_mock=MagicMock()):
    """
    Helper: builds an AmbariClient against the mocked HTTP client and returns
    the GANGLIA service of cluster 'test1'.
    """
    http_client_mock.invoke.side_effect = HttpClientInvoker.http_client_invoke_side_effects
    client = AmbariClient("localhost", 8080, "admin", "admin", version=1,
                          client=http_client_mock)
    cluster = client.get_cluster('test1')
    service = cluster.get_service('GANGLIA')
    return service

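# A minimal usage sketch for the create_service helper above. The test name is
# illustrative; the asserted values mirror what the service test earlier in
# this section checks against the same mocked responses.
def test_create_service_helper(self, http_client):
    """
    The helper should return the mocked GANGLIA service in STARTED state.
    """
    service = self.create_service()
    self.assertEqual(str(service.state), "STARTED",
                     "the GANGLIA service should be STARTED in the mocked response")
    self.assertEqual(service.clusterRef.cluster_name, "test1",
                     "clusterRef should point back to the owning cluster")
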
def main():
    path = os.getcwd()
    print path
    sys.path.append(path)

    logging.basicConfig(filename="ambari_api.log", level=logging.DEBUG, filemode="w")
    logging.info("Program started")

    client = AmbariClient("localhost", 8080, "admin", "admin", version=1)
    print client.version
    print client.host_url
    print "\n"

    ###############################
    # cluster creation
    ###############################

    # 1) create cluster
    cluster = client.create_cluster("test33", "HDP-1.3.0")
    print cluster
    cluster = client.get_cluster("test33")
    print cluster
    print cluster.to_json_dict()
    print "\n"

    # 2) create services
    services_list = ["HDFS", "MAPREDUCE", "NAGIOS", "GANGLIA"]
    s2 = cluster.create_services(services_list)
    print s2
    s2 = cluster.create_service("ZOOKEEPER")
    print s2

    # 3) create configurations
    s3 = cluster.add_config("global", "version1", {})
    print s3
    s3 = cluster.add_config("core-site", "version1", {})
    print s3
    s3 = cluster.add_config("hdfs-site", "version1", {})
    print s3
    s3 = cluster.add_config("mapred-site", "version1", {})
    print s3
    # s3 = cluster.add_config("hbase-site", "version1", {})
    # print s3
    # s3 = cluster.add_config("oozie-site", "version1", {})
    # print s3
    # s3 = cluster.add_config("hive-site", "version1", {})
    # print s3
    # s3 = cluster.add_config("webhcat-site", "version1", {})
    # print s3

    # hdfs_components = client.get_components("1.3.0", "HDFS")
    # print hdfs_components
    # mr_components = client.get_components("1.3.0", "MAPREDUCE")
    # print mr_components
    # ganglia_components = client.get_components("1.3.0", "GANGLIA")
    # print ganglia_components
    # nagios_components = client.get_components("1.3.0", "NAGIOS")
    # print nagios_components

    # 4) add service components
    s2 = cluster.create_service_components("1.3.0", "HDFS")
    print s2
    s2 = cluster.create_service_components("1.3.0", "MAPREDUCE")
    print s2
    s2 = cluster.create_service_components("1.3.0", "GANGLIA")
    print s2
    s2 = cluster.create_service_components("1.3.0", "NAGIOS")
    print s2

    all_hosts = client.get_all_hosts()
    h_l = [x.host_name for x in all_hosts]
    print h_l

    # 5) add hosts
    s3 = cluster.create_hosts(h_l)
    print s3
    print "\n"

    # 6) add host roles
    host1 = cluster.get_host("r01wn01")
    print host1
    s4 = host1.assign_role("NAMENODE")
    print s4
    print "\n"

    # 7) start all services
    s4 = cluster.start_all_services()
    print s4
    print "\n"

    all_clusters = client.get_all_clusters()
    print all_clusters.to_json_dict()
    print all_clusters
    print "\n"

    all_hosts = client.get_all_hosts()
    print all_hosts
    print all_hosts.to_json_dict()
    print "\n"

    serviceList = cluster.get_all_services()
    print serviceList
    print serviceList.to_json_dict()
    print "\n"

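# A hedged sketch of assigning worker roles before starting services, modelled
# on step 6 above. assign_role and the DATANODE/TASKTRACKER component names all
# appear elsewhere in these examples; which roles belong on which hosts is
# cluster-specific, so treat this purely as an illustration.
def assign_worker_roles(cluster, worker_host_names):
    for host_name in worker_host_names:
        host = cluster.get_host(host_name)
        print host.assign_role("DATANODE")
        print host.assign_role("TASKTRACKER")
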