import time
from sys import stderr

import utils

# Tornado RequestHandler method: shows the status of the services running on
# an Adatao cluster by probing each service port on the master node.
def get(self, cluster_name):
    try:
        conn = utils.get_ec2_conn(self)
        (master_nodes, slave_nodes, zoo_nodes) = utils.get_existing_cluster(conn, cluster_name)
        services = ["mesos", "ganglia", "ephemeral_hdfs", "pi", "pa", "gridftp", "spark"]
        service_names = {
            "mesos": "Mesos",
            "ganglia": "Ganglia",
            "ephemeral_hdfs": "Ephemeral HDFS",
            "pa": "Adatao pAnalytics",
            "pi": "Adatao pInsights",
            "gridftp": "Grid FTP",
            "spark": "Spark (after adatao.connect)",
        }
        service_ports = {
            "mesos": 5050,
            "ganglia": 5080,
            "ephemeral_hdfs": 50070,
            "pa": 7911,
            "pi": 8890,
            "gridftp": 5000,
            "spark": 30001,
        }
        # Raises IndexError (caught below) if the cluster has no master node.
        master_dns = master_nodes[0].public_dns_name
        service_links = {
            "mesos": "http://" + master_dns + ":5050",
            "ganglia": "http://" + master_dns + ":5080/ganglia",
            "ephemeral_hdfs": "http://" + master_dns + ":50070",
            "pa": "http://" + master_dns + ":7911",
            "pi": "http://" + master_dns + ":8890",
            "gridftp": "",  # Grid FTP has no web UI to link to
            "spark": "http://" + master_dns + ":30001",
        }
        # Probe each service port on the master to see which services are up.
        service_statuses = {}
        if master_nodes:
            for service in services:
                service_statuses[service] = utils.isOpen(master_dns, service_ports[service])
        self.render('cluster.html',
                    error_msg=None,
                    cluster_name=cluster_name,
                    master_nodes=master_nodes,
                    slave_nodes=slave_nodes,
                    services=services,
                    service_names=service_names,
                    service_statuses=service_statuses,
                    service_links=service_links)
    except Exception as e:
        print >> stderr, e
        self.render('error.html', error_msg=str(e))

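# utils.isOpen is not defined in this file; below is a minimal sketch matching
# how the handlers call it (isOpen(host, port) -> bool), assuming a plain
# TCP connection attempt with a short timeout. The 3-second timeout is an
# assumption, not taken from the original code.
import socket

def isOpen(host, port):
    """Return True if a TCP connection to host:port succeeds within 3 seconds."""
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.settimeout(3)
    try:
        s.connect((host, port))
        return True
    except (socket.timeout, socket.error):
        return False
    finally:
        s.close()
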
# Polls EC2 until every instance has left the 'pending' state and is
# reachable over SSH (port 22), sleeping five seconds between polls.
# `conn` is unused but kept for call-site compatibility.
def wait_for_instances(conn, instances):
    while True:
        for i in instances:
            i.update()  # refresh the cached instance state from EC2
        if any(i.state == 'pending' for i in instances):
            time.sleep(5)
        elif any(not utils.isOpen(i.public_dns_name, 22) for i in instances):
            time.sleep(5)
        else:
            return

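# Hedged usage sketch for wait_for_instances, assuming the classic boto EC2
# API (boto.ec2.connect_to_region, Reservation.instances, Instance.update);
# the region, AMI id, and instance type here are placeholders.
import boto.ec2

def launch_and_wait():
    conn = boto.ec2.connect_to_region('us-east-1')
    # run_instances returns a Reservation; its .instances list holds the
    # boto Instance objects that wait_for_instances polls via .update().
    reservation = conn.run_instances('ami-00000000', min_count=1, max_count=1,
                                     instance_type='m1.large')
    wait_for_instances(conn, reservation.instances)
    return reservation.instances
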
# Same handler shape for a Spark/Shark cluster: probes the standard Mesos,
# Shark, Ganglia, HDFS, and Hadoop MapReduce ports on the master node.
def get(self, cluster_name):
    try:
        conn = utils.get_ec2_conn(self)
        (master_nodes, slave_nodes, zoo_nodes) = utils.get_existing_cluster(conn, cluster_name)
        services = ["mesos", "shark", "ganglia", "ephemeral_hdfs",
                    "persistent_hdfs", "hadoop_mapreduce"]
        service_names = {
            "mesos": "Mesos",
            "shark": "Shark",
            "ganglia": "Ganglia",
            "ephemeral_hdfs": "Ephemeral HDFS",
            "persistent_hdfs": "Persistent HDFS",
            "hadoop_mapreduce": "Hadoop MapReduce",
        }
        service_ports = {
            "mesos": 8080,
            "shark": 10000,
            "ganglia": 5080,
            "ephemeral_hdfs": 50070,
            "persistent_hdfs": 60070,
            "hadoop_mapreduce": 50030,
        }
        # Raises IndexError (caught below) if the cluster has no master node.
        master_dns = master_nodes[0].public_dns_name
        service_links = {
            "mesos": "http://" + master_dns + ":8080",
            "shark": "/sql_console?server=" + master_dns,  # app-internal route
            "ganglia": "http://" + master_dns + ":5080/ganglia",
            "ephemeral_hdfs": "http://" + master_dns + ":50070",
            "persistent_hdfs": "http://" + master_dns + ":60070",
            "hadoop_mapreduce": "http://" + master_dns + ":50030",
        }
        service_statuses = {}
        if master_nodes:
            for service in services:
                service_statuses[service] = utils.isOpen(master_dns, service_ports[service])
                # Once Shark's Thrift port answers, relabel it so the UI
                # advertises the SQL console link.
                if service == "shark" and service_statuses[service]:
                    service_names[service] = "Shark (SQL Console)"
        self.render("cluster.html",
                    error_msg=None,
                    cluster_name=cluster_name,
                    master_nodes=master_nodes,
                    slave_nodes=slave_nodes,
                    services=services,
                    service_names=service_names,
                    service_statuses=service_statuses,
                    service_links=service_links)
    except Exception as e:
        print >> stderr, e
        self.render("error.html", error_msg=str(e))

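# get_existing_cluster is also supplied by utils and not shown here; a
# minimal sketch of the spark-ec2-style lookup it appears to mirror,
# assuming nodes are tagged by EC2 security groups named
# <cluster_name>-master / -slaves / -zoo (an assumption, not confirmed
# by this file):
def get_existing_cluster(conn, cluster_name):
    master_nodes, slave_nodes, zoo_nodes = [], [], []
    for res in conn.get_all_instances():
        # Skip instances that are on their way out.
        active = [i for i in res.instances
                  if i.state not in ('shutting-down', 'terminated')]
        group_names = [g.name for g in res.groups]
        if cluster_name + '-master' in group_names:
            master_nodes += active
        elif cluster_name + '-slaves' in group_names:
            slave_nodes += active
        elif cluster_name + '-zoo' in group_names:
            zoo_nodes += active
    return (master_nodes, slave_nodes, zoo_nodes)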