# Create a Hadoop cluster through OpenStack Sahara from a JSON config file,
# print the new cluster id, and configure the cluster's instances over SSH.
#
# Usage: <script> <cluster_name> <config_file_path>
import getpass
import json

cluster_name = sys.argv[1]
config_file_path = sys.argv[2]

# NOTE(review): the credential-reading statements were redacted ('******') in
# the original source; reconstructed here. getpass avoids echoing the password.
user = raw_input('OpenStack User: ')
key = getpass.getpass('OpenStack Password: ')

# NOTE(review): the construction of json_parser was also redacted; the config
# is read as a plain JSON object, which supports the .get() calls below.
with open(config_file_path) as config_file:
    json_parser = json.load(config_file)

project_name = json_parser.get('project_name')
project_id = json_parser.get('project_id')
main_ip = json_parser.get('main_ip')
image_id = json_parser.get('image_id')
net_id = json_parser.get('net_id')
template_id = json_parser.get('cluster_template_id')
key_pair = json_parser.get('keypair_name')
public_keypair_path = json_parser.get('public_keypair_path')
private_keypair_path = json_parser.get('private_keypair_path')

# Authenticate against Keystone and obtain a token-scoped Sahara client.
connector = ConnectionGetter(user, key, project_name, project_id, main_ip)
keystone_util = UtilKeystone(connector.keystone())
token_ref_id = keystone_util.getTokenRef(user, key, project_name).id
sahara_util = UtilSahara(connector.sahara(token_ref_id))

# Launch the cluster from the template, then report its id.
cluster_id = sahara_util.createClusterHadoop(cluster_name, image_id,
                                             template_id, net_id, key_pair)
# print(...) of a single value behaves the same on Python 2 and 3, unlike the
# original bare "print cluster_id" statement (Python-2-only syntax).
print(cluster_id)

# Push the keypair to every instance so they can reach each other over SSH.
instances_ips = sahara_util.get_instances_ips(cluster_id)
configureInstances(instances_ips, public_keypair_path, private_keypair_path)
master_ip = sahara_util.get_master_ip(cluster_id)
# Upload an input file into HDFS on an already-running Sahara cluster.
#
# Usage: <script> <cluster_id> <config_file_path>
import getpass
import json

# NOTE(review): the argument-count guard around printUsage()/exit(1) was lost
# when the source was mangled onto one line; reconstructed here.
if len(sys.argv) < 3:
    printUsage()
    exit(1)

cluster_id = sys.argv[1]
config_file_path = sys.argv[2]

# NOTE(review): credential prompts were redacted ('******') in the original;
# reconstructed with getpass so the password is not echoed.
user = raw_input('OpenStack User: ')
key = getpass.getpass('OpenStack Password: ')

# NOTE(review): json_parser construction was redacted; read the config as a
# plain JSON object (supports the .get() calls below).
with open(config_file_path) as config_file:
    json_parser = json.load(config_file)

exec_local_path = json_parser.get('exec_local_path')
public_key_path = json_parser.get('public_key_path')
keypair_path = json_parser.get('private_keypair_path')
project_name = json_parser.get('project_name')
project_id = json_parser.get('project_id')
main_ip = json_parser.get('main_ip')
input_file_path = json_parser.get('input_file_path')

# Authenticate against Keystone and obtain a token-scoped Sahara client.
connector = ConnectionGetter(user, key, project_name, project_id, main_ip)
keystone_util = UtilKeystone(connector.keystone())
token_ref_id = keystone_util.getTokenRef(user, key, project_name).id
sahara_util = UtilSahara(connector.sahara(token_ref_id))

# Resolve the cluster's addresses and ship the input file to the master node.
instances_ips = sahara_util.get_instances_ips(cluster_id)
master_ip = sahara_util.get_master_ip(cluster_id)
putFileInHDFS(input_file_path, master_ip)