MIN_NUM_ARGS = 4 def printUsage(): print "python create_cluster.py <cluster_name> <key_pair> <config_file_path>" if (len(sys.argv) < MIN_NUM_ARGS): print "Wrong number of arguments" printUsage() exit(1) cluster_name = sys.argv[1] key_pair = sys.argv[2] config_file_path = sys.argv[3] user = raw_input('OpenStack User: '******'OpenStack Password: '******'project_name') project_id = json_parser.get('project_id') main_ip = json_parser.get('main_ip') connector = ConnectionGetter(user, key, project_name, project_id, main_ip) keystone_util = UtilKeystone(connector.keystone()) token_ref_id = keystone_util.getTokenRef(user, key, project_name).id sahara_util = UtilSahara(connector.sahara(token_ref_id)) cluster_id = sahara_util.createClusterHadoop(cluster_name, image_id, template_id, net_id,key_pair) print cluster_id
#--------------------------------- CONFIGURATIONS --------------------------------------
# Reads command-line parameters, interactive credentials (OpenStack + Gmail
# for notification mail, presumably), and the JSON experiment configuration.
#
# NOTE(review): this chunk was credential-scrubbed — '******' markers replaced
# the code between the prompt string literals. The four prompt statements and
# the JSON-config loading were reconstructed; confirm against the original.

number_execs = int(sys.argv[1])
cluster_size = int(sys.argv[2])
config_file_path = sys.argv[3]
output_file = sys.argv[4]

# Interactive credentials; passwords are read without echoing.
user = raw_input('OpenStack User: ')
key = getpass.getpass(prompt='OpenStack Password: ')        # reconstructed
gmail_user = raw_input('Gmail User(without @gmail.com): ')  # reconstructed
gmail_password = getpass.getpass(prompt='Gmail Password: ')

# Reconstructed: plain JSON dict supports the .get(...) calls used below.
import json
with open(config_file_path) as config_file:
    json_parser = json.load(config_file)

main_ip = json_parser.get('main_ip')
project_name = json_parser.get('project_name')
project_id = json_parser.get('project_id')
public_keypair_path = json_parser.get('public_keypair_path')
private_keypair_path = json_parser.get('private_keypair_path')
private_keypair_name = json_parser.get('private_keypair_name')
input_file_path = json_parser.get('input_file_path')
net_id = json_parser.get('net_id')
image_id = json_parser.get('image_id')
volume_id = json_parser.get('volume_id')
# Note the singular config key vs. the plural variable name.
mapred_factors = json_parser.get('mapred_factor')
def printUsage(): print "python create_cluster.py <cluster_name> <config_file_path>" if (len(sys.argv) < MIN_NUM_ARGS): print "Wrong number of arguments" printUsage() exit(1) cluster_name = sys.argv[1] config_file_path = sys.argv[2] user = raw_input('OpenStack User: '******'OpenStack Password: '******'project_name') project_id = json_parser.get('project_id') main_ip = json_parser.get('main_ip') image_id = json_parser.get('image_id') net_id = json_parser.get('net_id') template_id = json_parser.get('cluster_template_id') key_pair = json_parser.get('keypair_name') public_keypair_path = json_parser.get('public_keypair_path') private_keypair_path = json_parser.get('private_keypair_path') connector = ConnectionGetter(user, key, project_name, project_id, main_ip) keystone_util = UtilKeystone(connector.keystone()) token_ref_id = keystone_util.getTokenRef(user, key, project_name).id sahara_util = UtilSahara(connector.sahara(token_ref_id)) cluster_id = sahara_util.createClusterHadoop(cluster_name, image_id, template_id, net_id,key_pair)
call(command,shell=True) print "Success! File is now at HDFS of cluster!" if (len(sys.argv) < MIN_NUM_ARGS): print "Wrong number of arguments: ", len(sys.argv) printUsage() exit(1) clusterId = sys.argv[1] config_file_path = sys.argv[2] user = raw_input('OpenStack User: '******'OpenStack Password: '******'exec_local_path') publicKeyPath = json_parser.get('public_key_path') keypairPath = json_parser.get('private_keypair_path') project_name = json_parser.get('project_name') project_id = json_parser.get('project_id') main_ip = json_parser.get('main_ip') input_file_path = json_parser.get('input_file_path') connector = ConnectionGetter(user, key, project_name, project_id, main_ip) keystone_util = UtilKeystone(connector.keystone()) token_ref_id = keystone_util.getTokenRef(user, key, project_name).id sahara_util = UtilSahara(connector.sahara(token_ref_id)) instancesIps = sahara_util.get_instances_ips(clusterId) masterIp = sahara_util.get_master_ip(clusterId)
# NOTE(review): this leading exit(1) is the tail of a usage/argument check
# whose condition is outside this chunk — confirm against the full file.
exit(1)

#------------ CONFIGURATIONS -----------------
# Reads command-line parameters for a MapReduce job run, interactive
# OpenStack credentials, and the JSON experiment configuration.
#
# NOTE(review): this chunk was credential-scrubbed — '******' markers replaced
# code between string literals. The password prompt and JSON-config loading
# below are reconstructed; confirm against the original source.

number_execs = int(sys.argv[1])
mapper_exec_cmd = sys.argv[2]
reducer_exec_cmd = sys.argv[3]
mapred_reduce_tasks = sys.argv[4]
config_file_path = sys.argv[5]
output_file = sys.argv[6]

# Interactive credentials; the password is read without echoing.
user = raw_input('OpenStack User: ')
key = getpass.getpass(prompt='OpenStack Password: ')  # reconstructed

# Reconstructed: plain JSON dict supports the .get(...) calls used below.
import json
with open(config_file_path) as config_file:
    json_parser = json.load(config_file)

main_ip = json_parser.get('main_ip')
project_name = json_parser.get('project_name')
project_id = json_parser.get('project_id')
output_container_name = json_parser.get('output_container_name')
job_template_id = json_parser.get('job_template_id')
exec_local_path = json_parser.get('exec_local_path')
public_keypair_path = json_parser.get('public_keypair_path')
private_keypair_path = json_parser.get('private_keypair_path')
private_keypair_name = json_parser.get('private_keypair_name')
input_file_path = json_parser.get('input_file_path')
input_ds_id = json_parser.get('input_ds_id')
net_id = json_parser.get('net_id')
#------------ CONFIGURATIONS -----------------
# Variant of the job-run configuration block: the job template id and input
# data-source id come from the command line instead of the config file.
#
# NOTE(review): this chunk was credential-scrubbed — '******' markers replaced
# code between string literals. The password prompt and JSON-config loading
# below are reconstructed; confirm against the original source.

number_execs = int(sys.argv[1])
job_template_id = sys.argv[2]
input_ds_id = sys.argv[3]
mapper_exec_cmd = sys.argv[4]
reducer_exec_cmd = sys.argv[5]
mapred_reduce_tasks = sys.argv[6]
config_file_path = sys.argv[7]
output_file = sys.argv[8]

# Interactive credentials; the password is read without echoing.
user = raw_input('OpenStack User: ')
key = getpass.getpass(prompt='OpenStack Password: ')  # reconstructed

# Reconstructed: plain JSON dict supports the .get(...) calls used below.
import json
with open(config_file_path) as config_file:
    json_parser = json.load(config_file)

main_ip = json_parser.get('main_ip')
project_name = json_parser.get('project_name')
project_id = json_parser.get('project_id')
output_container_name = json_parser.get('output_container_name')
exec_local_path = json_parser.get('exec_local_path')
public_keypair_path = json_parser.get('public_keypair_path')
private_keypair_path = json_parser.get('private_keypair_path')
private_keypair_name = json_parser.get('private_keypair_name')
net_id = json_parser.get('net_id')
image_id = json_parser.get('image_id')