def get( AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY ) :
    """Look up (or lazily create) the study root bucket for this AWS account.

    Connects to S3 through the proxy configured in the cloudflu preferences,
    derives a deterministic bucket name from the account's canonical user id,
    and returns a TRootObject wrapping the connection and bucket.
    """
    from cloudflu.preferences import get
    a_proxy_address = get( 'common.proxy.proxy_address' )
    a_proxy_port = get( 'common.proxy.proxy_port' )
    a_proxy_user = get( 'common.proxy.proxy_user' )
    a_proxy_pass = get( 'common.proxy.proxy_pass' )

    import boto
    a_s3_conn = boto.connect_s3( AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY,
                                 proxy = a_proxy_address, proxy_port = a_proxy_port,
                                 proxy_user = a_proxy_user, proxy_pass = a_proxy_pass )
    an_id = a_s3_conn.get_canonical_user_id()

    # Bucket name is derived deterministically from the canonical user id, so
    # every run of this account maps to the same (globally unique) bucket
    import hashlib
    a_bucket_name = hashlib.md5( an_id ).hexdigest()

    # EAFP: try to fetch the bucket, create it on first use.  Was a bare
    # 'except:' which also hid auth/network failures; only a missing bucket
    # (S3ResponseError) should trigger creation
    from boto.exception import S3ResponseError
    try :
        a_bucket = a_s3_conn.get_bucket( a_bucket_name )
    except S3ResponseError :
        a_bucket = a_s3_conn.create_bucket( a_bucket_name )

    return TRootObject( a_s3_conn, a_bucket, an_id )
def region_connect( the_cluster_location, the_aws_access_key_id, the_aws_secret_access_key ) :
    """Open an EC2 connection to the region named *the_cluster_location*.

    Enumerates all EC2 regions (through the configured proxy), picks the one
    whose name matches the requested location and connects to it.  Reports an
    error through print_e() when no region matches.
    """
    import boto.ec2
    from cloudflu.preferences import get
    a_proxy_address = get( 'common.proxy.proxy_address' )
    a_proxy_port = get( 'common.proxy.proxy_port' )
    a_proxy_user = get( 'common.proxy.proxy_user' )
    a_proxy_pass = get( 'common.proxy.proxy_pass' )

    a_regions = boto.ec2.regions( proxy = a_proxy_address, proxy_port = a_proxy_port,
                                  proxy_user = a_proxy_user, proxy_pass = a_proxy_pass )
    print_d( 'a_regions = %s\n' % [ str( a_region.name ) for a_region in a_regions ] )

    an_image_region = None
    for a_region in a_regions :
        if a_region.name == the_cluster_location :
            an_image_region = a_region
            break

    if an_image_region is None :
        # Bug fix: report the *requested* location; the old code formatted
        # an_image_region, which is always None on this path
        print_e( "There is no region with such location - '%s'\n" % the_cluster_location )

    print_d( 'an_image_region = < %r >\n' % an_image_region )

    an_ec2_conn = an_image_region.connect( aws_access_key_id = the_aws_access_key_id,
                                           aws_secret_access_key = the_aws_secret_access_key,
                                           proxy = a_proxy_address, proxy_port = a_proxy_port,
                                           proxy_user = a_proxy_user, proxy_pass = a_proxy_pass )
    print_d( 'an_ec2_conn = < %r >\n' % an_ec2_conn )

    return an_ec2_conn
def connect( the_password, the_identity_file, the_host_port, the_login_name, the_host_name, the_command = None ) :
    """Establish an SSH connection, retrying via the project's wait() helper.

    Authenticates with *the_password* when given, otherwise with the RSA key
    read from *the_identity_file*.  When *the_command* is None the probe
    command is taken from the 'common.ssh.command' preference.  Returns the
    connected paramiko.SSHClient (caller is responsible for closing it).
    """
    import paramiko
    a_ssh_client = paramiko.SSHClient()
    # Cloud nodes are freshly provisioned, so their host keys are unknown;
    # accept and record them automatically
    a_ssh_client.set_missing_host_key_policy( paramiko.AutoAddPolicy() )

    # Bind the connect call in a lambda so wait() can re-invoke it until the
    # instance's sshd actually comes up
    if the_password is not None :
        a_ssh_connect = lambda : a_ssh_client.connect( hostname = the_host_name, port = the_host_port,
                                                       username = the_login_name, password = the_password )
    else :
        a_rsa_key = paramiko.RSAKey( filename = the_identity_file )
        a_ssh_connect = lambda : a_ssh_client.connect( hostname = the_host_name, port = the_host_port,
                                                       username = the_login_name, pkey = a_rsa_key )

    if the_command is None :
        from cloudflu.preferences import get
        the_command = get( 'common.ssh.command' )

    wait( a_ssh_connect, a_ssh_client, the_command )

    return a_ssh_client
def entry_point( the_root_object, the_study_name = None, the_booked = False ) :
    """Fetch or create the study object named *the_study_name*.

    When no name is given a unique temporary one ('tmp-<uuid4>') is generated.
    With the_booked True an existing (booked) study is fetched; otherwise a new
    study is created at the preferred data-transfer location.  Returns the
    study object.
    """
    a_spent_time = Timer()

    if the_study_name is None :
        # Auto-generate a collision-free temporary study name
        import uuid
        the_study_name = 'tmp-' + str( uuid.uuid4() )
    print_d( "the_study_name = '%s'\n" % the_study_name )

    if the_booked :
        a_study_object = TStudyObject.get( the_root_object, the_study_name )
    else :
        from cloudflu.preferences import get
        a_location = get( 'amazon.data_transfer.location' )
        a_study_object = TStudyObject.create( the_root_object, a_location, the_study_name )
    print_d( "a_study_object = %s\n" % a_study_object )

    print_d( "a_spent_time = %s, sec\n" % a_spent_time )

    return a_study_object
def main() :
    """CLI entry point: resolve one instance of a cluster reservation and
    publish its SSH connection parameters for the next pipeline step."""
    #----------------------- Defining utility command-line interface -------------------------
    import instance_extract_options
    an_usage_description = "%prog"
    an_usage_description += ec2.use.options.usage_description()
    an_usage_description += instance_extract_options.usage_description()
    from cloudflu import VERSION
    a_version = "%s" % VERSION
    from optparse import IndentedHelpFormatter
    a_help_formatter = IndentedHelpFormatter( width = 127 )
    from optparse import OptionParser
    an_option_parser = OptionParser( usage = an_usage_description, version = a_version, formatter = a_help_formatter )
    #----------------------- Definition of the command line arguments ------------------------
    instance_extract_options.add( an_option_parser )
    ec2.use.options.add( an_option_parser )
    amazon.security_options.add( an_option_parser )
    common.options.add( an_option_parser )
    #------------------ Extracting and verifying command-line arguments ----------------------
    an_options, an_args = an_option_parser.parse_args()
    common.options.extract( an_option_parser )
    AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY = amazon.security_options.extract( an_option_parser )
    a_cluster_id = ec2.use.options.extract( an_option_parser )
    an_instance_ord = instance_extract_options.extract( an_option_parser )
    from cloudflu.preferences import get
    a_cluster_location = get( 'amazon.cluster.location' )
    a_host_port = int( get( 'amazon.cluster.host_port' ) )
    a_login_name = get( 'amazon.cluster.login_name' )
    print_d( "\n--------------------------- Canonical substitution ------------------------\n" )
    # Echo the fully-expanded equivalent command line for reproducibility
    import sys; an_engine = sys.argv[ 0 ]
    a_call = "%s %s %s" % ( an_engine, ec2.use.options.compose( a_cluster_id ), instance_extract_options.compose( an_instance_ord ) )
    print_d( a_call + '\n' )
    print_d( "\n----------------------- Running actual functionality ----------------------\n" )
    a_spent_time = Timer()
    an_ec2_conn = ec2.common.region_connect( a_cluster_location, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY )
    a_reservation = ec2.use.get_reservation( an_ec2_conn, a_cluster_id )
    print_d( '< %r > : %s\n' % ( a_reservation, a_reservation.instances ) )
    # Select the requested instance by its ordinal within the reservation
    an_instance = a_reservation.instances[ an_instance_ord ]
    print_d( "an_instance = %s\n" % an_instance )
    an_identity_file = ec2.run.get_identity_filepath( an_instance.key_name )
    print_d( "an_identity_file = '%s'\n" % an_identity_file )
    import os.path
    if not os.path.exists( an_identity_file ) :
        # Without the private key file SSH access is impossible - abort
        print_d( "\n-------------------- There are no such identity files ---------------------\n" )
        import sys, os; sys.exit( os.EX_UNAVAILABLE )
        pass
    print_d( "\n------------------ Printing succussive pipeline arguments -----------------\n" )
    a_password = None  # key-based auth only; no password
    a_host_name = an_instance.public_dns_name
    common.ssh.options.track( a_password, an_identity_file, a_host_port, a_login_name, a_host_name )
    print_d( "\n--------------------------- Canonical substitution ------------------------\n" )
    common.ssh.options.echo( a_password, an_identity_file, a_host_port, a_login_name, a_host_name )
    print_d( a_call + '\n' )
    print_d( "\n-------------------------------------- OK ---------------------------------\n" )
    pass
def main() :
    """CLI entry point: terminate one or more cluster reservations in
    parallel (terminal pipeline step - it publishes no further arguments)."""
    #----------------------- Defining utility command-line interface -------------------------
    an_usage_description = "%prog"
    from reservation_rm_options import usage_description as usage_description_options
    an_usage_description += usage_description_options()
    from cloudflu import VERSION
    a_version = "%s" % VERSION
    from optparse import IndentedHelpFormatter
    a_help_formatter = IndentedHelpFormatter( width = 127 )
    from optparse import OptionParser
    an_option_parser = OptionParser( usage = an_usage_description, version = a_version, formatter = a_help_formatter )
    #----------------------- Definition of the command line arguments ------------------------
    ec2.use.options.add( an_option_parser )
    amazon.security_options.add( an_option_parser )
    common.options.add( an_option_parser )
    #------------------ Extracting and verifying command-line arguments ----------------------
    an_options, an_args = an_option_parser.parse_args()
    common.options.extract( an_option_parser )
    AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY = amazon.security_options.extract( an_option_parser )
    from reservation_rm_options import extract as extract_options
    a_cluster_ids = extract_options( an_option_parser )
    from cloudflu.preferences import get
    a_cluster_location = get( 'amazon.cluster.location' )
    print_d( "\n--------------------------- Canonical substitution ------------------------\n" )
    # Echo the fully-expanded equivalent command line for reproducibility
    import sys; an_engine = sys.argv[ 0 ]
    from reservation_rm_options import compose as compose_options
    a_call = "%s %s" % ( an_engine, compose_options( a_cluster_ids ) )
    print_d( a_call + '\n' )
    print_d( "\n----------------------- Running actual functionality ----------------------\n" )
    a_spent_time = Timer()
    an_ec2_conn = ec2.common.region_connect( a_cluster_location, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY )
    # One worker per reservation: terminate all of them concurrently via
    # the module-level execute() helper
    a_worker_pool = WorkerPool( len( a_cluster_ids ) )
    for a_cluster_id in a_cluster_ids :
        a_worker_pool.charge( execute, ( an_ec2_conn, a_cluster_id ) )
        pass
    a_worker_pool.shutdown()
    a_worker_pool.join()
    print_d( "a_spent_time = %s, sec\n" % a_spent_time )
    print_d( "\n------------------ Printing succussive pipeline arguments -----------------\n" )
    # There are no - it is a terminal step
    print_d( "\n--------------------------- Canonical substitution ------------------------\n" )
    print_d( a_call + '\n' )
    print_d( "\n-------------------------------------- OK ---------------------------------\n" )
    pass
def main() :
    """CLI entry point: launch a cluster reservation from an AMI and publish
    the resulting cluster id for the next pipeline step."""
    #----------------------- Defining utility command-line interface -------------------------
    an_usage_description = "%prog"
    an_usage_description += ec2.ami.run_options.usage_description()
    from cloudflu import VERSION
    a_version = "%s" % VERSION
    from optparse import IndentedHelpFormatter
    a_help_formatter = IndentedHelpFormatter( width = 127 )
    from optparse import OptionParser
    an_option_parser = OptionParser( usage = an_usage_description, version = a_version, formatter = a_help_formatter )
    #----------------------- Definition of the command line arguments ------------------------
    ec2.ami.run_options.add( an_option_parser )
    amazon.security_options.add( an_option_parser )
    common.options.add( an_option_parser )
    #------------------ Extracting and verifying command-line arguments ----------------------
    an_options, an_args = an_option_parser.parse_args()
    common.options.extract( an_option_parser )
    AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY = amazon.security_options.extract( an_option_parser )
    an_instance_type, an_image_id, a_number_nodes = ec2.ami.run_options.extract( an_option_parser )
    from cloudflu.preferences import get
    a_cluster_location = get( 'amazon.cluster.location' )
    a_host_port = get( 'amazon.cluster.host_port' )
    print_d( "\n--------------------------- Canonical substitution ------------------------\n" )
    # Echo the fully-expanded equivalent command line for reproducibility
    import sys; an_engine = sys.argv[ 0 ]
    a_call = "%s %s" % ( an_engine, ec2.ami.run_options.compose( an_instance_type, an_image_id, a_number_nodes ) )
    print_d( a_call + '\n' )
    print_d( "\n----------------------- Running actual functionality ----------------------\n" )
    a_spent_time = Timer()
    # Note: a_number_nodes is passed twice (min and max instance counts match)
    a_reservation, an_identity_file = ec2.run.run_reservation( an_image_id, a_cluster_location, an_instance_type,
                                                               a_number_nodes, a_number_nodes, a_host_port,
                                                               AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY )
    print_d( "a_spent_time = %s, sec\n" % a_spent_time )
    print_d( "\n------------------ Printing succussive pipeline arguments -----------------\n" )
    a_cluster_location = a_reservation.region.name
    a_cluster_id = a_reservation.id
    # Publish the new cluster id so downstream pipeline commands can pick it up
    ec2.use.options.track( a_cluster_id )
    print_d( "\n--------------------------- Canonical substitution ------------------------\n" )
    print_d( a_call + '\n' )
    print_d( "\n-------------------------------------- OK ---------------------------------\n" )
    pass
def main() :
    """CLI entry point: list the ids of all reservations in the preferred
    region whose first instance is still alive (not terminated/shutting-down)."""
    #----------------------- Defining utility command-line interface -------------------------
    an_usage_description = "%prog"
    from cloudflu import VERSION
    a_version = "%s" % VERSION
    from optparse import IndentedHelpFormatter
    a_help_formatter = IndentedHelpFormatter( width = 127 )
    from optparse import OptionParser
    an_option_parser = OptionParser( usage = an_usage_description, version = a_version, formatter = a_help_formatter )
    #----------------------- Definition of the command line arguments ------------------------
    amazon.security_options.add( an_option_parser )
    common.options.add( an_option_parser )
    #------------------ Extracting and verifying command-line arguments ----------------------
    an_options, an_args = an_option_parser.parse_args()
    common.options.extract( an_option_parser )
    AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY = amazon.security_options.extract( an_option_parser )
    from cloudflu.preferences import get
    a_cluster_location = get( 'amazon.cluster.location' )
    print_d( "\n--------------------------- Canonical substitution ------------------------\n" )
    import sys; an_engine = sys.argv[ 0 ]
    a_call = "%s" % ( an_engine )
    print_d( a_call + '\n' )
    print_d( "\n----------------------- Running actual functionality ----------------------\n" )
    a_spent_time = Timer()
    import boto.ec2
    an_ec2_conn = boto.ec2.connect_to_region( a_cluster_location,
                                              aws_access_key_id = AWS_ACCESS_KEY_ID,
                                              aws_secret_access_key = AWS_SECRET_ACCESS_KEY )
    for a_reservation in an_ec2_conn.get_all_instances() :
        # Liveness is judged from the first instance only; update() refreshes
        # the state from EC2.  NOTE(review): assumes all instances of a
        # reservation share one lifecycle - confirm
        an_instance = a_reservation.instances[ 0 ]
        a_status = an_instance.update()
        if a_status == 'terminated' or a_status == 'shutting-down' :
            continue
        # Plain print (not print_d): the ids are the command's actual output
        print a_reservation.id
        pass
    print_d( "\n--------------------------- Canonical substitution ------------------------\n" )
    print_d( a_call + '\n' )
    print_d( "\n-------------------------------------- OK ---------------------------------\n" )
    pass
def main() :
    """CLI entry point: measure the optimal upload seed size and write the
    result (plus the measured socket timeout) back into the cloudflu rcfile."""
    #----------------------- Defining utility command-line interface -------------------------
    from cloudflu import amazon
    an_usage_description = "%prog --precision=10 --start-size=65536 --solution-window=50 --number-mesurements=5"
    from cloudflu import VERSION
    a_version = "%s" % VERSION
    from optparse import IndentedHelpFormatter
    a_help_formatter = IndentedHelpFormatter( width = 127 )
    from optparse import OptionParser
    an_option_parser = OptionParser( usage = an_usage_description, version = a_version, formatter = a_help_formatter )
    #----------------------- Definition of the command line arguments ------------------------
    an_option_parser.add_option( "--precision", metavar = "< algorithm precision, % >",
                                 type = "int", action = "store", dest = "precision",
                                 help = "(%default, by default)", default = 10 )
    an_option_parser.add_option( "--start-size", metavar = "< start value for the search algorithm, bytes >",
                                 type = "int", action = "store", dest = "start_size",
                                 help = "(%default, by default)", default = 65536 )
    an_option_parser.add_option( "--solution-window", metavar = "< initial solution window considered to, %>",
                                 type = "int", action = "store", dest = "solution_window",
                                 help = "(%default, by default)", default = 50 )
    an_option_parser.add_option( "--number-mesurements", metavar = "< number mesurements to be done in the solution window>",
                                 type = "int", action = "store", dest = "number_mesurements",
                                 help = "(%default, by default)", default = 5 )
    amazon.security_options.add( an_option_parser )
    #------------------ Extracting and verifying command-line arguments ----------------------
    an_options, an_args = an_option_parser.parse_args()
    AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY = amazon.security_options.extract( an_option_parser )
    a_precision = an_options.precision
    a_center_x = an_options.start_size
    a_region_x = an_options.solution_window
    a_nb_attempts = an_options.number_mesurements
    from cloudflu.preferences import get
    a_data_location = get( 'amazon.data_transfer.location' )
    # Optimization driver: entry_point() searches around a_center_x within the
    # a_region_x window until a_precision is reached, a_nb_attempts per point
    an_engine = SeedSizeMesurement( AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, a_data_location )
    from cloudflu.common import Timer
    a_spent_time = Timer()
    an_optimize_x, a_cost = entry_point( an_engine, a_center_x, a_region_x, a_precision, a_nb_attempts, get_stats )
    # Back up the rcfile with a timestamp, then patch the measured values in
    # place with perl one-liners
    from preferences import resource_filename; an_rcfilename = resource_filename()
    import time; an_rcfilename_save = '%s_%s' % ( an_rcfilename, time.strftime( '%Y-%m-%d_%H:%M' ) )
    import os; os.system( "cp %s %s" % ( an_rcfilename, an_rcfilename_save ) )
    import os; os.system( "perl -p -i -e 's/(socket_timeout =)\s*[0-9]+/\\1 %d/' %s" % ( an_engine.timeout(), an_rcfilename ) )
    import os; os.system( "perl -p -i -e 's/(upload_seed_size =)\s*[0-9]+/\\1 %d/' %s" % ( an_optimize_x, an_rcfilename ) )
    print "a_spent_time = %s, sec\n" % a_spent_time
    pass
def main() :
    """CLI entry point: prepare every node of a cluster reservation for NFS
    (install packages over SSH, open the NFS ports in the security group)."""
    #----------------------- Defining utility command-line interface -------------------------
    an_usage_description = "%prog"
    an_usage_description += ec2.use.options.usage_description()
    from cloudflu import VERSION
    a_version = "%s" % VERSION
    from optparse import IndentedHelpFormatter
    a_help_formatter = IndentedHelpFormatter( width = 127 )
    from optparse import OptionParser
    an_option_parser = OptionParser( usage = an_usage_description, version = a_version, formatter = a_help_formatter )
    #----------------------- Definition of the command line arguments ------------------------
    ec2.use.options.add( an_option_parser )
    amazon.security_options.add( an_option_parser )
    common.options.add( an_option_parser )
    #------------------ Extracting and verifying command-line arguments ----------------------
    an_options, an_args = an_option_parser.parse_args()
    common.options.extract( an_option_parser )
    AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY = amazon.security_options.extract( an_option_parser )
    a_cluster_id = ec2.use.options.extract( an_option_parser )
    from cloudflu.preferences import get
    a_cluster_location = get( 'amazon.cluster.location' )
    a_host_port = int( get( 'amazon.cluster.host_port' ) )
    a_login_name = get( 'amazon.cluster.login_name' )
    print_d( "\n--------------------------- Canonical substitution ------------------------\n" )
    import sys; an_engine = sys.argv[ 0 ]
    a_call = "%s %s" % ( an_engine, ec2.use.options.compose( a_cluster_id ) )
    print_d( a_call + '\n' )
    print_d( "\n----------------------- Running actual functionality ----------------------\n" )
    a_spent_time = Timer()
    an_ec2_conn = ec2.common.region_connect( a_cluster_location, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY )
    a_reservation = ec2.use.get_reservation( an_ec2_conn, a_cluster_id )
    print_d( '< %r > : %s\n' % ( a_reservation, a_reservation.instances ) )
    a_security_group = ec2.use.get_security_group( an_ec2_conn, a_reservation )
    print_d( "< %r > : %s\n" % ( a_security_group, a_security_group.rules ) )
    an_instance2ssh = {}
    for an_instance in a_reservation.instances :
        a_password = None  # key-based auth only
        an_identity_file = ec2.run.get_identity_filepath( an_instance.key_name )
        a_host_name = an_instance.public_dns_name
        ssh.options.echo( a_password, an_identity_file, a_host_port, a_login_name, a_host_name )
        a_ssh_client = ssh.connect( a_password, an_identity_file, a_host_port, a_login_name, a_host_name )
        an_instance2ssh[ an_instance ] = a_ssh_client
        ssh.command( a_ssh_client, 'sudo apt-get install -y nfs-common portmap nfs-kernel-server' ) # install server and client packages
        # Best-effort: authorize() raises if the rule already exists, which is
        # fine - the deliberately-silent except keeps re-runs idempotent
        try:
            a_security_group.authorize( 'tcp', 111, 111, '%s/0' % an_instance.private_ip_address ) # for rpcbind
            a_security_group.authorize( 'tcp', 2049, 2049, '%s/0' % an_instance.private_ip_address ) # for nfs over tcp
            a_security_group.authorize( 'udp', 35563, 35563, '%s/0' % an_instance.private_ip_address ) # for nfs over udp
        except :
            pass
        pass
    # Close all the SSH sessions opened above
    [ a_ssh_client.close() for a_ssh_client in an_instance2ssh.values() ]
    print_d( "a_spent_time = %s, sec\n" % a_spent_time )
    print_d( "\n------------------ Printing succussive pipeline arguments -----------------\n" )
    ec2.use.options.track( a_cluster_id )
    print_d( "\n--------------------------- Canonical substitution ------------------------\n" )
    print_d( a_call + '\n' )
    print_d( "\n-------------------------------------- OK ---------------------------------\n" )
    pass
def main() :
    """CLI entry point: fetch a cluster reservation for OpenMPI hostfile
    configuration (reservation lookup errors are reported, not raised)."""
    #----------------------- Defining utility command-line interface -------------------------
    import openmpi_config_options
    an_usage_description = "%prog"
    an_usage_description += ec2.use.options.usage_description()
    an_usage_description += openmpi_config_options.usage_description()
    from cloudflu import VERSION
    a_version = "%s" % VERSION
    from optparse import IndentedHelpFormatter
    a_help_formatter = IndentedHelpFormatter( width = 127 )
    from optparse import OptionParser
    an_option_parser = OptionParser( usage = an_usage_description, version = a_version, formatter = a_help_formatter )
    #----------------------- Definition of the command line arguments ------------------------
    openmpi_config_options.add( an_option_parser )
    ec2.use.options.add( an_option_parser )
    amazon.security_options.add( an_option_parser )
    common.options.add( an_option_parser )
    #------------------ Extracting and verifying command-line arguments ----------------------
    an_options, an_args = an_option_parser.parse_args()
    common.options.extract( an_option_parser )
    AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY = amazon.security_options.extract( an_option_parser )
    a_cluster_id = ec2.use.options.extract( an_option_parser )
    a_hostfile = openmpi_config_options.extract( an_option_parser )
    from cloudflu.preferences import get
    a_cluster_location = get( 'amazon.cluster.location' )
    a_host_port = int( get( 'amazon.cluster.host_port' ) )
    a_login_name = get( 'amazon.cluster.login_name' )
    print_d( "\n--------------------------- Canonical substitution ------------------------\n" )
    # Echo the fully-expanded equivalent command line for reproducibility
    import sys; an_engine = sys.argv[ 0 ]
    a_call = "%s %s %s" % ( an_engine, ec2.use.options.compose( a_cluster_id ), openmpi_config_options.compose( a_hostfile ) )
    print_d( a_call + '\n' )
    print_d( "\n----------------------- Running actual functionality ----------------------\n" )
    a_spent_time = Timer()
    an_ec2_conn = ec2.common.region_connect( a_cluster_location, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY )
    # Lookup failures are reported via print_e rather than propagated
    try:
        a_reservation = ec2.use.get_reservation( an_ec2_conn, a_cluster_id )
        print_d( '< %r > : %s\n' % ( a_reservation, a_reservation.instances ) )
    except Exception, exc:
        print_e( '%s\n' % exc )
        pass
def main() :
    """CLI entry point: run a solver case on a booked cluster.

    Uploads the case directory to S3, downloads it onto the master node,
    shares the working directory to all slave nodes over NFS, then launches
    the case's run hook on the master via SSH and seals the output study.
    """
    #----------------------- Defining utility command-line interface -------------------------
    import data_transfer_options
    import solver_start_options
    import data_seeding_options
    import openmpi_config_options
    an_usage_description = "%prog"
    an_usage_description += ec2.use.options.usage_description()
    an_usage_description += data_transfer_options.usage_description()
    an_usage_description += solver_start_options.usage_description()
    from cloudflu import VERSION
    a_version = "%s" % VERSION
    from optparse import IndentedHelpFormatter
    a_help_formatter = IndentedHelpFormatter( width = 127 )
    from optparse import OptionParser
    an_option_parser = OptionParser( usage = an_usage_description, version = a_version, formatter = a_help_formatter )
    #----------------------- Definition of the command line arguments ------------------------
    ec2.use.options.add( an_option_parser )
    data_transfer_options.add( an_option_parser )
    solver_start_options.add( an_option_parser )
    data_seeding_options.add( an_option_parser )
    common.concurrency_options.add( an_option_parser )
    openmpi_config_options.add( an_option_parser )
    amazon.security_options.add( an_option_parser )
    common.options.add( an_option_parser )
    #------------------ Extracting and verifying command-line arguments ----------------------
    an_options, an_args = an_option_parser.parse_args()
    an_enable_debug, a_log_file = common.options.extract( an_option_parser )
    AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY = amazon.security_options.extract( an_option_parser )
    a_cluster_id = ec2.use.options.extract( an_option_parser )
    a_hostfile = openmpi_config_options.extract( an_option_parser )
    a_number_threads = common.concurrency_options.extract( an_option_parser )
    an_upload_seed_size = data_seeding_options.extract( an_option_parser )
    a_case_dir, a_run_hook = solver_start_options.extract( an_option_parser )
    a_study_name = data_transfer_options.extract( an_option_parser )
    from cloudflu.preferences import get
    a_cluster_location = get( 'amazon.cluster.location' )
    a_host_port = int( get( 'amazon.cluster.host_port' ) )
    a_login_name = get( 'amazon.cluster.login_name' )
    print_d( "\n--------------------------- Canonical substitution ------------------------\n" )
    # Echo the fully-expanded equivalent command line for reproducibility
    import sys; an_engine = sys.argv[ 0 ]
    a_call = "%s %s %s %s %s" % ( an_engine, ec2.use.options.compose( a_cluster_id ),
                                  data_transfer_options.compose( a_study_name ),
                                  solver_start_options.compose( a_case_dir, a_run_hook ),
                                  openmpi_config_options.compose( a_hostfile ) )
    print_d( a_call + '\n' )
    print_d( "\n----------------------- Running actual functionality ----------------------\n" )
    a_spent_time = Timer()
    an_ec2_conn = ec2.common.region_connect( a_cluster_location, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY )
    a_reservation = ec2.use.get_reservation( an_ec2_conn, a_cluster_id )
    print_d( '< %r > : %s\n' % ( a_reservation, a_reservation.instances ) )
    # The first instance of the reservation acts as the master / NFS server
    a_master_node = an_instance = a_reservation.instances[ 0 ]
    print_d( "\n--------------------- Uploading case data to S3 ---------------------------\n" )
    import os; a_case_name = os.path.basename( a_case_dir )
    from cloudflu.amazon.s3 import TRootObject
    a_root_object = TRootObject.get( AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY )
    print_d( "a_root_object = %s\n" % a_root_object )
    import data_upload
    an_input_study = data_upload.entry_point( a_root_object, { a_case_dir : '' }, an_upload_seed_size, a_number_threads )
    an_input_study_name = an_input_study.name()
    print_d( "an_input_study_name = '%s'\n" % an_input_study_name )
    print_d( "\n------------------ Installing cloudflu to master node ----------------------\n" )
    a_password = None  # key-based auth only
    a_host_name = an_instance.public_dns_name
    an_identity_file = ec2.run.get_identity_filepath( an_instance.key_name )
    ssh.options.echo( a_password, an_identity_file, a_host_port, a_login_name, a_host_name )
    a_ssh_client = ssh.connect( a_password, an_identity_file, a_host_port, a_login_name, a_host_name )
    an_instance2ssh = {}
    an_instance2ssh[ an_instance ] = a_ssh_client
    print_d( "\n------------- Downloading case data from S3 to the master node ------------\n" )
    # Create a fresh temp dir on the remote node; strip the trailing newline
    # from the captured stdout line
    a_stdout_lines = ssh.command( a_ssh_client, 'python -c "import os, os.path, tempfile; print tempfile.mkdtemp()"' )
    a_working_dir = a_stdout_lines[ 0 ][ : -1 ]
    print_d( "a_working_dir = '%s'\n" % a_working_dir )
    ssh.command( a_ssh_client, "cloudflu-download --study-name=%s --output-dir=%s --remove" % ( an_input_study_name, a_working_dir ) )
    print_d( "\n--- Sharing the solver case folder for all the cluster nodes through NFS --\n" )
    ssh.command( a_ssh_client, "sudo sh -c 'echo %s *\(rw,no_root_squash,sync,subtree_check\) >> /etc/exports'" % ( a_working_dir ) )
    ssh.command( a_ssh_client, "sudo exportfs -a" ) # make changes effective on the running NFS server
    # Mount the master's working dir at the same path on every slave node
    for an_instance in a_reservation.instances[ 1 : ] :
        a_host_name = an_instance.public_dns_name
        ssh.options.echo( a_password, an_identity_file, a_host_port, a_login_name, a_host_name )
        a_ssh_client = ssh.connect( a_password, an_identity_file, a_host_port, a_login_name, a_host_name )
        ssh.command( a_ssh_client, "mkdir -p %s" % ( a_working_dir ) )
        ssh.command( a_ssh_client, "sudo mount %s:%s %s" % ( a_master_node.private_ip_address, a_working_dir, a_working_dir ) )
        an_instance2ssh[ an_instance ] = a_ssh_client
        pass
    print_d( "\n------------------------ Get access the booked study ----------------------\n" )
    import study_book; an_output_study = study_book.entry_point( a_root_object, a_study_name, True )
    print_d( "\n----------------------- Running of the solver case ------------------------\n" )
    a_num_nodes = len( a_reservation.instances )
    a_target_dir = os.path.join( a_working_dir, a_case_name )
    an_additional_args = "--study-name='%s' --booked --upload-seed-size=%d" % ( a_study_name, an_upload_seed_size )
    # Launch the case's run hook on the master; it uploads results into the
    # booked study, which is then sealed to mark completion
    a_ssh_client = an_instance2ssh[ a_master_node ]
    ssh.command( a_ssh_client, "%s/%s %d '%s' %s" % ( a_target_dir, a_run_hook, a_num_nodes, a_hostfile, an_additional_args ) )
    an_output_study.seal()
    # Close all the SSH sessions opened above
    [ a_ssh_client.close() for a_ssh_client in an_instance2ssh.values() ]
    print_d( "a_spent_time = %s, sec\n" % a_spent_time )
    print_d( "\n------------------ Printing succussive pipeline arguments -----------------\n" )
    ec2.use.options.track( a_cluster_id )
    print_d( "\n--------------------------- Canonical substitution ------------------------\n" )
    print_d( a_call + '\n' )
    print_d( "\n-------------------------------------- OK ---------------------------------\n" )
    pass