# NOTE: this excerpt assumes the usual cloudflu module-level preamble (not shown),
# e.g. "from cloudflu import common, amazon" and "from cloudflu.common import print_d";
# upload_timedata() and upload_timelog() are defined elsewhere in this module.
def main() :
    #----------------------- Defining utility command-line interface -------------------------
    import timestamps_upload_options
    import data_upload_options
    import study_book_options
    import data_seeding_options

    an_usage_description = "%prog"
    an_usage_description += timestamps_upload_options.usage_description()
    an_usage_description += study_book_options.usage_description()
    an_usage_description += data_upload_options.usage_description()

    from cloudflu import VERSION
    a_version = "%s" % VERSION

    from optparse import IndentedHelpFormatter
    a_help_formatter = IndentedHelpFormatter( width = 127 )

    from optparse import OptionParser
    an_option_parser = OptionParser( usage = an_usage_description,
                                     version = a_version,
                                     formatter = a_help_formatter )

    #----------------------- Definition of the command line arguments ------------------------
    timestamps_upload_options.add( an_option_parser )
    study_book_options.add( an_option_parser )
    data_upload_options.add( an_option_parser )
    data_seeding_options.add( an_option_parser )

    common.concurrency_options.add( an_option_parser )
    common.communication_options.add( an_option_parser )
    amazon.security_options.add( an_option_parser )
    common.options.add( an_option_parser )

    #------------------ Extracting and verifying command-line arguments ----------------------
    an_options, an_args = an_option_parser.parse_args()

    common.options.extract( an_option_parser )

    AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY = amazon.security_options.extract( an_option_parser )

    common.communication_options.extract( an_option_parser )

    a_number_threads = common.concurrency_options.extract( an_option_parser )

    an_upload_seed_size = data_seeding_options.extract( an_option_parser )

    a_study_name = study_book_options.extract( an_option_parser )

    a_booked = data_upload_options.extract( an_option_parser )

    a_solver_run, a_case_dir, a_solver_log, a_watched_keyword, a_time_log = \
        timestamps_upload_options.extract( an_option_parser )

    print_d( "\n--------------------------- Canonical substitution ------------------------\n" )
    import sys; an_engine = sys.argv[ 0 ]
    a_call = "%s %s %s %s %s" % ( an_engine,
                                  timestamps_upload_options.compose( a_solver_run, a_case_dir, a_solver_log,
                                                                     a_watched_keyword, a_time_log ),
                                  data_upload_options.compose( a_booked ),
                                  study_book_options.compose( a_study_name ),
                                  data_seeding_options.compose( an_upload_seed_size ) )
    print_d( a_call + '\n' )

    print_d( "\n----------------------- Running actual functionality ----------------------\n" )
    import pexpect
    a_spent_time = common.Timer()

    # Snapshot the decomposed case dir so only newly written time directories get uploaded
    import os.path; a_watched_dir = os.path.join( a_case_dir, 'processor0' )
    import os; a_listdir = os.listdir( a_watched_dir )

    from cloudflu.amazon.s3 import TRootObject
    a_root_object = TRootObject.get( AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY )
    print_d( "a_root_object = %s\n" % a_root_object )

    from cloudflu.common import WorkerPool
    a_worker_pool = WorkerPool( a_number_threads )

    a_child = pexpect.spawn( a_solver_run )
    import os.path; a_log_file = os.path.join( a_case_dir, a_solver_log )
    a_child.logfile = open( a_log_file, 'w' )

    while True :
        try :
            try :
                # Block until the solver prints the watched keyword again;
                # pexpect raises EOF (or TIMEOUT) once the solver terminates
                a_child.expect( a_watched_keyword )
            except :
                break

            # Upload every time directory that appeared since the previous iteration
            import os; a_list_dir = os.listdir( a_watched_dir )
            for an_item in a_list_dir :
                if an_item in a_listdir :
                    continue
                a_worker_pool.charge( upload_timedata, ( a_root_object, a_study_name, a_booked,
                                                         an_upload_seed_size, a_number_threads,
                                                         a_case_dir, an_item ) )
                if a_time_log :
                    a_worker_pool.charge( upload_timelog, ( a_root_object, a_study_name, a_booked,
                                                            an_upload_seed_size, a_number_threads,
                                                            an_item, a_log_file ) )
                    pass
                pass
            a_listdir = a_list_dir
        except :
            import sys, traceback
            traceback.print_exc( file = sys.stderr )
            break
        pass

    a_worker_pool.shutdown()
    a_worker_pool.join()

    print_d( "a_spent_time = %s, sec\n" % a_spent_time )

    print_d( "\n------------------ Printing successive pipeline arguments -----------------\n" )
    print a_study_name

    print_d( "\n--------------------------- Canonical substitution ------------------------\n" )
    print_d( a_call + '\n' )

    print_d( "\n-------------------------------------- OK ---------------------------------\n" )
    import sys, os; sys.exit( os.EX_OK )
    pass
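
# The WorkerPool used above is cloudflu's own helper and its internals are not
# shown in this excerpt. As a rough mental model only (an assumption, not
# cloudflu's actual code), its charge()/shutdown()/join() protocol behaves like
# the minimal sketch below: charge() enqueues a (function, args) pair, a fixed
# set of worker threads drains the queue, and shutdown()/join() stop the workers
# once every queued task has run.
class TWorkerPoolSketch :
    def __init__( self, a_number_threads ) :
        import threading, Queue
        self._queue = Queue.Queue()
        self._threads = [ threading.Thread( target = self._run ) for an_id in range( a_number_threads ) ]
        for a_thread in self._threads :
            a_thread.start()
            pass
        pass
    def _run( self ) :
        while True :
            an_item = self._queue.get()
            if an_item is None :
                break   # 'poison pill' posted by shutdown()
            a_function, an_args = an_item
            a_function( *an_args )
            pass
        pass
    def charge( self, a_function, an_args ) :
        self._queue.put( ( a_function, an_args ) )
        pass
    def shutdown( self ) :
        # one poison pill per worker thread
        for a_thread in self._threads :
            self._queue.put( None )
            pass
        pass
    def join( self ) :
        for a_thread in self._threads :
            a_thread.join()
            pass
        pass
    pass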
def main() :
    #----------------------- Defining utility command-line interface -------------------------
    import data_upload_options
    import study_book_options
    import upload_start_options
    import data_seeding_options

    an_usage_description = "%prog"
    an_usage_description += upload_start_options.usage_description()
    an_usage_description += data_upload_options.usage_description()

    from cloudflu import VERSION
    a_version = "%s" % VERSION

    from optparse import IndentedHelpFormatter
    a_help_formatter = IndentedHelpFormatter( width = 127 )

    from optparse import OptionParser
    an_option_parser = OptionParser( usage = an_usage_description,
                                     version = a_version,
                                     formatter = a_help_formatter )

    #----------------------- Definition of the command line arguments ------------------------
    data_upload_options.add( an_option_parser )
    upload_start_options.add( an_option_parser )
    study_book_options.add( an_option_parser )
    data_seeding_options.add( an_option_parser )

    common.concurrency_options.add( an_option_parser )
    common.communication_options.add( an_option_parser )
    amazon.security_options.add( an_option_parser )
    common.options.add( an_option_parser )

    #------------------ Extracting and verifying command-line arguments ----------------------
    an_options, an_args = an_option_parser.parse_args()

    common.options.extract( an_option_parser )

    AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY = amazon.security_options.extract( an_option_parser )

    common.communication_options.extract( an_option_parser )

    a_number_threads = common.concurrency_options.extract( an_option_parser )
    an_upload_seed_size = data_seeding_options.extract( an_option_parser )
    a_file2locations = upload_start_options.extract( an_option_parser )
    a_booked = data_upload_options.extract( an_option_parser )
    a_study_name = study_book_options.extract( an_option_parser )

    print_i( "--------------------------- Defining the study object ---------------------------\n" )
    from cloudflu.amazon.s3 import TRootObject
    a_root_object = TRootObject.get( AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY )
    print_d( "a_root_object = %s\n" % a_root_object )

    print_i( "-------------------------- Running actual functionality -------------------------\n" )
    a_study_object = entry_point( a_root_object, a_file2locations, an_upload_seed_size,
                                  a_number_threads, a_study_name, a_booked )

    print_i( "-------------------- Printing successive pipeline arguments ---------------------\n" )
    print a_study_object.name()

    print_i( "-------------------------------------- OK ---------------------------------------\n" )
    pass
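
# Every *_options module used by these main() functions follows one convention:
# usage_description() extends the "%prog" usage string, add() registers the
# module's flags on the shared OptionParser, extract() reads the parsed values
# back out, and compose() renders them into command-line text again (used for
# the "canonical substitution" echo of an equivalent call). The sketch below is
# the body of a hypothetical standalone module following that convention; only
# the --study-name flag is taken from this file, the rest is illustrative.
#
# --- contents of a hypothetical study_name_options.py ----------------------------
#
# def usage_description() :
#     return " [ --study-name=<name> ]"
#
# def add( an_option_parser ) :
#     an_option_parser.add_option( "--study-name", metavar = "<name>", action = "store",
#                                  dest = "study_name", default = None,
#                                  help = "name of the study to operate on" )
#     pass
#
# def extract( an_option_parser ) :
#     # re-parsing is cheap with optparse and keeps each module self-contained,
#     # mirroring the repeated extract() calls in the main() functions above
#     an_options, an_args = an_option_parser.parse_args()
#     return an_options.study_name
#
# def compose( a_study_name ) :
#     return "--study-name='%s'" % a_study_name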
def main() :
    #----------------------- Defining utility command-line interface -------------------------
    import data_transfer_options
    import solver_start_options
    import data_seeding_options
    import openmpi_config_options

    an_usage_description = "%prog"
    an_usage_description += ec2.use.options.usage_description()
    an_usage_description += data_transfer_options.usage_description()
    an_usage_description += solver_start_options.usage_description()

    from cloudflu import VERSION
    a_version = "%s" % VERSION

    from optparse import IndentedHelpFormatter
    a_help_formatter = IndentedHelpFormatter( width = 127 )

    from optparse import OptionParser
    an_option_parser = OptionParser( usage = an_usage_description,
                                     version = a_version,
                                     formatter = a_help_formatter )

    #----------------------- Definition of the command line arguments ------------------------
    ec2.use.options.add( an_option_parser )
    data_transfer_options.add( an_option_parser )
    solver_start_options.add( an_option_parser )
    data_seeding_options.add( an_option_parser )

    common.concurrency_options.add( an_option_parser )
    openmpi_config_options.add( an_option_parser )
    amazon.security_options.add( an_option_parser )
    common.options.add( an_option_parser )

    #------------------ Extracting and verifying command-line arguments ----------------------
    an_options, an_args = an_option_parser.parse_args()

    an_enable_debug, a_log_file = common.options.extract( an_option_parser )

    AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY = amazon.security_options.extract( an_option_parser )

    a_cluster_id = ec2.use.options.extract( an_option_parser )
    a_hostfile = openmpi_config_options.extract( an_option_parser )
    a_number_threads = common.concurrency_options.extract( an_option_parser )
    an_upload_seed_size = data_seeding_options.extract( an_option_parser )
    a_case_dir, a_run_hook = solver_start_options.extract( an_option_parser )
    a_study_name = data_transfer_options.extract( an_option_parser )

    from cloudflu.preferences import get
    a_cluster_location = get( 'amazon.cluster.location' )
    a_host_port = int( get( 'amazon.cluster.host_port' ) )
    a_login_name = get( 'amazon.cluster.login_name' )

    print_d( "\n--------------------------- Canonical substitution ------------------------\n" )
    import sys; an_engine = sys.argv[ 0 ]
    a_call = "%s %s %s %s %s" % ( an_engine,
                                  ec2.use.options.compose( a_cluster_id ),
                                  data_transfer_options.compose( a_study_name ),
                                  solver_start_options.compose( a_case_dir, a_run_hook ),
                                  openmpi_config_options.compose( a_hostfile ) )
    print_d( a_call + '\n' )

    print_d( "\n----------------------- Running actual functionality ----------------------\n" )
    a_spent_time = Timer()

    an_ec2_conn = ec2.common.region_connect( a_cluster_location, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY )
    a_reservation = ec2.use.get_reservation( an_ec2_conn, a_cluster_id )
    print_d( '< %r > : %s\n' % ( a_reservation, a_reservation.instances ) )

    # The first instance of the reservation plays the master node role
    a_master_node = an_instance = a_reservation.instances[ 0 ]

    print_d( "\n--------------------- Uploading case data to S3 ---------------------------\n" )
    import os; a_case_name = os.path.basename( a_case_dir )

    from cloudflu.amazon.s3 import TRootObject
    a_root_object = TRootObject.get( AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY )
    print_d( "a_root_object = %s\n" % a_root_object )

    import data_upload
    an_input_study = data_upload.entry_point( a_root_object, { a_case_dir : '' },
                                              an_upload_seed_size, a_number_threads )
    an_input_study_name = an_input_study.name()
    print_d( "an_input_study_name = '%s'\n" % an_input_study_name )

    print_d( "\n------------------ Installing cloudflu to the master node -----------------\n" )
    a_password = None
    a_host_name = an_instance.public_dns_name
    an_identity_file = ec2.run.get_identity_filepath( an_instance.key_name )
    ssh.options.echo( a_password, an_identity_file, a_host_port, a_login_name, a_host_name )

    a_ssh_client = ssh.connect( a_password, an_identity_file, a_host_port, a_login_name, a_host_name )

    an_instance2ssh = {}
    an_instance2ssh[ an_instance ] = a_ssh_client

    print_d( "\n------------- Downloading case data from S3 to the master node ------------\n" )
    a_stdout_lines = ssh.command( a_ssh_client, 'python -c "import os, os.path, tempfile; print tempfile.mkdtemp()"' )
    a_working_dir = a_stdout_lines[ 0 ][ : -1 ]   # strip the trailing newline
    print_d( "a_working_dir = '%s'\n" % a_working_dir )

    ssh.command( a_ssh_client, "cloudflu-download --study-name=%s --output-dir=%s --remove" %
                 ( an_input_study_name, a_working_dir ) )

    print_d( "\n--- Sharing the solver case folder for all the cluster nodes through NFS --\n" )
    # appends "<working_dir> *(rw,no_root_squash,sync,subtree_check)" to /etc/exports
    ssh.command( a_ssh_client, "sudo sh -c 'echo %s *\(rw,no_root_squash,sync,subtree_check\) >> /etc/exports'" %
                 ( a_working_dir ) )
    ssh.command( a_ssh_client, "sudo exportfs -a" ) # make changes effective on the running NFS server

    for an_instance in a_reservation.instances[ 1 : ] :
        a_host_name = an_instance.public_dns_name
        ssh.options.echo( a_password, an_identity_file, a_host_port, a_login_name, a_host_name )

        a_ssh_client = ssh.connect( a_password, an_identity_file, a_host_port, a_login_name, a_host_name )
        ssh.command( a_ssh_client, "mkdir -p %s" % ( a_working_dir ) )
        ssh.command( a_ssh_client, "sudo mount %s:%s %s" %
                     ( a_master_node.private_ip_address, a_working_dir, a_working_dir ) )

        an_instance2ssh[ an_instance ] = a_ssh_client
        pass

    print_d( "\n--------------------- Getting access to the booked study ------------------\n" )
    import study_book; an_output_study = study_book.entry_point( a_root_object, a_study_name, True )

    print_d( "\n------------------------ Running the solver case --------------------------\n" )
    a_num_nodes = len( a_reservation.instances )
    a_target_dir = os.path.join( a_working_dir, a_case_name )

    an_additional_args = "--study-name='%s' --booked --upload-seed-size=%d" % ( a_study_name, an_upload_seed_size )

    a_ssh_client = an_instance2ssh[ a_master_node ]
    ssh.command( a_ssh_client, "%s/%s %d '%s' %s" %
                 ( a_target_dir, a_run_hook, a_num_nodes, a_hostfile, an_additional_args ) )

    an_output_study.seal()

    [ a_ssh_client.close() for a_ssh_client in an_instance2ssh.values() ]

    print_d( "a_spent_time = %s, sec\n" % a_spent_time )

    print_d( "\n------------------ Printing successive pipeline arguments -----------------\n" )
    ec2.use.options.track( a_cluster_id )

    print_d( "\n--------------------------- Canonical substitution ------------------------\n" )
    print_d( a_call + '\n' )

    print_d( "\n-------------------------------------- OK ---------------------------------\n" )
    pass
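
# The ssh.connect()/ssh.command() helpers above belong to cloudflu and are not
# shown in this excerpt. Assuming they wrap paramiko (an assumption, not
# confirmed by this file), a minimal equivalent of the two calls would look
# like the sketch below: connect with either a password or an identity file,
# run one command, and hand back its stdout lines.
def ssh_command_sketch( a_password, an_identity_file, a_host_port, a_login_name, a_host_name, a_command ) :
    import paramiko

    a_client = paramiko.SSHClient()
    a_client.set_missing_host_key_policy( paramiko.AutoAddPolicy() )

    # either 'a_password' or the private key in 'an_identity_file' authenticates the login
    a_client.connect( a_host_name, port = a_host_port, username = a_login_name,
                      password = a_password, key_filename = an_identity_file )

    an_stdin, an_stdout, an_stderr = a_client.exec_command( a_command )
    a_stdout_lines = an_stdout.readlines()   # what ssh.command() above appears to return

    an_exit_status = an_stdout.channel.recv_exit_status()   # 0 on success

    a_client.close()
    return a_stdout_lines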