def entry_point( the_root_object, the_file2locations, the_upload_seed_size, the_number_threads, the_study_name = None, the_booked = False ) :
    a_spent_time = Timer()

    # Book the target study (or get access to an already booked one)
    import study_book; a_study_object = study_book.entry_point( the_root_object, the_study_name, the_booked )
    print_d( '\n' )

    # Start seeding the given files into the study
    import upload_start; a_study_object = upload_start.entry_point( a_study_object, the_file2locations, the_upload_seed_size, the_number_threads )
    print_d( '\n' )

    # Compose the study locations the uploaded files should appear under
    import os.path
    a_located_files = []
    for a_file, a_location in the_file2locations.iteritems() :
        a_located_files.append( os.path.join( a_location, os.path.basename( a_file ) ) )
        pass
    print_d( "a_located_files = %s\n" % a_located_files )

    # Resume the upload until all the located files are completely transferred
    import upload_resume; a_study_object = upload_resume.entry_point( a_study_object, a_located_files, the_number_threads )
    print_d( '\n' )

    print_d( "a_spent_time = %s, sec\n" % a_spent_time )

    return a_study_object
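
# A minimal usage sketch (hypothetical values) for the upload entry point above.
# The keys of 'the_file2locations' are local paths and its values are the study
# locations they should appear under; './damBreak', the seed size and the thread
# count below are assumed example values only:
#
#   from cloudflu.amazon.s3 import TRootObject
#   a_root_object = TRootObject.get( AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY )
#   a_study_object = entry_point( a_root_object, { './damBreak' : '' },
#                                 the_upload_seed_size = 65536, the_number_threads = 4 )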
def main() :
    #----------------------- Defining utility command-line interface -------------------------
    import data_transfer_options
    import solver_process_options

    an_usage_description = "%prog"
    an_usage_description += data_transfer_options.usage_description()
    an_usage_description += solver_process_options.usage_description()

    from cloudflu import VERSION
    a_version = "%s" % VERSION

    from optparse import IndentedHelpFormatter
    a_help_formatter = IndentedHelpFormatter( width = 127 )

    from optparse import OptionParser
    an_option_parser = OptionParser( usage = an_usage_description, version = a_version, formatter = a_help_formatter )

    #----------------------- Definition of the command line arguments ------------------------
    data_transfer_options.add( an_option_parser )
    solver_process_options.add( an_option_parser )
    common.concurrency_options.add( an_option_parser )
    amazon.security_options.add( an_option_parser )
    common.options.add( an_option_parser )

    #------------------ Extracting and verifying command-line arguments ----------------------
    an_options, an_args = an_option_parser.parse_args()

    an_enable_debug, a_log_file = common.options.extract( an_option_parser )
    AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY = amazon.security_options.extract( an_option_parser )
    a_number_threads = common.concurrency_options.extract( an_option_parser )
    a_study_name = data_transfer_options.extract( an_option_parser )
    an_output_dir, a_before_hook, a_time_hook, an_after_hook = solver_process_options.extract( an_option_parser )

    print_d( "\n--------------------------- Canonical substitution ------------------------\n" )
    import sys; an_engine = sys.argv[ 0 ]
    a_call = "%s %s %s" % ( an_engine,
                            data_transfer_options.compose( a_study_name ),
                            solver_process_options.compose( an_output_dir, a_before_hook, a_time_hook, an_after_hook ) )
    print_d( a_call + '\n' )

    #---------------------- To compose a before 'download' callback functor ------------------
    class BeforeHook :
        def __init__( self, the_hook ) :
            self._hook = the_hook
            pass
        def __call__( self, the_study_name, the_output_dir ) :
            try :
                import os; os.system( "%s '%s' '%s'" % ( self._hook, the_study_name, the_output_dir ) )
            except : # silently ignore hook failures
                pass
            pass
        pass
    if a_before_hook != None :
        a_before_hook = BeforeHook( a_before_hook )
        pass

    #---------------------- To compose a time 'download' callback functor --------------------
    class TimeHook :
        def __init__( self, the_hook ) :
            self._hook = the_hook
            pass
        def __call__( self, the_output_dir, the_located_file ) :
            try :
                a_time = float( the_located_file ) # react only on files whose names decode as time values
                import os; os.system( "%s '%s' '%s'" % ( self._hook, the_output_dir, a_time ) )
            except : # silently ignore non-time files and hook failures
                pass
            pass
        pass
    if a_time_hook != None :
        a_time_hook = TimeHook( a_time_hook )
        pass

    #---------------------- To compose an after 'download' callback functor ------------------
    class AfterHook :
        def __init__( self, the_hook ) :
            self._hook = the_hook
            pass
        def __call__( self, the_output_dir ) :
            try :
                import os; os.system( "%s '%s'" % ( self._hook, the_output_dir ) )
            except : # silently ignore hook failures
                pass
            pass
        pass
    if an_after_hook != None :
        an_after_hook = AfterHook( an_after_hook )
        pass

    print_d( "\n----------------------- Running actual functionality ----------------------\n" )
    a_spent_time = Timer()

    from cloudflu.amazon.s3 import TRootObject
    a_root_object = TRootObject.get( AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY )
    print_d( "a_root_object = %s\n" % a_root_object )

    import study_book; an_output_study = study_book.entry_point( a_root_object, a_study_name, True )

    def Download( the_study_object, the_output_dir, the_number_threads ) :
        if a_before_hook != None :
            a_before_hook( the_study_object.name(), the_output_dir )
            pass
        import download; download.entry_point( the_study_object, the_output_dir, None, the_number_threads, True, True, True, a_time_hook )
        if an_after_hook != None :
            an_after_hook( the_output_dir )
            pass
        pass

    Download( an_output_study, an_output_dir, a_number_threads )

    print_d( "a_spent_time = %s, sec\n" % a_spent_time )

    print_d( "\n------------------ Printing successive pipeline arguments -----------------\n" )
    print a_study_name # report the study name for the next pipeline command

    print_d( "\n--------------------------- Canonical substitution ------------------------\n" )
    print_d( a_call + '\n' )

    print_d( "\n-------------------------------------- OK ---------------------------------\n" )
    pass
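
# A minimal sketch (hypothetical script) of the hook contract used above: each
# hook option names an external executable that is started through os.system
# with positional arguments. The time hook, for example, receives the output
# directory and the decoded time value:
#
#   #!/usr/bin/env python
#   # my_time_hook.py - an assumed example, not part of cloudflu
#   import sys
#   an_output_dir, a_time = sys.argv[ 1 ], sys.argv[ 2 ]
#   print "time '%s' has arrived into '%s'" % ( a_time, an_output_dir )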
def main() :
    #----------------------- Defining utility command-line interface -------------------------
    import data_transfer_options
    import solver_start_options
    import data_seeding_options
    import openmpi_config_options

    an_usage_description = "%prog"
    an_usage_description += ec2.use.options.usage_description()
    an_usage_description += data_transfer_options.usage_description()
    an_usage_description += solver_start_options.usage_description()

    from cloudflu import VERSION
    a_version = "%s" % VERSION

    from optparse import IndentedHelpFormatter
    a_help_formatter = IndentedHelpFormatter( width = 127 )

    from optparse import OptionParser
    an_option_parser = OptionParser( usage = an_usage_description, version = a_version, formatter = a_help_formatter )

    #----------------------- Definition of the command line arguments ------------------------
    ec2.use.options.add( an_option_parser )
    data_transfer_options.add( an_option_parser )
    solver_start_options.add( an_option_parser )
    data_seeding_options.add( an_option_parser )
    common.concurrency_options.add( an_option_parser )
    openmpi_config_options.add( an_option_parser )
    amazon.security_options.add( an_option_parser )
    common.options.add( an_option_parser )

    #------------------ Extracting and verifying command-line arguments ----------------------
    an_options, an_args = an_option_parser.parse_args()

    an_enable_debug, a_log_file = common.options.extract( an_option_parser )
    AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY = amazon.security_options.extract( an_option_parser )
    a_cluster_id = ec2.use.options.extract( an_option_parser )
    a_hostfile = openmpi_config_options.extract( an_option_parser )
    a_number_threads = common.concurrency_options.extract( an_option_parser )
    an_upload_seed_size = data_seeding_options.extract( an_option_parser )
    a_case_dir, a_run_hook = solver_start_options.extract( an_option_parser )
    a_study_name = data_transfer_options.extract( an_option_parser )

    from cloudflu.preferences import get
    a_cluster_location = get( 'amazon.cluster.location' )
    a_host_port = int( get( 'amazon.cluster.host_port' ) )
    a_login_name = get( 'amazon.cluster.login_name' )

    print_d( "\n--------------------------- Canonical substitution ------------------------\n" )
    import sys; an_engine = sys.argv[ 0 ]
    a_call = "%s %s %s %s %s" % ( an_engine,
                                  ec2.use.options.compose( a_cluster_id ),
                                  data_transfer_options.compose( a_study_name ),
                                  solver_start_options.compose( a_case_dir, a_run_hook ),
                                  openmpi_config_options.compose( a_hostfile ) )
    print_d( a_call + '\n' )

    print_d( "\n----------------------- Running actual functionality ----------------------\n" )
    a_spent_time = Timer()

    an_ec2_conn = ec2.common.region_connect( a_cluster_location, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY )
    a_reservation = ec2.use.get_reservation( an_ec2_conn, a_cluster_id )
    print_d( '< %r > : %s\n' % ( a_reservation, a_reservation.instances ) )

    # The first instance of the reservation acts as the master node
    a_master_node = an_instance = a_reservation.instances[ 0 ]

    print_d( "\n--------------------- Uploading case data to S3 ---------------------------\n" )
    import os; a_case_name = os.path.basename( a_case_dir )

    from cloudflu.amazon.s3 import TRootObject
    a_root_object = TRootObject.get( AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY )
    print_d( "a_root_object = %s\n" % a_root_object )

    import data_upload
    an_input_study = data_upload.entry_point( a_root_object, { a_case_dir : '' }, an_upload_seed_size, a_number_threads )
    an_input_study_name = an_input_study.name()
    print_d( "an_input_study_name = '%s'\n" % an_input_study_name )

    print_d( "\n------------------ Installing cloudflu to master node ---------------------\n" )
    a_password = None
    a_host_name = an_instance.public_dns_name
    an_identity_file = ec2.run.get_identity_filepath( an_instance.key_name )
    ssh.options.echo( a_password, an_identity_file, a_host_port, a_login_name, a_host_name )

    a_ssh_client = ssh.connect( a_password, an_identity_file, a_host_port, a_login_name, a_host_name )
    an_instance2ssh = {}
    an_instance2ssh[ an_instance ] = a_ssh_client

    print_d( "\n------------- Downloading case data from S3 to the master node ------------\n" )
    a_stdout_lines = ssh.command( a_ssh_client, 'python -c "import os, os.path, tempfile; print tempfile.mkdtemp()"' )
    a_working_dir = a_stdout_lines[ 0 ][ : -1 ] # strip the trailing newline
    print_d( "a_working_dir = '%s'\n" % a_working_dir )

    ssh.command( a_ssh_client, "cloudflu-download --study-name=%s --output-dir=%s --remove" % ( an_input_study_name, a_working_dir ) )

    print_d( "\n--- Sharing the solver case folder for all the cluster nodes through NFS --\n" )
    ssh.command( a_ssh_client, "sudo sh -c 'echo %s *\(rw,no_root_squash,sync,subtree_check\) >> /etc/exports'" % ( a_working_dir ) )
    ssh.command( a_ssh_client, "sudo exportfs -a" ) # make changes effective on the running NFS server

    # Mount the master node export on each of the slave nodes
    for an_instance in a_reservation.instances[ 1 : ] :
        a_host_name = an_instance.public_dns_name
        ssh.options.echo( a_password, an_identity_file, a_host_port, a_login_name, a_host_name )

        a_ssh_client = ssh.connect( a_password, an_identity_file, a_host_port, a_login_name, a_host_name )
        ssh.command( a_ssh_client, "mkdir -p %s" % ( a_working_dir ) )
        ssh.command( a_ssh_client, "sudo mount %s:%s %s" % ( a_master_node.private_ip_address, a_working_dir, a_working_dir ) )

        an_instance2ssh[ an_instance ] = a_ssh_client
        pass

    print_d( "\n---------------------- Get access to the booked study ---------------------\n" )
    import study_book; an_output_study = study_book.entry_point( a_root_object, a_study_name, True )

    print_d( "\n------------------------ Running the solver case --------------------------\n" )
    a_num_nodes = len( a_reservation.instances )
    a_target_dir = os.path.join( a_working_dir, a_case_name )
    an_additional_args = "--study-name='%s' --booked --upload-seed-size=%d" % ( a_study_name, an_upload_seed_size )

    a_ssh_client = an_instance2ssh[ a_master_node ]
    ssh.command( a_ssh_client, "%s/%s %d '%s' %s" % ( a_target_dir, a_run_hook, a_num_nodes, a_hostfile, an_additional_args ) )

    an_output_study.seal()

    for a_ssh_client in an_instance2ssh.values() :
        a_ssh_client.close()
        pass

    print_d( "a_spent_time = %s, sec\n" % a_spent_time )

    print_d( "\n------------------ Printing successive pipeline arguments -----------------\n" )
    ec2.use.options.track( a_cluster_id )

    print_d( "\n--------------------------- Canonical substitution ------------------------\n" )
    print_d( a_call + '\n' )

    print_d( "\n-------------------------------------- OK ---------------------------------\n" )
    pass