def upload_timelog( the_root_object, the_study_name, the_booked, the_upload_seed_size, the_number_threads, the_item, the_log_file ) :
    """Snapshot the running time log under an item-specific name and upload it.

    The log file is copied to '<the_log_file>-<the_item>' so later items do
    not overwrite earlier snapshots, then handed to the generic uploader.
    """
    import data_upload
    an_item_log_file = "%s-%s" % ( the_log_file, the_item )
    sh_command( "cp %s %s" % ( the_log_file, an_item_log_file ) )
    data_upload.entry_point( the_root_object, { an_item_log_file : '' },
                             the_upload_seed_size, the_number_threads,
                             the_study_name, the_booked )
def upload_timedata( the_root_object, the_study_name, the_booked, the_upload_seed_size, the_number_threads, the_case_dir, the_item ) :
    """Reconstruct one time step of the OpenFOAM case and upload its directory.

    Runs 'reconstructPar' for the given time value (output appended to
    log.reconstructPar), then uploads the resulting time directory.
    """
    import os.path
    import data_upload
    sh_command( "reconstructPar -case '%s' -time %s >> log.reconstructPar 2>&1" % ( the_case_dir, the_item ) )
    a_time_dir = os.path.abspath( os.path.join( the_case_dir, the_item ) )
    data_upload.entry_point( the_root_object, { a_time_dir : '' },
                             the_upload_seed_size, the_number_threads,
                             the_study_name, the_booked )
def upload_file( the_worker_pool, the_file_path, the_file_location, the_study_object, the_upload_seed_size, the_printing_depth ) :
    """Pack a file into a tgz archive, split it into upload seeds and register it.

    The file is tar-gzipped into a temporary working directory, split into
    chunks of 'the_upload_seed_size' bytes ('<name>.tgz-NN' seed files), and a
    TFileObject carrying the archive's MD5 is created in 'the_study_object'.
    """
    a_working_dir = generate_uploading_dir( the_file_path )
    import shutil
    shutil.rmtree( a_working_dir, True )
    os.makedirs( a_working_dir )
    print_d( "a_working_dir = '%s'\n" % a_working_dir, the_printing_depth )
    a_file_dirname = os.path.dirname( the_file_path )
    a_file_basename = os.path.basename( the_file_path )
    import tempfile
    # mkstemp() returns (fd, path); the fd must be closed explicitly,
    # otherwise it leaks one OS file descriptor per uploaded file
    a_tmp_fd, a_tmp_file = tempfile.mkstemp( dir = a_working_dir )
    # a_tmp_file = tempfile.mkstemp()[ 1 ] # use this work arround for FAT file systems
    os.close( a_tmp_fd )
    sh_command( "cd '%s' && tar -czf %s '%s'" % ( a_file_dirname, a_tmp_file, a_file_basename ), the_printing_depth )
    a_statinfo = os.stat( a_tmp_file )
    print_d( "a_statinfo.st_size = %d, bytes\n" % a_statinfo.st_size, the_printing_depth )
    import math
    # Number of digits needed by 'split' to enumerate all the seed chunks
    a_suffix_length = math.log10( float( a_statinfo.st_size ) / the_upload_seed_size )
    if a_suffix_length > 0 :
        a_suffix_length = int( a_suffix_length + 1.0 )
    else :
        a_suffix_length = 0
    print_d( "a_suffix_length = %d, digits\n" % a_suffix_length, the_printing_depth )
    a_file_seed_target = os.path.join( a_working_dir, a_file_basename )
    sh_command( "cat '%s' | split --bytes=%d --numeric-suffixes --suffix-length=%d - %s.tgz-" % ( a_tmp_file, the_upload_seed_size, a_suffix_length, a_file_seed_target ), the_printing_depth )
    # Compute the archive checksum; 'with' guarantees the handle is closed
    # even if compute_md5() raises
    with open( a_tmp_file, 'rb' ) as a_file_pointer :
        a_hex_md5, a_base64md5 = compute_md5( a_file_pointer )
    os.remove( a_tmp_file )
    a_file_object = TFileObject.create( the_study_object, the_file_path, the_file_location, a_hex_md5 )
    print_d( "a_file_object = %s\n" % a_file_object, the_printing_depth )
def correct_value( the_value ) :
    """Normalize a raw command-line value.

    Empty markers ("" or "''") become None; a bash process-substitution path
    ('/dev/fd/N') is dereferenced by reading its content; anything else is
    returned unchanged.
    """
    if the_value in ( "", "''" ) :
        return None
    if not the_value.startswith( '/dev/fd/' ) :
        return the_value
    # Process-substitution descriptor: read it and strip the trailing newline
    from cloudflu.common import sh_command
    a_stdout_lines = sh_command( 'cat %s' % the_value )
    return a_stdout_lines[ 0 ][ : -1 ]
def download_file( the_file_object, the_output_dir, the_number_threads, the_remove, the_fresh, the_callback ) :
    """Download, verify and unpack one study file into 'the_output_dir'.

    Seed chunks are downloaded and concatenated into '<name>.tgz'; the archive
    is re-downloaded until its MD5 matches the expected checksum, then
    extracted and cleaned up.  Optionally deletes the remote file object
    afterwards and invokes 'the_callback( an_output_dir, a_located_file )'.
    Returns True.
    """
    a_printing_depth = 0
    print_d( "the_file_object = %s\n" % the_file_object, a_printing_depth )
    a_hex_md5 = the_file_object.hex_md5()
    a_located_file = the_file_object.located_file()
    import os.path
    a_file_dirname = os.path.dirname( a_located_file )
    a_file_basename = os.path.basename( a_located_file )
    an_output_dir = os.path.join( the_output_dir, a_file_dirname )
    print_d( "an_output_dir = '%s'\n" % an_output_dir, a_printing_depth + 1 )
    a_file_path = os.path.join( an_output_dir, a_file_basename )
    if the_fresh :
        # Remove stale seed chunks and the previously extracted file.
        # NOTE: the glob '*' must stay outside the single quotes - the
        # original "rm -fr '%s.tgz*'" quoted it, so the shell never
        # expanded it and stale seeds were silently left behind.
        sh_command( "rm -fr '%s'.tgz*" % ( a_file_path ), a_printing_depth + 1 )
        sh_command( "rm -fr '%s'" % ( a_file_path ), a_printing_depth + 1 )
        pass
    if not os.path.exists( an_output_dir ) :
        os.makedirs( an_output_dir )
        pass
    print_d( "a_file_path = '%s'\n" % a_file_path, a_printing_depth + 2 )
    if not os.path.exists( a_file_path ) :
        # Keep re-downloading until the assembled archive's checksum matches
        while True :
            download_seeds( the_file_object, a_file_basename, an_output_dir, the_number_threads, a_printing_depth + 3 )
            an_archive_name = "%s.tgz" % a_file_basename
            an_archive_path = os.path.join( an_output_dir, an_archive_name )
            if not os.path.exists( an_archive_path ) :
                # Concatenate the downloaded seed chunks into one archive
                sh_command( "cd '%s' && cat %s-* > %s" % ( an_output_dir, an_archive_name, an_archive_name ), a_printing_depth + 1 )
                pass
            # 'with' guarantees the handle is closed on every retry
            # (the original leaked one file descriptor per loop iteration)
            with open( an_archive_path, 'rb' ) as an_archive_pointer :
                a_md5 = compute_md5( an_archive_pointer )[ 0 ]
            print_d( "'%s' - %s\n" % ( a_hex_md5, ( a_hex_md5 == a_md5 ) ), a_printing_depth + 1 )
            if a_hex_md5 == a_md5 :
                break
            # Checksum mismatch: drop the corrupted archive and retry
            import os
            os.remove( an_archive_path )
            pass
        sh_command( "tar -xzf '%s' -C '%s'" % ( an_archive_path, an_output_dir ), a_printing_depth + 1 )
        sh_command( "cd '%s' && rm %s-*" % ( an_output_dir, an_archive_name ), a_printing_depth + 1 )
        os.remove( an_archive_path )
    else :
        print_d( "- nothing to be done, already downloaded\n", a_printing_depth + 1 )
        pass
    if the_remove == True and os.path.exists( a_file_path ) :
        the_file_object.delete( the_number_threads, a_printing_depth + 1 )
        pass
    if the_callback != None :
        the_callback( an_output_dir, a_located_file )
        pass
    return True
def main() :
    # Deploy/install the cloudflu engine onto a remote host over SSH:
    # either 'easy_install' from an URL (production) or build an sdist
    # locally and install it on the remote side (development).
    #----------------------- Defining utility command-line interface -------------------------
    an_usage_description = "%prog"
    an_usage_description += deploy_options.usage_description()
    an_usage_description += ssh.options.usage_description()
    from cloudflu import VERSION
    a_version = "%s" % VERSION
    from optparse import IndentedHelpFormatter
    a_help_formatter = IndentedHelpFormatter( width = 127 )
    from optparse import OptionParser
    an_option_parser = OptionParser( usage = an_usage_description, version = a_version, formatter = a_help_formatter )
    #----------------------- Definition of the command line arguments ------------------------
    deploy_options.add( an_option_parser )
    ssh.options.add( an_option_parser )
    common.options.add( an_option_parser )
    #------------------ Extracting and verifying command-line arguments ----------------------
    an_options, an_args = an_option_parser.parse_args()
    an_enable_debug = common.options.extract( an_option_parser )
    a_password, an_identity_file, a_host_port, a_login_name, a_host_name, a_command = ssh.options.extract( an_option_parser )
    a_production, an_url = deploy_options.extract( an_option_parser )
    print_d( "\n--------------------------- Canonical substitution ------------------------\n" )
    # Echo the equivalent fully-expanded command line for reproducibility
    import sys ; an_engine = sys.argv[ 0 ]
    a_call = "%s %s %s" % ( an_engine, deploy_options.compose( a_production, an_url ), ssh.options.compose( a_password, an_identity_file, a_host_port, a_login_name, a_host_name ) )
    print_d( a_call + '\n' )
    print_d( "\n----------------------- Running actual functionality ----------------------\n" )
    ssh.options.echo( a_password, an_identity_file, a_host_port, a_login_name, a_host_name )
    a_ssh_client = ssh.connect( a_password, an_identity_file, a_host_port, a_login_name, a_host_name, a_command )
    if a_production == True :
        # Production: install the released package straight from its URL
        ssh.command( a_ssh_client, "sudo easy_install %s" % an_url )
    else :
        # Development: build a source distribution locally ...
        import os.path ; a_source_dir = os.path.abspath( os.curdir )
        sh_command( "cd %s && ./setup.py sdist" % a_source_dir )
        # ... make sure the remote host can build/install Python packages ...
        ssh.command( a_ssh_client, ( "sudo apt-get -y install python-setuptools" ) )
        ssh.command( a_ssh_client, ( "sudo apt-get -y install python-all-dev" ) )
        # ... create a scratch directory on the remote side (the trailing
        # newline of the remote stdout is stripped with [ : -1 ])
        a_stdout_lines = ssh.command( a_ssh_client, 'python -c "import os, os.path, tempfile; print tempfile.mkdtemp()"' )
        a_working_dir = a_stdout_lines[ 0 ][ : -1 ]
        print_d( "a_working_dir = %s\n" % a_working_dir )
        import cloudflu
        a_cloudflu_name = "%s-%s" % ( cloudflu.NAME, cloudflu.VERSION )
        a_cloudflu_archive_name = a_cloudflu_name + os.extsep + "tar.gz"
        a_cloudflu_source_archive = os.path.join( a_source_dir, 'dist', a_cloudflu_archive_name )
        a_cloudflu_target_archive = os.path.join( a_working_dir, a_cloudflu_archive_name )
        # Uploading and installing into the cloud corresponding Python engine (itself)
        a_sftp_client = a_ssh_client.open_sftp()
        a_sftp_client.put( a_cloudflu_source_archive, a_cloudflu_target_archive )
        ssh.command( a_ssh_client, 'cd %s && tar -xzf %s' % ( a_working_dir, a_cloudflu_archive_name ) )
        a_cloudflu_setup_dir = os.path.join( a_working_dir, a_cloudflu_name )
        ssh.command( a_ssh_client, 'cd %s && sudo python ./setup.py install' % ( a_cloudflu_setup_dir ) )
        # NOTE(review): remote scratch dir is deliberately left in place -
        # the cleanup command below is commented out
        # ssh.command( a_ssh_client, """python -c 'import shutil; shutil.rmtree( "%s" )'""" % a_working_dir )
        pass
    # To enable 'cloudflu' debug mode by default
    ssh.command( a_ssh_client, """sudo bash -c "echo 'export __CLOUDFLU_DEBUG_ENABLE__=X' >> /etc/profile" """ )
    a_ssh_client.close()
    print_d( "\n------------------ Printing succussive pipeline arguments -----------------\n" )
    ssh.options.track( a_password, an_identity_file, a_host_port, a_login_name, a_host_name )
    print_d( "\n--------------------------- Canonical substitution ------------------------\n" )
    ssh.options.echo( a_password, an_identity_file, a_host_port, a_login_name, a_host_name )
    print_d( a_call + '\n' )
    print_d( "\n-------------------------------------- OK ---------------------------------\n" )
    pass