Пример #1
0
def download_seeds( the_file_object, the_file_basename, the_output_dir, the_number_threads, the_printing_depth ) :
    """Download all seeds of 'the_file_object' into 'the_output_dir'.

    Keeps retrying whole passes until the file object is sealed and a
    pass completes with every worker succeeding.  Seeds already present
    locally with a matching MD5 are skipped; stale copies are removed
    and re-downloaded.  Always returns True (loop exits only on success).
    """
    an_is_download_ok = False
    while not the_file_object.sealed() or not an_is_download_ok :
        a_worker_pool = WorkerPool( the_number_threads )
        try:
            for a_seed_object in the_file_object :
                a_hex_md5 = a_seed_object.hex_md5()
                a_seed_path = os.path.join( the_output_dir, a_seed_object.basename() )

                if os.path.exists( a_seed_path ) :
                    # Verify the existing local copy against the expected MD5.
                    # Fix: close the file handle - the original leaked one
                    # open descriptor per already-present seed.
                    a_file_pointer = open( a_seed_path, 'rb' )
                    try :
                        a_md5 = compute_md5( a_file_pointer )[ 0 ]
                    finally :
                        a_file_pointer.close()

                    if a_hex_md5 == a_md5 :
                        continue

                    # Checksum mismatch - drop the stale copy and re-download
                    os.remove( a_seed_path )

                    pass

                print_d( "a_seed_path = '%s'\n" % a_seed_path, the_printing_depth )

                a_worker_pool.charge( download_seed, ( a_seed_object, a_seed_path, the_printing_depth + 1 ) )
        except:
            # Best-effort: report the failure and retry on the next pass
            from cloudflu.common import print_traceback
            print_traceback( the_printing_depth )
            pass

        a_worker_pool.shutdown()
        an_is_download_ok = a_worker_pool.is_all_right()

        print_d( "'%s'.uploaded() == %s\n" % ( the_file_object.located_file(), the_file_object.sealed() ), the_printing_depth )
        pass
    
    return True
Пример #2
0
def upload_files( the_study_object, the_number_threads, the_printing_depth ) :
    """Charge one upload task per file of the study and wait for all of them."""
    a_pool = WorkerPool( the_number_threads )

    for an_item in the_study_object :
        print_d( "a_file_object = %s\n" % an_item, the_printing_depth )
        a_pool.charge( upload_file, ( an_item, the_number_threads, the_printing_depth + 1 ) )

    a_pool.shutdown()
    a_pool.join()
    pass
Пример #3
0
def upload_files( the_study_object, the_file2locations, the_upload_seed_size, the_number_threads, the_printing_depth ) :
    """Schedule one upload task per (file, location) pair, then wait for completion."""
    a_pool = WorkerPool( the_number_threads )

    for a_source, a_target in the_file2locations.iteritems() :
        a_task_args = ( a_pool, a_source, a_target, the_study_object,
                        the_upload_seed_size, the_printing_depth )
        a_pool.charge( upload_file, a_task_args )

    a_pool.shutdown()
    a_pool.join()
    pass
Пример #4
0
    def delete( self, the_number_threads, the_printing_depth ) :
        """Delete all contained seed objects concurrently, then this object and its key."""
        print_d( "deleting - %s\n" % self, the_printing_depth )

        a_pool = WorkerPool( the_number_threads )

        def a_seed_deleter( the_object, the_printing_depth ) :
            return the_object.delete( the_printing_depth )

        for an_item in self :
            a_pool.charge( a_seed_deleter, ( an_item, the_printing_depth + 1 ) )

        # Base-class bookkeeping runs while seed deletions are in flight
        TSealingObject.delete( self )

        a_pool.shutdown()
        a_pool.join()

        self._key.delete()
        pass
Пример #5
0
def entry_point( the_study_objects, the_number_threads, the_printing_depth = 0 ) :
    """Remove every given study concurrently, one pool worker per study."""
    a_spent_time = Timer()

    a_pool = WorkerPool( len( the_study_objects ) )

    def a_study_deleter( the_object, the_number_threads, the_printing_depth ) :
        return the_object.delete( the_number_threads, the_printing_depth )

    for an_object in the_study_objects :
        print_i( "------------------------------- Removing study ----------------------------------\n" )
        print_d( "a_study_object = %s\n" % an_object, the_printing_depth )
        a_pool.charge( a_study_deleter, ( an_object, the_number_threads, the_printing_depth ) )

    a_pool.shutdown()
    a_pool.join()

    print_d( "a_spent_time = %s, sec\n" % a_spent_time, the_printing_depth )
    return
Пример #6
0
def entry_point( the_study_object, the_located_files, the_number_threads ) :
    """Upload study files.

    When 'the_located_files' is None, every file of the study is uploaded;
    otherwise only the named located files are resolved and uploaded.
    Returns the study object for pipeline chaining.
    """
    a_spent_time = Timer()
    
    # Idiom fix: identity test for None instead of '==' (PEP 8)
    if the_located_files is None :
        upload_files( the_study_object, the_number_threads, 0 )
        pass
    else :
        a_worker_pool = WorkerPool( the_number_threads )
        
        for a_located_file in the_located_files:
            a_file_object = TFileObject.get( the_study_object, a_located_file )
            a_worker_pool.charge( upload_file, ( a_file_object, the_number_threads, 0 ) )
            pass
        
        a_worker_pool.shutdown()
        a_worker_pool.join()
        pass
    
    print_d( "a_spent_time = %s, sec\n" % a_spent_time )

    return the_study_object
Пример #7
0
def upload_seeds( the_file_object, the_working_dir, the_number_threads, the_printing_depth ) :
    """Upload every seed file found in 'the_working_dir'.

    Loops until one full pass finishes with all workers succeeding, then
    marks the file object finished.  Seeds already registered on the file
    object are deleted locally instead of being re-uploaded.  Always
    returns True (the loop exits only on success).
    """
    while True :
        a_dir_contents = os.listdir( the_working_dir )

        # At least one worker, but never more than there are files to push
        a_number_threads = max( min( the_number_threads, len( a_dir_contents ) ), 1 )
        print_d( "a_number_threads = %d\n" % a_number_threads, the_printing_depth )

        a_worker_pool = WorkerPool( a_number_threads )

        # Process names in reverse lexicographic order (single in-place sort
        # instead of the original sort() + reverse() pair)
        a_dir_contents.sort( reverse = True )

        # Build the membership set once per pass: O(1) lookups instead of a
        # linear list scan per directory entry
        a_seed_names = set( a_seed_object.basename() for a_seed_object in the_file_object )

        for a_seed_name in a_dir_contents :
            a_seed_path = os.path.join( the_working_dir, a_seed_name )
            print_d( "'%s'\n" % a_seed_path, the_printing_depth + 1 )

            if a_seed_name in a_seed_names :
                # Already uploaded - drop the local copy and move on
                os.remove( a_seed_path )

                continue

            a_worker_pool.charge( upload_seed, ( the_file_object, a_seed_name, a_seed_path, the_printing_depth + 2 ) )

            pass

        a_worker_pool.shutdown()
        an_is_all_right = a_worker_pool.is_all_right()

        if an_is_all_right :
            mark_finished( the_file_object, the_working_dir, the_printing_depth )

            break

        pass

    return True
Пример #8
0
def main() :
    """CLI entry point: release the EC2 cluster reservations named on the command line.

    Parses options, connects to the configured EC2 region, then deletes
    every requested cluster concurrently (one worker per cluster id).
    """
    #----------------------- Defining utility command-line interface -------------------------    
    an_usage_description = "%prog"

    from reservation_rm_options import usage_description as usage_description_options
    an_usage_description += usage_description_options()

    from cloudflu import VERSION
    a_version = "%s" % VERSION

    from optparse import IndentedHelpFormatter
    a_help_formatter = IndentedHelpFormatter( width = 127 )

    from optparse import OptionParser
    an_option_parser = OptionParser( usage = an_usage_description, version = a_version, formatter = a_help_formatter )


    #----------------------- Definition of the command line arguments ------------------------
    ec2.use.options.add( an_option_parser )

    amazon.security_options.add( an_option_parser )
    
    common.options.add( an_option_parser )
  
 
    #------------------ Extracting and verifying command-line arguments ----------------------
    an_options, an_args = an_option_parser.parse_args()

    common.options.extract( an_option_parser )
    
    AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY = amazon.security_options.extract( an_option_parser )
    
    from reservation_rm_options import extract as extract_options
    a_cluster_ids = extract_options( an_option_parser )
    
    # Region to connect to comes from user preferences, not the CLI
    from cloudflu.preferences import get
    a_cluster_location = get( 'amazon.cluster.location' )
   

    print_d( "\n--------------------------- Canonical substitution ------------------------\n" )
    # Echo the equivalent canonical command line, for reproducibility
    import sys; an_engine = sys.argv[ 0 ]
    
    from reservation_rm_options import compose as compose_options
    a_call = "%s %s" % ( an_engine, compose_options( a_cluster_ids ) )
    print_d( a_call + '\n' )
 
 
    print_d( "\n----------------------- Running actual functionality ----------------------\n" )
    a_spent_time = Timer()
    
    an_ec2_conn = ec2.common.region_connect( a_cluster_location, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY )

    # One worker per cluster id - all deletions run concurrently
    a_worker_pool = WorkerPool( len( a_cluster_ids ) )

    for a_cluster_id in a_cluster_ids :
        a_worker_pool.charge( execute, ( an_ec2_conn, a_cluster_id ) )
        pass
    
    a_worker_pool.shutdown()
    a_worker_pool.join()

    print_d( "a_spent_time = %s, sec\n" % a_spent_time )
    
    
    print_d( "\n------------------ Printing succussive pipeline arguments -----------------\n" )
    # There are no - it is a terminal step
    
    
    print_d( "\n--------------------------- Canonical substitution ------------------------\n" )
    print_d( a_call + '\n' )
    
    
    print_d( "\n-------------------------------------- OK ---------------------------------\n" )
    pass
Пример #9
0
def main() :
    """CLI entry point: remove the named located files from a study.

    Parses options, resolves the study object on S3, then deletes each
    requested file object concurrently and prints the study name for
    the next pipeline step.
    """
    #----------------------- Defining utility command-line interface -------------------------    
    an_usage_description = "%prog"

    from rm_options import usage_description as usage_description_options
    an_usage_description += usage_description_options()
    
    from cloudflu import VERSION
    a_version = "%s" % VERSION

    from optparse import IndentedHelpFormatter
    a_help_formatter = IndentedHelpFormatter( width = 127 )

    from optparse import OptionParser
    an_option_parser = OptionParser( usage = an_usage_description, version = a_version, formatter = a_help_formatter )


    #----------------------- Definition of the command line arguments ------------------------
    from rm_options import add as add_options
    add_options( an_option_parser )

    amazon.security_options.add( an_option_parser )
    
    common.concurrency_options.add( an_option_parser )
    
    common.communication_options.add( an_option_parser )
    
    common.options.add( an_option_parser )


    #------------------ Extracting and verifying command-line arguments ----------------------
    an_options, an_args = an_option_parser.parse_args()

    common.options.extract( an_option_parser )

    common.communication_options.extract( an_option_parser )
    
    a_number_threads = common.concurrency_options.extract( an_option_parser )
    
    AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY = amazon.security_options.extract( an_option_parser )

    from rm_options import extract as extract_options
    a_study_name, a_located_files = extract_options( an_option_parser )


    print_i( "--------------------------- Looking for study object ----------------------------\n" )
    a_root_object = TRootObject.get( AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY )
    print_d( "a_root_object = %s\n" % a_root_object )

    a_study_object = TStudyObject.get( a_root_object, a_study_name )
    print_d( "a_study_object = %s\n" % a_study_object )


    print_i( "-------------------------- Removing study files -----------------------------\n" )
    # One worker per file to remove - deletions run concurrently
    a_worker_pool = WorkerPool( len( a_located_files ) )

    a_deleter = lambda the_object, the_number_threads, the_printing_depth : \
        the_object.delete( the_number_threads, the_printing_depth )

    for a_located_file in a_located_files :
        print_d( "a_located_file = %s\n" % a_located_file )
    
        a_file_object = TFileObject.get( a_study_object, a_located_file )
        print_d( "a_file_object = %s\n" % a_file_object, 1 )
        
        a_worker_pool.charge( a_deleter, ( a_file_object, a_number_threads, 2 ) )
        pass

    a_worker_pool.shutdown()
    a_worker_pool.join()


    print_i( "------------------- Printing succussive pipeline arguments ----------------------\n" )
    # Emit the study name on stdout so the next pipeline stage can consume it
    print a_study_name


    print_i( "-------------------------------------- OK ---------------------------------------\n" )
    pass
Пример #10
0
def main() :
    """CLI entry point: watch a running solver and upload new timestamp data.

    Spawns the solver under pexpect, waits for the watched keyword in its
    output, and each time it appears uploads any newly created items from
    the case's 'processor0' directory (plus, optionally, the time log).
    Exits when the solver output ends or an unexpected error occurs.
    """
    #----------------------- Defining utility command-line interface -------------------------
    import timestamps_upload_options
    import data_upload_options
    import study_book_options
    import data_seeding_options

    an_usage_description = "%prog"
    an_usage_description += timestamps_upload_options.usage_description()
    an_usage_description += study_book_options.usage_description()
    an_usage_description += data_upload_options.usage_description()
    
    from cloudflu import VERSION
    a_version = "%s" % VERSION
    
    from optparse import IndentedHelpFormatter
    a_help_formatter = IndentedHelpFormatter( width = 127 )
    
    from optparse import OptionParser
    an_option_parser = OptionParser( usage = an_usage_description, version = a_version, formatter = a_help_formatter )
    

    #----------------------- Definition of the command line arguments ------------------------
    timestamps_upload_options.add( an_option_parser )

    study_book_options.add( an_option_parser )

    data_upload_options.add( an_option_parser )

    data_seeding_options.add( an_option_parser )

    common.concurrency_options.add( an_option_parser )

    common.communication_options.add( an_option_parser )

    amazon.security_options.add( an_option_parser )

    common.options.add( an_option_parser )

    
    #------------------ Extracting and verifying command-line arguments ----------------------
    an_options, an_args = an_option_parser.parse_args()

    common.options.extract( an_option_parser )
    
    AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY = amazon.security_options.extract( an_option_parser )
    
    common.communication_options.extract( an_option_parser )

    a_number_threads = common.concurrency_options.extract( an_option_parser )
    
    an_upload_seed_size = data_seeding_options.extract( an_option_parser )

    a_study_name = study_book_options.extract( an_option_parser )
    
    a_booked = data_upload_options.extract( an_option_parser )

    a_solver_run, a_case_dir, a_solver_log, a_watched_keyword, a_time_log = timestamps_upload_options.extract( an_option_parser )
    

    print_d( "\n--------------------------- Canonical substitution ------------------------\n" )
    # Echo the equivalent canonical command line, for reproducibility
    import sys; an_engine = sys.argv[ 0 ]

    a_call = "%s %s %s %s %s" % ( an_engine, 
                                  timestamps_upload_options.compose( a_solver_run, a_case_dir, a_solver_log, a_watched_keyword, a_time_log ),
                                  data_upload_options.compose( a_booked ),
                                  study_book_options.compose( a_study_name ),
                                  data_seeding_options.compose( an_upload_seed_size ) )
    print_d( a_call + '\n' )


    print_d( "\n----------------------- Running actual functionality ----------------------\n" )
    import pexpect

    a_spent_time = common.Timer()

    # Snapshot the watched directory's contents so only NEW items get uploaded
    import os.path; a_watched_dir = os.path.join( a_case_dir, 'processor0' )
    import os; a_listdir = os.listdir( a_watched_dir )
    
    from cloudflu.amazon.s3 import TRootObject
    a_root_object = TRootObject.get( AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY )
    print_d( "a_root_object = %s\n" % a_root_object )

    from cloudflu.common import WorkerPool
    a_worker_pool = WorkerPool( a_number_threads )
    
    # Run the solver under pexpect, teeing its output into the log file
    a_child = pexpect.spawn( a_solver_run )
    
    import os.path; a_log_file = os.path.join( a_case_dir, a_solver_log )
    a_child.logfile = file( a_log_file, 'w' )

    while True :
        try:
            try:
                # Block until the solver prints the watched keyword;
                # any pexpect exception (EOF/timeout) ends the watch loop
                a_child.expect( a_watched_keyword )
            except:
                break
            
            import os; a_list_dir = os.listdir( a_watched_dir )

            # Upload every directory entry that appeared since the last pass
            for an_item in a_list_dir :
                if an_item in a_listdir :
                    continue
                
                a_worker_pool.charge( upload_timedata, ( a_root_object, a_study_name, a_booked,
                                                         an_upload_seed_size, a_number_threads, 
                                                         a_case_dir, an_item ) )
                if a_time_log == True :
                    a_worker_pool.charge( upload_timelog, ( a_root_object, a_study_name, a_booked,
                                                            an_upload_seed_size, a_number_threads, 
                                                            an_item, a_log_file ) )
                    pass
                pass
            
            # Remember the current snapshot for the next comparison
            a_listdir = a_list_dir
        except:
            # Unexpected failure: report the traceback and stop watching
            import sys, traceback
            traceback.print_exc( file = sys.stderr )
            break
        
        pass

    a_worker_pool.shutdown()
    a_worker_pool.join()

    print_d( "a_spent_time = %s, sec\n" % a_spent_time )


    print_d( "\n------------------ Printing succussive pipeline arguments -----------------\n" )
    # Emit the study name on stdout so the next pipeline stage can consume it
    print a_study_name


    print_d( "\n--------------------------- Canonical substitution ------------------------\n" )
    print_d( a_call + '\n' )
    

    print_d( "\n-------------------------------------- OK ---------------------------------\n" )
    import sys, os; sys.exit( os.EX_OK )
    pass
Пример #11
0
def main() :
    """CLI entry point: wipe the account's S3 storage.

    Deletes every key in every bucket concurrently, then deletes the
    (now empty) buckets themselves.
    """
    #----------------------- Defining utility command-line interface -------------------------
    an_usage_description = "%prog"
    an_usage_description += common.concurrency_options.usage_description()
    
    from cloudflu import VERSION
    a_version = "%s" % VERSION
    
    from optparse import IndentedHelpFormatter
    a_help_formatter = IndentedHelpFormatter( width = 127 )
    
    from optparse import OptionParser
    an_option_parser = OptionParser( usage = an_usage_description, version = a_version, formatter = a_help_formatter )
    

    #----------------------- Definition of the command line arguments ------------------------
    common.concurrency_options.add( an_option_parser )

    common.communication_options.add( an_option_parser )

    amazon.security_options.add( an_option_parser )
    
    common.options.add( an_option_parser )

    
    #------------------ Extracting and verifying command-line arguments ----------------------
    an_options, an_args = an_option_parser.parse_args()

    common.options.extract( an_option_parser )
    
    common.communication_options.extract( an_option_parser )

    a_number_threads = common.concurrency_options.extract( an_option_parser )
    
    AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY = amazon.security_options.extract( an_option_parser )


    print_i( "-------------------------- Running actual functionality -------------------------\n" )
    import boto; a_s3_conn = boto.connect_s3( AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY )

    a_worker_pool = WorkerPool( a_number_threads )


    print_i( "------------------------ Remove all the bucket keys, first ----------------------\n" )
    for a_bucket in a_s3_conn.get_all_buckets() :
        try :
            a_s3_bucket_keys = a_bucket.get_all_keys()
            print_d( "'%s' : %d\n" % ( a_bucket.name, len( a_s3_bucket_keys ) ) )
        
            for a_s3_bucket_key in a_bucket.list() :
                print_d( "\t'%s'\n" % ( a_s3_bucket_key.name ) )
                # NOTE: the lambda binds its key via the argument list, so each
                # task deletes the key it was charged with
                a_worker_pool.charge( lambda the_s3_bucket_key : the_s3_bucket_key.delete(), [ a_s3_bucket_key ] )
                pass
        except :
            # Deliberate best-effort: skip buckets that fail to list and
            # continue wiping the rest
            pass
        
        pass

    a_worker_pool.join()


    print_i( "--------------------------- Remove the buckets itself ---------------------------\n" )
    # Buckets can only be deleted once their keys are gone (join() above)
    for a_bucket in a_s3_conn.get_all_buckets() :
        print_d( "'%s'\n" % ( a_bucket.name ) )
        a_worker_pool.charge( lambda the_s3_bucket : the_s3_bucket.delete(), [ a_bucket ] )
        pass
    
    a_worker_pool.shutdown()
    a_worker_pool.join()


    print_i( "-------------------------------------- OK ---------------------------------------\n" )
    pass