def main() :
    #----------------------- Defining utility command-line interface -------------------------    
    an_usage_description = "%prog"

    an_usage_description += credentials_deploy_options.usage_description()

    an_usage_description += ssh.options.usage_description()
    
    from cloudflu import VERSION
    a_version = "%s" % VERSION

    from optparse import IndentedHelpFormatter
    a_help_formatter = IndentedHelpFormatter( width = 127 )

    from optparse import OptionParser
    an_option_parser = OptionParser( usage = an_usage_description, version = a_version, formatter = a_help_formatter )


    #----------------------- Definition of the command line arguments ------------------------
    credentials_deploy_options.add( an_option_parser )

    ssh.options.add( an_option_parser )
    
    amazon.security_options.add( an_option_parser )
    
    common.options.add( an_option_parser )
  

    #------------------ Extracting and verifying command-line arguments ----------------------
    an_options, an_args = an_option_parser.parse_args()

    common.options.extract( an_option_parser )

    AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY = amazon.security_options.extract( an_option_parser )

    a_password, an_identity_file, a_host_port, a_login_name, a_host_name, a_command = ssh.options.extract( an_option_parser )

    AWS_USER_ID, EC2_PRIVATE_KEY, EC2_CERT, a_remote_location = credentials_deploy_options.extract( an_option_parser )

    
    print_d( "\n--------------------------- Canonical substitution ------------------------\n" )
    import sys; an_engine = sys.argv[ 0 ]

    a_call = "%s %s %s" % ( an_engine, 
                            credentials_deploy_options.compose( AWS_USER_ID, EC2_PRIVATE_KEY, EC2_CERT, a_remote_location ),
                            ssh.options.compose( a_password, an_identity_file, a_host_port, a_login_name, a_host_name ) )
    print_d( a_call + '\n' )
    ssh.options.echo( a_password, an_identity_file, a_host_port, a_login_name, a_host_name )


    print_d( "\n----------------------- Running actual functionality ----------------------\n" )
    a_ssh_client = ssh.connect( a_password, an_identity_file, a_host_port, a_login_name, a_host_name, a_command )
    import os.path; a_remote_dir = os.path.dirname( a_remote_location )
    ssh.command( a_ssh_client, 'sudo mkdir --parents %s' % a_remote_dir )
    ssh.command( a_ssh_client, 'sudo chmod 777 %s' % a_remote_dir )

    a_sftp_client = a_ssh_client.open_sftp()

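    # The access key pair is written to the remote credentials file as shell 'export'
    # lines, so any shell that sources the file picks up the AWS credentials.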
    ssh.command( a_ssh_client, 'echo "export AWS_ACCESS_KEY_ID=%s" > %s' % ( AWS_ACCESS_KEY_ID, a_remote_location ) )
    ssh.command( a_ssh_client, 'echo "export AWS_SECRET_ACCESS_KEY=%s" >> %s' % ( AWS_SECRET_ACCESS_KEY, a_remote_location ) )

    if AWS_USER_ID != None :
        ssh.command( a_ssh_client, 'echo "export AWS_USER_ID=%s" >> %s' % ( AWS_USER_ID, a_remote_location ) )
        pass

    if EC2_PRIVATE_KEY != None :
        a_remote_ec2_private_key = os.path.join( a_remote_dir, os.path.basename( EC2_PRIVATE_KEY ) )
        a_sftp_client.put( EC2_PRIVATE_KEY, a_remote_ec2_private_key )
        ssh.command( a_ssh_client, 'echo "export EC2_PRIVATE_KEY=%s" >> %s' % ( a_remote_ec2_private_key, a_remote_location ) )
        pass

    if EC2_CERT != None :
        a_remote_ec2_cert = os.path.join( a_remote_dir, os.path.basename( EC2_CERT ) )
        a_sftp_client.put( EC2_CERT, a_remote_ec2_cert )
        ssh.command( a_ssh_client, 'echo "export EC2_CERT=%s" >> %s' % ( a_remote_ec2_cert, a_remote_location ) )
        pass

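    # Symlinking the credentials file into /etc/profile.d makes every login shell on
    # the node source it, so the exported variables become available system-wide.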
    ssh.command( a_ssh_client, 'sudo ln -s %s /etc/profile.d/aws_credentials.sh' % ( a_remote_location ) )
    ssh.command( a_ssh_client, 'env | grep -E "AWS|EC2"' )
    
    a_ssh_client.close()
    
    
    print_d( "\n------------------ Printing successive pipeline arguments -----------------\n" )
    ssh.options.track( a_password, an_identity_file, a_host_port, a_login_name, a_host_name )
    
    
    print_d( "\n--------------------------- Canonical substitution ------------------------\n" )
    ssh.options.echo( a_password, an_identity_file, a_host_port, a_login_name, a_host_name )
    print_d( a_call + '\n' )


    print_d( "\n-------------------------------------- OK ---------------------------------\n" )
    pass
def main() :
    #----------------------- Defining utility command-line interface -------------------------    
    an_usage_description = "%prog"

    from run_options import usage_description as usage_description_options
    an_usage_description += usage_description_options()
    
    from cloudflu import VERSION
    a_version = "%s" % VERSION

    from optparse import IndentedHelpFormatter
    a_help_formatter = IndentedHelpFormatter( width = 127 )

    from optparse import OptionParser
    an_option_parser = OptionParser( usage = an_usage_description, version = a_version, formatter = a_help_formatter )


    #----------------------- Definition of the command line arguments ------------------------
    from run_options import add as add_options
    add_options( an_option_parser )

    ssh.options.add( an_option_parser )

    common.options.add( an_option_parser )
  
 
    #------------------ Extracting and verifying command-line arguments ----------------------
    an_options, an_args = an_option_parser.parse_args()

    common.options.extract( an_option_parser )
    
    a_password, an_identity_file, a_host_port, a_login_name, a_host_name, a_command = ssh.options.extract( an_option_parser )

    from run_options import extract as extract_options
    a_scripts, a_script_args, a_sequence_file = extract_options( an_option_parser )


    print_d( "\n--------------------------- Canonical substitution ------------------------\n" )
    import sys; an_engine = sys.argv[ 0 ]
    
    from run_options import compose as compose_options
    a_call = "%s %s %s" % ( an_engine, compose_options( a_scripts, a_script_args, a_sequence_file ),
                            ssh.options.compose( a_password, an_identity_file, a_host_port, a_login_name, a_host_name ) )
    
    print_d( a_call + '\n' )
    ssh.options.echo( a_password, an_identity_file, a_host_port, a_login_name, a_host_name )


    print_d( "\n----------------------- Running actual functionality ----------------------\n" )
    a_ssh_client = ssh.connect( a_password, an_identity_file, a_host_port, a_login_name, a_host_name, a_command )

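    # For each user-supplied script: create a private temporary directory on the
    # remote host, upload the script there over SFTP, make it executable and run it
    # through sudo with its corresponding arguments.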
    if a_scripts != None :
        for an_id in range( len( a_scripts ) ) :
            a_script_file = a_scripts[ an_id ]
            a_script_arg = a_script_args[ an_id ]
        
            a_working_dir = ssh.command( a_ssh_client, 'python -c "import os, os.path, tempfile; print tempfile.mkdtemp()"' )[ 0 ][ : -1 ]
            import os; a_target_script = os.path.join( a_working_dir, os.path.basename( a_script_file ) )
        
            a_sftp_client = a_ssh_client.open_sftp() # Instantiating an SFTP client
            a_sftp_client.put( a_script_file, a_target_script )
            
            ssh.command( a_ssh_client, 'chmod 755 "%s"' % a_target_script )
            ssh.command( a_ssh_client, 'sudo "%s" %s' % ( a_target_script, a_script_arg ) )
            
            # ssh.command( a_ssh_client, """python -c 'import shutil; shutil.rmtree( "%s" )'""" % a_working_dir )
            pass
        pass

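    # A sequence file is executed line by line: comment ('#') and empty lines are
    # skipped, every other line is run on the remote host through sudo.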
    if a_sequence_file != None :
        a_file = open( a_sequence_file )
        for a_line in a_file.readlines() :
            if a_line[ 0 ] == "#" or a_line[ 0 ] == "\n" :
                continue
            ssh.command( a_ssh_client, 'sudo %s' % a_line[ : -1 ] )
            pass
        a_file.close()
        pass
    
    a_ssh_client.close()


    print_d( "\n------------------ Printing successive pipeline arguments -----------------\n" )
    ssh.options.track( a_password, an_identity_file, a_host_port, a_login_name, a_host_name )
    

    print_d( "\n--------------------------- Canonical substitution ------------------------\n" )
    ssh.options.echo( a_password, an_identity_file, a_host_port, a_login_name, a_host_name )
    print_d( a_call + '\n' )


    print_d( "\n-------------------------------------- OK ---------------------------------\n" )
    pass
def main() :
    #----------------------- Defining utility command-line interface -------------------------    
    an_usage_description = "%prog"
    an_usage_description += ec2.use.options.usage_description()

    from cloudflu import VERSION
    a_version = "%s" % VERSION
    
    from optparse import IndentedHelpFormatter
    a_help_formatter = IndentedHelpFormatter( width = 127 )

    from optparse import OptionParser
    an_option_parser = OptionParser( usage = an_usage_description, version = a_version, formatter = a_help_formatter )


    #----------------------- Definition of the command line arguments ------------------------
    ec2.use.options.add( an_option_parser )
    
    amazon.security_options.add( an_option_parser )
    
    common.options.add( an_option_parser )
  
 
    #------------------ Extracting and verifying command-line arguments ----------------------
    an_options, an_args = an_option_parser.parse_args()
    
    common.options.extract( an_option_parser )
   
    AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY = amazon.security_options.extract( an_option_parser )
    
    a_cluster_id = ec2.use.options.extract( an_option_parser )
   
    from cloudflu.preferences import get
    a_cluster_location = get( 'amazon.cluster.location' )
    a_host_port = int( get( 'amazon.cluster.host_port' ) )
    a_login_name = get( 'amazon.cluster.login_name' )


    print_d( "\n--------------------------- Canonical substitution ------------------------\n" )
    import sys; an_engine = sys.argv[ 0 ]
   
    a_call = "%s %s" % ( an_engine, ec2.use.options.compose( a_cluster_id ) )
    print_d( a_call + '\n' )
   

    print_d( "\n----------------------- Running actual functionality ----------------------\n" )
    a_spent_time = Timer()
   
    an_ec2_conn = ec2.common.region_connect( a_cluster_location, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY )

    a_reservation = ec2.use.get_reservation( an_ec2_conn, a_cluster_id )
    print_d( '< %r > : %s\n' % ( a_reservation, a_reservation.instances ) )
   
    a_security_group = ec2.use.get_security_group( an_ec2_conn, a_reservation )
    print_d( "< %r > : %s\n" % ( a_security_group, a_security_group.rules ) )

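    # Visit every instance in the reservation over a key-based SSH connection,
    # install the NFS packages and open the ports it needs in the cluster
    # security group.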
    an_instance2ssh = {}
    for an_instance in a_reservation.instances :
        a_password = None
        an_identity_file = ec2.run.get_identity_filepath( an_instance.key_name )
        a_host_name = an_instance.public_dns_name
        ssh.options.echo( a_password, an_identity_file, a_host_port, a_login_name, a_host_name )
    
        a_ssh_client = ssh.connect( a_password, an_identity_file, a_host_port, a_login_name, a_host_name )
        an_instance2ssh[ an_instance ] = a_ssh_client
       
        ssh.command( a_ssh_client, 'sudo apt-get install -y nfs-common portmap nfs-kernel-server' ) # install server and client packages
       
        try:
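            # Note: the '/0' suffix makes each CIDR match any source address, not just
            # this instance's private IP (a '/32' suffix would restrict it to that IP).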
            a_security_group.authorize( 'tcp', 111, 111, '%s/0' % an_instance.private_ip_address ) # for rpcbind
            a_security_group.authorize( 'tcp', 2049, 2049, '%s/0' % an_instance.private_ip_address ) # for nfs over tcp
            a_security_group.authorize( 'udp', 35563, 35563, '%s/0' % an_instance.private_ip_address ) # for nfs over udp
        except :
            pass
        pass

    [ a_ssh_client.close() for a_ssh_client in an_instance2ssh.values() ]
    
    print_d( "a_spent_time = %s, sec\n" % a_spent_time )
    
    
    print_d( "\n------------------ Printing successive pipeline arguments -----------------\n" )
    ec2.use.options.track( a_cluster_id )
    
    
    print_d( "\n--------------------------- Canonical substitution ------------------------\n" )
    print_d( a_call + '\n' )
    
    
    print_d( "\n-------------------------------------- OK ---------------------------------\n" )
    pass

# Fragment of another cluster bootstrap example (only its tail was captured):
        print_e( '%s\n' % exc )
        pass

    a_security_group = ec2.use.get_security_group( an_ec2_conn, a_reservation )
    print_d( "< %r > : %s\n" % ( a_security_group, a_security_group.rules ) )


    print_d( "\n-------------------- Providing seamless ssh connection --------------------\n" )
    an_instance2ssh = {}
    for an_instance in a_reservation.instances :
        a_password = None
        an_identity_file = ec2.run.get_identity_filepath( an_instance.key_name )
        a_host_name = an_instance.public_dns_name
        ssh.options.echo( a_password, an_identity_file, a_host_port, a_login_name, a_host_name )
        
        a_ssh_client = ssh.connect( a_password, an_identity_file, a_host_port, a_login_name, a_host_name )
        an_instance2ssh[ an_instance ] = a_ssh_client

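        # Upload the cluster key pair and install it as ~/.ssh/id_rsa on the node,
        # so the nodes can reach each other over SSH without interactive prompts
        # (host key checking is also disabled below for the same reason).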
        import os.path; an_upload_name = os.path.basename( an_identity_file )

        a_sftp_client = a_ssh_client.open_sftp()
        a_sftp_client.put( an_identity_file, an_upload_name )

        a_target_name = '${HOME}/.ssh/id_rsa'
        ssh.command( a_ssh_client, 'mv -f %s %s' % ( an_upload_name, a_target_name ) )
        ssh.command( a_ssh_client, 'chmod 600 %s' % ( a_target_name ) )
        ssh.command( a_ssh_client, """sudo sh -c 'echo "    StrictHostKeyChecking no" >> /etc/ssh/ssh_config'""" )
        
        try:
            a_security_group.authorize( 'tcp', 1, 65535, '%s/0' % an_instance.private_ip_address ) # mpi cluster ports
        except :
            pass
        pass
def main() :
    #----------------------- Defining utility command-line interface -------------------------    
    an_usage_description = "%prog"

    an_usage_description += deploy_options.usage_description()

    an_usage_description += ssh.options.usage_description()

    from cloudflu import VERSION
    a_version = "%s" % VERSION

    from optparse import IndentedHelpFormatter
    a_help_formatter = IndentedHelpFormatter( width = 127 )

    from optparse import OptionParser
    an_option_parser = OptionParser( usage = an_usage_description, version = a_version, formatter = a_help_formatter )


    #----------------------- Definition of the command line arguments ------------------------
    deploy_options.add( an_option_parser )

    ssh.options.add( an_option_parser )

    common.options.add( an_option_parser )
  
 
    #------------------ Extracting and verifying command-line arguments ----------------------
    an_options, an_args = an_option_parser.parse_args()

    an_enable_debug = common.options.extract( an_option_parser )
    
    a_password, an_identity_file, a_host_port, a_login_name, a_host_name, a_command = ssh.options.extract( an_option_parser )

    a_production, an_url = deploy_options.extract( an_option_parser )
    

    print_d( "\n--------------------------- Canonical substitution ------------------------\n" )
    import sys; an_engine = sys.argv[ 0 ]

    a_call = "%s %s %s" % ( an_engine, 
                            deploy_options.compose( a_production, an_url ),
                            ssh.options.compose( a_password, an_identity_file, a_host_port, a_login_name, a_host_name ) )
    print_d( a_call + '\n' )


    print_d( "\n----------------------- Running actual functionality ----------------------\n" )
    ssh.options.echo( a_password, an_identity_file, a_host_port, a_login_name, a_host_name )
    a_ssh_client = ssh.connect( a_password, an_identity_file, a_host_port, a_login_name, a_host_name, a_command )

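    # A production deploy installs the released package straight from the given URL;
    # otherwise a source distribution is built locally, uploaded to a temporary
    # directory on the remote host and installed from there.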
    if a_production == True : 
        ssh.command( a_ssh_client, "sudo easy_install %s" % an_url )
    else:
        import os.path; a_source_dir = os.path.abspath( os.curdir )
        sh_command( "cd %s && ./setup.py sdist" % a_source_dir )

        ssh.command( a_ssh_client, ( "sudo apt-get -y install python-setuptools" ) )
        ssh.command( a_ssh_client, ( "sudo apt-get -y install python-all-dev" ) )

        a_stdout_lines = ssh.command( a_ssh_client, 'python -c "import os, os.path, tempfile; print tempfile.mkdtemp()"' )
        a_working_dir = a_stdout_lines[ 0 ][ : -1 ]
        print_d( "a_working_dir = %s\n" % a_working_dir )

        import cloudflu
        a_cloudflu_name = "%s-%s" % ( cloudflu.NAME, cloudflu.VERSION )
        a_cloudflu_archive_name = a_cloudflu_name + os.extsep + "tar.gz"
        a_cloudflu_source_archive = os.path.join( a_source_dir, 'dist', a_cloudflu_archive_name )
        a_cloudflu_target_archive = os.path.join( a_working_dir, a_cloudflu_archive_name )

        # Uploading the cloudflu Python engine itself into the cloud and installing it there
        a_sftp_client = a_ssh_client.open_sftp()
        a_sftp_client.put( a_cloudflu_source_archive, a_cloudflu_target_archive )
        ssh.command( a_ssh_client, 'cd %s && tar -xzf %s' % ( a_working_dir, a_cloudflu_archive_name ) )
        a_cloudflu_setup_dir = os.path.join( a_working_dir, a_cloudflu_name )
        ssh.command( a_ssh_client, 'cd %s && sudo python ./setup.py install' % ( a_cloudflu_setup_dir ) )
    
        # ssh.command( a_ssh_client, """python -c 'import shutil; shutil.rmtree( "%s" )'""" % a_working_dir )
        pass

    # To enable 'cloudflu' debug mode by default
    ssh.command( a_ssh_client, """sudo bash -c "echo 'export __CLOUDFLU_DEBUG_ENABLE__=X' >> /etc/profile" """ ) 

    a_ssh_client.close()

    print_d( "\n------------------ Printing successive pipeline arguments -----------------\n" )
    ssh.options.track( a_password, an_identity_file, a_host_port, a_login_name, a_host_name )


    print_d( "\n--------------------------- Canonical substitution ------------------------\n" )
    ssh.options.echo( a_password, an_identity_file, a_host_port, a_login_name, a_host_name )
    print_d( a_call + '\n' )


    print_d( "\n-------------------------------------- OK ---------------------------------\n" )
    pass
def main() :
    #----------------------- Defining utility command-line interface -------------------------    
    import data_transfer_options
    import solver_start_options
    import data_seeding_options
    import openmpi_config_options

    an_usage_description = "%prog"
    an_usage_description += ec2.use.options.usage_description()
    an_usage_description += data_transfer_options.usage_description()
    an_usage_description += solver_start_options.usage_description()

    from cloudflu import VERSION
    a_version = "%s" % VERSION

    from optparse import IndentedHelpFormatter
    a_help_formatter = IndentedHelpFormatter( width = 127 )

    from optparse import OptionParser
    an_option_parser = OptionParser( usage = an_usage_description, version = a_version, formatter = a_help_formatter )


    #----------------------- Definition of the command line arguments ------------------------
    ec2.use.options.add( an_option_parser )
    
    data_transfer_options.add( an_option_parser )

    solver_start_options.add( an_option_parser )

    data_seeding_options.add( an_option_parser )

    common.concurrency_options.add( an_option_parser )

    openmpi_config_options.add( an_option_parser )

    amazon.security_options.add( an_option_parser )
    
    common.options.add( an_option_parser )
  
 
    #------------------ Extracting and verifying command-line arguments ----------------------
    an_options, an_args = an_option_parser.parse_args()
    
    an_enable_debug, a_log_file = common.options.extract( an_option_parser )
    
    AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY = amazon.security_options.extract( an_option_parser )
    
    a_cluster_id = ec2.use.options.extract( an_option_parser )

    a_hostfile = openmpi_config_options.extract( an_option_parser )

    a_number_threads = common.concurrency_options.extract( an_option_parser )

    an_upload_seed_size = data_seeding_options.extract( an_option_parser )

    a_case_dir, a_run_hook = solver_start_options.extract( an_option_parser )

    a_study_name = data_transfer_options.extract( an_option_parser )
    
    from cloudflu.preferences import get
    a_cluster_location = get( 'amazon.cluster.location' )
    a_host_port = int( get( 'amazon.cluster.host_port' ) )
    a_login_name = get( 'amazon.cluster.login_name' )


    print_d( "\n--------------------------- Canonical substitution ------------------------\n" )
    import sys; an_engine = sys.argv[ 0 ]

    a_call = "%s %s %s %s %s" % ( an_engine,
                                  ec2.use.options.compose( a_cluster_id ),
                                  data_transfer_options.compose( a_study_name ), 
                                  solver_start_options.compose( a_case_dir, a_run_hook ),
                                  openmpi_config_options.compose( a_hostfile ) )
    print_d( a_call + '\n' )


    print_d( "\n----------------------- Running actual functionality ----------------------\n" )
    a_spent_time = Timer()

    an_ec2_conn = ec2.common.region_connect( a_cluster_location, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY )
    a_reservation = ec2.use.get_reservation( an_ec2_conn, a_cluster_id )
    print_d( '< %r > : %s\n' % ( a_reservation, a_reservation.instances ) )

    a_master_node = an_instance = a_reservation.instances[ 0 ]


    print_d( "\n--------------------- Uploading case data to S3 ---------------------------\n"  )
    import os; a_case_name = os.path.basename( a_case_dir )

    from cloudflu.amazon.s3 import TRootObject
    a_root_object = TRootObject.get( AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY )
    print_d( "a_root_object = %s\n" % a_root_object )
    
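    # Upload the local case directory to S3 as a new input study, split into seeds
    # of the requested size and pushed by the requested number of worker threads.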
    import data_upload
    an_input_study = data_upload.entry_point( a_root_object, { a_case_dir : '' }, an_upload_seed_size, a_number_threads )

    an_input_study_name = an_input_study.name()
    print_d( "an_input_study_name = '%s'\n" % an_input_study_name )
    

    print_d( "\n------------------ Installing cloudflu on the master node ------------------\n" )
    a_password = None
    a_host_name = an_instance.public_dns_name
    an_identity_file = ec2.run.get_identity_filepath( an_instance.key_name )
    
    ssh.options.echo( a_password, an_identity_file, a_host_port, a_login_name, a_host_name )
    a_ssh_client = ssh.connect( a_password, an_identity_file, a_host_port, a_login_name, a_host_name )
    
    an_instance2ssh = {}
    an_instance2ssh[ an_instance ] = a_ssh_client
    

    print_d( "\n------------- Downloading case data from S3 to the master node ------------\n"  )
    a_stdout_lines = ssh.command( a_ssh_client, 'python -c "import os, os.path, tempfile; print tempfile.mkdtemp()"' )
    a_working_dir = a_stdout_lines[ 0 ][ : -1 ]
    print_d( "a_working_dir = '%s'\n" % a_working_dir )

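    # Fetch the freshly uploaded study from S3 into the temporary working directory
    # on the master node; judging by its name, '--remove' discards the study from S3
    # once it has been downloaded.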
    ssh.command( a_ssh_client, "cloudflu-download --study-name=%s --output-dir=%s --remove" % ( an_input_study_name, a_working_dir ) )
    
    print_d( "\n--- Sharing the solver case folder with all the cluster nodes through NFS --\n" )
    ssh.command( a_ssh_client, "sudo sh -c 'echo %s *\(rw,no_root_squash,sync,subtree_check\) >> /etc/exports'" % ( a_working_dir ) )
    ssh.command( a_ssh_client, "sudo exportfs -a" ) # make changes effective on the running NFS server

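    # Every remaining node mounts the master's exported working directory at the same
    # path, so the whole cluster sees one shared copy of the solver case.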
    for an_instance in a_reservation.instances[ 1 : ] :
        a_host_name = an_instance.public_dns_name
        ssh.options.echo( a_password, an_identity_file, a_host_port, a_login_name, a_host_name )
        
        a_ssh_client = ssh.connect( a_password, an_identity_file, a_host_port, a_login_name, a_host_name )
        ssh.command( a_ssh_client, "mkdir -p %s" % ( a_working_dir ) )
        ssh.command( a_ssh_client, "sudo mount %s:%s %s" % ( a_master_node.private_ip_address, a_working_dir, a_working_dir ) )
        
        an_instance2ssh[ an_instance ] = a_ssh_client
        pass
    
    print_d( "\n--------------------- Getting access to the booked study ------------------\n" )
    import study_book
    an_output_study = study_book.entry_point( a_root_object, a_study_name, True )


    print_d( "\n----------------------- Running of the solver case ------------------------\n" )
    a_num_nodes = len( a_reservation.instances )

    a_target_dir = os.path.join( a_working_dir, a_case_name )

    an_additional_args = "--study-name='%s' --booked --upload-seed-size=%d" % ( a_study_name, an_upload_seed_size )

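    # The case's run hook is launched on the master node; it receives the number of
    # cluster nodes, the OpenMPI hostfile and the extra arguments used to upload the
    # results back into the booked output study.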
    a_ssh_client = an_instance2ssh[ a_master_node ]
    ssh.command( a_ssh_client, "%s/%s %d '%s' %s" % ( a_target_dir, a_run_hook, a_num_nodes, a_hostfile, an_additional_args ) )

    an_output_study.seal()

    [ a_ssh_client.close() for a_ssh_client in an_instance2ssh.values() ]
    
    print_d( "a_spent_time = %s, sec\n" % a_spent_time )
    

    print_d( "\n------------------ Printing successive pipeline arguments -----------------\n" )
    ec2.use.options.track( a_cluster_id )

    
    print_d( "\n--------------------------- Canonical substitution ------------------------\n" )
    print_d( a_call + '\n' )
    

    print_d( "\n-------------------------------------- OK ---------------------------------\n" )
    pass
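
# Minimal usage sketch (an assumption, not taken from the cloudflu sources): each
# main() above would normally live in its own module and be wired up as a console
# script; a plain entry-point guard is enough to run one of them directly.
if __name__ == '__main__' :
    main()
    pass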