示例#1
0
def write_cmd_file_to_remote( ssh, cmds, file_name_cmds ):
    """ Create text file on remote containing list of commands to execute
        SCP directly from memory, since this is a microservice need to assume no disk available 

    :param ssh: existing paramiko SSH connection
    :param cmds: list of commands to execute; joined with newlines, one command per line
    :param file_name_cmds: path/name.ext to save file remotely

    """
    cmds_string = '\n'.join(cmds)
    with create_named_in_memory_temp_file( cmds_string ) as temp_cmd_file:
        sftp_client = ssh.open_sftp()
        try:
            sftp_client.put( temp_cmd_file, file_name_cmds )
        finally:
            # Always release the SFTP channel, even if the put() fails —
            # otherwise the channel leaks on the shared paramiko transport
            sftp_client.close()
示例#2
0
def _sftp_put_string( ssh, content_string, remote_file_name ):
    """ SCP a string from memory to a file on the remote instance (no local disk assumed)

    :param ssh: existing paramiko SSH connection
    :param content_string: file content to upload
    :param remote_file_name: path/name.ext to save file remotely
    """
    with create_named_in_memory_temp_file( content_string ) as temp_file:
        sftp_client = ssh.open_sftp()
        try:
            sftp_client.put( temp_file, remote_file_name )
        finally:
            # Always release the SFTP channel, even on upload failure
            sftp_client.close()


def _log_cmd_results( spot_request_item, label, cmd_results ):
    """ Log cmd, exit status, stdout and stderr for each remote command result

    :param spot_request_item: spot request record, used only for the log message header
    :param label: 'service' or 'user' — distinguishes the two bootstrap phases in the log
    :param cmd_results: list of dicts with 'cmd', 'remote_exit_status', 'buf_std_out', 'buf_std_err'
    """
    for cmd_result in cmd_results:
        logger.info( fmt_request_item_msg_hdr( spot_request_item ) +  '   ' + label + ' cmd: ' + cmd_result['cmd'])    
        logger.info( fmt_request_item_msg_hdr( spot_request_item ) +  '      remote_exit_status: ' + str(cmd_result['remote_exit_status']) )    
        logger.info( fmt_request_item_msg_hdr( spot_request_item ) +  '      buf_std_out: ' + cmd_result['buf_std_out'] )    
        logger.info( fmt_request_item_msg_hdr( spot_request_item ) +  '      buf_std_err: ' + cmd_result['buf_std_err'] )    


def launch_remote_client( spot_batch_job_parm_table_name, spot_rsa_key_table_name, spot_request_item, region_name='us-east-1', profile_name=None ):
    """ SSH into remote client, SCP files to client, run script on client, return results

    :param spot_batch_job_parm_table_name: table holding the raw batch job parm item for this master uuid
    :param spot_rsa_key_table_name: table holding the encoded RSA private key for the instance
    :param spot_request_item: spot request record — supplies master uuid, instance public ip and username
    :param region_name:  (Default value = 'us-east-1')
    :param profile_name:  (Default value = None)
    :return: client_bootstrap_service_cmds_results, client_bootstrap_user_cmds_results

    """
    spot_batch_job_parm_item = get_batch_job_parm_item( spot_request_item[ TableSpotRequest.spot_master_uuid ], spot_batch_job_parm_table_name,  
                                                             region_name=region_name, profile_name=profile_name )
    batch_job_parm_item = BatchJobParmItem( stringParmFile=spot_batch_job_parm_item[ TableSpotBatchJobParm.raw_batch_job_parm_item ] )

    filename_client_parm_json = 'clientparm.json'
    filename_bootstrap_service_cmds = 'bootstrap_service_cmds'
    filename_bootstrap_user_cmds = 'bootstrap_user_cmds'
    cmd_client_launch = 'python -m awsspotbatch.client.clientlaunch ' + filename_client_parm_json + ' &'
    client_parm_json_string = create_client_parm_json_string( spot_request_item, batch_job_parm_item )
    # Get the RSA key, decrypt it and build a paramiko key object from memory (no disk assumed)
    rsa_key_item = get_rsa_key_item( spot_rsa_key_table_name, spot_request_item[ TableSpotRequest.spot_master_uuid ], region_name=region_name, profile_name=profile_name )
    kp_material_dec = decode( kp_enc_key, str( rsa_key_item[ TableSpotRSAKey.rsa_key_encoded ]) )
    key_file_obj = StringIO( kp_material_dec )
    pkey = paramiko.RSAKey.from_private_key( key_file_obj )
    
    instance_public_ip_address = spot_request_item[ TableSpotRequest.instance_public_ip_address ]
    instance_username = spot_request_item[ TableSpotRequest.instance_username ]
    
    ssh = paramiko.SSHClient()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    ssh.connect( instance_public_ip_address, timeout=10, username=instance_username, pkey=pkey )
    try:
        # Bootstrap the system and user command
        client_bootstrap_service_primary_results = run_cmds(ssh, [batch_job_parm_item.client_bootstrap_service_primary] )
        # exit status <= 2 is tolerated here (e.g. partial-success codes); only log, don't abort
        if client_bootstrap_service_primary_results[0]['remote_exit_status'] > 2:
            logger.error( fmt_request_item_msg_hdr( spot_request_item ) + 'SEVERE ERROR: client_bootstrap_service_primary failed with remote_exit_status=' + 
                            str(client_bootstrap_service_primary_results[0]['remote_exit_status']) + 
                             ', buf_std_out' + str(client_bootstrap_service_primary_results[0]['buf_std_out'])+ 
                             ', buf_std_err' + str(client_bootstrap_service_primary_results[0]['buf_std_err'])  )
        write_cmd_file_to_remote( ssh, batch_job_parm_item.client_bootstrap_service_cmds, filename_bootstrap_service_cmds )
        client_bootstrap_service_cmds_results = run_cmds(ssh, ['python service/clientbootstrap.py ' + filename_bootstrap_service_cmds])
        write_cmd_file_to_remote( ssh, batch_job_parm_item.client_bootstrap_user_cmds, filename_bootstrap_user_cmds )
        client_bootstrap_user_cmds_results = run_cmds(ssh, ['python service/clientbootstrap.py ' + filename_bootstrap_user_cmds])

        _log_cmd_results( spot_request_item, 'service', client_bootstrap_service_cmds_results )
        _log_cmd_results( spot_request_item, 'user', client_bootstrap_user_cmds_results )

        # put the parm file out to this instance - will have a different SpotRequestUUID and SpotRequestiID for each instance
        _sftp_put_string( ssh, client_parm_json_string, filename_client_parm_json )

        # write the user job parm item json file to disk - will be the same on every instance
        # user parm file is optional
        if TableSpotBatchJobParm.raw_user_job_parm_item in spot_batch_job_parm_item:
            user_job_parm_item_string = spot_batch_job_parm_item[ TableSpotBatchJobParm.raw_user_job_parm_item ]
            _sftp_put_string( ssh, user_job_parm_item_string, 'userjobparmitem.json' )

        # Don't wait for the clientlaunch to complete - it's running the users batch job and a monitor thread that sends SQS status msgs 
        # Use the public get_transport() accessor rather than the private _transport attribute
        chan = ssh.get_transport().open_session()
        chan.exec_command( cmd_client_launch )
    finally:
        # Ensure the SSH connection is released even if a bootstrap step raises
        ssh.close()

    return client_bootstrap_service_cmds_results, client_bootstrap_user_cmds_results