# Round-robin rescheduler: spreads the processes listed in
# `procs_2b_rescheduled` over `nodes`, pushes the selected process ids to each
# node (put_proc_ids), marks them 'reschedule_ready' in the lookup table via
# construct_lookup_table, then kicks off
# `make start_schedule PAYLOAD_NAME=... IDS=...` on each node over ssh.
# NOTE(review): the source formatting is mangled (whole body collapsed onto a
# single physical line) and the block is truncated -- the trailing `finally:`
# has no body (presumably it closed `ssh`; restore from version control).
# NOTE(review): Python 2 only (`except Exception, e`); `processes /
# total_nodes` relies on Py2 integer division -- would yield a float on Py3.
def start_round_robin_reschedule(nodes, procs_2b_rescheduled, current_procs, settings, output_storage_settings, relative_path_suffix): total_nodes = len(nodes) all_nodes = list(nodes) processes = len(procs_2b_rescheduled) if total_nodes > processes: total_nodes = processes all_nodes = nodes[:total_nodes] if total_nodes == 0: return proc_per_node = processes / total_nodes remaining_procs = processes % total_nodes index = 0 new_processes = current_procs rescheduled_procs = list(procs_2b_rescheduled) for cur_node in all_nodes: logger.debug('Schedule here %s' % cur_node) ip_address = cur_node.ip_address if not ip_address: ip_address = cur_node.private_ip_address logger.debug('ip_address=%s' % ip_address) #relative_path = output_storage_settings['type'] + '@' + settings['payload_destination'] relative_path = output_storage_settings[ 'type'] + '@' + relative_path_suffix procs_on_cur_node = proc_per_node if remaining_procs: procs_on_cur_node = proc_per_node + 1 remaining_procs -= 1 logger.debug('procs_cur_node=%d' % procs_on_cur_node) ids = get_procs_ids(procs_on_cur_node, rescheduled_procs=rescheduled_procs) #index += len(ids) #logger.debug('index=%d' % index) put_proc_ids(relative_path, ids, ip_address, settings) new_processes = construct_lookup_table(ids, ip_address, new_processes, status='reschedule_ready', maximum_retry=int( settings['maximum_retry'])) destination = get_url_with_credentials(settings, relative_path, is_relative_path=True, ip_address=ip_address) logger.debug('schedule destination=%s' % destination) makefile_path = get_make_path(destination) logger.debug('makefile_path=%s' % makefile_path) command = "cd %s; make %s" % ( makefile_path, 'start_schedule PAYLOAD_NAME=%s IDS=%s' % (settings['payload_name'], settings['filename_for_PIDs'])) command_out = '' errs = '' logger.debug("starting command for %s" % ip_address) try: ssh = open_connection(ip_address=ip_address, settings=settings) command_out, errs = run_command_with_status(ssh, command) except 
Exception, e: logger.error(e) finally:
# Duplicate definition of start_round_robin_reschedule -- this later def
# shadows the earlier one at import time, so only this version is live.
# It differs in the make invocation (positional args instead of
# PAYLOAD_NAME=/IDS= variables).
# BUG(review): 'start_schedule %s %s %s' has THREE placeholders but is given
# FOUR arguments (payload_name, filename_for_PIDs, process_output_dirname,
# smart_connector_input) -- building `command` raises TypeError ("not all
# arguments converted during string formatting"). Compare the 4-placeholder
# form used by start_round_robin_schedule below.
# NOTE(review): block is truncated -- the trailing `finally:` has no body
# (ssh cleanup presumably lost in the formatting mangle).
def start_round_robin_reschedule(nodes, procs_2b_rescheduled, current_procs, settings, output_storage_settings, relative_path_suffix): total_nodes = len(nodes) all_nodes = list(nodes) processes = len(procs_2b_rescheduled) if total_nodes > processes: total_nodes = processes all_nodes = nodes[:total_nodes] if total_nodes == 0: return proc_per_node = processes / total_nodes remaining_procs = processes % total_nodes index = 0 new_processes = current_procs rescheduled_procs = list(procs_2b_rescheduled) for cur_node in all_nodes: logger.debug('Schedule here %s' % cur_node) ip_address = cur_node.ip_address if not ip_address: ip_address = cur_node.private_ip_address logger.debug('ip_address=%s' % ip_address) #relative_path = output_storage_settings['type'] + '@' + settings['payload_destination'] relative_path = output_storage_settings['type'] + '@' + relative_path_suffix procs_on_cur_node = proc_per_node if remaining_procs: procs_on_cur_node = proc_per_node + 1 remaining_procs -= 1 logger.debug('procs_cur_node=%d' % procs_on_cur_node) ids = get_procs_ids(procs_on_cur_node, rescheduled_procs=rescheduled_procs) #index += len(ids) #logger.debug('index=%d' % index) put_proc_ids(relative_path, ids, ip_address, settings) new_processes = construct_lookup_table( ids, ip_address, new_processes, status='reschedule_ready', maximum_retry=int(settings['maximum_retry'])) destination = get_url_with_credentials(settings, relative_path, is_relative_path=True, ip_address=ip_address) logger.debug('schedule destination=%s' % destination) makefile_path = get_make_path(destination) logger.debug('makefile_path=%s' % makefile_path) command = "cd %s; make %s" % (makefile_path, 'start_schedule %s %s %s' % (settings['payload_name'], settings['filename_for_PIDs'], settings['process_output_dirname'], settings['smart_connector_input'])) command_out = '' errs = '' logger.debug("starting command for %s" % ip_address) try: ssh = open_connection(ip_address=ip_address, settings=settings) command_out, errs = 
run_command_with_status(ssh, command) except Exception, e: logger.error(e) finally:
# Round-robin scheduler for initial placement: distributes `processes`
# process ids (numbered from `schedule_index`) evenly across `nodes`
# (proc_per_node each, plus one extra while remaining_procs lasts), writes
# each node's id list with put_proc_ids, records them via
# construct_lookup_table, then runs the start_schedule make target over ssh
# with four positional args (payload_name, filename_for_PIDs,
# process_output_dirname, smart_connector_input).
# NOTE(review): mangled/truncated -- the trailing `finally:` has no body
# (presumably `ssh.close()`); `new_processes` is built but no return is
# visible. Python 2 syntax (`except Exception, e`, integer `/`).
def start_round_robin_schedule(nodes, processes, schedule_index, settings, relative_path_suffix): total_nodes = len(nodes) all_nodes = list(nodes) if total_nodes > processes: total_nodes = processes all_nodes = nodes[:total_nodes] if total_nodes == 0: return proc_per_node = processes / total_nodes remaining_procs = processes % total_nodes index = schedule_index new_processes = [] for cur_node in all_nodes: ip_address = cur_node[1] #relative_path = settings['type'] + '@' + settings['payload_destination'] relative_path = settings['type'] + '@' + relative_path_suffix procs_on_cur_node = proc_per_node if remaining_procs: procs_on_cur_node = proc_per_node + 1 remaining_procs -= 1 logger.debug('procs_cur_node=%d' % procs_on_cur_node) ids = get_procs_ids(procs_on_cur_node, index=index) index += len(ids) logger.debug('index=%d' % index) put_proc_ids(relative_path, ids, ip_address, settings) new_processes = construct_lookup_table( ids, ip_address, new_processes, maximum_retry=int(settings['maximum_retry'])) destination = get_url_with_credentials( settings, relative_path, is_relative_path=True, ip_address=ip_address) logger.debug('schedule destination=%s' % destination) makefile_path = get_make_path(destination) logger.debug('makefile_path=%s' % makefile_path) command = "cd %s; make %s" % (makefile_path, 'start_schedule %s %s %s %s' % ( settings['payload_name'], settings['filename_for_PIDs'], settings['process_output_dirname'], settings['smart_connector_input'])) command_out = '' errs = '' logger.debug("starting command for %s" % ip_address) try: ssh = open_connection(ip_address=ip_address, settings=settings) command_out, errs = run_command_with_status(ssh, command) except Exception, e: logger.error(e) finally:
# Second (shadowing) definition of start_round_robin_schedule -- only this
# later version is live at runtime. Identical to the previous one except the
# make target passes named make variables (PAYLOAD_NAME=... IDS=...) instead
# of four positional arguments, dropping process_output_dirname and
# smart_connector_input from the command.
# NOTE(review): truncated -- the trailing `finally:` has no body (ssh
# cleanup presumably lost); no return statement is visible although
# `new_processes` is accumulated. Python 2 syntax throughout.
def start_round_robin_schedule(nodes, processes, schedule_index, settings, relative_path_suffix): total_nodes = len(nodes) all_nodes = list(nodes) if total_nodes > processes: total_nodes = processes all_nodes = nodes[:total_nodes] if total_nodes == 0: return proc_per_node = processes / total_nodes remaining_procs = processes % total_nodes index = schedule_index new_processes = [] for cur_node in all_nodes: ip_address = cur_node[1] #relative_path = settings['type'] + '@' + settings['payload_destination'] relative_path = settings['type'] + '@' + relative_path_suffix procs_on_cur_node = proc_per_node if remaining_procs: procs_on_cur_node = proc_per_node + 1 remaining_procs -= 1 logger.debug('procs_cur_node=%d' % procs_on_cur_node) ids = get_procs_ids(procs_on_cur_node, index=index) index += len(ids) logger.debug('index=%d' % index) put_proc_ids(relative_path, ids, ip_address, settings) new_processes = construct_lookup_table( ids, ip_address, new_processes, maximum_retry=int(settings['maximum_retry'])) destination = get_url_with_credentials( settings, relative_path, is_relative_path=True, ip_address=ip_address) logger.debug('schedule destination=%s' % destination) makefile_path = get_make_path(destination) logger.debug('makefile_path=%s' % makefile_path) command = "cd %s; make %s" % (makefile_path, 'start_schedule PAYLOAD_NAME=%s IDS=%s' % ( settings['payload_name'], settings['filename_for_PIDs'])) command_out = '' errs = '' logger.debug("starting command for %s" % ip_address) try: ssh = open_connection(ip_address=ip_address, settings=settings) command_out, errs = run_command_with_status(ssh, command) except Exception, e: logger.error(e) finally:
def _is_schedule_complete(ip, settings, destination):
    """ Return True if package job on instance_id has is_job_finished """
    # Run the `schedule_done` make target on the node and scan its output
    # for the completion marker line.
    connection = open_connection(ip_address=ip, settings=settings)
    make_dir = get_make_path(destination)
    target = 'schedule_done IDS=%s' % (settings['filename_for_PIDs'])
    command = "cd %s; make %s" % (make_dir, target)
    command_out, _ = run_command_with_status(connection, command)
    logger.debug('command=%s' % command)
    if not command_out:
        return False
    logger.debug("command_out = %s" % command_out)
    return any('All processes are scheduled' in line for line in command_out)
def _is_schedule_complete(ip, settings, destination):
    """ Return True if package job on instance_id has is_job_finished """
    # Shadowing redefinition of the function above; behaviour is unchanged:
    # invoke the remote `schedule_done` target and look for the done marker.
    session = open_connection(ip_address=ip, settings=settings)
    build_dir = get_make_path(destination)
    command = "cd %s; make %s" % (
        build_dir,
        'schedule_done IDS=%s' % (settings['filename_for_PIDs']))
    command_out, _ = run_command_with_status(session, command)
    logger.debug('command=%s' % command)
    if command_out:
        logger.debug("command_out = %s" % command_out)
        done_marker = 'All processes are scheduled'
        for output_line in command_out:
            if done_marker in output_line:
                return True
    return False
# Bootstraps a freshly provisioned node: copies the payload from `source` to
# `destination`, installs make with yum over ssh, then runs the
# `start_bootstrap` make target; logs and re-raises on any failure.
# NOTE(review): Python 2 `except Exception, e` syntax -- not Py3 compatible.
# NOTE(review): `ssh = ''` before the try suggests a `finally` that closed
# the connection was lost in the formatting mangle -- as written the ssh
# connection is never closed; confirm against version control.
def _start_bootstrap(instance, ip, settings, source, destination): """ Start the task on the instance, then return """ logger.info("run_task %s" % str(instance)) copy_directories(source, destination) makefile_path = get_make_path(destination) # TODO, FIXME: need to have timeout for yum install make # and then test can access, otherwise, loop. install_make = 'yum install -y make' command_out = '' errs = '' logger.debug("starting command for %s" % ip) ssh = '' try: ssh = open_connection(ip_address=ip, settings=settings) command_out, errs = run_command_with_status(ssh, install_make) logger.debug("command_out1=(%s, %s)" % (command_out, errs)) run_make(ssh, makefile_path, 'start_bootstrap') except Exception, e:#fixme: consider using reliability framework logger.error(e) raise
# Duplicate (shadowing) definition of _start_bootstrap -- only this later
# version is live. Same visible behaviour: copy payload, `yum install -y
# make` over ssh, run the `start_bootstrap` make target, log and re-raise
# any failure.
# NOTE(review): Python 2 except syntax; `ssh = ''` hints at a lost
# `finally`-based close -- the connection is never closed as written.
def _start_bootstrap(instance, ip, settings, source, destination): """ Start the task on the instance, then return """ logger.info("run_task %s" % str(instance)) copy_directories(source, destination) makefile_path = get_make_path(destination) # TODO, FIXME: need to have timeout for yum install make # and then test can access, otherwise, loop. install_make = 'yum install -y make' command_out = '' errs = '' logger.debug("starting command for %s" % ip) ssh = '' try: ssh = open_connection(ip_address=ip, settings=settings) command_out, errs = run_command_with_status(ssh, install_make) logger.debug("command_out1=(%s, %s)" % (command_out, errs)) run_make(ssh, makefile_path, 'start_bootstrap') except Exception, e: #fixme: consider using reliability framework logger.error(e) raise
# Generates an RSA keypair for remote-filesystem access: writes the private
# key under the BDP root path (uniquifying the filename with _N suffixes if
# it already exists), writes the .pub alongside it, uploads the public key to
# <home_path>/.ssh/<key>.pub via storage.RemoteStorage, and appends it to
# authorized_keys over ssh using echo; failures set the
# key_generated/message pair inside the except handlers.
# NOTE(review): truncated -- the block ends inside the socket.gaierror
# handler; the return statement (and any further handlers/cleanup) are
# missing from this view.
# NOTE(review): RSAKey.generate(1024) is a weak key size by current
# standards -- consider 2048 or larger.
# NOTE(review): `'Name or service not known' in e` tests membership against
# the exception object itself (Py2 arg iteration), not str(e) -- fragile;
# also `public_key_content` is interpolated unquoted into a shell command.
def generate_rfs_key(parameters): key_generated = True message = 'Key generated successfully' password = '' if 'password' in parameters.keys(): password = parameters['password'] ssh_settings = {'username': parameters['username'], 'port': parameters['port'], 'password': password} storage_settings = {'params': ssh_settings, 'host': parameters['ip_address'], 'root': "/"} bdp_root_path = storage.get_bdp_root_path() key_name_org = os.path.splitext(os.path.basename(parameters['private_key_path']))[0] key_name = key_name_org private_key_absolute_path = os.path.join(bdp_root_path, parameters['private_key_path']) key_dir = os.path.dirname(private_key_absolute_path) if not os.path.exists(key_dir): os.makedirs(key_dir) counter = 1 while os.path.exists(os.path.join(key_dir, key_name)): key_name = '%s_%d' % (key_name_org, counter) counter += 1 parameters['private_key_path'] = os.path.join(os.path.dirname( parameters['private_key_path']), key_name) private_key_absolute_path = os.path.join(bdp_root_path, parameters['private_key_path']) public_key_absolute_path = '%s.pub' % private_key_absolute_path remote_key_path = os.path.join(parameters['home_path'], '.ssh', ('%s.pub' % key_name)) authorized_remote_path = os.path.join(parameters['home_path'], '.ssh', 'authorized_keys') try: private_key = paramiko.RSAKey.generate(1024) private_key.write_private_key_file(private_key_absolute_path) public_key = paramiko.RSAKey(filename=private_key_absolute_path) public_key_content = '%s %s' % (public_key.get_name(), public_key.get_base64()) f = open(public_key_absolute_path, 'w') f.write("\n%s\n" % public_key_content) f.close() fs = storage.RemoteStorage(settings=storage_settings) fs.save(remote_key_path, ContentFile(public_key_content)) ssh_client = open_connection(parameters['ip_address'], ssh_settings) #command = 'cat %s >> %s' % (remote_key_path, authorized_remote_path) space = " " command = 'echo %s >> %s; echo %s >> %s; echo %s >> %s; chmod 600 %s' % ( space, authorized_remote_path, 
public_key_content, authorized_remote_path, space, authorized_remote_path, authorized_remote_path) command_out, errs = run_command_with_status(ssh_client, command) if errs: if 'Permission denied' in errs: key_generated = False message = 'Permission denied to copy public key to %s/.ssh/authorized_keys' % parameters['home_path'] else: raise IOError except AuthError: key_generated = False message = 'Unauthorized access to %s' % parameters['ip_address'] except socket.gaierror, e: key_generated = False if 'Name or service not known' in e: message = 'Unknown IP address [%s]' % parameters['ip_address'] else: message = '[%s]: %s, %s' % (parameters['ip_address'], e.__doc__, e.strerror)
# Near-duplicate of generate_rfs_key for plain unix hosts: generates an RSA
# keypair under the BDP root, uploads the public key, and appends it to
# authorized_keys over ssh. Visible difference from generate_rfs_key: the
# ssh_settings dict here omits the 'port' entry.
# NOTE(review): truncated -- ends inside the socket.gaierror handler; the
# return statement is missing from this view.
# NOTE(review): same concerns as generate_rfs_key apply -- 1024-bit RSA is
# weak, `'Name or service not known' in e` relies on Py2 exception-arg
# iteration, and public_key_content is shell-interpolated unquoted.
def generate_unix_key(parameters): key_generated = True message = 'Key generated successfully' password = '' if 'password' in parameters.keys(): password = parameters['password'] ssh_settings = {'username': parameters['username'], 'password': password} storage_settings = { 'params': ssh_settings, 'host': parameters['ip_address'], 'root': "/" } bdp_root_path = storage.get_bdp_root_path() key_name_org = os.path.splitext( os.path.basename(parameters['private_key_path']))[0] key_name = key_name_org private_key_absolute_path = os.path.join(bdp_root_path, parameters['private_key_path']) key_dir = os.path.dirname(private_key_absolute_path) if not os.path.exists(key_dir): os.makedirs(key_dir) counter = 1 while os.path.exists(os.path.join(key_dir, key_name)): key_name = '%s_%d' % (key_name_org, counter) counter += 1 parameters['private_key_path'] = os.path.join( os.path.dirname(parameters['private_key_path']), key_name) private_key_absolute_path = os.path.join(bdp_root_path, parameters['private_key_path']) public_key_absolute_path = '%s.pub' % private_key_absolute_path remote_key_path = os.path.join(parameters['home_path'], '.ssh', ('%s.pub' % key_name)) authorized_remote_path = os.path.join(parameters['home_path'], '.ssh', 'authorized_keys') try: private_key = paramiko.RSAKey.generate(1024) private_key.write_private_key_file(private_key_absolute_path) public_key = paramiko.RSAKey(filename=private_key_absolute_path) public_key_content = '%s %s' % (public_key.get_name(), public_key.get_base64()) f = open(public_key_absolute_path, 'w') f.write("\n%s\n" % public_key_content) f.close() fs = storage.RemoteStorage(settings=storage_settings) fs.save(remote_key_path, ContentFile(public_key_content)) ssh_client = open_connection(parameters['ip_address'], ssh_settings) #command = 'cat %s >> %s' % (remote_key_path, authorized_remote_path) space = " " command = 'echo %s >> %s; echo %s >> %s; echo %s >> %s; chmod 600 %s' % ( space, authorized_remote_path, public_key_content, 
authorized_remote_path, space, authorized_remote_path, authorized_remote_path) command_out, errs = run_command_with_status(ssh_client, command) if errs: if 'Permission denied' in errs: key_generated = False message = 'Permission denied to copy public key to %s/.ssh/authorized_keys' % parameters[ 'home_path'] else: raise IOError except AuthError: key_generated = False message = 'Unauthorized access to %s' % parameters['ip_address'] except socket.gaierror, e: key_generated = False if 'Name or service not known' in e: message = 'Unknown IP address [%s]' % parameters['ip_address'] else: message = '[%s]: %s, %s' % (parameters['ip_address'], e.__doc__, e.strerror)