import os
import json
import time
import shutil
import collections
import subprocess

import paramiko

# Helpers used below (copy_paths, save_to_remote, read_from_remote,
# run_on_node, reconnect, execution_time, BGSSHTask, StopTestError,
# logger, paths, config_remote_path) are defined elsewhere in this package.


def deploy_sensors(node_sensor_config):
    # check that path already exists
    copy_paths(node_sensor_config.conn, paths)

    with node_sensor_config.conn.open_sftp() as sftp:
        sensors_config = node_sensor_config.sensors.copy()
        sensors_config['source_id'] = node_sensor_config.source_id
        save_to_remote(sftp, config_remote_path,
                       json.dumps(sensors_config))
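
# Illustrative only (hypothetical values): with node_sensor_config.sensors ==
# {"block-io": {"allowed_devices": ["sda1"]}} and source_id == "node-0", the
# file at config_remote_path would receive the JSON object
# {"block-io": {"allowed_devices": ["sda1"]}, "source_id": "node-0"}.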
def do_run(self, node, barrier, fio_cfg, pos, nolog=False):
    if self.use_sudo:
        sudo = "sudo "
    else:
        sudo = ""

    # Wrapper script: run fio, capturing its combined output and exit code
    # in separate files so they survive an SSH reconnect.
    bash_file = "#!/bin/bash\n" + \
                "cd {exec_folder}\n" + \
                "{fio_path}fio --output-format=json --output={out_file} " + \
                "--alloc-size=262144 {job_file} " + \
                " >{err_out_file} 2>&1 \n" + \
                "echo $? >{res_code_file}\n"

    exec_folder = self.config.remote_dir

    if self.use_system_fio:
        fio_path = ""
    else:
        if not exec_folder.endswith("/"):
            fio_path = exec_folder + "/"
        else:
            fio_path = exec_folder

    bash_file = bash_file.format(out_file=self.results_file,
                                 job_file=self.task_file,
                                 err_out_file=self.err_out_file,
                                 res_code_file=self.exit_code_file,
                                 exec_folder=exec_folder,
                                 fio_path=fio_path)

    with node.connection.open_sftp() as sftp:
        save_to_remote(sftp, self.task_file, str(fio_cfg))
        save_to_remote(sftp, self.sh_file, bash_file)

    exec_time = execution_time(fio_cfg)
    timeout = int(exec_time + max(300, exec_time))
    soft_tout = exec_time

    begin = time.time()
    fnames_before = run_on_node(node)("ls -1 " + exec_folder, nolog=True)
    barrier.wait()

    task = BGSSHTask(node, self.use_sudo)
    task.start(sudo + "bash " + self.sh_file)

    while True:
        try:
            task.wait(soft_tout, timeout)
            break
        except paramiko.SSHException:
            pass

        # The SSH connection may drop during a long run - close it
        # (ignoring errors) and reconnect before waiting again.
        try:
            node.connection.close()
        except Exception:
            pass

        reconnect(node.connection, node.conn_url)

    end = time.time()
    rossh = run_on_node(node)
    fnames_after = rossh("ls -1 " + exec_folder, nolog=True)

    conn_id = node.get_conn_id().replace(":", "_")
    if not nolog:
        logger.debug("Test on node {0} is finished".format(conn_id))

    # Prefixes of the per-job log files fio was asked to write.
    log_files_pref = []
    if 'write_lat_log' in fio_cfg.vals:
        fname = fio_cfg.vals['write_lat_log']
        log_files_pref.append(fname + '_clat')
        log_files_pref.append(fname + '_lat')
        log_files_pref.append(fname + '_slat')

    if 'write_iops_log' in fio_cfg.vals:
        fname = fio_cfg.vals['write_iops_log']
        log_files_pref.append(fname + '_iops')

    if 'write_bw_log' in fio_cfg.vals:
        fname = fio_cfg.vals['write_bw_log']
        log_files_pref.append(fname + '_bw')

    files = collections.defaultdict(lambda: [])
    all_files = [os.path.basename(self.results_file)]
    new_files = set(fnames_after.split()) - set(fnames_before.split())

    for fname in new_files:
        if fname.endswith('.log') and fname.split('.')[0] in log_files_pref:
            name, _ = os.path.splitext(fname)
            if fname.count('.') == 1:
                tp = name.split("_")[-1]
                cnt = 0
            else:
                tp_cnt = name.split("_")[-1]
                tp, cnt = tp_cnt.split('.')
            files[tp].append((int(cnt), fname))
            all_files.append(fname)

    # Pack all result files into one archive, fetch and unpack it locally.
    arch_name = self.join_remote('wally_result.tar.gz')
    tmp_dir = os.path.join(self.config.log_directory, 'tmp_' + conn_id)
    os.mkdir(tmp_dir)
    loc_arch_name = os.path.join(tmp_dir,
                                 'wally_result.{0}.tar.gz'.format(conn_id))
    file_full_names = " ".join(all_files)

    try:
        os.unlink(loc_arch_name)
    except OSError:
        pass

    with node.connection.open_sftp() as sftp:
        exit_code = read_from_remote(sftp, self.exit_code_file)
        err_out = read_from_remote(sftp, self.err_out_file)
        exit_code = exit_code.strip()

        if exit_code != '0':
            msg = "fio exited with code {0}: {1}".format(exit_code, err_out)
            logger.critical(msg.strip())
            raise StopTestError("fio failed")

        rossh("rm -f {0}".format(arch_name), nolog=True)
        pack_files_cmd = "cd {0} ; tar zcvf {1} {2}".format(exec_folder,
                                                            arch_name,
                                                            file_full_names)
        rossh(pack_files_cmd, nolog=True)
        sftp.get(arch_name, loc_arch_name)

    unpack_files_cmd = "cd {0} ; tar xvzf {1} >/dev/null".format(
        tmp_dir, loc_arch_name)
    subprocess.check_call(unpack_files_cmd, shell=True)
    os.unlink(loc_arch_name)

    # Rename the unpacked fio log files into the run's log directory.
    for ftype, fls in files.items():
        for idx, fname in fls:
            cname = os.path.join(tmp_dir, fname)
            loc_fname = "{0}_{1}_{2}.{3}.log".format(pos, conn_id, ftype, idx)
            loc_path = os.path.join(self.config.log_directory, loc_fname)
            os.rename(cname, loc_path)

    cname = os.path.join(tmp_dir, os.path.basename(self.results_file))
    loc_fname = "{0}_{1}_rawres.json".format(pos, conn_id)
    loc_path = os.path.join(self.config.log_directory, loc_fname)
    os.rename(cname, loc_path)

    os.rmdir(tmp_dir)

    remove_remote_res_files_cmd = "cd {0} ; rm -f {1} {2}".format(
        exec_folder, arch_name, file_full_names)
    rossh(remove_remote_res_files_cmd, nolog=True)
    return begin, end
def do_run(self, node, barrier, fio_cfg, pos, nolog=False):
    if self.use_sudo:
        sudo = "sudo "
    else:
        sudo = ""

    # Wrapper script: besides running fio it samples /proc/diskstats for the
    # device under test once per second, so system-level IO activity can be
    # compared against what fio itself reports.
    bash_file = """
#!/bin/bash

function get_dev() {{
    if [ -b "$1" ] ; then
        echo $1
    else
        echo $(df "$1" | tail -1 | awk '{{print $1}}')
    fi
}}

function log_io_activity() {{
    local dest="$1"
    local dev=$(get_dev "$2")
    local sleep_time="$3"
    dev=$(basename "$dev")

    echo $dev
    for (( ; ; )) ; do
        grep -E "\\b$dev\\b" /proc/diskstats >> "$dest"
        sleep $sleep_time
    done
}}

sync
cd {exec_folder}

log_io_activity {io_log_file} {test_file} 1 &
pid="$!"

{fio_path}fio --output-format=json --output={out_file} --alloc-size=262144 {job_file} >{err_out_file} 2>&1
echo $? >{res_code_file}
kill -9 $pid
"""

    exec_folder = self.config.remote_dir

    if self.use_system_fio:
        fio_path = ""
    else:
        if not exec_folder.endswith("/"):
            fio_path = exec_folder + "/"
        else:
            fio_path = exec_folder

    bash_file = bash_file.format(out_file=self.results_file,
                                 job_file=self.task_file,
                                 err_out_file=self.err_out_file,
                                 res_code_file=self.exit_code_file,
                                 exec_folder=exec_folder,
                                 fio_path=fio_path,
                                 test_file=self.config_params['FILENAME'],
                                 io_log_file=self.io_log_file).strip()

    with node.connection.open_sftp() as sftp:
        save_to_remote(sftp, self.task_file, str(fio_cfg))
        save_to_remote(sftp, self.sh_file, bash_file)

    exec_time = execution_time(fio_cfg)
    timeout = int(exec_time + max(300, exec_time))
    soft_tout = exec_time

    begin = time.time()
    fnames_before = run_on_node(node)("ls -1 " + exec_folder, nolog=True)
    barrier.wait()

    task = BGSSHTask(node, self.use_sudo)
    task.start(sudo + "bash " + self.sh_file)

    while True:
        try:
            task.wait(soft_tout, timeout)
            break
        except paramiko.SSHException:
            pass

        # The SSH connection may drop during a long run - close it
        # (ignoring errors) and reconnect before waiting again.
        try:
            node.connection.close()
        except Exception:
            pass

        reconnect(node.connection, node.conn_url)

    end = time.time()
    rossh = run_on_node(node)
    fnames_after = rossh("ls -1 " + exec_folder, nolog=True)

    conn_id = node.get_conn_id().replace(":", "_")
    if not nolog:
        logger.debug("Test on node {0} is finished".format(conn_id))

    # Prefixes of the per-job log files fio was asked to write.
    log_files_pref = []
    if 'write_lat_log' in fio_cfg.vals:
        fname = fio_cfg.vals['write_lat_log']
        log_files_pref.append(fname + '_clat')
        log_files_pref.append(fname + '_lat')
        log_files_pref.append(fname + '_slat')

    if 'write_iops_log' in fio_cfg.vals:
        fname = fio_cfg.vals['write_iops_log']
        log_files_pref.append(fname + '_iops')

    if 'write_bw_log' in fio_cfg.vals:
        fname = fio_cfg.vals['write_bw_log']
        log_files_pref.append(fname + '_bw')

    files = collections.defaultdict(lambda: [])
    all_files = [os.path.basename(self.results_file)]
    new_files = set(fnames_after.split()) - set(fnames_before.split())

    for fname in new_files:
        if fname.endswith('.log') and fname.split('.')[0] in log_files_pref:
            name, _ = os.path.splitext(fname)
            if fname.count('.') == 1:
                tp = name.split("_")[-1]
                cnt = 0
            else:
                tp_cnt = name.split("_")[-1]
                tp, cnt = tp_cnt.split('.')
            files[tp].append((int(cnt), fname))
            all_files.append(fname)
        elif fname == os.path.basename(self.io_log_file):
            # raw /proc/diskstats samples written by log_io_activity
            files['iops'].append(('sys', fname))
            all_files.append(fname)

    # Pack all result files into one archive, fetch and unpack it locally.
    arch_name = self.join_remote('wally_result.tar.gz')
    tmp_dir = os.path.join(self.config.log_directory, 'tmp_' + conn_id)

    if os.path.exists(tmp_dir):
        shutil.rmtree(tmp_dir)
    os.mkdir(tmp_dir)

    loc_arch_name = os.path.join(tmp_dir,
                                 'wally_result.{0}.tar.gz'.format(conn_id))
    file_full_names = " ".join(all_files)

    try:
        os.unlink(loc_arch_name)
    except OSError:
        pass

    with node.connection.open_sftp() as sftp:
        try:
            exit_code = read_from_remote(sftp, self.exit_code_file)
        except IOError:
            logger.error("No exit code file found on %s. "
                         "Looks like the process failed to start", conn_id)
            return None

        err_out = read_from_remote(sftp, self.err_out_file)
        exit_code = exit_code.strip()

        if exit_code != '0':
            msg = "fio exited with code {0}: {1}".format(exit_code, err_out)
            logger.critical(msg.strip())
            raise StopTestError("fio failed")

        rossh("rm -f {0}".format(arch_name), nolog=True)
        pack_files_cmd = "cd {0} ; tar zcvf {1} {2}".format(exec_folder,
                                                            arch_name,
                                                            file_full_names)
        rossh(pack_files_cmd, nolog=True)
        sftp.get(arch_name, loc_arch_name)

    unpack_files_cmd = "cd {0} ; tar xvzf {1} >/dev/null".format(
        tmp_dir, loc_arch_name)
    subprocess.check_call(unpack_files_cmd, shell=True)
    os.unlink(loc_arch_name)

    # Rename the unpacked fio log files into the run's log directory.
    for ftype, fls in files.items():
        for idx, fname in fls:
            cname = os.path.join(tmp_dir, fname)
            loc_fname = "{0}_{1}_{2}.{3}.log".format(pos, conn_id, ftype, idx)
            loc_path = os.path.join(self.config.log_directory, loc_fname)
            os.rename(cname, loc_path)

    cname = os.path.join(tmp_dir, os.path.basename(self.results_file))
    loc_fname = "{0}_{1}_rawres.json".format(pos, conn_id)
    loc_path = os.path.join(self.config.log_directory, loc_fname)
    os.rename(cname, loc_path)

    os.rmdir(tmp_dir)

    remove_remote_res_files_cmd = "cd {0} ; rm -f {1} {2}".format(
        exec_folder, arch_name, file_full_names)
    rossh(remove_remote_res_files_cmd, nolog=True)
    return begin, end
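
# A minimal sketch of how the raw /proc/diskstats samples collected by the
# bash log_io_activity loop above could be turned into per-interval IOPS
# (illustrative; diskstats_to_iops is a hypothetical helper, not part of this
# module, and it assumes one matching diskstats line per sample). Fields 4
# and 8 of /proc/diskstats are cumulative reads and writes completed, so
# consecutive samples are differenced; with the 1-second sleep used above,
# each delta approximates IOPS for that interval.
def diskstats_to_iops(lines):
    ops = []
    for line in lines:
        fields = line.split()
        # fields[3] == reads completed, fields[7] == writes completed
        ops.append(int(fields[3]) + int(fields[7]))
    # counters are cumulative since boot, so difference adjacent samples
    return [b - a for a, b in zip(ops, ops[1:])]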