def main(argv=None):
    """
    Runs the main program: finds fes.out-style files under the base dir and
    writes one combined file per directory.

    :param argv: The command line arguments.
    :return: The return code for the program's termination.
    """
    args, ret = parse_cmdline(argv)
    if ret != GOOD_RET or args is None:
        return ret

    found_files = find_files_by_dir(args.base_dir, args.pattern)
    print("Found {} dirs with files to combine".format(len(found_files)))
    for f_dir, files in found_files.items():
        if not files:
            # Logger.warn is deprecated; warning() is the supported name
            logger.warning("No files with pattern '{}' found for dir '{}'".format(args.pattern, f_dir))
            continue
        combo_file = os.path.join(f_dir, args.target_file)
        # Do not clobber an existing combined file unless --overwrite was given
        if os.path.exists(combo_file) and not args.overwrite:
            warning("Target file already exists: '{}' \n"
                    "Skipping dir '{}'".format(combo_file, f_dir))
            continue
        combo = combine([os.path.join(f_dir, tgt) for tgt in files])
        # Reuse the header from the first source file for the combined output
        write_combo(extract_header(os.path.join(f_dir, files[0])), combo, combo_file)
    return GOOD_RET  # success
def main(argv=None):
    """
    Runs the main program: computes radially-corrected free energy (zeroed at
    the zero point) for a single file or for all matching files under base_dir.

    :param argv: The command line arguments.
    :return: The return code for the program's termination.
    """
    args, ret = parse_cmdline(argv)
    if ret != GOOD_RET or args is None:
        return ret

    kbt = calc_kbt(args.temp)
    if args.src_file is not None:
        proc_data = to_zero_point(calc_rad(args.src_file, kbt))
        write_csv(proc_data, create_out_fname(args.src_file, prefix=OUT_PFX), RAD_KEY_SEQ)
    else:
        found_files = find_files_by_dir(args.base_dir, args.pattern)
        logger.debug("Found '{}' dirs with files to process".format(len(found_files)))
        # items() works on both Python 2 and 3; iteritems() is Python-2-only
        for f_dir, files in found_files.items():
            if not files:
                # Logger.warn is deprecated; warning() is the supported name
                logger.warning("No files found for dir '{}'".format(f_dir))
                continue
            for pmf_path in [os.path.join(f_dir, tgt) for tgt in files]:
                proc_data = to_zero_point(calc_rad(pmf_path, kbt))
                f_name = create_out_fname(pmf_path, prefix=OUT_PFX)
                if allow_write(f_name, overwrite=args.overwrite):
                    write_csv(proc_data, f_name, RAD_KEY_SEQ)
    return GOOD_RET  # success
def main(argv=None):
    """
    Runs the main program: converts colvar data into the single-column form
    expected by WHAM, for a single file or all matching files under base_dir.

    :param argv: The command line arguments.
    :return: The return code for the program's termination.
    """
    args, ret = parse_cmdline(argv)
    if ret != 0:
        return ret

    if args.src_file is not None:
        proc_data = calc_for_wham(args.src_file)
        write_csv(proc_data, create_out_fname(args.src_file, prefix=OUT_PFX), COLVAR_WHAM_KEY_SEQ)
    else:
        found_files = find_files_by_dir(args.base_dir, args.pattern)
        logger.debug("Found '%d' dirs with files to process", len(found_files))
        # items() works on both Python 2 and 3; iteritems() is Python-2-only
        # noinspection PyCompatibility
        for f_dir, files in found_files.items():
            if not files:
                # Logger.warn is deprecated; warning() is the supported name
                logger.warning("No files found for dir '%s'", f_dir)
                continue
            for colvar_path in [os.path.join(f_dir, tgt) for tgt in files]:
                proc_data = calc_for_wham(colvar_path)
                f_name = create_out_fname(colvar_path, prefix=OUT_PFX)
                if allow_write(f_name, overwrite=args.overwrite):
                    # WHAM wants just the 'r' values, one per line
                    list_to_file([str(d['r']) for d in proc_data if 'r' in d], f_name)
                    # write_csv(proc_data, f_name, COLVAR_WHAM_KEY_SEQ, extrasaction="ignore")
    return 0  # success
def main(argv=None):
    """
    Runs the main program: finds fes.out-style files under the base dir and
    writes one combined file per directory.

    :param argv: The command line arguments.
    :return: The return code for the program's termination.
    """
    args, ret = parse_cmdline(argv)
    if ret != GOOD_RET or args is None:
        return ret

    found_files = find_files_by_dir(args.base_dir, args.pattern)
    print("Found {} dirs with files to combine".format(len(found_files)))
    for f_dir, files in found_files.items():
        if not files:
            # Logger.warn is deprecated; warning() is the supported name
            logger.warning("No files with pattern '{}' found for dir '{}'".format(
                args.pattern, f_dir))
            continue
        combo_file = os.path.join(f_dir, args.target_file)
        # Do not clobber an existing combined file unless --overwrite was given
        if os.path.exists(combo_file) and not args.overwrite:
            warning("Target file already exists: '{}' \n"
                    "Skipping dir '{}'".format(combo_file, f_dir))
            continue
        combo = combine([os.path.join(f_dir, tgt) for tgt in files])
        # Reuse the header from the first source file for the combined output
        write_combo(extract_header(os.path.join(f_dir, files[0])), combo, combo_file)
    return GOOD_RET  # success
def main(argv=None):
    """
    Runs the main program: computes radially-corrected free energy (zeroed at
    the zero point) for a single file or for all matching files under base_dir.

    :param argv: The command line arguments.
    :return: The return code for the program's termination.
    """
    args, ret = parse_cmdline(argv)
    if ret != GOOD_RET or args is None:
        return ret

    kbt = calc_kbt(args.temp)
    if args.src_file is not None:
        proc_data = to_zero_point(calc_rad(args.src_file, kbt))
        write_csv(proc_data, create_out_fname(args.src_file, prefix=OUT_PFX), RAD_KEY_SEQ)
    else:
        found_files = find_files_by_dir(args.base_dir, args.pattern)
        logger.debug("Found '{}' dirs with files to process".format(
            len(found_files)))
        # noinspection PyCompatibility
        for f_dir, files in found_files.items():
            if not files:
                # Logger.warn is deprecated; warning() is the supported name
                logger.warning("No files found for dir '{}'".format(f_dir))
                continue
            for pmf_path in [os.path.join(f_dir, tgt) for tgt in files]:
                proc_data = to_zero_point(calc_rad(pmf_path, kbt))
                f_name = create_out_fname(pmf_path, prefix=OUT_PFX)
                if allow_write(f_name, overwrite=args.overwrite):
                    write_csv(proc_data, f_name, RAD_KEY_SEQ)
    return GOOD_RET  # success
def main(argv=None):
    """
    Runs the main program: processes a single source CSV file, or every file
    matching the configured pattern under the base directory.

    :param argv: The command line arguments.
    :return: The return code for the program's termination.
    """
    # Read input
    args, ret = parse_cmdline(argv)
    if ret != GOOD_RET or args is None:
        return ret

    cfg = args.config

    try:
        if args.src_file is not None:
            process_file(args.src_file, cfg, delimiter=args.delimiter)
        else:
            found_files = find_files_by_dir(args.base_dir, cfg[FILE_PAT])
            # iterate the dict view directly; no need to materialize a list
            # noinspection PyCompatibility
            for f_dir, files in found_files.items():
                if not files:
                    warning("No files found for dir '{}'".format(f_dir))
                    continue
                for csv_path in [os.path.join(f_dir, tgt) for tgt in files]:
                    process_file(csv_path, cfg, delimiter=args.delimiter)
    except IOError as e:
        warning("Problems reading file:", e)
        return IO_ERROR
    except (ValueError, InvalidDataError) as e:
        warning("Problems reading data:", e)
        return INVALID_DATA

    return GOOD_RET  # success
def test_headers(self):
    """Header extracted from the single-FES dir matches the reference header lines."""
    file_map = find_files_by_dir(FES_OUT_SINGLE, DEF_FILE_PAT)
    self.assertEqual(1, len(file_map))
    src_dir, src_files = file_map.popitem()
    extracted = extract_header(os.path.join(src_dir, src_files[0]))
    expected = header_lines(HEADER_DIR)
    self.assertEqual(len(extracted), len(expected))
    self.assertListEqual(expected, extracted)
def test_writer(self):
    """Writing the combined multi-FES data yields a file equivalent to the reference."""
    file_map = find_files_by_dir(FES_OUT_MULTI, DEF_FILE_PAT)
    self.assertEqual(1, len(file_map))
    src_dir, src_files = file_map.popitem()
    combined = combine([os.path.join(src_dir, name) for name in src_files])
    try:
        write_combo(extract_header(os.path.join(src_dir, src_files[0])),
                    combined, FES_ALL_MULTI_FILE)
        self.assertEqual(map_fes(FES_ALL_MULTI), map_fes(FES_ALL_MULTI_FILE))
    finally:
        # Always clean up the generated output file
        os.remove(FES_ALL_MULTI_FILE)
def test_find(self):
    """find_files_by_dir locates exactly the expected files in each directory."""
    located = find_files_by_dir(FES_DIR, DEF_FILE_PAT)
    expected = expected_dir_data()
    self.assertEqual(len(expected), len(located))
    for dir_key, exp_files in expected.items():
        act_files = located.get(dir_key)
        try:
            # Python 3 spelling of the order-insensitive comparison
            # noinspection PyUnresolvedReferences
            self.assertCountEqual(exp_files, act_files)
        except AttributeError:
            # Python 2 fallback name
            self.assertItemsEqual(exp_files, act_files)
def _calc_pka_row(file_data, kbt, coord_ts, src_name):
    """
    Calculate the pKa for one file's data and return a result-row dict.
    Falls back to NO_MAX_RET values when no free-energy maximum is found.

    :param file_data: Parsed CSV rows for one PMF file.
    :param kbt: kB*T for the run temperature.
    :param coord_ts: Optional TS coordinate value (may be None).
    :param src_name: Source file name recorded in the result row.
    :return: A dict keyed by SRC_KEY, PKA_KEY, MAX_VAL, and MAX_LOC.
    """
    try:
        pka, cur_corr, cur_coord = calc_pka(file_data, kbt, coord_ts)
        return {SRC_KEY: src_name, PKA_KEY: pka,
                MAX_VAL: cur_corr, MAX_LOC: cur_coord}
    except NoMaxError:
        return {SRC_KEY: src_name, PKA_KEY: NO_MAX_RET,
                MAX_VAL: NO_MAX_RET, MAX_LOC: NO_MAX_RET}


def main(argv=None):
    """
    Runs the main program: computes pKa results for a single PMF file or for
    every matching file under base_dir (one result file per directory).

    :param argv: The command line arguments.
    :return: The return code for the program's termination.
    """
    args, ret = parse_cmdline(argv)
    if ret != GOOD_RET:
        return ret

    kbt = calc_kbt(args.temp)
    if args.coord_ts is not None:
        logger.info("Read TS coordinate value: '{:8.3f}'".format(args.coord_ts))

    try:
        if args.src_file is not None:
            file_data = read_csv(args.src_file, data_conv=KEY_CONV)
            result = [_calc_pka_row(file_data, kbt, args.coord_ts,
                                    os.path.basename(args.src_file))]
            write_result(result, args.src_file, args.overwrite)
        else:
            found_files = find_files_by_dir(args.base_dir, args.pattern)
            logger.debug("Found '{}' dirs with files to process".format(len(found_files)))
            if len(found_files) == 0:
                raise IOError("No files found in specified directory '{}'".format(args.base_dir))
            for f_dir, files in found_files.items():
                results = []
                # Sort for a deterministic row order in the per-dir result file
                for fname in sorted(files):
                    file_data = read_csv(os.path.join(f_dir, fname), data_conv=KEY_CONV)
                    results.append(_calc_pka_row(file_data, kbt, args.coord_ts, fname))
                write_result(results, os.path.basename(f_dir), args.overwrite,
                             basedir=os.path.dirname(f_dir))
    except IOError as e:
        warning(e)
        return IO_ERROR
    return GOOD_RET  # success
def main(argv=None):
    """
    Runs the main program.

    :param argv: The command line arguments.
    :return: The return code for the program's termination.
    """
    args, ret = parse_cmdline(argv)
    if ret != GOOD_RET:
        return ret

    dir_map = find_files_by_dir(args.base_dir, args.pattern)
    logger.debug("Found '{}' dirs with files to process".format(len(dir_map)))
    for work_dir, file_names in dir_map.items():
        # Group files by their shared name prefix, then average each group
        for prefix, grouped in bin_by_pattern(file_names).items():
            stats = calc_avg_stdev([os.path.join(work_dir, name) for name in grouped])
            out_path = os.path.join(work_dir, OUT_FNAME_FMT.format(prefix))
            write_avg_stdev(stats, out_path, overwrite=args.overwrite)
    return GOOD_RET  # success
def main(argv=None):
    """
    Runs the main program.

    :param argv: The command line arguments.
    :return: The return code for the program's termination.
    """
    args, ret = parse_cmdline(argv)
    if ret != GOOD_RET or args is None:
        return ret

    try:
        dir_map = find_files_by_dir(args.base_dir, args.pattern)
        for meta_dir, meta_files in dir_map.items():
            for meta_file in meta_files:
                meta_path = os.path.join(meta_dir, meta_file)
                block_average(meta_path, args.steps, overwrite=args.overwrite)
    except TemplateNotReadableError as e:
        warning(e)
        return INVALID_DATA
    return GOOD_RET  # success
def test_multi(self):
    """Combining all multi-dir FES files reproduces the reference combined map."""
    file_map = find_files_by_dir(FES_OUT_MULTI, DEF_FILE_PAT)
    self.assertEqual(1, len(file_map))
    src_dir, src_files = file_map.popitem()
    combined = combine([os.path.join(src_dir, name) for name in src_files])
    self.assertDictEqual(map_fes(FES_ALL_MULTI)[1], combined)