def fix_paths(site_pkg_dir, cov_data_file):
    """Remap paths in a coverage data file from an installed location to "src".

    `site_pkg_dir` is the installed package directory whose paths should be
    rewritten; `cov_data_file` is the coverage data file to fix in place.

    The data file is moved aside to a temporary name, re-read, and merged
    into a fresh data file at `cov_data_file` with the path aliases applied.
    """
    import tempfile  # local import: only needed by this helper

    site_pkg_dir = os.path.abspath(site_pkg_dir)
    paths = PathAliases()
    paths.add(site_pkg_dir, "src")

    # mkstemp gives us a unique on-disk name without the close-then-reuse
    # race of NamedTemporaryFile(delete=True), whose close() removes the
    # file before we move ours into place.
    handle, old_cov_path = tempfile.mkstemp()
    os.close(handle)
    shutil.move(cov_data_file, old_cov_path)
    try:
        old_coverage_data = CoverageData(old_cov_path)
        old_coverage_data.read()
        new_coverage_data = CoverageData(cov_data_file)
        new_coverage_data.update(old_coverage_data, aliases=paths)
        new_coverage_data.write()
    finally:
        # Don't leak the moved-aside copy of the data file.
        os.remove(old_cov_path)
def debug_data_file(filename):
    """Implementation of 'coverage debug data'.

    Print a human-readable summary of the coverage data file `filename`:
    its resolved path, whether it records arcs, and a per-file line count
    (with the measuring plugin, if any).
    """
    data = CoverageData(filename)
    filename = data.data_filename()
    # BUG FIX: previously printed the literal text "(unknown)" instead of
    # the resolved data-file path computed on the line above.
    print(f"path: {filename}")
    if not os.path.exists(filename):
        print("No data collected: file doesn't exist")
        return
    data.read()
    print(f"has_arcs: {data.has_arcs()!r}")
    summary = line_counts(data, fullpath=True)
    filenames = human_sorted(summary.keys())
    nfiles = len(filenames)
    print(f"{nfiles} file{plural(nfiles)}:")
    for f in filenames:
        line = f"{f}: {summary[f]} line{plural(summary[f])}"
        plugin = data.file_tracer(f)
        if plugin:
            line += f" [{plugin}]"
        print(line)
def combine_parallel_data(data, aliases=None, data_paths=None, strict=False, keep=False):
    """Combine a number of data files together.

    Treat `data.filename` as a file prefix, and combine the data from all
    of the data files starting with that prefix plus a dot.

    If `aliases` is provided, it's a `PathAliases` object that is used to
    re-map paths to match the local machine's.

    If `data_paths` is provided, it is a list of directories or files to
    combine.  Directories are searched for files that start with
    `data.filename` plus dot as a prefix, and those files are combined.

    If `data_paths` is not provided, then the directory portion of
    `data.filename` is used as the directory to search for data files.

    Unless `keep` is True every data file found and combined is then deleted
    from disk.  If a file cannot be read, a warning will be issued, and the
    file will not be deleted.

    If `strict` is true, and no files are found to combine, an error is
    raised.
    """
    # Because of the os.path.abspath in the constructor, data_dir will
    # never be an empty string.
    data_dir, local = os.path.split(data.base_filename())
    localdot = local + '.*'

    data_paths = data_paths or [data_dir]
    files_to_combine = []
    for p in data_paths:
        if os.path.isfile(p):
            files_to_combine.append(os.path.abspath(p))
        elif os.path.isdir(p):
            pattern = os.path.join(os.path.abspath(p), localdot)
            files_to_combine.extend(glob.glob(pattern))
        else:
            raise CoverageException("Couldn't combine from non-existent path '%s'" % (p,))

    if strict and not files_to_combine:
        raise CoverageException("No data to combine")

    files_combined = 0
    for f in files_to_combine:
        if f == data.data_filename():
            # Sometimes we are combining into a file which is one of the
            # parallel files.  Skip that file.
            if data._debug.should('dataio'):
                data._debug.write("Skipping combining ourself: %r" % (f,))
            continue
        if data._debug.should('dataio'):
            data._debug.write("Combining data file %r" % (f,))
        try:
            new_data = CoverageData(f, debug=data._debug)
            new_data.read()
        except CoverageException as exc:
            if data._warn:
                # The CoverageException has the file name in it, so just
                # use the message as the warning.
                data._warn(str(exc))
        else:
            data.update(new_data, aliases=aliases)
            files_combined += 1
            if not keep:
                # Delete only in the success path: a file that failed to
                # read was warned about above and must stay on disk, as the
                # docstring promises.
                if data._debug.should('dataio'):
                    data._debug.write("Deleting combined data file %r" % (f,))
                file_be_gone(f)

    if strict and not files_combined:
        raise CoverageException("No usable data files")
def combine_parallel_data(
    data, aliases=None, data_paths=None, strict=False, keep=False, message=None,
):
    """Merge parallel coverage data files into `data`.

    `data` is a CoverageData.  Its filename acts as a prefix: every data
    file named with that prefix plus a dot is read and merged in.

    `aliases`, when given, is a `PathAliases` object used to remap measured
    paths to the local machine's layout.

    `data_paths`, when given, lists directories (searched for prefix-dot
    files) and/or individual files to combine; when omitted, the directory
    portion of `data.filename` is searched instead.

    Each successfully combined file is deleted from disk unless `keep` is
    true.  A file that cannot be read produces a warning and is left alone.

    `message`, when given, is called with a progress string for each file
    combined.

    Raises NoDataError when `strict` is true and no files were found, or
    none could be used.
    """
    def dataio_note(text):
        # Emit a debug line when the 'dataio' debug option is enabled.
        if data._debug.should('dataio'):
            data._debug.write(text)

    files_to_combine = combinable_files(data.base_filename(), data_paths)
    if strict and not files_to_combine:
        raise NoDataError("No data to combine")

    files_combined = 0
    for f in files_to_combine:
        if f == data.data_filename():
            # We might be combining into a file that is itself one of the
            # parallel files; never consume the target.
            dataio_note(f"Skipping combining ourself: {f!r}")
            continue
        dataio_note(f"Combining data file {f!r}")
        try:
            new_data = CoverageData(f, debug=data._debug)
            new_data.read()
        except CoverageException as exc:
            if data._warn:
                # The exception text already names the file, so pass it
                # straight through as the warning message.
                data._warn(str(exc))
        else:
            data.update(new_data, aliases=aliases)
            files_combined += 1
            if message:
                message(f"Combined data file {os.path.relpath(f)}")
            if not keep:
                dataio_note(f"Deleting combined data file {f!r}")
                file_be_gone(f)

    if strict and not files_combined:
        raise NoDataError("No usable data files")