Example #1
import itertools

# hashdeep, metadata, utils, scan_backup, lenient_match, and
# VerificationResult are project-local names assumed to be imported at
# module level in the original source.
def verify_backup(rootdir, mdata, num_threads=None):
    changed = []

    # Scan the backup tree for all relevant paths; entries the scan
    # ignores are reported as unexpected
    scan_result = scan_backup(rootdir)
    scan_errors = scan_result.errors
    unexpected = scan_result.ignored

    # Build a dictionary with all original metadata
    original_metadata = {}
    for p in itertools.chain(mdata.files, mdata.symlinks, mdata.directories):
        original_metadata[p.name] = p

    # Build a dictionary with all current metadata
    digest_map = hashdeep.compute_digests(rootdir, scan_result.files,
                                          num_threads)
    uid_map = utils.get_uid_name_map()
    gid_map = utils.get_gid_name_map()
    current_metadata = {}
    for f in scan_result.files:
        current_metadata[f] = metadata.get_file_metadata(rootdir, f, digest_map,
                                                         uid_map, gid_map)
    for s in scan_result.symlinks:
        current_metadata[s] = metadata.get_symlink_metadata(rootdir, s, uid_map,
                                                            gid_map)
    for d in scan_result.directories:
        current_metadata[d] = metadata.get_directory_metadata(rootdir, d,
                                                              uid_map, gid_map)

    # Find missing and unexpected paths
    all_current_paths = set(scan_result.files + scan_result.symlinks +
                            scan_result.directories)
    all_original_paths = set(original_metadata.keys())

    missing = sorted(all_original_paths - all_current_paths)
    unexpected.extend(all_current_paths - all_original_paths)
    unexpected.sort()

    # Find changed files
    for p in all_current_paths.intersection(all_original_paths):
        if not lenient_match(current_metadata[p], original_metadata[p]):
            changed.append(p)

    # Return the final result
    return VerificationResult(changed, missing, unexpected, scan_errors)
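
A minimal usage sketch (not part of the original module). It assumes the metadata file was written earlier with metadata.write_backup_metadata, that a matching reader exists (the name metadata.read_backup_metadata is hypothetical), and that VerificationResult exposes its fields as attributes (e.g. a namedtuple):

# Usage sketch; read_backup_metadata is a hypothetical reader mirroring
# write_backup_metadata from Example #2.
with open('/backups/2024-01-01.meta') as fh:
    mdata = metadata.read_backup_metadata(fh)

result = verify_backup('/backups/2024-01-01', mdata, num_threads=4)
if result.changed or result.missing or result.unexpected:
    print('backup verification failed:')
    print('  changed:    {}'.format(result.changed))
    print('  missing:    {}'.format(result.missing))
    print('  unexpected: {}'.format(result.unexpected))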
Example #2
    # Sanity check: hashdeep must report exactly the input paths
    keys = res.keys()
    if len(keys) != len(paths) or set(keys) != set(paths):
        raise ValueError('List of filenames returned by hashdeep does not '
                         'match the input list.')

    return res


if __name__ == "__main__":
    import sys

    v = version()
    supp = is_supported_version(v)
    print('hashdeep version: {}  (compatible: {})'.format(v, supp))
    if len(sys.argv) == 3:
        rootdir = sys.argv[1]
        filename_file = sys.argv[2]
        filenames = []
        with open(filename_file) as tmpfile:
            for line in tmpfile:
                filenames.append(line.strip())
        result = compute_digests(rootdir, filenames)
        print(result)

        for f in filenames:
            print(metadata.get_file_metadata(rootdir, f, result))

        backup_metadata = metadata.get_backup_metadata(rootdir, filenames,
                                                       [], [], result)
        metadata.write_backup_metadata(sys.stdout, backup_metadata)