#!/usr/bin/env python

import hashdeep
import metadata
import os.path
import pattern
import sys

if __name__ == "__main__":
    if len(sys.argv) != 4:
        print "usage: %s rootdir patterns_file metadata_file" % sys.argv[0]
        sys.exit(1)

    rootdir = os.path.normpath(sys.argv[1])

    # Parse the pattern file and expand the patterns into the concrete
    # files, symlinks, and directories to back up under rootdir.
    with open(sys.argv[2]) as patterns_file:
        patterns = pattern.parse_pattern_file(patterns_file)
    pathlist = pattern.assemble_paths(rootdir, patterns)

    # Hash the regular files with hashdeep, then gather the backup metadata.
    digests = hashdeep.compute_digests(rootdir, pathlist.filenames)
    backup_metadata = metadata.get_backup_metadata(rootdir=rootdir,
                                                   files=pathlist.filenames,
                                                   symlinks=pathlist.symlinks,
                                                   directories=pathlist.directories,
                                                   digest_map=digests)

    # Write the collected backup metadata to the output file.
    with open(sys.argv[3], "w") as metadata_file:
        metadata.write_backup_metadata(metadata_file, backup_metadata)
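The script above only relies on assemble_paths returning an object with filenames, symlinks, and directories attributes; note that only the regular files are passed to hashdeep for hashing. As a rough, hypothetical sketch of that result shape (the real pattern module is not shown here and may differ):

# Hypothetical sketch of the result shape the script expects from
# pattern.assemble_paths(); the real pattern module may differ.
from collections import namedtuple

PathList = namedtuple("PathList", ["filenames", "symlinks", "directories"])

example_pathlist = PathList(
    filenames=["docs/report.txt"],   # regular files: hashed and recorded
    symlinks=["docs/latest"],        # symlinks: recorded but not hashed
    directories=["docs"],            # directories: recorded but not hashed
)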
Example #2
    keys = res.keys()
    if len(keys) != len(paths) or set(keys) != set(paths):
        raise ValueError('List of filenames returned by hashdeep does not '
            'match the input list.')
    
    return res


if __name__ == "__main__":
    import sys
    import metadata  # used below; assumed not already imported at module level

    v = version()
    supp = is_supported_version(v)
    print('hashdeep version: {}  (compatible: {})'.format(v, supp))
    if len(sys.argv) == 3:
        rootdir = sys.argv[1]
        filename_file = sys.argv[2]
        # Read the list of paths to hash, one per line.
        filenames = []
        with open(filename_file) as tmpfile:
            for line in tmpfile:
                filenames.append(line.strip())
        result = compute_digests(rootdir, filenames)
        print(result)

        for f in filenames:
            print(metadata.get_file_metadata(rootdir, f, result))

        backup_metadata = metadata.get_backup_metadata(rootdir, filenames, [], [], result)
        metadata.write_backup_metadata(sys.stdout, backup_metadata)
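For reference, the check near the top of this example (the returned keys must equal the input path list) shows that compute_digests returns a mapping keyed by exactly the paths that were passed in. A minimal, hypothetical illustration of consuming such a mapping follows; the paths and digest values are made up, and the exact value format depends on the hashdeep wrapper:

# Hypothetical example of the mapping shape compute_digests() returns:
# one entry per requested path, keyed by the same path strings that were
# passed in. Digest values below are made up.
digests = {
    "docs/report.txt": "9f86d081884c7d65",
    "photos/cat.jpg": "60303ae22b998861",
}

for path, digest in sorted(digests.items()):
    print("{}  {}".format(digest, path))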