Example #1
File: cmd_util.py, Project: h8liu/cumulus
def cmd_verify_snapshots(snapshots):
    """ Verify snapshot integrity
    """
    get_passphrase()
    store = cumulus.CumulusStore(options.store)
    for s in snapshots:
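        # cumulus.accessed_segments is filled in as data is read from the
        # store; clear it so the descriptor check below only reflects this
        # snapshot's reads.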
        cumulus.accessed_segments.clear()
        print("#### Snapshot", s)
        d = cumulus.parse_full(store.load_snapshot(s))
        check_version(d['Format'])
        print("## Root:", d['Root'])
        metadata = cumulus.iterate_metadata(store, d['Root'])
        for m in metadata:
            if m.fields['type'] not in ('-', 'f'): continue
            print("%s [%d bytes]" % (m.fields['name'], int(m.fields['size'])))
            verifier = cumulus.ChecksumVerifier(m.fields['checksum'])
            size = 0
            for block in m.data():
                data = store.get(block)
                verifier.update(data)
                size += len(data)
            if int(m.fields['size']) != size:
                raise ValueError("File size does not match!")
            if not verifier.valid():
                raise ValueError("Bad checksum found")

        # Verify that the list of segments included with the snapshot was
        # actually accurate: it covers every segment that was really read,
        # and it doesn't list extra, unused segments.
        listed_segments = set(d['Segments'].split())
        if cumulus.accessed_segments - listed_segments:
            print("Error: Some segments not listed in descriptor!")
            print(sorted(list(cumulus.accessed_segments - listed_segments)))
        if listed_segments - cumulus.accessed_segments:
            print("Warning: Extra unused segments listed in descriptor!")
            print(sorted(list(listed_segments - cumulus.accessed_segments)))
    store.cleanup()
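
The per-file loop above streams each data block through a cumulus.ChecksumVerifier while adding up the byte count, then compares both against the metadata fields. Below is a minimal standalone sketch of the same streaming pattern, using hashlib in place of cumulus.ChecksumVerifier; the block source and the "sha1=<hexdigest>" checksum format are assumptions made purely for illustration:

import hashlib

def verify_blocks(blocks, expected_size, expected_checksum):
    # expected_checksum is assumed to look like "sha1=<hexdigest>"; the real
    # cumulus.ChecksumVerifier parses its own checksum strings.
    algo, _, expected_digest = expected_checksum.partition("=")
    h = hashlib.new(algo)
    size = 0
    for data in blocks:            # each block is a bytes object
        h.update(data)
        size += len(data)
    if size != expected_size:
        raise ValueError("File size does not match!")
    if h.hexdigest() != expected_digest:
        raise ValueError("Bad checksum found")

# Example: verify two in-memory blocks against their SHA-1 digest.
digest = hashlib.sha1(b"hello world").hexdigest()
verify_blocks([b"hello ", b"world"], 11, "sha1=" + digest)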
Example #2
File: cmd_util.py, Project: h8liu/cumulus
def cmd_restore_snapshot(args):
    """ Restore a snapshot, or some subset of files from it
    """
    get_passphrase()
    store = cumulus.CumulusStore(options.store)
    snapshot = cumulus.parse_full(store.load_snapshot(args[0]))
    check_version(snapshot['Format'])
    destdir = args[1]
    paths = args[2:]

    def matchpath(path):
        "Return true if the specified path should be included in the restore."

        # No specification of what to restore => restore everything
        if len(paths) == 0: return True

        for p in paths:
            if path == p: return True
            if path.startswith(p + "/"): return True
        return False

    def warn(m, msg):
        print("Warning: %s: %s" % (m.items.name, msg))

    # Phase 1: Read the complete metadata log and create directory structure.
    metadata_items = []
    metadata_paths = {}
    metadata_segments = {}
    for m in cumulus.iterate_metadata(store, snapshot['Root']):
        pathname = os.path.normpath(m.items.name)
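        # Strip leading "/" so the restored path stays relative to destdir.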
        while os.path.isabs(pathname):
            pathname = pathname[1:]
        if not matchpath(pathname): continue

        destpath = os.path.join(destdir, pathname)
        if m.items.type == 'd':
            path = destpath
        else:
            (path, filename) = os.path.split(destpath)

        metadata_items.append((pathname, m))
        if m.items.type in ('-', 'f'):
            metadata_paths[pathname] = m
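            # Note which segments hold this file's data so Phase 2 can
            # restore files grouped by segment.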
            for block in m.data():
                (segment, object, checksum, slice) \
                    = cumulus.CumulusStore.parse_ref(block)
                if segment not in metadata_segments:
                    metadata_segments[segment] = set()
                metadata_segments[segment].add(pathname)

        try:
            if not os.path.isdir(path):
                print("mkdir:", path)
                os.makedirs(path)
        except Exception as e:
            warn(m, "Error creating directory structure: %s" % (e, ))
            continue

    # Phase 2: Restore files, ordered by how data is stored in segments.
    def restore_file(pathname, m):
        assert m.items.type in ('-', 'f')
        print("extract:", pathname)
        destpath = os.path.join(destdir, pathname)

        verifier = cumulus.ChecksumVerifier(m.items.checksum)
        size = 0
        with open(destpath, 'wb') as f:
            for block in m.data():
                data = store.get(block)
                verifier.update(data)
                size += len(data)
                f.write(data)
        if int(m.fields['size']) != size:
            raise ValueError("File size does not match!")
        if not verifier.valid():
            raise ValueError("Bad checksum found")

    while metadata_segments:
        (segment, items) = metadata_segments.popitem()
        print("+ Segment", segment)
        for pathname in sorted(items):
            if pathname in metadata_paths:
                restore_file(pathname, metadata_paths[pathname])
                del metadata_paths[pathname]

    print("+ Remaining files")
    while metadata_paths:
        (pathname, m) = metadata_paths.popitem()
        restore_file(pathname, m)

    # Phase 3: Restore special files (symlinks, devices).
    # Phase 4: Restore directory permissions and modification times.
    for (pathname, m) in reversed(metadata_items):
        print("permissions:", pathname)
        destpath = os.path.join(destdir, pathname)
        (path, filename) = os.path.split(destpath)

        # TODO: Check for ../../../paths that might attempt to write outside
        # the destination directory.  Maybe also check attempts to follow
        # symlinks pointing outside?

        try:
            if m.items.type in ('-', 'f', 'd'):
                pass
            elif m.items.type == 'l':
                try:
                    target = m.items.target
                except:
                    # Old (v0.2 format) name for 'target'
                    target = m.items.contents
                os.symlink(target, destpath)
            elif m.items.type == 'p':
                os.mkfifo(destpath)
            elif m.items.type in ('c', 'b'):
                if m.items.type == 'c':
                    mode = 0o600 | stat.S_IFCHR
                else:
                    mode = 0o600 | stat.S_IFBLK
                os.mknod(destpath, mode, os.makedev(*m.items.device))
            elif m.items.type == 's':
                pass  # TODO: Implement
            else:
                warn(m, "Unknown type code: " + m.items.type)
                continue

        except Exception as e:
            warn(m, "Error restoring: %s" % (e, ))
            continue

        try:
            uid = m.items.user[0]
            gid = m.items.group[0]
            os.lchown(destpath, uid, gid)
        except Exception as e:
            warn(m, "Error restoring file ownership: %s" % (e, ))

        if m.items.type == 'l':
            continue

        try:
            os.chmod(destpath, m.items.mode)
        except Exception as e:
            warn(m, "Error restoring file permissions: %s" % (e, ))

        try:
            os.utime(destpath, (time.time(), m.items.mtime))
        except Exception as e:
            warn(m, "Error restoring file timestamps: %s" % (e, ))

    store.cleanup()
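
Phase 2 above orders the work by segment: restore_file calls are grouped by the segment that holds each file's blocks, so each segment only needs to be fetched once, and whatever is left over is restored at the end. Here is a toy sketch of that grouping, with made-up paths and segment names standing in for the (segment, object, checksum, slice) references returned by cumulus.CumulusStore.parse_ref:

# Hypothetical path -> list of segment names holding that file's blocks.
refs = {
    "etc/passwd":     ["seg-a"],
    "etc/group":      ["seg-a"],
    "var/log/syslog": ["seg-b"],
}

# Group paths by segment, mirroring metadata_segments in the code above.
by_segment = {}
for path, segments in refs.items():
    for segment in segments:
        by_segment.setdefault(segment, set()).add(path)

# Restore one segment at a time; anything already restored is skipped.
remaining = set(refs)
while by_segment:
    segment, paths = by_segment.popitem()
    print("+ Segment", segment)
    for path in sorted(paths):
        if path in remaining:
            print("extract:", path)   # restore_file(path, ...) in the real code
            remaining.remove(path)

print("+ Remaining files")
for path in sorted(remaining):
    print("extract:", path)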