Example #1
0
 def __init__(self, vdev, dataset, idx):
     """Load the System Attribute (SA) registry and layout tables.

     vdev    -- virtual device used to read blocks
     dataset -- object set the SA dnodes live in
     idx     -- object id of the SA master attribute dnode
     """
     self._sa = None
     self._sa_attrs = idx
     self._sa_attrs_dnode = dataset[idx]
     sa_attrs_zap = zap_factory(vdev, self._sa_attrs_dnode)
     self._sa_layout_id = sa_attrs_zap['LAYOUTS']
     self._sa_registry_id = sa_attrs_zap['REGISTRY']
     registry = dataset[self._sa_registry_id]
     layout = dataset[self._sa_layout_id]
     print("[+] SA registry: %s" % (str(registry)))
     print("[+] SA layout  : %s" % (str(layout)))
     self._r_zap = zap_factory(vdev, registry)
     self._l_zap = zap_factory(vdev, layout)
     # Registry values pack three fields into one 64-bit word:
     # 64      56      48      40      32      24      16      8       0
     # +-------+-------+-------+-------+-------+-------+-------+-------+
     # |        unused         |      len      | bswap |   attr num    |
     # +-------+-------+-------+-------+-------+-------+-------+-------+
     self._reg = {}
     for attr_name in self._r_zap.keys():
         packed = self._r_zap[attr_name]
         attr_num = packed & 0xffff
         attr_len = (packed >> 24) & 0xffff
         self._reg[attr_num] = {'len': attr_len, 'name': attr_name.lower()}
     # Each layout value is an array of big-endian 16-bit attribute
     # numbers; resolve every one through the registry built above.
     self._lay = {}
     for layout_name in self._l_zap.keys():
         raw = self._l_zap[layout_name]
         count = len(raw) // 2
         attr_ids = struct.unpack(">%dH" % count, raw[:count * 2])
         self._lay[layout_name] = [self._reg[a] for a in attr_ids]
 def _export_dir(self, csv_obj, dir_node_id, dir_prefix='/'):
     """Recursively write one CSV row per entry under dir_node_id.

     csv_obj    -- csv writer receiving [object id, size, path] rows
     dir_prefix -- path prefix for this directory's entries
     """
     print("[+]  Exporting directory object {}".format(dir_node_id))
     dir_dnode = self[dir_node_id]
     if dir_dnode is None:
         # Unreadable directory: record a placeholder row, stop descending.
         csv_obj.writerow([dir_node_id, -1, dir_prefix])
         return
     zap = zap_factory(self._vdev, dir_dnode)
     if zap is None:
         return
     for name in sorted(zap.keys()):
         value = zap[name]
         # High nibble of the ZAP value is the type code, the rest the id.
         type_code = TYPECODES[value >> 60]
         obj_id = value & ~(15 << 60)
         entry_dnode = self[obj_id]
         size = entry_dnode.bonus.zp_size if entry_dnode is not None else -1
         full_name = dir_prefix + name
         print("{} {}".format(obj_id, full_name))
         if type_code == 'f':
             csv_obj.writerow([obj_id, size, full_name])
         elif type_code == 'l':
             csv_obj.writerow([obj_id, size, full_name + " -> ..."])
         elif type_code == 'd':
             csv_obj.writerow([obj_id, 0, full_name + '/'])
             self._export_dir(csv_obj, obj_id, dir_prefix=full_name + '/')
    def analyse(self, name=""):
        """Sanity-check the dataset: locate the master node, load the SA
        tables, resolve the root directory, then dump the first dnodes.

        name -- optional label stored on the dataset for later reporting
        """
        self.name = name
        if self.broken:
            print("[-]  Dataset is broken")
            return
        # Read the master node
        master_dnode = self[1]
        if master_dnode is None:
            print("[-]  Master node missing/unreachable")
            return
        print("[+]  Master node", master_dnode)
        if master_dnode.type != 21:
            # Fixed garbled message ("imax_obj_ids" pasted mid-string);
            # matches the sibling analyse() implementation.
            print("[-]  Master node object is of wrong type")
            return
        z = zap_factory(self._vdev, master_dnode)
        if z:
            self._rootdir_id = z["ROOT"]
            if self._rootdir_id is None:
                z.debug()

            # try load System Attribute Layout and registry:
            try:
                self._sa = SystemAttr(self._vdev, self, z["SA_ATTRS"])
            except Exception as e:
                print("[-] Unable to parse System Attribute tables: %s" %
                      (str(e)))

        if self._rootdir_id is None:
            print("[-]  Root directory ID is not in master node")
            return
        self.rootdir_dnode = self[self._rootdir_id]
        if self.rootdir_dnode is None:
            print("[-]  Root directory dnode missing/unreachable")
            return
        if self.rootdir_dnode.type != 20:
            # Report but keep going: the listing below may still help.
            print("[-]  Root directory object is of wrong type")
        print("----------------------------------")
        print("[+]  First block of the object set:")
        for n in range(1, 32):  # range(self.max_obj_id):
            try:
                d = self[n]
            except Exception:
                # Narrowed from a bare except; broken block tree is
                # reported below as an unreachable dnode.
                d = None
            if d is None:
                # Bad - very likely the block tree is broken
                print("[-]  Object set (partially) unreachable")
            print("[+]  dnode[{:>2}]={}".format(n, d))
 def traverse_dir(self, dir_dnode_id, depth=1, dir_prefix='/'):
     """Print an `ls -lR`-style listing of the directory dir_dnode_id.

     depth      -- how many directory levels to descend below this one
     dir_prefix -- path prefix printed before each entry name
     """
     dir_dnode = self[dir_dnode_id]
     if dir_dnode is None:
         print("[-]  Directory dnode {} unreachable".format(dir_dnode_id))
         return
     zap = None
     try:
         zap = zap_factory(self._vdev, dir_dnode)
     except Exception:
         # Narrowed from bare except; failure is reported just below.
         pass
     if zap is None:
         print("[-]  Unable to create ZAP object")
         return
     # Permission bit / character pairs, user-group-other order.
     perm_bits = ((MODE_UR, 'r'), (MODE_UW, 'w'), (MODE_UX, 'x'),
                  (MODE_GR, 'r'), (MODE_GW, 'w'), (MODE_GX, 'x'),
                  (MODE_OR, 'r'), (MODE_OW, 'w'), (MODE_OX, 'x'))
     for name in sorted(zap.keys()):
         value = zap[name]
         # High nibble is the entry type code, remaining bits the object id.
         t = value >> 60
         v = value & ~(15 << 60)
         k = TYPECODES[t]
         entry_dnode = self[v]
         if entry_dnode is None:
             mode = "?????????"
             size = "?"
         else:
             try:
                 mode_word = entry_dnode.bonus.zp_mode
                 size = entry_dnode.bonus.zp_size
                 # Render an rwxrwxrwx string from the mode bits.
                 mode = "".join(ch if (mode_word & bit) else '-'
                                for bit, ch in perm_bits)
             except Exception:
                 # Missing/short bonus data: show unknowns instead of dying.
                 mode = "?????????"
                 size = "?"
         print("{}{} {:>8} {:>14} {}{}".format(k, mode, v, size, dir_prefix,
                                               name))
         if k == 'd' and depth > 0:
             self.traverse_dir(v,
                               depth=depth - 1,
                               dir_prefix=dir_prefix + name + '/')
    def readdir(self, dir_dnode_id, inoderoot, relpath):
        """Return a list of zfsnode children of directory dir_dnode_id.

        inoderoot -- inode namespace root passed through to each zfsnode
        relpath   -- path of this directory, used for logging and children

        Returns None when the directory dnode itself is unreachable, and
        an empty list when its ZAP cannot be parsed.
        """
        print("r> [%d] dir %s" % (dir_dnode_id, relpath))
        dir_dnode = self[dir_dnode_id]
        r = []
        if dir_dnode is None:
            print("[-]  Directory dnode {} unreachable".format(dir_dnode_id))
            return None
        zap = None
        try:
            zap = zap_factory(self._vdev, dir_dnode)
        except Exception:
            # Narrowed from bare except; failure is reported just below.
            pass
        if zap is None:
            print("[-]  Unable to create ZAP object")
            return []
        keys = sorted(zap.keys())
        print("vvvvvvvv dir %s vvvvvvvvvvv" % (relpath))
        for name in keys:
            value = zap[name]
            # High nibble is the entry type code, the rest the object id.
            t = value >> 60
            v = value & ~(15 << 60)
            k = TYPECODES[t]
            entry_dnode = self[v]
            if entry_dnode is None:
                # Bug fix: the original dereferenced entry_dnode without a
                # None check and crashed on unreachable entries; skip them.
                print("[-]  Entry dnode {} unreachable, skipping {}".format(
                    v, name))
                continue
            if k == 'd' and not entry_dnode._type == 20:
                print("Mismatch")
            print("%25s> %28s: %s : %s" %
                  (entry_dnode._idxstr, color.PURPLE + name + color.END, k,
                   str(entry_dnode)))
            mode = 0
            size = 0
            try:
                size = entry_dnode.bonus.zp_size
                mode = entry_dnode.bonus.zp_mode
            except Exception:
                # No ZPL bonus data; fall back to zeros.
                pass
            r.append(
                zfsnode(self, entry_dnode, k, mode, v, size, name, inoderoot,
                        v, relpath + "/" + name))
        print("^^^^^^^^^^^^^^^^^^^^^^^^^^\n")

        return r
Example #6
0
 def analyse(self):
     """Validate the object set: check the master node, resolve the root
     directory, then dump the dnodes of the first object-set block."""
     if self.broken:
         print("[-]  Dataset is broken")
         return
     # The master node is always object 1 of the set.
     master = self[1]
     if master is None:
         print("[-]  Master node missing/unreachable")
         return
     print("[+]  Master node", master)
     if master.type != 21:
         print("[-]  Master node object is of wrong type")
         return
     master_zap = zap_factory(self._vdev, master)
     if master_zap:
         self._rootdir_id = master_zap["ROOT"]
         if self._rootdir_id is None:
             master_zap.debug()
     if self._rootdir_id is None:
         print("[-]  Root directory ID is not in master node")
         return
     root_dnode = self[self._rootdir_id]
     if root_dnode is None:
         print("[-]  Root directory dnode missing/unreachable")
         return
     if root_dnode.type != 20:
         # Report but continue; the dump below may still be useful.
         print("[-]  Root directory object is of wrong type")
     count = min(self.dnodes_per_block, self.max_obj_id + 1)
     print("[+]  First block of the object set:")
     for obj_id in range(count):
         dnode = self[obj_id]
         if dnode is None:
             # Bad - very likely the block tree is broken
             print("[-]  Object set (partially) unreachable")
             break
         print("[+]  dnode[{:>2}]={}".format(obj_id, dnode))
    def _archive(self, tar, dir_node_id, temp_dir, skip_objs, dir_prefix=''):
        """Recursively append the directory object dir_node_id to *tar*.

        tar         -- open tarfile.TarFile receiving the entries
        dir_node_id -- object id of the directory dnode to archive
        temp_dir    -- scratch directory used to stage extracted files
        skip_objs   -- collection of object ids to skip on request
        dir_prefix  -- archive path prefix for this directory's entries

        Regular files are first extracted to a temp file and then streamed
        into the tar; directories recurse; symlinks take their target from
        inline bonus data or, if too long, from the file content.
        """
        print("[+]  Archiving directory object {}".format(dir_node_id))
        dir_dnode = self[dir_node_id]
        if dir_dnode is None:
            print("[-]  Archiving failed")
            return
        zap = zap_factory(self._vdev, dir_dnode)
        if zap is None:
            print("[-]  Archiving failed")
            return
        # Staging file, reused for every regular file in this directory.
        tmp_name = os.path.join(temp_dir, "extract.tmp")
        keys = sorted(zap.keys())
        for name in keys:
            value = zap[name]
            # ZAP directory value: high nibble = type code, rest = object id.
            t = value >> 60
            v = value & ~(15 << 60)
            k = TYPECODES[t]
            if v in skip_objs:
                print("[+]  Skipping {} ({}) per request".format(name, v))
                continue
            # Only directories, regular files and symlinks are archived.
            if k in ['d', 'f', 'l']:
                entry_dnode = self[v]
                if entry_dnode is None:
                    print("[-]  Skipping unreadable object")
                    continue
                file_info = entry_dnode.bonus
                full_name = dir_prefix + name
                print("[+]  Archiving {} ({} bytes)".format(
                    name, file_info.size()))
                if k == 'f':
                    # A partial extraction is still archived, but renamed so
                    # the corruption is visible in the archive listing.
                    success = self.extract_file(v, tmp_name)
                    if not success:
                        full_name += "._corrupted"
                    tar_info = tar.gettarinfo(name=tmp_name, arcname=full_name)
                    tar_info.uname = ""
                    tar_info.gname = ""
                elif k == 'd':
                    tar_info = tarfile.TarInfo()
                    tar_info.type = tarfile.DIRTYPE
                    tar_info.size = 0
                    tar_info.name = full_name
                else:
                    # Symlink entry.
                    tar_info = tarfile.TarInfo()
                    tar_info.type = tarfile.SYMTYPE
                    tar_info.size = 0
                    tar_info.name = full_name
                    if file_info.zp_size > len(file_info.zp_inline_content):
                        # Link target is in the file content
                        linkf = FileObj(self._vdev, entry_dnode)
                        link_target = linkf.read(file_info.zp_size)
                        if link_target is None or len(
                                link_target) < file_info.zp_size:
                            print(
                                "[-]  Insufficient content for symlink target")
                            # entry_dnode.dump_data('{}/dnode_{}.raw'.format(temp_dir, v))
                            # raise Exception("Insufficient link target content")
                            continue
                        tar_info.linkname = safe_decode_string(link_target)
                    else:
                        # Link target is inline in the bonus data
                        tar_info.linkname = safe_decode_string(
                            file_info.zp_inline_content[:file_info.zp_size])

                #tar_info.mtime = file_info.zp_mtime
                #tar_info.mode = file_info.zp_mode  # & 0x1ff
                #tar_info.uid = file_info.zp_uid
                #tar_info.gid = file_info.zp_gid

                # print("[+]  Archiving {} bytes from {}".format(tar_info.size, tar_info.name))
                # f = FileObj(self._vdev, entry_dnode) if k == 'f' else None
                try:
                    if k == 'f':
                        # Stream the staged file into the archive, then
                        # delete the staging file.
                        if os.path.isfile(tmp_name):
                            with open(tmp_name, 'rb') as f:
                                tar.addfile(tar_info, f)
                            os.unlink(tmp_name)
                    else:
                        tar.addfile(tar_info)
                except:
                    # NOTE(review): bare except deliberately keeps archiving
                    # going after a single bad entry.
                    print("[-]  Archiving {} failed".format(tar_info.name))
                if k == 'd':
                    # Descend into the subdirectory.
                    self._archive(tar,
                                  v,
                                  temp_dir,
                                  skip_objs,
                                  dir_prefix=full_name + '/')
    # NOTE(review): this span sits inside an enclosing loop (its header is
    # outside this excerpt) -- the 'continue' below targets that loop.
    try:
        mos = ObjectSet(pool_dev, root_blkptr, dvas=(dva, ))
    except Exception as e:
        print("[-] MOS read fail: %s" % (str(e)))
        continue
    # Scan every dnode in the MOS; the ds_* bonus fields read below suggest
    # type 16 marks DSL dataset objects -- TODO confirm against DMU types.
    for n in range(len(mos)):
        d = mos[n]
        print("[+]  dnode[{:>3}]={}".format(n, d))
        if d and d.type == 16:
            datasets[n] = d

            if d.bonus.ds_num_children > 0:
                # Follow the dataset's DSL directory to its child-directory
                # ZAP object and dump it.
                ds_dir_obj = d.bonus.ds_dir_obj
                dir_obj = mos[ds_dir_obj]
                child_dir_zap = mos[dir_obj.bonus.dd_child_dir_zapobj]
                dir_obj_zap = zap_factory(pool_dev, child_dir_zap)
                print(str(dir_obj_zap))

# Walk from the MOS object directory (object 1) to the root dataset's
# child-directory ZAP and list its child datasets.
# Fixed: the original printed a literal unfilled '{}' placeholder.
print("[+] root dataset")
rds_z = mos[1]
rds_zap = zap_factory(pool_dev, rds_z)
rds_id = rds_zap['root_dataset']
rdir = mos[rds_id]
cdzap_id = rdir.bonus.dd_child_dir_zapobj
cdzap_z = mos[cdzap_id]
cdzap_zap = zap_factory(pool_dev, cdzap_z)
for k, v in cdzap_zap._entries.items():
    # Names starting with '$' are special entries, not child datasets.
    if not k.startswith('$'):
        child = mos[v]
        cds = child.bonus.dd_head_dataset_obj
        print("child %s with dataset %d" % (k, cds))
Example #9
0
# Collect type-16 dnodes (datasets) from each redundant copy of the MOS.
datasets = {}

# Try all copies of the MOS
for dva_index in range(3):
    mos = ObjectSet(pool_dev, root_blkptr, dvas=(dva_index, ))
    for obj_id in range(len(mos)):
        dnode = mos[obj_id]
        # print("[+]  dnode[{:>3}]={}".format(obj_id, dnode))
        if dnode and dnode.type == 16:
            datasets[obj_id] = dnode

print("[+] add one level of child datasets")
# Walk object 1 (the MOS object directory) to the root dataset's DSL dir,
# then enumerate its child-directory ZAP for one level of child datasets.
try:
    rds_z = mos[1]
    rds_zap = zap_factory(pool_dev, rds_z)
    rds_id = rds_zap['root_dataset']
    rdir = mos[rds_id]
    cdzap_id = rdir.bonus.dd_child_dir_zapobj
    cdzap_z = mos[cdzap_id]
    cdzap_zap = zap_factory(pool_dev, cdzap_z)
    for k, v in cdzap_zap._entries.items():
        # Names starting with '$' are special entries, not child datasets.
        if not k[0:1] == '$':
            child = mos[v]
            cds = child.bonus.dd_head_dataset_obj
            print("[+] child %s with dataset %d" % (k, cds))
            # mos[cds] points to a 'zap' with "bonus  DSL dataset"
            datasets[cds] = mos[cds]
            if args.child:
                DS_TO_ARCHIVE.append(cds)
# NOTE(review): the bare-except handler body continues past this excerpt.
except: