Example #1
0
def main():
    """Upload all staged files, then notify about what was transferred.

    Sends a notification only when at least one file was uploaded;
    otherwise just reports that there was nothing to do.
    """
    notifier = util.Notifier()
    count, size, bucket_size = upload_all(util.STAGING_PATH)
    if not count:
        print("nothing to do")
        return
    message = ("uploaded %d files of total size %s; new bucket size is %s"
               % (count, util.sizeof_fmt(size), util.sizeof_fmt(bucket_size)))
    notifier.send(message)
Example #2
0
def main():
    """Download all pending files and report transfer statistics on stdout."""
    started = time.time()
    count, size, bucket_size = download_all(util.UNSTAGING_PATH)
    elapsed = time.time() - started
    if not count:
        print("nothing to do")
        return
    print("downloaded %d files of total size %s in %f seconds; total bucket size is %s"
          % (count, util.sizeof_fmt(size), round(elapsed, 1),
             util.sizeof_fmt(bucket_size)))
Example #3
0
 def write(self, path):
     # Persist this column's accumulated buffer as "<path><name>.npy".
     # Does nothing when the column is inactive.
     if self.active:
         # The 'y' and 'pf' columns are flat boolean vectors; every other
         # column holds a list of row arrays that must be stacked first.
         self.data = np.array(self.data, dtype='bool') \
             if self.name == 'y' or self.name == 'pf' else np.vstack(self.data)
         print "Writing {:}, dtype={:}, size={:}".format(self.name, str(self.data.dtype),
                                                         util.sizeof_fmt(self.data.nbytes))
         np.save(path + self.name, self.data)
    def getFileAtUSN(self, filename, filerecord, filekey, usn, previousVersion=None, exactSize=True):
        """Reassemble one version of a file from NAND pages tagged with ``usn``.

        For every logical block of the file's data fork, select the page that
        was current at update sequence number ``usn`` and decrypt it with
        ``filekey``.  Blocks with no surviving page are filled either from
        ``previousVersion`` (a list of already-recovered page buffers) or
        with blank pages.  The reassembled data is written out via
        writeUndeletedFile and the list of page buffers is returned.
        """
        missing_pages = 0
        decrypt_offset = 0
        file_pages = []
        logicalSize = filerecord.dataFork.logicalSize
        # Walk the data fork's extents in order; bn is the logical block number.
        for extent in self.volume.getAllExtents(filerecord.dataFork, filerecord.fileID):
            for bn in xrange(extent.startBlock, extent.startBlock + extent.blockCount):
                pn = self.pagesForLBN(bn).get(usn) # None when no page maps this LBN at this USN
                if pn:
                    # Several physical pages may exist for this LBN/USN; the
                    # last entry is used for decryption.
                    clear = self.decryptFileBlock(pn[-1], filekey, bn, decrypt_offset)
                    file_pages.append(clear)
                elif previousVersion:
                    # Reuse the corresponding page from the older version.
                    file_pages.append(previousVersion[len(file_pages)])
                else:
                    file_pages.append(self.blankPage)
                    missing_pages += 1
                # decryptFileBlock is fed the byte offset within the file.
                decrypt_offset += self.pageSize

        print "Recovered %d:%s %d missing pages, size %s, created %s, contentModDate %s" % \
            (filerecord.fileID, filename.encode("utf-8"), missing_pages, sizeof_fmt(logicalSize), hfs_date(filerecord.createDate), hfs_date(filerecord.contentModDate))
        # Output name encodes fileID and USN; an "OK_" prefix marks files
        # recovered with no missing pages.
        filename =  "%d_%d_%s" % (filerecord.fileID, usn, filename)
        if missing_pages == 0:
            filename = "OK_" + filename
            self.okfiles += 1
        data = "".join(file_pages)
        if exactSize:
            # Trim page-alignment padding past the fork's logical size.
            data = data[:logicalSize]
        self.writeUndeletedFile(filename, data)
        return file_pages
 def dump_nand(self, filename):
     """Stream a full NAND image from the device into ``filename``.

     The device first answers CMD_DUMP with the total dump size as a
     little-endian uint64, then streams raw data.  Progress and a SHA1
     digest of the received bytes are reported on stdout; a size mismatch
     at the end is flagged as a failure.
     """
     f = open(filename, "wb")
     self.send_command(CMD_DUMP)
     zz = self.s.recv(8)
     totalSize = struct.unpack("<Q", zz)[0]
     recvSize = 0
     print "Dumping %s NAND to %s" % (sizeof_fmt(totalSize), filename)
     pbar = ProgressBar(totalSize)
     pbar.start()
     h = hashlib.sha1()
     while recvSize < totalSize:
         pbar.update(recvSize)
         d = self.s.recv(8192 * 2)
         if not d or len(d) == 0:
             # Connection closed early; the size check below reports FAIL.
             break
         h.update(d)
         f.write(d)
         recvSize += len(d)
     pbar.finish()
     f.close()
     print "NAND dump time : %s" % str(
         datetime.timedelta(seconds=pbar.seconds_elapsed))
     print "SHA1: %s" % h.hexdigest()
     if recvSize != totalSize:
         print "dump_nand FAIL"
Example #6
0
    def search(self, search_string, free_only=True, include_headers=True):
        """
        Search the given string search_string on the Play Store.

        search_string   -- the string to search on the Play Store
        free_only       -- True if only costless apps should be searched for
        include_headers -- True if the result table should show column names

        Returns the list of result rows (header row included when
        requested), or None when there was nothing to show.
        """
        try:
            results = self.api.search(search_string)
        except IndexError:
            results = []
        if not results:
            logger.info("No result")
            return
        all_results = []
        if include_headers:
            # Name of the columns
            col_names = ["Title", "Creator", "Size", "Downloads", "Last Update", "AppID", "Version", "Rating"]
            all_results.append(col_names)
        # Compute results values
        for doc in results:
            for cluster in doc["child"]:
                for app in cluster["child"]:
                    # skip that app if it not free
                    # or if it's beta (pre-registration)
                    if ('offer' not in app  # beta apps (pre-registration)
                            or free_only
                            and app['offer'][0]['checkoutFlowRequired']  # not free to download
                    ):
                        continue
                    details = app['details']['appDetails']
                    detail = [app['title'],
                              app['creator'],
                              util.sizeof_fmt(int(details['installationSize']))
                              if int(details['installationSize']) > 0 else 'N/A',
                              details['numDownloads'],
                              details['uploadDate'],
                              app['docid'],
                              details['versionCode'],
                              "%.2f" % app["aggregateRating"]["starRating"]
                              ]
                    all_results.append(detail)

        if not all_results:
            # Bug fix: with include_headers=False and every app filtered out
            # (paid/beta), all_results[0] below raised IndexError.
            logger.info("No result")
            return

        # Print a nice table
        col_width = []
        for column_indice in range(len(all_results[0])):
            col_length = max([len("%s" % row[column_indice]) for row in all_results])
            col_width.append(col_length + 2)

        for result in all_results:
            for indice, item in enumerate(result):
                out = "".join(str(item).strip().ljust(col_width[indice]))
                try:
                    print(out, end='')
                except UnicodeEncodeError:
                    # Terminal encoding can't represent the title: degrade
                    # to a replaced UTF-8 byte string rather than crash.
                    out = out.encode('utf-8', errors='replace')
                    print(out, end='')
            print()
        return all_results
Example #7
0
    def load_app_info_table(self):
        """Populate the application-info table widget.

        Builds one (label, value, optional click handler) entry per metadata
        item extracted from the APK and its static analysis, then renders the
        entries alphabetically, adding a "Show" button wherever a handler
        exists.
        """
        rows = [
            ("Application Name", self.apk.get_app_name(), None),
            ("Application Size",
             util.sizeof_fmt(os.path.getsize(self.apk_path)), None),
            ("Android Version Name", self.apk.get_androidversion_name(), None),
            ("Android Version Code", self.apk.get_androidversion_code(), None),
            ("Android Package Name", self.apk.get_package(), None),
            ("Signature Name", self.apk.get_signature_name(),
             self.show_signature),
            ("Uses Dynamic Code Loading", str(analysis.is_dyn_code(self.x)),
             self.show_dyncode),
            ("Uses Reflection", str(analysis.is_reflection_code(self.x)),
             self.show_reflection),
            ("Uses Crypto", str(analysis.is_crypto_code(self.x)),
             self.show_cryptocode),
            ("Privacy Leaks", str(len(self.get_privacy_leaks())),
             self.show_privacy_leaks),
            ("Number of Providers", str(len(self.apk.get_providers())),
             self.show_providers),
            ("Number of Activities", str(len(self.apk.get_activities())),
             self.show_activities),
            ("Number of Services", str(len(self.apk.get_services())),
             self.show_services),
            ("Number of Libraries", str(len(self.apk.get_libraries())),
             self.show_libraries),
            ("Number of Permissions", str(len(self.get_uses_permissions())),
             self.show_permissions),
        ]
        self.info = {label: value for label, value, _ in rows}
        self.info_actions = {label: handler for label, _, handler in rows}

        table = self.ui.appInfoTable
        table.setRowCount(len(self.info))
        table.setColumnWidth(1, 200)
        table.horizontalHeader().setResizeMode(0, QtGui.QHeaderView.Stretch)
        for row, label in enumerate(sorted(self.info)):
            handler = self.info_actions[label]
            table.setItem(row, 0, QtGui.QTableWidgetItem(label))
            table.setItem(row, 1, QtGui.QTableWidgetItem(self.info[label]))
            if handler is not None:
                # Only rows with a handler get an action button.
                button = QtGui.QPushButton()
                button.setText("Show")
                button.clicked.connect(handler)
                table.setCellWidget(row, 2, button)
Example #8
0
    def to_string(self) -> bool:
        """Print a human-readable summary of this image set.

        Reports the image dimensions, the number of images, and the
        formatted total size.  Always returns True.
        """
        summary = [
            'Image size: {0}x{1}'.format(self.width, self.height),
            '-Total numbers: {}'.format(self.num),
            '-Total size: {}\n'.format(sizeof_fmt(self.total_size)),
        ]
        for line in summary:
            print(line)

        return True
 def AddCurrentItem(self, item):
     """Add a directory to the selection list and update the running totals."""
     if item not in self.DirList:
         self.DirList.append(item)
         self.lstSelectedDir.addItem(item)
         # Keep aggregate photo count/size in sync with the list widget.
         self.TotalQty += count_files(item)
         self.TotalSize += file_size(item)
         self.sb.showMessage('Photo Selected: ' + str(self.TotalQty) +
             ' (' + sizeof_fmt(self.TotalSize) + ')')
     else:
         # Duplicate selection: warn the user instead of double-counting.
         msg = "The directory '" + item + "' already exist into the list"
         reply = QtGui.QMessageBox.information(self, 'Warning', msg, QtGui.QMessageBox.Ok)
    def initGeometry(self, d):
        """Derive and cache the NAND geometry (CEs, blocks, pages, bank
        layout) from the device-info dictionary ``d``.
        """
        self.metaSize = d.get("meta-per-logical-page", 0)
        if self.metaSize == 0:
            # Default per-page metadata size when the dump does not say.
            self.metaSize = 12
        # A dumped page is data + metadata + an 8-byte header unless the
        # dump declares its own page size.
        dumpedPageSize = d.get("dumpedPageSize", d["#page-bytes"] + self.metaSize + 8)
        self.dump_size=  d["#ce"] * d["#ce-blocks"] * d["#block-pages"] * dumpedPageSize
        self.totalPages = d["#ce"] * d["#ce-blocks"] * d["#block-pages"]
        nand_size = d["#ce"] * d["#ce-blocks"] * d["#block-pages"] * d["#page-bytes"]
        hsize = sizeof_fmt(nand_size)
        self.bfn = d.get("boot-from-nand", False)
        self.ppn = d.get("ppn-device", False)
        self.dumpedPageSize = dumpedPageSize
        self.pageSize = d["#page-bytes"]
        self.bootloaderBytes = d.get("#bootloader-bytes", 1536)
        self.logicalPageSize = d.get("logical-page-size", self.pageSize)
        # Erased NAND reads back as all 0xFF bytes.
        self.emptyBootloaderPage = "\xFF" * self.bootloaderBytes
        self.blankPage = "\xFF" * self.pageSize
        self.nCEs =d["#ce"]
        self.blocksPerCE = d["#ce-blocks"]
        self.pagesPerBlock = d["#block-pages"]
        self.pagesPerCE = self.blocksPerCE * self.pagesPerBlock
        self.vendorType = d["vendor-type"]
        self.deviceReadId = d.get("device-readid", 0)
        self.banks_per_ce_vfl = d["banks-per-ce"]

        if self.ppn:
            # PPN devices expose extra addressing bit widths.
            self.slc_pages = d.get("slc-pages", 0)
            self.block_bits =  d.get("block-bits", 0)
            self.cau_bits = d.get("cau-bits", 0)
            self.page_bits = d.get("page-bits", 0)

        if d.has_key("metadata-whitening"):
            self.metadata_whitening = (d["metadata-whitening"].data == "\x01\x00\x00\x00")
        # Physical banks per CE: from the chip database, from the PPN
        # "caus-ce" field, or assume 1 as a last resort.
        if nand_chip_info.has_key(self.deviceReadId):
            self.banks_per_ce_physical = nand_chip_info.get(self.deviceReadId)[7]
        elif self.ppn:
            self.banks_per_ce_physical = struct.unpack("<L", d["caus-ce"].data)[0]
        else:
            #raise Exception("Unknown deviceReadId %x" % self.deviceReadId)
            print "!!! Unknown deviceReadId %x, assuming 1 physical bank /CE, will probably fail" % self.deviceReadId
            self.banks_per_ce_physical = 1
        print "Chip id 0x%x banks per CE physical %d" % (self.deviceReadId, self.banks_per_ce_physical)
        self.blocks_per_bank = self.blocksPerCE / self.banks_per_ce_physical
        # If blocksPerCE is a power of two the bank address space is dense;
        # otherwise each bank is padded out to the next power of two.
        if self.blocksPerCE & (self.blocksPerCE-1) == 0:
            self.bank_address_space = self.blocks_per_bank
            self.total_block_space = self.blocksPerCE
        else:
            bank_address_space = next_power_of_two(self.blocks_per_bank)
            self.bank_address_space = bank_address_space
            self.total_block_space = ((self.banks_per_ce_physical-1)*bank_address_space) + self.blocks_per_bank
        self.bank_mask = int(math.log(self.bank_address_space * self.pagesPerBlock,2))
        print "NAND geometry : %s (%d CEs (%d physical banks/CE) of %d blocks of %d pages of %d bytes data, %d bytes metdata)" % \
            (hsize, self.nCEs, self.banks_per_ce_physical, self.blocksPerCE, self.pagesPerBlock, self.pageSize, d["meta-per-logical-page"])    
Example #11
0
    def load_app_info_table(self):
        """Collect APK metadata into self.info / self.info_actions and render
        the info table, with a "Show" button on every row that has an action.
        """
        self.info = {
            "Application Name": self.apk.get_app_name(),
            "Application Size": util.sizeof_fmt(os.path.getsize(self.apk_path)),
            "Android Version Name": self.apk.get_androidversion_name(),
            "Android Version Code": self.apk.get_androidversion_code(),
            "Android Package Name": self.apk.get_package(),
            "Signature Name": self.apk.get_signature_name(),
            "Uses Dynamic Code Loading": str(analysis.is_dyn_code(self.x)),
            "Uses Reflection": str(analysis.is_reflection_code(self.x)),
            "Uses Crypto": str(analysis.is_crypto_code(self.x)),
            "Privacy Leaks": str(len(self.get_privacy_leaks())),
            "Number of Providers": str(len(self.apk.get_providers())),
            "Number of Activities": str(len(self.apk.get_activities())),
            "Number of Services": str(len(self.apk.get_services())),
            "Number of Libraries": str(len(self.apk.get_libraries())),
            "Number of Permissions": str(len(self.get_uses_permissions())),
        }
        # None marks an informational row without a "Show" button.
        self.info_actions = {
            "Application Name": None,
            "Application Size": None,
            "Android Version Name": None,
            "Android Version Code": None,
            "Android Package Name": None,
            "Signature Name": self.show_signature,
            "Uses Dynamic Code Loading": self.show_dyncode,
            "Uses Reflection": self.show_reflection,
            "Uses Crypto": self.show_cryptocode,
            "Privacy Leaks": self.show_privacy_leaks,
            "Number of Providers": self.show_providers,
            "Number of Activities": self.show_activities,
            "Number of Services": self.show_services,
            "Number of Libraries": self.show_libraries,
            "Number of Permissions": self.show_permissions,
        }
        info_table = self.ui.appInfoTable
        info_table.setRowCount(len(self.info))
        info_table.setColumnWidth(1, 200)
        info_table.horizontalHeader().setResizeMode(0, QtGui.QHeaderView.Stretch)
        for row, key in enumerate(sorted(self.info)):
            action = self.info_actions[key]
            info_table.setItem(row, 0, QtGui.QTableWidgetItem(key))
            info_table.setItem(row, 1, QtGui.QTableWidgetItem(self.info[key]))
            if action is not None:
                show_button = QtGui.QPushButton()
                show_button.setText("Show")
                show_button.clicked.connect(action)
                info_table.setCellWidget(row, 2, show_button)
Example #12
0
def main():
    """Validate files arriving from other VMs and send a summary notification.

    Builds a report of any errors plus per-VM accept counts/sizes; sends a
    notification only when there is something to say.
    """
    notify = util.Notifier()
    errors, counts = check_incoming("/home/user/QubesIncoming",
                                    util.STAGING_PATH)
    report = []
    if errors:
        report.append("encountered %d errors:" % len(errors))
        report.extend(" * %s" % err for err in errors)
    if counts:
        total_files = sum(count for count, _ in counts.values())
        total_bytes = sum(size for _, size in counts.values())
        report.append("accepted %d files from %d VMs (%s)" %
                      (total_files, len(counts), util.sizeof_fmt(total_bytes)))
        report.extend(" * %s: %d (%s)" % (repr(vm), count, util.sizeof_fmt(size))
                      for vm, (count, size) in sorted(counts.items()))
    if report:
        notify.send("\n".join(report))
 def pushButtonClrClk(self):
     """Clear the whole directory selection after user confirmation."""
     if self.lstSelectedDir.count() > 0:
         reply = QtGui.QMessageBox.question(self,
             'Clear List', "Are you sure to remove ALL items?",
             QtGui.QMessageBox.Yes | QtGui.QMessageBox.No,
             QtGui.QMessageBox.No)
         if reply == QtGui.QMessageBox.Yes:
             # Reset both the widget and the bookkeeping totals together.
             self.lstSelectedDir.clear()
             self.DirList = []
             self.TotalQty = 0
             self.TotalSize = 0
             self.sb.showMessage('Photo Selected: ' + str(self.TotalQty) +
                 ' (' + sizeof_fmt(self.TotalSize) + ')')
Example #14
0
    def final_stats(self):
        """Print decoding statistics: data volume, data rate, frame rate,
        and a breakdown of fragment status counts.
        """
        # Cave: the rates (data rate, frame rate) calculated here are an
        # overestimate, in particular for short input videos. The reason is
        # that we start keeping time only when we get the very first result,
        # which means that we don't account for the decoding time of the
        # first `nframes_per_process` frames.
        # To get a better measure of decoding speed, use the multiprocreceiver
        # with a high `repeat` parameter.

        # fragments_ok * 64 — fragments appear to carry 64 bytes of payload
        # each (TODO confirm against the fragment format).
        nbytes = util.sizeof_fmt(self.fragments_ok*64)
        duration = time.time() - self.start
        datarate = util.sizeof_fmt(self.fragments_ok*64./duration,
                                   suffix='B/s')
        framerate = self.framecount / duration
        fmt = 'Decoded {nbytes} from {s.framecount} frames in {duration:.2f} s'
        print fmt.format(s=self, nbytes=nbytes, duration=duration)
        print 'Data rate: {}, frame rate: {:3.1f} frames/s'.format(datarate,
                                                                   framerate)
        if len(self.status_count) > 0:
            # Trailing comma keeps 'Status:' and the counts on one line (py2).
            print 'Status:',
            print ', '.join('{}={}'.format(key, value)
                            for key, value in self.status_count.iteritems())
 def pushButtonLeftClk(self):
     """Remove the currently selected directory after user confirmation."""
     if self.lstSelectedDir.currentItem() is not None:
         reply = QtGui.QMessageBox.question(self, 'Remove Item',
             "Are you sure to remove this item?",
             QtGui.QMessageBox.Yes | QtGui.QMessageBox.No, QtGui.QMessageBox.No)
         if reply == QtGui.QMessageBox.Yes:
             current = self.lstSelectedDir.currentItem()
             # Subtract this directory's contribution from the running totals.
             self.TotalQty -= count_files(current.text())
             self.TotalSize -= file_size(current.text())
             self.sb.showMessage('Photo Selected: ' + str(self.TotalQty) +
                 ' (' + sizeof_fmt(self.TotalSize) + ')')
             self.DirList.remove(current.text())
             self.lstSelectedDir.takeItem(self.lstSelectedDir.row(current))
Example #16
0
    def final_stats(self):
        """Print decoding statistics (data volume, data rate, frame rate)
        plus any fragment status counts collected during the run.
        """
        # Cave: the rates (data rate, frame rate) calculated here are an
        # overestimate, in particular for short input videos. The reason is
        # that we start keeping time only when we get the very first result,
        # which means that we don't account for the decoding time of the
        # first `nframes_per_process` frames.
        # To get a better measure of decoding speed, use the multiprocreceiver
        # with a high `repeat` parameter.

        # fragments_ok * 64 — fragments appear to carry 64 bytes of payload
        # each (TODO confirm against the fragment format).
        nbytes = util.sizeof_fmt(self.fragments_ok * 64)
        duration = time.time() - self.start
        datarate = util.sizeof_fmt(self.fragments_ok * 64. / duration,
                                   suffix='B/s')
        framerate = self.framecount / duration
        fmt = 'Decoded {nbytes} from {s.framecount} frames in {duration:.2f} s'
        print fmt.format(s=self, nbytes=nbytes, duration=duration)
        print 'Data rate: {}, frame rate: {:3.1f} frames/s'.format(
            datarate, framerate)
        if len(self.status_count) > 0:
            # Trailing comma keeps 'Status:' and the counts on one line (py2).
            print 'Status:',
            print ', '.join('{}={}'.format(key, value)
                            for key, value in self.status_count.iteritems())
Example #17
0
 def do_ls(self, p):
     """List directory ``p`` (relative to curdir): name, size, creation
     date and data-protection class for each entry."""
     dirDict = self.volume.ls((self.curdir + "/" + p).replace("//","/")) 
     if not dirDict:
         return
     for name in sorted(dirDict.keys()):
         size = ""
         protection_class = ""
         record = dirDict[name]
         # Only file records carry a fileID/data fork; folders print blanks.
         if hasattr(record, "fileID"):
             size = sizeof_fmt(record.dataFork.logicalSize)
             cprotect = self.volume.getXattr(record.fileID, "com.apple.system.cprotect")
             if cprotect:
                 # Protection class is the uint32 at offset 8 of the xattr.
                 protection_class = PROTECTION_CLASSES[struct.unpack("<L", cprotect[8:12])[0]]
         print "%s\t%s\t%s\t%s" % (name[:30].ljust(30), size.ljust(10), hfs_date(record.createDate), protection_class)
 def do_ptable(self, p):
     """Print the block-device partition table: index, name, LBA range, size."""
     pt = self.image.getPartitionTable()
     print "Block device partition table"
     # Fixed-width columns, 12 characters each.
     print "".join(
         map(lambda x: x.ljust(12),
             ["Index", "Name", "Start LBA", "End LBA", "Size"]))
     for i in xrange(len(pt)):
         p = pt[i]
         print "".join(
             map(lambda x: str(x).ljust(12), [
                 i, p.name, p.first_lba, p.last_lba,
                 sizeof_fmt(
                     (p.last_lba - p.first_lba) * self.image.pageSize)
             ]))
Example #19
0
    def _pretty_str(manifest):
        """Render *manifest* as a short multi-line summary.

        Only attributes actually present on the manifest are included; a
        missing project_root is replaced by a placeholder line.
        """
        parts = [getattr(manifest, "project_root", "(Unknown project root)")]
        if hasattr(manifest, "timestamp"):
            parts.append("\tDate: %s" % manifest.timestamp.strftime("%c"))
        if hasattr(manifest, "size"):
            parts.append("\tSize: %s" % util.sizeof_fmt(manifest.size))
        if hasattr(manifest, "git_branch"):
            parts.append("\tBranch: %s" % manifest.git_branch)
        if hasattr(manifest, "git_hash"):
            parts.append("\tHash: %s" % manifest.git_hash)

        return "\n".join(parts)
Example #20
0
    def _pretty_str(manifest):
        """Build a readable multi-line description of *manifest*, skipping
        any attribute it does not define."""
        lines = []
        lines.append(manifest.project_root
                     if hasattr(manifest, "project_root")
                     else "(Unknown project root)")
        if hasattr(manifest, "timestamp"):
            lines.append("\tDate: %s" % manifest.timestamp.strftime("%c"))
        if hasattr(manifest, "size"):
            lines.append("\tSize: %s" % util.sizeof_fmt(manifest.size))
        if hasattr(manifest, "git_branch"):
            lines.append("\tBranch: %s" % manifest.git_branch)
        if hasattr(manifest, "git_hash"):
            lines.append("\tHash: %s" % manifest.git_hash)

        return "\n".join(lines)
Example #21
0
 def json(self):
     ''' Return this photo's metadata as a plain dict (JSON-serializable).
     '''
     info = dict(
         id=self.id,
         path=self.path,
         title=self.title,
         caption=self.caption,
         size=sizeof_fmt(self.size),
         width=self.width,
         height=self.height,
         format=self.format,
         mode=self.mode,
         added_at=self.added_at,
         updated_at=self.updated_at,
         filename=self.filename(),
     )
     return info
Example #22
0
 def dumpToFile(self, outputfilename):
     """Dump this partition's logical pages (lbaoffset..last_lba) to a file."""
     hs = sizeof_fmt((self.last_lba - self.lbaoffset) * self.pageSize)
     print "Dumping partition to %s (%s)" % (outputfilename, hs)
     flags = os.O_CREAT | os.O_RDWR
     if sys.platform == "win32":
         # O_BINARY avoids newline translation on Windows.
         flags |= os.O_BINARY
     fd=os.open(outputfilename, flags)
     
     pbar = ProgressBar(self.last_lba - self.lbaoffset - 1)
     pbar.start()
     for i in xrange(self.lbaoffset, self.last_lba):
         pbar.update(i-self.lbaoffset)
         d = self.nand.readLPN(i, self.key)
         # Sanity-check the volume signature at offset 0x400 of page 0
         # ("HX" = HFSX); a mismatch suggests a wrong decryption key.
         if i == self.lbaoffset and d[0x400:0x402] != "HX":
             print "FAIL? Not HFS partition or wrong key"
         os.write(fd, d)
     pbar.finish()
     os.close(fd)
Example #23
0
    def dumpToFile(self, outputfilename):
        """Dump this partition's logical pages (lbaoffset..last_lba) to a file."""
        hs = sizeof_fmt((self.last_lba - self.lbaoffset) * self.pageSize)
        print("Dumping partition to %s (%s)" % (outputfilename, hs))
        flags = os.O_CREAT | os.O_RDWR
        if sys.platform == "win32":
            # O_BINARY avoids newline translation on Windows.
            flags |= os.O_BINARY
        fd = os.open(outputfilename, flags)

        pbar = ProgressBar(self.last_lba - self.lbaoffset - 1)
        pbar.start()
        for i in range(self.lbaoffset, self.last_lba):
            pbar.update(i - self.lbaoffset)
            d = self.nand.readLPN(i, self.key)
            # Sanity-check the volume signature at offset 0x400 of page 0.
            # NOTE(review): if readLPN returns bytes (Python 3), comparing a
            # bytes slice against the str "HX" is always unequal — confirm.
            if i == self.lbaoffset and d[0x400:0x402] != "HX":
                print("FAIL? Not HFS partition or wrong key")
            os.write(fd, d)
        pbar.finish()
        os.close(fd)
def carveEMFVolumeJournal(volume):
    """Carve the HFS+ journal for deleted file records and their file keys.

    Scans every journal sector for leaked catalog B-tree nodes (keeping
    deleted file records whose first block is no longer in use) and for
    attribute B-tree nodes carrying com.apple.system.cprotect entries,
    unwrapping per-file keys through the volume keybag.

    Returns (file_records, keys): file_records is a list of
    (name, catalog_record) tuples; keys maps fileID -> list of file keys.
    """
    journal = volume.readJournal()
    print "Journal size : %s" % sizeof_fmt(len(journal))
    hdr = journal_header.parse(journal)
    sector_size = hdr.jhdr_size
    nodeSize = volume.catalogTree.nodeSize
    print "Collecting existing file ids"
    fileIds = volume.listAllFileIds()
    print "%d file IDs" % len(fileIds.keys())
    files = {}
    keys = {}

    # Step through the journal one sector at a time, attempting to parse a
    # catalog node and an attribute node at each offset.
    for i in xrange(0, len(journal), sector_size):
        for k, v in carveBtreeNode(journal[i:i + nodeSize], HFSPlusCatalogKey,
                                   HFSPlusCatalogData):
            if v.recordType == kHFSPlusFileRecord:
                name = getString(k)
                # Deduplicate identical catalog records by hashing the key.
                h = hashlib.sha1(HFSPlusCatalogKey.build(k)).digest()
                if files.has_key(h):
                    continue
                if not fileIds.has_key(v.data.fileID):
                    #we only keep files where the first block is not marked as in use
                    if volume.isBlockInUse(
                            v.data.dataFork.HFSPlusExtentDescriptor[0].
                            startBlock) == False:
                        print "Found deleted file record", v.data.fileID, name
                        files[h] = (name, v)
        for k, v in carveBtreeNode(journal[i:i + nodeSize], HFSPlusAttrKey,
                                   HFSPlusAttrData):
            if getString(k) == "com.apple.system.cprotect":
                if not fileIds.has_key(k.fileID):
                    filekeys = keys.setdefault(k.fileID, [])
                    try:
                        cprotect = cprotect_xattr.parse(v.data)
                    except:
                        continue
                    #assert cprotect.xattr_major_version == 2
                    # Unwrap the per-file key with the matching class key.
                    filekey = volume.keybag.unwrapKeyForClass(
                        cprotect.persistent_class, cprotect.persistent_key)
                    if filekey and not filekey in filekeys:
                        print "Found key for file", k.fileID
                        filekeys.append(filekey)

    return files.values(), keys
Example #25
0
    def search(self, results_list, search_string, nb_results, free_only=True, include_headers=True):
        """Search the Play Store and collect up to ``nb_results`` result rows.

        results_list    -- passed through to raw_search
        search_string   -- the query string
        nb_results      -- maximum number of result rows to keep
        free_only       -- skip apps that require a checkout flow
        include_headers -- prepend a row of column names

        Returns the list of rows (header row included when requested), or
        None when the raw search produced nothing.
        """
        try:
            results = self.raw_search(results_list, search_string, nb_results)
        except IndexError:
            results = list()
        if not results:
            print("No result")
            return
        all_results = list()
        if include_headers:
            # Name of the columns
            col_names = ["Title", "Creator", "Size", "Downloads", "Last Update", "AppID", "Version", "Rating"]
            all_results.append(col_names)
        # Compute results values
        for result in results:
            if free_only and result['offer'][0]['checkoutFlowRequired']:  # if not Free to download
                continue
            entry = [result['title'],
                     result['author'],
                     util.sizeof_fmt(result['installationSize']),
                     result['numDownloads'],
                     result['uploadDate'],
                     result['docId'],
                     result['versionCode'],
                     "%.2f" % result["aggregateRating"]["starRating"]
                     ]
            if len(all_results) < int(nb_results) + 1:
                all_results.append(entry)

        # Bug fix: with include_headers=False and every result filtered out,
        # all_results is empty and all_results[0] below raised IndexError.
        if self.verbose and all_results:
            # Print a nice table
            col_width = list()
            for column_indice in range(len(all_results[0])):
                col_length = max([len("%s" % row[column_indice]) for row in all_results])
                col_width.append(col_length + 2)

            for result in all_results:
                print("".join(str("%s" % item).strip().ljust(col_width[indice]) for indice, item in
                              enumerate(result)))
        return all_results
Example #26
0
def carveEMFVolumeJournal(volume):
    """Carve the HFS+ journal for deleted file records and their file keys.

    Scans every journal sector for leaked catalog B-tree nodes (keeping
    deleted file records whose first block is no longer in use) and for
    attribute B-tree nodes carrying com.apple.system.cprotect entries,
    unwrapping per-file keys through the volume keybag.

    Returns (file_records, keys): file_records is a list of
    (name, catalog_record) tuples; keys maps fileID -> list of file keys.
    """
    journal = volume.readJournal()
    print "Journal size : %s" % sizeof_fmt(len(journal))
    hdr = journal_header.parse(journal)
    sector_size = hdr.jhdr_size
    nodeSize = volume.catalogTree.nodeSize
    print "Collecting existing file ids"
    fileIds = volume.listAllFileIds()
    print "%d file IDs" % len(fileIds.keys())
    files = {}
    keys = {}
    
    # Step through the journal one sector at a time, attempting to parse a
    # catalog node and an attribute node at each offset.
    for i in xrange(0,len(journal),sector_size):
        for k,v in carveBtreeNode(journal[i:i+nodeSize],HFSPlusCatalogKey, HFSPlusCatalogData):
            if v.recordType == kHFSPlusFileRecord:
                name = getString(k)
                # Deduplicate identical catalog records by hashing the key.
                h = hashlib.sha1(HFSPlusCatalogKey.build(k)).digest()
                if files.has_key(h):
                    continue
                if not fileIds.has_key(v.data.fileID):
                    #we only keep files where the first block is not marked as in use
                    if volume.isBlockInUse(v.data.dataFork.HFSPlusExtentDescriptor[0].startBlock) == False:
                        print "Found deleted file record", v.data.fileID, name
                        files[h] = (name,v)
        for k,v in carveBtreeNode(journal[i:i+nodeSize],HFSPlusAttrKey, HFSPlusAttrData):
            if getString(k) == "com.apple.system.cprotect":
                if not fileIds.has_key(k.fileID):
                    filekeys = keys.setdefault(k.fileID, [])
                    try:
                        cprotect = cprotect_xattr.parse(v.data)
                    except:
                        continue
                    #assert cprotect.xattr_major_version == 2
                    # Unwrap the per-file key with the matching class key.
                    filekey = volume.keybag.unwrapKeyForClass(cprotect.persistent_class, cprotect.persistent_key)
                    if filekey and not filekey in filekeys:
                        print "Found key for file", k.fileID
                        filekeys.append(filekey)
    
    return files.values(), keys
Example #27
0
def download_all(local_path):
    """Download every object from the remote bucket that is not already
    present under *local_path*.

    First reconciles files already on disk against the remote listing:
    each must still exist remotely and match its remote size, and is then
    skipped.  Remaining objects are downloaded (after a path-safety
    check) and their sizes re-verified after transfer.

    Args:
        local_path: existing local directory to download into.

    Returns:
        (total_count, total_size, bucket_size) where total_count/total_size
        describe the files downloaded in this run and bucket_size is the
        combined size of every object in the bucket.

    Raises:
        Exception: nonexistent unstaging directory, or a size mismatch.
        RuntimeError: a previously downloaded file vanished remotely.
        AssertionError: a remote key fails validate_path().
    """
    if not os.path.isdir(local_path):
        raise Exception("nonexistent unstaging directory")
    bucket = util.connect_bucket()
    all_available = {obj.key: obj.size for obj in bucket.objects.all()}
    remaining = set(all_available.keys())
    # Reconcile what is already on disk with the remote listing.
    for path, folders, files in os.walk(local_path):
        for f in files:
            lpath = os.path.join(path, f)
            rpath = os.path.relpath(lpath, local_path)
            local_size = os.stat(lpath).st_size
            if rpath not in remaining:
                raise RuntimeError(
                    "expected previously downloaded file %s of size %d to still exist remotely"
                    % (rpath, local_size))
            if local_size != all_available[rpath]:
                raise Exception(
                    "expected previously downloaded file %s to be of size %d, not %d"
                    % (rpath, all_available[rpath], local_size))
            remaining.remove(rpath)
    total_count, total_size = 0, 0
    for rpath in remaining:
        # Guard against hostile keys (e.g. "../../etc/passwd") before
        # joining them onto the local path.
        assert validate_path(rpath), "invalid path: %s" % repr(rpath)
        lpath = os.path.join(local_path, rpath)
        print("downloading", rpath, "to", lpath, "of size",
              util.sizeof_fmt(all_available[rpath]))
        # exist_ok avoids the isdir()/makedirs() race of the original
        # check-then-create pair.
        os.makedirs(os.path.dirname(lpath), exist_ok=True)
        bucket.download_file(rpath, lpath)
        local_size = os.stat(lpath).st_size
        if local_size != all_available[rpath]:
            raise Exception(
                "expected newly downloaded file %s to be of size %d, not %d" %
                (rpath, all_available[rpath], local_size))
        total_count += 1
        total_size += local_size
    bucket_size = sum(all_available.values())
    return total_count, total_size, bucket_size
    def getFileAtUSN(self,
                     filename,
                     filerecord,
                     filekey,
                     usn,
                     previousVersion=None,
                     exactSize=True):
        """Reconstruct one version of a file as it existed at journal
        sequence number *usn*.

        For every logical block of the file's data fork, look up the
        physical page recorded for that block at *usn*; when found,
        decrypt it with *filekey*.  Otherwise fall back to the page at the
        same position in *previousVersion* (the page list returned by a
        prior call), or to a blank page when neither exists.

        The recovered data is written via writeUndeletedFile under the
        name "<fileID>_<usn>_<filename>" (prefixed "OK_" when no page was
        missing), trimmed to the fork's logical size when *exactSize* is
        true.  Returns the list of decrypted pages so a subsequent call
        can pass it back as previousVersion.
        """
        missing_pages = 0
        # Byte offset within the file; fed to the decryptor as part of the
        # per-block crypto parameters.
        decrypt_offset = 0
        file_pages = []
        logicalSize = filerecord.dataFork.logicalSize
        for extent in self.volume.getAllExtents(filerecord.dataFork,
                                                filerecord.fileID):
            for bn in xrange(extent.startBlock,
                             extent.startBlock + extent.blockCount):
                pn = self.pagesForLBN(bn).get(usn)  #fail
                if pn:
                    # pn is a list of physical pages; take the last
                    # (most recent) one for this block/USN.
                    clear = self.decryptFileBlock(pn[-1], filekey, bn,
                                                  decrypt_offset)
                    file_pages.append(clear)
                elif previousVersion:
                    file_pages.append(previousVersion[len(file_pages)])
                else:
                    file_pages.append(self.blankPage)
                    missing_pages += 1
                decrypt_offset += self.pageSize

        print "Recovered %d:%s %d missing pages, size %s, created %s, contentModDate %s" % \
            (filerecord.fileID, filename.encode("utf-8"), missing_pages, sizeof_fmt(logicalSize), hfs_date(filerecord.createDate), hfs_date(filerecord.contentModDate))
        filename = "%d_%d_%s" % (filerecord.fileID, usn, filename)
        if missing_pages == 0:
            filename = "OK_" + filename
            self.okfiles += 1
        data = "".join(file_pages)
        if exactSize:
            # Drop the padding of the final page beyond the fork's true size.
            data = data[:logicalSize]
        self.writeUndeletedFile(filename, data)
        return file_pages
Example #29
0
 def dump_nand(self, filename):
     f = open(filename, "wb")
     self.send_command(CMD_DUMP)
     zz = self.s.recv(8)
     totalSize = struct.unpack("<Q", zz)[0]
     recvSize = 0
     print "Dumping %s NAND to %s" % (sizeof_fmt(totalSize), filename)
     pbar = ProgressBar(totalSize)
     pbar.start()
     h = hashlib.sha1()
     while recvSize < totalSize:
         pbar.update(recvSize)
         d = self.s.recv(8192*2)
         if not d or len(d) == 0:
             break
         h.update(d)
         f.write(d)
         recvSize += len(d)
     pbar.finish()
     f.close()
     print "NAND dump time : %s" % str(datetime.timedelta(seconds=pbar.seconds_elapsed))
     print "SHA1: %s" % h.hexdigest()
     if recvSize != totalSize:
         print "dump_nand FAIL"
Example #30
0
 def do_FTP_LIST(self, netloc, path, user, passwd):
     """Handle an FTP directory-listing request and render it as HTML.

     Connects to *netloc*, logs in with *user*/*passwd*, runs LIST on
     *path*, and writes back a 200 response containing an HTML table of
     entries (directories get a trailing '/', files show a human-readable
     size).  Any FTP failure is answered with a 504 error instead.
     """
     if not path.endswith('/'):
         self.path += '/'
     lst = []
     table = '<table class="content"><thead><tr><th align="left">Content</th><th align="right">Size</th><th align="right">Modify</th></tr></thead><tbody>'
     try:
         ftp = ftplib.FTP(netloc)
         ftp.login(user, passwd)
         response = ftp.retrlines("LIST %s" % path, lst.append)
         ftp.quit()
         for line in lst:
             self.logger.debug(line)
             # Unix-style listing: split into at most 9 fields so the
             # name (field 8) keeps any embedded spaces.
             line_split = line.split(None, 8)
             if line.startswith('d'):
                 line_split[8] += '/'
             table += '<tr><td align="left"><a href="%s%s">%s</a></td><td align="right">%s</td><td align="right">%s %s %s</td></tr>\r\n' % (
                 self.path, urlquote(line_split[8]), line_split[8], line_split[4] if line.startswith('d') else sizeof_fmt(int(line_split[4])), line_split[5], line_split[6], line_split[7])
         table += '<tr><td align="left">================</td><td align="right">==========</td><td align="right">=============</td></tr></tbody></table>\r\n'
         table += '<p>%s</p>' % response
     except Exception as e:
         self.logger.warning("FTP Exception: %r" % e)
         self.send_error(504, repr(e))
     else:
         msg = ['<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 3.2 Final//EN"><html>\n',
                '<head><style type="text/css">.content tr{font-family:Consolas,"Droid Sans Mono", Menlo, Monospace;}</style></head>',
                "<title>Directory listing for %s</title>\n" % path,
                "<body>\n<h2>Directory listing for %s</h2>\n<hr>\n" % path,
                table,
                "<hr>\n</body>\n</html>\n"]
         self.write(200, ''.join(msg), 'text/html')
Example #31
0
 def size_fmt(self):
     """Return this object's size attribute as a human-readable string."""
     nbytes = self.size
     return util.sizeof_fmt(nbytes)
Example #32
0
 def get_file_size(self):
     """Return a human-readable size string for this file.

     NOTE(review): self.file_location is passed straight to
     util.sizeof_fmt, which formats a byte count.  If file_location holds
     a filesystem path rather than a size, the intended call is probably
     util.sizeof_fmt(os.path.getsize(self.file_location)) -- confirm
     against how callers set file_location before changing.
     """
     location = self.file_location
     return util.sizeof_fmt(location)
    def readFileHax(self, filename, filerecord, filekeys):
        """Brute-force recovery of a file's contents from the YAFTL FTL.

        Phase 1: try every candidate key in *filekeys* against every
        historical version (vpn) of the file's first LBA until one page
        decrypts correctly; this fixes the file key, the USN of that
        write, and the NAND block holding it.
        Phase 2: walk the block TOCs, starting with that block and then
        the next few user blocks at or after the good USN, collecting a
        decrypted page for every LBA of the file's extents.
        Phase 3: assemble the pages in logical order (blank page for any
        gap), trim to the fork's logical size, and write the result via
        writeUndeletedFile ("OK_" prefix when nothing was missing).

        Returns True when a working key was found, False otherwise.
        """
        lba0 = self.first_lba + filerecord.dataFork.HFSPlusExtentDescriptor[
            0].startBlock
        filekey = None
        good_usn = None
        first_vpn = 0
        first_usn = 0
        hax = self.ftlhax
        print "%d versions for first lba" % len(hax.get(lba0, []))
        for k in filekeys:
            for vpn in hax.get(lba0, []):
                s, ciphertext = self.nand.ftl.YAFTL_readPage(vpn,
                                                             key=None,
                                                             lpn=None)
                if not ciphertext:
                    continue
                d = self.decryptFileBlock2(ciphertext, k, lba0, 0)
                #hexdump(d[:16])
                if isDecryptedCorrectly(d):
                    filekey = k
                    first_vpn = vpn
                    first_usn = good_usn = s.usn
                    # NAND block containing the matching virtual page.
                    block = vpn / self.nand.ftl.vfl.pages_per_sublk
                    break
        if not filekey:
            # No candidate key decrypted the first page -- give up.
            return False
        logicalSize = filerecord.dataFork.logicalSize
        missing_pages = 0
        file_pages = []
        # lbns: the file's logical blocks translated to device LBAs,
        # in file order.
        lbns = []
        for extent in self.volume.getAllExtents(filerecord.dataFork,
                                                filerecord.fileID):
            for bn in xrange(extent.startBlock,
                             extent.startBlock + extent.blockCount):
                lbns.append(self.first_lba + bn)
        # datas maps file byte offset -> (decrypted page, logical block).
        datas = {}
        # Up to 5 user blocks written at or after the known-good USN.
        usnblocksToLookAT = sorted(
            filter(lambda x: x >= good_usn, self.userblocks.keys()))[:5]
        print usnblocksToLookAT
        # Sentinel 0: the first iteration uses `block` (found above)
        # instead of a userblocks entry.
        usnblocksToLookAT.insert(0, 0)
        first_block = True
        done = False
        for usn in usnblocksToLookAT:
            if first_block:
                bbtoc = self.getBBTOC(block)
                first_block = False
            else:
                bbtoc = self.getBBTOC(self.userblocks[usn])
            for lbn in bbtoc.keys():
                if not lbn in lbns:
                    continue
                idx = lbns.index(lbn)
                s, ciphertext = self.nand.ftl.YAFTL_readPage(bbtoc[lbn],
                                                             key=None,
                                                             lpn=None)
                if not ciphertext:
                    continue
                ciphertext = self.decryptFileBlock2(ciphertext, filekey, lbn,
                                                    idx * self.pageSize)
                if idx == 0:
                    # The first page is the only one we can sanity-check.
                    if not isDecryptedCorrectly(ciphertext):
                        continue
                datas[idx * self.pageSize] = (ciphertext, lbn - self.first_lba)
                #if idx == len(lbns):
                if len(datas) == len(lbns):
                    done = True
                    break

            if done:
                break
        cleartext = ""
        decrypt_offset = 0
        for i in xrange(0, logicalSize, self.pageSize):
            if datas.has_key(i):
                ciphertext, lbn = datas[i]
                cleartext += ciphertext
            else:
                # Page never found in any scanned block TOC.
                cleartext += self.blankPage
                missing_pages += 1
            decrypt_offset += self.pageSize

        print "Recovered %d:%s %d missing pages, size %s, created %s, contentModDate %s" % \
            (filerecord.fileID, filename.encode("utf-8"), missing_pages, sizeof_fmt(logicalSize), hfs_date(filerecord.createDate), hfs_date(filerecord.contentModDate))
        filename = "%d_%d_%s" % (filerecord.fileID, first_usn, filename)
        if missing_pages == 0:
            filename = "OK_" + filename
            self.okfiles += 1
        if True:  #exactSize:
            cleartext = cleartext[:logicalSize]
        self.writeUndeletedFile(filename, cleartext)
        return True
    def readFileHax(self, filename, filerecord, filekeys):
        """Brute-force recovery of a file's contents from the YAFTL FTL.

        NOTE(review): this appears to be a duplicate of another
        readFileHax definition in this file (same name, same logic,
        different formatting); if both live in the same class, the later
        definition silently replaces the earlier one.

        Phase 1: try every key in *filekeys* against every historical
        version of the file's first LBA until one decrypts correctly,
        fixing the file key, USN and NAND block.  Phase 2: scan block
        TOCs from that block and subsequent user blocks, collecting a
        decrypted page per file LBA.  Phase 3: assemble pages in order
        (blank page for gaps), trim to logical size, and write the result
        via writeUndeletedFile ("OK_" prefix when complete).

        Returns True when a working key was found, False otherwise.
        """
        lba0 = self.first_lba + filerecord.dataFork.HFSPlusExtentDescriptor[0].startBlock
        filekey = None
        good_usn = None
        first_vpn = 0
        first_usn = 0
        hax = self.ftlhax
        print "%d versions for first lba" % len(hax.get(lba0, []))
        for k in filekeys:
            for vpn in hax.get(lba0, []):
                s, ciphertext = self.nand.ftl.YAFTL_readPage(vpn, key=None, lpn=None)
                if not ciphertext:
                    continue
                d = self.decryptFileBlock2(ciphertext, k, lba0, 0)
                #hexdump(d[:16])
                if isDecryptedCorrectly(d):
                    filekey = k
                    first_vpn = vpn
                    first_usn = good_usn = s.usn
                    # NAND block containing the matching virtual page.
                    block = vpn / self.nand.ftl.vfl.pages_per_sublk
                    break
        if not filekey:
            # No candidate key decrypted the first page -- give up.
            return False
        logicalSize = filerecord.dataFork.logicalSize
        missing_pages = 0
        file_pages = []
        # Device LBAs of the file's blocks, in file order.
        lbns = []
        for extent in self.volume.getAllExtents(filerecord.dataFork, filerecord.fileID):
            for bn in xrange(extent.startBlock, extent.startBlock + extent.blockCount):
                lbns.append(self.first_lba + bn)
        # Maps file byte offset -> (decrypted page, logical block).
        datas = {}
        # Up to 5 user blocks written at or after the known-good USN,
        # with a 0 sentinel so the first iteration uses `block`.
        usnblocksToLookAT = sorted(filter(lambda x: x >= good_usn, self.userblocks.keys()))[:5]
        print usnblocksToLookAT
        usnblocksToLookAT.insert(0, 0)
        first_block = True
        done = False
        for usn in usnblocksToLookAT:
            if first_block:
                bbtoc = self.getBBTOC(block)
                first_block = False
            else:
                bbtoc = self.getBBTOC(self.userblocks[usn])
            for lbn in bbtoc.keys():
                if not lbn in lbns:
                    continue
                idx = lbns.index(lbn)
                s, ciphertext = self.nand.ftl.YAFTL_readPage(bbtoc[lbn], key=None, lpn=None)
                if not ciphertext:
                    continue
                ciphertext = self.decryptFileBlock2(ciphertext, filekey, lbn, idx*self.pageSize)
                if idx == 0:
                    # Only the first page can be sanity-checked.
                    if not isDecryptedCorrectly(ciphertext):
                        continue
                datas[idx*self.pageSize] = (ciphertext, lbn - self.first_lba)
                #if idx == len(lbns):
                if len(datas) == len(lbns):
                    done=True
                    break

            if done:
                break
        cleartext = ""
        decrypt_offset = 0
        for i in xrange(0,logicalSize, self.pageSize):
            if datas.has_key(i):
                ciphertext, lbn = datas[i]
                cleartext += ciphertext
            else:
                # Page never found in any scanned block TOC.
                cleartext += self.blankPage
                missing_pages += 1
            decrypt_offset += self.pageSize

        print "Recovered %d:%s %d missing pages, size %s, created %s, contentModDate %s" % \
            (filerecord.fileID, filename.encode("utf-8"), missing_pages, sizeof_fmt(logicalSize), hfs_date(filerecord.createDate), hfs_date(filerecord.contentModDate))
        filename =  "%d_%d_%s" % (filerecord.fileID, first_usn, filename)
        if missing_pages == 0:
            filename = "OK_" + filename
            self.okfiles += 1
        if True:#exactSize:
            cleartext = cleartext[:logicalSize]
        self.writeUndeletedFile(filename, cleartext)
        return True
 def do_ptable(self, p):
     pt = self.image.getPartitionTable()
     print "Block device partition table"
     print "".join(map(lambda x:x.ljust(12), ["Index", "Name", "Start LBA", "End LBA", "Size"]))
     for i in xrange(len(pt)):
         p = pt[i]
         print "".join(map(lambda x:str(x).ljust(12), [i, p.name, p.first_lba, p.last_lba, sizeof_fmt((p.last_lba - p.first_lba)*self.image.pageSize)])) 
    def readFileHax(self, filename, filerecord, filekeys):
        """Brute-force recovery of a file's contents, weave-sequence variant.

        Phase 1: try every key in *filekeys* against every version of the
        file's first LBA (via findAllVersions) until one decrypts
        correctly; record the key and the weave sequence number of that
        write.  Phase 2: scan pages in the weave-sequence window
        [weaveSeq, weaveSeq+50000], collecting a decrypted page for every
        LBA of the file's extents.  Phase 3: assemble pages in order
        (blank page for gaps), trim to logical size, and write the result
        via writeUndeletedFile ("OK_" prefix when complete).

        Returns True when a working key was found, False otherwise.
        """
        lba0 = self.first_lba + filerecord.dataFork.HFSPlusExtentDescriptor[0].startBlock
        filekey = None
        # NOTE(review): good_usn and first_usn are never updated in this
        # variant, so the output filename always embeds usn 0.
        good_usn = None
        first_usn = 0
        lba0_versions = self.nand.ftl.findAllVersions(lba0)
        print "%d versions for first lba" % len(lba0_versions)
        for k in filekeys:
            for addr in lba0_versions:
                ciphertext = self.nand.ftl.readPage1(addr, key=None, lpn=lba0)
                if not ciphertext:
                    continue
                d = self.decryptFileBlock2(ciphertext, k, lba0, 0)
                if isDecryptedCorrectly(d):
                    hexdump(d[:16])
                    filekey = k
                    # addr is a tuple whose first element is the weave
                    # sequence number of this write.
                    weaveSeq = addr[0]
                    break
        if not filekey:
            # No candidate key decrypted the first page -- give up.
            return False
        logicalSize = filerecord.dataFork.logicalSize
        missing_pages = 0
        file_pages = []
        # Device LBAs of the file's blocks, in file order.
        lbns = []
        for extent in self.volume.getAllExtents(filerecord.dataFork, filerecord.fileID):
            for bn in xrange(extent.startBlock, extent.startBlock + extent.blockCount):
                lbns.append(self.first_lba + bn)
        # Maps file byte offset -> (decrypted page, logical block).
        datas = {}

        first_block = True
        done = False
        for weaveSeq,lbn,ce,block,page in self.nand.ftl.findPagesInRange(weaveSeq, weaveSeq+50000):
            if not lbn in lbns:
                continue
            idx = lbns.index(lbn)
            ciphertext = self.nand.ftl.readPage1((weaveSeq,ce,block,page), key=None, lpn=lbn)
            if not ciphertext:
                continue
            ciphertext = self.decryptFileBlock2(ciphertext, filekey, lbn, idx*self.pageSize)
            if idx == 0:
                # Only the first page can be sanity-checked.
                if not isDecryptedCorrectly(ciphertext):
                    continue
            datas[idx*self.pageSize] = (ciphertext, lbn - self.first_lba)
            #if idx == len(lbns):
            if len(datas) == len(lbns):
                done=True
                break
            # NOTE(review): dead code -- `done` is only ever set True
            # immediately before the break above, so this check can never
            # fire; leftover from the nested-loop variant of this routine.
            if done:
                break
        cleartext = ""
        decrypt_offset = 0
        for i in xrange(0,logicalSize, self.pageSize):
            if datas.has_key(i):
                ciphertext, lbn = datas[i]
                cleartext += ciphertext
            else:
                # Page never found in the scanned weave-sequence window.
                cleartext += self.blankPage
                missing_pages += 1
            decrypt_offset += self.pageSize

        print "Recovered %d:%s %d missing pages, size %s, created %s, contentModDate %s" % \
            (filerecord.fileID, filename.encode("utf-8"), missing_pages, sizeof_fmt(logicalSize), hfs_date(filerecord.createDate), hfs_date(filerecord.contentModDate))
        filename =  "%d_%d_%s" % (filerecord.fileID, first_usn, filename)
        if missing_pages == 0:
            filename = "OK_" + filename
            self.okfiles += 1
        if True:#exactSize:
            cleartext = cleartext[:logicalSize]
        self.writeUndeletedFile(filename, cleartext)
        return True
Example #37
0
def main():
    """CLI entry point for the image sorter.

    Parses the command line, validates argument combinations, and
    dispatches to sort_images.unknown_only / summary / sort_img depending
    on the flags.  PATH is one or more source directories followed by a
    destination directory (destination omitted in --summary mode).

    Fixes over the previous revision:
      * a bare -s/--summary no longer gets clobbered by the --dry-run
        alias (was `args.summary = bool(args.dry_run)`), and the merge
        happens before validation so -d and -s are checked identically;
      * the --unknown branch used the nonexistent `args.destination`
        attribute (AttributeError at runtime) -- the destination is the
        last positional PATH.
    """
    parser = argparse.ArgumentParser()
    # Add positional arguments
    parser.add_argument('PATH',
                        nargs='+',
                        help='''Provides Source Directory(s) and Destination
                                Directory for image sorting.  If --summary is
                                given, only needed Source Directory(s).''')

    # Add optional arguments
    parser.add_argument('-r',
                        '--recursive',
                        action='store_true',
                        help='get all images from subsequent directories')
    parser.add_argument('-c',
                        '--copy',
                        action='store_true',
                        help='copy instead of move image files')
    parser.add_argument('-v',
                        '--verbose',
                        action='store_true',
                        help='print detail information')
    parser.add_argument('-s',
                        '--summary',
                        action='store_true',
                        help='simulate the run, no file will be moved/copied')
    parser.add_argument('-d',
                        '--dry-run',
                        action='store_true',
                        help='same as --summary option')
    parser.add_argument('-i',
                        '--include',
                        action='store',
                        type=str,
                        help='''sorting only certain size indicated by
                                this option''')
    parser.add_argument('-e',
                        '--exclude',
                        action='store',
                        type=str,
                        help='''exclude certain image size indicated by
                                this option''')
    parser.add_argument('--unknown',
                        action='store_true',
                        help='sort all unknown/unreadable images into folder')
    parser.add_argument('--unknownonly',
                        action='store_true',
                        help='sort only unknown/unreadable images only')

    args = parser.parse_args()

    # --dry-run is an alias for --summary: either flag enables summary
    # mode.  Merge BEFORE validation so a bare -d is checked the same
    # way as -s.
    args.summary = args.summary or args.dry_run

    # check error on arguments
    _check_error(len(args.PATH), args.summary, (args.include, args.exclude),
                 (args.unknown, args.unknownonly))

    # Create destination directory if not exists
    # print on-screen if verbose is true
    if not args.summary:
        util.create_dir(args.PATH[-1])
        if args.verbose:
            print('{}: is created.\n'.format(args.PATH[-1]))

    # Parse the WxH / comma-separated size filter from --include/--exclude.
    limit_size: List[int] = []

    if args.include:
        limit_size = [int(num) for num in re.split('[x,]', args.include)]
    elif args.exclude:
        limit_size = [int(num) for num in re.split('[x,]', args.exclude)]

    # Putting all boolean args into one bundle
    bool_value: BoolCollection = BoolCollection(args.recursive, args.copy,
                                                args.verbose, args.unknown,
                                                bool(args.include))

    # process image sorting functionality separately if --unknownonly option
    # is on.
    # it will process whether --summary option is on or off
    if args.unknownonly:
        # src and dest will change depends on flag args.summary
        src: List[str] = args.PATH[:-1] if not args.summary else args.PATH
        dest: str = args.PATH[-1] if not args.summary else ''
        result: List[int] = sort_images.unknown_only([0, 0], src, dest,
                                                     args.summary, bool_value)
        # print out info if --summary is flaged
        if args.summary:
            print('-Total numbers: {}'.format(result[0]))
            print('-Total size: {}\n'.format(util.sizeof_fmt(result[1])))
    # If summary arguments is true, no actual images is sorted
    elif args.summary:
        lst: List[ImagePtr] = []
        lst = sort_images.summary(lst, args.PATH, bool_value, limit_size)
        if not lst:
            print('No image files found!  Maybe using it with -r option?')
        else:
            print('\n===SUMMARY===')
            for node in lst:
                node.to_string()
    else:
        # if unknown is true, create unknown folder in destination before
        # sorting
        if args.unknown:
            util.create_dir(os.path.join(args.PATH[-1], 'unknown'))
        sort_images.sort_img(args.PATH[:-1], args.PATH[-1], bool_value,
                             limit_size)
Example #38
0
#!/usr/bin/env python
import signac
from util import sizeof_fmt


# Tally, across every job in the signac project, the row count ('shape'[0])
# and on-disk byte size ('file_size') recorded in each job document for the
# tracked data files, then print the totals.
project = signac.get_project()
counts = {'Coupon': 0}
sizes = {'Coupon': 0}


for job in project:
    document = job.doc
    for name in counts:
        # Missing entries contribute zero rows.
        counts[name] += document.get(name, {'shape': (0, 0)})['shape'][0]
    for name in sizes:
        # Missing entries contribute zero bytes.
        sizes[name] += document.get(name, {'file_size': 0})['file_size']

print(counts)
for name in sizes:
    print(name, sizeof_fmt(sizes[name]))