Exemplo n.º 1
0
def merge_data_bsdiff(source_data, overlay_data):
    """Apply the bsdiff4 patch *overlay_data* on top of *source_data*.

    :param source_data: original bytes the patch was generated against.
    :param overlay_data: bsdiff4-format patch bytes.
    :returns: the reconstructed (patched) bytes.
    :raises IOError: if either input is empty.
    """
    # Empty inputs cannot yield a meaningful patch result; fail early.
    if not source_data or not overlay_data:
        raise IOError(
            "[Error] Not valid data length: %d, %d" %
            (len(source_data), len(overlay_data)))
    return bsdiff4.patch(source_data, overlay_data)
Exemplo n.º 2
0
 def decompress(self, rev, filename=None):
     """Reconstruct revision *rev* of the versioned file.

     Revision 0 is the unpacked base file; higher revisions are produced
     by applying the stored bsdiff4 patches (highest index first, down to
     *rev*) on top of the unpacked base.

     :param rev: revision number to reconstruct (coerced with int()).
     :param filename: if given, write the result there and return None;
         otherwise return the reconstructed bytes.
     :returns: bytes, None (when written to *filename*), or False when
         *rev* is out of range.
     """
     rev = int(rev)
     if rev == 0:
         # Base revision: just unpack the base file, no patching needed.
         with open(self.base, 'rb') as fh:
             data = self.unpack(fh.read())
         if filename is None:
             return data
         with open(filename, 'wb') as out:
             out.write(data)
         return
     if rev < 0 or rev > len(self.patches):
         return False
     with open(self.base, 'rb') as fh:
         data = self.unpack(fh.read())
     # Apply patches from the newest index down to the requested revision.
     now = len(self.patches)
     while now >= rev:
         patch_name = self.patches[now - 1]
         patch_path = os.path.join(os.path.dirname(self.base), patch_name)
         with open(patch_path, 'rb') as pf:
             data = bsdiff4.patch(data, pf.read())
         now -= 1
     if filename is None:
         return data
     with open(filename, 'wb') as out:
         out.write(data)
Exemplo n.º 3
0
 def patch(self, target: str):
     """Base + Delta -> Patched: apply the stored delta to the cached
     source data and write the patched result to *target*."""
     # Lazily load the delta if it has not been read yet.
     if not self.delta:
         self.read()
     source = self.get_source_data_with_cache()
     patched = bsdiff4.patch(source, self.delta)
     with open(target, "wb") as out:
         out.write(patched)
Exemplo n.º 4
0
def merge_data_bsdiff(source_data, overlay_data):
    """Apply the bsdiff4 patch *overlay_data* on top of *source_data*.

    :param source_data: original bytes the patch was generated against.
    :param overlay_data: bsdiff4-format patch bytes.
    :returns: the reconstructed (patched) bytes.
    :raises IOError: if either input is empty.
    """
    # Empty inputs cannot yield a meaningful patch result; fail early.
    if not source_data or not overlay_data:
        raise IOError(
            "[Error] Not valid data length: %d, %d" %
            (len(source_data), len(overlay_data)))
    return bsdiff4.patch(source_data, overlay_data)
Exemplo n.º 5
0
def data_for_op(op, out_file, old_file):
    """Apply one OTA-payload install operation and return its raw payload.

    Reads the operation's data from the global payload file, then writes
    the operation's result into *out_file* at the destination extents.

    :param op: install-operation message (type, extents, data offsets).
    :param out_file: writable binary file object for the target image.
    :param old_file: readable binary file object of the source image
        (used only by the differential-OTA operation types).
    :returns: the raw bytes read for this operation from the payload.
    """
    args.payloadfile.seek(data_offset + op.data_offset)
    data = args.payloadfile.read(op.data_length)

    # assert hashlib.sha256(data).digest() == op.data_sha256_hash, 'operation data hash mismatch'

    if op.type == op.REPLACE_XZ:
        # XZ-compressed full replacement of the destination extents.
        dec = lzma.LZMADecompressor()
        data = dec.decompress(data)
        out_file.seek(op.dst_extents[0].start_block * block_size)
        out_file.write(data)
    elif op.type == op.REPLACE_BZ:
        # BZip2-compressed full replacement.
        dec = bz2.BZ2Decompressor()
        data = dec.decompress(data)
        out_file.seek(op.dst_extents[0].start_block * block_size)
        out_file.write(data)
    elif op.type == op.REPLACE:
        # Uncompressed full replacement.
        out_file.seek(op.dst_extents[0].start_block * block_size)
        out_file.write(data)
    elif op.type == op.SOURCE_COPY:
        if not args.diff:
            print("SOURCE_COPY supported only for differential OTA")
            sys.exit(-2)
        # Copy the source extents verbatim into the destination.
        out_file.seek(op.dst_extents[0].start_block * block_size)
        for ext in op.src_extents:
            old_file.seek(ext.start_block * block_size)
            data = old_file.read(ext.num_blocks * block_size)
            out_file.write(data)
    elif op.type == op.SOURCE_BSDIFF:
        if not args.diff:
            print("SOURCE_BSDIFF supported only for differential OTA")
            sys.exit(-3)
        out_file.seek(op.dst_extents[0].start_block * block_size)
        # Gather the (possibly scattered) source extents into one buffer,
        # patch the whole buffer, then scatter the result back out to the
        # destination extents.
        tmp_buff = io.BytesIO()
        for ext in op.src_extents:
            old_file.seek(ext.start_block * block_size)
            old_data = old_file.read(ext.num_blocks * block_size)
            tmp_buff.write(old_data)
        tmp_buff.seek(0)
        old_data = tmp_buff.read()
        tmp_buff.seek(0)
        tmp_buff.write(bsdiff4.patch(old_data, data))
        n = 0
        tmp_buff.seek(0)
        for ext in op.dst_extents:
            tmp_buff.seek(n * block_size)
            n += ext.num_blocks
            data = tmp_buff.read(ext.num_blocks * block_size)
            out_file.seek(ext.start_block * block_size)
            out_file.write(data)
    elif op.type == op.ZERO:
        # BUGFIX: out_file is a binary file, so zeros must be bytes —
        # writing a str here raised TypeError on Python 3.
        for ext in op.dst_extents:
            out_file.seek(ext.start_block * block_size)
            out_file.write(b'\0' * ext.num_blocks * block_size)
    else:
        print("Unsupported type = %d" % op.type)
        sys.exit(-1)

    return data
Exemplo n.º 6
0
def create_rom_file(patch_file: str) -> Tuple[dict, str]:
    """Decompress *patch_file*, apply its bsdiff4 patch to the base ROM,
    and write the result next to the patch file with an ``.sfc`` suffix.

    :returns: (patch metadata dict, path of the written ROM file).
    """
    raw = lzma.decompress(load_bytes(patch_file)).decode("utf-8-sig")
    data = Utils.parse_yaml(raw)
    patched = bsdiff4.patch(get_base_rom_bytes(), data["patch"])
    target = os.path.splitext(patch_file)[0] + ".sfc"
    with open(target, "wb") as out:
        out.write(patched)
    return data["meta"], target
Exemplo n.º 7
0
 def apply_delta_B(src, delta):
     """Apply a BSDIFF4 delta to *src*.

     :param src: original data.
     :param delta: bsdiff4-format patch.
     :return: the patched data.
     """
     patched = bsdiff4.patch(src, delta)
     return patched
Exemplo n.º 8
0
def create_rom_bytes(patch_file: str) -> Tuple[dict, str, bytearray]:
    """Decompress *patch_file* and apply its bsdiff4 patch to the base ROM.

    :returns: (patch metadata dict, target ``.sfc`` path, patched bytes).
    """
    raw = lzma.decompress(load_bytes(patch_file)).decode("utf-8-sig")
    data = Utils.parse_yaml(raw)
    patched_data = bsdiff4.patch(get_base_rom_bytes(), data["patch"])
    # Bytes 0x7FC0-0x7FD4 hold an identifying string recorded into the
    # metadata — presumably the SNES internal-header title; TODO confirm.
    rom_hash = patched_data[0x7FC0:0x7FD5]
    data["meta"]["hash"] = "".join(chr(x) for x in rom_hash)
    target = os.path.splitext(patch_file)[0] + ".sfc"
    return data["meta"], target, patched_data
Exemplo n.º 9
0
def compose_patch(*li_content):
    """Left-fold bsdiff4.patch over the given contents.

    The first argument is the base; every following argument is applied to
    the running result as a bsdiff4 patch.

    :raises SystemError: when called with no arguments at all.
    """
    if not li_content:
        raise SystemError('compose_patch need at least one element, not 0')
    result = li_content[0]
    for delta in li_content[1:]:
        result = bsdiff4.patch(result, delta)
    return result
Exemplo n.º 10
0
 def apply_delta_B(src, delta):
     # type: (six.text_type, six.binary_type) -> six.text_type
     """Apply a BSDIFF4 delta to *src* and coerce the result to text.

     :param src: original data.
     :param delta: bsdiff4-format patch.
     :return: the patched data, as text.
     """
     patched = bsdiff4.patch(src, delta)
     return smart_text(patched)
Exemplo n.º 11
0
def get_data_from_entry(entry):
    """Read the file behind *entry*, apply its bsdiff4 patch if one is
    declared, and return the content as a bytearray.

    :param entry: mapping with '_path' and optional 'patch' /
        'patch_format' keys.
    """
    # BUGFIX: use context managers so the data and patch file handles are
    # always closed (the originals leaked both handles).
    with open(os.path.normpath(entry['_path']), "rb") as src:
        data = src.read()

    # Only the bsdiff4 patch format is handled; other formats pass through.
    if entry.get('patch', None) is not None:
        if entry.get('patch_format') == "bsdiff4":
            import bsdiff4
            with open(entry['patch'], "rb") as pf:
                data = bsdiff4.patch(data, pf.read())

    return bytearray(data)
Exemplo n.º 12
0
 def _apply_patches_in_memory(self):
     """Apply every queued bsdiff4 patch, in order, to ``self.og_binary``.

     :raises PatcherError: if any patch fails to apply.
     """
     log.debug('Applying patches')
     for delta in self.patch_binary_data:
         try:
             self.og_binary = bsdiff4.patch(self.og_binary, delta)
             log.debug('Applied patch successfully')
         except Exception as err:
             log.debug(err, exc_info=True)
             raise PatcherError('Patch failed to apply')
Exemplo n.º 13
0
 def _apply_patches_in_memory(self):
     """Apply every queued bsdiff4 patch, in order, to ``self.og_binary``.

     :raises PatcherError: if any patch fails to apply.
     """
     log.debug('Applying patches')
     for delta in self.patch_binary_data:
         try:
             self.og_binary = bsdiff4.patch(self.og_binary, delta)
             log.debug('Applied patch successfully')
         except Exception as err:
             log.debug(err, exc_info=True)
             raise PatcherError('Patch failed to apply')
Exemplo n.º 14
0
def patch(src_path, dst_path, patch_path, evt_mgr=None, super_id=None):
    """Create the zip archive *dst_path* by applying the zip-diff archive
    *patch_path* to the source archive *src_path*, reporting progress.

    Entries absent from the patch are copied through unchanged; entries in
    the patch are bsdiff4-patched against the source entry, bz2-inflated,
    or dropped ('RM'), based on the payload's magic prefix.

    :param src_path: path of the source zip archive.
    :param dst_path: path of the destination zip archive (created here).
    :param patch_path: path of the zip archive holding per-entry patches.
    :param evt_mgr: optional encore event manager for progress reporting.
    :param super_id: optional parent operation id for nested progress.
    """
    # Choose the progress backend: encore when an event manager is
    # supplied, the console fallback otherwise.
    if evt_mgr:
        from encore.events.api import ProgressManager
    else:
        from egginst.console import ProgressManager

    x = zipfile.ZipFile(src_path)
    y = zipfile.ZipFile(dst_path, 'w', zipfile.ZIP_DEFLATED)
    z = zipfile.ZipFile(patch_path)

    xnames = x.namelist()
    znames = set(z.namelist())

    n = 0
    tot = len(xnames) + len(znames)

    progress = ProgressManager(
                evt_mgr, source=patch,
                operation_id=uuid4(),
                message="patching",
                steps=tot,
                # ---
                progress_type="patching", filename=basename(patch_path),
                disp_amount=str(tot),
                super_id=super_id)

    with progress:
        # Pass 1: copy entries that the patch does not touch.
        for name in xnames:
            if name not in znames:
                 y.writestr(x.getinfo(name), x.read(name))
            n += 1
            progress(step=n)

        # Pass 2: materialize each patched entry.
        for name in z.namelist():
            if name == '__zdiff_info__.json':
                continue
            zdata = z.read(name)
            # NOTE(review): on Python 3, z.read() returns bytes, so these
            # str prefixes would never match — this looks like Python 2
            # era code; confirm before running under Python 3.
            if zdata.startswith('BSDIFF4'):
                ydata = bsdiff4.patch(x.read(name), zdata)
            elif zdata.startswith('BZ'):
                ydata = bz2.decompress(zdata)
            elif zdata.startswith('RM'):
                continue
            else:
                raise Exception("Hmm, didn't expect to get here: %r" % zdata)

            y.writestr(name, ydata)
            # NOTE(review): n is never incremented in this loop, so the
            # reported step stays constant here — confirm intended.
            progress(step=n)

    z.close()
    y.close()
    x.close()
Exemplo n.º 15
0
    def patch_to_file(self, mfile, start, end, patch):
        """ do patch: using bsdiff to patch data
            NOTE: pass the same mfile throughout can combine patch results.
            @param mfile: file where patch data will store.
            @param start: the start pos of to-patched data.
            @param end: the end pos of to-patched data.
            @param patch: patch data. """

        with open(self.path(), 'rb') as source:
            # Read just the [start, end) slice that this patch applies to.
            source.seek(start, 0)
            original = source.read(end - start)
            patched = bsdiff4.patch(original, patch)
            mfile.append_data(patched)
Exemplo n.º 16
0
 def test_branch_conflict(self):
     """Concurrent writes on a user branch: the entry with the highest
     hash wins, and writes made with the admin (root) key override
     later writes made with the user key."""
     view = View(self.log, self.root_key)
     view.build()
     home_path = os.path.join(os.sep, 'home-' + utils.random_ascii())
     view.mkdir(home_path)
     key = Key.generate()
     view.grant(home_path, 'user', key)
     view = View(self.log, key)
     view.build()
     parent_node = view.get(home_path)
     user_path = os.path.join(home_path, 'user-' + utils.random_ascii())
     max_hash = None
     enc_content = ''
     # Write a chain of bsdiff4 deltas from the same parent; remember the
     # highest entry hash seen — that entry should win the conflict.
     for ix in range(12):
         content = 'content-' + utils.random_ascii(32)
         prev = enc_content
         enc_content = bsdiff4.diff(enc_content, content)
         entry = self.log.write(parent_node.entry,
                                user_path,
                                key,
                                attachment=enc_content)
         max_hash = max(max_hash, entry.hash) if max_hash else entry.hash
     view = View(self.log, self.root_key)
     view.build()
     # The rebuilt view must resolve to the max-hash entry's content.
     self.assertEqual(
         bsdiff4.patch(prev, self.log.entries[max_hash].get_content()),
         view.get(user_path).content)
     # Admin branch more power
     admin_content = 'content-' + utils.random_ascii(32)
     content = bsdiff4.diff(enc_content, admin_content)
     self.log.write(parent_node.entry,
                    user_path,
                    self.root_key,
                    attachment=content)
     view.build()
     self.assertEqual(admin_content.encode(), view.get(user_path).content)
     # A later user-key write must not override the admin branch.
     alt_content = bsdiff4.diff(content, ('content-' +
                                          utils.random_ascii(32)).encode())
     self.log.write(parent_node.entry,
                    user_path,
                    key,
                    attachment=alt_content)
     self.assertEqual(admin_content.encode(), view.get(user_path).content)
     # Grant consistency with prev state
     view.grant(os.sep, 'user', key)
     self.assertEqual(admin_content.encode(), view.get(user_path).content)
     view.build()
     self.assertEqual(admin_content.encode(), view.get(user_path).content)
     # Test prints
     self.log.print_tree(view=view, color=True)
     self.log.print_tree(view=view, ascii=True)
Exemplo n.º 17
0
 def _apply_patches_in_memory(self):
     """Apply all queued bsdiff4 patches to a copy of the original binary,
     accumulating the result in ``self.new_binary``.

     :raises PatcherError: if any patch fails to apply.
     """
     log.debug(u'Applying patches')
     # Start from the original binary; patches accumulate into new_binary.
     self.new_binary = self.og_binary
     progress_signal.send(info=u'Applying Patches')
     for delta in self.patch_binary_data:
         try:
             self.new_binary = bsdiff4.patch(self.new_binary, delta)
         except Exception as err:
             progress_signal.send(info=u'Failed to apply patches')
             log.debug(err, exc_info=True)
             log.error(err)
             raise PatcherError(u'Patch failed to apply')
Exemplo n.º 18
0
 def _apply_patches_in_memory(self):
     """Apply all queued bsdiff4 patches to a copy of the original binary,
     accumulating the result in ``self.new_binary``.

     :raises PatcherError: if any patch fails to apply.
     """
     log.debug(u'Applying patches')
     # Start from the original binary; patches accumulate into new_binary.
     self.new_binary = self.og_binary
     progress_signal.send(info=u'Applying Patches')
     for delta in self.patch_binary_data:
         try:
             self.new_binary = bsdiff4.patch(self.new_binary, delta)
         except Exception as err:
             progress_signal.send(info=u'Failed to apply patches')
             log.debug(err, exc_info=True)
             log.error(err)
             raise PatcherError(u'Patch failed to apply')
Exemplo n.º 19
0
def create_rom_bytes(
        patch_file: str,
        ignore_version: bool = False) -> Tuple[dict, str, bytearray]:
    """Decompress *patch_file*, check its compatibility version, and apply
    its bsdiff4 patch to the base ROM.

    :param patch_file: path of the lzma-compressed patch container.
    :param ignore_version: skip the compatible_version check when True.
    :returns: (patch metadata dict, target ``.sfc`` path, patched bytes).
    :raises RuntimeError: if the patch needs a newer patcher version.
    """
    raw = lzma.decompress(load_bytes(patch_file)).decode("utf-8-sig")
    data = Utils.parse_yaml(raw)
    if not ignore_version and data["compatible_version"] > current_patch_version:
        raise RuntimeError(
            "Patch file is incompatible with this patcher, likely an update is required."
        )
    patched_data = bsdiff4.patch(get_base_rom_bytes(), data["patch"])
    # Bytes 0x7FC0-0x7FD4 hold an identifying string recorded into the
    # metadata — presumably the SNES internal-header title; TODO confirm.
    rom_hash = patched_data[0x7FC0:0x7FD5]
    data["meta"]["hash"] = "".join(chr(x) for x in rom_hash)
    target = os.path.splitext(patch_file)[0] + ".sfc"
    return data["meta"], target, patched_data
 def doPatch(self, patch, urls):
     """Download *patch* from *urls*, apply it to the current contents in
     memory, verify the resulting hash and write the result to disk.

     :returns: True on success.
     :raises Exception: if the patch cannot be downloaded or the patched
         content's hash does not match the expected hash.
     """
     patchContents = self.downloadPatch(patch, urls)
     if not patchContents:
         raise Exception('Error in procuring patch')
     oldContents = self.getContents()
     patchedContents = bsdiff4.patch(oldContents, patchContents)
     # Drop the large intermediates early — these can be big files.
     del oldContents
     del patchContents
     if self.hash != self.__hash(patchedContents):
         raise Exception('In-memory patch did not have correct hash after patching! Patching failed!')
     # BUGFIX: close the output handle even when write() raises so a
     # failed write does not leak the file handle.
     fileHandle = self._getFile('wb')
     try:
         fileHandle.write(patchedContents)
     finally:
         fileHandle.close()
     del patchedContents
     return True
Exemplo n.º 21
0
	def _get_doc_rev(doc_path, revision = "latest"):
		"""Reconstruct a document revision from its chain of revision files.

		Revision files under *doc_path* are walked oldest-to-newest (by
		mtime); each file after the first is applied as a bsdiff4 patch on
		top of the accumulated content, stopping at *revision*.

		:param doc_path: directory (path-like) holding the revision files.
		:param revision: revision file name to stop at, or "latest" to
			apply the whole chain.
		:returns: (rev_path, content), or (None, None) when the directory
			holds no revisions.
		"""
		# Short-circuit guard: only validate existence when an explicit
		# revision was requested — presumably _check_doc_rev_exists raises
		# for unknown revisions; confirm against its implementation.
		revision == "latest" or __class__._check_doc_rev_exists(doc_path / revision)
		
		all_revisions = sorted(doc_path.iterdir(), key = lambda e: _get_mtime(e.stat()))
		content = None
		rev_path = None
		for path in all_revisions:
			rev_path = path
			patch = path.read_bytes()
			# The first file seeds the content directly (treated as a full
			# snapshot); every later file is a delta against it.
			content = bsdiff4.patch(content, patch) if content else patch
			if path.name == revision:
				break
		
		if rev_path is None:
			return None, None
		return rev_path, content
Exemplo n.º 22
0
def patch(src_path, dst_path, patch_path):
    """Create the zip archive *dst_path* by applying the zip-diff archive
    *patch_path* to the source archive *src_path*, logging progress.

    Entries absent from the patch are copied through unchanged; entries in
    the patch are bsdiff4-patched against the source entry, bz2-inflated,
    or dropped ('RM'), based on the payload's magic prefix.

    :param src_path: path of the source zip archive.
    :param dst_path: path of the destination zip archive (created here).
    :param patch_path: path of the zip archive holding per-entry patches.
    """
    x = zipfile.ZipFile(src_path)
    y = zipfile.ZipFile(dst_path, 'w', zipfile.ZIP_DEFLATED)
    z = zipfile.ZipFile(patch_path)

    xnames = x.namelist()
    znames = set(z.namelist())

    n = 0
    tot = len(xnames) + len(znames)
    getLogger('progress.start').info(dict(
            amount = tot,
            disp_amount = str(tot),
            filename = basename(patch_path),
            action = 'patching'))

    # Pass 1: copy entries that the patch does not touch.
    for name in xnames:
        if name not in znames:
             y.writestr(x.getinfo(name), x.read(name))
        n += 1
        getLogger('progress.update').info(n)

    # Pass 2: materialize each patched entry.
    for name in z.namelist():
        if name == '__zdiff_info__.json':
            continue
        zdata = z.read(name)
        # NOTE(review): on Python 3, z.read() returns bytes, so these str
        # prefixes would never match — this looks like Python 2 era code;
        # confirm before running under Python 3.
        if zdata.startswith('BSDIFF4'):
            ydata = bsdiff4.patch(x.read(name), zdata)
        elif zdata.startswith('BZ'):
            ydata = bz2.decompress(zdata)
        elif zdata.startswith('RM'):
            continue
        else:
            raise Exception("Hmm, didn't expect to get here: %r" % zdata)

        y.writestr(name, ydata)
        # NOTE(review): n is never incremented in this loop, so the logged
        # progress value stays constant here — confirm intended.
        getLogger('progress.update').info(n)

    getLogger('progress.stop').info(None)

    z.close()
    y.close()
    x.close()
Exemplo n.º 23
0
def main(input_file, output_file):
    """Patches default firmware to allow reading of any address in memory."""
    with open(input_file, 'rb') as file:
        contents = file.read()

    with open("resources/firmware.patch", 'rb') as file:
        patch = file.read()

    # Refuse to patch anything but the exact known firmware build.
    digest = hashlib.sha1(contents).hexdigest()
    if digest != '94ffc60b9127e64308b3ce1bc8b81ee11cfc2dff':
        print("Invalid hash for input firmware. Aborting.")
        exit(1)

    with open(output_file, 'wb') as file:
        file.write(bsdiff4.patch(contents, patch))

    print("Patching complete.")
Exemplo n.º 24
0
    def html_and_level(self):
        """Return ``(html, level)`` for this update.

        The HTML is either rebuilt by applying this update's bsdiff4 delta
        to the previous update's HTML, taken from a raw snapshot, or
        inherited unchanged from the previous update. ``level`` counts how
        deep this update sits in the delta chain since the last snapshot.

        :raises Exception: if the rebuilt HTML fails the md5 checksum.
        """
        if self.html_delta:
            # Rebuild by patching the previous update's HTML with our delta.
            prev_html, prev_level = self.prev_update.html_and_level
            result = bsdiff4.patch(
                prev_html.encode(),
                self.html_delta,
            ).decode()
        elif self.html_raw:
            # Full snapshot stored directly; the chain depth restarts at 0.
            result = self.html_raw
            prev_level = 0
        else:
            # No content of our own: inherit from the previous update.
            result, prev_level = self.prev_update.html_and_level

        # Guard against corruption anywhere along the delta chain.
        result_checksum = hashlib.md5((result or '').encode()).hexdigest()
        if result_checksum != self.html_checksum:
            raise Exception(
                'Update {} HTML failed verification. Expected {}, got {}.'.
                format(self.id, self.html_checksum, result_checksum))

        return result, prev_level + 1
Exemplo n.º 25
0
 def OnButton6Button(self, event):
     """wx handler: prompt for a destination path, apply the loaded
     bsdiff4 patch to the loaded original bytes, and save the result."""
     event.Skip()
     dlg = wx.FileDialog(self, 'Save patched', '.', '', '*', wx.SAVE)
     try:
         if dlg.ShowModal() == wx.ID_OK:
             filename = dlg.GetPath()
             
             destinationFile = open(filename,'wb')
             
             # patchOriginalBytes / patchFilePatch are module-level state —
             # presumably filled in by the load buttons; confirm.
             patchFile = bsdiff4.patch(bytes(patchOriginalBytes),patchFilePatch)
             destinationFile.write(patchFile)
             destinationFile.close()
             
     finally:
         dlg.Destroy()
         
         # NOTE(review): this success dialog runs in `finally`, so it is
         # shown even when patching failed or the dialog was cancelled.
         ok_dlg = wx.MessageDialog (self, u'Completed!!',
             u'Completed',
             wx.OK | wx.ICON_INFORMATION
             )
         ok_dlg.ShowModal ()
         ok_dlg.Destroy () 
 def doPatch(self, patch, urls):
     """Download *patch* from *urls*, apply it to the current contents in
     memory, verify the resulting hash and write the result to disk.

     :returns: True on success.
     :raises Exception: if the patch cannot be downloaded or the patched
         content's hash does not match the expected hash.
     """
     # First download the patch
     patchContents = self.downloadPatch(patch, urls)
     if not patchContents:
         raise Exception('Error in procuring patch')
     # Then apply it in memory with bsdiff4
     oldContents = self.getContents()
     patchedContents = bsdiff4.patch(oldContents, patchContents)
     # Delete the old and patch contents to conserve memory, as they can be large with big files
     del oldContents
     del patchContents
     # Verify the patched content against the expected hash
     if self.hash != self.__hash(patchedContents):
         raise Exception('In-memory patch did not have correct hash after patching! Patching failed!')
     # BUGFIX: close the output handle even when write() raises so a
     # failed write does not leak the file handle.
     fileHandle = self._getFile('wb')
     try:
         fileHandle.write(patchedContents)
     finally:
         fileHandle.close()
     del patchedContents
     return True
 def doPatch(self, patch, urls):
     """Download *patch* from *urls*, apply it to the current contents in
     memory, verify the resulting hash and write the result to disk.

     :returns: True on success.
     :raises Exception: if the patch cannot be downloaded or the patched
         content's hash does not match the expected hash.
     """
     # First download the patch
     patchContents = self.downloadPatch(patch, urls)
     if not patchContents:
         raise Exception('Error in procuring patch')
     # Then apply it in memory with bsdiff4
     oldContents = self.getContents()
     patchedContents = bsdiff4.patch(oldContents, patchContents)
     # Delete the old and patch contents to conserve memory, as they can be large with big files
     del oldContents
     del patchContents
     # Verify the patched content against the expected hash
     if self.hash != self.__hash(patchedContents):
         raise Exception('In-memory patch did not have correct hash after patching! Patching failed!')
     # BUGFIX: close the output handle even when write() raises so a
     # failed write does not leak the file handle.
     fileHandle = self._getFile('wb')
     try:
         fileHandle.write(patchedContents)
     finally:
         fileHandle.close()
     del patchedContents
     return True
Exemplo n.º 28
0
 def content(self):
     """Return this node's content, rebuilding it from the chain of
     consecutive same-action ancestor entries (applied oldest-first as
     bsdiff4 patches) and caching the result on the instance."""
     if not hasattr(self, '_content'):
         content = b""
         if self.entry.action == self.entry.WRITE:
             logger.debug("Uncached BSDIFF4 of %s", self.entry.path)
             # Walk up the parent chain, collecting the run of entries
             # that share this entry's action, oldest first.
             ancestor = self.entry
             ancestors = [ancestor]
             while True:
                 ancestor = ancestor.parent
                 if ancestor is None or ancestor.action != self.entry.action:
                     break
                 ancestors.insert(0, ancestor)
             for ancestor in ancestors:
                 ancestor_content = ancestor.get_content()
                 if not ancestor_content:
                     # An empty attachment resets the accumulated content.
                     content = ancestor_content
                 else:
                     content = bsdiff4.patch(content, ancestor_content)
         self._content = content
     else:
         logger.debug("Cached BSDIFF4 %s", self.entry.path)
     return self._content
Exemplo n.º 29
0
 def test_branch_conflict(self):
     """Concurrent writes on a user branch: the entry with the highest
     hash wins, and writes made with the admin (root) key override
     later writes made with the user key."""
     view = View(self.log, self.root_key)
     view.build()
     home_path = os.path.join(os.sep, 'home-' + utils.random_ascii())
     view.mkdir(home_path)
     key = Key.generate()
     view.grant(home_path, 'user', key)
     view = View(self.log, key)
     view.build()
     parent_node = view.get(home_path)
     user_path = os.path.join(home_path, 'user-' + utils.random_ascii())
     max_hash = None
     enc_content = ''
     # Write a chain of bsdiff4 deltas from the same parent; remember the
     # highest entry hash seen — that entry should win the conflict.
     for ix in range(12):
         content = 'content-' + utils.random_ascii(32)
         prev = enc_content
         enc_content = bsdiff4.diff(enc_content, content)
         entry = self.log.write(parent_node.entry, user_path, key, attachment=enc_content)
         max_hash = max(max_hash, entry.hash) if max_hash else entry.hash
     view = View(self.log, self.root_key)
     view.build()
     # The rebuilt view must resolve to the max-hash entry's content.
     self.assertEqual(bsdiff4.patch(prev, self.log.entries[max_hash].get_content()), view.get(user_path).content)
     # Admin branch more power
     admin_content = 'content-' + utils.random_ascii(32)
     content = bsdiff4.diff(enc_content, admin_content)
     self.log.write(parent_node.entry, user_path, self.root_key, attachment=content)
     view.build()
     self.assertEqual(admin_content.encode(), view.get(user_path).content)
     # A later user-key write must not override the admin branch.
     alt_content = bsdiff4.diff(content, ('content-' + utils.random_ascii(32)).encode())
     self.log.write(parent_node.entry, user_path, key, attachment=alt_content)
     self.assertEqual(admin_content.encode(), view.get(user_path).content)
     # Grant consistency with prev state
     view.grant(os.sep, 'user', key)
     self.assertEqual(admin_content.encode(), view.get(user_path).content)
     view.build()
     self.assertEqual(admin_content.encode(), view.get(user_path).content)
     # Test prints
     self.log.print_tree(view=view, color=True)
     self.log.print_tree(view=view, ascii=True)
Exemplo n.º 30
0
 def content(self):
     """Return this node's content, rebuilding it from the chain of
     consecutive same-action ancestor entries (applied oldest-first as
     bsdiff4 patches) and caching the result on the instance."""
     if not hasattr(self, '_content'):
         content = b""
         if self.entry.action == self.entry.WRITE:
             logger.debug("Uncached BSDIFF4 of %s", self.entry.path)
             # Walk up the parent chain, collecting the run of entries
             # that share this entry's action, oldest first.
             ancestor = self.entry
             ancestors = [ancestor]
             while True:
                 ancestor = ancestor.parent
                 if ancestor is None or ancestor.action != self.entry.action:
                     break
                 ancestors.insert(0, ancestor)
             for ancestor in ancestors:
                 ancestor_content = ancestor.get_content()
                 if not ancestor_content:
                     # An empty attachment resets the accumulated content.
                     content = ancestor_content
                 else:
                     content = bsdiff4.patch(content, ancestor_content)
         self._content = content
     else:
         logger.debug("Cached BSDIFF4 %s", self.entry.path)
     return self._content
Exemplo n.º 31
0
def apply_diff(orig_file: IOIter, diff_file: IOIter, new_file: IOIter) -> None:
    """ Given an original file and a diff file, write out a new file with the diff applied

    :param orig_file: an IOIter object whose contents are the "original" data
    :param diff_file: an IOIter object whose contents are the diff to be applied
    :param new_file: an IOIter object where the new file data will be written
    """

    # The outer loop reads a chunk of data at a time; the inner loop parses
    # the read chunk one step at a time and applies it
    diff = b''
    new_writer = new_file.writer()
    next(new_writer)  # prime the coroutine so it is ready to receive data
    orig_reader = orig_file.reader()
    logger.debug2('applying diff')
    for diff_chunk in diff_file.reader():
        diff += diff_chunk
        while diff:
            # try to parse the next chunk; if we can't, break out of the loop to get more data
            # Each record has the form "<length><SEPARATOR><length bytes>".
            try:
                diff_len_str, remainder = diff.split(SEPARATOR, 1)
            except ValueError:
                break

            diff_len = int(diff_len_str)
            if len(remainder) < diff_len:
                # The record's payload has not fully arrived yet.
                break

            # Each record patches the next block of the original file; a
            # missing block (original EOF) is patched against b''.
            try:
                orig_block = next(orig_reader)
            except StopIteration:
                orig_block = b''
            new_writer.send(bsdiff4.patch(orig_block, remainder[:diff_len]))
            diff = remainder[diff_len:]

    if diff:
        # Leftover unparsed bytes mean the diff stream was truncated or corrupt.
        raise DiffParseError(f'Un-parseable diff: {diff}')  # type: ignore
Exemplo n.º 32
0
 def round_trip(self, src, dst):
     """Diff *src* against *dst*, patch it back, and assert equality."""
     delta = diff(src, dst)
     restored = patch(src, delta)
     self.assertEqual(dst, restored)
Exemplo n.º 33
0
 def apply_entry_diff(self, entry, path, data):
     """Return *data* patched with the entry's bsdiff4 payload
     (*path* is accepted for interface compatibility but unused here)."""
     patched = bsdiff4.patch(data, entry.payload)
     return patched
Exemplo n.º 34
0
def patch(patch, reference):
    """Return an uncompressed file by applying the patch to the reference"""
    result = bsdiff4.patch(reference, patch)
    return result
Exemplo n.º 35
0
def testPatching(args):
    '''tests the db patches generating the expected databases by using the information in the provided ServerDatabase leveldb database
    @param args - the namespace object we get from argparse.parse_args()
    '''

    lg = logging.getLogger("main")

    if args.verbose:
        lg.setLevel("DEBUG")


    # open the ServerDatabase
    sbObj = ServerDatabase(args.serverDatabasePath, lg)

    latestDbHasher = hashlib.sha256()

    # first find the most recent database, this is what all the other databases + the patch should equal to.
    latestDbTime = sbObj[ServerDatabaseEnums.KEY_LAST_DOWNLOAD_TIME]
    latestDbResult = sbObj.getWithPrefix(ServerDatabaseEnums.PREFIX_DATABASE_TIME_TO_LOCATION, [latestDbTime])
    latestDbFilepath = pathlib.Path(latestDbResult)

    lg.info("Latest db time is: {}".format(latestDbTime))
    lg.info("Latest db is located at: {}".format(latestDbFilepath))

    # sha256 of the latest db: the reference every patched db must match.
    with open(str(latestDbFilepath), "rb") as f:
        latestDbHasher.update(f.read())
    latestDbHexDigest = latestDbHasher.hexdigest()

    lg.info("Latest db's sha256 is {}".format(latestDbHexDigest))

    # now go through every database we can find, and then get its associated patch, patch the database and see if it 
    # equals the latest database's hash

    errorList = list()
    for iterDbKey, iterDbValue in sbObj.getGeneratorWithPrefix(ServerDatabaseEnums.PREFIX_DATABASE_TIME_TO_LOCATION, [""]):


        # key layout is "<prefix>:<time>"; strip the prefix to get the time.
        iterDbTime = iterDbKey.decode("utf-8")
        iterDbTime = iterDbTime[iterDbTime.find(":")+1:]
        iterDbPath = iterDbValue.decode("utf-8")

        lg.info("on database version: {} ({})".format(iterDbTime, arrow.get(iterDbTime).isoformat()))


        if iterDbTime == latestDbTime:
            lg.info("\tSkipping database because its the same as the latest db")
            continue

        # get the patch between this database and the latest database
        try:
            iterPatchResult = sbObj.getWithPrefix(ServerDatabaseEnums.PREFIX_DB_PATCH, [iterDbTime, latestDbTime])
        except KeyError:
            lg.error("\tPatch entry from {} to {} doesn't exist in the ServerDatabase!".format(iterDbTime, latestDbTime))
            errorList.append(iterDbTime)
            continue

        iterPatchPath = pathlib.Path(iterPatchResult)

        if not iterPatchPath.exists():
            lg.info("\tERROR: patch from {} to {} doesn't exist on disk ({})".format(iterDbTime, iterDbPath, iterPatchPath))
            errorList.append(iterDbTime)
            continue

        # apply the patch

        iterDbFile = open(str(iterDbPath), "rb")
        iterPatchFile = open(str(iterPatchPath), "rb")

        lg.debug("\tPatching with patchfile {}".format(iterPatchPath))
        patchedDbBytes = bsdiff4.patch(iterDbFile.read(), iterPatchFile.read())

        iterDbFile.close()
        iterPatchFile.close()

        # hash the patched db to see if it eqals the latest db's sha256

        iterDbHasher = hashlib.sha256()
        iterDbHasher.update(patchedDbBytes)
        patchedDbHexDigest = iterDbHasher.hexdigest()

        lg.debug("\t\thex digest (latest) {}".format(latestDbHexDigest))
        lg.debug("\t\thex digest (patch ) {}".format(patchedDbHexDigest))

        if patchedDbHexDigest == latestDbHexDigest:
            lg.info("\tthey match")
        else:
            lg.error("\tHex digests don't match! latest({}) != patched({})".format(latestDbHexDigest, patchedDbHexDigest))
            errorList.append(iterDbTime)
            continue


    if errorList:
        lg.info("These databases had errors: {}".format(pprint.pformat(errorList)))
    else:
        lg.info("no errors found!")

    lg.info("done")
Exemplo n.º 36
0
 def _merge(klass, parent_data, current_data):
     """Merge by applying *current_data* as a bsdiff4 patch onto
     *parent_data* and returning the result."""
     merged = bsdiff4.patch(parent_data, current_data)
     return merged
Exemplo n.º 37
0
 def round_trip(self, src, dst):
     """Diff *src* against *dst*, patch it back, and assert equality."""
     delta = diff(src, dst)
     restored = patch(src, delta)
     self.assertEqual(dst, restored)