Example #1
    def delete_patch(self, patch):
        """Attempts to delete a patch and its metadata from
        the backend ZoiaLibraryApp directory.

        patch: A string representing the path to the patch to be
               deleted.

        raise: DeletionError if patch is None.
        raise: RenamingError if the file could not be renamed
               correctly.
        raise: BadPathError if patch was not a valid path.
        """

        if patch is None:
            raise errors.DeletionError(None)

        # Remove any file extension if it is included.
        if os.path.sep in patch:
            patch = patch.split(os.path.sep)[-1]
        patch = patch.split(".")[0]

        # Try to delete the file and metadata file.
        try:
            # Should the patch directory not exist, a BadPathError is raised.
            new_path = os.path.join(self.back_path, patch.split("_")[0])
            os.remove(os.path.join(new_path, patch + ".bin"))
            os.remove(os.path.join(new_path, patch + ".json"))
            if new_path is not None and len(os.listdir(new_path)) == 2:
                # Exactly one patch version remains (one .bin and one .json),
                # so drop the version suffix on the leftover files.
                for left_files in os.listdir(new_path):
                    try:
                        os.rename(
                            os.path.join(new_path, left_files),
                            os.path.join(
                                new_path,
                                "{}.{}".format(
                                    left_files.split("_")[0], left_files.split(".")[-1]
                                ),
                            ),
                        )
                    except (FileNotFoundError, FileExistsError):
                        raise errors.RenamingError(left_files, 601)
            elif new_path is not None and len(os.listdir(new_path)) == 0:
                # Special case: There are no more patches left in the
                # patch directory. As such, the directory should be removed.
                os.rmdir(new_path)
        except FileNotFoundError:
            raise errors.BadPathError(patch, 301)
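
A minimal usage sketch for the method above; `library` is assumed to be an instance of the surrounding backend class (not shown in this example), and the patch name is purely illustrative:

# Hypothetical caller; "library" is an instance of the class defining
# delete_patch() above, and "errors" is the project's error module.
try:
    # Either a bare name or a full path works; any extension and leading
    # directories are stripped before the .bin/.json pair is removed.
    library.delete_patch("12345_v1")
except errors.BadPathError:
    pass  # the patch files or their directory could not be found (301)
except errors.RenamingError:
    pass  # the leftover version could not be renamed (601)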
Example #2
def delete_patch(patch):
    """Attempts to delete a patch and its metadata from
    the backend ZoiaLibraryApp directory.

    patch: A string representing the patch to be deleted.
    Raises a DeletionError if patch is None.
    Raises a RenamingError if the file could not be renamed correctly.
    Raises a BadPathError if patch was not a valid path.
    """

    global backend_path
    if backend_path is None:
        backend_path = determine_backend_path()

    if patch is None:
        raise errors.DeletionError(None)

    # Remove any file extension if it is included.
    if os.path.sep in patch:
        patch = patch.split(os.path.sep)[-1]
    patch = patch.split(".")[0]

    # Try to delete the file and metadata file.
    try:
        # Should the patch directory not exist, a BadPathError is raised.
        new_path = os.path.join(backend_path, patch.split("_")[0])
        os.remove(os.path.join(new_path, patch + ".bin"))
        os.remove(os.path.join(new_path, patch + ".json"))
        if new_path is not None and len(os.listdir(new_path)) == 2:
            for left_files in os.listdir(new_path):
                try:
                    front = left_files.split("_")[0]
                    end = left_files.split(".")[1]
                    os.rename(
                        os.path.join(new_path, left_files),
                        os.path.join(new_path, "{}.{}".format(front, end)))
                except (FileNotFoundError, FileExistsError):
                    raise errors.RenamingError(left_files, 601)
        elif new_path is not None and len(os.listdir(new_path)) == 0:
            # Special case: There are no more patches left in the
            # patch directory. As such, the directory should be removed.
            os.rmdir(new_path)
    except FileNotFoundError:
        raise errors.BadPathError(patch, 301)
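
A hedged usage sketch of the module-level variant; it assumes determine_backend_path() and the errors module from the same module, and the path is illustrative:

import os

# Hypothetical call; a full path is accepted and reduced to the bare name.
try:
    delete_patch(os.path.join("downloads", "12345_v2.bin"))
except errors.DeletionError:
    pass  # None was passed instead of a patch name
except errors.BadPathError:
    pass  # the patch directory or its files do not exist (error code 301)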
Example #3
def patch_decompress(patch):
    """ Method stub for decompressing files retrieved from the PS API.

    patch: A tuple containing the downloaded file
           data and the patch metadata, comes from ps.download(IDX).
           patch[0] is raw binary data, while patch[1] is json data.
    Raises a SavingError should the contents fail to save.
    Raises a RenamingError should the contents fail to be renamed.
    """

    # No need to determine it again if we have done so before.
    global backend_path
    if backend_path is None:
        backend_path = determine_backend_path()

    patch_name = str(patch[1]['id'])

    pch = os.path.join(backend_path, "{}".format(str(patch_name)))
    if not os.path.isdir(pch):
        os.mkdir(pch)

    if patch[1]["files"][0]["filename"].split(".")[1] == "zip":
        # .zip files
        name_zip = os.path.join(pch, "{}.zip".format(patch_name))
        with open(name_zip, "wb") as f:
            f.write(patch[0])
        with zipfile.ZipFile(os.path.join(pch, "{}.zip".format(patch_name)),
                             'r') as zipObj:
            # Extract all the contents into the patch directory
            zipObj.extractall(pch)
        # Ditch the zip
        os.remove(name_zip)
        to_delete = None

        for file in os.listdir(pch):
            if os.path.isdir(os.path.join(pch, file)):
                to_delete = (os.path.join(pch, file))
                pch = os.path.join(pch, file)

        i = 0
        for file in os.listdir(pch):
            if file.split(".")[1] == "bin":
                i += 1
                try:
                    name = file
                    # Rename the file to follow the conventional format
                    # TODO Change this to rename the file based on the
                    #  date modified.
                    os.rename(
                        os.path.join(pch, file),
                        os.path.join(pch,
                                     "{}_v{}.bin".format(patch[1]["id"], i)))
                    patch[1]["files"][0]["filename"] = name
                    save_metadata_json(patch[1], i)
                except (FileNotFoundError, FileExistsError):
                    raise errors.RenamingError(patch, 601)
            else:
                # Remove any additional files.
                # TODO make this better. Shouldn't just delete
                #  additional files. Especially .txt, would want to
                #  add that to the content attribute in the JSON.
                os.remove(os.path.join(pch, file))
        if to_delete is not None:
            for file in os.listdir(to_delete):
                correct_pch = os.path.join(backend_path,
                                           "{}".format(str(patch_name)))
                shutil.copy(os.path.join(to_delete, file),
                            os.path.join(correct_pch))
            try:
                shutil.rmtree(to_delete)
            except FileNotFoundError:
                raise errors.RenamingError(patch)

    else:
        # Unexpected file extension encountered.
        # TODO Handle this case gracefully.
        raise errors.SavingError(patch[1]["title"], 501)
Example #4
def save_to_backend(patch):
    """Attempts to save a simple binary patch and its metadata
    to the backend ZoiaLibraryApp directory. This method is meant
    to work for patches retrieved via the PS API. As such, it should
    only be called with the returned output from download() located
    in api.py. Other input will most likely cause a SavingError.

    For local patch importing, see import_to_backend().

    patch: A tuple containing the downloaded file
           data and the patch metadata, comes from ps.download(IDX).
           patch[0] is raw binary data, while patch[1] is json data.
    Raises a SavingError should the patch fail to save.
    Raises a RenamingError should the patch fail to be renamed.
    Raises a JSONError should the patch metadata not be valid JSON.
    """

    global backend_path
    if backend_path is None:
        backend_path = determine_backend_path()

    # Don't try to save a file when we are missing necessary info.
    if patch is None or patch[0] is None \
            or patch[1] is None or backend_path is None \
            or not isinstance(patch[0], bytes) \
            or not isinstance(patch[1], dict):
        raise errors.SavingError(None)

    try:
        # Ensure that the data is in valid json format.
        json.dumps(patch[1])
    except ValueError:
        raise errors.JSONError(patch[1], 801)

    pch_id = str(patch[1]['id'])
    if len(pch_id) == 5:
        # This is an imported patch. Unfortunately, we need to make sure
        # that it's a unique binary by checking every patch currently
        # stored. TODO Use binary analysis to improve this process.
        for direc in os.listdir(backend_path):
            if os.path.isdir(os.path.join(backend_path, direc)) \
                    and direc != "Banks" and direc != "sample_files" \
                    and direc != ".DS_Store":
                for files in os.listdir(os.path.join(backend_path, direc)):
                    if files.split(".")[1] == "bin":
                        with open(os.path.join(backend_path, direc, files),
                                  "rb") as f:
                            data = f.read()
                        if patch[0] == data:
                            raise errors.SavingError(patch[1]["title"], 503)
    pch = os.path.join(backend_path, "{}".format(pch_id))
    # Check to see if a directory needs to be made
    # (new patch, no version control needed yet).
    if not os.path.isdir(pch):
        os.mkdir(pch)
        if "files" in patch[1] \
                and patch[1]["files"][0]["filename"].split(".")[1] != "bin":
            # If it isn't a straight bin additional work must be done.
            if patch[1]["files"][0]["filename"].split(".")[1] == "py":
                # We are not responsible for .py files.
                shutil.rmtree(os.path.join(backend_path, pch))
                raise errors.SavingError(patch[1], 501)
            else:
                patch_decompress(patch)
        # Make sure the files attribute exists.
        elif "files" in patch[1] and isinstance(patch[0], bytes):
            name_bin = os.path.join(pch, "{}.bin".format(pch_id))
            with open(name_bin, "wb") as f:
                f.write(patch[0])
            save_metadata_json(patch[1])
        else:
            # No files attribute; nothing can be saved.
            raise errors.SavingError(patch[1], 502)
    else:
        """ A directory already existed for this patch id, so 
        we need to check if this is a unique patch version 
        (otherwise there is no need to save it).
        """
        # Case 1: Check if this is a compressed patch download.
        if "files" in patch[1] \
                and patch[1]["files"][0]["filename"].split(".")[1] != "bin":
            # We need to check the individual binary files to see which,
            # if any, differ from the ones currently stored.

            # Figure out which file compression is being used.
            if patch[1]["files"][0]["filename"].split(".")[1] == "zip":
                # Create a temporary directory to store
                # the extracted files.
                os.mkdir(os.path.join(backend_path, "temp"))
                # Write the zip
                zfile = os.path.join(backend_path, "temp.zip")
                with open(zfile, "wb") as zf:
                    zf.write(patch[0])
                with zipfile.ZipFile(zfile, 'r') as zipObj:
                    # Extract all the contents into the temporary directory.
                    zipObj.extractall(os.path.join(backend_path, "temp"))
                # Ditch the zip
                os.remove(zfile)
                # For each binary file, call the method again
                # and see if the data has been changed.
                diff = False
                for file in os.listdir(os.path.join(backend_path, "temp")):
                    try:
                        # We only care about .bin files.
                        if file.split(".")[1] == "bin":
                            with open(file, "rb") as bin_file:
                                raw_bin = bin_file.read()
                            save_to_backend((raw_bin, patch[1]))
                            diff = True
                    except (FileNotFoundError, errors.SavingError):
                        pass
                # Cleanup and finish.
                shutil.rmtree(os.path.join(backend_path, "temp"))
                if not diff:
                    # No files changed, so we should raise a SavingError
                    raise errors.SavingError(patch[1]["title"], 503)
                return
            else:
                # TODO Cover the other compression cases.
                raise errors.SavingError(patch[1]["title"])

        # If we get here, we are working with a .bin, so we
        # need to see if the binary is already saved.
        for file in os.listdir(os.path.join(pch)):
            if file.split(".")[1] == "bin":
                with open(os.path.join(pch, file), "rb") as f:
                    if f.read() == patch[0]:
                        # This exact binary is already saved onto the system.
                        raise errors.SavingError(patch[1]["title"], 503)

        # If we get here, we have a unique patch, so we need to find
        # out what version # to give it.

        # Case 2: Only one version of the patch existed previously.
        if len(os.listdir(os.path.join(backend_path, pch))) == 2:
            name_bin = os.path.join(pch, "{}_v1.bin".format(pch_id))
            with open(name_bin, "wb") as f:
                f.write(patch[0])
            save_metadata_json(patch[1], 1)
            try:
                os.rename(os.path.join(pch, "{}.bin".format(pch_id)),
                          os.path.join(pch, "{}_v2.bin".format(pch_id)))
                os.rename(os.path.join(pch, "{}.json".format(pch_id)),
                          os.path.join(pch, "{}_v2.json".format(pch_id)))
            except (FileNotFoundError, FileExistsError):
                raise errors.RenamingError(patch, 601)
            with open(os.path.join(pch, "{}_v2.json".format(pch_id)),
                      "r") as f:
                jf = json.loads(f.read())
            jf["revision"] = 2
            with open(os.path.join(pch, "{}_v2.json".format(pch_id)),
                      "w") as f:
                json.dump(jf, f)
        # Case 3: There were already multiple versions in the patch directory.
        elif len(os.listdir(os.path.join(backend_path, pch))) > 2:
            # Increment the version number for each file in the directory.
            try:
                for file in reversed(sorted(os.listdir(os.path.join(pch)))):
                    ver = int(file.split("v")[1].split(".")[0]) + 1
                    extension = file.split(".")[1]
                    os.rename(
                        os.path.join(
                            pch, "{}_v{}.{}".format(pch_id, str(ver - 1),
                                                    extension)),
                        os.path.join(
                            pch, "{}_v{}.{}".format(pch_id, str(ver),
                                                    extension)))
                    # Update the revision number in each metadata file
                    with open(
                            os.path.join(
                                pch, "{}_v{}.json".format(pch_id, str(ver))),
                            "r") as f:
                        jf = json.loads(f.read())

                    jf["revision"] = ver

                    with open(
                            os.path.join(
                                pch, "{}_v{}.json".format(pch_id, str(ver))),
                            "w") as f:
                        json.dump(jf, f)

            except (FileNotFoundError, FileExistsError):
                raise errors.SavingError(patch)
            # Save the newest version
            name_bin = os.path.join(pch, "{}_v1.bin".format(pch_id))
            with open(name_bin, "wb") as f:
                f.write(patch[0])
            save_metadata_json(patch[1], 1)
        else:
            """ Getting here indicates that the amount of files in the 
            directory was less than 2 (which would imply some form of 
            corruption occurred). 
            """
            raise errors.SavingError(patch[1]["title"])
Example #5
    def _patch_decompress(self, patch):
        """Method stub for decompressing files retrieved from the PS
        API. Currently only supports .zip and .rar files.

        patch: A tuple containing the downloaded file
               data and the patch metadata, comes from ps.download().
               patch[0] is raw binary data, while patch[1] is json data.

        raise: SavingError should the contents fail to save.
        raise: RenamingError should the contents fail to be renamed.
        """

        patch_id = str(patch[1]["id"])

        pch = os.path.join(self.back_path, "{}".format(patch_id))
        if not os.path.isdir(pch):
            os.mkdir(pch)

        if patch[1]["files"][0]["filename"].split(".")[-1] == "zip":
            # .zip files
            name_zip = os.path.join(pch, "{}.zip".format(patch_id))
            with open(name_zip, "wb") as f:
                f.write(patch[0])
            with zipfile.ZipFile(os.path.join(pch, "{}.zip".format(patch_id)),
                                 "r") as zip_obj:
                # Extract all the contents into the patch directory
                zip_obj.extractall(pch)
            # Ditch the zip
            os.remove(name_zip)
            to_delete = None
        elif (patch[1]["files"][0]["filename"].split(".")[-1] == "rar"
              and platform.system().lower() != "darwin"):
            # .rar files
            name_rar = os.path.join(pch, "{}.rar".format(patch_id))
            with open(name_rar, "wb") as f:
                f.write(patch[0])
            try:
                with rarfile.RarFile(
                        os.path.join(pch, "{}.rar".format(patch_id)),
                        "r") as rar_obj:
                    # Extract all the contents into the patch directory
                    rar_obj.extractall(pch)
                # Ditch the rar
                os.remove(name_rar)
                to_delete = None
            except rarfile.BadRarFile:
                print("File is not properly compressed in the RAR format")
                try:
                    shutil.rmtree(pch)
                except FileNotFoundError:
                    pass
                raise errors.SavingError(patch[1]["title"], 506)
            except rarfile.RarCannotExec:
                print("As .rar compression is a commercial product, you must "
                      "download external software to download this patch "
                      "(i.e. You need WinRAR installed for this to work).")
                try:
                    shutil.rmtree(pch)
                except FileNotFoundError:
                    pass
                raise errors.SavingError(patch[1]["title"], 501)
        else:
            # Unexpected file extension encountered.
            os.rmdir(pch)
            raise errors.SavingError(patch[1]["title"], 501)

        # Get to the uncompressed directory.
        for file in os.listdir(pch):
            if os.path.isdir(os.path.join(pch, file)) and len(
                    os.listdir(pch)) == 1:
                to_delete = os.path.join(pch, file)
                pch = os.path.join(pch, file)
            elif os.path.isdir(os.path.join(pch, file)):
                # Oh boy they compressed it with a directory and some
                # stray files because they hate us.
                shutil.rmtree(os.path.join(pch, file))

        if len(os.listdir(pch)) == 1:
            # The compressed file only contained 1 patch.
            for file in os.listdir(pch):
                name = file
                os.rename(
                    os.path.join(pch, file),
                    os.path.join(pch, "{}.bin".format(patch_id)),
                )
                patch[1]["files"][0]["filename"] = name
                self.save_metadata_json(patch[1])
        else:
            # The compressed file contained more than 1 patch.
            i = 0
            for file in os.listdir(pch):
                if file.split(".")[-1] == "bin":
                    i += 1
                    try:
                        name = file
                        # Rename the file to follow the conventional format
                        os.rename(
                            os.path.join(pch, file),
                            os.path.join(pch, "{}_v{}.bin".format(patch_id,
                                                                  i)),
                        )
                        patch[1]["files"][0]["filename"] = name
                        self.save_metadata_json(patch[1], i)
                    except (FileNotFoundError, FileExistsError):
                        raise errors.RenamingError(patch, 601)
                else:
                    # Remove any additional files.
                    # TODO make this better. Shouldn't just delete
                    #  additional files. Especially .txt, would want to
                    #  add that to the content attribute in the JSON.
                    os.remove(os.path.join(pch, file))
        if to_delete is not None:
            # We need to cleanup.
            for file in os.listdir(to_delete):
                correct_pch = os.path.join(self.back_path,
                                           "{}".format(str(patch_id)))
                shutil.copy(os.path.join(to_delete, file),
                            os.path.join(correct_pch))
            try:
                shutil.rmtree(to_delete)
            except FileNotFoundError:
                raise errors.RenamingError(patch)
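
A hedged sketch of the resulting layout; `library` is assumed to be an instance of the class above, save_metadata_json() is assumed to write the matching .json files, and the id is illustrative:

# Hypothetical direct call (in practice this is reached via save_to_backend).
patch = ps.download("124436")          # illustrative id -> (bytes, dict)
library._patch_decompress(patch)

# If the archive held two binaries, the patch directory now contains roughly:
#   <back_path>/124436/124436_v1.bin, 124436_v1.json
#   <back_path>/124436/124436_v2.bin, 124436_v2.json
# A single-binary archive yields 124436.bin / 124436.json instead.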
Example #6
    def save_to_backend(self, patch):
        """Attempts to save a simple binary patch and its metadata
        to the backend ZoiaLibraryApp directory. This method is meant
        to work for patches retrieved via the PS API. As such, it should
        only be called with the returned output from download() located
        in api.py. Other input will most likely cause a SavingError.

        For local patch importing, see import_to_backend().

        patch: A tuple containing the downloaded file
               data and the patch metadata, comes from ps.download(IDX).
               patch[0] is raw binary data, while patch[1] is json data.

        raise: SavingError should the patch fail to save.
        raise: RenamingError should the patch fail to be renamed.
        raise: JSONError should the patch metadata not be valid JSON.
        """

        # Don't try to save a file when we are missing necessary info.
        if (patch is None or patch[0] is None or patch[1] is None
                or self.back_path is None or not isinstance(patch[0], bytes)
                or not isinstance(patch[1], dict)):
            raise errors.SavingError(None)

        try:
            # Ensure that the data is in valid json format.
            json.dumps(patch[1])
        except ValueError:
            raise errors.JSONError(patch[1], 801)

        pch_id = str(patch[1]["id"])
        if len(pch_id) == 5:
            # This is an imported patch. Unfortunately, we need to make sure
            # that it's a unique binary by checking every patch currently
            # stored.
            for fld in os.listdir(self.back_path):
                if (os.path.isdir(os.path.join(self.back_path, fld))
                        and fld != "Banks" and fld != "Folders"
                        and fld != "sample_files" and fld != ".DS_Store"):
                    for files in os.listdir(os.path.join(self.back_path, fld)):
                        # Check every .bin file only.
                        if files.split(".")[-1] == "bin":
                            with open(os.path.join(self.back_path, fld, files),
                                      "rb") as f:
                                data = f.read()
                            if patch[0] == data:
                                with open(
                                        os.path.join(
                                            self.back_path,
                                            fld,
                                            files.split(".")[0] + ".json",
                                        ),
                                        "r",
                                ) as f:
                                    meta = json.load(f)
                                if "_v" in files:
                                    meta = meta["title"] + ": {}".format(
                                        patch[1]["files"][0]["filename"].split(
                                            ".")[0].split("_zoia_")
                                        [-1].replace("_", " "))
                                else:
                                    meta = meta["title"]
                                raise errors.SavingError(meta, 503)

        pch = os.path.join(self.back_path, "{}".format(pch_id))
        # Check to see if a directory needs to be made
        # (new patch, no version control needed yet).
        if not os.path.isdir(pch):
            os.mkdir(pch)
            if ("files" in patch[1] and
                    patch[1]["files"][0]["filename"].split(".")[-1] != "bin"):
                # If it isn't a straight bin additional work must be done.
                if patch[1]["files"][0]["filename"].split(".")[-1] == "py":
                    # We are not responsible for .py files.
                    shutil.rmtree(os.path.join(self.back_path, pch))
                    raise errors.SavingError(patch[1], 501)
                else:
                    # Try to decompress the patch.
                    self._patch_decompress(patch)
            # Make sure the files attribute exists.
            elif "files" in patch[1]:
                name_bin = os.path.join(pch, "{}.bin".format(pch_id))
                with open(name_bin, "wb") as f:
                    f.write(patch[0])
                self.save_metadata_json(patch[1])
            else:
                # No files attribute; nothing can be saved.
                raise errors.SavingError(patch[1], 502)
        else:
            # A directory already existed for this patch id, so
            # we need to check if this is a unique patch version
            # (otherwise there is no need to save it).

            # Case 1: Check if this is a compressed patch download.
            if ("files" in patch[1] and
                    patch[1]["files"][0]["filename"].split(".")[-1] != "bin"):
                # We need to check the individual binary files to see which,
                # if any, differ from the ones currently stored.

                # Figure out which file compression is being used.
                if patch[1]["files"][0]["filename"].split(".")[-1] == "zip":
                    # Create a temporary directory to store
                    # the extracted files.
                    os.mkdir(os.path.join(self.back_path, "temp"))
                    # Write the zip
                    zfile = os.path.join(self.back_path, "temp.zip")
                    with open(zfile, "wb") as zf:
                        zf.write(patch[0])
                    with zipfile.ZipFile(zfile, "r") as zipObj:
                        # Extract all the contents into the temporary
                        # directory.
                        zipObj.extractall(os.path.join(self.back_path, "temp"))
                    # Ditch the zip
                    os.remove(zfile)
                elif patch[1]["files"][0]["filename"].split(".")[-1] == "rar":
                    # Create a temporary directory to store
                    # the extracted files.
                    os.mkdir(os.path.join(self.back_path, "temp"))
                    # Write the rar
                    rfile = os.path.join(self.back_path, "temp.rar")
                    with open(rfile, "wb") as rf:
                        rf.write(patch[0])
                    try:
                        with rarfile.RarFile(rfile, "r") as rar_obj:
                            # Extract all the contents into the temporary
                            # directory.
                            rar_obj.extractall(
                                os.path.join(self.back_path, "temp"))
                    except rarfile.RarCannotExec:
                        # No WinRAR installed
                        os.remove(rfile)
                        raise errors.SavingError(patch[1]["title"])
                    # Ditch the rar
                    os.remove(rfile)
                else:
                    # If we get here we encountered a new compression algo.
                    # Logic needs to be added above to deal with it.
                    raise errors.SavingError(patch[1]["title"])
                # For each binary file, call the method again
                # and see if the data has been changed.
                diff = False
                for file in os.listdir(os.path.join(self.back_path, "temp")):
                    try:
                        # We only care about .bin files.
                        if file.split(".")[-1] == "bin":
                            with open(file, "rb") as bin_file:
                                raw_bin = bin_file.read()
                            self.save_to_backend((raw_bin, patch[1]))
                            diff = True
                    except (FileNotFoundError, errors.SavingError):
                        pass
                # Cleanup and finish.
                shutil.rmtree(os.path.join(self.back_path, "temp"))
                if not diff:
                    # No files changed, so we should raise a SavingError
                    raise errors.SavingError(patch[1]["title"], 503)
                return

            # If we get here, we are working with a .bin, so we
            # need to see if the binary is already saved.
            for file in os.listdir(os.path.join(pch)):
                if file.split(".")[-1] == "bin":
                    with open(os.path.join(pch, file), "rb") as f:
                        if f.read() == patch[0]:
                            # This exact binary is already saved onto the
                            # system.
                            raise errors.SavingError(patch[1]["title"], 503)

            # If we get here, we have a unique patch, so we need to find
            # out what version # to give it.

            # Case 2: Only one version of the patch existed previously.
            if len(os.listdir(os.path.join(self.back_path, pch))) == 2:
                name_bin = os.path.join(pch, "{}_v1.bin".format(pch_id))
                with open(name_bin, "wb") as f:
                    f.write(patch[0])
                self.save_metadata_json(patch[1], 1)
                # Add the version suffix to the patch that was previously
                # in the directory.
                try:
                    os.rename(
                        os.path.join(pch, "{}.bin".format(pch_id)),
                        os.path.join(pch, "{}_v2.bin".format(pch_id)),
                    )
                    os.rename(
                        os.path.join(pch, "{}.json".format(pch_id)),
                        os.path.join(pch, "{}_v2.json".format(pch_id)),
                    )
                except (FileNotFoundError, FileExistsError):
                    raise errors.RenamingError(patch, 601)
                # Update the revision number in the metadata.
                # (Used for sorting purposes).
                with open(os.path.join(pch, "{}_v2.json".format(pch_id)),
                          "r") as f:
                    jf = json.loads(f.read())
                jf["revision"] = 2
                with open(os.path.join(pch, "{}_v2.json".format(pch_id)),
                          "w") as f:
                    json.dump(jf, f)
            # Case 3: There were already multiple versions in the patch
            # directory.
            elif len(os.listdir(os.path.join(self.back_path, pch))) > 2:
                # Increment the version number for each file in the directory.
                try:
                    # for file in sorted(os.listdir(pch), key=len):
                    for file in sorted(sorted(os.listdir(pch), key=len),
                                       key=natural_key,
                                       reverse=True):
                        ver = int(file.split("v")[1].split(".")[0]) + 1
                        extension = file.split(".")[-1]
                        os.rename(
                            os.path.join(
                                pch,
                                "{}_v{}.{}".format(pch_id, str(ver - 1),
                                                   extension)),
                            os.path.join(
                                pch,
                                "{}_v{}.{}".format(pch_id, str(ver),
                                                   extension)),
                        )
                        # Update the revision number in each metadata file
                        if extension == "json":
                            with open(
                                    os.path.join(
                                        pch,
                                        "{}_v{}.json".format(pch_id,
                                                             str(ver))),
                                    "r",
                            ) as f:
                                jf = json.loads(f.read())

                            jf["revision"] = ver

                            with open(
                                    os.path.join(
                                        pch,
                                        "{}_v{}.json".format(pch_id,
                                                             str(ver))),
                                    "w",
                            ) as f:
                                json.dump(jf, f)

                except (FileNotFoundError, FileExistsError):
                    raise errors.SavingError(patch)
                # Save the newest version
                name_bin = os.path.join(pch, "{}_v1.bin".format(pch_id))
                with open(name_bin, "wb") as f:
                    f.write(patch[0])
                self.save_metadata_json(patch[1], 1)
            else:
                """Getting here indicates that the amount of files in the
                directory was less than 2 (which would imply some form of
                corruption occurred).
                """
                raise errors.SavingError(patch[1]["title"])