Example #1
    def check_for_updates(self):
        """ Upon startup, automatically retrieve the latest version of
        patches from PS, should any that have been previously downloaded
        are updated.

        This method checks the updated_at attribute of each downloaded
        patch; should this differ from what is returned by PS, an
        attempt is made to save the new patch. If the binary file is
        determined to be identical to the one stored within the
        backend, the save is aborted, as there was no update to the
        patch itself. Otherwise, a new version of the patch is added
        and saved within the patch directory.

        return: A tuple containing the number of patches that were
                updated as an int as the first element, and a list of
                the titles of the updated patches as strings as the
                second element.
        """

        meta = []

        for patch in os.listdir(self.back_path):
            # Only check for updates for patches hosted on PS
            # (denoted via the 6-digit ID numbers).
            if os.path.isdir(os.path.join(self.back_path, patch)) \
                    and len(patch) > 5 \
                    and len(os.listdir(
                        os.path.join(self.back_path, patch))) > 2 \
                    and patch != "Banks" and patch != ".DS_Store":
                # Multiple versions, only need the latest.
                with open(os.path.join(self.back_path, patch,
                                       "{}_v1.json".format(patch)), "r") as f:
                    temp = json.loads(f.read())
            elif os.path.isdir(os.path.join(self.back_path, patch)) \
                    and len(patch) > 5:
                # Just a single patch in the directory, easy.
                with open(os.path.join(self.back_path, patch,
                                       "{}.json".format(patch)), "r") as f:
                    temp = json.loads(f.read())
            else:
                continue
            # Only need the id and updated_at for comparison purposes.
            meta_small = {
                "id": temp["id"],
                "updated_at": temp["updated_at"]
            }
            meta.append(meta_small)

        # Get a list of binary/metadata for all files that have been updated
        # on PatchStorage.
        ps = api.PatchStorage()
        pch_list = ps.get_potential_updates(meta)

        # Try to save the new binaries to the backend.
        save = PatchSave()
        pchs = []
        for patch in pch_list:
            try:
                save.save_to_backend(patch[0])
                pchs.append(patch[1]["title"])
            except errors.SavingError:
                # Same binary, but the patch notes are different; update
                # those. The metadata lives in the patch's .json file; a
                # patch with multiple versions stores it as <id>_v1.json.
                idx = str(patch[1]["id"])
                meta_path = os.path.join(self.back_path, idx,
                                         "{}.json".format(idx))
                if not os.path.isfile(meta_path):
                    meta_path = os.path.join(self.back_path, idx,
                                             "{}_v1.json".format(idx))
                with open(meta_path, "w") as f:
                    f.write(json.dumps(patch[1]))
                pchs.append(patch[1]["title"])

        # Pass the number of updates and titles of patches updated.
        return len(pch_list), pchs
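
A minimal usage sketch, consuming the (count, titles) tuple described in the
docstring. The owning class name PatchUpdate is assumed here purely for
illustration; it is not shown in the example above.

updater = PatchUpdate()  # hypothetical owner of check_for_updates()
count, titles = updater.check_for_updates()
print("{} patch(es) updated: {}".format(count, ", ".join(titles)))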
Example #2
import io
import unittest
import zipfile

from zoia_lib.backend import api, patch_binary

ps = api.PatchStorage()


class FormatTest(unittest.TestCase):
    def test_bin_formatter(self):
        """Extract patch information from binary file"""

        f = ps.download("105634")
        self.assertTrue(
            isinstance(f[0], bytes),
            "Returned tuple did not contain binary data in the first element.")
        self.assertTrue(
            isinstance(f[1], dict),
            "Returned tuple did not contain json data in the second element.")

        size, name, n_mod = patch_binary.formatter(f[0])
        self.assertTrue(size == 908, "Binary size not returning as expected")
        self.assertTrue(name == 'Am I Conscious',
                        "Binary name not returning as expected")
        self.assertTrue(n_mod == 31, "Binary n_mod not returning as expected")

    def test_zip_formatter(self):
        """Extract patch information from compressed drive"""

        f = ps.download("124436")
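        # The rest of this test is truncated in the source above; the lines
        # below are a minimal, assumed continuation that relies only on the
        # io and zipfile imports at the top of the module.
        self.assertTrue(
            isinstance(f[0], bytes),
            "Returned tuple did not contain binary data in the first element.")
        self.assertTrue(
            zipfile.is_zipfile(io.BytesIO(f[0])),
            "Returned binary data was not a valid zip archive.")
        with zipfile.ZipFile(io.BytesIO(f[0])) as zf:
            self.assertTrue(len(zf.namelist()) > 0,
                            "Zip archive did not contain any files.")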
Example #3
def check_for_updates():
    """ Upon startup, automatically retrieve the latest version of
    patches from PS, should any that have been previously downloaded
    are updated.

    This method will check the updated_at attribute of each downloaded
    patch, should this differ compared to what is returned by PS, a
    new patch will attempt to be saved. If the binary file is determined
    to be identical to the one stored within the backend, the saving is
    aborted at there was no update to the patch itself. Otherwise, a new
    version of the patch is added and saved within the patch directory.
    """

    global backend_path
    if backend_path is None:
        backend_path = determine_backend_path()

    meta = []

    for patch in os.listdir(backend_path):
        # Only check for updates for patches hosted on PS
        # (denoted via the 6-digit ID numbers).
        if os.path.isdir(os.path.join(backend_path, patch)) \
                and len(patch) > 5 \
                and len(os.listdir(os.path.join(backend_path, patch))) > 2 \
                and patch != "Banks" and patch != ".DS_Store":
            # Multiple versions, only need the latest.
            with open(
                    os.path.join(backend_path, patch,
                                 "{}_v1.json".format(patch)), "r") as f:
                temp = json.loads(f.read())
            meta_small = {"id": temp["id"], "updated_at": temp["updated_at"]}
            meta.append(meta_small)
        elif os.path.isdir(os.path.join(backend_path, patch)) \
                and len(patch) > 5:
            with open(
                    os.path.join(backend_path, patch, "{}.json".format(patch)),
                    "r") as f:
                temp = json.loads(f.read())
            meta_small = {"id": temp["id"], "updated_at": temp["updated_at"]}
            meta.append(meta_small)

    # Get a list of binary/metadata for all files that have been updated
    # on PatchStorage.
    ps = api.PatchStorage()
    pch_list = ps.get_potential_updates(meta)

    # Try to save the new binaries to the backend.
    updates = 0
    for patch in pch_list:
        try:
            save_to_backend(patch[0])
            updates += 1
        except errors.SavingError:
            # If we fail to save the binary, at least update the metadata.
            # The metadata lives in the patch's .json file; a patch with
            # multiple versions stores it as <id>_v1.json.
            idx = str(patch[1]["id"])
            meta_path = os.path.join(backend_path, idx,
                                     "{}.json".format(idx))
            if not os.path.isfile(meta_path):
                meta_path = os.path.join(backend_path, idx,
                                         "{}_v1.json".format(idx))
            with open(meta_path, "w") as f:
                f.write(json.dumps(patch[1]))
            updates += 1

    return updates
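
A short usage sketch for this module-level variant, which returns only the
number of patches that were updated:

updates = check_for_updates()
if updates > 0:
    print("{} patch(es) were updated on startup.".format(updates))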