Example 1
# SynchronizerContext, openretro_url_prefix and Downloader are provided by
# the surrounding project; they are used here as-is.
def open_locker_uri(uri):
    # Strip the "locker://" scheme prefix (9 characters); what remains
    # should be a 40-character SHA-1 hex digest.
    sha1 = uri[9:]
    assert len(sha1) == 40
    context = SynchronizerContext()
    url = "{0}/api/locker/{1}".format(openretro_url_prefix(), sha1)
    # Download (or reuse a cached copy of) the locker file, authenticating
    # with the synchronizer credentials.
    path = Downloader.cache_file_from_url(url,
                                          auth=(context.username,
                                                context.password))
    return path
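
A minimal standalone sketch of the URI parsing above, assuming locker URIs take the form "locker://<40-character SHA-1>" (the nine-character slice matches a "locker://" prefix); parse_locker_uri is an illustrative name, not part of the project.

import re

def parse_locker_uri(uri):
    # Illustrative helper: split a "locker://<sha1>" URI into its SHA-1
    # part and validate it, mirroring the slice-and-assert above.
    prefix = "locker://"
    if not uri.startswith(prefix):
        raise ValueError("not a locker URI: {0!r}".format(uri))
    sha1 = uri[len(prefix):]
    if not re.fullmatch(r"[0-9a-fA-F]{40}", sha1):
        raise ValueError("expected a 40-character SHA-1 hex digest")
    return sha1

print(parse_locker_uri("locker://" + "0" * 40))
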
# Classmethod excerpted from its enclosing class; the @classmethod decorator
# and the rest of the class are not shown in this example.
def open_url(cls, url):
    original_url = url
    hash_part = ""
    # Split off a "#fragment" part, if present; only the part before the
    # fragment is actually downloaded.
    parts = url.split("#", 1)
    if len(parts) > 1:
        url = parts[0]
        hash_part = "#" + parts[1]
    # With download=False this only checks whether the URL is already cached.
    if not Downloader.cache_file_from_url(url, download=False):
        # The license-code confirmation step below is currently disabled.
        # license_code = cls.get_license_code_for_url(original_url)
        # license_status = {"accepted": False, "done": False}

        # def show_license_code():
        #     try:
        #         try:
        #             license_status["accepted"] = cls.show_license_code(
        #                 license_code
        #             )
        #         except Exception:
        #             traceback.print_exc()
        #     finally:
        #         license_status["done"] = True

        # if license_code:
        #     print("URL", url, "has license code", license_code)
        #     # FIXME: remove direct dependency on fsui
        #     import fsui as fsui

        #     fsui.call_after(show_license_code)
        #     while not license_status["done"]:
        #         time.sleep(0.1)
        #     if not license_status["accepted"]:
        #         # FIXME: custom exception here
        #         raise Exception(
        #             'Usage terms "{0}" was not '
        #             "accepted".format(license_code)
        #         )
        pass
    path = Downloader.cache_file_from_url(url)
    # Re-attach the fragment so the caller gets back the full reference.
    return path + hash_part
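
A self-contained sketch of the fragment handling above; split_fragment and the cache path below are illustrative stand-ins, not project code.

def split_fragment(url):
    # Separate "scheme://host/file.zip#entry" into the downloadable URL
    # and the "#entry" fragment, mirroring the split in open_url.
    parts = url.split("#", 1)
    if len(parts) > 1:
        return parts[0], "#" + parts[1]
    return url, ""

url, fragment = split_fragment("http://example.com/game.zip#Disk1.adf")
cache_path = "/tmp/cache/abcd1234"  # stand-in for the cached file path
print(cache_path + fragment)
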
import os

# Instance method excerpted from its enclosing class; Archive and Downloader
# are provided by the surrounding project.
def download_game_file_archive(self, url):
    print("\ndownload_game_file_archive", url)
    archive_path = Downloader.cache_file_from_url(url)
    archive = Archive(archive_path)
    archive_files = archive.list_files()
    print(archive_files)
    for name in archive_files:
        print(name)
        # Read each archived entry and store its data in the local cache.
        ifs = archive.open(name)
        data = ifs.read()
        Downloader.cache_data(data)
    if len(archive_files) == 0:
        # No entries listed: the download might not be an archive at all,
        # so cache the file's raw contents instead.
        with open(archive_path, "rb") as f:
            data = f.read()
        Downloader.cache_data(data)
    # The downloaded archive is no longer needed now that all of its files
    # have been extracted and cached.
    os.remove(archive_path)
    print("\n")