コード例 #1
0
def prefetch_files(args, semaphore=None):
    """Prefetch assets for every savegame listed in args.infile_names.

    Each file is handed to prefetch_file(); the first failure aborts
    the whole batch with exit status 1.
    """
    # Options that are identical for every file in the batch.
    fetch_opts = dict(
        dry_run=args.dry_run,
        refetch=args.refetch,
        ignore_content_type=args.ignore_content_type,
        gamedata_dir=args.gamedata_dir,
        timeout=args.timeout,
        semaphore=semaphore,
        user_agent=args.user_agent,
    )

    for infile_name in args.infile_names:
        try:
            prefetch_file(infile_name, **fetch_opts)
        except (FileNotFoundError, IllegalSavegameException, SystemExit):
            # Any per-file failure (including an explicit exit inside
            # prefetch_file) terminates the run.
            print_err("Aborting.")
            sys.exit(1)
コード例 #2
0
ファイル: __init__.py プロジェクト: Treader/tts-backup
def prefetch_files(args, semaphore=None):
    """Run prefetch_file() over each input savegame; exit 1 on failure."""
    for name in args.infile_names:
        try:
            prefetch_file(name,
                          dry_run=args.dry_run,
                          refetch=args.refetch,
                          ignore_content_type=args.ignore_content_type,
                          gamedata_dir=args.gamedata_dir,
                          timeout=args.timeout,
                          semaphore=semaphore)
        except FileNotFoundError:
            # A missing savegame aborts the entire batch.
            print_err("Aborting.")
            sys.exit(1)
コード例 #3
0
def prefetch_files(args, semaphore=None):
    """Prefetch assets for each savegame in args.infile_names.

    Stops the process (exit status 1) as soon as one file cannot
    be found.
    """
    infiles = iter(args.infile_names)
    for current in infiles:
        try:
            prefetch_file(
                current,
                dry_run=args.dry_run,
                refetch=args.refetch,
                ignore_content_type=args.ignore_content_type,
                gamedata_dir=args.gamedata_dir,
                timeout=args.timeout,
                semaphore=semaphore,
            )
        except FileNotFoundError:
            print_err("Aborting.")
            sys.exit(1)
コード例 #4
0
ファイル: __init__.py プロジェクト: dragongling/tts-backup
def prefetch_files(args, semaphore=None):
    """Prefetch assets for each savegame, routing traffic through TOR.

    Installs a default SOCKS5 proxy on localhost:9050 before any
    download starts, then exits with status 1 on the first failure.
    """
    print('Establishing TOR connection')
    SOCKS_PORT = 9050
    # Every subsequent socket connection goes through the local TOR proxy.
    socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, '127.0.0.1', SOCKS_PORT)
    print("Done")

    shared_opts = dict(
        dry_run=args.dry_run,
        refetch=args.refetch,
        ignore_content_type=args.ignore_content_type,
        gamedata_dir=args.gamedata_dir,
        timeout=args.timeout,
        semaphore=semaphore,
        user_agent=args.user_agent,
    )

    for infile_name in args.infile_names:
        try:
            prefetch_file(infile_name, **shared_opts)
        except (FileNotFoundError, IllegalSavegameException, SystemExit):
            print_err("Aborting.")
            sys.exit(1)
コード例 #5
0
ファイル: __init__.py プロジェクト: simon-r-white/tts-backup
def prefetch_file(filename,
                  refetch=False,
                  ignore_content_type=False,
                  dry_run=False,
                  gamedata_dir=GAMEDATA_DEFAULT,
                  timeout=5,
                  semaphore=None,
                  user_agent='TTS prefetch'):
    """Download every asset referenced by a TTS savegame into gamedata_dir.

    Args:
        filename: Path of the savegame JSON file to scan for URLs.
        refetch: Re-download assets that are already cached on disk.
        ignore_content_type: Accept responses whose MIME type does not
            match the expected type (prints a warning instead of exiting).
        dry_run: Print what would be fetched without downloading.
        gamedata_dir: Root directory of the TTS game data cache.
        timeout: Socket timeout in seconds for each download.
        semaphore: Optional abort signal; when it can be acquired
            (i.e. it was released by another thread), fetching stops.
        user_agent: Value of the HTTP User-Agent request header.

    Raises:
        FileNotFoundError: The savegame could not be read, or an asset
            could not be written to disk.
        ValueError: A URL's type cannot be determined from its path.
        SystemExit: A response's content type mismatched and
            ignore_content_type was not set.
    """
    try:
        save_name = get_save_name(filename)
    except Exception:
        # Best effort only; the save name is purely informational.
        save_name = "???"

    print("Prefetching assets for {file} ({save_name}).".format(
        file=filename, save_name=save_name))

    try:
        urls = urls_from_save(filename)
    except FileNotFoundError as error:
        # Fixed: the format string previously contained a literal
        # "(unknown)" although the filename was passed to .format().
        print_err("Error retrieving URLs from {filename}: {error}".format(
            error=error, filename=filename))
        raise

    done = set()
    for path, url in urls:

        # The semaphore is used as an abort flag: a successful
        # non-blocking acquire means an abort was requested.
        if semaphore and semaphore.acquire(blocking=False):
            print("Aborted.")
            return

        # Some mods contain malformed URLs missing a prefix. I’m not
        # sure how TTS deals with these. Let’s assume http for now.
        if not urllib.parse.urlparse(url).scheme:
            print_err("Warning: URL {url} does not specify a URL scheme. "
                      "Assuming http.".format(url=url))
            fetch_url = "http://" + url
        else:
            fetch_url = url

        # A mod might refer to the same URL multiple times.
        if url in done:
            continue

        # To prevent downloading unexpected content, we check the MIME
        # type in the response.
        if is_obj(path, url):
            content_expected = lambda mime: any(
                map(mime.startswith,
                    ('text/plain', 'application/binary',
                     'application/octet-stream', 'application/json',
                     'application/x-tgif')))
        elif is_assetbundle(path, url):
            content_expected = lambda mime: any(
                map(mime.startswith,
                    ('application/binary', 'application/octet-stream')))

        elif is_image(path, url):
            content_expected = lambda mime: mime in (
                'image/jpeg', 'image/jpg', 'image/png',
                'application/octet-stream', 'application/binary')
        else:
            errstr = "Do not know how to retrieve URL {url} at {path}.".format(
                url=url, path=path)
            raise ValueError(errstr)

        outfile_name = os.path.join(gamedata_dir, get_fs_path(path, url))

        # Check if the object is already cached.
        if os.path.isfile(outfile_name) and not refetch:
            done.add(url)
            continue

        print("{} ".format(url), end="", flush=True)

        if dry_run:
            print("dry run")
            done.add(url)
            continue

        headers = {'User-Agent': user_agent}
        request = urllib.request.Request(url=fetch_url, headers=headers)

        try:
            response = urllib.request.urlopen(request, timeout=timeout)

        except urllib.error.HTTPError as error:
            print_err("Error {code} ({reason})".format(code=error.code,
                                                       reason=error.reason))
            continue

        except urllib.error.URLError as error:
            print_err("Error ({reason})".format(reason=error.reason))
            continue

        except socket.timeout as error:
            print_err("Error ({reason})".format(reason=error))
            continue

        except http.client.HTTPException as error:
            print_err("HTTP error ({reason})".format(reason=error))
            continue

        # Only for informative purposes.
        length = response.getheader('Content-Length', 0)
        length_kb = "???"
        if length:
            with suppress(ValueError):
                length_kb = int(length) / 1000
        size_msg = "({length} kb): ".format(length=length_kb)
        print(size_msg, end="", flush=True)

        content_type = response.getheader('Content-Type', '').strip()
        is_expected = content_expected(content_type)
        if not (is_expected or ignore_content_type):
            print_err(
                "Error: Content type {type} does not match expected type. "
                "Aborting. Use --relax to ignore.".format(type=content_type))
            sys.exit(1)

        try:
            with open(outfile_name, 'wb') as outfile:
                outfile.write(response.read())

        except FileNotFoundError as error:
            print_err("Error writing object to disk: {}".format(error))
            raise

        # Don’t leave files with partial content lying around.
        except:
            with suppress(FileNotFoundError):
                os.remove(outfile_name)
            raise

        else:
            print("ok")

        if not is_expected:
            errmsg = ("Warning: Content type {} did not match "
                      "expected type.".format(content_type))
            print_err(errmsg)

        done.add(url)

    if dry_run:
        completion_msg = "Dry-run for {} completed."
    else:
        completion_msg = "Prefetching {} completed."
    print(completion_msg.format(filename))
コード例 #6
0
ファイル: __init__.py プロジェクト: Treader/tts-backup
def prefetch_file(filename,
                  refetch=False,
                  ignore_content_type=False,
                  dry_run=False,
                  gamedata_dir=GAMEDATA_DEFAULT,
                  timeout=5,
                  semaphore=None):
    """Download every asset referenced by a TTS savegame into gamedata_dir.

    Args:
        filename: Path of the savegame JSON file to scan for URLs.
        refetch: Re-download assets that are already cached on disk.
        ignore_content_type: Accept responses whose MIME type does not
            match the expected type (prints a warning instead of exiting).
        dry_run: Print what would be fetched without downloading.
        gamedata_dir: Root directory of the TTS game data cache.
        timeout: Socket timeout in seconds for each download.
        semaphore: Optional abort signal; when it can be acquired
            (i.e. it was released by another thread), fetching stops.

    Raises:
        FileNotFoundError: The savegame could not be read, or an asset
            could not be written to disk.
        ValueError: A URL's type cannot be determined from its path.
        SystemExit: A response's content type mismatched and
            ignore_content_type was not set.
    """
    print("Prefetching assets for {file}.".format(file=filename))

    try:
        urls = urls_from_save(filename)
    except FileNotFoundError as error:
        # Fixed: the format string previously contained a literal
        # "(unknown)" although the filename was passed to .format().
        print_err("Error retrieving URLs from {filename}: {error}".format(
            error=error,
            filename=filename
        ))
        raise

    done = set()
    for path, url in urls:

        # The semaphore is used as an abort flag: a successful
        # non-blocking acquire means an abort was requested.
        if semaphore and semaphore.acquire(blocking=False):
            print("Aborted.")
            return

        # Some mods contain malformed URLs missing a prefix. I’m not
        # sure how TTS deals with these. Let’s assume http for now.
        if not urllib.parse.urlparse(url).scheme:
            print_err("Warning: URL {url} does not specify a URL scheme. "
                      "Assuming http.".format(url=url))
            fetch_url = "http://" + url
        else:
            fetch_url = url

        # A mod might refer to the same URL multiple times.
        if url in done:
            continue

        # To prevent downloading unexpected content, we check the MIME
        # type in the response.
        if is_obj(path, url):
            content_expected = lambda mime: any(map(mime.startswith,
                                                    ('text/plain',
                                                     'application/json')))
        elif is_image(path, url):
            content_expected = lambda mime: mime in ('image/jpeg',
                                                     'image/png')
        else:
            errstr = "Do not know how to retrieve URL {url} at {path}.".format(
                url=url,
                path=path
            )
            raise ValueError(errstr)

        outfile_name = os.path.join(gamedata_dir, get_fs_path(path, url))

        # Check if the object is already cached.
        if os.path.isfile(outfile_name) and not refetch:
            done.add(url)
            continue

        print("{} ".format(url), end="")

        if dry_run:
            print("dry run")
            done.add(url)
            continue

        try:
            response = urllib.request.urlopen(fetch_url, timeout=timeout)

        except urllib.error.HTTPError as error:
            print_err("Error {code} ({reason})".format(
                code=error.code,
                reason=error.reason)
            )
            continue

        except urllib.error.URLError as error:
            print_err("Error ({reason})".format(reason=error.reason))
            continue

        # Only for informative purposes.
        length = response.getheader('Content-Length', 0)
        length_kb = "???"
        if length:
            with suppress(ValueError):
                length_kb = int(length) / 1000
        size_msg = "({length} kb): ".format(length=length_kb)
        print(size_msg, end="", flush=True)

        # Fixed: default to '' so a response without a Content-Type
        # header no longer crashes on None.strip().
        content_type = response.getheader('Content-Type', '').strip()
        is_expected = content_expected(content_type)
        if not (is_expected or ignore_content_type):
            print_err(
                "Error: Content type {type} does not match expected type. "
                "Aborting. Use --relax to ignore.".format(type=content_type)
            )
            sys.exit(1)

        try:
            with open(outfile_name, 'wb') as outfile:
                outfile.write(response.read())

        except FileNotFoundError as error:
            print_err("Error writing object to disk: {}".format(error))
            raise

        # Don’t leave files with partial content lying around.
        except:
            with suppress(FileNotFoundError):
                os.remove(outfile_name)
            raise

        else:
            print("ok")

        if not is_expected:
            errmsg = ("Warning: Content type {} did not match "
                      "expected type.".format(content_type))
            print_err(errmsg)

        done.add(url)

    if dry_run:
        completion_msg = "Dry-run for {} completed."
    else:
        completion_msg = "Prefetching {} completed."
    print(completion_msg.format(filename))
コード例 #7
0
ファイル: __init__.py プロジェクト: simon-r-white/tts-backup
def backup_json(args):
    """Back up a TTS savegame and all of its cached assets into a Zip file.

    Reads the asset URLs from args.infile_name, locates each cached file
    under args.gamedata_dir, and writes them plus the savegame itself
    into args.outfile_name (derived from the input name if unset).

    Exits with status 1 if the savegame, the gamedata directory, the
    output archive, or any referenced asset cannot be accessed.
    """
    try:
        urls = urls_from_save(args.infile_name)
    except FileNotFoundError as error:
        errmsg = "Could not read URLs from '{file}': {error}".format(
            file=args.infile_name,
            error=error
        )
        print_err(errmsg)
        sys.exit(1)

    # Change working dir, since get_fs_path gives us a relative path.
    orig_path = os.getcwd()
    try:
        os.chdir(args.gamedata_dir)
    except FileNotFoundError as error:
        errmsg = "Could not open gamedata directory '{dir}': {error}".format(
            dir=args.gamedata_dir,
            error=error
        )
        print_err(errmsg)
        sys.exit(1)

    # We also need to correct the destination path now.
    if args.outfile_name:
        args.outfile_name = os.path.join(orig_path, args.outfile_name)
    else:
        # Default output name: input basename minus ".json", plus ".zip".
        outfile_basename = re.sub(
            r"\.json$", "",
            os.path.basename(args.infile_name)
        )
        args.outfile_name = os.path.join(orig_path, outfile_basename) + ".zip"

    try:
        zipfile = ZipFile(args.outfile_name, 'w',
                          dry_run=args.dry_run,
                          ignore_missing=args.ignore_missing)
    except FileNotFoundError as error:
        errmsg = "Could not write to Zip archive '{outfile}': {error}".format(
            outfile=args.outfile_name,
            error=error
        )
        print_err(errmsg)
        sys.exit(1)

    with zipfile as outfile:

        for path, url in urls:

            filename = get_fs_path(path, url)
            try:
                outfile.write(filename)

            except FileNotFoundError as error:
                # Fixed: the format string previously contained a literal
                # "(unknown)" although the filename was passed to .format().
                errmsg = "Could not write {filename} to Zip ({error}).".format(
                    filename=filename,
                    error=error
                )
                print_err(errmsg, "Aborting.", sep="\n", end=" ")
                if not args.dry_run:
                    print_err("Zip file is incomplete.")
                else:
                    print_err()
                sys.exit(1)

        # Finally, include the save file itself.
        orig_json = os.path.join(orig_path, args.infile_name)
        outfile.write(orig_json, os.path.basename(args.infile_name))

        # Store some metadata.
        outfile.put_metadata(comment=args.comment)

    if args.dry_run:
        print("Dry run for {file} completed.".format(file=args.infile_name))
    else:
        print("Backed-up contents for {file} found in {outfile}.".format(
            file=args.infile_name,
            outfile=args.outfile_name
        ))