Code example #1
File: main.py  Project: pacman-ghost/vasl-templates
 def run_service():  #pylint: disable=missing-docstring
     import concurrent.futures
     import grpc
     server = grpc.server(
         concurrent.futures.ThreadPoolExecutor(max_workers=1))
     from vasl_templates.webapp.tests.proto.generated.control_tests_pb2_grpc \
         import add_ControlTestsServicer_to_server
     from vasl_templates.webapp.tests.control_tests_servicer import ControlTestsServicer  #pylint: disable=cyclic-import
     servicer = ControlTestsServicer(app)
     add_ControlTestsServicer_to_server(servicer, server)
     port_no = parse_int(get_port(), -1)  # nb: have to get this again?!
     if port_no <= 0:
         # NOTE: Requesting port 0 tells grpc to use any free port, which is usually OK, unless
         # we're running inside a Docker container, in which case it needs to be pre-defined,
         # so that the port can be mapped to an external port when the container is started.
         port_no = 0
     port_no = server.add_insecure_port("[::]:{}".format(port_no))
     logging.getLogger("control_tests").debug(
         "Started the gRPC test control service: port=%s", str(port_no))
     server.start()
     global _control_tests_port_no
     _control_tests_port_no = port_no
     started_event.set()
     shutdown_event.wait()
     server.stop(None)
     server.wait_for_termination()
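
Every example on this page funnels configuration values and request parameters through a parse_int(value, default) helper. Its definition isn't included in these excerpts; the sketch below is an assumption about what such a helper typically looks like, not the project's actual code.

def parse_int(val, default=None):
    """Parse an integer, falling back to a default if the value is missing or malformed."""
    try:
        return int(val)
    except (TypeError, ValueError):
        return default
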
Code example #2
 def get_win_score(key):
     """Get a player's win percentage."""
     # NOTE: "playings" and "nGames" come from the enclosing function's scope
     # (this is the nested helper shown in context in code example #9 below).
     nWins = parse_int(playings.get(key + "_wins"), -1)
     if nWins < 0:
         return None
     score = 100 * nWins / nGames
     return int(score + 0.5)
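
A quick worked example of the rounding at the end (hypothetical numbers): with 3 recorded games and 2 wins, the raw score is 66.67, and adding 0.5 before truncating with int() rounds it to the nearest whole percent.

nGames, nWins = 3, 2           # hypothetical values from the playings data
score = 100 * nWins / nGames   # 66.666...
print(int(score + 0.5))        # 67
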
Code example #3
 def save_image(img):  #pylint: disable=missing-docstring
     quality = parse_int(
         app.config.get("ASA_SCREENSHOT_QUALITY"), 50)
     return get_image_data(img,
                           format="JPEG",
                           quality=quality,
                           optimize=True,
                           subsampling=0)
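
get_image_data() isn't shown in these excerpts. A helper with this signature is usually a thin wrapper around Pillow's Image.save() into an in-memory buffer; the sketch below is an assumption about its shape, not the project's actual implementation.

import io

def get_image_data(img, **save_args):
    """Serialize a PIL image to bytes, passing the save options straight through."""
    buf = io.BytesIO()
    img.save(buf, **save_args)   # e.g. format="JPEG", quality=50, optimize=True, subsampling=0
    return buf.getvalue()
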
Code example #4
def test_asa_upload(scenario_id):
    """A test endpoint that can be used to simulate ASL Scenario Archive uploads."""

    # initialize
    logger = logging.getLogger("test_asa_upload")

    def save_file(key, asa_upload, convert=None):
        """Save a file that has been uploaded to us."""
        f = request.files.get(key)
        if not f:
            logger.info("- %s: not present.", key)
            return
        data = f.read()
        asa_upload[key] = convert(data) if convert else data
        logger.info("- %s: %s (%s) ; #bytes=%d", key, f.filename,
                    f.content_type, len(data))
        fname = app.config.get("SAVE_ASA_UPLOAD_" + key.upper())
        if fname:
            with open(fname, "wb") as fp:
                fp.write(data)
            logger.info("  - Saved to: %s", fname)

    def make_resp(fname):
        """Generate a response."""
        dname = os.path.join(os.path.dirname(__file__),
                             "tests/fixtures/asa-responses/")
        fname = os.path.join(dname, "{}.json".format(fname))
        with open(fname, "r", encoding="utf-8") as fp:
            return jsonify(json.load(fp))

    # simulate a slow response
    delay = parse_int(app.config.get("ASA_UPLOAD_DELAY"), 0)
    if delay > 0:
        time.sleep(delay)

    # parse the request
    user_name = request.args.get("user")
    if not user_name:
        return make_resp("missing-user-name")
    api_token = request.args.get("token")
    if not api_token:
        return make_resp("missing-token")
    if api_token == "incorrect-token":
        return make_resp("incorrect-token")

    # process the request
    logger.info("ASA upload: id=%s ; user=\"%s\" ; token=\"%s\"", scenario_id,
                user_name, api_token)
    asa_upload = {}
    save_file("vasl_setup", asa_upload)
    save_file("vt_setup", asa_upload, lambda v: json.loads(v))  #pylint: disable=unnecessary-lambda
    save_file("screenshot", asa_upload)
    if asa_upload:
        asa_upload.update({"user": user_name, "token": api_token})
        global _last_asa_upload
        _last_asa_upload = asa_upload

    return make_resp("ok")
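
To exercise an endpoint like this from a test, you can POST multipart form data together with the expected query parameters. The URL and file contents below are assumptions for illustration; the actual route is registered elsewhere in the webapp.

import requests

url = "http://localhost:5010/test-asa-upload/1234"   # hypothetical host/route
files = {
    "vasl_setup": ("scenario.vsav", b"...VSAV bytes..."),
    "vt_setup": ("scenario.json", b'{"SCENARIO_NAME": "Test"}'),
}
resp = requests.post(url, params={"user": "joe", "token": "secret"}, files=files)
print(resp.json())   # one of the canned responses from tests/fixtures/asa-responses/
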
Code example #5
def _make_brief_overview(content):
    """Truncate the scenario overview."""
    if not content:
        return None
    threshold = parse_int(app.config.get("BRIEF_CONTENT_THRESHOLD"), 200)
    if threshold <= 0 or len(content) < threshold:
        return None
    regex = re.compile("[.?!]+")
    mo = regex.search(content, threshold)
    if not mo:
        return content[:threshold] + "..."
    val = content[:mo.start() + len(mo.group())]
    if val == content:
        return None
    return val
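
A self-contained illustration of the truncation logic (with the threshold fixed at 30 here, instead of reading BRIEF_CONTENT_THRESHOLD): the regex search starts at the threshold, so the overview is cut at the first sentence boundary found at or after that position.

import re

content = (
    "The attackers advance. The defenders dig in around the village. "
    "Reinforcements arrive later."
)
threshold = 30   # stand-in for the BRIEF_CONTENT_THRESHOLD setting
mo = re.compile("[.?!]+").search(content, threshold)
brief = content[:mo.start() + len(mo.group())] if mo else content[:threshold] + "..."
print(brief)   # -> "The attackers advance. The defenders dig in around the village."
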
Code example #6
    def download_files():
        """Download fresh copies of each file."""
        #pylint: disable=protected-access

        # loop forever (until the program exits)
        while True:

            # process each DownloadedFile
            # NOTE: The DownloadedFile registry is built once at startup, so we don't need to lock it.
            for df in _registry:

                # get the download URL
                url = app.config.get("{}_DOWNLOAD_URL".format(df.key.upper()),
                                     df.url)
                if os.path.isfile(url):
                    # read the data directly from a file (for debugging porpoises)
                    _logger.info(
                        "Loading the %s data directly from a file: %s", df.key,
                        url)
                    df._set_data(url)
                    continue

                # check if we have a cached copy of the file
                ttl = parse_int(
                    app.config.get("{}_DOWNLOAD_CACHE_TTL".format(df.key),
                                   df.ttl), 24)
                if ttl <= 0:
                    _logger.info("Download of the %s file has been disabled.",
                                 df.key)
                    continue
                ttl *= 60 * 60
                if os.path.isfile(df.cache_fname):
                    # yup - check how long ago it was downloaded
                    mtime = os.path.getmtime(df.cache_fname)
                    age = int(time.time() - mtime)
                    _logger.debug(
                        "Checking the cached %s file: age=%s, ttl=%s (mtime=%s)",
                        df.key, datetime.timedelta(seconds=age),
                        datetime.timedelta(seconds=ttl),
                        time.strftime("%Y-%m-%d %H:%M:%S",
                                      time.localtime(mtime)))
                    if age < ttl:
                        continue

                # download the file
                if app.config.get("DISABLE_DOWNLOADED_FILES"):
                    _logger.info("Download disabled (%s): %s", df.key, url)
                    continue
                _logger.info("Downloading the %s file: %s", df.key, url)
                try:
                    headers = {"Accept-Encoding": "gzip"}
                    if url in _etags:
                        _logger.debug("- If-None-Match = %s", _etags[url])
                        headers["If-None-Match"] = _etags[url]
                    req = urllib.request.Request(url, headers=headers)
                    with urllib.request.urlopen(req) as resp:
                        resp_data = resp.read()
                        if resp.headers.get("Content-Encoding") == "gzip":
                            resp_data = gzip.decompress(resp_data)
                        data = resp_data.decode("utf-8")
                        etag = resp.headers.get("ETag")
                    _logger.info("Downloaded the %s file OK: %d bytes", df.key,
                                 len(data))
                    if etag:
                        _logger.debug("- Got etag: %s", etag)
                        _etags[url] = etag
                except Exception as ex:  #pylint: disable=broad-except
                    if isinstance(ex,
                                  urllib.error.HTTPError) and ex.code == 304:  #pylint: disable=no-member
                        _logger.info("Download %s file: 304 Not Modified",
                                     df.key)
                        if os.path.isfile(df.cache_fname):
                            # NOTE: We touch the file so that the TTL check will work the next time around.
                            os.utime(df.cache_fname)
                        continue
                    msg = str(getattr(ex, "reason", None) or ex)
                    _logger.error("Can't download the %s file: %s", df.key,
                                  msg)
                    df.error_msg = msg
                    continue

                # install the new data
                df._set_data(data)

                # save a cached copy of the data
                _logger.debug("Saving a cached copy of the %s file: %s",
                              df.key, df.cache_fname)
                with open(df.cache_fname, "w", encoding="utf-8") as fp:
                    fp.write(data)

            # sleep before looping back and doing it all again
            delay = parse_int(app.config.get("DOWNLOAD_CHECK_INTERVAL"), 2)
            time.sleep(delay * 60 * 60)
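
The startup code isn't part of this excerpt; a polling loop like download_files() is typically launched once as a background daemon thread, along the lines of the sketch below (an assumption about how the caller wires it up).

import threading

# daemon=True means the thread won't block process shutdown
threading.Thread(target=download_files, daemon=True).start()
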
Code example #7
File: main.py  Project: pacman-ghost/vasl-templates
def get_program_info():
    """Get the program info."""

    # NOTE: We can't convert to local time, since the time zone inside a Docker container
    # may not be the same as on the host (or where the client is). It's possible to make it so,
    # but messy, so to keep things simple, we get the client to pass in the timezone offset.
    tz_offset = parse_int(request.args.get("tz_offset", 0))

    def to_localtime(tstamp):
        """Convert a timestamp to local time."""
        return tstamp + timedelta(minutes=tz_offset)

    # set the basic details
    params = {
        "APP_VERSION": vasl_templates.webapp.config.constants.APP_VERSION,
        "VASSAL_VERSION": VassalShim.get_version()
    }
    if globvars.vasl_mod:
        params["VASL_VERSION"] = globvars.vasl_mod.vasl_version
    for key in [
            "VASSAL_DIR", "VASL_MOD", "VASL_EXTNS_DIR", "BOARDS_DIR",
            "JAVA_PATH", "WEBDRIVER_PATH", "CHAPTER_H_NOTES_DIR",
            "USER_FILES_DIR"
    ]:
        params[key] = app.config.get(key)

    def parse_timestamp(val):
        """Parse a timestamp."""
        if not val:
            return None
        # FUDGE! Adjust the timezone offset from "HH:MM" to "HHMM".
        val = re.sub(r"(\d{2}):(\d{2})$", r"\1\2", val)
        try:
            val = datetime.strptime(val, "%Y-%m-%d %H:%M:%S %z")
        except ValueError:
            return None
        return to_localtime(val)

    def replace_mountpoint(key):
        """Replace a mount point with its corresponding target (on the host)."""
        params[key] = os.environ.get("{}_TARGET".format(key))

    # check if we are running the desktop application
    if IS_FROZEN:
        # yup - return information about the build
        build_info = get_build_info()
        if build_info:
            params["BUILD_TIMESTAMP"] = datetime.strftime(
                to_localtime(datetime.utcfromtimestamp(
                    build_info["timestamp"])), "%H:%M (%d %b %Y)")
            params["BUILD_GIT_INFO"] = build_info["git_info"]

    # check if we are running inside a Docker container
    if app.config.get("IS_CONTAINER"):
        # yup - return related information
        params["BUILD_GIT_INFO"] = os.environ.get("BUILD_GIT_INFO")
        params["DOCKER_IMAGE_NAME"] = os.environ.get("DOCKER_IMAGE_NAME")
        params["DOCKER_IMAGE_TIMESTAMP"] = datetime.strftime(
            parse_timestamp(os.environ.get("DOCKER_IMAGE_TIMESTAMP")),
            "%H:%M %d %b %Y")
        params["DOCKER_CONTAINER_NAME"] = os.environ.get(
            "DOCKER_CONTAINER_NAME")
        with open("/proc/self/cgroup", "r", encoding="utf-8") as fp:
            buf = fp.read()
        mo = re.search(r"^\d+:name=.+/docker/([0-9a-f]{12})", buf,
                       re.MULTILINE)
        # NOTE: Reading cgroup stopped working when we upgraded to Fedora 33, but still works
        # on Centos 8 (but reading the host name gives the physical host's name under Centos :-/).
        # NOTE: os.uname() is not available on Windows. This isn't really a problem (since
        # we're running on Linux inside a container), but pylint is complaining :-/
        params["DOCKER_CONTAINER_ID"] = mo.group(
            1) if mo else socket.gethostname()
        # replace Docker mount points with their targets on the host
        for key in [
                "VASSAL_DIR", "VASL_MOD", "VASL_EXTNS_DIR", "BOARDS_DIR",
                "CHAPTER_H_NOTES_DIR", "USER_FILES_DIR"
        ]:
            replace_mountpoint(key)

    # check the scenario index downloads
    def check_df(df):  #pylint: disable=missing-docstring
        with df:
            if not os.path.isfile(df.cache_fname):
                return
            mtime = datetime.utcfromtimestamp(os.path.getmtime(df.cache_fname))
            key = "LAST_{}_SCENARIO_INDEX_DOWNLOAD_TIME".format(df.key)
            params[key] = datetime.strftime(to_localtime(mtime),
                                            "%H:%M (%d %b %Y)")
            generated_at = parse_timestamp(getattr(df, "generated_at", None))
            if generated_at:
                key = "LAST_{}_SCENARIO_INDEX_GENERATED_AT".format(df.key)
                params[key] = datetime.strftime(generated_at, "%H:%M %d %b %Y")

    from vasl_templates.webapp.scenarios import _asa_scenarios, _roar_scenarios
    check_df(_asa_scenarios)
    check_df(_roar_scenarios)

    return render_template("program-info-content.html", **params)
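
The "FUDGE" in parse_timestamp() exists because older versions of strptime's %z directive reject a colon in the UTC offset; rewriting "+10:00" to "+1000" keeps the format string happy. A self-contained illustration with a hypothetical timestamp:

import re
from datetime import datetime

val = "2021-03-04 05:06:07 +10:00"
val = re.sub(r"(\d{2}):(\d{2})$", r"\1\2", val)    # -> "2021-03-04 05:06:07 +1000"
print(datetime.strptime(val, "%Y-%m-%d %H:%M:%S %z"))
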
Code example #8
def prepare_asa_upload():  #pylint: disable=too-many-locals
    """Prepare files for upload to the ASL Scenario Archive."""

    # parse the request
    vsav_data = request.json["vsav_data"]
    vsav_filename = request.json["filename"]

    # initialize
    start_time = time.time()
    logger = logging.getLogger("prepare_asa_upload")

    try:

        # get the VSAV data (we do this inside the try block so that the user gets shown
        # a proper error dialog if there's a problem decoding the base64 data)
        vsav_data = base64.b64decode(vsav_data)
        logger.info("Preparing VSAV (#bytes=%d): %s", len(vsav_data),
                    vsav_filename)

        with TempFile() as input_file:

            # save the VSAV data in a temp file
            input_file.write(vsav_data)
            input_file.close(delete=False)
            fname = app.config.get(
                "PREPARE_ASA_UPLOAD_INPUT")  # nb: for diagnosing problems
            if fname:
                logger.debug("Saving a copy of the VSAV data: %s", fname)
                with open(fname, "wb") as fp:
                    fp.write(vsav_data)

            # prepare the files to be uploaded
            with TempFile() as stripped_vsav_file, \
                    TempFile() as screenshot_file:

                # run the VASSAL shim to prepare the VSAV file
                stripped_vsav_file.close(delete=False)
                screenshot_file.close(delete=False)
                vassal_shim = VassalShim()
                vassal_shim.prepare_asa_upload(input_file.name,
                                               stripped_vsav_file.name,
                                               screenshot_file.name)

                # read the stripped VSAV data
                with open(stripped_vsav_file.name, "rb") as fp:
                    stripped_vsav = fp.read()
                stripped_vsav_file.save_copy(
                    app.config.get("PREPARE_ASA_UPLOAD_STRIPPED_VSAV"), logger,
                    "stripped VSAV")

                # read the screenshot image
                if os.path.getsize(screenshot_file.name) == 0:
                    # NOTE: The VASSAL shim sometimes crashes while trying to generate a screenshot :-(
                    screenshot_data = None
                else:
                    # NOTE: These screenshots are used as map preview images on the ASL Scenario Archive
                    # web site (and by us, as well), so we want to optimize them for size.
                    # NOTE: I tried changing the PNG from 24-bit RGB to using a palette:
                    #   img.convert( "P", palette=Image.ADAPTIVE/WEB )
                    # but since PNG is a lossless format, the benefits are minimal. Also, weird things happen
                    # if we do this before shrinking the image, which makes calculating the ratio tricky.
                    # clean up the original screenshot
                    img = trim_image(screenshot_file.name)
                    img = remove_alpha_from_image(img)

                    # get the image data
                    def save_image(img):  #pylint: disable=missing-docstring
                        quality = parse_int(
                            app.config.get("ASA_SCREENSHOT_QUALITY"), 50)
                        return get_image_data(img,
                                              format="JPEG",
                                              quality=quality,
                                              optimize=True,
                                              subsampling=0)

                    screenshot_data = save_image(img)
                    # resize it to (roughly) the maximum allowed size
                    max_size = parse_int(
                        app.config.get("ASA_MAX_SCREENSHOT_SIZE"), 200) * 1024
                    if len(screenshot_data) > max_size:
                        ratio = math.sqrt(
                            float(max_size) / len(screenshot_data))
                        img = img.resize(
                            (int(img.width * ratio), int(img.height * ratio)),
                            Image.ANTIALIAS)
                    # add a border
                    border_size = parse_int(
                        app.config.get("ASA_SCREENSHOT_BORDER_SIZE"), 5)
                    img = ImageOps.expand(img, border_size,
                                          (255, 255, 255, 255))
                    # get the final image data
                    screenshot_data = save_image(img)

                # save a copy of the screenshot image
                fname = app.config.get("PREPARE_ASA_UPLOAD_SCREENSHOT")
                if fname:
                    if screenshot_data:
                        logger.debug(
                            "Saving a copy of the generated screenshot: %s",
                            fname)
                        with open(fname, "wb") as fp:
                            fp.write(screenshot_data)
                    else:
                        if os.path.isfile(fname):
                            os.unlink(fname)

    except Exception as ex:  #pylint: disable=broad-except

        return VassalShim.translate_vassal_shim_exception(ex, logger)

    # return the results
    logger.info("Prepared the VSAV file OK: elapsed=%.3fs",
                time.time() - start_time)
    results = {
        "filename": vsav_filename,
        "stripped_vsav": base64.b64encode(stripped_vsav).decode("utf-8"),
    }
    if screenshot_data:
        results["screenshot"] = base64.b64encode(screenshot_data).decode(
            "utf-8")
    return jsonify(results)
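
The resize step scales both dimensions by the square root of the size ratio, so the pixel count (and, roughly, the JPEG size) shrinks in proportion to the target. A worked example with hypothetical numbers is below. Note also that Image.ANTIALIAS was removed in Pillow 10; on current Pillow versions the equivalent filter is Image.LANCZOS.

import math

max_size = 200 * 1024       # ASA_MAX_SCREENSHOT_SIZE default (200 KB)
current_size = 600 * 1024   # hypothetical size of the first JPEG pass
ratio = math.sqrt(float(max_size) / current_size)
print(round(ratio, 3))      # 0.577 -> both dimensions scaled to ~58%,
                            # i.e. roughly a third of the original pixels
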
Code example #9
def get_scenario(scenario_id):  #pylint: disable=too-many-locals
    """Return a scenario."""

    # get the parameters
    roar_override = request.args.get("roar")

    # get the basic scenario information
    scenario, args = _do_get_scenario(scenario_id)
    args["scenario_date_iso"] = _parse_date_iso(scenario.get("scen_date"))
    args["defender_name"] = scenario.get("defender")
    args["attacker_name"] = scenario.get("attacker")
    args = {k.lower(): v for k, v in args.items()}

    def get_win_score(key):
        """Get a player's win percentage."""
        # NOTE: "playings" and "nGames" are assigned further down; this is OK because
        # get_win_score() is only called after they have been set.
        nWins = parse_int(playings.get(key + "_wins"), -1)
        if nWins < 0:
            return None
        score = 100 * nWins / nGames
        return int(score + 0.5)

    # get any files available for download
    downloads = {}
    keys = {
        "vt_setup": "vaslTemplates",
        "vasl_setup": "vaslTemplateSetups",
        "screenshot": "templateImages"
    }
    for key, ftype in keys.items():
        for entry in scenario.get(ftype, []):
            fname = os.path.basename(entry.get("url", ""))
            pos = fname.find("|")
            if pos < 0:
                continue
            fkey = fname[:pos]
            if fkey not in downloads:
                downloads[fkey] = {
                    "user": entry.get("user"),
                    "timestamp": entry.get("created"),
                }
            downloads[fkey][key] = entry.get("url")
    downloads = sorted(downloads.values(),
                       key=lambda u: u["timestamp"],
                       reverse=True)
    if downloads:
        args["downloads"] = [
            d for d in downloads if "vt_setup" in d or "vasl_setup" in d
        ]

    # get the map previews
    map_images = []
    for fgroup in downloads:
        if "screenshot" in fgroup:
            map_images.append(fgroup)
    for map_image in scenario.get("mapImages", []):
        map_images.append({
            "screenshot": map_image.get("url"),
            "user": map_image.get("user"),
            "timestamp": map_image.get("created"),
        })
    if map_images:
        args["map_images"] = map_images

    # get the ASL Scenario Archive playings
    playings = scenario.get("playings", [{}])[0]
    nGames = parse_int(playings.get("totalGames"), 0)
    if playings and nGames > 0:
        # NOTE: The player names are display names, only shown in the balance graphs,
        # so it doesn't matter if we know about them or not.
        args["balance"] = [{
            "name": scenario.get("defender"),
            "wins": playings.get("defender_wins"),
            "percentage": get_win_score("defender")
        }, {
            "name": scenario.get("attacker"),
            "wins": playings.get("attacker_wins"),
            "percentage": get_win_score("attacker")
        }]

    # try to match the scenario with one in ROAR
    roar_id = None
    if roar_override == "auto-match":
        matches = _match_roar_scenario(scenario)
        if matches:
            roar_id = matches[0]["roar_id"]
    else:
        roar_id = roar_override
    if roar_id:
        args["roar"] = _get_roar_info(roar_id)

    return jsonify(args)
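
For the download grouping above, entries that belong together (presumably from the same upload) are matched by the prefix before "|" in the file name portion of their URL. A self-contained illustration with made-up URLs:

import os

urls = [
    "https://example.com/files/abc123|setup.vsav",       # hypothetical URLs
    "https://example.com/files/abc123|screenshot.png",
    "https://example.com/files/def456|setup.vsav",
]
groups = {}
for url in urls:
    fname = os.path.basename(url)
    fkey = fname.split("|", 1)[0]
    groups.setdefault(fkey, []).append(fname)
print(groups)   # {'abc123': [...2 files...], 'def456': [...1 file...]}
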