def test_next_migrate_script(self):
        "Get next migrate script to run"
        with self.app.app_context():
            with self.app.test_client():
                cur = db.cursor()
                # Ensure the PuzzleMassive key/value table exists.
                cur.execute(read_query_file("create_table_puzzle_massive.sql"))
                # Seed the database_version key at 1.
                cur.execute(
                    read_query_file("upsert_puzzle_massive.sql"), {
                        "key": "database_version",
                        "label": "Database Version",
                        "description": "something",
                        "intvalue": 1,
                        "textvalue": None,
                        "blobvalue": None
                    })
                db.commit()
                # Deliberately unsorted paths covering versions 000, 001,
                # 002, 021 and 901.
                script_files = [
                    "some/path/to/migrate_puzzle_massive_database_version_021.py",
                    "some/path/to/migrate_puzzle_massive_database_version_901.py",
                    "some/path/to/migrate_puzzle_massive_database_version_001.py",
                    "some/path/to/migrate_puzzle_massive_database_version_000.py",
                    "some/path/to/migrate_puzzle_massive_database_version_002.py",
                ]

                migrate_script = get_next_migrate_script(script_files)

                # With database_version seeded at 1, the next script to run
                # is the 002 one.
                assert migrate_script == "some/path/to/migrate_puzzle_massive_database_version_002.py"
    def test_next_migrate_script_when_a_gap_exists(self):
        "Get next migrate script to run when a gap in version numbers exist"
        with self.app.app_context():
            with self.app.test_client():
                cur = db.cursor()
                cur.execute(read_query_file("create_table_puzzle_massive.sql"))
                # Seed database_version at 21; the next script needed is 022.
                cur.execute(
                    read_query_file("upsert_puzzle_massive.sql"), {
                        "key": "database_version",
                        "label": "Database Version",
                        "description": "something",
                        "intvalue": 21,
                        "textvalue": None,
                        "blobvalue": None
                    })
                db.commit()
                script_files = [
                    "some/path/to/migrate_puzzle_massive_database_version_021.py",
                    "some/path/to/migrate_puzzle_massive_database_version_901.py",
                    "some/path/to/migrate_puzzle_massive_database_version_001.py",
                    "some/path/to/migrate_puzzle_massive_database_version_000.py",
                    "some/path/to/migrate_puzzle_massive_database_version_002.py",
                ]

                # There is no 022 script (versions jump from 021 to 901), so
                # a MigrateGapError is expected.
                self.assertRaises(MigrateGapError, get_next_migrate_script,
                                  script_files)
Code example #3
0
def update_bit_icon_expiration(db, bit_icon_expiration):
    """
    Replace all rows in the bit expiration table with entries built from the
    BIT_ICON_EXPIRATION mapping of score -> extend value.
    """
    if not isinstance(bit_icon_expiration, dict):
        raise Exception("BIT_ICON_EXPIRATION is not dict object: {}".format(
            bit_icon_expiration))

    cursor = db.cursor()

    # Clear out any existing expiration rows first.
    cursor.execute(read_query_file("delete_all_from_bit_expiration_table.sql"))
    db.commit()

    # Insert one row per score threshold.  The extend value is stored with a
    # leading "+" (presumably a relative SQLite datetime modifier — confirm
    # against the insert query).
    insert_query = read_query_file("insert_bit_expiration.sql")
    for score_threshold, extend_value in bit_icon_expiration.items():
        cursor.execute(insert_query, {
            "score": score_threshold,
            "extend": f"+{extend_value}",
        })
    cursor.close()
    db.commit()
Code example #4
0
    def __init__(self, ip):
        """Create a test user helper bound to the given IP address."""
        self.ip = ip
        self.headers = {"X-Real-IP": ip}

        self.api_host = "http://localhost:{PORTAPI}".format(
            PORTAPI=current_app.config["PORTAPI"])
        self.publish_host = "http://localhost:{PORTPUBLISH}".format(
            PORTPUBLISH=current_app.config["PORTPUBLISH"])

        # Unban any existing password-less shared user tied to this IP.
        cursor = db.cursor()
        rows = cursor.execute(
            read_query_file("select-user-id-by-ip-and-no-password.sql"), {
                "ip": ip
            }).fetchall()
        if rows:
            (rows, _) = rowify(rows, cursor.description)
            redis_connection.zrem("bannedusers", rows[0]["id"])
        cursor.close()

        # Hit the api to establish the shareduser cookie and id for this ip.
        response = requests.get("{0}/current-user-id/".format(self.api_host),
                                headers=self.headers)
        self.shareduser_cookie = response.cookies["shareduser"]
        self.shareduser = int(response.content)
Code example #5
0
File: scheduler.py  Project: b0r1ngx/puzzle-massive
    def do_task(self):
        """Email the moderator the list of user names waiting for approval."""
        super().do_task()

        cursor = db.cursor()
        rows = cursor.execute(
            read_query_file(
                "select-user-name-waiting-to-be-approved.sql")).fetchall()
        if rows:
            self.log_task()
            (rows, _) = rowify(rows, cursor.description)
            # One line per pending name: "<approved_date> - <display_name>".
            message = "\n".join(
                "{approved_date} - {display_name}".format(**item)
                for item in rows)

            # Send a notification email (silent fail if not configured)
            current_app.logger.debug(message)
            if not current_app.config.get("DEBUG", True):
                try:
                    send_message(
                        current_app.config.get("EMAIL_MODERATOR"),
                        "Puzzle Massive - new names",
                        message,
                        current_app.config,
                    )
                except Exception as err:
                    # Best effort only; log the failure and move on.
                    current_app.logger.warning(
                        "Failed to send notification message. {}".format(err))

        cursor.close()
Code example #6
0
def add_bit_icons_in_file_system(db):
    """
    Add each bit icon that is in the filesystem.

    Reads each source-media/bit-icons/source-<group>.yaml author file, upserts
    the author, and registers every matching <group>-* icon file for that
    author in the database.
    """
    bits = []

    cur = db.cursor()
    select_author_id_query = read_query_file(
        "_select-author_id-for-slug-name.sql")
    upsert_author_query = read_query_file("_insert_or_update_bit_author.sql")
    insert_or_replace_bit_icon_query = read_query_file(
        "_insert_or_replace_bit_icon.sql")
    for source_file in glob.glob(
            os.path.join("source-media", "bit-icons", "source-*.yaml")):
        root = os.path.splitext(os.path.basename(source_file))[0]
        # Strip the leading "source-" prefix to get the icon group name.
        group_name = root[root.index("-") + 1:]
        with open(source_file, "r") as f:
            source = yaml.safe_load(f)
            cur.execute(
                upsert_author_query,
                {
                    "name": source["name"],
                    "slug_name": source["slug_name"],
                    "artist_document": source["artist_document"],
                },
            )
            db.commit()
            author_id = cur.execute(select_author_id_query, {
                "slug_name": source["slug_name"]
            }).fetchone()[0]
            # Register every icon file that belongs to this group.
            for bit_icon_file in glob.glob(
                    os.path.join(os.path.dirname(source_file),
                                 f"{group_name}-*")):
                bit_icon_name = os.path.splitext(
                    os.path.basename(bit_icon_file))[0]
                bits.append({"name": bit_icon_name, "author": author_id})

    # executemany accepts the list of parameter dicts directly; the previous
    # pass-through generator wrapper added nothing.
    cur.executemany(insert_or_replace_bit_icon_query, bits)
    db.commit()
    cur.close()
    def test_next_migrate_script_when_none_are_found(self):
        "Get next migrate script when none are found"
        with self.app.app_context():
            with self.app.test_client():
                cur = db.cursor()
                cur.execute(read_query_file("create_table_puzzle_massive.sql"))
                db.commit()
                # An empty list of migrate scripts should raise MigrateError.
                script_files = []

                self.assertRaises(MigrateError, get_next_migrate_script,
                                  script_files)
def migrate(config):
    "Migrate the sqlite3 database from the current database_version."
    cur = db.cursor()

    # Apply each query file in order, committing after each one.
    query_files = (
        "create_user_puzzle_index.sql",
    )
    for query_file in query_files:
        cur.execute(read_query_file(query_file))
        db.commit()

    cur.close()
    def test_get_latest_version_when_no_migrate_scripts(self):
        "Get latest version when no migrate scripts"
        with self.app.app_context():
            with self.app.test_client():
                cur = db.cursor()
                cur.execute(read_query_file("create_table_puzzle_massive.sql"))
                db.commit()
                # With no migrate scripts there is no latest version to
                # determine, so an Exception is expected.
                script_files = []

                self.assertRaises(Exception,
                                  get_latest_version_based_on_migrate_scripts,
                                  script_files)
def migrate(config):
    """
    Migrate the sqlite3 database from the current database_version.

    This is a template: copy it, rename it based on the database_version it
    will migrate from, and modify as needed.  As written it always exits via
    sys.exit before doing any work.
    """
    cur = db.cursor()

    # Example script!
    # Copy it and rename it based on the database_version it will migrate from.
    # Modify as needed.
    logger.debug("Example debug message.")
    logger.info(
        f"Hello, this is an example log message for the {sys.argv[0]} script.")
    logger.warning("Uhh... warning message.")
    logger.error(
        "Oh no! Something didn't work! The script should run sys.exit('Abandon ship!') or something."
    )
    sys.exit(
        "\nERROR: Example script should be copied and modified to correctly run.\n"
    )  # Remove this

    # Execute the sql found in these files in the queries directory.
    for filename in [
            "example_that_modifies_the_database.sql",
            "another_example.sql",
    ]:
        # Simplest form: run each file whole and commit after it.
        if True:
            cur.execute(read_query_file(filename))
            db.commit()
        # Or split up each one as separate statements
        if False:
            for statement in read_query_file(filename).split(";"):
                # Optionally ignore errors if that is needed.
                try:
                    cur.execute(statement)
                except sqlite3.OperationalError as err:
                    logger.warning(f"Ignoring sqlite error: {err}")
                db.commit()

    # Or run other commands on the database to modify things as needed.

    cur.close()
Code example #11
0
def get_next_migrate_script(migrate_scripts):
    """
    Returns the next migrate script that should execute.

    Returns None when the database is already at the latest script version.
    Raises MigrateError when the list is empty or the initial (000) script is
    missing, and MigrateGapError when a version number is skipped.
    """
    if not migrate_scripts:
        raise MigrateError("migrate_scripts list is empty.")

    sorted_scripts = sorted(migrate_scripts, key=version_number)

    # Read the current database_version; an absent row means a fresh install.
    cur = db.cursor()
    rows = cur.execute(read_query_file("select_puzzle_massive_key.sql"), {
        "key": "database_version"
    }).fetchall()
    if rows:
        (rows, _) = rowify(rows, cur.description)
        database_version = rows[0]["intvalue"]
    else:
        database_version = 0
    cur.close()

    # First time check
    if database_version == 0:
        initial_name = f"migrate_puzzle_massive_database_version_{database_version:03}.py"
        if os.path.basename(sorted_scripts[0]) == initial_name:
            return sorted_scripts[0]
        raise MigrateError(
            f"The database version was missing or set to 0, but the migrate_puzzle_massive_database_version_{database_version:03}.py was not included in the migrate scripts to run."
        )

    next_database_version = database_version + 1
    expected_name = f"migrate_puzzle_massive_database_version_{next_database_version:03}.py"
    for script in sorted_scripts:
        if os.path.basename(script) == expected_name:
            return script
        if version_number(script) > next_database_version:
            raise MigrateGapError(
                f"Missing migrate_puzzle_massive_database_version_{next_database_version:03}.py"
            )

    # All scripts are older than the next version; nothing left to run.
    return None
    def test_next_migrate_script_for_initial_migration(self):
        "Get next migrate script to run for the initial migration"
        with self.app.app_context():
            with self.app.test_client():
                cur = db.cursor()
                # No database_version row is inserted, so the version is
                # treated as 0 (initial migration).
                cur.execute(read_query_file("create_table_puzzle_massive.sql"))
                db.commit()
                script_files = [
                    "some/path/to/migrate_puzzle_massive_database_version_021.py",
                    "some/path/to/migrate_puzzle_massive_database_version_901.py",
                    "some/path/to/migrate_puzzle_massive_database_version_001.py",
                    "some/path/to/migrate_puzzle_massive_database_version_000.py",
                    "some/path/to/migrate_puzzle_massive_database_version_002.py",
                ]

                migrate_script = get_next_migrate_script(script_files)

                # The initial one should be migrate_puzzle_massive_database_version_000.py
                assert migrate_script == "some/path/to/migrate_puzzle_massive_database_version_000.py"
    def test_get_latest_version_based_on_migrate_scripts(self):
        "Get latest version based on migrate scripts"
        with self.app.app_context():
            with self.app.test_client():
                cur = db.cursor()
                cur.execute(read_query_file("create_table_puzzle_massive.sql"))
                db.commit()
                script_files = [
                    "some/path/to/migrate_puzzle_massive_database_version_021.py",
                    "some/path/to/migrate_puzzle_massive_database_version_901.py",
                    "some/path/to/migrate_puzzle_massive_database_version_001.py",
                    "some/path/to/migrate_puzzle_massive_database_version_000.py",
                    "some/path/to/migrate_puzzle_massive_database_version_002.py",
                ]

                latest_version = get_latest_version_based_on_migrate_scripts(
                    script_files)

                # The highest-numbered script (901) migrates the database up
                # to version 902, which is therefore the latest version.
                assert latest_version == 902
Code example #14
0
def fix_redis_piece_groups(puzzles, results=None):
    """
    Reset the pcfixed redis members for each given puzzle so that only pieces
    in the same group as the top left piece remain marked as immovable.

    Returns a copy of results with each processed puzzle entry marked fixed.
    """
    # None default instead of a mutable {} default (shared-state pitfall);
    # behavior is unchanged since the dict was always copied.
    _results = ({} if results is None else results).copy()
    # TODO: implement a fix for when there are multiple immovable piece groups
    # that have a different piece group then the top left piece.

    # Find each group that is not the same as the top left piece and remove
    # those pieces from the pcfixed redis smembers
    cur = db.cursor()
    print(f"failed redis puzzles: {puzzles}")
    for puzzle in puzzles:
        (result, col_names) = rowify(
            cur.execute(
                read_query_file("select_puzzle_top_left_piece_for_puzzle.sql"),
                {
                    "id": puzzle
                },
            ).fetchall(),
            cur.description,
        )
        if not result or not result[0]:
            # No top left piece found for this puzzle; leave it untouched.
            continue

        top_left_piece = result[0]
        pcg_for_top_left = redis_connection.hget(
            "pc:{puzzle}:{id}".format(puzzle=puzzle, id=top_left_piece["id"]),
            "g")

        # Fix by resetting the pcfixed members back to only those for the top
        # left piece group.
        redis_connection.sinterstore(
            f"pcfixed:{puzzle}", f"pcfixed:{puzzle}",
            "pcg:{puzzle}:{group}".format(puzzle=puzzle,
                                          group=pcg_for_top_left))

        # NOTE(review): assumes results already contains an entry for each
        # puzzle in puzzles (as produced by find_puzzles_in_redis) — confirm
        # with callers.
        _results[puzzle]["fixed"] = True
    cur.close()
    return _results
Code example #15
0
def find_puzzles_in_redis(results=None):
    """
    For each puzzle that is active in redis (is in pcupdates); check the
    immovable piece group counts.  Fail any that do not equal the count for the
    top left piece group.

    Returns a copy of results updated with one test entry per puzzle found.
    """
    # None default instead of a mutable {} default (shared-state pitfall);
    # behavior is unchanged since the dict was always copied.
    _results = ({} if results is None else results).copy()
    cur = db.cursor()

    puzzles_in_redis = redis_connection.zrange("pcupdates", 0, -1)
    for puzzle in puzzles_in_redis:
        test_result = _results.get(puzzle, {
            "puzzle": puzzle,
            "msg": "",
            "test": []
        })
        test_result["test"].append("redis")
        _results.update({puzzle: test_result})
        (result, col_names) = rowify(
            cur.execute(
                read_query_file("select_puzzle_top_left_piece_for_puzzle.sql"),
                {
                    "id": puzzle
                },
            ).fetchall(),
            cur.description,
        )
        if not result or not result[0]:
            # Test failed.
            test_result[
                "msg"] = "{msg} Failed to find top left piece for puzzle {puzzle}".format(
                    msg=test_result.get("msg", ""), puzzle=puzzle)
            test_result["status"] = "fail"
            test_result["reason"] = "fail_no_top_left_piece"
            continue

        top_left_piece = result[0]
        test_result["puzzle_id"] = top_left_piece["puzzle_id"]

        # Compare the counts for the pcfixed and the top left piece group.  They
        # should be the same.
        pcfixed_count = redis_connection.scard(
            "pcfixed:{puzzle}".format(puzzle=puzzle))
        pcg_for_top_left = redis_connection.hget(
            "pc:{puzzle}:{id}".format(puzzle=puzzle, id=top_left_piece["id"]),
            "g")
        immovable_top_left_group_count = redis_connection.scard(
            "pcg:{puzzle}:{group}".format(puzzle=puzzle,
                                          group=pcg_for_top_left))
        if pcfixed_count == immovable_top_left_group_count:
            # Test passed.
            test_result[
                "msg"] = "{msg} {puzzle_id} {puzzle} all immovable pieces are in the same group as top left".format(
                    msg=test_result.get("msg", ""),
                    puzzle_id=top_left_piece["puzzle_id"],
                    puzzle=puzzle,
                )
            test_result["status"] = "pass"
            test_result["reason"] = "pass"
            continue
        else:
            # Test failed.
            test_result[
                "msg"] = "{msg} {puzzle_id} {puzzle} not all immovable pieces are in the same group as top left".format(
                    msg=test_result.get("msg", ""),
                    puzzle_id=top_left_piece["puzzle_id"],
                    puzzle=puzzle,
                )
            test_result["status"] = "fail"
            test_result["reason"] = "fail_pcfixed_outside_of_top_left"
    cur.close()
    return _results
Code example #16
0
File: scheduler.py  Project: b0r1ngx/puzzle-massive
    def do_task(self):
        """
        Flush batched user score/point updates from redis through the internal
        tasks api and, on the first run only, seed the rank and timeline
        sorted sets from the database.
        """
        super().do_task()
        made_change = False

        cur = db.cursor()

        # Drain the batchuser set; each member is a user id with pending
        # batched score and points values in redis.
        user = redis_connection.spop("batchuser")
        while user:
            user = int(user)
            # getset atomically reads and resets the batched value to 0, so a
            # concurrent increment between read and reset is not lost.
            score = redis_connection.getset(
                "batchscore:{user}".format(user=user), value=0)
            redis_connection.expire("batchscore:{user}".format(user=user), DAY)
            points = redis_connection.getset(
                "batchpoints:{user}".format(user=user), value=0)
            redis_connection.expire("batchpoints:{user}".format(user=user),
                                    DAY)

            current_app.logger.debug(
                "update user {id} with {points} points and score of {score}".
                format(**{
                    "id": user,
                    "points": points,
                    "score": score
                }))

            # Apply the batched values through the internal tasks api; a
            # failure is logged but does not stop the drain loop.
            r = requests.post(
                "http://{HOSTAPI}:{PORTAPI}/internal/tasks/{task_name}/start/".
                format(
                    HOSTAPI=current_app.config["HOSTAPI"],
                    PORTAPI=current_app.config["PORTAPI"],
                    task_name="update_user_points_and_m_date",
                ),
                json={
                    "player": user,
                    "points": points,
                    "score": score,
                },
            )
            if r.status_code != 200:
                current_app.logger.warning(
                    "Internal tasks api error. Could not run task update_user_points_and_m_date for player {}"
                    .format(user))

            r = requests.post(
                "http://{HOSTAPI}:{PORTAPI}/internal/tasks/{task_name}/start/".
                format(
                    HOSTAPI=current_app.config["HOSTAPI"],
                    PORTAPI=current_app.config["PORTAPI"],
                    task_name="update_bit_icon_expiration",
                ),
                json={
                    "player": user,
                },
            )
            if r.status_code != 200:
                current_app.logger.warning(
                    "Internal tasks api error. Could not run task update_bit_icon_expiration for player {}"
                    .format(user))

            user = redis_connection.spop("batchuser")
            made_change = True

        if self.first_run:
            # Seed the redis "rank" (score) and "timeline" (timestamp) sorted
            # sets from the database once per process lifetime.
            result = cur.execute(
                read_query_file(
                    "select_user_score_and_timestamp.sql")).fetchall()
            if result and len(result):
                current_app.logger.info(
                    "Set rank and timeline on {0} players".format(len(result)))
                user_scores = dict(map(lambda x: [x[0], x[1]], result))
                user_timestamps = dict(map(lambda x: [x[0], int(x[2])],
                                           result))
                redis_connection.zadd("rank", user_scores)
                redis_connection.zadd("timeline", user_timestamps)
                made_change = True
            self.first_run = False

        if made_change:
            self.log_task()

        cur.close()
Code example #17
0
def fork_puzzle_pieces(source_puzzle_data, puzzle_data):
    """
    Fork a puzzle by copying the pieces and resources of the source puzzle
    instance into the new puzzle, then registering the files and final status
    through the internal api.

    Raises DataError when the source or target puzzle is not in an acceptable
    state, and Exception on internal api failures.
    """
    cur = db.cursor()
    source_instance_puzzle_id = source_puzzle_data["instance_puzzle_id"]
    puzzle_id = puzzle_data["puzzle_id"]

    current_app.logger.info(
        "Creating new fork of puzzle {source_instance_puzzle_id} to {puzzle_id}"
        .format(source_instance_puzzle_id=source_instance_puzzle_id,
                puzzle_id=puzzle_id))

    if source_puzzle_data["status"] not in (ACTIVE, IN_QUEUE, COMPLETED,
                                            FROZEN):
        raise DataError("Source puzzle not in acceptable status")

    # Re-read the target puzzle row to confirm it is still in maintenance
    # with private permission before modifying anything.
    result = cur.execute(
        "select * from Puzzle where id = :id",
        {
            "id": puzzle_data["id"]
        },
    ).fetchall()
    if not result:
        raise DataError(
            "Puzzle {puzzle_id} no longer in maintenance status.".format(
                puzzle_id=puzzle_id))
    (result, col_names) = rowify(result, cur.description)
    puzzle_data = result[0]
    if puzzle_data["status"] != MAINTENANCE:
        raise DataError(
            "Puzzle {puzzle_id} no longer in maintenance status.".format(
                puzzle_id=puzzle_id))
    if puzzle_data["permission"] != PRIVATE:
        raise DataError(
            "Puzzle {puzzle_id} needs to have private (unlisted) permission.".
            format(puzzle_id=puzzle_id))

    # Validate that this puzzle instance links back to an original puzzle.
    result = cur.execute(
        read_query_file("get_original_puzzle_id_from_puzzle_instance.sql"),
        {
            "puzzle": puzzle_data["id"]
        },
    ).fetchone()
    if not result:
        raise DataError("Error with puzzle instance {puzzle_id}.".format(
            puzzle_id=puzzle_id))

    # Copy the puzzle resources to the new puzzle_dir.  (The previous
    # duplicate computations of an unused original_puzzle_dir were removed.)
    source_instance_puzzle_dir = os.path.join(
        current_app.config["PUZZLE_RESOURCES"], source_instance_puzzle_id)
    puzzle_dir = os.path.join(current_app.config["PUZZLE_RESOURCES"],
                              puzzle_id)
    copytree(source_instance_puzzle_dir, puzzle_dir)

    # Flush redis piece data for the source instance to the database before
    # reading all piece props.
    transfer(source_puzzle_data["instance_id"])

    (piece_properties, col_names) = rowify(
        cur.execute(
            """select id, puzzle, adjacent, b, col, h, parent, r, rotate, row, status, w, x, y from Piece where (puzzle = :puzzle)""",
            {
                "puzzle": source_puzzle_data["instance_id"]
            },
        ).fetchall(),
        cur.description,
    )

    # Look up the preview image url and its attribution, if any.  All three
    # are initialized up front so a missing PuzzleFile row no longer causes a
    # NameError when they are referenced below (bug fix).
    source_preview_full_url = None
    source_preview_full_attribution = None
    attribution_id = None
    result = cur.execute(
        "select url, attribution from PuzzleFile where puzzle = :source_puzzle and name = :name;",
        {
            "name": "preview_full",
            "source_puzzle": source_puzzle_data["id"]
        },
    ).fetchall()
    if result:
        (result, _) = rowify(result, cur.description)
        source_preview_full_url = result[0]["url"]
        attribution_id = result[0]["attribution"]
    if attribution_id:
        result = cur.execute(
            fetch_query_string("_select_attribution_for_id.sql"),
            {
                "attribution_id": attribution_id
            },
        ).fetchall()
        if result:
            (result, _) = rowify(result, cur.description)
            source_preview_full_attribution = {
                "title": result[0]["title"],
                "author_link": result[0]["author_link"],
                "author_name": result[0]["author_name"],
                "source": result[0]["source"],
                "license_name": result[0]["license_name"],
            }

    cur.close()

    # Commit the piece properties and puzzle resources
    # row and col are really only useful for determining the top left piece when resetting puzzle
    for pc in piece_properties:
        pc["puzzle"] = puzzle_data["id"]

    r = requests.post(
        "http://{HOSTAPI}:{PORTAPI}/internal/puzzle/{puzzle_id}/pieces/".
        format(
            HOSTAPI=current_app.config["HOSTAPI"],
            PORTAPI=current_app.config["PORTAPI"],
            puzzle_id=puzzle_id,
        ),
        json={"piece_properties": piece_properties},
    )
    if r.status_code != 200:
        raise Exception("Puzzle details api error {}".format(r.json()))

    # Check if there is only one piece parent and mark as complete
    is_complete = True
    for index, pc in enumerate(piece_properties):
        if pc["parent"] != piece_properties[max(0, index - 1)]["parent"]:
            is_complete = False
            break

    # TODO: Copy attribution data on puzzle file if it exists.

    r = requests.post(
        "http://{HOSTAPI}:{PORTAPI}/internal/puzzle/{puzzle_id}/files/{file_name}/"
        .format(
            HOSTAPI=current_app.config["HOSTAPI"],
            PORTAPI=current_app.config["PORTAPI"],
            puzzle_id=puzzle_id,
            file_name="original",
        ),
        json={
            "url":
            "/resources/{puzzle_id}/original.jpg".format(puzzle_id=puzzle_id),
        },
    )
    if r.status_code != 200:
        raise Exception("Puzzle details api error")

    # Use the local resource path when the source preview url is missing or
    # is itself a local path; otherwise keep the external url.
    r = requests.post(
        "http://{HOSTAPI}:{PORTAPI}/internal/puzzle/{puzzle_id}/files/{file_name}/"
        .format(
            HOSTAPI=current_app.config["HOSTAPI"],
            PORTAPI=current_app.config["PORTAPI"],
            puzzle_id=puzzle_id,
            file_name="preview_full",
        ),
        json={
            "url":
            "/resources/{puzzle_id}/preview_full.jpg".format(
                puzzle_id=puzzle_id)
            if source_preview_full_url is None
            or source_preview_full_url.startswith("/") else
            source_preview_full_url,
            "attribution":
            source_preview_full_attribution,
        },
    )
    if r.status_code != 200:
        raise Exception("Puzzle details api error {}".format(r.json()))

    r = requests.post(
        "http://{HOSTAPI}:{PORTAPI}/internal/puzzle/{puzzle_id}/files/{file_name}/"
        .format(
            HOSTAPI=current_app.config["HOSTAPI"],
            PORTAPI=current_app.config["PORTAPI"],
            puzzle_id=puzzle_id,
            file_name="pieces",
        ),
        json={
            "url":
            "/resources/{puzzle_id}/scale-100/raster.png".format(
                puzzle_id=puzzle_id),
        },
    )
    if r.status_code != 200:
        raise Exception("Puzzle details api error")

    # The timestamp query param busts browser caches of the piece css.
    r = requests.post(
        "http://{HOSTAPI}:{PORTAPI}/internal/puzzle/{puzzle_id}/files/{file_name}/"
        .format(
            HOSTAPI=current_app.config["HOSTAPI"],
            PORTAPI=current_app.config["PORTAPI"],
            puzzle_id=puzzle_id,
            file_name="pzz",
        ),
        json={
            "url":
            "/resources/{puzzle_id}/scale-100/raster.css?ts={timestamp}".
            format(puzzle_id=puzzle_id, timestamp=int(time.time()))
        },
    )
    if r.status_code != 200:
        raise Exception("Puzzle details api error")

    # The fork is complete if every piece shares one parent group.
    status = ACTIVE
    if is_complete:
        status = COMPLETED
    r = requests.patch(
        "http://{HOSTAPI}:{PORTAPI}/internal/puzzle/{puzzle_id}/details/".
        format(
            HOSTAPI=current_app.config["HOSTAPI"],
            PORTAPI=current_app.config["PORTAPI"],
            puzzle_id=puzzle_id,
        ),
        json={"status": status},
    )
    if r.status_code != 200:
        raise Exception("Puzzle details api error")
Code example #18
0
def transfer(puzzle, cleanup=True, skip_status_update=False):
    """
    Transfer the puzzle data from Redis to the database. If the cleanup flag is
    set the Redis data for the puzzle will be removed afterward.

    Unless skip_status_update is set, the puzzle is put into MAINTENANCE for
    the duration of the transfer and restored to its previous status after.
    """
    current_app.logger.info("transferring puzzle: {0}".format(puzzle))
    cur = db.cursor()

    query = """select * from Puzzle where (id = :puzzle)"""
    (result, col_names) = rowify(
        cur.execute(query, {
            "puzzle": puzzle
        }).fetchall(), cur.description)
    if not result:
        # Most likely because of a database switch and forgot to run this script
        # between those actions.
        # TODO: Raise an error here and let the caller decide how to handle it.
        current_app.logger.warning(
            "Puzzle {} not in database. Skipping.".format(puzzle))
        return

    puzzle_data = result[0]

    puzzle_previous_status = puzzle_data["status"]
    if not skip_status_update:
        r = requests.patch(
            "http://{HOSTAPI}:{PORTAPI}/internal/puzzle/{puzzle_id}/details/".
            format(
                HOSTAPI=current_app.config["HOSTAPI"],
                PORTAPI=current_app.config["PORTAPI"],
                puzzle_id=puzzle_data["puzzle_id"],
            ),
            json={
                "status": MAINTENANCE,
            },
        )
        if r.status_code != 200:
            # TODO: Raise an error here and let the caller decide how to handle it.
            current_app.logger.warning(
                "Puzzle details api error when setting status to maintenance {}"
                .format(puzzle_data["puzzle_id"]))
            return

    (all_pieces, col_names) = rowify(
        cur.execute(read_query_file("select_all_piece_props_for_puzzle.sql"), {
            "puzzle": puzzle
        }).fetchall(),
        cur.description,
    )

    # Save the redis data to the db if it has changed
    changed_pieces = []
    pcstacked = set(map(int, redis_connection.smembers(f"pcstacked:{puzzle}")))
    pcfixed = set(map(int, redis_connection.smembers(f"pcfixed:{puzzle}")))
    for piece in all_pieces:
        has_changes = False
        pieceFromRedis = redis_connection.hgetall("pc:{puzzle}:{id}".format(
            puzzle=puzzle, id=piece["id"]))

        # The redis data may be empty so skip updating the db
        if len(pieceFromRedis) == 0:
            continue

        # Compare redis data with db for any changes
        for (prop, colname) in [
            ("x", "x"),
            ("y", "y"),
            ("r", "r"),
            ("g", "parent"),
            ("", "status"),
        ]:
            if colname == "status":
                # Derive the status from redis set membership.  Default to
                # the piece's current status so a stale value carried over
                # from the previous loop iteration (the parent value) is
                # never compared against status (bug fix).
                # NOTE(review): confirm that a piece in neither set should
                # keep its db status rather than be reset.
                redis_piece_prop = piece[colname]
                if piece["id"] in pcstacked:
                    redis_piece_prop = 2
                if piece["id"] in pcfixed:
                    redis_piece_prop = 1
            else:
                redis_piece_prop = pieceFromRedis.get(prop)
                redis_piece_prop = (int(redis_piece_prop) if isinstance(
                    redis_piece_prop, str) else redis_piece_prop)
            if redis_piece_prop != piece[colname]:
                current_app.logger.debug("{} has {} changes. {} != {}".format(
                    piece["id"], colname, redis_piece_prop, piece[colname]))
                piece[colname] = redis_piece_prop
                has_changes = True

        if has_changes:
            changed_pieces.append(piece)

    if len(changed_pieces) != 0:
        r = requests.patch(
            "http://{HOSTAPI}:{PORTAPI}/internal/puzzle/{puzzle_id}/pieces/".
            format(
                HOSTAPI=current_app.config["HOSTAPI"],
                PORTAPI=current_app.config["PORTAPI"],
                puzzle_id=puzzle_data["puzzle_id"],
            ),
            json={"piece_properties": changed_pieces},
        )
        if r.status_code != 200:
            raise Exception(
                "Puzzle pieces api error. Failed to patch pieces. {}".format(
                    r))

    if cleanup:
        deletePieceDataFromRedis(redis_connection, puzzle, all_pieces)

    cur.close()

    if not skip_status_update:
        r = requests.patch(
            "http://{HOSTAPI}:{PORTAPI}/internal/puzzle/{puzzle_id}/details/".
            format(
                HOSTAPI=current_app.config["HOSTAPI"],
                PORTAPI=current_app.config["PORTAPI"],
                puzzle_id=puzzle_data["puzzle_id"],
            ),
            json={
                "status": puzzle_previous_status,
            },
        )
        if r.status_code != 200:
            # TODO: Raise an error here and let the caller decide how to handle it.
            current_app.logger.warning("Puzzle details api error")
            return
Code example #19
0
def main():
    """Run any pending PuzzleMassive database migrate scripts in order.

    Loads the site config, ensures the PuzzleMassive table exists, then
    executes each migrate_puzzle_massive_database_version_NNN.py script
    found next to this file until the stored database_version is current.
    Exits with status 1 if a migrate script fails.
    """
    config_file = "site.cfg"
    site_config = loadConfig(config_file)
    app = make_app(
        config=config_file,
        cookie_secret=site_config.get("SECURE_COOKIE_SECRET"),
        database_writable=True,
    )

    logger.setLevel(logging.DEBUG if site_config["DEBUG"] else logging.INFO)

    with app.app_context():
        cur = db.cursor()

        # Always create the PuzzleMassive table in case it doesn't exist.
        cur.execute(read_query_file("create_table_puzzle_massive.sql"))
        db.commit()

        script_file = sys.argv[0]
        script_dir = os.path.dirname(script_file)

        migrate_scripts = glob(
            f"{script_dir}/migrate_puzzle_massive_database_version_[0-9][0-9][0-9].py"
        )
        if not migrate_scripts:
            logger.warning(
                f"No migrate scripts found for glob: '{script_dir}/migrate_puzzle_massive_database_version_[0-9][0-9][0-9].py'"
            )
            cur.close()
            sys.exit(0)

        next_migrate_script = get_next_migrate_script(migrate_scripts)
        loop_count = 0
        while next_migrate_script:
            version = version_number(next_migrate_script)
            logger.info(
                f"Executing {next_migrate_script} to migrate from PuzzleMassive database version {version}."
            )
            logger.debug(f"sanity count {loop_count}")
            loop_count += 1

            # Run the migrate script as a subprocess so a failure can stop
            # the whole migration before the stored version is bumped.
            try:
                completed = subprocess.run(
                    [sys.executable, next_migrate_script],
                    check=True,
                    capture_output=True,
                )
            except subprocess.CalledProcessError as err:
                logger.debug(str(err))
                logger.error(f"Failed when executing {next_migrate_script}.")
                logger.info(f"\n{err.stdout.decode()}\n")
                logger.error(f"\n{err.stderr.decode()}\n")
                cur.close()
                sys.exit(1)
            logger.info(f"\n{completed.stdout.decode()}\n")
            logger.info(f"\n{completed.stderr.decode()}\n")

            # The script succeeded (check=True did not raise), so record the
            # next database version.
            now = datetime.datetime.utcnow().isoformat()
            logger.info(
                f"Successfully executed {next_migrate_script} and will now update database_version to be {version + 1}."
            )
            cur.execute(
                read_query_file("upsert_puzzle_massive.sql"),
                {
                    "key": "database_version",
                    "label": "Database Version",
                    "description":
                    f"Puzzle Massive Database version updated on {now}. Only update this via the {script_file}",
                    "intvalue": version + 1,
                    "textvalue": None,
                    "blobvalue": None,
                },
            )
            db.commit()

            next_migrate_script = get_next_migrate_script(migrate_scripts)
            # Guard against looping forever if a script never bumps the
            # version number.
            if loop_count > len(migrate_scripts):
                logger.error(
                    "Exiting out of while loop for checking next migrate scripts."
                )
                break
        else:
            # The while loop finished without a break: either no migrations
            # were pending or every pending one was applied.
            logger.info("PuzzleMassive database version is up to date.")

        cur.close()
コード例 #20
0
ファイル: scheduler.py プロジェクト: b0r1ngx/puzzle-massive
    def do_task(self):
        """Flush batched piece-movement points from redis to the timeline API.

        Pops each puzzle from the redis 'batchpuzzle' set, reads the per-user
        points accumulated since ``self.last_run``, and posts them to the
        internal puzzle timeline endpoint.  On the first run it also seeds the
        redis timeline and score sorted sets from the database.
        """
        super().do_task()
        made_change = False

        cur = db.cursor()

        puzzle = redis_connection.spop("batchpuzzle")
        while puzzle:
            # All (user, timestamp) timeline entries recorded since last run.
            last_batch = redis_connection.zrangebyscore(
                "timeline:{puzzle}".format(puzzle=puzzle),
                self.last_run,
                "+inf",
                withscores=True,
            )
            for (user, update_timestamp) in last_batch:
                current_app.logger.debug(
                    "user: {user}, {update_timestamp}".format(
                        user=user, update_timestamp=update_timestamp))
                user = int(user)
                # Atomically read and reset the accumulated batch points.
                points = int(
                    redis_connection.getset(
                        "batchpoints:{puzzle}:{user}".format(puzzle=puzzle,
                                                             user=user),
                        value=0,
                    ) or "0")
                redis_connection.expire(
                    "batchpoints:{puzzle}:{user}".format(puzzle=puzzle,
                                                         user=user), DAY)
                if points != 0:
                    result = cur.execute(
                        fetch_query_string("select-all-from-puzzle-by-id.sql"),
                        {
                            "puzzle": puzzle
                        },
                    ).fetchall()
                    if not result:
                        # Fix: logger.warn is a deprecated alias of warning.
                        current_app.logger.warning(
                            "no puzzle details found for puzzle {}".format(
                                puzzle))
                        continue
                    (result, col_names) = rowify(result, cur.description)
                    puzzle_data = result[0]
                    puzzle_id = puzzle_data["puzzle_id"]

                    timestamp = strftime("%Y-%m-%d %H:%M:%S",
                                         gmtime(update_timestamp))
                    current_app.logger.debug(
                        "{timestamp} - bumping {points} points on {puzzle} ({puzzle_id}) for player: {player}"
                        .format(
                            puzzle=puzzle,
                            puzzle_id=puzzle_id,
                            player=user,
                            points=points,
                            timestamp=timestamp,
                        ))

                    r = requests.post(
                        "http://{HOSTAPI}:{PORTAPI}/internal/puzzle/{puzzle_id}/timeline/"
                        .format(
                            HOSTAPI=current_app.config["HOSTAPI"],
                            PORTAPI=current_app.config["PORTAPI"],
                            puzzle_id=puzzle_id,
                        ),
                        json={
                            "player": user,
                            "points": points,
                            "timestamp": timestamp
                        },
                    )
                    if r.status_code != 200:
                        # Skip this entry; points were already reset in redis.
                        current_app.logger.warning(
                            "Puzzle timeline api error. Could not add batchpoints. Skipping {puzzle_id}"
                            .format(puzzle_id=puzzle_id, ))
                        continue

                made_change = True
            puzzle = redis_connection.spop("batchpuzzle")

        if self.first_run:
            # Seed redis timeline/score sorted sets from the database once.
            result = cur.execute(
                read_query_file(
                    "get_list_of_puzzles_in_timeline.sql")).fetchall()
            if result and len(result):
                puzzle_list = list(map(lambda x: x[0], result))
                for puzzle in puzzle_list:
                    result = cur.execute(
                        read_query_file(
                            "select_user_score_and_timestamp_per_puzzle.sql"),
                        {
                            "puzzle": puzzle
                        },
                    ).fetchall()
                    if result and len(result):
                        current_app.logger.info(
                            "Set puzzle ({0}) score and puzzle timeline on {1} players"
                            .format(puzzle, len(result)))
                        # Rows are (user, score, timestamp).
                        user_score = dict(map(lambda x: [x[0], x[1]], result))
                        user_timestamps = dict(
                            map(lambda x: [x[0], int(x[2])], result))
                        redis_connection.zadd(
                            "timeline:{puzzle}".format(puzzle=puzzle),
                            user_timestamps)
                        redis_connection.zadd(
                            "score:{puzzle}".format(puzzle=puzzle), user_score)
                made_change = True

            self.first_run = False

        self.last_run = int(time())

        if made_change:
            self.log_task()

        cur.close()
コード例 #21
0
ファイル: upload.py プロジェクト: b0r1ngx/puzzle-massive
    def add_puzzle(self, data):
        """Download an Unsplash photo and attach it to this puzzle.

        ``data`` is the photo object returned by the Unsplash API.  Writes the
        image to the puzzle's resource directory as original.jpg, then updates
        the puzzle description and preview_full file via the internal API.

        Raises:
            Exception: if the Unsplash payload has no links/download, or an
                internal API call does not return HTTP 200.
        """
        cur = db.cursor()

        # Don't use unsplash description if puzzle already has one
        description = (self.description if self.description else escape(
            data.get("description", None)))

        # Fix: validate the payload before creating the local file so a bad
        # response doesn't leave an empty original.jpg behind.
        links = data.get("links")
        if not links:
            raise Exception("Unsplash returned no links")
        download = links.get("download")
        if not download:
            raise Exception("Unsplash returned no download")

        puzzle_dir = os.path.join(self.puzzle_resources, self.puzzle_id)
        filename = os.path.join(puzzle_dir, "original.jpg")
        r = requests.get(download)
        # Fix: context manager guarantees the handle is closed even if the
        # write fails (the original leaked the open file on error paths).
        with open(filename, "w+b") as f:
            f.write(r.content)

        r = requests.patch(
            "http://{HOSTAPI}:{PORTAPI}/internal/puzzle/{puzzle_id}/details/".
            format(
                HOSTAPI=self.app.config["HOSTAPI"],
                PORTAPI=self.app.config["PORTAPI"],
                puzzle_id=self.puzzle_id,
            ),
            json={
                "link": "",
                "description": description
            },
        )
        if r.status_code != 200:
            raise Exception(
                "Puzzle details api error when setting link and description on unsplash photo upload {}"
                .format(self.puzzle_id))

        puzzle = rowify(
            cur.execute(
                read_query_file("select_puzzle_id_by_puzzle_id.sql"),
                {
                    "puzzle_id": self.puzzle_id
                },
            ).fetchall(),
            cur.description,
        )[0][0]["puzzle"]

        # Set preview full url and fallback to small
        preview_full_url = data.get("urls",
                                    {}).get("custom",
                                            data.get("urls", {}).get("small"))
        # Use the max version to keep the image ratio and not crop it.
        preview_full_url = re.sub("fit=crop", "fit=max", preview_full_url)

        # Not using url_fix on the user.links.html since it garbles the '@'.
        r = requests.post(
            "http://{HOSTAPI}:{PORTAPI}/internal/puzzle/{puzzle_id}/files/{file_name}/"
            .format(
                HOSTAPI=self.app.config["HOSTAPI"],
                PORTAPI=self.app.config["PORTAPI"],
                puzzle_id=self.puzzle_id,
                file_name="preview_full",
            ),
            json={
                "attribution": {
                    "title":
                    "Photo",
                    "author_link":
                    "{user_link}?utm_source={application_name}&utm_medium=referral"
                    .format(
                        user_link=data.get("user").get("links").get("html"),
                        application_name=self.application_name,
                    ),
                    "author_name":
                    data.get("user").get("name"),
                    "source":
                    "{photo_link}?utm_source={application_name}&utm_medium=referral"
                    .format(
                        photo_link=data.get("links").get("html"),
                        application_name=self.application_name,
                    ),
                    "license_name":
                    "unsplash",
                },
                "url": preview_full_url,
            },
        )
        if r.status_code != 200:
            raise Exception(
                "Puzzle file api error when setting attribution and url for unsplash preview_full {}"
                .format(self.puzzle_id))

        cur.close()
コード例 #22
0
# Path to the site config file is supplied as the first command line argument.
config_file = sys.argv[1]
config = loadConfig(config_file)

# Open a writable connection to the sqlite database named in the site config.
db_file = config["SQLITE_DATABASE_URI"]
db = sqlite3.connect(db_file)

logging.basicConfig()
logger = logging.getLogger(__name__)
# Match log verbosity to the site's DEBUG setting.
logger.setLevel(logging.DEBUG if config["DEBUG"] else logging.INFO)

if __name__ == "__main__":

    cur = db.cursor()

    ## Update: create indexes. Each query file may contain several
    ## semicolon-separated statements, executed one at a time.
    for filename in [
            "create_user_ip_index.sql",
            "create_puzzle_puzzle_id_index.sql",
            "create_piece_puzzle_index.sql",
    ]:
        for statement in read_query_file(filename).split(";"):
            try:
                cur.execute(statement)
            except sqlite3.OperationalError as err:
                # Ignore the sqlite error here if the index has already been
                # created on a previous run.
                logger.warning(f"Ignoring sqlite error: {err}")
            db.commit()

    cur.close()
コード例 #23
0
    def fabricate_fake_puzzle(self, **kw):
        """Create a minimal puzzle row plus piece and file fixtures for tests.

        Keyword arguments override fields of the default fake puzzle record
        (e.g. ``pieces``, ``status``, ``puzzle_id``).

        Returns:
            tuple: (puzzle details row dict, list of inserted piece dicts).
        """
        cur = self.db.cursor()

        classic_variant = cur.execute(
            read_query_file("select-puzzle-variant-id-for-slug.sql"), {
                "slug": CLASSIC
            }).fetchone()[0]

        puzzle_id = "fabricated-{}".format(str(uuid4()))
        # Fix: the original dict listed "name": "abc" twice; the duplicate
        # key was dead (later keys win) and has been removed.
        fake_puzzle = {
            "puzzle_id": puzzle_id,
            "pieces": 16,
            "name": "abc",
            "rows": 4,
            "cols": 4,
            "piece_width": 64.0,
            "mask_width": 102.0,
            "table_width": 2000,
            "table_height": 2000,
            "link": "http://example.com/",
            "description": "example",
            "bg_color": "gray",
            "m_date": "2016-06-24 02:59:32",
            "owner": 0,
            "queue": 2,
            "status": ACTIVE,
            "permission": PUBLIC,
        }
        fake_puzzle.update(kw)
        puzzle_id = fake_puzzle.get("puzzle_id")

        # Create puzzle dir
        puzzle_dir = os.path.join(self.app.config.get("PUZZLE_RESOURCES"),
                                  puzzle_id)
        os.mkdir(puzzle_dir)

        # Create a blank file for the original and preview_full
        blank_original_image = os.path.join(puzzle_dir, "original.jpg")
        open(blank_original_image, "a").close()
        blank_preview_full_image = os.path.join(puzzle_dir, "preview_full.jpg")
        open(blank_preview_full_image, "a").close()

        # Add puzzle to database
        cur.execute(read_query_file("insert_puzzle.sql"), fake_puzzle)
        self.db.commit()

        # Get the puzzle id
        result = rowify(
            cur.execute(
                read_query_file("select_puzzle_id_by_puzzle_id.sql"),
                {"puzzle_id": puzzle_id},
            ).fetchall(),
            cur.description,
        )[0][0]
        puzzle = result["puzzle"]

        cur.execute(
            read_query_file("insert-puzzle-instance.sql"),
            {
                "original": puzzle,
                "instance": puzzle,
                "variant": classic_variant,
            },
        )

        fake_puzzle = rowify(
            cur.execute(
                read_query_file(
                    "select-internal-puzzle-details-for-puzzle_id.sql"),
                {"puzzle_id": puzzle_id},
            ).fetchall(),
            cur.description,
        )[0][0]

        # Add puzzle files to database
        cur.execute(
            read_query_file("add-puzzle-file.sql"),
            {
                "puzzle": puzzle,
                "name": "original",
                # Not a public file (only on admin page)
                "url": "/resources/{0}/original.jpg".format(puzzle_id),
            },
        )
        cur.execute(
            read_query_file("add-puzzle-file.sql"),
            {
                "puzzle": puzzle,
                "name": "preview_full",
                "url": "/resources/{0}/preview_full.jpg".format(puzzle_id),
            },
        )

        # Add puzzle instance
        # NOTE(review): this re-fetches the variant and inserts the puzzle
        # instance a second time, duplicating the insert above.  Kept to
        # preserve the fixture's existing behavior -- confirm whether the
        # duplicate insert is intentional before removing it.
        classic_variant = cur.execute(
            read_query_file("select-puzzle-variant-id-for-slug.sql"), {
                "slug": CLASSIC
            }).fetchone()[0]
        cur.execute(
            read_query_file("insert-puzzle-instance.sql"),
            {
                "original": puzzle,
                "instance": puzzle,
                "variant": classic_variant,
            },
        )

        # Add fake piece resources
        scale = 100
        scaled_dir = os.path.join(puzzle_dir, "scale-%i" % scale)
        os.mkdir(scaled_dir)
        open(os.path.join(scaled_dir, "raster.css"), "a").close()
        open(os.path.join(scaled_dir, "raster.png"), "a").close()

        self.db.commit()

        # Scatter every piece at a random spot on the table.
        piece_properties = []
        for pc in range(0, fake_puzzle["pieces"]):
            piece_properties.append({
                "id": pc,
                "puzzle": puzzle,
                "x": randint(0, fake_puzzle["table_width"]),
                "y": randint(0, fake_puzzle["table_height"]),
                "w": 40,
                "h": 40,
                "r": 0,
                "adjacent": "",
                "rotate": 0,
                "row": -1,
                "col": -1,
                "parent": None,
                "b": 2,
                "status": None,
            })
        # Fake top left piece
        piece_properties[0]["status"] = 1
        piece_properties[0]["parent"] = 0
        piece_properties[0]["row"] = 0
        piece_properties[0]["col"] = 0

        # Fix: executemany accepts the list directly; the old `each()`
        # generator wrapper was unnecessary and has been removed.
        cur.executemany(read_query_file("insert_pieces.sql"),
                        piece_properties)

        cur.execute(
            read_query_file("add-puzzle-file.sql"),
            {
                "puzzle": puzzle,
                "name": "pieces",
                "url": "/resources/{puzzle_id}/scale-100/raster.png".format(
                    puzzle_id=puzzle_id),
            },
        )
        cur.execute(
            read_query_file("add-puzzle-file.sql"),
            {
                "puzzle": puzzle,
                "name": "pzz",
                "url": "/resources/{puzzle_id}/scale-100/raster.css?ts={timestamp}".
                format(puzzle_id=puzzle_id, timestamp=int(time.time())),
            },
        )
        self.db.commit()
        cur.close()
        return fake_puzzle, piece_properties
コード例 #24
0
def render(*args):
    """
    Render any puzzles that are in the render queue.
    Each puzzle should exist in the Puzzle db with the IN_RENDER_QUEUE or REBUILD status
    and have an original.jpg file.
    """
    # Delete old piece properties if existing
    # Delete old PuzzleFile for name if existing
    # TODO: update preview image in PuzzleFile?
    cur = db.cursor()

    for puzzle in args:
        current_app.logger.info(
            "Rendering puzzle: {puzzle_id}".format(**puzzle))

        result = cur.execute(
            read_query_file(
                "select-internal-puzzle-details-for-puzzle_id.sql"),
            {
                "puzzle_id": puzzle["puzzle_id"]
            },
        ).fetchall()
        if not result:
            current_app.logger.info(
                "Puzzle {puzzle_id} not available; skipping.".format(**puzzle))
            continue

        puzzle_data = rowify(
            result,
            cur.description,
        )[0][0]
        if puzzle_data["status"] not in (IN_RENDER_QUEUE, REBUILD):
            current_app.logger.info(
                "Puzzle {puzzle_id} no longer in rendering status; skipping.".
                format(**puzzle))
            continue

        # Set the status of the puzzle to rendering
        r = requests.patch(
            "http://{HOSTAPI}:{PORTAPI}/internal/puzzle/{puzzle_id}/details/".
            format(
                HOSTAPI=current_app.config["HOSTAPI"],
                PORTAPI=current_app.config["PORTAPI"],
                puzzle_id=puzzle["puzzle_id"],
            ),
            json={"status": RENDERING},
        )
        if r.status_code != 200:
            raise Exception("Puzzle details api error")

        result = cur.execute(
            read_query_file("get_original_puzzle_id_from_puzzle_instance.sql"),
            {
                "puzzle": puzzle["id"]
            },
        ).fetchone()
        if not result:
            print("Error with puzzle instance {puzzle_id} ; skipping.".format(
                **puzzle))
            continue
        original_puzzle_id = result[0]
        puzzle_id = puzzle["puzzle_id"]
        original_puzzle_dir = os.path.join(
            current_app.config["PUZZLE_RESOURCES"], original_puzzle_id)
        puzzle_dir = os.path.join(current_app.config["PUZZLE_RESOURCES"],
                                  puzzle_id)

        # If it is being rebuilt then delete all the other resources.
        cleanup(puzzle_id, ["original.jpg", "preview_full.jpg"])

        scaled_sizes = [
            100,
        ]

        # Create the preview full if it is a new original puzzle. A puzzle is
        # considered to be 'new' if status was IN_RENDER_QUEUE and not REBUILD.
        # TODO: use requests.get to get original.jpg and run in another thread
        if original_puzzle_id == puzzle_id and puzzle[
                "status"] == IN_RENDER_QUEUE:
            im = Image.open(os.path.join(original_puzzle_dir,
                                         "original.jpg")).copy()
            im.thumbnail(size=(384, 384))
            im.save(os.path.join(puzzle_dir, "preview_full.jpg"))
            im.close()

        # TODO: get path of original.jpg via the PuzzleFile query
        # TODO: use requests.get to get original.jpg and run in another thread
        imagefile = os.path.join(original_puzzle_dir, "original.jpg")

        im = Image.open(imagefile)
        (width, height) = im.size
        im.close()

        # Scale down puzzle image to avoid have pieces too big
        # min_pixels = (MIN_PIECE_SIZE * MIN_PIECE_SIZE) * int(puzzle['pieces'])
        max_pixels = min(MAX_PIXELS, (MAX_PIECE_SIZE * MAX_PIECE_SIZE) *
                         int(puzzle["pieces"]))
        im_pixels = width * height
        if im_pixels > max_pixels:
            resizedimagefile = os.path.join(puzzle_dir, "resized-original.jpg")
            # The image is too big which would create piece sizes larger then the MAX_PIECE_SIZE
            # resize the image using image magick @
            subprocess.call([
                "convert",
                imagefile,
                "-resize",
                "{0}@".format(max_pixels),
                "-strip",
                "-quality",
                "85%",
                resizedimagefile,
            ])
            im = Image.open(resizedimagefile)
            (width, height) = im.size
            im_pixels = width * height
            imagefile = resizedimagefile
            im.close()

        # Create svg lines
        jpc = JigsawPieceClipsSVG(
            width=width,
            height=height,
            pieces=int(puzzle["pieces"]),
            minimum_piece_size=MIN_PIECE_SIZE,
        )
        svgfile = os.path.join(puzzle_dir, "lines.svg")
        f = open(svgfile, "w")
        f.write(jpc.svg())
        f.close()

        # Create pieces
        piece_count = 0
        dimensions = {}
        for scale in scaled_sizes:
            scale = int(scale)
            scaled_dir = os.path.join(puzzle_dir, "scale-%i" % scale)
            os.mkdir(scaled_dir)

            # max_pixels is 0 to prevent resizing, since this is handled before creating piece clips svg
            # Skip creating the svg files for each piece by setting vector to False (too memory intensive)
            pieces = Pieces(svgfile,
                            imagefile,
                            scaled_dir,
                            scale=scale,
                            max_pixels=0,
                            vector=False)

            pieces.cut()

            pieces.generate_resources()

            piece_count = len(pieces.pieces)
            piece_bboxes = pieces.pieces
            dimensions[scale] = {
                "width": pieces.width,
                "height": pieces.height,
                "table_width": int(pieces.width * 2.5),
                "table_height": int(pieces.height * 2.5),
                "board_url": "puzzle_board-%s.html" % scale,
            }

        tw = dimensions[100]["table_width"]
        th = dimensions[100]["table_height"]

        # Update the table width and height, set the new piece count
        r = requests.patch(
            "http://{HOSTAPI}:{PORTAPI}/internal/puzzle/{puzzle_id}/details/".
            format(
                HOSTAPI=current_app.config["HOSTAPI"],
                PORTAPI=current_app.config["PORTAPI"],
                puzzle_id=puzzle["puzzle_id"],
            ),
            json={
                "pieces": piece_count,
                "table_width": tw,
                "table_height": th,
            },
        )
        if r.status_code != 200:
            raise Exception("Puzzle details api error")

        # Update the css file with dimensions for puzzle outline
        cssfile = open(os.path.join(puzzle_dir, "scale-100", "raster.css"),
                       "a")
        cssfile.write(
            "[id=puzzle-outline]{{width:{width}px;height:{height}px;left:{left}px;top:{top}px;}}"
            .format(
                width=pieces.width,
                height=pieces.height,
                left=int(round(old_div((tw - pieces.width), 2))),
                top=int(round(old_div((th - pieces.height), 2))),
            ))
        cssfile.close()

        # Get the top left piece by checking the bounding boxes
        top_left_piece = "0"
        minLeft = piece_bboxes[top_left_piece][0]
        minTop = piece_bboxes[top_left_piece][1]
        for key in list(piece_bboxes.keys()):
            if piece_bboxes[key][0] <= minLeft and piece_bboxes[key][
                    1] <= minTop:
                top_left_piece = key
                minLeft = piece_bboxes[key][0]
                minTop = piece_bboxes[key][1]
        top_left_piece = int(top_left_piece)

        piece_properties = []
        for i in range(0, piece_count):

            piece_properties.append({
                "id":
                i,
                "puzzle":
                puzzle["id"],
                "x":
                randint(0, tw),
                "y":
                randint(0, th),
                "w":
                piece_bboxes[str(i)][2] - piece_bboxes[str(i)][0],
                "h":
                piece_bboxes[str(i)][3] - piece_bboxes[str(i)][1],
                "r":
                0,  # mutable rotation of piece
                "rotate":
                0,  # immutable piece orientation
                "row":
                -1,  # deprecated
                "col":
                -1,  # deprecated
                # "s": 0,  # side
                "parent":
                None,  # parent
                "b":
                2,  # TODO: will need to be either 0 for dark or 1 for light
                "status":
                None,
            })

        # Set the top left piece to the top left corner and make it immovable
        piece_properties[top_left_piece]["x"] = int(
            round(old_div((tw - pieces.width), 2)))
        piece_properties[top_left_piece]["y"] = int(
            round(old_div((th - pieces.height), 2)))
        piece_properties[top_left_piece]["status"] = 1
        piece_properties[top_left_piece]["parent"] = top_left_piece
        # set row and col for finding the top left piece again after reset of puzzle
        piece_properties[top_left_piece]["row"] = 0
        piece_properties[top_left_piece]["col"] = 0

        # create index.json
        data = {
            "version": "alpha",
            "generator": "piecemaker",
            "scaled": scaled_sizes,
            "sides": [0],
            "piece_count": piece_count,
            "image_author": "none",
            "image_link": "none",
            "image_title": "none",
            "image_description": "none",
            "puzzle_author": "yup",
            "puzzle_link": "yup",
            "scaled_dimensions": dimensions,
            "piece_properties": piece_properties,
        }
        f = open(os.path.join(puzzle_dir, "index.json"), "w")
        json.dump(data, f)
        f.close()

        # Create adjacent pieces
        adjacent_pieces = None
        if (
                False
        ):  # TODO: Use the overlapping masks approach when using a custom cut lines
            first_scaled_dir = os.path.join(puzzle_dir,
                                            "scale-%i" % scaled_sizes[0])
            adjacent = Adjacent(first_scaled_dir, by_overlap=True)
            adjacent_pieces = adjacent.adjacent_pieces
        else:  # Find adjacent pieces by bounding boxes only and skip corners
            first_scaled_dir = os.path.join(puzzle_dir,
                                            "scale-%i" % scaled_sizes[0])
            adjacent = Adjacent(first_scaled_dir, by_overlap=False)
            adjacent_pieces = adjacent.adjacent_pieces
            filtered_adjacent_pieces = {}

            # filter out the corner adjacent pieces
            for target_id, target_adjacent_list in list(
                    adjacent_pieces.items()):
                target_bbox = piece_bboxes[target_id]  # [0, 0, 499, 500]
                target_center_x = target_bbox[0] + int(
                    round(old_div((target_bbox[2] - target_bbox[0]), 2)))
                target_center_y = target_bbox[1] + int(
                    round(old_div((target_bbox[3] - target_bbox[1]), 2)))
                filtered_adjacent_list = []
                for adjacent_id in target_adjacent_list:
                    adjacent_bbox = piece_bboxes[
                        adjacent_id]  # [0, 347, 645, 996]
                    left = (adjacent_bbox[0] < target_center_x) and (
                        adjacent_bbox[2] < target_center_x)
                    top = (adjacent_bbox[1] < target_center_y) and (
                        adjacent_bbox[3] < target_center_y)
                    right = (adjacent_bbox[0] > target_center_x) and (
                        adjacent_bbox[2] > target_center_x)
                    bottom = (adjacent_bbox[1] > target_center_y) and (
                        adjacent_bbox[3] > target_center_y)

                    if ((top and left) or (top and right) or (bottom and left)
                            or (bottom and right)):
                        loc = []
                        if top and left:
                            loc.append("top left")
                        if top and right:
                            loc.append("top right")
                        if bottom and left:
                            loc.append("bottom left")
                        if bottom and right:
                            loc.append("bottom right")
                        # print("adjacent piece: {0} is {2} corner piece of {1}".format(adjacent_id, target_id, loc))
                        # print("adjacent bbox: {0}".format(adjacent_bbox))
                        # print("target bbox: {0}".format(target_bbox))
                    else:
                        filtered_adjacent_list.append(adjacent_id)

                filtered_adjacent_pieces[target_id] = filtered_adjacent_list
            adjacent_pieces = filtered_adjacent_pieces
            # print(filtered_adjacent_pieces)
            # for f, g in filtered_adjacent_pieces.items():
            #    print("{0} with {1} adjacent pieces: {2}".format(f, len(g), g))

        f = open(os.path.join(puzzle_dir, "adjacent.json"), "w")
        json.dump(adjacent_pieces, f)
        f.close()

        # Create adjacent offsets for the scale
        for pc in piece_properties:
            origin_x = piece_bboxes[str(pc["id"])][0]
            origin_y = piece_bboxes[str(pc["id"])][1]
            offsets = {}
            for adj_pc in adjacent_pieces[str(pc["id"])]:
                x = piece_bboxes[adj_pc][0] - origin_x
                y = piece_bboxes[adj_pc][1] - origin_y
                offsets[adj_pc] = "{x},{y}".format(x=x, y=y)
            adjacent_str = " ".join(
                map(
                    lambda k, v: "{0}:{1}".format(k, v),
                    list(offsets.keys()),
                    list(offsets.values()),
                ))
            pc["adjacent"] = adjacent_str

        # The original.jpg is assumed to be available locally because of migratePuzzleFile.py
        # Clear out any older pieces and their puzzle files, (raster.png,
        # raster.css) but keep preview full.
        r = requests.delete(
            "http://{HOSTAPI}:{PORTAPI}/internal/puzzle/{puzzle_id}/pieces/".
            format(
                HOSTAPI=current_app.config["HOSTAPI"],
                PORTAPI=current_app.config["PORTAPI"],
                puzzle_id=puzzle["puzzle_id"],
            ))
        if r.status_code != 200:
            raise Exception(
                "Puzzle pieces api error when deleting pieces for puzzle {}".
                format(puzzle["puzzle_id"]))

        for name in [
                "pieces",
                "pzz",
        ]:
            r = requests.delete(
                "http://{HOSTAPI}:{PORTAPI}/internal/puzzle/{puzzle_id}/files/{file_name}/"
                .format(
                    HOSTAPI=current_app.config["HOSTAPI"],
                    PORTAPI=current_app.config["PORTAPI"],
                    puzzle_id=puzzle["puzzle_id"],
                    file_name=name,
                ), )
            if r.status_code != 200:
                raise Exception(
                    "Puzzle file api error when deleting file '{}' for puzzle {}"
                    .format(name, puzzle["puzzle_id"]))

        # Commit the piece properties and puzzle resources
        # row and col are really only useful for determining the top left piece when resetting puzzle
        r = requests.post(
            "http://{HOSTAPI}:{PORTAPI}/internal/puzzle/{puzzle_id}/pieces/".
            format(
                HOSTAPI=current_app.config["HOSTAPI"],
                PORTAPI=current_app.config["PORTAPI"],
                puzzle_id=puzzle["puzzle_id"],
            ),
            json={"piece_properties": piece_properties},
        )
        if r.status_code != 200:
            raise Exception(
                "Puzzle pieces api error. Failed to post pieces. {}".format(r))

        # Update Puzzle data
        puzzleStatus = ACTIVE
        if original_puzzle_id == puzzle_id and puzzle["permission"] == PUBLIC:
            puzzleStatus = IN_QUEUE

        # TODO: if puzzle is unsplash photo then check if preview_full is still
        # reachable.  If it isn't; then run the set_lost_unsplash_photo from
        # migratePuzzleFile.

        r = requests.patch(
            "http://{HOSTAPI}:{PORTAPI}/internal/puzzle/{puzzle_id}/details/".
            format(
                HOSTAPI=current_app.config["HOSTAPI"],
                PORTAPI=current_app.config["PORTAPI"],
                puzzle_id=puzzle["puzzle_id"],
            ),
            json={
                "status": puzzleStatus,
                "m_date": time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime()),
            },
        )
        if r.status_code != 200:
            raise Exception(
                "Puzzle details api error when updating status and m_date on newly rendered puzzle"
            )

        for (name, url) in [
            (
                "pieces",
                "/resources/{puzzle_id}/scale-100/raster.png".format(
                    puzzle_id=puzzle["puzzle_id"]),
            ),
            (
                "pzz",
                "/resources/{puzzle_id}/scale-100/raster.css?ts={timestamp}".
                format(puzzle_id=puzzle["puzzle_id"],
                       timestamp=int(time.time())),
            ),
        ]:
            r = requests.post(
                "http://{HOSTAPI}:{PORTAPI}/internal/puzzle/{puzzle_id}/files/{file_name}/"
                .format(
                    HOSTAPI=current_app.config["HOSTAPI"],
                    PORTAPI=current_app.config["PORTAPI"],
                    puzzle_id=puzzle["puzzle_id"],
                    file_name=name,
                ),
                json={
                    "url": url,
                },
            )
            if r.status_code != 200:
                raise Exception(
                    "Puzzle file api error when adding file '{}' on newly rendered puzzle"
                    .format(name))

        cur.close()

        keep_list = [
            "original.jpg",
            "preview_full.jpg",
            "resized-original.jpg",
            "scale-100",
            "raster.css",
            "raster.png",
        ]
        cleanup(puzzle["puzzle_id"], keep_list)
コード例 #25
0
def generate_puzzles(count=1,
                     size="180x180!",
                     min_pieces=0,
                     max_pieces=9,
                     user=3):
    """Generate random test puzzles and push them to the render job queue.

    Each puzzle gets a plasma-fractal image created with ImageMagick's
    ``convert`` command and is inserted directly with IN_RENDER_QUEUE
    status (skipping moderation).  Afterwards every puzzle found in the
    render queue is enqueued on the artist job queue.

    :param count: how many puzzles to create.
    :param size: ImageMagick resize geometry for the generated image.
    :param min_pieces: when truthy, pick a random piece count between
        min_pieces and max_pieces; otherwise use max_pieces exactly.
    :param max_pieces: maximum (or exact) piece count.
    :param user: owner user id for the generated puzzles.
    """
    cur = db.cursor()
    for _ in range(count):
        link = ""
        description = ""
        bg_color = "#444444"
        permission = PUBLIC
        if min_pieces:
            pieces = randint(min_pieces, max_pieces)
        else:
            pieces = max_pieces
        filename = "random-{}.png".format(str(uuid4()))
        d = time.strftime("%Y_%m_%d.%H_%M_%S", time.localtime())
        # Derive a short, unique-enough puzzle id from the filename + time.
        puzzle_id = "random-{}".format(
            hashlib.sha224(bytes("%s%s" % (filename, d),
                                 "utf-8")).hexdigest()[0:30])

        # Create puzzle dir
        puzzle_dir = os.path.join(current_app.config.get("PUZZLE_RESOURCES"),
                                  puzzle_id)
        os.mkdir(puzzle_dir)

        # Create a random plasma-fractal image and post-process it into the
        # original.jpg that the renderer expects.
        file_path = os.path.join(puzzle_dir, filename)
        subprocess.check_call(
            ["convert", "-size", "200x150", "plasma:fractal", file_path])
        subprocess.check_call([
            "convert",
            file_path,
            "-paint",
            "10",
            "-blur",
            "0x5",
            "-paint",
            "10",
            "-filter",
            "box",
            "-resize",
            size,
            "+repage",
            "-auto-level",
            "-quality",
            "85%",
            "-format",
            "jpg",
            os.path.join(puzzle_dir, "original.jpg"),
        ])
        os.unlink(file_path)

        # Insert puzzle directly to render queue instead of setting status to NEEDS_MODERATION
        d = {
            "puzzle_id": puzzle_id,
            "pieces": pieces,
            "name": filename,
            "link": link,
            "description": description,
            "bg_color": bg_color,
            "owner": user,
            "queue": QUEUE_NEW,
            "status": IN_RENDER_QUEUE,
            "permission": permission,
        }
        cur.execute(read_query_file("insert_puzzle.sql"), d)
        db.commit()

        puzzle = rowify(
            cur.execute(
                read_query_file("select_puzzle_id_by_puzzle_id.sql"),
                {
                    "puzzle_id": puzzle_id
                },
            ).fetchall(),
            cur.description,
        )[0][0]
        puzzle = puzzle["puzzle"]

        cur.execute(
            read_query_file("add-puzzle-file.sql"),
            {
                "puzzle": puzzle,
                "name": "original",
                "url": "/resources/{0}/original.jpg".format(
                    puzzle_id),  # Not a public file (only on admin page)
            },
        )

        cur.execute(
            read_query_file("add-puzzle-file.sql"),
            {
                "puzzle": puzzle,
                "name": "preview_full",
                "url": "/resources/{0}/preview_full.jpg".format(puzzle_id),
            },
        )

        # Every generated puzzle is its own classic-variant instance.
        classic_variant = cur.execute(
            read_query_file("select-puzzle-variant-id-for-slug.sql"), {
                "slug": CLASSIC
            }).fetchone()[0]
        cur.execute(
            read_query_file("insert-puzzle-instance.sql"),
            {
                "original": puzzle,
                "instance": puzzle,
                "variant": classic_variant
            },
        )

        db.commit()
        # Explicit keyword args instead of the fragile format(**locals()).
        print("pieces: {pieces} {puzzle_id}".format(pieces=pieces,
                                                    puzzle_id=puzzle_id))

    puzzles = rowify(
        cur.execute(
            read_query_file("select-puzzles-in-render-queue.sql"),
            {
                "IN_RENDER_QUEUE": IN_RENDER_QUEUE,
                "REBUILD": REBUILD
            },
        ).fetchall(),
        cur.description,
    )[0]
    print("found {0} puzzles to render".format(len(puzzles)))

    # push each puzzle to artist job queue
    for puzzle in puzzles:
        # The job handle is not needed; result_ttl=0 discards the result.
        current_app.createqueue.enqueue_call(
            func="api.jobs.pieceRenderer.render",
            args=([puzzle]),
            result_ttl=0,
            timeout="24h",
        )

    cur.close()
コード例 #26
0
def generate_puzzle_instances(count=1, min_pieces=0, max_pieces=9):
    """Generate random puzzle instances for players with open slots.

    For each iteration: pick a random player that has an available user
    puzzle slot, pick a random original puzzle, create a new instance of it
    in the render queue, fill the player's slot, and enqueue a render job.

    :param count: how many instances to attempt to create.
    :param min_pieces: when truthy, pick a random piece count between
        min_pieces and max_pieces; otherwise use max_pieces exactly.
    :param max_pieces: maximum (or exact) piece count.
    """
    cur = db.cursor()
    for _ in range(count):
        bg_color = "#444444"
        permission = PUBLIC
        if min_pieces:
            pieces = randint(min_pieces, max_pieces)
        else:
            pieces = max_pieces
        # fetchone() returns None when no player has an open slot; guard
        # before subscripting (the original `.fetchone()[0]` raised
        # TypeError in that case instead of skipping gracefully).
        row = cur.execute(
            read_query_file(
                "select-random-player-with-available-user-puzzle-slot.sql")
        ).fetchone()
        result = row[0] if row else None
        if result:
            player = result
            # select a random original puzzle

            result = cur.execute(
                read_query_file(
                    "select-random-puzzle-for-new-puzzle-instance.sql"),
                {
                    "ACTIVE": ACTIVE,
                    "IN_QUEUE": IN_QUEUE,
                    "COMPLETED": COMPLETED,
                    "FROZEN": FROZEN,
                    "REBUILD": REBUILD,
                    "IN_RENDER_QUEUE": IN_RENDER_QUEUE,
                    "RENDERING": RENDERING,
                    "PUBLIC": PUBLIC,
                },
            ).fetchall()
            if not result:
                print("no puzzle found")
                continue

            (result, col_names) = rowify(result, cur.description)
            originalPuzzleData = result[0]

            filename = "random-{}.png".format(str(uuid4()))
            d = time.strftime("%Y_%m_%d.%H_%M_%S", time.localtime())
            # Derive a short, unique-enough puzzle id from filename + time.
            puzzle_id = "rnd-instance-{}".format(
                hashlib.sha224(bytes("%s%s" % (filename, d),
                                     "utf-8")).hexdigest()[0:30])

            # Create puzzle dir
            puzzle_dir = os.path.join(
                current_app.config.get("PUZZLE_RESOURCES"), puzzle_id)
            os.mkdir(puzzle_dir)

            # Insert puzzle directly to render queue
            d = {
                "puzzle_id": puzzle_id,
                "pieces": pieces,
                "name": originalPuzzleData["name"],
                "link": originalPuzzleData["link"],
                "description": originalPuzzleData["description"],
                "bg_color": bg_color,
                "owner": player,
                "queue": QUEUE_NEW,
                "status": IN_RENDER_QUEUE,
                "permission": permission,
            }
            cur.execute(
                """insert into Puzzle (
            puzzle_id,
            pieces,
            name,
            link,
            description,
            bg_color,
            owner,
            queue,
            status,
            permission) values
            (:puzzle_id,
            :pieces,
            :name,
            :link,
            :description,
            :bg_color,
            :owner,
            :queue,
            :status,
            :permission);
            """,
                d,
            )
            db.commit()

            result = cur.execute(
                "select * from Puzzle where puzzle_id = :puzzle_id;",
                {
                    "puzzle_id": puzzle_id
                },
            ).fetchall()
            if not result:
                raise Exception("no puzzle instance")

            (result, col_names) = rowify(result, cur.description)
            puzzleData = result[0]
            puzzle = puzzleData["id"]

            classic_variant = cur.execute(
                read_query_file("select-puzzle-variant-id-for-slug.sql"),
                {
                    "slug": CLASSIC
                },
            ).fetchone()[0]
            cur.execute(
                read_query_file("insert-puzzle-instance.sql"),
                {
                    "original": originalPuzzleData["id"],
                    "instance": puzzle,
                    "variant": classic_variant,
                },
            )

            cur.execute(
                read_query_file("fill-user-puzzle-slot.sql"),
                {
                    "player": player,
                    "puzzle": puzzle
                },
            )

            db.commit()
            # Explicit keyword args instead of the fragile format(**locals()).
            print("pieces: {pieces} {puzzle_id}".format(pieces=pieces,
                                                        puzzle_id=puzzle_id))

            # The job handle is not needed; result_ttl=0 discards the result.
            current_app.createqueue.enqueue_call(
                func="api.jobs.pieceRenderer.render",
                args=([puzzleData]),
                result_ttl=0,
                timeout="24h",
            )
    cur.close()
コード例 #27
0
    # NOTE(review): this is the interior of a database-initialization routine;
    # its def line (and where db_file/config come from) is outside this view.
    db = sqlite3.connect(db_file)

    application_name = config.get("UNSPLASH_APPLICATION_NAME")

    # TODO: Update to use sqlalchemy
    # db = create_engine(config['CHILL_DATABASE_URI'], echo=config['DEBUG'])
    cur = db.cursor()

    ## Create the new tables and populate with initial data
    query_files = list(PUZZLE_CREATE_TABLE_LIST)
    query_files.append("initial_puzzle_variant.sql")
    query_files.append("insert_initial_admin_user.sql")
    query_files.append("insert_initial_anon_user.sql")

    for file_path in query_files:
        query = read_query_file(file_path)
        # Run each statement separately since sqlite3's execute() handles
        # only a single statement at a time.
        for statement in query.split(";"):
            cur.execute(statement)
            db.commit()

    ## Set initial licenses
    for statement in read_query_file("initial_licenses.sql").split(";"):
        cur.execute(statement, {"application_name": application_name})
        db.commit()

    ## Set puzzle features that are enabled
    puzzle_features = config.get("PUZZLE_FEATURES", set())
    print(f"Enabling puzzle features: {puzzle_features}")
    for query_file in puzzle_features_init_list(puzzle_features):
        cur.execute(read_query_file(query_file))
        db.commit()
コード例 #28
0
def find_puzzles_in_database(results=None):
    """Check each rendered puzzle in the database for valid piece groups.

    For every puzzle that has rendered pieces, add or update a test-result
    entry keyed by the puzzle's numeric id.  A puzzle passes when exactly
    one immovable piece group exists; zero or multiple groups fail.

    :param results: optional dict of prior test results to merge into; it is
        copied, never mutated.  Defaults to an empty dict (replaces the
        previous mutable default argument ``results={}``).
    :return: dict mapping puzzle id to its test-result dict.
    """
    _results = {} if results is None else results.copy()
    cur = db.cursor()

    (puzzles_in_database, col_names) = rowify(
        cur.execute(
            read_query_file("select_all_puzzles_with_rendered_pieces.sql"),
        ).fetchall(),
        cur.description,
    )
    if not puzzles_in_database:
        # no matching puzzles found
        cur.close()
        return _results

    for puzzle_data in puzzles_in_database:
        puzzle = puzzle_data["id"]
        test_result = _results.get(
            puzzle,
            {
                "puzzle": puzzle,
                "puzzle_id": puzzle_data["puzzle_id"],
                "msg": "",
                "test": [],
            },
        )
        test_result["test"].append("database")
        _results.update({puzzle: test_result})

        # TODO: Check piece data for this puzzle to see if pieces that are
        # immovable have the same parent as top left piece.
        (immovable_pieces, col_names) = rowify(
            cur.execute(
                read_query_file(
                    "select_immovable_piece_groups_for_puzzle.sql"),
                {
                    "puzzle": puzzle
                },
            ).fetchall(),
            cur.description,
        )

        # Fail if no immovable piece groups
        if not immovable_pieces:
            test_result[
                "msg"] = "{msg} {puzzle_id} {puzzle} no immovable piece groups found in database".format(
                    msg=test_result.get("msg", ""),
                    puzzle_id=puzzle_data["puzzle_id"],
                    puzzle=puzzle,
                )
            test_result["status"] = "fail"
            test_result["reason"] = "fail_no_immovable_piece_groups"

        # Fail if more than one immovable piece group
        if len(immovable_pieces) > 1:
            test_result[
                "msg"] = "{msg} {puzzle_id} {puzzle} multiple immovable piece groups found in database".format(
                    msg=test_result.get("msg", ""),
                    puzzle_id=puzzle_data["puzzle_id"],
                    puzzle=puzzle,
                )
            test_result["status"] = "fail"
            test_result["reason"] = "fail_multiple_immovable_piece_groups"

        # Pass if only one immovable piece group found
        if len(immovable_pieces) == 1:
            test_result[
                "msg"] = "{msg} {puzzle_id} {puzzle} single immovable piece group found in database".format(
                    msg=test_result.get("msg", ""),
                    puzzle_id=puzzle_data["puzzle_id"],
                    puzzle=puzzle,
                )
            test_result["status"] = "pass"
            test_result["reason"] = "pass"

    # Close the cursor before returning (previously leaked).
    cur.close()
    return _results
コード例 #29
0
def generate_users(count):
    """Create `count` random users with bit icons, slots, and review names."""

    def generate_name(user_id):
        # TODO: Use generated names from https://www.name-generator.org.uk/
        return "Random Name for " + str(user_id)

    cur = db.cursor()

    for _ in range(count):
        # Random IPv4-looking address plus a random starting score.
        ip = ".".join(str(randint(0, 255)) for _ in range(4))
        score = randint(0, 15000)
        login = generate_user_login()
        insert_user_query = """insert into User (points, score, login, m_date, ip) values
                (:points, :score, :login, datetime('now'), :ip)"""
        cur.execute(
            insert_user_query,
            {
                "ip": ip,
                "login": login,
                "points": current_app.config["NEW_USER_STARTING_POINTS"],
                "score": score,
            },
        )
        rows = cur.execute(QUERY_USER_ID_BY_LOGIN, {
            "ip": ip,
            "login": login
        }).fetchall()
        (rows, _col_names) = rowify(rows, cur.description)
        user_id = rows[0]["id"]

        # Claim a random bit icon for the new user.
        cur.execute(read_query_file("claim_random_bit_icon.sql"),
                    {"user": user_id})

        # Roughly half the users get puzzle slots; the slot count is
        # re-rolled upward whenever the maximum is hit.
        for _chance in range(randint(0, 1)):
            slot_total = randint(1, 6)
            if slot_total == 6:
                slot_total = randint(1, 50)
            if slot_total == 50:
                slot_total = randint(50, 250)
            for _slot in range(slot_total):
                cur.execute(read_query_file("add-new-user-puzzle-slot.sql"),
                            {"player": user_id})

        # A 1-in-6 chance of registering a player name for review.
        if randint(0, 5) == 5:
            display_name = generate_name(user_id)
            name = display_name.lower()
            cur.execute(
                read_query_file(
                    "add-user-name-on-name-register-for-player-to-be-reviewed.sql"
                ),
                {
                    "player_id": user_id,
                    "name": name,
                    "display_name": display_name,
                    "time": "+{} minutes".format(randint(1, 60)),
                },
            )

    cur.close()
    db.commit()
コード例 #30
0
ファイル: scheduler.py プロジェクト: b0r1ngx/puzzle-massive
    def do_task(self):
        """Retire long-inactive public puzzles and promote queued puzzles so
        each skill-level range keeps enough active puzzles.

        Logs the task (via self.log_task) only when at least one puzzle's
        status was actually changed through the internal API.
        """
        super().do_task()
        made_change = False

        cur = db.cursor()

        # Move active public puzzles that have been idle too long back into
        # the inactive queue.
        result = cur.execute(
            fetch_query_string(
                "select-active-public-puzzles-due-for-retirement.sql")
        ).fetchall()
        if result:
            for item in result:
                puzzle_id = item[0]
                current_app.logger.debug(
                    "{} has been inactive for more than 7 days".format(
                        puzzle_id))
                r = requests.patch(
                    "http://{HOSTAPI}:{PORTAPI}/internal/puzzle/{puzzle_id}/details/"
                    .format(
                        HOSTAPI=current_app.config["HOSTAPI"],
                        PORTAPI=current_app.config["PORTAPI"],
                        puzzle_id=puzzle_id,
                    ),
                    json={
                        "status": IN_QUEUE,
                        "queue": QUEUE_INACTIVE
                    },
                )
                if r.status_code != 200:
                    # Fixed: previous message referenced undefined names
                    # (m_date, puzzle_data) and raised NameError here.
                    current_app.logger.warning(
                        "Puzzle details api error. Could not update puzzle status. Skipping {puzzle_id}"
                        .format(puzzle_id=puzzle_id))
                    continue
                made_change = True

        # select all ACTIVE puzzles within each skill range
        skill_range_active_count = 2
        for (low, high) in SKILL_LEVEL_RANGES:
            result = cur.execute(
                read_query_file("count-active-puzzles-within-skill-range.sql"),
                {
                    "low": low,
                    "high": high
                },
            ).fetchone()
            if result is None or result[0] < skill_range_active_count:
                result = cur.execute(
                    fetch_query_string(
                        "select-puzzle-next-in-queue-to-be-active.sql"),
                    {
                        "low": low,
                        "high": high,
                        "active_count": skill_range_active_count,
                    },
                ).fetchall()
                if result:
                    current_app.logger.debug(
                        "Bump next puzzle in queue to be active for skill level range {low}, {high}"
                        .format(low=low, high=high))
                    # This use to be 4 days in the past, now uses present time.
                    m_date_now = strftime("%Y-%m-%d %H:%M:%S", gmtime(time()))
                    for item in result:
                        puzzle_id = item[0]
                        current_app.logger.debug(
                            "{} is next in queue to be active".format(
                                puzzle_id))
                        r = requests.patch(
                            "http://{HOSTAPI}:{PORTAPI}/internal/puzzle/{puzzle_id}/details/"
                            .format(
                                HOSTAPI=current_app.config["HOSTAPI"],
                                PORTAPI=current_app.config["PORTAPI"],
                                puzzle_id=puzzle_id,
                            ),
                            json={
                                "status": ACTIVE,
                                "m_date": m_date_now
                            },
                        )
                        if r.status_code != 200:
                            # Fixed: previous code referenced undefined
                            # puzzle_data here (NameError); use puzzle_id.
                            current_app.logger.warning(
                                "Puzzle details api error. Could not update puzzle m_date to {m_date} and status to active. Skipping {puzzle_id}"
                                .format(
                                    m_date=m_date_now,
                                    puzzle_id=puzzle_id,
                                ))
                            continue
                        made_change = True

        if made_change:
            self.log_task()

        cur.close()