def main():
    config_file = "site.cfg"
    config = loadConfig(config_file)
    cookie_secret = config.get("SECURE_COOKIE_SECRET")
    app = make_app(config=config_file,
                   cookie_secret=cookie_secret,
                   database_writable=True)

    logger.setLevel(logging.DEBUG if config["DEBUG"] else logging.INFO)

    with app.app_context():
        migrate(config)
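
All of these entry points call a make_app factory and then do their work inside app.app_context(). The factory itself is not shown in these examples; the sketch below is only a hypothetical minimal version of such a Flask application factory, with the config, cookie_secret, and database_writable keyword arguments treated as assumptions about its signature.

from flask import Flask


def make_app(config=None, cookie_secret=None, **kwargs):
    # Hypothetical sketch; the project's real make_app is not shown in
    # these examples and may differ.
    app = Flask(__name__)
    if config:
        # Load settings such as DEBUG and SECURE_COOKIE_SECRET from a
        # Python-syntax config file like site.cfg.
        app.config.from_pyfile(config)
    # Extra keyword arguments (database_writable=True, test settings, ...)
    # become plain config values.
    app.config.update(kwargs)
    if cookie_secret:
        app.secret_key = cookie_secret
    return app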
Example 2
def main():
    args = docopt(__doc__)
    config_file = args["--config"]
    show_list = args.get("--list")
    config = loadConfig(config_file)
    cookie_secret = config.get("SECURE_COOKIE_SECRET")
    app = make_app(config=config_file, cookie_secret=cookie_secret)

    with app.app_context():
        if not show_list:
            render_all()
        else:
            list_all()
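
docopt builds the args dictionary from the module docstring, which is not included in this snippet. A hypothetical usage block that would yield the --config and --list keys read above (script name and wording are placeholders):

"""Render pages for the site.

Usage:
  render.py --config <file> [--list]

Options:
  --config <file>  Path to the site config file, for example site.cfg.
  --list           List what would be rendered instead of rendering it all.
"""

With that docstring, docopt(__doc__) returns a dictionary where args["--config"] holds the file path and args["--list"] is True or False.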
Example 3
    def __init__(self, puzzle_id, photo, description, config_file,
                 cookie_secret):
        threading.Thread.__init__(self)
        self.puzzle_id = puzzle_id
        self.photo = photo
        self.description = description

        self.app = make_app(config=config_file, cookie_secret=cookie_secret)

        self.application_id = self.app.config.get("UNSPLASH_APPLICATION_ID")
        self.puzzle_resources = self.app.config.get("PUZZLE_RESOURCES")
        self.application_name = self.app.config.get(
            "UNSPLASH_APPLICATION_NAME")
Example 4
def main():
    args = docopt(__doc__, version="0.0")

    config_file = args["--config"]
    config = loadConfig(config_file)
    cookie_secret = config.get("SECURE_COOKIE_SECRET")

    count = int(args.get("--count"))
    size = args.get("--size")
    max_pieces = int(args.get("--pieces"))
    min_pieces = int(args.get("--min-pieces"))
    puzzles = args.get("--puzzles")

    app = make_app(config=config_file,
                   cookie_secret=cookie_secret,
                   database_writable=True)

    with app.app_context():
        if args.get("players"):
            print("Creating {} players".format(count))
            generate_users(count)

        elif args.get("puzzles"):
            print(
                "Creating {count} puzzles at {size} with up to {max_pieces} pieces"
                .format(count=count,
                        size=size,
                        max_pieces=max_pieces,
                        min_pieces=min_pieces))
            generate_puzzles(count=count,
                             size=size,
                             min_pieces=min_pieces,
                             max_pieces=max_pieces)

        elif args.get("instances"):
            print(
                "Creating {count} puzzle instances with up to {max_pieces} pieces"
                .format(count=count,
                        max_pieces=max_pieces,
                        min_pieces=min_pieces))
            generate_puzzle_instances(count=count,
                                      min_pieces=min_pieces,
                                      max_pieces=max_pieces)

        elif args.get("activity"):
            print("Simulating puzzle activity")
            puzzle_ids = []
            if puzzles:
                puzzle_ids = puzzles.split(",")
            simulate_puzzle_activity(puzzle_ids, count=count)
Example 5
def main():
    ""
    args = docopt(__doc__)
    config_file = args["--config"]
    task_name = args.get("--task")
    show_list = args.get("--list")

    if task_name:
        OneOffTask = globals().get(task_name)
        # Guard against unknown names; issubclass(None, Task) raises TypeError.
        if not (isinstance(OneOffTask, type) and issubclass(OneOffTask, Task)):
            print("{} is not a task in the list".format(task_name))
            return

    if show_list:
        for item in globals():
            Item = globals().get(item)
            if isinstance(Item, type) and issubclass(Item, Task):
                print(item)
        return

    config = loadConfig(config_file)
    cookie_secret = config.get("SECURE_COOKIE_SECRET")

    app = make_app(
        config=config_file,
        cookie_secret=cookie_secret,
    )

    with app.app_context():
        # Check if running a one-off, otherwise just run main
        if task_name:
            OneOffTask = globals().get(task_name)
            if issubclass(OneOffTask, Task):
                # Run the task
                oneOffTask = OneOffTask()
                oneOffTask()

        else:
            try:
                current_app.logger.info(
                    "Delaying start of initial task by 20 seconds.")
                sleep(20)
                all_tasks()
            except requests.exceptions.ConnectionError as err:
                current_app.logger.warning(
                    "Connection error. Retrying in {} seconds... \nError: {}".
                    format(SCHEDULER_RETRY_INTERVAL, err))
                sleep(SCHEDULER_RETRY_INTERVAL)
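
The runner above looks up Task subclasses in globals() and, for a one-off, instantiates the class and calls the instance. A hypothetical task following that convention (class name and body are placeholders; Task is the project's base class referenced above):

from flask import current_app


class PurgeStalePuzzlesTask(Task):
    "Hypothetical one-off task; only the instantiate-then-call convention is taken from the runner above."

    def __call__(self):
        # The runner invokes the instance inside app.app_context(), so
        # current_app and its config are available here.
        current_app.logger.info("running %s", self.__class__.__name__)

Passing the class name with --task would select and run it; passing --list would print it among the available tasks.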
Example 6
def main():
    ""
    args = docopt(__doc__)
    config_file = args["--config"]
    config = loadConfig(config_file)
    cookie_secret = config.get("SECURE_COOKIE_SECRET")
    redis_connection = get_redis_connection(config, decode_responses=False)
    app = make_app(config=config_file, cookie_secret=cookie_secret)

    with app.app_context():
        with Connection(redis_connection):
            worker = Worker(list(map(Queue, listen)))

            # If the render process has an exception
            worker.push_exc_handler(handle_fail)

            worker.work(with_scheduler=True)
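
Worker, Queue, and Connection come from the rq library; listen is a list of queue names this worker consumes, and handle_fail runs when a job raises. Jobs are put on those queues elsewhere; a minimal, hypothetical producer (queue name and job function are placeholders) could look like:

from redis import Redis
from rq import Queue


def render_puzzle(puzzle_id):
    # Placeholder job; rq imports the function by dotted path when a
    # worker picks the job up.
    print("rendering", puzzle_id)


redis_connection = Redis(host="127.0.0.1", port=6379, db=1)
queue = Queue("puzzle_create", connection=redis_connection)
job = queue.enqueue(render_puzzle, "puzzle-id-1")
print("enqueued", job.id)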
Example 7
def main():
    ""
    args = docopt(__doc__)
    config_file = args["--config"]
    config = loadConfig(config_file)
    cookie_secret = config.get("SECURE_COOKIE_SECRET")
    redis_connection = get_redis_connection(config, decode_responses=False)
    app = make_app(config=config_file, cookie_secret=cookie_secret)

    with app.app_context():
        with Connection(redis_connection):
            worker = Worker(list(map(Queue, listen)))

            # TODO: handle exceptions
            # worker.push_exc_handler(pieceTranslate.handle_piece_error)

            worker.work()
Example 8
def main():
    """"""
    config_file = "site.cfg"

    config = loadConfig(config_file)
    cookie_secret = config.get("SECURE_COOKIE_SECRET")

    args = docopt(__doc__)
    is_destructive = args["--destructive"]

    app = make_app(
        config=config_file,
        cookie_secret=cookie_secret,
    )

    with app.app_context():
        if current_app.config["LOCAL_PUZZLE_RESOURCES"]:
            move_all_from_s3(is_destructive=is_destructive)
        else:
            move_all_to_s3(is_destructive=is_destructive)
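
The move_all_to_s3 and move_all_from_s3 helpers are not shown in these examples. As a rough, hypothetical illustration of what uploading a single resource could involve, assuming boto3 and the PUZZLE_RESOURCES_BUCKET* settings that appear in the test configuration further down (function name and arguments are placeholders):

import boto3
from flask import current_app


def upload_one(local_path, key):
    # Hypothetical helper, not the project's implementation.
    s3 = boto3.client(
        "s3",
        region_name=current_app.config["PUZZLE_RESOURCES_BUCKET_REGION"],
        endpoint_url=current_app.config["PUZZLE_RESOURCES_BUCKET_ENDPOINT_URL"],
    )
    s3.upload_file(
        local_path,
        current_app.config["PUZZLE_RESOURCES_BUCKET"],
        key,
        ExtraArgs={
            "CacheControl": current_app.config[
                "PUZZLE_RESOURCES_BUCKET_OBJECT_CACHE_CONTROL"
            ]
        },
    )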
Example 9
    def setUp(self):
        self.tmp_db = tempfile.NamedTemporaryFile()
        self.tmp_purge_list = tempfile.NamedTemporaryFile()
        self.tmp_puzzle_resources = tempfile.mkdtemp()
        self.tmp_puzzle_archive = tempfile.mkdtemp()
        cookie_secret = "oatmeal"
        self.app = make_app(
            SQLITE_DATABASE_URI=self.tmp_db.name,
            HOSTREDIS="127.0.0.1",
            PORTREDIS=6379,
            REDIS_DB=1,
            REDIS_URL="redis://127.0.0.1:6379/1/",
            HOSTAPI="127.0.0.1",
            PORTAPI=6310,
            DEBUG=True,
            TESTING=True,  # Ignore wal journal_mode requirement
            PUZZLE_RESOURCES=self.tmp_puzzle_resources,
            PUZZLE_ARCHIVE=self.tmp_puzzle_archive,
            PURGEURLLIST=self.tmp_purge_list.name,
            MINIMUM_PIECE_COUNT=20,
            MAX_POINT_COST_FOR_REBUILDING=1000,
            MAX_POINT_COST_FOR_DELETING=1000,
            BID_COST_PER_PUZZLE=100,
            POINT_COST_FOR_CHANGING_BIT=100,
            POINT_COST_FOR_CHANGING_NAME=100,
            NEW_USER_STARTING_POINTS=1300,
            POINTS_CAP=15000,
            SECURE_COOKIE_SECRET=cookie_secret,
            cookie_secret=cookie_secret,
            database_writable=True,
        )

        self.db = db
        # TODO: set logger level to DEBUG when actively developing the tests
        self.app.logger.setLevel(logging.WARN)
        with self.app.app_context():
            with self.app.test_client() as c:
                init_db()
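
The setUp above points make_app at a temporary SQLite database and test Redis settings, then creates the schema with init_db. A hypothetical test method building on that fixture (the route and expected status are placeholders):

    def test_front_page_responds(self):
        # Hypothetical test using Flask's test client from the app built
        # in setUp; the route checked here is a placeholder.
        with self.app.app_context():
            with self.app.test_client() as c:
                response = c.get("/")
                self.assertEqual(response.status_code, 200)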
Example 10
            # print('transfer: {0}'.format(puzzle))
            transfer(puzzle)
            memory = redis_connection.info(section="memory")
            # print('used_memory: {used_memory_human}'.format(**memory))
            if memory.get("used_memory") < target_memory:
                break


def transferAll(cleanup=False):
    # Get all puzzles
    puzzles = redis_connection.zrange("pcupdates", 0, -1)
    current_app.logger.info("transferring puzzles: {0}".format(puzzles))
    for puzzle in puzzles:
        current_app.logger.info("transfer puzzle: {0}".format(puzzle))
        transfer(puzzle, cleanup=cleanup)
        memory = redis_connection.info(section="memory")
        if cleanup:
            current_app.logger.info(
                "used_memory: {used_memory_human}".format(**memory))


if __name__ == "__main__":
    args = docopt(__doc__)
    config_file = args["--config"]
    config = loadConfig(config_file)
    cookie_secret = config.get("SECURE_COOKIE_SECRET")
    app = make_app(config=config_file, cookie_secret=cookie_secret)

    with app.app_context():
        transferAll(args["--cleanup"])
Example 11
def main():
    config_file = "site.cfg"
    config = loadConfig(config_file)
    cookie_secret = config.get("SECURE_COOKIE_SECRET")
    app = make_app(config=config_file,
                   cookie_secret=cookie_secret,
                   database_writable=True)

    logger.setLevel(logging.DEBUG if config["DEBUG"] else logging.INFO)

    with app.app_context():
        cur = db.cursor()

        # Always create the PuzzleMassive table in case it doesn't exist.
        cur.execute(read_query_file("create_table_puzzle_massive.sql"))
        db.commit()

        script_file = sys.argv[0]

        migrate_scripts = glob(
            f"{os.path.dirname(script_file)}/migrate_puzzle_massive_database_version_[0-9][0-9][0-9].py"
        )
        if len(migrate_scripts) == 0:
            logger.warning(
                f"No migrate scripts found for glob: '{os.path.dirname(script_file)}/migrate_puzzle_massive_database_version_[0-9][0-9][0-9].py'"
            )
            cur.close()
            sys.exit(0)

        next_migrate_script = get_next_migrate_script(migrate_scripts)
        sanity_count = 0
        while next_migrate_script:
            version = version_number(next_migrate_script)
            logger.info(
                f"Executing {next_migrate_script} to migrate from PuzzleMassive database version {version}."
            )
            logger.debug(f"sanity count {sanity_count}")
            sanity_count = sanity_count + 1

            # Execute the next_migrate_script
            try:
                output = subprocess.run([sys.executable, next_migrate_script],
                                        check=True,
                                        capture_output=True)
            except subprocess.CalledProcessError as err:
                logger.debug(str(err))
                logger.error(f"Failed when executing {next_migrate_script}.")
                logger.info(f"\n{err.stdout.decode()}\n")
                logger.error(f"\n{err.stderr.decode()}\n")
                cur.close()
                sys.exit(1)
            logger.info(f"\n{output.stdout.decode()}\n")
            logger.info(f"\n{output.stderr.decode()}\n")

            # Bump the database_version assuming that the migrate script was
            # successful.
            now = datetime.datetime.utcnow().isoformat()
            logger.info(
                f"Successfully executed {next_migrate_script} and will now update database_version to be {version + 1}."
            )
            cur.execute(
                read_query_file("upsert_puzzle_massive.sql"), {
                    "key": "database_version",
                    "label": "Database Version",
                    "description":
                    f"Puzzle Massive Database version updated on {now}. Only update this via the {script_file}",
                    "intvalue": version + 1,
                    "textvalue": None,
                    "blobvalue": None
                })
            db.commit()

            next_migrate_script = get_next_migrate_script(migrate_scripts)
            if sanity_count > len(migrate_scripts):
                logger.error(
                    "Exiting out of while loop for checking next migrate scripts."
                )
                break
        else:
            logger.info("PuzzleMassive database version is up to date.")

        cur.close()
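
The migration loop above depends on two helpers that are not shown: version_number, which reads the three-digit version out of a migrate script's filename, and get_next_migrate_script, which selects the script matching the current database_version row. A hypothetical version_number consistent with the glob pattern used in main():

import os.path
import re


def version_number(migrate_script):
    # Hypothetical helper: returns 7 for a path ending in
    # "migrate_puzzle_massive_database_version_007.py".
    name = os.path.basename(migrate_script)
    match = re.search(r"_version_(\d{3})\.py$", name)
    return int(match.group(1))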
Example 12
def main():
    args = docopt(__doc__, version="0.0")

    config_file = args["--config"]
    config = loadConfig(config_file)
    cookie_secret = config.get("SECURE_COOKIE_SECRET")

    count = int(args.get("--count"))
    size = args.get("--size")
    max_pieces = int(args.get("--pieces"))
    min_pieces = int(args.get("--min-pieces"))
    puzzles = args.get("--puzzles")
    delay = float(args.get("--delay"))

    dictConfig({
        "version": 1,
        "formatters": {
            "default": {
                "format":
                "[%(asctime)s] %(levelname)s in %(module)s: %(message)s",
            }
        },
        "handlers": {
            "wsgi": {
                "class": "logging.StreamHandler",
                "stream": "ext://flask.logging.wsgi_errors_stream",
                "formatter": "default",
            }
        },
        "root": {
            "level": "INFO",
            "handlers": ["wsgi"]
        },
    })
    app = make_app(config=config_file,
                   cookie_secret=cookie_secret,
                   database_writable=True)

    with app.app_context():
        if args.get("players"):
            current_app.logger.info("Creating {} players".format(count))
            generate_users(count)

        elif args.get("puzzles"):
            current_app.logger.info(
                "Creating {count} puzzles at {size} with up to {max_pieces} pieces"
                .format(count=count,
                        size=size,
                        max_pieces=max_pieces,
                        min_pieces=min_pieces))
            generate_puzzles(count=count,
                             size=size,
                             min_pieces=min_pieces,
                             max_pieces=max_pieces)

        elif args.get("instances"):
            current_app.logger.info(
                "Creating {count} puzzle instances with up to {max_pieces} pieces"
                .format(count=count,
                        max_pieces=max_pieces,
                        min_pieces=min_pieces))
            generate_puzzle_instances(count=count,
                                      min_pieces=min_pieces,
                                      max_pieces=max_pieces)

        elif args.get("activity"):
            current_app.logger.info("Simulating puzzle activity")
            puzzle_ids = []
            if puzzles:
                puzzle_ids = puzzles.split(",")
            simulate_puzzle_activity(puzzle_ids, count=count, max_delay=delay)
Example 13
    def setUp(self):
        PUZZLE_PIECE_GROUPS = list(
            map(int, "100 200 400 800 1600 2200 4000 60000".split()))
        self.tmp_db = tempfile.NamedTemporaryFile()
        self.tmp_purge_list = tempfile.NamedTemporaryFile()
        self.tmp_puzzle_resources = tempfile.mkdtemp()
        self.tmp_puzzle_archive = tempfile.mkdtemp()
        cookie_secret = "oatmeal"
        self.app = make_app(
            HOST="127.0.0.1",
            HOSTNAME="legacy_puzzle_massive",
            PORT=6300,
            HOSTCACHE="127.0.0.1",
            HOSTORIGIN="127.0.0.1",
            HOSTPUBLISH="127.0.0.1",
            PORTPUBLISH=6311,
            HOSTDIVULGER="127.0.0.1",
            PORTDIVULGER=6320,
            HOSTSTREAM="127.0.0.1",
            PORTSTREAM=6321,
            CHILL_DATABASE_URI="sqlite:////var/lib/puzzle-massive/sqlite3/db",
            PUBLIC_URL_PREFIX="/site",
            ROOT_FOLDER="root",
            DOCUMENT_FOLDER="documents",
            MEDIA_FOLDER="media",
            MEDIA_PATH="/media/",
            THEME_STATIC_FOLDER="dist",
            PACKAGEJSON={
                "version": "0",
                "author": "Beaker"
            },
            VERSION="0",
            THEME_STATIC_PATH="/theme/0/",
            THEME_TEMPLATE_FOLDER="templates",
            THEME_SQL_FOLDER="queries",
            CACHE_NO_NULL_WARNING=True,
            CACHE_TYPE="null",
            FREEZER_DESTINATION="frozen",
            FREEZER_BASE_URL="http://legacy_puzzle_massive/",
            UNSPLASH_APPLICATION_ID="",
            UNSPLASH_APPLICATION_NAME="",
            UNSPLASH_SECRET="",
            SUGGEST_IMAGE_LINK="",
            ENVIRONMENT="development",
            NEW_PUZZLE_CONTRIB="rizzo",
            SMTP_HOST="",
            SMTP_PORT="",
            SMTP_USER="",
            SMTP_PASSWORD="",
            EMAIL_SENDER="",
            EMAIL_MODERATOR="",
            PUBLISH_WORKER_COUNT=2,
            STREAM_WORKER_COUNT=2,
            PUZZLE_PIECES_CACHE_TTL=20,
            MAX_RECENT_POINTS=25,
            RECENT_POINTS_EXPIRE=1209600,
            INITIAL_KARMA=10,
            MAX_KARMA=25,
            KARMA_POINTS_EXPIRE=3600,
            BLOCKEDPLAYER_EXPIRE_TIMEOUTS=list(
                map(int, "10 30 300 600 1200 2400 3600".split())),
            MAXIMUM_PIECE_COUNT=50000,
            PUZZLE_PIECE_GROUPS=PUZZLE_PIECE_GROUPS,
            ACTIVE_PUZZLES_IN_PIECE_GROUPS=list(
                map(int, "40  20  10  10  5    5    5    5".split())),
            MINIMUM_IN_QUEUE_PUZZLES_IN_PIECE_GROUPS=list(
                map(int, "6   6   2   2   1    1    1    1".split())),
            SKILL_LEVEL_RANGES=list(
                zip([0] + PUZZLE_PIECE_GROUPS, PUZZLE_PIECE_GROUPS)),
            MINIMUM_TO_CLAIM_ACCOUNT=1400,
            BIT_ICON_EXPIRATION=dict(
                map(
                    lambda x:
                    [int(x[:x.index(":")]), x[1 + x.index(":"):].strip()],
                    """
            0:    2 days,
            1:    4 days,
            50:   14 days,
            400:  1 months,
            800:  4 months,
            1600: 8 months
            """.split(","),
                )),
            PIECE_MOVE_TIMEOUT=4,
            MAX_PAUSE_PIECES_TIMEOUT=15,
            TOKEN_LOCK_TIMEOUT=5,
            TOKEN_EXPIRE_TIMEOUT=60 * 5,
            PLAYER_BIT_RECENT_ACTIVITY_TIMEOUT=10,
            PIECE_JOIN_TOLERANCE=100,
            AUTO_APPROVE_PUZZLES=True,
            LOCAL_PUZZLE_RESOURCES=True,
            CDN_BASE_URL="http://localhost:38685",
            PUZZLE_RESOURCES_BUCKET_REGION="local",
            PUZZLE_RESOURCES_BUCKET_ENDPOINT_URL=
            "http://s3fake.puzzle.massive.test:4568",
            PUZZLE_RESOURCES_BUCKET="chum",
            PUZZLE_RESOURCES_BUCKET_OBJECT_CACHE_CONTROL=
            "public, max-age:31536000, immutable",
            PUZZLE_RULES={"all"},
            PUZZLE_FEATURES={"all"},
            SHOW_OTHER_PLAYER_BITS=True,
            DOMAIN_NAME="puzzle.massive.test",
            SITE_TITLE="Test Puzzle Massive",
            HOME_PAGE_ROUTE="/chill/site/front/",
            SOURCE_CODE_LINK="https://github.com/jkenlooper/puzzle-massive/",
            M3="",
            SQLITE_DATABASE_URI=self.tmp_db.name,
            HOSTREDIS="127.0.0.1",
            PORTREDIS=6379,
            REDIS_DB=1,
            REDIS_URL="redis://127.0.0.1:6379/1/",
            HOSTAPI="127.0.0.1",
            PORTAPI=6310,
            DEBUG=True,
            TESTING=True,  # Ignore wal journal_mode requirement
            PUZZLE_RESOURCES=self.tmp_puzzle_resources,
            PUZZLE_ARCHIVE=self.tmp_puzzle_archive,
            PURGEURLLIST=self.tmp_purge_list.name,
            MINIMUM_PIECE_COUNT=20,
            MAX_POINT_COST_FOR_REBUILDING=1000,
            MAX_POINT_COST_FOR_DELETING=1000,
            BID_COST_PER_PUZZLE=100,
            POINT_COST_FOR_CHANGING_BIT=100,
            POINT_COST_FOR_CHANGING_NAME=100,
            NEW_USER_STARTING_POINTS=1300,
            POINTS_CAP=15000,
            REWARD_INSTANCE_SLOT_SCORE_THRESHOLD=0,
            SECURE_COOKIE_SECRET=cookie_secret,
            cookie_secret=cookie_secret,
            database_writable=True,
        )

        self.db = db
        # TODO: set logger level to DEBUG when actively developing the tests
        # self.app.logger.setLevel(logging.WARN)
        self.app.logger.setLevel(
            logging.DEBUG if self.debug else logging.CRITICAL)
        with self.app.app_context():
            with self.app.test_client() as c:
                init_db()