def __init__(self, game_id):
    self.game_id = game_id
    # Log everything (DEBUG and up) to app.log, overwriting the file on
    # each run.
    logging.basicConfig(level=logging.DEBUG,
                        filename='app.log',
                        filemode='w',
                        format='%(name)s - %(levelname)s - %(message)s')
    self.config = configuration()
    logging.debug(self.config)
    # Authenticate against the Lichess API, open a board-state stream
    # for this game, and hand both to the terminal UI.
    self.api = api.Api(api_key=self.config.lichess.api_key)
    self.stream = self.api.stream_board_state(self.game_id)
    self.tui = terminal.GameUi(config=self.config,
                               stream=self.stream,
                               game_id=self.game_id,
                               api=self.api)
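
# A minimal usage sketch; the enclosing class is not shown in this
# excerpt, so the name "Game" and the run() entry point below are
# assumptions for illustration:
#
#     game = Game(game_id="abc123")
#     game.tui.run()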
Example #2
import flask
import logging

# Our modules
from src.letterbag import LetterBag
from src.vocab import Vocab
from src.jumble import jumbled
import src.config as config

###
# Globals
###
app = flask.Flask(__name__)

CONFIG = config.configuration()
app.secret_key = CONFIG.SECRET_KEY  # Should allow using session variables

#
# One shared 'Vocab' object, read-only after initialization,
# shared by all threads and instances.  Otherwise we would have to
# store it in the browser and transmit it on each request/response cycle,
# or else read it from the file on each request/response cycle,
# neither of which would be suitable for responding keystroke by keystroke
# (a minimal handler sketch follows under "Pages" below).

WORDS = Vocab(CONFIG.VOCAB)

###
# Pages
###
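
# A minimal sketch of one page handler built on the shared objects above.
# The "/_check" route, its "text" parameter, and the Vocab method has()
# are assumptions for illustration, not the app's actual handlers.

@app.route("/_check")
def check_word():
    # Look the typed text up in the single shared, read-only Vocab
    # instance; nothing is re-read from disk on a keystroke.
    text = flask.request.args.get("text", type=str, default="")
    return flask.jsonify(matched=WORDS.has(text))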
Example #3
import requests
import logging
import time

from bs4 import BeautifulSoup
from src import config
conf = config.configuration()


def get_tor_site_source(uri: str) -> dict:
    """
    Fetch a URI and return its HTML source and title as a dict.
    """
    timeout = {"source": "timeout", "title": "timeout"}
    try:
        # Default to plain HTTP when no scheme is given (startswith also
        # accepts https URIs, unlike a bare substring check).
        if not uri.startswith("http"):
            uri = f"http://{uri}"
        # Route requests through Polipo's local HTTP port, which forwards
        # on to the Tor SOCKS proxy (127.0.0.1:8123 is Polipo's default
        # listen address; adjust for your setup).
        proxy = {"http": "http://127.0.0.1:8123", "https": "http://127.0.0.1:8123"}
        headers = {"user-agent": "Mozilla/5.0 (Windows NT 6.1; rv:60.0) Gecko/20100101 Firefox/60.0"}

        if conf.use_proxy:
            r = requests.get(uri, headers=headers, proxies=proxy, timeout=10)
        else:
            r = requests.get(uri, headers=headers, timeout=10)

        soup = BeautifulSoup(r.text, "html.parser")

        try: