Example no. 1
def train(table: str = c.DEFAULT_TABLE,
          model_name: str = c.DEFAULT_MODEL_NAME,
          learning_rate: float = c.DEFAULT_LEARNING_RATE,
          epochs: int = c.DEFAULT_EPOCHS,
          batch_size: int = c.DEFAULT_BATCH_SIZE,
          matches: int = 10,
          threshold: int = c.DEFAULT_THRESHOLD,
          data_limit: Optional[int] = 50000) -> None:
    """
    Trains the network with examples stored in the database. Before the new weights are saved, the new network plays
    a series of games against the old network, and the new network is only accepted if it wins enough of them.

    :param table: Database table where examples were stored with gen_examples().
    :param model_name: Determines the save folder for the weights of the neural network.
    :param learning_rate: Network learning rate.
    :param epochs: Number of training epochs.
    :param batch_size: Batch size used in training.
    :param matches: Number of matches simulated to test the new network.
    :param threshold: Minimum difference between wins and losses across the matches required to accept the new network.
    :param data_limit: Number of examples used for training. Examples are drawn uniformly. None for no limit.
    """
    new_net = NNet(learning_rate=learning_rate,
                   epochs=epochs,
                   batch_size=batch_size,
                   model_name=model_name)
    old_net = NNet(model_name=model_name)
    db = Connector()
    examples = _df_to_examples(db.get_data(data_limit, table))
    new_net.train(examples)
    score = _match_series(nnet1=new_net, nnet2=old_net, matches=matches)
    _evaluate_score(new_net, score, model_name, threshold)
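A minimal usage sketch (an assumption, not part of the snippet), relying on the defaults from constants.py and on a table already filled by gen_examples(); the matches and data_limit values are illustrative only:

# Hypothetical call: train on up to 20,000 stored examples and keep the new
# weights only if the score from 20 test matches clears the threshold.
train(table=c.DEFAULT_TABLE,
      model_name=c.DEFAULT_MODEL_NAME,
      matches=20,
      threshold=c.DEFAULT_THRESHOLD,
      data_limit=20000)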
Example no. 2
def gen_examples(iterations: int = c.DEFAULT_ITERATIONS, nnet: NNet = None, table: str = c.DEFAULT_TRAINING_TABLE,
                 count_factor: float = 1.0) -> None:
    start = time.time()
    examples = _run_episode(nnet=nnet, iterations=iterations)
    for ex in copy.copy(examples):
        examples.append(_mirror_example(ex))
    print(f'generating data took {time.time() - start}s')

    start = time.time()
    db = Connector()
    db.insert_examples(examples, count_factor=count_factor, table=table)
    print(f'inserting data took {time.time() - start}s')
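A hedged usage sketch, not taken from the snippet; it simply forwards the same defaults the function already declares:

# Hypothetical call: generate one batch of examples with the default network
# settings and store it in the default training table.
gen_examples(iterations=c.DEFAULT_ITERATIONS,
             nnet=NNet(model_name=c.DEFAULT_MODEL_NAME),
             table=c.DEFAULT_TRAINING_TABLE,
             count_factor=1.0)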
Example no. 3
def train(table: str = c.DEFAULT_TRAINING_TABLE, model_name: str = c.DEFAULT_MODEL_NAME, matches: int = 10,
          threshold: int = c.DEFAULT_THRESHOLD, learning_rate: float = c.DEFAULT_LEARNING_RATE,
          epochs: int = c.DEFAULT_EPOCHS, batch_size: int = c.DEFAULT_BATCH_SIZE, data_limit: int = 600000) -> None:
    new_net = NNet(learning_rate=learning_rate, epochs=epochs, batch_size=batch_size, model_name=model_name)
    old_net = NNet(model_name=model_name)
    db = Connector()
    examples = db.df_to_examples(db.retrieve_data(
        query=f"SELECT * FROM {table} ORDER BY counter DESC LIMIT {data_limit};"
    ))
    new_net.train(examples)
    score = _match_series(nnet1=new_net, nnet2=old_net, matches=matches)
    _evaluate_score(new_net, score, model_name, threshold)
Example no. 4
def gen_examples(randomness: float = 0.7,
                 randomness_decline: float = 0.95,
                 max_moves: int = 80,
                 table: str = c.DEFAULT_TABLE) -> None:
    """
    Generates training examples using Stockfish and stores them in a database in algebraic notation. Set up a MySQL
    database first and configure the connection in constants.py. Also make sure that Stockfish is installed correctly.

    :param table: Table the data is stored in.
    :param randomness: Starting probability of playing a random move instead of the best move. This is
        necessary so that the same game is not simulated every time.
    :param randomness_decline: Factor applied to the randomness after each move. Should be less than 1 so that there is
        less randomness later in the game.
    :param max_moves: Stops the simulated game early to prevent overly long endgames.
    """
    game = Game()
    stockfish = Stockfish(c.STOCKFISH_PATH)
    examples = []
    moves = []
    for _ in range(max_moves):
        stockfish.set_position(moves)
        best_move = stockfish.get_best_move()

        value = _value(stockfish.get_evaluation())
        if best_move:
            examples.append((_truncate_fen(stockfish.get_fen_position()),
                             (best_move[:4], value)))

        if best_move and random.random() > randomness:
            move_alg = best_move
            move_tuple = _from_algebraic(move_alg)
        else:
            move_tuple = random.sample(game.game_legal_moves(), 1)[0]
            move_alg = _to_algebraic(move_tuple)

        if len(move_alg) == 5:
            print('pawn promotion')

        try:
            game.make_move(move_tuple[0], move_tuple[1])
            moves.append(move_alg)
        except ValueError:
            moves[-1] = moves[-1] + 'q'
            print(examples)

        randomness *= randomness_decline

        if game.game_winner():
            break
    db = Connector()
    db.insert_examples(examples, table)
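A minimal usage sketch, assuming c.STOCKFISH_PATH points to a working Stockfish binary and the MySQL connection in constants.py is configured; the number of games is an arbitrary assumption:

# Hypothetical batch run: simulate several games so the table receives a
# variety of openings rather than a single deterministic game.
for _ in range(10):
    gen_examples(randomness=0.7,
                 randomness_decline=0.95,
                 max_moves=80,
                 table=c.DEFAULT_TABLE)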
Example no. 5
def save_cache(showname):
    """save callback"""

    cache_files = glob.glob('/mnt/shows/{0}/tank/cache/path_cache*'.format(showname))
    cache_files.sort()

    if len(cache_files[-1].split('.')) > 2:
        # increment last
        prefix, suffix, incr = cache_files[-1].split('.')
        incr = int(incr) + 1
        backup_cache = ".".join([prefix, suffix, str(incr)])
    else:
        backup_cache = '/mnt/shows/{0}/tank/cache/path_cache.db.1'.format(showname)

    shutil.copy('/mnt/shows/{0}/tank/cache/path_cache.db'.format(showname), backup_cache)
    os.chown(backup_cache, 1900, 20)

    with Connector('/mnt/shows/{0}/tank/cache/path_cache.db'.format(showname)) as db:
        # this way the connection is automatically destroyed when the page has
        # been loaded.

        # get some info about our table
        db.cur.execute("PRAGMA table_info(path_cache)")
        table_info = db.cur.fetchall()
        print(table_info)

        # out with the old
        db.cur.execute("DELETE FROM path_cache")
        db.con.commit()

        # in with the new
        for key in sorted(request.json.keys()):
            row_data = request.json[key]

            # munge the insert query to match the schema of the table
            if len(table_info) == 6:
                query = "INSERT INTO path_cache VALUES('{type}', {id}, '{name}', 'primary', '{path}', 1)".format(**row_data)
            else:
                query = "INSERT INTO path_cache VALUES('{type}', {id}, '{name}', 'primary', '{path}')".format(**row_data)

            try:
                db.cur.execute(query)
            except sqlite3.OperationalError as e:
                print(e)
                print("insert into database failed: ", end='')
            finally:
                print(query)

            try:
                db.con.commit()
            except sqlite3.OperationalError:
                print "unable to commit changes to database"
                return json.dumps({})

    print "Saving completed successfully"
    return json.dumps({'status':'ok'})
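The with Connector(...) usage above assumes a context manager that exposes a sqlite3 connection (db.con) and cursor (db.cur). A minimal sketch of such a class, purely as an assumption about the interface used here, not the real db_connector implementation:

import sqlite3

class Connector(object):
    """Hypothetical context manager matching the db.con / db.cur usage above."""

    def __init__(self, db_path):
        self.db_path = db_path
        self.con = None
        self.cur = None

    def __enter__(self):
        # open the sqlite database and expose both connection and cursor
        self.con = sqlite3.connect(self.db_path)
        self.cur = self.con.cursor()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # close the connection once the with-block ends
        self.con.close()
        return False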
Example no. 6
def editor(showname):
    """db_editor page"""

    with Connector('/mnt/shows/{0}/tank/cache/path_cache.db'.format(showname)) as db:
        # this way the connection is automatically destroyed when the page has
        # been loaded.
        db.cur.execute('SELECT * FROM path_cache ORDER BY entity_type, entity_name')
        data = db.cur.fetchall()

        return render_template('editor.html', fields=data, showname=showname)
Example no. 7
from ezsc import ezscApplication
import asyncio 
import os 
import logging 
from aiohttp import web
import json
import time
import threading
from db_connector import Connector
#from security import Encryption
#from security import Decryption

LOGGER = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)

connector_app = Connector()
ezsc_controller = ezscApplication()
# A function that will allow devices to join the network

class SHA: # *******SHA = Secure Home Automation **************
	'''@staticmethod 
	async def permit_join(): # why request? 
		await LOGGER.info("Permitting devices to join the network for the next 60s ...")
		permit_join_future = asyncio.create_task(ezsc_controller.permit_join())
		await permit_join_future.add_done_callback(lambda futre: LOGGER.info("devices can no longer join the network"))'''

	#*** A FUNCTION THAT HELPS GET THE DEVICES *****
	@staticmethod
	async def _get_devices():  
		return  ezsc_controller.get_devices()
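A hedged sketch of how SHA._get_devices() could be exposed through the aiohttp server imported above; the route path, port, and response shape are assumptions, not part of the snippet:

async def list_devices(request):
	# hypothetical handler: return the controller's devices as JSON
	devices = await SHA._get_devices()
	return web.json_response({'devices': str(devices)})

app = web.Application()
app.add_routes([web.get('/devices', list_devices)])
# web.run_app(app, port=8080)  # assumed port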
Example no. 8
application.config['SECRET_KEY'] = SECRET_KEY

# JWT configuration
jwt = JWTManager(application)
application.json_encoder = JSONEncoder
application.config['JWT_SECRET_KEY'] = SECRET_KEY
application.config['JWT_ACCESS_TOKEN_EXPIRES'] = JWT_ACCESS_TOKEN_EXPIRES
application.config["JSON_SORT_KEYS"] = False

# Bcrypt configuration
flask_bcrypt = Bcrypt(application)

# Connecting to the database
db_connector = Connector(host=DB_CONFIG['host'],
                         database=DB_CONFIG['database'],
                         user=DB_CONFIG['user'],
                         password=DB_CONFIG['password'],
                         migrations_dir=DB_CONFIG['migrations_dir'])

Model.set_connector(db_connector)

# Route definitions
Controller.set_application(application)
Controller.apply_routes(application)
Controller.set_jwt_rules(jwt)
Controller.set_bcrypt(flask_bcrypt)

# Environment configuration
Environment.set_environment(ENV)
Environment.setup(application, db_connector)
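A hedged sketch of an entry point for the configured application; the host and port are assumptions, not taken from the snippet:

if __name__ == '__main__':
    # hypothetical: start the Flask app once routes, JWT, Bcrypt and the
    # database connector have been wired up above
    application.run(host='0.0.0.0', port=5000)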