Example #1
import os
import logging
import requests

from util.config_utils import get_dir_cfg
# get_aws_file comes from util.file_utils in the other snippets; the remaining
# helpers (is_on_file, make_dir, put_aws_file_with_path,
# write_filenames_index_from_filename) are assumed to live there as well.
from util.file_utils import get_aws_file

logger = logging.getLogger(__name__)
local_dir = get_dir_cfg()['local']


def create_vocab(url, filename, player):

    vocab_path = get_dir_cfg()['vocab_path']

    filename = local_dir + vocab_path + filename + ".txt"

    if not is_on_file(filename):

        # vocab not cached locally: fetch the values from the service and
        # write one label (each value's id) per line
        response = requests.get(url,
                                headers={
                                    'groups': 'ROLE_AUTOMATION,',
                                    'username': '******'
                                })
        values = response.json()
        logger.info('vocab is not on file')
        make_dir(filename)
        with open(filename, 'w') as f:
            for value in values:
                label = value['id']
                if label is not None:
                    f.write(label)
                    f.write('\n')

        # now put file away.
        head, tail = os.path.split(filename)
        put_aws_file_with_path(vocab_path, tail)
        write_filenames_index_from_filename(filename)

    else:
        head, tail = os.path.split(filename)
        logger.info('get from aws ' + tail)
        #need to load the file from aws potentially
        get_aws_file(vocab_path, tail)

    return filename
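
A hedged usage sketch for create_vocab: the endpoint URL, filename and player
value below are purely illustrative, not taken from the project.

# Illustrative call only; the URL and filename are assumptions.
vocab_file = create_vocab(url='https://example.org/api/players/vocab',
                          filename='player_ids',
                          player=None)
logger.info('vocab available at ' + vocab_file)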
Example #2
import util.model_utils as model_utils
import util.cache_utils as cache_utils
import dataset.match_dataset as match_dataset
import util.receipt_utils as receipt_utils
import util.training_utils as training_utils
import util.train_history_utils as train_history_utils
from util.config_utils import get_dir_cfg
from util.config_utils import get_learning_cfg
import logging

logger = logging.getLogger(__name__)

local_dir = get_dir_cfg()['local']
history_file = get_dir_cfg()['player_saves_train_history_file']


def train(player, receipt):

    learning_cfg = get_learning_cfg("saves")

    history = train_history_utils.init_history('in progress', learning_cfg)

    training_utils.train(data_range=training_utils.create_data_range(
        learning_cfg=learning_cfg, history_file=history_file),
                         label='saves',
                         label_values=match_dataset.SAVES,
                         model_dir="saves",
                         train_path=training_utils.create_train_path(),
                         receipt=receipt,
                         history=history,
                         history_file=history_file)
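
Each per-label module exposes the same train(player, receipt) signature, and
the Flask app in a later example imports all of them. A hedged sketch of a
call site; the receipt value is an assumption (in the project it presumably
comes from util.receipt_utils):

# Illustrative call only; the receipt string is an assumption.
train(player=None, receipt='receipt-123')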
Example #3
import util.model_utils as model_utils
import util.cache_utils as cache_utils
import dataset.match_dataset as match_dataset
import util.receipt_utils as receipt_utils
import util.training_utils as training_utils
import util.train_history_utils as train_history_utils
from util.config_utils import get_dir_cfg
from util.config_utils import get_learning_cfg
import logging

logger = logging.getLogger(__name__)

local_dir = get_dir_cfg()['local']
history_file = get_dir_cfg()['player_yellow_card_train_history_file']


def train(player, receipt):

    learning_cfg = get_learning_cfg("yellow")

    history = train_history_utils.init_history('in progress', learning_cfg)

    training_utils.train(data_range=training_utils.create_data_range(
        learning_cfg=learning_cfg, history_file=history_file),
                         label='yellow',
                         label_values=match_dataset.CARDS,
                         model_dir="yellow",
                         train_path=training_utils.create_train_path(),
                         receipt=receipt,
                         history=history,
                         history_file=history_file)
Example #4
import train.player_goals_train as player_goals_train
import train.player_assists_train as player_assists_train
import train.player_minutes_train as player_minutes_train
import train.player_conceded_train as player_conceded_train
import train.player_red_card_train as player_red_card_train
import train.player_yellow_card_train as player_yellow_card_train
from util.config_utils import get_dir_cfg

from flask import Flask

import json
import logging
import threading
import traceback

app = Flask(__name__)

logging.basicConfig(filename=get_dir_cfg()['local'] + 'predictor.log',
                    level=logging.NOTSET)
logger = logging.getLogger(__name__)

local_dir = get_dir_cfg()['local']

if __name__ == "__main__":
    app.run(host='0.0.0.0')


def set_init(init):
    # request parameters arrive as strings, so only the literal 'true' maps to True
    return init == 'true'
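
The Flask snippet above omits its route handlers. A minimal sketch of the kind
of endpoint it implies, wiring one of the imported train modules to a POST
route; the route name, payload fields and use of a background thread are all
assumptions, and `request` would additionally need `from flask import request`.

# Illustrative only: route name and payload fields are assumptions.
@app.route('/train/goals', methods=['POST'])
def train_goals():
    body = json.loads(request.data)
    receipt = body.get('receipt')
    try:
        # run training in the background so the HTTP call returns quickly
        threading.Thread(target=player_goals_train.train,
                         args=(body.get('player'), receipt)).start()
    except Exception:
        logger.error(traceback.format_exc())
    return json.dumps({'receipt': receipt})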
Example #5
import tensorflow as tf
from util.file_utils import get_indexes
from util.file_utils import get_aws_file
from util.config_utils import get_dir_cfg
import logging

logger = logging.getLogger(__name__)

local_dir = get_dir_cfg()['local']


def init_models(model_dir):
    logger.info('calling init')
    indexes = get_indexes(local_dir + model_dir)
    for attribute, value in indexes.items():
        if (value['active'] == True):
            get_aws_file(model_dir + '/', attribute)

    indexes = get_indexes(local_dir + model_dir + '/eval')
    for attribute, value in indexes.items():
        if (value['active'] == True):
            get_aws_file(model_dir + '/eval/', attribute)


def create(feature_columns, classes, model_dir, learning_cfg, init):

    logger.info('model dir for classifier ' + local_dir + model_dir)

    logger.info('tensorflow version ' + tf.__version__)

    if init:
        # the snippet ends here; pulling previously uploaded model files from
        # AWS via init_models is a plausible reading of the init flag
        init_models(model_dir)
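
The create function is cut off in this snippet. A hedged sketch of the kind of
body the signature implies, assuming a tf.estimator-style classifier and that
learning_cfg is a dict carrying the layer sizes; every parameter name below is
an assumption:

# Illustrative continuation only; the hidden_units key and defaults are assumed.
def create_sketch(feature_columns, classes, model_dir, learning_cfg, init):
    if init:
        init_models(model_dir)
    return tf.estimator.DNNClassifier(
        feature_columns=feature_columns,
        hidden_units=learning_cfg.get('hidden_units', [64, 32]),
        n_classes=len(classes),
        model_dir=local_dir + model_dir)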
Example #6
from util.index_utils import process_index, read_index
from util.config_utils import get_dir_cfg
import os.path
import os
import requests
import logging
import csv
import time
import boto3
from botocore.exceptions import ClientError


logger = logging.getLogger(__name__)

s3_client = boto3.client('s3')

aws = get_dir_cfg()['aws']
aws_url = get_dir_cfg()['aws_url']
aws_bucket = get_dir_cfg()['aws_bucket']

local_dir = get_dir_cfg()['local']


def on_finish(tf_models_dir, aws_model_dir):
    logger.info(' write index ' + tf_models_dir)
    write_filenames_index(tf_models_dir)
    try:
        write_filenames_index(tf_models_dir + '/eval')
    except Exception:
        logger.info('eval dir not created')

    logger.info(' put aws files ' + aws_model_dir)
Example #7
import json
import os.path
import logging
import datetime

from util.config_utils import get_dir_cfg

logger = logging.getLogger(__name__)
local_dir = get_dir_cfg()['local']
vocab_file = get_dir_cfg()['vocab_history_file']


def write_history(filename, history):
    logger.info('opening ' + filename)

    with open(local_dir + filename, 'w') as outfile:
        json.dump(history, outfile)


def read_history(filename):
    if os.path.isfile(local_dir + filename):
        with open(local_dir + filename) as f:
            return json.load(f)
    else:
        return {}


def get_history(filename, key):
    history = read_history(filename)
    if key in history:
        return history[key]
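
A small round-trip sketch of these history helpers; the filename is
illustrative, and paths resolve under local_dir exactly as in the functions
above:

# Illustrative only; 'example_history.json' is not a project filename.
write_history('example_history.json', {'last_trained': '2020-01-01'})
print(read_history('example_history.json'))                  # {'last_trained': '2020-01-01'}
print(get_history('example_history.json', 'last_trained'))   # '2020-01-01'
print(get_history('example_history.json', 'missing'))        # None (key absent)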
Example #8
import util.model_utils as model_utils
import util.cache_utils as cache_utils
import dataset.match_dataset as match_dataset
import util.receipt_utils as receipt_utils
import util.training_utils as training_utils
import util.train_history_utils as train_history_utils
from util.config_utils import get_dir_cfg
from util.config_utils import get_learning_cfg
import logging

logger = logging.getLogger(__name__)

local_dir = get_dir_cfg()['local']
history_file = get_dir_cfg()['player_conceded_train_history_file']


def train(player, receipt):

    learning_cfg = get_learning_cfg("conceded")
    history = train_history_utils.init_history('in progress', learning_cfg)

    training_utils.train(data_range=training_utils.create_data_range(
        learning_cfg=learning_cfg, history_file=history_file),
                         label='conceded',
                         label_values=match_dataset.CONCEDED,
                         model_dir="conceded",
                         train_path=training_utils.create_train_path(),
                         receipt=receipt,
                         history=history,
                         history_file=history_file)


def create_train_path():
    train_path = get_dir_cfg()['train_path']

    return train_path