Example #1
    def __init__(self):
        self.colorgorical = model.Model()

        self.repeats = 10
        self.sizes = [3, 5, 8]
        weights = [0, 0.5, 1]

        weights = np.array(list(it.product(weights, weights, weights)))

        # Drop settings whose weights sum to 0.5 (the permutations of (0, 0, 0.5))
        weights = weights[np.sum(weights, axis=1) != 0.5, :]

        def isOk(row):
            values, counts = [
                list(r) for r in np.unique(row, return_counts=True)
            ]
            if values == [0.5]:  # all 0.5
                return False
            if 0 in values and 0.5 in values and\
                    counts[values.index(0)] == 1 and counts[values.index(0.5)] == 2:
                return False
            else:
                return True

        self.weights = np.array([w for w in weights if isOk(w)])

        thisFilePath = os.path.dirname(os.path.realpath(__file__))
        projectDir = os.path.abspath(os.path.join(thisFilePath, os.pardir))
        outputPath = os.path.join(projectDir, 'examplePalettes')

        self.outputPath = outputPath
        self.paletteFile = os.path.join(outputPath, 'examplePalettes.json')

        self.samplePalettes = None
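The filtering above is easiest to verify in isolation. A minimal, self-contained sketch (NumPy and itertools only, independent of the Colorgorical model) that reproduces which weight triples survive:

import itertools as it

import numpy as np

weights = [0, 0.5, 1]
combos = np.array(list(it.product(weights, repeat=3)))  # 27 triples

# Drop triples summing to 0.5, i.e. the permutations of (0, 0, 0.5).
combos = combos[np.sum(combos, axis=1) != 0.5, :]

def is_ok(row):
    values, counts = [list(r) for r in np.unique(row, return_counts=True)]
    if values == [0.5]:  # all three weights are 0.5
        return False
    if 0 in values and 0.5 in values and \
            counts[values.index(0)] == 1 and counts[values.index(0.5)] == 2:
        return False  # one 0 and two 0.5s, e.g. (0, 0.5, 0.5)
    return True

surviving = np.array([w for w in combos if is_ok(w)])
print(len(combos), '->', len(surviving))  # 24 -> 20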
Example #2
def _similarity(image, img_link):
    try:
        comparison_img_res = requests.get(img_link).content
        comparison_img = Image.open(io.BytesIO(comparison_img_res))

        similarity_model = model.Model("color-10-128.model")
        return similarity_model.predict(image, comparison_img)
    except Exception:
        if debug:
            print(f'Unable to get similarity for image: {img_link}')
        return 0  # don't consider an image that errored
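A hedged usage sketch (the file name and URL are placeholders; _similarity, its io/requests imports, and the "color-10-128.model" file are assumed from the snippet above):

from PIL import Image

debug = True  # _similarity expects this flag at module scope

reference = Image.open('reference.jpg')  # hypothetical local image
score = _similarity(reference, 'https://example.com/candidate.jpg')
print('similarity:', score)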
Example #3
def main(argvs):
    net = model.Model()
    if FLAGS.mode == 'createdata':
        createDataset.createDataset()
    elif FLAGS.mode == 'train':
        trainer.train(net)
    elif FLAGS.mode == 'evaluate':
        evaluator.evaluate()
    else:
        print('No mode selected')
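The same dispatch can also be written as a table, which keeps the mode names in one place; a hedged alternative sketch (FLAGS, createDataset, trainer, and evaluator are assumed from the surrounding module):

def main(argvs):
    net = model.Model()
    handlers = {
        'createdata': lambda: createDataset.createDataset(),
        'train': lambda: trainer.train(net),
        'evaluate': lambda: evaluator.evaluate(),
    }
    handler = handlers.get(FLAGS.mode)
    if handler is None:
        print('No mode selected')
    else:
        handler()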
Example #4
def load(model_name, model_file=None):
    lgbm_path = '/home/steven/loss-landscape/flight/model/lightgbm.npz'
    if model_name == 'flight_random':
        net = model.Model(np.load(lgbm_path), 4, True, True, 0.1, None)
        #tree_params, opt_level, random_init=False, train_ohe=False, do, do_ohe
        print(model_name)
    elif model_name == 'flight_finetune':
        net = model.Model(np.load(lgbm_path), 4, False, True, 0.1, None)
        print(model_name)
    else:
        # guard against an undefined net (and a NameError at net.float() below)
        raise ValueError("unknown model_name: " + model_name)

    if model_file:
        assert os.path.exists(model_file), model_file + " does not exist."
        stored = torch.load(model_file,
                            map_location=lambda storage, loc: storage)
        net.load_state_dict(stored['model_state_dict'])
        """
        if 'state_dict' in stored.keys():
            net.load_state_dict(stored['state_dict'])
        else:
            net.load_state_dict(stored)
        """
    net.float()
    return net
Example #5
def train(csv_file_name, mdl_prm_name, norm_prm_name):
    # --- data loading
    df_src = dh.load_csv_to_df(csv_file_name)
    n_days = 7

    # --- preprocessing
    ret = pp.data_preparation(df_src, norm_prm_name, n_days)
    feat_train, tar_train, feat_test, tar_test, idx_train, idx_test = ret

    # --- training model
    l_r = mdl.Model()
    l_r.fit(feat_train, tar_train)

    # --- store model
    l_r.save_model(mdl_prm_name)
Example #6
    def __init__(self, app, sys_argv):

        super(MainApp, self).__init__()
        self.app = app
        self.view = view.Ui_MainWindow()
        self.model = model.Model()
        self.model.username = '******'
        self.model.password = "******"
        self.MainWindow = QMainWindow()
        self.view.setupUi(self.MainWindow)
        self.RunParameterController = run_parameter_controller.RunParameterController(
            app, self.view, self.model)
        self.DynamicPlotController = dynamic_plot_controller.DynamicPlotController(
            app, self.view, self.model)
        #self.PostProcessingController = post_processing_controller.PostProcessingController(app, self.view, self.model)
        self.UnifiedController = unified_controller.UnifiedController(
            self.RunParameterController, self.DynamicPlotController)
        #                                                              self.PostProcessingController)
        self.MainWindow.show()
Example #7
    def __init__(self):
        self.__view = view.View()
        self.__model = model.Model()

        self.__topline_templates = {
            "Y2": os.path.join(template_folder, "topline_template.docx"),
            "QUALTRICS": "",
            "UT_POLICY": os.path.join(template_folder, "utpolicy_top_template.docx"),
        }

        self.__appendix_templates = {
            "Y2": os.path.join(template_folder, "appendix_template.docx"),
            "QUALTRICS": "",
            "UT_POLICY": os.path.join(template_folder, "utpolicy_app_template.docx"),
        }

        self.__template_logos = {
            "Y2": os.path.join(image_folder, "y2_xtabs.png"),
            "QUALTRICS": os.path.join(image_folder, "QLogo.png"),
            "UT_POLICY": os.path.join(image_folder, "y2_utpol_logo.png"),
            "WHATSAPP": os.path.join(image_folder, "whatsapp.png"),
        }
Example #8
def test_predict(csv_file_name, mdl_prm_name, norm_prm_name):
    # --- data loading
    df_src = dh.load_csv_to_df(csv_file_name)
    n_days = 7

    # --- preprocessing
    ret = pp.data_preparation(df_src, norm_prm_name, n_days)
    feat_train, tar_train, feat_test, tar_test, idx_train, idx_test = ret

    # --- loading model
    l_r = mdl.Model()
    l_r.load_model(mdl_prm_name)

    # --- prediction
    estim = l_r.predict(feat_test)

    # --- post process
    df_pred_norm = pd.DataFrame(estim, index=idx_test)
    df_pred = pp.df_inversed_min_max_norm(df_pred_norm, norm_prm_name)
    print(df_pred.shape)
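pp.df_inversed_min_max_norm presumably undoes a standard min-max normalization; a minimal sketch under that assumption (the per-column min and max would come from the stored norm parameters):

import pandas as pd

def inverse_min_max(df_norm, col_min, col_max):
    # Undo x_norm = (x - min) / (max - min), column by column.
    return df_norm * (col_max - col_min) + col_min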
Example #9
    def __init__(self):
        """Initializes the web server and pairs it with a Colorgorical model."""
        self.model = model.Model()

        thisFilePath = os.path.dirname(__file__)
        public_root = os.path.join(thisFilePath, 'public/static')
        template_root = os.path.join(thisFilePath, 'templates')
        data_root = os.path.join(public_root, 'data')

        # load the data used to generate the visualizations that preview palettes
        static_data = {}

        mapDataPath = os.path.join(thisFilePath, 'data/map-us-counties.json')
        mapValuePath = os.path.join(thisFilePath,
                                    'data/map-us-unemployment.json')

        # Read both files with context managers so the handles are closed.
        with codecs.open(mapDataPath, 'r', 'utf-8') as topoFile:
            topoData = topoFile.read()
        with codecs.open(mapValuePath, 'r', 'utf-8') as valueFile:
            valueData = valueFile.read()
        mainOps = dict(topoData=topoData, valueData=valueData)
        makePaletteOps = dict(model=self.model)

        handlerList = [
            (r'/', handlers.main.MainHandler, mainOps),
            (r'/(.*).html', handlers.template.TemplateHandler, mainOps),
            (r'/color/makePalette', handlers.makePalette.MakePaletteHandler,
             makePaletteOps),
            #   (r'/model', handler.ModelHandler, handlerOps),
            (r'/color/scorePalette', handlers.scorePalette.ScorePaletteHandler,
             makePaletteOps),
            #   (r'/data/(.*)',  web.StaticFileHandler, {'path': data_root}),
            (r'/static/(.*)', web.StaticFileHandler, {
                'path': public_root
            })
        ]

        settings = dict(debug=True,
                        template_path=template_root,
                        static_path=public_root,
                        static_url_prefix="/static/")

        self.application = web.Application(handlerList, **settings)
Example #10
def evaluate(args):
    """
    Evaluate the classification model
    """
    logger = logging.getLogger("alibaba")
    logger.info("Load data_set , vocab and label config...")
    if args.pretrained_embedding:
        word_vocab_ = PretrainedVocab(args)

    else:
        with open(os.path.join(args.vocab_dir, "vocab.data"), "rb") as fin:
            word_vocab_ = pickle.load(fin)
    with open(os.path.join(args.vocab_dir, "vocab_character.data"),
              "rb") as fin:
        vocab_character_ = pickle.load(fin)
    data = dataset.Dataset(args)
    logger.info("Convert word to id...")
    data.convert_to_ids(word_vocab_)
    logger.info("Convert character to id...")
    data.convert_to_ids(vocab_character_, character=True)
    logger.info("Build Model...")
    model_ = model.Model(args,
                         word_vocab=word_vocab_,
                         character_vocab=vocab_character_)
    model_.restore(model_dir=args.model_dir, model_prefix=args.class_model)
    logger.info("Evaluating the model on dev set...")
    dev_batchs = data.get_mini_batchs(batch_size=args.batch_size,
                                      set_name="dev")
    loss_, accuracy, _, _ = model_.evaluate(
        batch_data=dev_batchs,
        result_dir=args.result_dir,
        result_prefix="dev.evaluate",
        save_predict_label=True,
    )
    logger.info("Loss on dev set: {}".format(loss_))
    logger.info("Accuracy on dev set: {}".format(accuracy))
    logger.info("Predicted labels are saved to {}".format(
        os.path.join(args.result_dir)))
Example #11
def train_model(*args):
    """Dash callback that trains the model.

    Given the user has uploaded a dataset and clicks the 'Train Model' button,
    the scikit-learn function that trains the specified algorithm is called
    to fit the corresponding Estimator object.

    Args:
        *args:
            - The number of times the 'Train Model' button has been clicked.
            - The name of the model to train.
            - The stored JSON dataset file.
            - The selected algorithm with which to train the model.

    Returns: A list containing a jsonpickle object of the Model class instance.

    """
    button_click = args[0]
    if button_click:
        # Get the dataset file and name.
        name, data = utils.unjson_df(args[2][0])
        selected_algo = args[3]
        model_name = args[1]
        sk_params = {
            sk_param[0].split('-')[1]: value
            for sk_param, value in zip(ALGO_CALLBACK_INPUTS, args[5:])
        }
        # Create a model to train.
        m = model.Model(model_name, selected_algo, name)
        np_data = data.to_numpy()
        features = np_data[:, :-1]
        target = np_data[:, -1]
        # Train the model given the features and target data, as well as all
        # the necessary scikit-learn parameters to fit the Estimator.
        m.train(features, target, **sk_params)
        return [model.pickle(m)]
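For context, a callback like this is registered against the UI with Dash's app.callback; a hedged wiring sketch (the component ids are hypothetical, and the extra State inputs built from ALGO_CALLBACK_INPUTS are omitted):

from dash import Dash
from dash.dependencies import Input, Output, State

app = Dash(__name__)

# Hypothetical ids; the real layout lives elsewhere in the project.
app.callback(
    Output('model-store', 'children'),
    [Input('train-button', 'n_clicks')],
    [State('model-name', 'value'),
     State('dataset-store', 'children'),
     State('algorithm-dropdown', 'value')],
)(train_model)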
Example #12
    def __init__(self, parent):
        tk.Frame.__init__(self, parent)
        self.parent = parent
        self.parent.geometry("1024x500+150+100")
        self.model = model.Model()
        self.entry = None
        self.figure = Figure(figsize=(5, 4), dpi=75)
        self.axes = self.figure.add_subplot(111)
        self.canvas = None
        self.legend = None
        self.text = \
"""s1: I can't tell
s2: Negative
s3: Neutral
s4: Positive
s5: Tweet not related to weather
w1: current (same day) weather
w2: future (forecast)
w3: I can't tell
w4: past weather
k1: clouds
k2: cold
k3: dry
k4: hot
k5: humid
k6: hurricane
k7: I can't tell
k8: ice
k9: other
k10: rain
k11: snow
k12: storms
k13: sun
k14: tornado
k15: wind"""
        self.initUI()
Example #13
def import_blendfile(filename, animation_mode):
    log.debug("IMPORT BLENDFILE HERE")
    output_model = model.Model()
    output_model.global_matrix = blend_matrix_to_euclid(
        blender_conversion_matrix())

    try:
        bpy.ops.wm.open_mainfile(filepath=filename)
    except RuntimeError as error:
        log.error("Couldn't open " + filename + ", bailing: " + str(error))
        sys.exit(PROCESSING_ERROR)

    for blend_object in bpy.data.objects:
        if not blend_object.hide_render:
            if blend_object.type == "MESH":
                import_mesh(output_model, blend_object.name, blend_object,
                            animation_mode == "bone")

    for material in bpy.data.materials:
        import_material(output_model, material.name, material)

    import_animations(output_model)

    return output_model
Example #14
    output = {}
    for name in names:
        temp = {}
        name_idx = onehot_mapping[name]

        temp["vvec"] = [float(x) for x in v_mat[name_idx, :]]
        temp["uvec"] = [float(x) for x in u_mat[:, name_idx]]
        temp["avg"] = [float(x) for x in npy.add(v_mat[name_idx, :], u_mat[:, name_idx]) * 0.5]

        output[name] = temp

    output["confusion_matrix"] = confusion_matrix.tolist()

    with open("test.json", "w") as outfile:
        json.dump(output, outfile)

if __name__ == "__main__":
    fnames = preprocess.get_filenames()
    onehot_mapping, data_mat, label_mat, weight_mat = preprocess.prepare_data(fnames)

    # Test results
    test_data, test_lab = preprocess.prepare_test_data(onehot_mapping)

    model = model.Model(data_mat, label_mat, weight_mat)  # note: rebinds the imported model module to this instance
    model.train(ModelConfig().numEpochs)

    conf_matrix = model.test(test_data, test_lab, species_clause=True)

    output_results(model, onehot_mapping, conf_matrix)
Example #15
from common.data_connector import *
from common.util import *
from model import model
from flask import Flask, request, jsonify, redirect, url_for
from flask_cors import CORS, cross_origin
import json

data_connector = DataConnector()

app = Flask(__name__)
app.config['CORS_HEADERS'] = 'Content-Type'
app.config['supports_credentials'] = 'true'
app.config['CORS_SUPPORTS_CREDENTIALS'] = 'true'
cors = CORS(app)

mdl = model.Model()


@app.route("/")
def greeting():
    return "Welcome to InsightLake Speech Model Service!!"


@app.route("/train", methods=['POST'])
@cross_origin()
def trainApi():
    if request.method == 'POST':
        reqData = request.data
        jsonData = json.loads(reqData)
        data = data_connector.fetchData(jsonData)
        print(data)
Example #16
# The dev/prod branches differ only in host. decode_responses must be set on
# the pool: when a connection_pool is supplied, StrictRedis ignores its own
# connection/encoding kwargs.
redis_host = 'localhost' if dev_mode else 'redis'
pool = redis.ConnectionPool(host=redis_host, port=6379, db=0,
                            encoding="utf-8", decode_responses=True)
r = redis.StrictRedis(connection_pool=pool)

r.set("counter", "0")

print("Redis counter is: " + str(r.get("counter")))

# initialize and train model as first thing. dev=True runs sample data
model = model.Model(dev_mode)

# create global controllers for the app.
view_helper = viewHelper.ViewHelper(session)
session_helper = sessionController.SessionController(session)
input_helper = inputHelper.InputHelper(model, session, session_helper)
f_writer = fileWriter.FileWriter(session)
control = controller.Controller(model, session, session_helper, f_writer)

view.configure_routes(app, view_helper, session_helper, input_helper, f_writer,
                      control, r)

if __name__ == '__main__':
    app.run(host='0.0.0.0', port='6543')
Example #17
from flask import Flask
from flask import request
import json
import database.database as db
import model.model as predictor
app = Flask(__name__)

obj = predictor.Model()

skills = {
    "Carpentry": [1, 7, "-"],
    "Painting": [2, 6, "+"],
    "Masonry": [3, 9, "-"],
    "Plumbing": [4, 7, "+"],
    "Electrician": [5, 6, "+"],
    "Cleaner": [6, 5, "+"],
    "Rigger": [7, 10, "-"],
    "Transport": [8, 5, "+"],
    "Welder": [9, 8, "-"],
    "Fitter": [10, 6, "-"]
}

gender = {"male": 1, "female": 2}


@app.route("/add_database", methods=['POST'])
def add_labour_database():
    '''addition to the labourer's database'''
    if request.method == 'POST':
        data = request.json
        db.data_entry(data["name"], data["age"], skills[data["skills"]][0],
Example #18
def main(load_path, params, mode='test'):

    nhidden = params['nhidden']
    dropout = params['dropout']
    word2vec = params['word2vec']
    dataset = params['data']
    nlayers = params['nlayers']
    sub2vec = params['sub2vec']
    train_emb = params['train_emb']
    sub_dim = params['sub_dim']
    use_feat = params['use_feat']
    gating_fn = params['gating_fn']
    use_subs = sub_dim > 0
    dp = DataPreprocessor.DataPreprocessor()
    data = dp.preprocess(dataset, no_training_set=True, use_subs=use_subs)
    inv_vocab = data.inv_dictionary

    print("building minibatch loaders ...")
    if mode == 'test':
        batch_loader_test = MiniBatchLoader.MiniBatchLoader(
            data.test, BATCH_SIZE)
    else:
        batch_loader_test = MiniBatchLoader.MiniBatchLoader(
            data.validation, BATCH_SIZE)

    print("building network ...")
    W_init, embed_dim = Helpers.load_word2vec_embeddings(
        data.dictionary[0], word2vec)
    S_init, sub_dim = Helpers.load_sub_embeddings(data.dictionary[1], sub2vec)
    m = model.Model(nlayers,
                    data.vocab_size,
                    data.num_chars,
                    W_init,
                    S_init,
                    nhidden,
                    embed_dim,
                    dropout,
                    train_emb,
                    sub_dim,
                    use_feat,
                    gating_fn,
                    save_attn=True)
    m.load_model('%s/best_model.p' % load_path)

    print("testing ...")
    pr = np.zeros((len(batch_loader_test.questions),
                   batch_loader_test.max_num_cand)).astype('float32')  # preallocated; never filled in this snippet before being saved
    fids, attns = [], []
    total_loss, total_acc, n = 0., 0., 0
    for dw, dt, qw, qt, a, m_dw, m_qw, tt, tm, c, m_c, cl, fnames in batch_loader_test:
        outs = m.validate(dw, dt, qw, qt, c, a, m_dw, m_qw, tt, tm, m_c, cl)
        loss, acc, probs = outs[:3]
        # store one attention
        attns += [[fnames[0], probs[0, :]] + [o[0, :, :] for o in outs[3:]]]
        bsize = dw.shape[0]
        total_loss += bsize * loss
        total_acc += bsize * acc
        fids += fnames
        n += bsize
        print("step" + str(n) + ",acc" + str(acc))

    logger = open(load_path + '/log', 'a')  # unbuffered text mode is not allowed in Python 3
    message = '%s Loss %.4e acc=%.4f' % (mode.upper(), total_loss / n,
                                         total_acc / n)
    print(message)
    logger.write(message + '\n')
    logger.close()

    np.save('%s/%s.probs' % (load_path, mode), np.asarray(pr))
    pkl.dump(attns, open('%s/%s.attns' % (load_path, mode), 'wb'))  # pickle needs a binary-mode file
    f = open('%s/%s.ids' % (load_path, mode), 'w')
    for item in fids:
        f.write(item + '\n')
    f.close()
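The keys that main() expects in params can be read off the lookups at the top of the function; a hedged example invocation (every value below is a placeholder):

params = {
    'nhidden': 128,
    'dropout': 0.2,
    'word2vec': 'data/word2vec.bin',
    'data': 'data/dataset',
    'nlayers': 3,
    'sub2vec': 'data/sub2vec.bin',
    'train_emb': 0,
    'sub_dim': 100,
    'use_feat': 0,
    'gating_fn': 'T.mul',
}
main('experiments/run1', params, mode='test')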
Example #19
# -*- coding:utf-8 -*-

from flask import Flask, request

from model import model
from view import view

data_model = model.Model()
view = view.View()

app = Flask(__name__)

@app.route('/', methods=['GET'])
def index():
	'''
	load a graph
	in a more general case we would need to take the form data
	into account (via the POST method) and compute a (sub)graph
	from a whole palette of possibilities
	'''
	graph = data_model.load_graph()

	'''
	hand the computed graph over to the view, which knows how to translate
	the "raw" data into an object suited to the client-side view logic
	'''
	view.graph2json(graph)

	'''
	send the result to the client
Example #20
                     className='n4 g2')
        ],
                 id='game61',
                 className='final-four-bounding inner-bounding game'))
    bounding_html_list.append(
        html.Div(right_region_html_list, className='right-bounding'))
    bracket_html = html.Div(bounding_html_list, className='bounding-bracket')
    return bracket_html


###############################################################################
################################ Global code ##################################
###############################################################################

m = model.Model(number_simulations=number_simulations,
                gender=gender,
                scoring_sys=scoring_system,
                year=year)
m.batch_simulate()
print("sims done")
m.create_json_files()
m.update_entry_picks()
m.initialize_special_entries()
m.analyze_special_entries()
m.add_fake_entries(fake_entries)
m.add_bulk_entries_from_database(real_entries)
m.add_simulation_results_postprocessing()
m.raw_print()
all_results = m.output_results()
special_wins = m.get_special_wins()
special_results = all_results[-4:]
Example #21
from flask import Flask, flash, render_template, url_for, redirect
from model import PlayerInsertForm, AdventureInsertForm, CharacterInsertForm, SessionInsertForm, SignInForm, model, User
from model import AuthorInsertForm, EnemyInsertForm, EquipmentInsertForm, MapInsertForm
from flask_bcrypt import Bcrypt
from flask_login import LoginManager, login_required, login_user, logout_user, current_user

app = Flask(__name__)
model = model.Model()  # note: rebinds the imported model module to this instance
bcrypt = Bcrypt(app)

# config
app.config.update(SECRET_KEY='foo')

login_manager = LoginManager()
login_manager.init_app(app)
login_manager.login_view = 'login'


@login_manager.user_loader
def load_user(user_id):
    return User.User(user_id) if model.get_row("player", user_id) else None


@app.route('/')
@login_required
def index():
    return render_template("index.html")


@app.route('/logout')
@login_required
Example #22
from torch.nn.utils import clip_grad_norm
from torch.autograd import Variable
from torch.nn import functional as F
from torch.utils.data import DataLoader
from torch.optim import SGD
from model import model
from utils import process

transform = process.DataTransform(65)
reader = process.DataReader("data", ["smalltrain", "smallvalid", "smalltest"], transform=transform)

max_length, word_vocab, char_vocab, \
 word_tensor, char_tensor = reader.load()
print("finish reading data")

model_ = model.Model(char_vocab.size, word_vocab.size)
optimizer = SGD(model_.parameters(), lr=1.0)

train_set = process.WordDataSet(word_tensor["smalltrain"], char_tensor["smalltrain"], 20, 35)
train_loader = DataLoader(train_set, batch_size=20, shuffle=False)

valid_set = process.WordDataSet(word_tensor["smallvalid"], char_tensor["smallvalid"], 20, 35)
valid_loader = DataLoader(valid_set, batch_size=20, shuffle=False)

test_set = process.WordDataSet(word_tensor["smalltest"], char_tensor["smalltest"], 20, 35)
test_loader = DataLoader(test_set, batch_size=20, shuffle=False)


def repackage_hidden(h):
    """
    This is a step that needs to be understood
Example #23
    else:
        experiment_name = ""

    CONFIG_PATH = "./config/arcus.yaml"

    applications = [
        keras.applications.vgg16,
        keras.applications.vgg19,
        keras.applications.resnet,
        keras.applications.xception,
        keras.applications.inception_resnet_v2,
        keras.applications.inception_v3,
    ]
    architectures = [
        keras.applications.vgg16.VGG16,
        keras.applications.vgg19.VGG19,
        keras.applications.resnet.ResNet152,
        keras.applications.xception.Xception,
        keras.applications.inception_resnet_v2.InceptionResNetV2,
        keras.applications.inception_v3.InceptionV3,
    ]

    print("Initiating...\n")

    with open(CONFIG_PATH, 'r') as y:
        print("Opening config...\n")
        config = yaml.safe_load(y)  # bare yaml.load is unsafe/deprecated without a Loader

    if config['cuda'] is True:
        print("Setting CUDA device environment variables...")
        os.environ['CUDA_DEVICE_ORDER'] = config['CUDA_DEVICE_ORDER']
        os.environ['CUDA_VISIBLE_DEVICES'] = config['CUDA_VISIBLE_DEVICES']

    os.environ["WANDB_RUN_GROUP"] = "experiment " + experiment_name
    util.wb.init_wandb(yaml=config['wandb_config_path'], run_name="init")

    training_dataset, validation_dataset, testing_dataset = ds.load_train_validate_test_datasets(config)

    for app, arch in zip(applications, architectures):
        model = m.Model(training_dataset, validation_dataset, testing_dataset, app, arch)
        model.train()
        model.evaluate()
        model.finish()
        del model
Example #24
def main(save_path, params):
    nhidden = params['nhidden']
    dropout = params['dropout']
    word2vec = params['word2vec']
    sub2vec = params['sub2vec']
    subdict = params['subdic']
    dataset = params['data']
    nlayers = params['nlayers']
    train_emb = params['train_emb']
    sub_dim = params['sub_dim']
    use_feat = params['use_feat']
    gating_fn = params['gating_fn']

    # save settings
    shutil.copyfile('config.py', '%s/config.py' % save_path)

    use_subs = sub_dim > 0
    dp = DataPreprocessor.DataPreprocessor()
    data = dp.preprocess(dataset,
                         no_training_set=False,
                         use_subs=use_subs,
                         subdict=subdict)

    print "building minibatch loaders ...", datetime.now().strftime(
        '%Y-%m-%d %H:%M:%S')
    batch_loader_train = MiniBatchLoader.MiniBatchLoader(data.training,
                                                         BATCH_SIZE,
                                                         sample=1)
    batch_loader_val = MiniBatchLoader.MiniBatchLoader(data.validation,
                                                       BATCH_SIZE)

    print "building network ...", datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    W_init, embed_dim, = Helpers.load_word2vec_embeddings(
        data.dictionary[0], word2vec)
    S_init, sub_dim = Helpers.load_sub_embeddings(data.dictionary[1], sub2vec)
    m = model.Model(nlayers, data.vocab_size, data.num_chars, W_init, S_init,
                    nhidden, embed_dim, dropout, train_emb, sub_dim, use_feat,
                    gating_fn)

    print "training ...", datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    num_iter = 0
    max_acc = 0.
    deltas = []

    logger = open(save_path + '/log', 'a')  # unbuffered text mode is not allowed in Python 3

    if os.path.isfile('%s/best_model.p' % save_path):
        print('loading previously saved model',
              datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
        m.load_model('%s/best_model.p' % save_path)
        print("model loaded")
    else:
        print('saving init model', datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
        m.save_model('%s/model_init.p' % save_path)
        print('loading init model',
              datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
        m.load_model('%s/model_init.p' % save_path)
    for epoch in range(NUM_EPOCHS):
        print("epochs training ...",
              datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
        estart = time.time()
        new_max = False
        for dw, dt, qw, qt, a, m_dw, m_qw, tt, tm, c, m_c, cl, fnames in batch_loader_train:
            loss, tr_acc, probs = m.train(dw, dt, qw, qt, c, a, m_dw, m_qw, tt,
                                          tm, m_c, cl)

            message = "Epoch %d TRAIN loss=%.4e acc=%.4f elapsed=%.1f" % (
                epoch, loss, tr_acc, time.time() - estart)
            print(message)
            logger.write(message + '\n')

            num_iter += 1
            if num_iter % VALIDATION_FREQ == 0:
                total_loss, total_acc, n, n_cand = 0., 0., 0, 0.

                for dw, dt, qw, qt, a, m_dw, m_qw, tt, tm, c, m_c, cl, fnames in batch_loader_val:
                    outs = m.validate(dw, dt, qw, qt, c, a, m_dw, m_qw, tt, tm,
                                      m_c, cl)
                    loss, acc, probs = outs[:3]

                    bsize = dw.shape[0]
                    total_loss += bsize * loss
                    total_acc += bsize * acc
                    n += bsize
                print('validate on', n, 'validation data')
                val_acc = total_acc / n
                if val_acc > max_acc:
                    max_acc = val_acc
                    m.save_model('%s/best_model.p' % save_path)
                    new_max = True
                message = "Epoch %d VAL loss=%.4e acc=%.4f max_acc=%.4f" % (
                    epoch, total_loss / n, val_acc, max_acc)
                print(message)
                logger.write(message + '\n')

        m.save_model('%s/model_%d.p' % (save_path, epoch))
        message = "After Epoch %d: Train acc=%.4f, Val acc=%.4f" % (
            epoch, tr_acc, val_acc)
        print(message)
        logger.write(message + '\n')

        # learning schedule
        if epoch >= 2:
            m.anneal()
        # stopping criterion
        if not new_max:
            break

    logger.close()