def main(args):
    """Train a sentiment model from scratch on pre-trained word embeddings,
    then report accuracy on the held-out test split."""
    info('loading embedding vec')
    emb = np.load(os.path.expanduser(args.embedding))

    info('constructing config')
    conf = config(args, emb)

    info('constructing graph')
    env = build_graph(conf)

    info('initializing session')
    # The embedding matrix is fed once at init time so the (large) constant
    # is not baked into the graph definition.
    session = tf.Session()
    session.run(tf.global_variables_initializer(),
                feed_dict={conf.embedding: emb})
    session.run(tf.local_variables_initializer())
    env.sess = session

    info('loading data')
    (X_train, y_train), (X_test, y_test), (X_valid, y_valid) = \
        load_data(conf.data, conf.bipolar)

    info('training model')
    # load=False: start from freshly initialized weights.
    train(env, X_train, y_train, X_valid, y_valid, load=False,
          batch_size=conf.batch_size, epochs=conf.epochs, name=conf.name)
    evaluate(env, X_test, y_test, batch_size=conf.batch_size)

    env.sess.close()
def main(args):
    """Restore a trained word-embedding model, craft adversarial texts from
    the test split, and evaluate the model on both clean and adversarial data."""
    info('loading embedding vec')
    emb = np.load(os.path.expanduser(args.embedding))

    info('constructing config')
    conf = config(args, emb)

    info('constructing graph')
    env = build_graph(conf)
    env.cfg = conf

    info('initializing session')
    session = tf.Session()
    session.run(tf.global_variables_initializer(),
                feed_dict={conf.embedding: emb})
    session.run(tf.local_variables_initializer())
    env.sess = session

    info('loading data')
    # Train portion is discarded; only the test split is needed here.
    (_, _), (X_data, y_data) = load_data(conf.data, conf.bipolar, -1)

    info('loading model')
    # load=True: restore saved weights rather than train.
    train(env, load=True, name=conf.name)

    info('evaluating against clean test samples')
    evaluate(env, X_data, y_data, batch_size=conf.batch_size)

    # Maps embedding-space vectors back to words/sentences.
    env.re = ReverseEmbedding(w2v_file=conf.w2v, index_file=conf.indexer)

    info('making adversarial texts')
    X_adv, X_sents = make_adversarial(env, X_data)

    info('evaluating against adversarial texts')
    evaluate(env, X_adv, y_data, batch_size=conf.batch_size)
    y_adv = predict(env, X_adv, batch_size=conf.batch_size)

    env.sess.close()
    postfn(conf, X_sents, y_data, y_adv)
def main(args):
    """Restore a trained char-level (one-hot) model, generate beam-search
    adversarial texts, and evaluate on clean vs. adversarial inputs."""
    info('loading embedding vec')
    # Char-level model: the "embedding" is the identity over the vocabulary,
    # i.e. one-hot character vectors.
    emb = np.eye(args.vocab_size).astype(np.float32)

    info('constructing config')
    conf = config(args, emb)

    info('constructing graph')
    env = build_graph(conf)
    env.cfg = conf

    info('initializing session')
    session = tf.Session()
    session.run(tf.global_variables_initializer(),
                feed_dict={conf.embedding: emb})
    session.run(tf.local_variables_initializer())
    env.sess = session

    info('loading data')
    # Only the test split is used; validation_split=-1 selects it.
    (_, _), (X_data, y_data) = load_data(os.path.expanduser(conf.data),
                                         conf.bipolar, validation_split=-1)

    info('loading model')
    # load=True: restore saved weights rather than train.
    train(env, load=True, name=conf.name)

    info('evaluating against clean test samples')
    evaluate(env, X_data, y_data, batch_size=conf.batch_size)

    info('making adversarial texts')
    X_adv = make_adversarial(env, X_data)
    # Flatten the beam candidates to (num_samples * beam_width, charlen) and
    # replicate the labels beam_width times so rows stay aligned.
    X_adv = np.reshape(X_adv, (-1, conf.charlen))
    y_data = np.tile(y_data, (conf.beam_width, 1))

    info('evaluating against adversarial texts')
    evaluate(env, X_adv, y_data, batch_size=conf.batch_size)
    y_adv = predict(env, X_adv, batch_size=conf.batch_size)

    env.sess.close()

    info('recover chars from indices')
    X_sents = index2char(X_adv, unk=conf.unk)
    postfn(conf, X_sents, y_data, y_adv)
def main(args):
    """Restore a trained char-level (one-hot) model and evaluate it on the
    test split.

    Fix: the progress message before ``train(env, load=True, ...)`` said
    'training model', but ``load=True`` only restores saved weights — the
    sibling drivers in this project log 'loading model' at the same step, so
    this one now does too.
    """
    info('loading embedding vec')
    # Char-level model: identity matrix = one-hot character "embedding".
    embedding = np.eye(args.vocab_size).astype(np.float32)
    info('constructing config')
    cfg = config(args, embedding)
    info('constructing graph')
    env = build_graph(cfg)
    info('initializing session')
    sess = tf.Session()
    sess.run(tf.global_variables_initializer(),
             feed_dict={cfg.embedding: embedding})
    sess.run(tf.local_variables_initializer())
    env.sess = sess
    info('loading data {}'.format(cfg.data))
    # -1 selects the test-only split (validation_split, per the other drivers).
    X_test, y_test = load_data(cfg.data, cfg.bipolar, -1)
    info('loading model')  # was 'training model'; load=True only restores weights
    train(env, load=True, name=cfg.name)
    evaluate(env, X_test, y_test, batch_size=cfg.batch_size)
    env.sess.close()
# NOTE: main loop for training
if __name__ == "__main__":
    # Command-line options (printed for the run log).
    opt = parse_option(print_option=True)

    # Training / validation data loaders.
    dataset_trn, dataset_val = get_dataloader(opt)

    # Model, loss, and optimizer.
    net = create_model(opt)
    criterion = get_loss_function(opt)
    optimizer = get_optimizer(net, criterion, opt)

    # Cosine annealing over the epochs that follow the LR warmup phase.
    scheduler = CosineAnnealingLR(
        optimizer,
        eta_min=opt.lr * opt.eta_min_ratio,
        T_max=(opt.max_epoch - opt.lr_warmup_epoch))

    # Best validation Dice score seen so far, and the epoch it occurred.
    best_dice, best_epoch = 0, 0

    for epoch in range(opt.start_epoch, opt.max_epoch):
        # One training pass, then validation (which tracks the best score),
        # then the learning-rate update.
        train(net, dataset_trn, optimizer, criterion, epoch, opt)
        best_dice, best_epoch = validate(
            dataset_val, net, criterion, optimizer, epoch, opt,
            best_dice, best_epoch)
        lr_update(epoch, opt, optimizer, scheduler)

    print('Training done')