# ----- Example 1 ("Esempio n. 1" — scrape artifact, converted to a comment) -----
# Dataset / debugging switches.
DEFINE_boolean("is_toy", False, "Toy Dataset or not, useful for debugging")
DEFINE_boolean("is_attention", False, "Using Attention for Decoder or not")

# Paths and architecture choices.
DEFINE_string("data_path", "en-es", "Path to parallel corpus data (.txt)")
DEFINE_string("output_dir", "output", "Path to log folder")
DEFINE_string("cell_type", "gru", "GRU or LSTM or naive, ...")

# Training-schedule and model-size hyperparameters.
DEFINE_integer("n_epochs", 10, "Number of training epochs")
DEFINE_integer("n_layers", 1, "Number of stacked RNN layers")
DEFINE_integer("n_hidden", 1024, "Dimensionality of RNN output")
DEFINE_integer("emb_dim", 256, "Dimensionality of word embedding, src==tgt")
DEFINE_integer("save_every", 500, "How many batches to save once")
DEFINE_integer("eval_every", 100, "How many batches to evaluate")
DEFINE_integer("batch_size", 64, "Batch size. SET to `2` for easy debugging.")
DEFINE_integer(
    "n_loaded_sentences", 20000,
    "Number of sentences to load, "
    "Set to <= 0 for loading all data,"
    "SET LOWER FOR DEBUGGING")

# Optimizer settings.
DEFINE_float("init_lr", 1e-3, "Init learning rate. This is default for Adam.")
DEFINE_float("drop_keep_prob", 1.0, "Dropout rate")


def train():
  """
  Training driver: make sure FLAGS.output_dir exists, optionally resetting it.
  """
  print("-" * 80)
  out_dir = FLAGS.output_dir
  if os.path.isdir(out_dir):
    # Directory already present: wipe and recreate only when a reset is asked for.
    if FLAGS.reset_output_dir:
      print("Path {0} exists. Remove and remake.".format(out_dir))
      shutil.rmtree(out_dir)
      os.makedirs(out_dir)
  else:
    print("Path {0} does not exist. Creating.".format(out_dir))
    os.makedirs(out_dir)
# ----- Example 2 ("Esempio n. 2" — scrape artifact, converted to a comment) -----
from micro_controller import MicroController
from micro_child import MicroChild

# Shorthand for TensorFlow's flag module and the parsed flag-value container.
flags = tf.app.flags
FLAGS = flags.FLAGS

############## Parameters you are expected to set for each run ##############
DEFINE_string("output_dir", "./output", "")
DEFINE_string("train_data_dir", "./data/train", "")
DEFINE_string("val_data_dir", "./data/valid", "")
DEFINE_string("test_data_dir", "./data/test", "")
DEFINE_integer("channel", 1, "MNIST: 1, Cifar10: 3")
DEFINE_integer("img_size", 32, "enlarge image size")
DEFINE_integer("n_aug_img", 1,
               "if 2: num_img: 55000 -> aug_img: 110000, elif 1: False")
DEFINE_float("child_lr_min", 0.00005, "for lr schedule")
##########################################################################

# Output-dir handling and data layout.
DEFINE_boolean("reset_output_dir", True, "Delete output_dir if exists.")
DEFINE_string("data_format", "NHWC", "'NHWC or NCHW'")
DEFINE_string("search_for", "micro", "")

# Training-loop sizing.
DEFINE_integer("batch_size", 160, "")
DEFINE_integer("num_epochs", 630, " = (10 + 20 + 40 + 80 + 160 +320)")

# Child-network learning-rate schedule and depth.
DEFINE_integer("child_lr_dec_every", 100, "")
DEFINE_integer(
    "child_num_layers", 15,
    "Number of layer. IN this case we will calculate 4 conv and 2 pooling layers"
)
# NOTE(review): this call was truncated in the source (unclosed parenthesis,
# missing help string — a syntax error). Completed with an empty help string,
# matching this file's other ""-help flag definitions.
DEFINE_integer("child_num_cells", 5, "")
# --- Run-mode switches ---------------------------------------------------
DEFINE_bool("do_train", False, "Whether to run training.")
DEFINE_bool("do_eval", False, "Whether to run eval on the dev set.")
DEFINE_bool(
    "do_predict", False,
    "Whether to run the model in inference mode on the test set.")

# --- Batch sizes per run mode --------------------------------------------
DEFINE_integer("train_batch_size", 32, "Total batch size for training.")
DEFINE_integer("eval_batch_size", 8, "Total batch size for eval.")
DEFINE_integer("predict_batch_size", 8, "Total batch size for predict.")

# --- Optimization schedule -----------------------------------------------
DEFINE_float("learning_rate", 5e-5, "The initial learning rate for Adam.")
DEFINE_float("num_train_epochs", 3.0,
             "Total number of training epochs to perform.")
DEFINE_float(
    "warmup_proportion", 0.1,
    "Proportion of training to perform linear learning rate warmup for. "
    "E.g., 0.1 = 10% of training.")

# --- Checkpointing / estimator-loop cadence ------------------------------
DEFINE_integer("save_checkpoints_steps", 1000,
               "How often to save the model checkpoint.")
DEFINE_integer("iterations_per_loop", 1000,
               "How many steps to make in each estimator call.")