Example no. 1
0
from utils import print_user_flags

import data_utils

from micro_controller import MicroController
from micro_child import MicroChild

# Scraped ENAS-style example: TensorFlow 1.x flag setup for a NAS child
# network.  NOTE(review): this fragment never imports `tf` or the
# DEFINE_* helpers (presumably they come from `utils`, like
# print_user_flags above) — likely lost in extraction; confirm against
# the original source file.
flags = tf.app.flags
FLAGS = flags.FLAGS

################## YOU Should write under parameter ######################
# Dataset locations and image-preprocessing knobs.
DEFINE_string("output_dir", "./output", "")
DEFINE_string("train_data_dir", "./data/train", "")
DEFINE_string("val_data_dir", "./data/valid", "")
DEFINE_string("test_data_dir", "./data/test", "")
DEFINE_integer("channel", 1, "MNIST: 1, Cifar10: 3")
DEFINE_integer("img_size", 32, "enlarge image size")
# Per the help text: 2 doubles the training set via augmentation
# (55000 -> 110000 images); 1 disables augmentation.
DEFINE_integer("n_aug_img", 1,
               "if 2: num_img: 55000 -> aug_img: 110000, elif 1: False")
DEFINE_float("child_lr_min", 0.00005, "for lr schedule")
##########################################################################

DEFINE_boolean("reset_output_dir", True, "Delete output_dir if exists.")
DEFINE_string("data_format", "NHWC", "'NHWC or NCHW'")
DEFINE_string("search_for", "micro", "")

DEFINE_integer("batch_size", 160, "")
# 630 = 10 + 20 + 40 + 80 + 160 + 320 (doubling schedule per the help text).
DEFINE_integer("num_epochs", 630, " = (10 + 20 + 40 + 80 + 160 +320)")

DEFINE_integer("child_lr_dec_every", 100, "")
DEFINE_integer(
Example no. 2
0
# Scraped ENAS example: TensorFlow 1.x flag setup for architecture search.
# NOTE(review): `tf`, `os`, `utils`, and the DEFINE_* helpers are not
# imported in this fragment — presumably lost in extraction; confirm
# against the original source file.
flags = tf.app.flags
FLAGS = flags.FLAGS
# Pins all work to GPU index 1.  NOTE(review): hard-coding this here
# overrides any CUDA_VISIBLE_DEVICES the user set externally — confirm
# that is intended before reuse.
os.environ['CUDA_VISIBLE_DEVICES'] = '1'

logger = utils.logger


# Run management.
DEFINE_boolean("reset_output_dir", False, "Delete output_dir if exists.")
DEFINE_string("data_path", "", "")
DEFINE_string("output_dir", "", "")
DEFINE_string("summaries_dir", "", "")
# Fixed help-text typo: 'NCWH' -> 'NCHW' (the TensorFlow channel-first
# layout; matches the spelling used in the other snippets in this file).
DEFINE_string("data_format", "NHWC", "'NHWC' or 'NCHW'")
DEFINE_string("search_for", None, "Must be [macro|micro]")

# Hardware / batching.
DEFINE_integer("num_gpus", 1, "")
DEFINE_integer("num_cpus", 1, "")
DEFINE_integer("batch_size", 32, "")

DEFINE_integer("num_epochs_evolve", 3, "")

# Child-network architecture and training schedule.
DEFINE_integer("num_epochs", 300, "")
DEFINE_integer("child_lr_dec_every", 100, "")
DEFINE_integer("child_num_layers", 5, "")
DEFINE_integer("child_num_cells", 8, "")
DEFINE_integer("child_filter_size", 5, "")
DEFINE_integer("child_out_filters", 48, "")
DEFINE_integer("child_out_filters_scale", 1, "")
DEFINE_integer("child_num_branches", 4, "")
DEFINE_integer("child_num_aggregate", None, "")
DEFINE_integer("child_num_replicas", 1, "")
Example no. 3
0
from utils import DEFINE_float
from utils import print_user_flags
from utils import loss_function
from utils import plot_attention_map

# Scraped seq2seq example: TF 1.x flags for an encoder-decoder translator
# (attention optional).  NOTE(review): `tf` is not imported in this
# fragment; the DEFINE_* helpers come from `utils` per the imports above.
flags = tf.app.flags
FLAGS = flags.FLAGS

# Run-management switches.
DEFINE_boolean("reset_output_dir", False, "Delete output_dir if exists.")
DEFINE_boolean("restore_checkpoint", True, "Auto retrieve checkpoint or not.")
DEFINE_boolean("is_toy", False, "Toy Dataset or not, useful for debugging")
DEFINE_boolean("is_attention", False, "Using Attention for Decoder or not")
DEFINE_string("data_path", "en-es", "Path to parallel corpus data (.txt)")
DEFINE_string("output_dir", "output", "Path to log folder")
DEFINE_string("cell_type", "gru", "GRU or LSTM or naive, ...")
# Model size and training schedule.
DEFINE_integer("n_epochs", 10, "Number of training epochs")
DEFINE_integer("n_layers", 1, "Number of stacked RNN layers")
DEFINE_integer("n_hidden", 1024, "Dimensionality of RNN output")
DEFINE_integer("emb_dim", 256, "Dimensionality of word embedding, src==tgt")
DEFINE_integer("save_every", 500, "How many batches to save once")
DEFINE_integer("eval_every", 100, "How many batches to evaluate")
DEFINE_integer("batch_size", 64, "Batch size. SET to `2` for easy debugging.")
# <= 0 loads the full corpus; small values keep debugging runs fast.
DEFINE_integer("n_loaded_sentences", 20000, "Number of sentences to load, "
                                            "Set to <= 0 for loading all data,"
                                            "SET LOWER FOR DEBUGGING")
DEFINE_float("init_lr", 1e-3, "Init learning rate. This is default for Adam.")
DEFINE_float("drop_keep_prob", 1.0, "Dropout rate")


def train():
  """
    "The output directory where the model checkpoints will be written.")

## Other parameters
# Scraped BERT-style fine-tuning flag fragment.  NOTE(review): the
# DEFINE_string call for the output directory that precedes this group
# is cut off above — this fragment starts mid-file.

DEFINE_string(
    "init_checkpoint", None,
    "Initial checkpoint (usually from a pre-trained BERT model).")

DEFINE_bool(
    "do_lower_case", True,
    "Whether to lower case the input text. Should be True for uncased "
    "models and False for cased models.")

DEFINE_integer(
    "max_seq_length", 128,
    "The maximum total input sequence length after WordPiece tokenization. "
    "Sequences longer than this will be truncated, and sequences shorter "
    "than this will be padded.")

DEFINE_bool("use_record", False,
    "Whether to use existing train_record and other records.")

# Mode switches: any combination of train / eval / predict may be enabled.
DEFINE_bool("do_train", False, "Whether to run training.")

DEFINE_bool("do_eval", False, "Whether to run eval on the dev set.")

DEFINE_bool(
    "do_predict", False,
    "Whether to run the model in inference mode on the test set.")

DEFINE_integer("train_batch_size", 32, "Total batch size for training.")