def main(argv=None):
    """Entry point: set up result folders and logging, import the selected
    model module, parse flags, run training, and log total wall time.

    Args:
        argv: unused; kept for compatibility with app-style runners.

    Raises:
        ValueError: if the module named by the --model flag cannot be imported.
    """
    print("start of main")
    main_time = time.time()

    # Per-run output folder (timestamped name, so it should not pre-exist).
    os.makedirs(RESULT_DIR)

    # logging
    LOG_FILE = os.path.join(RESULT_DIR, "log.txt")
    logger.set_logger(level=FLAGS.get('log_level'),
                      stream=True,
                      fileh=True,
                      filename=LOG_FILE)

    # file handling
    logger.info("create folder for results: {}".format(RESULT_DIR))
    if FLAGS.checkpoint_step > 0:
        os.mkdir(CHECKPOINT_DIR)
        logger.info("create checkpoints folder: {}".format(CHECKPOINT_DIR))

    # Import the model module named by the --model flag, e.g. "models.lenet".
    # fromlist=[''] forces __import__ to return the leaf module, not "models".
    try:
        model_path = 'models.' + FLAGS.get('model').lower()
        model_module = __import__(model_path, fromlist=[''])
    except ImportError as err:
        # BUG FIX: original called .format() with no argument.
        raise ValueError("no such model exists: {}".format(model_path)) from err

    # parse all FLAGS
    FLAGS.parse_and_log()

    # start training
    train(model_module.Model)

    # print a short summary before exiting
    logger.newline(2)
    logger.info("total time used: {}".format(time.time() - main_time))
    logger.info("summary dir: " + RESULT_DIR)
    logger.newline()
    logger.info("~end of main~")
          help="overwrite the number of testing examples per epoch.")
# Optional override for the dataset's default image size (images are square).
# FIX: corrected "squre" -> "square" typo in the help text.
FLAGS.add("--image_size",
          type=int,
          help="overwrite the default image size, square image.")

# Per-dataset default settings, keyed by dataset name.
# Fields: train/test example counts, number of categories, data folder,
# and the (square) image edge length in pixels.
DataConf = collections.namedtuple(
    'DataConf', ['num_train', 'num_test', 'num_cats', 'folder', 'image_size'])
DATA_CONF = {
    'mnist': DataConf(num_train=2000, num_test=500, num_cats=5,
                      folder='', image_size=20),
    'cifar-10': DataConf(num_train=50000, num_test=10000, num_cats=10,
                         folder='', image_size=24),
    # NOTE(review): num_train=5000 here vs 50000 for cifar-10 — possibly a
    # deliberate subset, possibly a missing zero; confirm with the author.
    'cifar-100': DataConf(num_train=5000, num_test=10000, num_cats=100,
                          folder='', image_size=24),
}

# Any dataset flag the user left unset (None) falls back to the dataset's
# default configuration from DATA_CONF.
data_conf = DATA_CONF[FLAGS.get('dataset')]
_dataset_defaults = {
    'num_train_examples': data_conf.num_train,
    'num_test_examples': data_conf.num_test,
    'num_cats': data_conf.num_cats,
    'dataset_fld': data_conf.folder,
    'image_size': data_conf.image_size,
}
FLAGS.overwrite_none(**_dataset_defaults)

#########################################
# global variables
#########################################
# CATEGORIES = FLAGS.categories.split(',')
# Constants used for dealing with the files, matches convert_to_records.
TFR_SUFFIX = '.TFR'  # file-name suffix for TFRecord data files

#########################################
# functions
          type=int,
          default=-1,
          help="""Number of steps to write checkpoint, -1 to disable.""")
# Maximum number of checkpoint files to keep on disk.
# FIX: help text said "default: 10" but the actual default is 5.
FLAGS.add('--num_checkpoints',
          type=int,
          default=5,
          help="Number of maximum checkpoints to keep. default: 5")
# Optional throttle between training steps; 0 disables sleeping.
FLAGS.add('--sleep',
          type=int,
          default=0,
          help="the number of seconds to sleep between steps. 0, 1, 2...")

#########################################
# global variables
#########################################
# Per-run output folder name: "<model>_<timestamp>".
RESULT_FLD = FLAGS.get('model') + '_' + CUR_TIME
# Full path of the run's result directory under the configured outputs dir.
RESULT_DIR = os.path.join(FLAGS.get('outputs_dir'), RESULT_FLD)
# Checkpoints are stored in a subfolder of the result directory.
CHECKPOINT_DIR = os.path.join(RESULT_DIR, "checkpoints")
CHECKPOINT_PATH = os.path.join(CHECKPOINT_DIR, 'model.ckpt')

#########################################
# functions
#########################################


def train(model_class):
    """Train neural network for a number of steps."""
    logger.info("\nstart training...")
    with tf.Graph().as_default():
        # build computing graph
        with tf.variable_scope("model", reuse=None):
Exemple #4
0
# ================================
# Smoke-test of the logger helpers: stream handler setup, info/error
# levels, and newline spacing between messages.
# utils.set_logging(stream=False)
# utils.set_logger(stream=True)
# logger.info("logger111111")
logger.set_logger(stream=True)
logger.info(CUR_TIME)
logger.newline()
logger.error("newline beneath~")
logger.newline(2)  # emit two blank lines
logger.info("haha")

# ================================
# test FLAGS
# ================================
FLAGS.add("--aa", type=float, default=11., help="doc for dd")
logger.info("aa: {}".format(FLAGS.get('aa')))
# For a flag that will be overwritten later, leave the default unset (None).
FLAGS.add("--bb", type=int, default=None, help="doc for dd")
if FLAGS.get('aa') == 11:
    FLAGS.overwrite_none(bb=15)

FLAGS.add("--cc", type=bool, default=False, help="doc for dd")
FLAGS.add("--dd", type=str, default="dddddd", help="doc for dd")
# For flags that will be overwritten later, leave the default unset.
FLAGS.add("--ff", type=str, help="doc for dd")
FLAGS.add("--gg", type=str, help="doc for dd")
FLAGS.add("--hh", type=str, default="hhhhh", help="doc for dd")
# Overwrite or set new default values; 'ee' was never added, so this
# exercises overwriting a flag that does not exist yet.
FLAGS.overwrite_defaults(dd="replaced dd", ee="an extra flag", ff="ff")
# overwrite_none only fills flags whose current value is None; 'hh' already
# has a value, and 'ii' was never declared ("illigal" is deliberate test data).
FLAGS.overwrite_none(hh="this won't show", gg="gggg", ii="illigal")
FLAGS.add("--jj", type=str, default="hhhhh", help="doc for dd")