Example #1
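# Assumed imports and setup (not shown in this snippet): SummaryWriter is taken
# from torch.utils.tensorboard (tensorboardX would also work), and logger is the
# module-level logger the snippet writes to.
import argparse
import logging
import os
import random

import numpy as np
from torch.utils.tensorboard import SummaryWriter

logger = logging.getLogger(__name__)

# Only part of the argument list is shown; options referenced later
# (--dir, --data, --robust, --auto_test, --seed, --batch_size) are defined elsewhere.
parser = argparse.ArgumentParser()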
parser.add_argument('--hidden_act', type=str, default='relu')
parser.add_argument('--layer_norm',
                    type=str,
                    default='no_var',
                    choices=['standard', 'no', 'no_var'])
parser.add_argument('--loss_fusion', action='store_true')
parser.add_argument('--dropout', type=float, default=0.1)
parser.add_argument('--bound_opts_relu', type=str, default='zero-lb')

args = parser.parse_args()

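# TensorBoard writer plus a plain-text train.log, both under <args.dir>/log/.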
writer = SummaryWriter(os.path.join(args.dir, 'log'), flush_secs=10)
file_handler = logging.FileHandler(os.path.join(args.dir, 'log/train.log'))
file_handler.setFormatter(
    logging.Formatter('%(levelname)-8s %(asctime)-12s %(message)s'))
logger.addHandler(file_handler)

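# load_data is a project-specific helper returning the four dataset splits.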
data_train_all_nodes, data_train, data_dev, data_test = load_data(args.data)
# In robust-training mode, evaluate on cleaned dev/test data (clean_data is project-specific).
if args.robust:
    data_dev, data_test = clean_data(data_dev), clean_data(data_test)
# For quick automated testing, deterministically subsample the test set to 10
# examples; the assert presumably ensures they fit into a single batch.
if args.auto_test:
    random.seed(args.seed)
    random.shuffle(data_test)
    data_test = data_test[:10]
    assert args.batch_size >= 10
logger.info('Dataset sizes: {}/{}/{}/{}'.format(len(data_train_all_nodes),
                                                len(data_train), len(data_dev),
                                                len(data_test)))

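# Seed Python's and NumPy's RNGs so the rest of the run is reproducible.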
random.seed(args.seed)
np.random.seed(args.seed)


# Reusable helper equivalent to the inline file-handler setup above.
def set_file_handler(logger, dir):
    """Attach a handler writing <dir>/train.log to the given logger."""
    file_handler = logging.FileHandler(os.path.join(dir, 'train.log'))
    file_handler.setFormatter(
        logging.Formatter('%(levelname)-8s %(asctime)-12s %(message)s'))
    logger.addHandler(file_handler)
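# Hypothetical usage, matching the inline setup above:
# set_file_handler(logger, os.path.join(args.dir, 'log'))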