def TrainNet():
    """Build the training step: load a batch, run the model, compute the loss.

    All configuration is read from the enclosing-scope ``args`` namespace.
    Returns a dict with the training loss, softmax predictions, and the
    ground-truth labels for downstream metric computation.
    """
    # Prefer real OFRecord data when a directory is configured; otherwise
    # fall back to synthetic (generated) batches.
    if args.train_data_dir:
        assert os.path.exists(args.train_data_dir)
        print("Loading data from {}".format(args.train_data_dir))
        labels, images = ofrecord_util.load_imagenet_for_training(args)
    else:
        print("Loading synthetic data.")
        labels, images = ofrecord_util.load_synthetic(args)

    logits = model_dict[args.model](images, args)

    # With label smoothing the targets become soft one-hot vectors, which
    # requires the dense (non-sparse) cross-entropy op.
    if args.label_smoothing > 0:
        smoothed_labels = label_smoothing(
            labels, args.num_classes, args.label_smoothing, logits.dtype)
        loss = flow.nn.softmax_cross_entropy_with_logits(
            smoothed_labels, logits, name="softmax_loss")
    else:
        loss = flow.nn.sparse_softmax_cross_entropy_with_logits(
            labels, logits, name="softmax_loss")

    # NOTE(review): mean reduction is skipped under fp16 — presumably the
    # fp16/loss-scaling path expects the unreduced per-sample loss; confirm
    # against the optimizer/mixed-precision setup.
    if not args.use_fp16:
        loss = flow.math.reduce_mean(loss)
    flow.losses.add_loss(loss)

    predictions = flow.nn.softmax(logits)
    outputs = {"loss": loss, "predictions": predictions, "labels": labels}

    # Configure warmup, learning-rate schedule, and the optimizer.
    optimizer_util.set_up_optimizer(loss, args)
    return outputs
def TrainNet():
    """Training job: fetch a batch, run the selected model, return loss/preds.

    Uses real OFRecord data when ``args.train_data_dir`` is set, synthetic
    batches otherwise; returns ``{"loss", "predictions", "labels"}``.
    """
    if args.train_data_dir:
        # Real ImageNet OFRecord data from disk.
        assert os.path.exists(args.train_data_dir)
        print("Loading data from {}".format(args.train_data_dir))
        labels, images = ofrecord_util.load_imagenet_for_training(args)
    else:
        # Synthetic batches for benchmarking without a dataset.
        print("Loading synthetic data.")
        labels, images = ofrecord_util.load_synthetic(args)

    # NOTE(review): transpose is requested only for synthetic input — assumes
    # the real-data loader already emits the model's expected layout; confirm
    # with the loader implementation.
    logits = model_dict[args.model](
        images,
        need_transpose=not args.train_data_dir,
    )

    # Label smoothing turns targets into soft one-hot vectors, so the dense
    # cross-entropy op is used instead of the sparse one.
    if args.label_smoothing > 0:
        soft_labels = label_smoothing(
            labels, args.num_classes, args.label_smoothing, logits.dtype)
        loss = flow.nn.softmax_cross_entropy_with_logits(
            soft_labels, logits, name="softmax_loss")
    else:
        loss = flow.nn.sparse_softmax_cross_entropy_with_logits(
            labels, logits, name="softmax_loss")
    flow.losses.add_loss(loss)

    predictions = flow.nn.softmax(logits)
    return {"loss": loss, "predictions": predictions, "labels": labels}
on_value=1 - eta + eta / classes, off_value=eta/classes) ======= on_value=1 - eta + eta / classes, off_value=eta / classes) >>>>>>> tianshu @flow.global_function("train", get_train_config(args)) def TrainNet(): if args.train_data_dir: <<<<<<< HEAD print(args.train_data_dir) ======= >>>>>>> tianshu assert os.path.exists(args.train_data_dir) print("Loading data from {}".format(args.train_data_dir)) (labels, images) = ofrecord_util.load_imagenet_for_training(args) else: print("Loading synthetic data.") (labels, images) = ofrecord_util.load_synthetic(args) logits = model_dict[args.model](images, args) if args.label_smoothing > 0: one_hot_labels = label_smoothing(labels, args.num_classes, args.label_smoothing, logits.dtype) loss = flow.nn.softmax_cross_entropy_with_logits(one_hot_labels, logits, name="softmax_loss") else: loss = flow.nn.sparse_softmax_cross_entropy_with_logits(labels, logits, name="softmax_loss") if not args.use_fp16: loss = flow.math.reduce_mean(loss) predictions = flow.nn.softmax(logits) outputs = {"loss": loss, "predictions": predictions, "labels": labels}