def use_log():
    """Demonstrate the project's logging helpers and their error-reporting variants."""
    log_info("Start")
    try:
        log_warning("Below code has error:")
        result = 1 / 0
    except Exception as e:
        # Emit a single-line custom error message.
        log_error("Division by 0")
        # Emit the stack trace of the caught exception.
        log_exception(e)
        # log_error_pro = log_error + log_exception: custom message plus stack trace in one call.
        log_error_pro("Division by 0", e)
# Command-line options: input image size, sliding-window stride (fraction of
# the window), optimizer learning rate, and the window size used for bagging.
parser.add_argument('--img_size', type=int, default=512, help='Size of input images')
parser.add_argument('--stride', type=float, default=0.5, help='From 0 to 1')
parser.add_argument('--lr', type=float, default=1e-3, help='Learning rate')
parser.add_argument('--wsize', type=int, default=60, help='Size of windows for bagging')
args = parser.parse_args()
log_info('Params: ' + str(args))

# DATASET SELECTION: --test swaps in the simpler MNIST-bags sanity dataset;
# otherwise `args.arti` selects the generated dataset.
# NOTE(review): this chunk reads `args.arti` while a sibling script declares
# the flag as '--artif' -- confirm the add_argument spelling (not visible here)
# matches, or this branch can never trigger / will raise AttributeError.
train_tr, test_tr = get_basic_transforms()
if args.test:
    train_ds = MnistBags(wsize=(args.wsize, args.wsize))
    test_ds = MnistBags(wsize=(args.wsize, args.wsize), train=False)
    log_info('Mnist dataset is used')
elif args.arti:
    # Generated dataset with hard-coded crop=True here (the sibling script
    # passes crop=args.crop -- presumably crops the central cell; confirm).
    train_ds = GENdataset(transform=train_tr, inp_size=args.img_size, wsize=(args.wsize, args.wsize),
                          crop=True, stride=args.stride)
    # NOTE: statement truncated in this chunk -- the remaining keyword
    # arguments of this call are outside the visible source.
    test_ds = GENdataset(transform=test_tr, inp_size=args.img_size,
# Training/bookkeeping options: batch size, run id (used in save paths),
# checkpoint frequency, and best-model saving.
parser.add_argument('--batch_size', type=int, default=1, help='Batch size')
parser.add_argument('--id', type=str, default='default', help='Unique net id to save')
parser.add_argument('--save_each', type=int, default=50, help='Save model weights each n epochs')
# NOTE(review): action='store_false' means passing --save_best DISABLES
# best-model saving (default is True); a sibling script declares the same flag
# with action='store_true'. Confirm which polarity is intended before reuse.
parser.add_argument('--save_best', action='store_false', help='Save best test model?')
args = parser.parse_args()
log_info('Params: ' + str(args))
# Kept for reference: optional logging of the git revision of the run.
# log_info('GIT revision: ' + subprocess.check_output('git rev-parse HEAD', shell=True).decode("utf-8"))

# DATASETS INITIALIZATION
train_tr, test_tr = get_basic_transforms()
if args.test:
    # --test swaps in an MNIST-backed stand-in dataset with the same interface
    # (digit '1' as positive class, '0' as negative).
    train_ds = CentriollesDatasetOn(transform=train_tr, pos_dir='dataset/mnist/1',
                                    neg_dir='dataset/mnist/0', inp_size=args.img_size)
    test_ds = CentriollesDatasetOn(transform=test_tr, pos_dir='dataset/mnist/1',
                                   neg_dir='dataset/mnist/0', inp_size=args.img_size, train=False)
# Optimizer and schedule options.
parser.add_argument('--wd', type=float, default=1e-6, help='Weight decay')
# NOTE(review): typos in this help string ('multipliyer', 'epoches') are
# user-visible; left unchanged here since this edit only adds comments.
parser.add_argument('--ld', type=float, default=0.95, help='Learning rate multipliyer for every 10 epoches')
parser.add_argument('--batch_size', type=int, default=1, help='Batch size')
parser.add_argument('--epoch', type=int, default=0, help='Number of epoches')
# Dataset-selection and preprocessing flags.
parser.add_argument('--test', action='store_true', help='Test this model on simpler dataset')
parser.add_argument('--artif', action='store_true', help='Artificial dataset')
parser.add_argument('--crop', action='store_true', help='Crop only the central cell')
parser.add_argument('--stride', type=float, default=0.5, help='From 0 to 1')
parser.add_argument('--pyramid_layers', type=int, default=28, help='Number of layers in da pyramid')
# Bookkeeping: run id (used in save paths), checkpoint frequency, best-model saving.
parser.add_argument('--id', type=str, default='default', help='Unique net id to save')
parser.add_argument('--save_each', type=int, default=0, help='Save model weights each n epochs')
parser.add_argument('--save_best', action='store_true', help='Save best test model?')
args = parser.parse_args()
log_info('Params: ' + str(args))
# Kept for reference: optional logging of the git revision of the run.
# log_info('GIT revision: ' + subprocess.check_output('git rev-parse HEAD', shell=True).decode("utf-8"))

# DATASETS INITIALIZATION
# NOTE(review): `args.use_bags` is read below but no '--use_bags' add_argument
# is visible in this chunk -- confirm it is declared earlier in the file.
train_tr, test_tr = get_basic_transforms()
if args.use_bags:
    if args.artif:
        # Artificial (generated) bag dataset; pyramid_layers controls the
        # image-pyramid depth (presumably -- confirm against GENdataset).
        train_ds = GENdataset(transform=train_tr, inp_size=args.img_size, wsize=(args.wsize, args.wsize),
                              crop=args.crop, stride=args.stride, pyramid_layers=args.pyramid_layers)
        test_ds = GENdataset(transform=test_tr, inp_size=args.img_size, wsize=(args.wsize, args.wsize),
                             crop=args.crop, stride=args.stride, train=False, pyramid_layers=args.pyramid_layers)
        # NOTE: statement truncated in this chunk -- the remaining keyword
        # arguments of this call are outside the visible source.
        real_test_ds = CentriollesDatasetBags(transform=test_tr, inp_size=args.img_size, wsize=(args.wsize, args.wsize),