# multi-scale training: every interval, pick a new input resolution (a multiple of 32)
low = 10 if args.dataset == 'COCO' else 16
imgsize = random.randint(low, 21) * 32
dataset.img_size = imgsize
dataloader = DataLoader(dataset, batch_size=batch_size, shuffle=True,
                        num_workers=num_cpu, pin_memory=True, drop_last=False)
dataiterator = iter(dataloader)

# save checkpoint
if iter_i > 0 and (iter_i % args.checkpoint_interval == 0):
    state_dict = {
        'iter': iter_i,
        'model': model.state_dict(),
        'optimizer': optimizer.state_dict(),
    }
    save_path = os.path.join('./weights', f'{job_name}_{today}_{iter_i}.ckpt')
    torch.save(state_dict, save_path)

# save detection results on the evaluation images for visual inspection
if iter_i > 0 and iter_i % args.img_interval == 0:
    for img_path in eval_img_paths:
        eval_img = Image.open(img_path)
        dts = api.detect_once(model, eval_img, conf_thres=0.1, input_size=target_size)
        np_img = np.array(eval_img)
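# A minimal sketch of how training could be resumed from one of the
# checkpoints written above. This helper is an illustration, not part of
# the original script: the name load_checkpoint is hypothetical, and it
# assumes the {'iter', 'model', 'optimizer'} keys used in torch.save() above.
def load_checkpoint(model, optimizer, ckpt_path):
    checkpoint = torch.load(ckpt_path, map_location='cpu')
    model.load_state_dict(checkpoint['model'])
    optimizer.load_state_dict(checkpoint['optimizer'])
    # resume the iteration counter one past the saved iteration
    return checkpoint['iter'] + 1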