def main():
    """Entry point: parse CLI options, build the BOLD optic-flow loaders, and train Motion_CNN."""
    global arg
    arg = parser.parse_args()
    print(arg)

    # Prepare DataLoader over the BOLD optic-flow data, split '04'.
    flow_loader = dataloader.Motion_DataLoader(
        BATCH_SIZE=arg.batch_size,
        num_workers=0,
        path='../bold_data/BOLD_ijcv/BOLD_public/optic_flow/',
        ucf_list='../bold_data/BOLD_ijcv/BOLD_public/annotations/',
        ucf_split='04',
        in_channel=10,
    )
    train_loader, test_loader, test_video = flow_loader.run()

    # Collect loaders, checkpoint options and hyper-parameters, then build the model.
    model_kwargs = dict(
        train_loader=train_loader,
        test_loader=test_loader,
        start_epoch=arg.start_epoch,
        resume=arg.resume,
        evaluate=arg.evaluate,
        nb_epochs=arg.epochs,
        lr=arg.lr,
        batch_size=arg.batch_size,
        channel=10,
        test_video=test_video,
    )
    model = Motion_CNN(**model_kwargs)

    # Training
    model.run()
def main():
    """Entry point: parse CLI options, build flownet2 optic-flow loaders, and train Motion_CNN."""
    global arg
    arg = parser.parse_args()
    print(arg)

    # Prepare DataLoader for the flownet2 flow frames, UCF split '01'.
    loader_cfg = dict(
        BATCH_SIZE=arg.batch_size,
        num_workers=8,
        path=r"/mnt/disks/datastorage/videos/flownet2/",
        ucf_list=r"/home/mlp/two-stream-action-recognition/UCF_list/",
        ucf_split='01',
        in_channel=10,
    )
    train_loader, test_loader, test_video = dataloader.Motion_DataLoader(**loader_cfg).run()

    # Build the model; nb_classes and finetune are forwarded from the CLI.
    model = Motion_CNN(
        train_loader=train_loader,
        test_loader=test_loader,
        start_epoch=arg.start_epoch,
        resume=arg.resume,
        evaluate=arg.evaluate,
        nb_epochs=arg.epochs,
        lr=arg.lr,
        batch_size=arg.batch_size,
        channel=10 * 2,
        test_video=test_video,
        nb_classes=arg.nb_classes,
        finetune=arg.finetune,
    )

    # Training
    model.run()
def main():
    """Parse CLI arguments, build the tvl1 optic-flow loaders, and train Motion_CNN.

    Fix: the original used the Python 2 ``print arg`` statement, which is a
    syntax error under Python 3 (other variants in this file already use the
    print() function); it is now ``print(arg)``.
    """
    global arg
    arg = parser.parse_args()
    print(arg)

    # Prepare DataLoader (8 worker processes).
    data_loader = dataloader.Motion_DataLoader(
        BATCH_SIZE=arg.batch_size,
        num_workers=8,
        path='/media/semanticslab11/hdd1/data/two-stream-action/data/tvl1_flow/',
        ucf_list='UCF_list/',
        ucf_split='01',
        in_channel=10,
        bidirectional=arg.bidirectional)
    train_loader, test_loader, test_video = data_loader.run()

    # Model: loaders, checkpoint/eval options, and hyper-parameters.
    model = Motion_CNN(
        train_loader=train_loader,
        test_loader=test_loader,
        start_epoch=arg.start_epoch,
        resume=arg.resume,
        evaluate=arg.evaluate,
        nb_epochs=arg.epochs,
        lr=arg.lr,
        batch_size=arg.batch_size,
        channel=10 * 2,  # 10 stacked flow frames x (u, v) channels
        test_video=test_video)

    # Training
    model.run()
def main():
    """Entry point: parse CLI options, build the UCF101 flow loaders, and train Motion_CNN."""
    global arg
    arg = parser.parse_args()
    print(arg)

    # Prepare DataLoader over the tvl1 optical-flow frames, UCF split '01'.
    loader = dataloader.Motion_DataLoader(
        BATCH_SIZE=arg.batch_size,
        num_workers=8,
        path='/home/zdadadaz/Desktop/course/medical/data/UCF101_flow/tvl1_flow',
        ucf_list='./UCF_list/',
        ucf_split='01',
        in_channel=10,
        root_path='../',
    )
    train_loader, test_loader, test_video = loader.run()

    # Build the model from loaders and CLI hyper-parameters.
    net_kwargs = dict(
        train_loader=train_loader,
        test_loader=test_loader,
        start_epoch=arg.start_epoch,
        resume=arg.resume,
        evaluate=arg.evaluate,
        nb_epochs=arg.epochs,
        lr=arg.lr,
        batch_size=arg.batch_size,
        channel=10 * 2,
        test_video=test_video,
    )
    model = Motion_CNN(**net_kwargs)

    # Training
    model.run()
def main():
    """Parse CLI arguments, build the HMDB51 flow loaders, and train Motion_CNN.

    Fix: the original used the Python 2 ``print arg`` statement, which is a
    syntax error under Python 3; it is now ``print(arg)``.
    """
    global arg
    arg = parser.parse_args()
    print(arg)

    # Prepare DataLoader; train and test lists come from separate locations.
    data_loader = dataloader.Motion_DataLoader(
        BATCH_SIZE=arg.batch_size,
        num_workers=8,
        path='/home/lili/Video/datasets/HMDB51_concise/',
        train_ucf_list='/home/lili/Video/datasets/HMDB51_concise/list/',
        test_ucf_list='/ubc/cs/research/tracking-raid/candice/datasets/HMDB51/list/new_test_flow.list',
        ucf_split='01',
        in_channel=10,
    )
    train_loader, test_loader, test_video = data_loader.run()

    # Model: loaders, checkpoint/eval options, and hyper-parameters.
    model = Motion_CNN(
        train_loader=train_loader,
        test_loader=test_loader,
        start_epoch=arg.start_epoch,
        resume=arg.resume,
        evaluate=arg.evaluate,
        nb_epochs=arg.epochs,
        lr=arg.lr,
        batch_size=arg.batch_size,
        channel=10 * 2,  # 10 stacked flow frames x (u, v) channels
        test_video=test_video)

    # Training
    model.run()
def main():
    """Parse CLI arguments, build the UCF101 flow loaders, and train Motion_CNN.

    Fix: the original used the Python 2 ``print arg`` statement, which is a
    syntax error under Python 3; it is now ``print(arg)``.
    """
    global arg
    arg = parser.parse_args()
    print(arg)

    # Prepare DataLoader for the tvl1 optical-flow frames, UCF split '01'.
    data_loader = dataloader.Motion_DataLoader(
        BATCH_SIZE=arg.batch_size,
        num_workers=8,
        path='/home/ubuntu/data/UCF101/tvl1_flow/',
        ucf_list='/home/ubuntu/cvlab/pytorch/ucf101_two_stream/github/UCF_list/',
        ucf_split='01',
        in_channel=10,
    )
    train_loader, test_loader, test_video = data_loader.run()

    # Model: loaders, checkpoint/eval options, and hyper-parameters.
    model = Motion_CNN(
        train_loader=train_loader,
        test_loader=test_loader,
        start_epoch=arg.start_epoch,
        resume=arg.resume,
        evaluate=arg.evaluate,
        nb_epochs=arg.epochs,
        lr=arg.lr,
        batch_size=arg.batch_size,
        channel=10 * 2,  # 10 stacked flow frames x (u, v) channels
        test_video=test_video)

    # Training
    model.run()
def main():
    """Parse CLI arguments, build the UCF101 flow loaders, and train Motion_CNN.

    Fix: the original used the Python 2 ``print arg`` statement, which is a
    syntax error under Python 3; it is now ``print(arg)``.
    """
    global arg
    arg = parser.parse_args()
    print(arg)

    # Prepare DataLoader for the tvl1 optical-flow frames, UCF split '01'.
    # NOTE(review): ucf_list begins with a doubled slash; harmless on POSIX
    # but presumably a typo — kept byte-identical here.
    data_loader = dataloader.Motion_DataLoader(
        BATCH_SIZE=arg.batch_size,
        num_workers=8,
        path='/data/MM1/aps1/aniru/aniru/repo/dataset/UCF101/tvl1_flow',
        ucf_list='//data/MM1/aps1/aniru/aniru/repo/actionRecognition/UCF_list',
        ucf_split='01',
        in_channel=10,
    )
    train_loader, test_loader, test_video = data_loader.run()

    # Model: loaders, checkpoint/eval options, and hyper-parameters.
    model = Motion_CNN(
        train_loader=train_loader,
        test_loader=test_loader,
        start_epoch=arg.start_epoch,
        resume=arg.resume,
        evaluate=arg.evaluate,
        nb_epochs=arg.epochs,
        lr=arg.lr,
        batch_size=arg.batch_size,
        channel=10 * 2,  # 10 stacked flow frames x (u, v) channels
        test_video=test_video)

    # Training
    model.run()
def main():
    """Entry point: parse CLI options, build the flow loaders, and train Motion_CNN."""
    global arg
    arg = parser.parse_args()
    print(arg)

    # Prepare DataLoader over the tvl1 optical-flow frames, UCF split '01'.
    loader_cfg = dict(
        BATCH_SIZE=arg.batch_size,
        num_workers=8,
        path='/data/tvl1_flow/',
        ucf_list='/media/lsc/DATA/github/two-stream-action-recognition/UCF_list/',
        ucf_split='01',
        in_channel=10,
    )
    train_loader, test_loader, test_video = dataloader.Motion_DataLoader(**loader_cfg).run()

    # Build the model from loaders and CLI hyper-parameters.
    model = Motion_CNN(
        train_loader=train_loader,
        test_loader=test_loader,
        start_epoch=arg.start_epoch,
        resume=arg.resume,
        evaluate=arg.evaluate,
        nb_epochs=arg.epochs,
        lr=arg.lr,
        batch_size=arg.batch_size,
        channel=10 * 2,
        test_video=test_video,
    )

    # Training
    model.run()
def main():
    """Parse CLI arguments, build the UCF-101 flow loaders, and train Motion_CNN.

    Fix: the original used the Python 2 ``print arg`` statement, which is a
    syntax error under Python 3; it is now ``print(arg)``.
    """
    global arg
    arg = parser.parse_args()
    print(arg)

    # Prepare DataLoader; the list directory is resolved relative to the CWD.
    data_loader = dataloader.Motion_DataLoader(
        BATCH_SIZE=arg.batch_size,
        num_workers=8,
        path='/hdd/UCF-101/Data/optical-flow/',
        ucf_list=os.getcwd() + '/UCF_data_references/',
        ucf_split='01',
        in_channel=10,
    )
    train_loader, test_loader, test_video = data_loader.run()

    # Model: loaders, checkpoint/eval options, hyper-parameters, and demo flag.
    model = Motion_CNN(
        train_loader=train_loader,
        test_loader=test_loader,
        start_epoch=arg.start_epoch,
        resume=arg.resume,
        evaluate=arg.evaluate,
        nb_epochs=arg.epochs,
        lr=arg.lr,
        batch_size=arg.batch_size,
        channel=10 * 2,  # 10 stacked flow frames x (u, v) channels
        test_video=test_video,
        demo=arg.demo)

    # Training
    model.run()
def main():
    """Entry point: parse CLI options, build the optic-flow loaders, and train Motion_CNN."""
    global arg
    arg = parser.parse_args()
    print(arg)

    # Prepare DataLoader, UCF split '01'.
    flow_loader = dataloader.Motion_DataLoader(
        BATCH_SIZE=arg.batch_size,
        num_workers=8,
        path='/home/yzy20161103/csce636_project/project/opt_475/',
        ucf_list='/home/yzy20161103/csce636_project/project/UCF_list/',
        ucf_split='01',
        in_channel=10,
    )
    train_loader, test_loader, test_video = flow_loader.run()

    # Build the model from loaders and CLI hyper-parameters.
    net_kwargs = dict(
        train_loader=train_loader,
        test_loader=test_loader,
        start_epoch=arg.start_epoch,
        resume=arg.resume,
        evaluate=arg.evaluate,
        nb_epochs=arg.epochs,
        lr=arg.lr,
        batch_size=arg.batch_size,
        channel=10 * 2,
        test_video=test_video,
    )
    model = Motion_CNN(**net_kwargs)

    # Training
    model.run()
def main():
    """Entry point: parse CLI options, build the flow loaders, and train Motion_CNN on GPU 0."""
    global arg
    arg = parser.parse_args()
    print(arg)

    # Prepare DataLoader over the tvl1 optical-flow frames, UCF split '01'.
    loader_cfg = dict(
        BATCH_SIZE=arg.batch_size,
        num_workers=8,
        path='/home/hosseing/datasets/tvl1_flow/',
        ucf_list='/home/hosseing/REPOS/two stream/two-stream-action-recognition//UCF_list/',
        ucf_split='01',
        in_channel=10,
    )
    train_loader, test_loader, test_video = dataloader.Motion_DataLoader(**loader_cfg).run()

    # Building model
    model = Motion_CNN(
        train_loader=train_loader,
        test_loader=test_loader,
        start_epoch=arg.start_epoch,
        resume=arg.resume,
        evaluate=arg.evaluate,
        nb_epochs=arg.epochs,
        lr=arg.lr,
        batch_size=arg.batch_size,
        channel=10 * 2,
        test_video=test_video,
    )

    # Training, pinned to CUDA device 0.
    with torch.cuda.device(0):
        model.run()
def main():
    """Parse CLI arguments, build the UCF101 flow loaders, and train Motion_CNN.

    Fix: the original used the Python 2 ``print arg`` statement, which is a
    syntax error under Python 3; it is now ``print(arg)``.
    """
    global arg
    arg = parser.parse_args()
    print(arg)

    # Prepare DataLoader (12 worker processes).
    data_loader = dataloader.Motion_DataLoader(
        BATCH_SIZE=arg.batch_size,
        num_workers=12,
        path='/home/ubuntu/data/UCF101_optical_flow/step1/',
        ucf_list='/home/bassel/data/ucfTrainTestlist/',
        ucf_split='01',
        in_channel=10,
    )
    train_loader, test_loader, test_video = data_loader.run()

    # Model: loaders, checkpoint/eval options, and hyper-parameters.
    model = Motion_CNN(
        train_loader=train_loader,
        test_loader=test_loader,
        start_epoch=arg.start_epoch,
        resume=arg.resume,
        evaluate=arg.evaluate,
        nb_epochs=arg.epochs,
        lr=arg.lr,
        batch_size=arg.batch_size,
        channel=10 * 2,  # 10 stacked flow frames x (u, v) channels
        test_video=test_video)

    # Training
    model.run()
def main():
    """Entry point: parse CLI options, build the flow loaders, and train Motion_CNN."""
    global arg
    arg = parser.parse_args()
    print(arg)

    # Prepare DataLoader over Windows-hosted tvl1 flow frames; num_workers=0
    # keeps loading in the main process.
    flow_loader = dataloader.Motion_DataLoader(
        BATCH_SIZE=arg.batch_size,
        num_workers=0,
        path='D:\\Data\\UCF101\\ucf101_tvl1_flow\\tvl1_flow\\',
        ucf_list='D:\\Radha\\Downloads\\two-stream-action-recognition-master\\UCF_list\\',
        ucf_split='01',
        in_channel=10,
    )
    train_loader, test_loader, test_video = flow_loader.run()

    # Build the model from loaders and CLI hyper-parameters.
    net_kwargs = dict(
        train_loader=train_loader,
        test_loader=test_loader,
        start_epoch=arg.start_epoch,
        resume=arg.resume,
        evaluate=arg.evaluate,
        nb_epochs=arg.epochs,
        lr=arg.lr,
        batch_size=arg.batch_size,
        channel=10 * 2,
        test_video=test_video,
    )
    model = Motion_CNN(**net_kwargs)

    # Training
    model.run()
def main():
    """Parse CLI arguments, build two flow-loader pairs (UCF list and a custom
    'My_list' split), and train Motion_CNN on both.

    Fix: the original used the Python 2 ``print arg`` statement, which is a
    syntax error under Python 3; it is now ``print(arg)``.
    """
    global arg
    arg = parser.parse_args()
    print(arg)

    # Prepare DataLoader for the standard UCF list.
    data_loader = dataloader.Motion_DataLoader(
        BATCH_SIZE=arg.batch_size,
        num_workers=4,
        path='/home/niloofar/Work/Data/Motion/UCF-101/',
        ucf_list=os.getcwd() + '/UCF_list/',
        ucf_split='01',
        in_channel=10,
    )
    # Second loader over the same data but the custom 'My_list' split, with
    # its own batch size.
    data_loader_my = dataloader.Motion_DataLoader(
        BATCH_SIZE=arg.batch_size_my,
        num_workers=4,
        path='/home/niloofar/Work/Data/Motion/UCF-101/',
        ucf_list=os.getcwd() + '/My_list/',
        ucf_split='01',
        in_channel=10,
    )
    train_loader, test_loader, test_video = data_loader.run()
    train_loader_my, test_loader_my, test_video_my = data_loader_my.run()

    # Model: both loader pairs plus checkpoint/eval options and hyper-parameters.
    model = Motion_CNN(
        nb_epochs=arg.epochs,
        lr=arg.lr,
        batch_size=arg.batch_size,
        batch_size_my=arg.batch_size_my,
        resume=arg.resume,
        start_epoch=arg.start_epoch,
        evaluate=arg.evaluate,
        demo=arg.demo,
        channel=10 * 2,  # 10 stacked flow frames x (u, v) channels
        train_loader=train_loader,
        train_loader_my=train_loader_my,
        test_loader=test_loader,
        test_loader_my=test_loader_my,
        test_video=test_video,
        test_video_my=test_video_my,
    )

    # Training
    model.run()
def main():
    """Parse CLI arguments, build train/validation flow loaders, and train Motion_CNN.

    Fix: the original declared ``global arg`` but then bound the local name
    ``args`` — the global statement was dead code that never took effect, so
    it has been removed (behavior is unchanged: ``args`` stays local).
    """
    args = parser.parse_args()
    print(args)

    # Prepare DataLoader: one loader object per split, described by numpy files.
    train_loader_obj = dataloader.Motion_DataLoader(
        is_val=False,
        batch_size=args.batch_size,
        num_workers=1,
        root_dir=args.data_root,
        desc_numpy_file=args.train_desc,
        in_channel=SEQ_FRAME_QTY)
    val_loader_obj = dataloader.Motion_DataLoader(
        is_val=True,
        batch_size=args.batch_size,
        num_workers=1,
        root_dir=args.data_root,
        desc_numpy_file=args.test_desc,
        in_channel=SEQ_FRAME_QTY)
    train_loader = train_loader_obj.get_loader()
    val_loader = val_loader_obj.get_loader()

    # Model: loaders, checkpoint/eval options, and hyper-parameters.
    model = Motion_CNN(
        train_loader=train_loader,
        test_loader=val_loader,
        start_epoch=args.start_epoch,
        resume=args.resume,
        evaluate=args.evaluate,
        nb_epochs=args.epochs,
        lr=args.lr,
        patience=args.patience,
        batch_size=args.batch_size,
        nb_classes=args.class_qty,
        nb_classes_new=args.class_qty_new,
        channel=SEQ_FRAME_QTY * 2)  # flow frames x (u, v) channels

    # Training
    model.run()
def main():
    """Parse CLI arguments, build the ice-hockey event flow loaders, and train Motion_CNN.

    Fixes:
    * Python 2 ``print arg`` statement (syntax error under Python 3) is now
      ``print(arg)``.
    * ``weights`` was unbound when ``--weight_per_class`` was absent, which
      raised NameError at the Motion_CNN(...) call below; it now defaults to
      None so the model receives an explicit "no class weights" value.
    """
    global arg
    arg = parser.parse_args()
    print(arg)

    # Create the checkpoint directory on demand.
    if arg.checkpoint_path:
        if not os.path.exists(arg.checkpoint_path):
            os.makedirs(arg.checkpoint_path)

    # Optional per-class loss weights supplied on the command line.
    weights = None
    if arg.weight_per_class:
        weights = np.array(arg.weight_per_class)
        weights = torch.FloatTensor(weights)

    # Prepare DataLoader; train/test lists are explicit files, and the number
    # of stacked flow images is configurable via --num_imgs.
    data_loader = dataloader.Motion_DataLoader(
        BATCH_SIZE=arg.batch_size,
        num_workers=8,
        path='/home/candice/Documents/dataset_icehockey/events/flipped_data_correction/flow',
        train_ucf_list='/home/candice/Documents/dataset_icehockey/events/experiments/new/train_list.txt',
        test_ucf_list='/home/candice/Documents/dataset_icehockey/events/experiments/new/test_list.txt',
        ucf_split='01',
        in_channel=arg.num_imgs,
        frames_list=arg.frames_list)
    train_loader, test_loader, test_video = data_loader.run()

    # Model: loaders, checkpoint/eval options, and hyper-parameters.
    model = Motion_CNN(
        train_loader=train_loader,
        test_loader=test_loader,
        start_epoch=arg.start_epoch,
        resume=arg.resume,
        evaluate=arg.evaluate,
        nb_epochs=arg.epochs,
        lr=arg.lr,
        batch_size=arg.batch_size,
        channel=2 * arg.num_imgs,  # (u, v) channels per stacked flow image
        test_video=test_video,
        weights=weights)

    # Training
    model.run()
def main():
    """Entry point: parse CLI options, build the flow loaders, then train or evaluate Motion_CNN."""
    global arg
    arg = parser.parse_args()
    print(arg)

    # Prepare DataLoader over the tvl1 optical-flow frames, UCF split '01'.
    loader_cfg = dict(
        BATCH_SIZE=arg.batch_size,
        num_workers=8,
        path='/research/dept2/mli/Data/tvl1_flow/',
        ucf_list='./UCF_list/',
        ucf_split='01',
        in_channel=10,
    )
    train_loader, test_loader, test_video = dataloader.Motion_DataLoader(**loader_cfg).run()

    # Build the model from loaders and CLI hyper-parameters.
    model = Motion_CNN(
        train_loader=train_loader,
        test_loader=test_loader,
        start_epoch=arg.start_epoch,
        resume=arg.resume,
        evaluate=arg.evaluate,
        nb_epochs=arg.epochs,
        lr=arg.lr,
        batch_size=arg.batch_size,
        channel=10 * 2,
        test_video=test_video,
    )

    # Either run a full training loop, or restore a checkpoint and evaluate only.
    if not arg.evaluate:
        model.run()
    else:
        cudnn.benchmark = True
        model.build_model()
        model.resume_and_evaluate()
from utils import *
import torch.optim as optim
import dataloader
import torch
import os
from ResidualNetwork import *

# Restrict training to the first GPU.
os.environ["CUDA_VISIBLE_DEVICES"] = "0"

# Prepare DataLoader.
# Fix: the Python 2 print statements below were syntax errors under Python 3;
# they are now print() calls (output is unchanged).
print('*************************Prepare DataLoader********************************')
data_loader = dataloader.Motion_DataLoader(
    BATCH_SIZE=10,
    num_workers=8,
    path='/media/ming/DATADRIVE1/UCF101 Dataset/tvl1_flow/',
    UCF_list='/media/ming/DATADRIVE1/UCF101 Multi-stream Code/UCF_list/',
    in_channel=10  # NEW ADD
)
train_loader, test_loader, test_video = data_loader.run()

# Build the model: ResNet-50 over 10 stacked flow frames x (u, v) channels.
model = resnet50(pretrained=False, channel=10 * 2).cuda()
criterion = nn.CrossEntropyLoss().cuda()
optimizer = optim.SGD(model.parameters(), lr=0.01, momentum=0.9)


def train_1epoch(epoch):
    # Log the start of one training epoch.
    print('---------train execute ', epoch, ' epoch')