def main():
    args = parser.parse_args()

    # The output path must point to a file, not a directory
    is_file = '.' in args.out.split('/')[-1]
    assert is_file, 'Output must be file'
    util.rm_if_needed(args.out)
    util.makedirs_if_needed(args.out)

    # Collect the .mat files to convert
    in_files = list(glob.glob(args.in_dir + '/*.mat'))
    assert len(in_files) > 0, 'No mat files found'

    process(in_files, args.out, args.in_key, args.out_group)

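# A minimal sketch of what the `process` call above could look like; the real
# implementation lives elsewhere in this module. It assumes hdf5 output
# (suggested by the `out_group` argument) and that `in_key` selects the
# relevant variable from each loaded .mat dict -- both are assumptions, not
# confirmed by this file.
import os
import h5py
import scipy.io

def _process_mat_sketch(in_files, out_path, in_key, out_group):
    with h5py.File(out_path, 'w') as out:
        group = out.create_group(out_group)
        for path in in_files:
            data = scipy.io.loadmat(path)[in_key]  # one array per .mat file
            name = os.path.basename(path).rsplit('.', 1)[0]
            group.create_dataset(name, data=data)
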
def main():
    from attend.pre.util import rm_if_needed
    args, rest_args = parser.parse_known_args()

    # One tfrecords file per fold
    folds_files = sorted(glob.glob(args.folds_dir + '/*tfrecords'))
    fold_indices = create_folds(len(folds_files))

    # Set up the log and job directories, clearing any previous run
    if args.base_dir:
        log_dir = args.base_dir
    else:
        log_dir = '/vol/bitbucket/rv1017/log-' + args.prefix
    rm_if_needed(log_dir, True)
    job_dir = log_dir + '/jobs'
    os.makedirs(log_dir, exist_ok=True)
    os.makedirs(job_dir, exist_ok=True)

    from attend.util import dict_to_args
    from attend.condor import generate_job

    fold_info = {}
    rest_args += ['--val_batch_size=8', '--keep_all_checkpoints=1']

    # Write one HTCondor job description (classad) per cross-validation fold
    for train_idxs, val_idx, test_idx in fold_indices:
        train_idxs = list(train_idxs)
        fold_idx = train_idxs[0] + 1
        train_files = [folds_files[i] for i in train_idxs]
        val_file = folds_files[val_idx]
        test_file = folds_files[test_idx]

        pargs = dict(data_file=train_files, val_data=val_file)
        pargs_str = dict_to_args(pargs)
        job_prefix = '{}.{}'.format(args.prefix, fold_idx)
        job_desc = generate_job(job_prefix,
                                rest_args=pargs_str.split(' ') + rest_args,
                                base_log_path=log_dir)

        with open('{}/{}.classad'.format(job_dir, fold_idx), 'w') as f:
            f.write(job_desc)

        fold_info[fold_idx] = dict(prefix=job_prefix, train_idx=train_idxs,
                                   val_idx=val_idx, test_idx=test_idx)

    # Record the fold layout so results can be matched back to their folds
    with open(log_dir + '/folds.cson', 'w') as f:
        cson.dump(fold_info, f, sort_keys=True, indent=4)

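# `create_folds` is called above but not defined in this file. A sketch of an
# implementation consistent with how its result is consumed (an iterable of
# (train_idxs, val_idx, test_idx) tuples over n fold files); the exact
# rotation scheme below is an assumption.
def create_folds_sketch(n):
    folds = []
    for test_idx in range(n):
        val_idx = (test_idx + 1) % n  # assumed: validate on the next fold
        train_idxs = [i for i in range(n) if i not in (test_idx, val_idx)]
        folds.append((train_idxs, val_idx, test_idx))
    return folds
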
def main():
    args = parser.parse_args()
    if args.out is None:
        args.out = args.in_file.replace('.hdf5', '.tfrecords')

    is_file = '.' in args.out.split('/')[-1]
    assert is_file, 'Output must be file'
    util.rm_if_needed(args.out)
    util.makedirs_if_needed(args.out)

    in_ext = args.in_file.split('.')[-1]
    assert in_ext in ['hdf5'], 'Unsupported format {}'.format(in_ext)

    process(args.in_file, args.out)

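# A sketch of the hdf5 -> tfrecords conversion that `process` performs above
# (hypothetical: the real feature layout and record keys are defined
# elsewhere). Uses the TF 1.x record writer API.
import h5py
import numpy as np
import tensorflow as tf

def _hdf5_to_tfrecords_sketch(in_file, out_file):
    with h5py.File(in_file, 'r') as f, \
            tf.python_io.TFRecordWriter(out_file) as writer:
        for name in f:  # one hdf5 dataset per sequence (assumed layout)
            feats = np.asarray(f[name], dtype=np.float32)
            example = tf.train.Example(features=tf.train.Features(feature={
                'features': tf.train.Feature(
                    float_list=tf.train.FloatList(value=feats.ravel())),
            }))
            writer.write(example.SerializeToString())
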
def main():
    args = parser.parse_args()
    if not os.path.isfile(args.in_file):
        raise Exception('Input file `{}` does not exist'.format(args.in_file))

    # Default the output path to `<input basename>-combined.hdf5`
    if args.out is None:
        basename = '.'.join(args.in_file.split('.')[:-1])
        args.out = basename + '-combined.hdf5'

    is_file = '.' in args.out.split('/')[-1]
    assert is_file, 'Directory output not supported yet, try file ext'
    util.rm_if_needed(args.out)
    util.makedirs_if_needed(args.out)

    in_ext = args.in_file.split('.')[-1]
    assert in_ext in ['hdf5'], 'Unsupported format {}'.format(in_ext)

    process(args.in_file, args.out)

def main():
    args = parser.parse_args()

    # Default the output path to `<input basename>-split.hdf5`
    if args.out is None:
        basename = os.path.basename(args.in_file)
        dirname = os.path.dirname(args.in_file)
        args.out = dirname + '/' + '.'.join(
            basename.split('.')[:-1]) + '-split.hdf5'

    util.rm_if_needed(args.out)
    util.makedirs_if_needed(args.out)

    def _split_key_value_pair(s):
        key, value = s.split('=')
        return key, int(value)

    # Parse space-separated `name=size` pairs into (name, size) tuples
    splits = list(map(_split_key_value_pair, args.splits.split(' ')))
    print(splits)

    process(args.in_file, args.out, splits)

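# Example of the `--splits` parsing above, with a hypothetical argument
# value: 'train=7 val=2 test=1' parses to
# [('train', 7), ('val', 2), ('test', 1)]; the semantics of the sizes are up
# to `process`.
def _demo_split_parsing():
    splits_arg = 'train=7 val=2 test=1'  # hypothetical command-line value
    splits = [(k, int(v)) for k, v in
              (s.split('=') for s in splits_arg.split(' '))]
    assert splits == [('train', 7), ('val', 2), ('test', 1)]
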
def main():
    args = parser.parse_args()
    if not os.path.isdir(args.in_dir):
        raise Exception('Input directory `{}` does not exist'.format(
            args.in_dir))

    is_file = '.' in args.out.split('/')[-1]
    assert is_file, 'Directory output not supported yet, try file ext'
    util.rm_if_needed(args.out)
    util.makedirs_if_needed(args.out)

    out_ext = args.out.split('.')[-1]
    assert out_ext in ['hdf5'], 'Unsupported format {}'.format(out_ext)

    vid_dirs = list(util.find_deepest_dirs(args.in_dir))
    assert len(vid_dirs) != 0, 'Could not find any vids'
    print('Found {} directories'.format(len(vid_dirs)))

    process_vids(vid_dirs, args.out)

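# `util.find_deepest_dirs` is assumed to yield the leaf directories of the
# tree under `root`, one per video (an assumption based on how it is used
# here). A sketch under that reading:
import os

def find_deepest_dirs_sketch(root):
    for dirpath, dirnames, _ in os.walk(root):
        if not dirnames:  # no subdirectories: a leaf, assumed to hold frames
            yield dirpath
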
def main():
    args = parser.parse_args()
    if not os.path.isdir(args.in_dir):
        raise Exception('Input directory `{}` does not exist'.format(
            args.in_dir))

    is_file = '.' in args.out.split('/')[-1]
    assert is_file, 'Directory output not supported yet, try file ext'
    util.rm_if_needed(args.out)
    util.makedirs_if_needed(args.out)

    out_ext = args.out.split('.')[-1]
    assert out_ext in ['hdf5'], 'Unsupported format {}'.format(out_ext)

    # Optionally load the mean pixel, to be subtracted from every frame
    if args.subtract_mean_pixel:
        mean_pixel = np.load(args.mean_pixel)
    else:
        print('Not using the mean pixel value')
        mean_pixel = None

    vid_dirs = list(util.find_deepest_dirs(args.in_dir))
    assert len(vid_dirs) != 0, 'Could not find any vids'
    print('Found {} directories'.format(len(vid_dirs)))

    # Create a generator that generates batches of features per video
    vids = process_vids(vid_dirs, model_key=args.feature, debug=args.debug,
                        batch_size=args.batch_size, mean_pixel=mean_pixel)

    # Write out the batches of features
    writers[out_ext](vids, args.out)

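# How the mean pixel is presumably applied inside `process_vids` (an
# assumption; that code is not shown here): subtract the stored mean from
# each frame before feature extraction.
import numpy as np

def _subtract_mean_pixel_sketch(frame, mean_pixel):
    frame = frame.astype(np.float32)
    if mean_pixel is not None:
        frame -= mean_pixel  # broadcasts e.g. a (3,)-shaped mean over HxWx3
    return frame
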
def main():
    args = parser.parse_args()
    if not os.path.isdir(args.in_dir):
        raise Exception('Input directory `{}` does not exist'.format(
            args.in_dir))

    is_file = '.' in args.out.split('/')[-1]
    assert is_file, 'Output must be file'
    util.rm_if_needed(args.out)
    util.makedirs_if_needed(args.out)

    out_ext = args.out.split('.')[-1]
    assert out_ext in ['npy'], 'Unsupported format {}'.format(out_ext)

    vid_dirs = list(util.find_deepest_dirs(args.in_dir))
    assert len(vid_dirs) != 0, 'Could not find any vids'
    print('Found {} directories'.format(len(vid_dirs)))

    # Compute the mean pixel over all video frames
    pixel = process_vids(vid_dirs)

    # Write out the result
    writers[out_ext](pixel, args.out)

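# `writers` maps an output extension to a writer callable and is defined
# elsewhere in the module. For the 'npy' case used above, a minimal sketch
# with an assumed (data, out_path) signature:
import numpy as np

writers_sketch = {
    'npy': lambda pixel, path: np.save(path, pixel),
}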