def load_config(args):
    """
    Given the arguments, load and initialize the configs.
    Args:
        args (argument): arguments include `shard_id`, `num_shards`,
            `init_method`, `cfg_file`, and `opts`.
    """
    # Set up cfg.
    cfg = get_cfg()
    # Load config from the cfg file.
    if args.cfg_file is not None:
        cfg.merge_from_file(args.cfg_file)
    # Load config from the command line; `opts` overwrites values from the file.
    if args.opts is not None:
        cfg.merge_from_list(args.opts)

    # Inherit parameters from args.
    if hasattr(args, "num_shards") and hasattr(args, "shard_id"):
        cfg.NUM_SHARDS = args.num_shards
        cfg.SHARD_ID = args.shard_id
    if hasattr(args, "rng_seed"):
        cfg.RNG_SEED = args.rng_seed
    if hasattr(args, "output_dir"):
        cfg.OUTPUT_DIR = args.output_dir

    # Create the checkpoint dir.
    cu.make_checkpoint_dir(cfg.OUTPUT_DIR)
    return cfg
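# A minimal, hypothetical sketch of the CLI that could feed load_config: the
# flag names mirror the attributes the function reads, but this parser is an
# assumption, not necessarily the repository's actual entry point.
import argparse


def parse_args():
    parser = argparse.ArgumentParser(description="Hypothetical entry point.")
    parser.add_argument("--cfg_file", type=str, default=None)
    parser.add_argument("--num_shards", type=int, default=1)
    parser.add_argument("--shard_id", type=int, default=0)
    parser.add_argument("--init_method", type=str, default="tcp://localhost:9999")
    # Remaining KEY VALUE pairs are forwarded to cfg.merge_from_list.
    parser.add_argument("opts", nargs=argparse.REMAINDER, default=None)
    return parser.parse_args()


if __name__ == "__main__":
    cfg = load_config(parse_args())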
def test_dataset():
    from config.defaults import get_cfg

    cfg = get_cfg()
    dataset = ExampleDataset(cfg, train=True)
    x, y = dataset[4]
    print(x.shape)
    print(y)
def main(args):
    cfg = get_cfg()
    if args.cfg_file:
        cfg.merge_from_file(args.cfg_file)
    if args.opts is not None:
        cfg.merge_from_list(args.opts)
    cfg.freeze()

    solver = Solver(cfg)
    if cfg.MODE in ["train", "training"]:
        solver.train(cfg.TRAIN.NUM_EPOCHS)
    elif cfg.MODE in ["validate", "validation"]:
        solver.evaluate(split=cfg.VAL.SPLIT)
    elif cfg.MODE in ["test", "testing"]:
        solver.inference(split=cfg.TEST.SPLIT, batch_size=cfg.TEST.BATCH_SIZE)
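# A hypothetical invocation of main: the config path is illustrative only, and
# the opts list assumes yacs-style merge_from_list semantics (alternating
# KEY, VALUE entries) to override MODE and TRAIN.NUM_EPOCHS from the YAML file.
from argparse import Namespace

main(Namespace(
    cfg_file="configs/example.yaml",   # assumed path, for illustration only
    opts=["MODE", "train", "TRAIN.NUM_EPOCHS", "50"],
))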
def load_config(args):
    """
    Given the arguments, load and initialize the configs.
    Args:
        args (argument): arguments include `shard_id`, `num_shards`,
            `init_method`, `cfg_file`, and `opts`.
    """
    # Set up cfg.
    cfg = get_cfg()
    # Load config from the cfg file.
    if args.cfg_file is not None:
        cfg.merge_from_file(args.cfg_file)
    # Load config from the command line; `opts` overwrites values from the file.
    if args.opts is not None:
        cfg.merge_from_list(args.opts)
    if args.test:
        cfg.TRAIN.ENABLE = False
        cfg.TEST.ENABLE = True

    # Inherit parameters from args.
    if hasattr(args, "num_shards") and hasattr(args, "shard_id"):
        cfg.NUM_SHARDS = args.num_shards
        cfg.SHARD_ID = args.shard_id
    if hasattr(args, "rng_seed"):
        cfg.RNG_SEED = args.rng_seed
    if hasattr(args, "output_dir"):
        cfg.OUTPUT_DIR = args.output_dir

    # Name the log file and output dirs after the config file.
    cfg_file_name = args.cfg_file.split('/')[-1].split('.yaml')[0]
    cfg.LOG_NAME = cfg_file_name + '.log'
    cfg.OUTPUT_DIR = os.path.join(cfg.OUTPUT_DIR, cfg_file_name)
    cfg.TEST.OUTPUT_DIR = os.path.join(cfg.TEST.OUTPUT_DIR, cfg_file_name)

    # Create the checkpoint dir.
    cu.make_checkpoint_dir(cfg.OUTPUT_DIR)
    return cfg
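# Worked example with hypothetical values: if args.cfg_file is
# "configs/vidsitu.yaml", cfg.OUTPUT_DIR is "outputs", and cfg.TEST.OUTPUT_DIR
# is "test_outputs", the naming logic above produces:
#   cfg.LOG_NAME        -> "vidsitu.log"
#   cfg.OUTPUT_DIR      -> "outputs/vidsitu"
#   cfg.TEST.OUTPUT_DIR -> "test_outputs/vidsitu"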
        fuser_input = env_agent_cat_features[:, smpl_bgn:smpl_end].contiguous()
        fuser_input = fuser_input.view(-1, 2, ft_sz).permute(1, 0, 2)
        fuser_output = self.agents_environment_fuser(fuser_input)
        fuser_output = torch.mean(fuser_output, dim=0)
        context_features[:, smpl_bgn:smpl_end] = fuser_output.view(
            bsz, -1, ft_sz)
        return self.event_detector(context_features.permute(0, 2, 1))


if __name__ == '__main__':
    cfg = get_cfg()
    model = EventDetection(cfg)

    batch_size = 1
    temporal_dim = 100
    box_dim = 4
    feature_dim = 2304
    env_input = torch.randn(batch_size, temporal_dim, feature_dim)
    agent_input = torch.randn(batch_size, temporal_dim, box_dim, feature_dim)
    # np.random.randint(0, 1, ...) only yields zeros, so the padding mask is
    # all False (no padded agent slots).
    agent_padding_mask = torch.tensor(
        np.random.randint(0, 1, (batch_size, temporal_dim, box_dim))).bool()

    a, b, c = model(env_input, agent_input, agent_padding_mask)
    print(a.shape, b.shape, c.shape)