# In[ ]:

# Load the dataset pickle (a dict with 'trainset'/'testset' keys, each holding
# 'img' and 'pts' arrays — presumably images and landmark points; verify against
# the pickling code).
# NOTE(review): pickle.load is unsafe on untrusted files — only load pickles you
# produced yourself.
# Use a context manager so the file handle is closed (the original leaked it).
with open(data_dir + conf.dataset + ".pickle", "rb") as f:
    db_helen = pickle.load(f)

# Print the data structure.
print(db_helen.keys())
print(db_helen['trainset'].keys())

# Print the shape of the training set.
print(db_helen['trainset']['pts'].shape)
print(db_helen['trainset']['img'].shape)

# Print the shape of the testing set.
print(db_helen['testset']['pts'].shape)
print(db_helen['testset']['img'].shape)

# Declare the data iterators for training and validation batches.
train_batches = utils.get_batch(db_helen['trainset']['img'],
                                db_helen['trainset']['pts'],
                                batch_size=conf.batch_size)
valid_batches = utils.get_batch(db_helen['testset']['img'],
                                db_helen['testset']['pts'],
                                batch_size=conf.batch_size)

# In[ ]:

# Load the precomputed Gaussian filter map (presumably used to build heatmap
# targets for the loss below — confirm against downstream usage).
with open(data_dir + "gau_filter.pickle", "rb") as f:
    gaumap = pickle.load(f)

# # Loss for Heatmap

# In[ ]: