def test(fn, limit=15):
    """Time one nearest-neighbour lookup against the module-level ``index``.

    Loads the pickled data file, takes the feature vector of the first
    frame of the first key, and searches the index for its ``limit``
    nearest neighbours.

    Args:
        fn: pickle filename inside the module-level ``data_dir``.
        limit: number of neighbours to request from the index.

    Returns:
        Wall-clock seconds the ``index.search`` call took.
    """
    data = config.load_pickle(data_dir, fn)
    first_key = next(iter(data.keys()))
    first_frame = next(iter(data[first_key].keys()))
    query = data[first_key][first_frame]
    started = time.time()
    # D/I are the distances and ids returned by FAISS; only timing is used here.
    D, I = index.search(np.array([query]).astype('float32'), limit)
    elapsed = time.time() - started
    print("Lookup time: {:.1f}s".format(elapsed))
    return elapsed
def load_data(description, network):
    """Restore saved training state (counters, weights, memory) from disk.

    Args:
        description: unused here; kept for interface compatibility with callers.
        network: agent whose model weights and replay memory are restored
            in place when the corresponding files exist.

    Returns:
        Tuple ``(games_played, scores, network)``; defaults to
        ``(1, [], network)`` when no prior state is on disk.
    """
    games_played = config.load_pickle("games_played")
    scores = config.load_pickle("scores")
    if games_played is None or scores is None:
        print("Starting from scratch")
        games_played = 1
        scores = []
        return games_played, scores, network
    weights_file = config.get_full_path("weights.hdf5")
    if config.exists(weights_file):
        network.model.load_weights(weights_file)
        print("Weights loaded")
    memory_file = config.get_full_path("memory_states")
    if config.exists(memory_file):
        network.memory = config.load_pickle("memory_states")
        print("Memory loaded")
    outcomes_file = config.get_full_path("outcomes")
    if config.exists(outcomes_file):
        # BUG FIX: the original assigned the "outcomes" pickle to
        # ``network.memory``, silently clobbering the replay memory loaded
        # just above. Store outcomes on their own attribute instead.
        # NOTE(review): confirm ``outcomes`` is the attribute name the rest
        # of the project expects.
        network.outcomes = config.load_pickle("outcomes")
        print("Outcomes loaded")
    return games_played, scores, network
def train(which_dir, fn):
    """Train the module-level FAISS ``index`` on features from a pickle.

    Collects the per-frame feature vectors (field chosen by
    ``recipe.features.field``) of every entry into one float32 matrix and
    feeds that matrix to ``index.train``.

    Args:
        which_dir: directory passed to ``config.load_pickle``.
        fn: pickle filename inside that directory.

    Returns:
        Wall-clock seconds spent in ``index.train``.
    """
    data = config.load_pickle(which_dir, fn)
    field = recipe.features.field
    rows = []
    for video in data.keys():
        per_frame = data[video]['metadata'][field]
        for frame in per_frame.keys():
            rows.append(per_frame[frame])
    feats = np.array(rows).astype('float32')
    # n vectors of dimensionality d (d must match the index configuration).
    n, d = feats.shape
    started = time.time()
    index.train(feats)
    train_time = time.time() - started
    print("Train time: {:.1f}s".format(train_time))
    return train_time
# --- Script setup: CLI arguments, recipe, and dataset directory layout. ---
# ``parser`` and ``config`` are defined earlier in the file (outside this view).
parser.add_argument('--limit', type=int, default=10)
parser.add_argument('--test_count', type=int, default=10)
opt = parser.parse_args()
recipe = config.load(opt.config)
dataset = recipe.dataset
# All dataset paths are resolved relative to the repository root (one level
# above this script's directory).
base_dir = path.join(path.dirname(path.abspath(__file__)), "..")
data_dir = path.join(base_dir, "datasets", dataset, "data")
pkl_fn = "index.pkl"
index_dir = path.join(base_dir, "datasets", dataset, "index")
html_dir = path.join(base_dir, "datasets", dataset, "web")
html_fn = path.join(base_dir, "datasets", dataset, "web", "index.html")
data = config.load_pickle(data_dir, pkl_fn)
os.makedirs(html_dir, exist_ok=True)
# Start the HTML report page: truncate any previous file and write the inline
# CSS used by the result sections appended later.
with open(html_fn, "w") as f:
    f.write("""
<style>
section { margin-bottom: 40px; padding-bottom: 20px; border-bottom: 2px solid #ddd; }
.result div { display: inline-block; }
.result img { width: 200px; max-height: 90px; }
.result span { display: block; }
</style>
""")
# TODO: note if our test data is actually verified
verified = 1
def add(file_index, fn, verified='0'):
    """Add all photos and videos from one pickled data file to ``file_index``.

    Args:
        file_index: index object forwarded to ``add_photos``/``add_videos``.
        fn: data-file name, loaded from ``data_dir``/data/<fn>.
        verified: verification flag forwarded unchanged (string '0' default).

    Returns:
        The combined result of ``add_photos`` and ``add_videos``.
    """
    pickle_name = path.join("data", str(fn))
    data = config.load_pickle(data_dir, pickle_name)
    photos_added = add_photos(file_index, data, verified)
    videos_added = add_videos(file_index, data, verified)
    return photos_added + videos_added