def prepare_network(self, run, epoch):
    """Build (or reuse) the configured network, load its weights, and
    return the embedding (core) sub-model.

    Args:
        run: run identifier passed to ``self.Weights`` to locate weights.
        epoch: epoch identifier passed to ``self.Weights``.

    Returns:
        The core embedding model extracted from the full architecture.

    Raises:
        ValueError: if ``self.network`` is not a recognized network id.
        RuntimeError: if no weights file is found for (run, epoch).
    """
    if self.model is None:
        # Construct the architecture matching the configured network id.
        # All variants share loader/input/output/normalize/pooling config.
        if self.network == 'dir':
            model = DirectArch(miniXception_loader, self.net_input_shape,
                               objective="malignancy", output_size=self.net_out_size,
                               normalize=self.net_normalize, pooling=self.net_pool)
        elif self.network == 'siam':
            model = SiamArch(miniXception_loader, self.net_input_shape, distance='l2',
                             output_size=self.net_out_size, normalize=self.net_normalize,
                             pooling=self.net_pool)
        elif self.network == 'dirR':
            model = DirectArch(miniXception_loader, self.net_input_shape,
                               objective="rating", output_size=self.net_out_size,
                               normalize=self.net_normalize, pooling=self.net_pool)
        elif self.network == 'siamR':
            model = SiamArch(miniXception_loader, self.net_input_shape, distance='l2',
                             output_size=self.net_out_size, normalize=self.net_normalize,
                             pooling=self.net_pool, objective="rating")
        elif self.network == 'trip':
            model = TripArch(miniXception_loader, self.net_input_shape, distance='l2',
                             output_size=self.net_out_size, normalize=self.net_normalize,
                             pooling=self.net_pool, categorize=self.categorize)
        else:
            # Was `assert (False)`: asserts are stripped under `python -O`,
            # which would leave `model` unbound here. Raise explicitly.
            raise ValueError('unknown network type: {}'.format(self.network))
    else:
        model = self.model

    w = self.Weights(run=run, epoch=epoch)
    if w is None:
        # Was `assert (w is not None)` — same -O stripping hazard as above.
        raise RuntimeError('no weights found for run={}, epoch={}'.format(run, epoch))

    # Only the 'dir' network skips re-pooling when extracting the core.
    if self.network == 'dir':
        embed_model = model.extract_core(weights=w, repool=False)
    else:
        embed_model = model.extract_core(weights=w)

    return embed_model
# Evaluate embedding-space statistics (hubness, symmetry, Kumar tau,
# concentration, relative contrast) for each pooling option, caching the
# per-pooling results to disk so repeated runs skip the expensive model
# evaluation.
H_lim, S_lim, Tau_lim, Conc_lim, Crst_lim = (1e6, 0), (1e6, 0), (1e6, 0), (1e6, 0), (1e6, 0)
for p, pooling in enumerate(pooling_options):
    print('Evaluating {} pooling'.format(pooling))
    h, s, tau, conc, crst = [], [], [], [], []
    plot_data_filename = './Plots//Data/init_{}.p'.format(pooling)
    try:
        # Use a context manager so the cache file is always closed
        # (the original `pickle.load(open(...))` leaked the handle).
        with open(plot_data_filename, 'br') as cache_file:
            h, s, tau, conc, crst = pickle.load(cache_file)
        print('loaded cached data for ' + pooling)
    # Narrowed from a bare `except:` (which also swallowed
    # KeyboardInterrupt and real bugs) to the cache-miss/corruption cases.
    except (OSError, EOFError, pickle.UnpicklingError):
        start = timer()
        for i in range(repeatitions):
            print('\tRep # {}'.format(i))
            model = DirectArch(miniXception_loader, input_shape, objective="malignancy",
                               pooling=pooling, output_size=out_size, normalize=True)
            core = model.extract_core()
            embed = core.predict(np.expand_dims(images, axis=-1), batch_size=32)
            # k-NN over the embeddings; drop column 0 (each point's self-match).
            nbrs = NearestNeighbors(n_neighbors=N, algorithm='auto', metric=metric).fit(embed)
            distances, indices = nbrs.kneighbors(embed)
            distances, indices = distances[:, 1:], indices[:, 1:]
            h += [index.calc_hubness(indices)[0]]
            s += [index.calc_symmetry(indices)[0]]
            tau += [index.kumar(distances, res=0.01)[0]]
            conc += [index.concentration(distances)]
            crst += [index.relative_contrast_imp(distances)]
        with open(plot_data_filename, 'bw') as cache_file:
            pickle.dump((h, s, tau, conc, crst), cache_file)
        print('evaluated (and cached) {} in {:.1f} minutes '.format(pooling, (timer() - start) / 60))