def query_arch(self,
               arch=None,
               train=True,
               encode_paths=True,
               deterministic=True,
               epochs=50):

    if self.search_space == 'nasbench':
        if arch is None:
            arch = Cell.random_cell(self.nasbench)
        if encode_paths:
            encoding = Cell(**arch).encode_paths()
        else:
            encoding = Cell(**arch).encode_cell()
        if train:
            val_loss = Cell(**arch).get_val_loss(self.nasbench, deterministic)
            test_loss = Cell(**arch).get_test_loss(self.nasbench)
            return (arch, encoding, val_loss, test_loss)
        else:
            return (arch, encoding)
    else:
        if arch is None:
            arch = Arch.random_arch()
        if encode_paths:
            encoding = Arch(arch).encode_paths()
        else:
            encoding = arch
        if train:
            val_loss, test_loss = Arch(arch).query(epochs=epochs)
            return (arch, encoding, val_loss, test_loss)
        else:
            return (arch, encoding)
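A hedged sketch (not part of the original source) of how the tuple-returning query_arch above could be used to assemble training data for an accuracy predictor: the path encodings become features and the validation losses become labels. The helper name and the variable ss (an instance of the surrounding search-space class) are assumptions for illustration.

import numpy as np

def build_predictor_data(ss, n_arches=100):
    # ss is assumed to be an instance of the surrounding search-space class.
    # query_arch() above returns (arch, encoding, val_loss, test_loss) when train=True.
    xs, ys = [], []
    for _ in range(n_arches):
        _, encoding, val_loss, _ = ss.query_arch(train=True, encode_paths=True)
        xs.append(encoding)
        ys.append(val_loss)
    return np.array(xs), np.array(ys)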
@classmethod
def generate_distance_matrix(cls, arches_1, arches_2, distance):
    matrix = np.zeros([len(arches_1), len(arches_2)])
    for i, arch_1 in enumerate(arches_1):
        for j, arch_2 in enumerate(arches_2):
            matrix[i][j] = Cell(**arch_1).distance(Cell(**arch_2),
                                                   dist_type=distance)
    return matrix
def get_hash(self, arch):
    # return the path indices of the architecture, used as a hash
    if self.search_space == 'nasbench':
        return Cell(**arch).get_path_indices()
    elif self.search_space == 'darts':
        return Arch(arch).get_path_indices()[0]
    else:
        return Cell(**arch).get_string()
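A minimal usage sketch (not from the original source) that de-duplicates randomly sampled cells by the hash above; ss is assumed to be an instance of the surrounding search-space class with its tabular benchmark loaded as ss.nasbench.

def sample_unique_arches(ss, n_samples=100):
    # ss is an assumed instance of the surrounding search-space class.
    seen, unique_arches = set(), []
    for _ in range(n_samples):
        arch = Cell.random_cell(ss.nasbench)
        h = ss.get_hash(arch)
        if h not in seen:
            seen.add(h)
            unique_arches.append(arch)
    return unique_arches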
def get_hash(self, arch):
    # return a unique hash of the architecture+fidelity
    # we use path indices + epochs
    if self.search_space == 'nasbench':
        return Cell(**arch).get_path_indices()
    elif self.search_space == 'darts':
        return Arch(arch).get_path_indices()[0]
    else:
        return Cell(**arch).get_string()
def get_arch_list(self,
                  aux_file_path,
                  distance=None,
                  iteridx=0,
                  num_top_arches=5,
                  max_edits=20,
                  num_repeats=5,
                  random_encoding='adj',
                  verbose=1):
    # Method used for gp_bayesopt

    if self.search_space == 'darts':
        print('get_arch_list only supported for nasbench and nasbench_201')
        sys.exit()

    # load the list of architectures chosen by bayesopt so far
    base_arch_list = pickle.load(open(aux_file_path, 'rb'))
    top_arches = [archtuple[0] for archtuple in base_arch_list[:num_top_arches]]
    if verbose:
        top_5_loss = [archtuple[1][0] for archtuple in base_arch_list[:min(5, len(base_arch_list))]]
        print('top 5 val losses {}'.format(top_5_loss))

    # perturb the best k architectures
    dic = {}
    for archtuple in base_arch_list:
        path_indices = Cell(**archtuple[0]).get_path_indices()
        dic[path_indices] = 1

    new_arch_list = []
    for arch in top_arches:
        for edits in range(1, max_edits):
            for _ in range(num_repeats):
                #perturbation = Cell(**arch).perturb(self.nasbench, edits)
                perturbation = Cell(**arch).mutate(self.nasbench, edits)
                path_indices = Cell(**perturbation).get_path_indices()
                if path_indices not in dic:
                    dic[path_indices] = 1
                    new_arch_list.append(perturbation)

    # make sure new_arch_list is not empty
    while len(new_arch_list) == 0:
        for _ in range(100):
            arch = Cell.random_cell(self.nasbench, random_encoding=random_encoding)
            path_indices = Cell(**arch).get_path_indices()
            if path_indices not in dic:
                dic[path_indices] = 1
                new_arch_list.append(arch)

    return new_arch_list
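A hedged sketch (not from the original source) of how get_arch_list might be driven inside GP-based Bayesian optimization: the loop pickles its evaluated (spec, losses) tuples, sorted by validation loss, to the aux file and then asks for new candidates around the incumbents. The helper name and file name are assumptions; the tuple layout follows how get_arch_list unpacks it above.

import pickle

def propose_candidates(ss, evaluated, aux_file_path='bo_archive.pkl'):
    # `evaluated` is assumed to be a list of (arch_spec, (val_loss, test_loss)) tuples,
    # matching the archtuple[0] / archtuple[1][0] access pattern in get_arch_list().
    evaluated = sorted(evaluated, key=lambda archtuple: archtuple[1][0])
    with open(aux_file_path, 'wb') as f:
        pickle.dump(evaluated, f)
    return ss.get_arch_list(aux_file_path, num_top_arches=5, verbose=1)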
def get_nbhd(self, arch, mutate_encoding='adj'):
    if self.search_space == 'nasbench':
        return Cell(**arch).get_neighborhood(self.nasbench,
                                             mutate_encoding=mutate_encoding,
                                             index_hash=self.index_hash)
    elif self.search_space == 'nasbench_201':
        return Cell(**arch).get_neighborhood(self.nasbench,
                                             mutate_encoding=mutate_encoding)
    else:
        return Arch(arch).get_neighborhood()
@classmethod
def generate_distance_matrix(cls, arches_1, arches_2, distance):
    matrix = np.zeros([len(arches_1), len(arches_2)])
    for i, arch_1 in enumerate(arches_1):
        for j, arch_2 in enumerate(arches_2):
            if distance == 'edit_distance':
                matrix[i][j] = Cell(**arch_1).edit_distance(Cell(**arch_2))
            elif distance == 'path_distance':
                matrix[i][j] = Cell(**arch_1).path_distance(Cell(**arch_2))
            else:
                print('{} is an invalid distance'.format(distance))
                sys.exit()
    return matrix
def mutate_arch(self, arch, mutation_rate=1.0):
    if self.search_space in ['nasbench', 'nasbench_201']:
        return Cell(**arch).mutate(self.nasbench, mutation_rate=mutation_rate)
    else:
        return Arch(arch).mutate(int(mutation_rate))
def convert_to_cells(self,
                     arches,
                     predictor_encoding='path',
                     cutoff=0,
                     train=True):
    cells = []
    for arch in arches:
        spec = Cell.convert_to_cell(arch)
        cell = self.query_arch(spec,
                               predictor_encoding=predictor_encoding,
                               cutoff=cutoff,
                               train=train)
        cells.append(cell)
    return cells
def mutate_arch(self,
                arch,
                mutation_rate=1.0,
                mutate_encoding='adj',
                cutoff=0):
    if self.search_space in ['nasbench', 'nasbench_201']:
        return Cell(**arch).mutate(self.nasbench,
                                   mutation_rate=mutation_rate,
                                   mutate_encoding=mutate_encoding,
                                   index_hash=self.index_hash,
                                   cutoff=cutoff)
    else:
        return Arch(arch).mutate(int(mutation_rate))
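A minimal evolutionary-search sketch (not from the original source) showing where mutate_arch fits; it assumes the dict-returning query_arch defined below (with 'spec' and 'val_loss' keys) and uses simple tournament selection with truncation.

import random

def evolution_sketch(ss, num_init=10, num_iters=50, population_size=20, tournament_size=5):
    # ss is an assumed instance of the surrounding search-space class; query_arch()
    # is assumed to return a dict with 'spec' and 'val_loss' keys (see below).
    population = [ss.query_arch(train=True) for _ in range(num_init)]
    for _ in range(num_iters):
        sample = random.sample(population, min(tournament_size, len(population)))
        parent = min(sample, key=lambda d: d['val_loss'])
        child_spec = ss.mutate_arch(parent['spec'], mutation_rate=1.0)
        population.append(ss.query_arch(child_spec, train=True))
        population = sorted(population, key=lambda d: d['val_loss'])[:population_size]
    return min(population, key=lambda d: d['val_loss'])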
@classmethod
def generate_distance_matrix(cls, arches_1, arches_2, distance):
    # Method used for gp_bayesopt for nasbench
    matrix = np.zeros([len(arches_1), len(arches_2)])
    for i, arch_1 in enumerate(arches_1):
        for j, arch_2 in enumerate(arches_2):
            if distance == 'edit_distance':
                matrix[i][j] = Cell(**arch_1).edit_distance(Cell(**arch_2))
            elif distance == 'path_distance':
                matrix[i][j] = Cell(**arch_1).path_distance(Cell(**arch_2))
            elif distance == 'trunc_path_distance':
                matrix[i][j] = Cell(**arch_1).path_distance(Cell(**arch_2))
            elif distance == 'nasbot_distance':
                matrix[i][j] = Cell(**arch_1).nasbot_distance(Cell(**arch_2))
            else:
                print('{} is an invalid distance'.format(distance))
                sys.exit()
    return matrix
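One plausible way (an illustration, not the original gp_bayesopt code) to turn the distance matrix above into a Gaussian-process kernel is to exponentiate the negative distances; the kernel form and the lengthscale hyperparameter are assumptions.

import numpy as np

def distance_kernel(ss, arches_1, arches_2, distance='path_distance', lengthscale=1.0):
    # ss is an assumed instance (or the class itself) exposing the classmethod above;
    # the exp(-d / lengthscale) form is an illustrative choice, not the repo's.
    dists = ss.generate_distance_matrix(arches_1, arches_2, distance)
    return np.exp(-dists / lengthscale)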
def query_arch(self,
               arch=None,
               train=True,
               predictor_encoding=None,
               cutoff=0,
               random_encoding='standard',
               deterministic=True,
               epochs=0,
               random_hash=False,
               max_edges=None,
               max_nodes=None):

    arch_dict = {}
    arch_dict['epochs'] = epochs

    if self.search_space in ['nasbench', 'nasbench_201']:
        if arch is None:
            arch = Cell.random_cell(self.nasbench,
                                    random_encoding=random_encoding,
                                    max_edges=max_edges,
                                    max_nodes=max_nodes,
                                    cutoff=cutoff,
                                    index_hash=self.index_hash)
        arch_dict['spec'] = arch

        if predictor_encoding:
            arch_dict['encoding'] = Cell(**arch).encode(predictor_encoding=predictor_encoding,
                                                        cutoff=cutoff)

        # special keys for local search and outside_ss experiments
        if self.search_space == 'nasbench_201' and random_hash:
            arch_dict['random_hash'] = Cell(**arch).get_random_hash()
        if self.search_space == 'nasbench':
            arch_dict['adj'] = Cell(**arch).encode(predictor_encoding='adj')
            arch_dict['path'] = Cell(**arch).encode(predictor_encoding='path')

        if train:
            arch_dict['val_loss'] = Cell(**arch).get_val_loss(self.nasbench,
                                                              deterministic=deterministic,
                                                              dataset=self.dataset)
            arch_dict['test_loss'] = Cell(**arch).get_test_loss(self.nasbench,
                                                                dataset=self.dataset)
            arch_dict['num_params'] = Cell(**arch).get_num_params(self.nasbench)
            arch_dict['val_per_param'] = (arch_dict['val_loss'] - 4.8) * (arch_dict['num_params'] ** 0.5) / 100

            if self.search_space == 'nasbench':
                arch_dict['dist_to_min'] = arch_dict['val_loss'] - 4.94457682
            elif self.dataset == 'cifar10':
                arch_dict['dist_to_min'] = arch_dict['val_loss'] - 8.3933
            elif self.dataset == 'cifar100':
                arch_dict['dist_to_min'] = arch_dict['val_loss'] - 26.5067
            else:
                arch_dict['dist_to_min'] = arch_dict['val_loss'] - 53.2333

    else:
        # if the search space is DARTS
        if arch is None:
            arch = Arch.random_arch()
        arch_dict['spec'] = arch

        if predictor_encoding == 'path':
            encoding = Arch(arch).encode_paths()
        elif predictor_encoding == 'trunc_path':
            encoding = Arch(arch).encode_freq_paths()
        else:
            encoding = arch
        arch_dict['encoding'] = encoding

        if train:
            if epochs == 0:
                epochs = 50
            arch_dict['val_loss'], arch_dict['test_loss'] = Arch(arch).query(epochs=epochs)

    return arch_dict
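A short usage sketch (illustrative only) of the dict returned by the query_arch above; ss is an assumed instance of the surrounding search-space class with ss.nasbench and ss.dataset set, and the helper name is made up.

def summarize_random_arch(ss):
    # ss is an assumed instance of the surrounding search-space class.
    arch_dict = ss.query_arch(train=True, predictor_encoding='path')
    print('spec:', arch_dict['spec'])
    print('encoding length:', len(arch_dict['encoding']))
    print('val/test loss:', arch_dict['val_loss'], arch_dict['test_loss'])
    return arch_dict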
def query_arch(self,
               arch=None,
               train=True,
               encoding_type='path',
               cutoff=-1,
               deterministic=True,
               epochs=0):

    arch_dict = {}
    arch_dict['epochs'] = epochs

    if self.search_space in ['nasbench', 'nasbench_201']:
        if arch is None:
            arch = Cell.random_cell(self.nasbench)
        arch_dict['spec'] = arch

        if encoding_type == 'adj':
            encoding = Cell(**arch).encode_standard()
        elif encoding_type == 'path':
            encoding = Cell(**arch).encode_paths()
        elif encoding_type == 'trunc_path':
            encoding = Cell(**arch).encode_paths()[:cutoff]
        else:
            print('invalid encoding type')
            sys.exit()
        arch_dict['encoding'] = encoding

        if train:
            arch_dict['val_loss'] = Cell(**arch).get_val_loss(self.nasbench,
                                                              deterministic=deterministic,
                                                              dataset=self.dataset)
            arch_dict['test_loss'] = Cell(**arch).get_test_loss(self.nasbench,
                                                                dataset=self.dataset)
            arch_dict['num_params'] = Cell(**arch).get_num_params(self.nasbench)
            arch_dict['val_per_param'] = (arch_dict['val_loss'] - 4.8) * (arch_dict['num_params'] ** 0.5) / 100

    else:
        if arch is None:
            arch = Arch.random_arch()
        arch_dict['spec'] = arch

        if encoding_type == 'path':
            encoding = Arch(arch).encode_paths()
        elif encoding_type == 'trunc_path':
            encoding = Arch(arch).encode_paths()[:cutoff]
        else:
            encoding = arch
        arch_dict['encoding'] = encoding

        if train:
            if epochs == 0:
                epochs = 50
            arch_dict['val_loss'], arch_dict['test_loss'] = Arch(arch).query(epochs=epochs)

    return arch_dict
def get_path_indices(self, arch):
    if self.search_space == 'nasbench':
        return Cell(**arch).get_path_indices()
    else:
        return Arch(arch).get_path_indices()[0]
def get_nbhd(self, arch):
    if self.search_space in ['nasbench', 'nasbench_201']:
        return Cell(**arch).get_neighborhood(self.nasbench)
    else:
        return Arch(arch).get_neighborhood()
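A first-improvement local-search sketch (not from the original source) built on get_nbhd; it assumes get_nbhd returns a list of architecture specs that the dict-returning query_arch defined earlier accepts, and ss is an assumed instance of the surrounding class.

def local_search_sketch(ss, num_steps=20):
    # Assumes ss.get_nbhd() returns architecture specs accepted by ss.query_arch(),
    # and that query_arch() returns a dict with 'spec' and 'val_loss' keys.
    incumbent = ss.query_arch(train=True)
    for _ in range(num_steps):
        improved = False
        for nbr_spec in ss.get_nbhd(incumbent['spec']):
            nbr = ss.query_arch(nbr_spec, train=True)
            if nbr['val_loss'] < incumbent['val_loss']:
                incumbent = nbr
                improved = True
                break
        if not improved:
            break
    return incumbent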