Code example #1
    def __init__(self, graph_part, crosslayer_dis):

        '''
        :param graph_part: network topology (adjacency table)
        :param crosslayer_dis: maximum distance allowed for cross-layer connections
        '''

        self.graph_part = graph_part
        self.crosslayer_dis = crosslayer_dis
        # set the structure size
        self.node_number = len(self.graph_part)


        # set the possible cross-layer connections for each node
        self.crosslayer = connect(self.graph_part, self.crosslayer_dis)
        # self.crosslayer = self.get_crosslayer()

        # read the configuration table to get the mapping of operations
        self.setting, self.pros, self.parameters_subscript_node, = load_conf()
        #
        del self.setting['dense']

        self.dic_index = self._init_dict()

        self.p = []

        # set the optimization Dimension
        # set the optimization parameters
        self.__region, self.__type = self.opt_parameters()
        self.dim = Dimension()
        self.dim.set_dimension_size(len(self.__region))
        self.dim.set_regions(self.__region, self.__type)
        self.parameters_subscript = []  #
Code example #2
 def __init__(self, graph_part, block_id):
     """
         Generate adjacency of network topology.
         Sampling network operation based on sampled value(table).
         The sampling value is updated by the optimization module
         based on the output given by the evaluation module.
         Attributes:
             graph_part: a Network Topology(Adjacency table).
             block_id: The stage of neural network search.
             Other important operation information and parameters of optimization module
             are given by folder 'parameters'.
     """
     self._p_table = []  # initializing the table value in Sampler.
     self._graph_part = graph_part
     self._node_number = len(self._graph_part)
     self._pattern = NAS_CONFIG['nas_main'][
         'pattern']  #  Parameter setting based on search method
     self._crosslayer_dis = NAS_CONFIG['spl'][
         'skip_max_dist'] + 1  # dis control
     self._cross_node_number = NAS_CONFIG['spl']['skip_max_num']
     self._graph_part_invisible_node = self.graph_part_add_invisible_node()
     self._crosslayer = self._get_crosslayer()
     # Read parameter table to get operation dictionary in stage(block_id)
     self._setting = dict()
     self._setting['conv'] = copy.deepcopy(NAS_CONFIG['spl']['conv_space'])
     self._setting['pooling'] = copy.deepcopy(
         NAS_CONFIG['spl']['pool_space'])
     if self._pattern == "Block":
         self._setting['conv']['filter_size'] = \
             self._setting['conv']['filter_size'][block_id]
     self._dic_index = self._init_dict()  # check
     # Set parameters of optimization module based on the above results
     self.__region, self.__type = self._opt_parameters()
     self.__dim = Dimension()
     self.__dim.set_dimension_size(len(self.__region))  # 10%
     self.__dim.set_regions(self.__region, self.__type)
     self.__parameters_subscript = []  #
     self.opt = Optimizer(self.__dim, self.__parameters_subscript)
     opt_para = copy.deepcopy(NAS_CONFIG["opt"])
     __sample_size = opt_para[
         "sample_size"]  # the instance number of sampling in an iteration
     __budget = opt_para["budget"]  # budget in online style
     __positive_num = opt_para["positive_num"]  # the set size of PosPop
     __rand_probability = opt_para[
         "rand_probability"]  # the probability of sample in model
     __uncertain_bit = opt_para[
         "uncertain_bit"]  # the dimension size that is sampled randomly
     self.opt.set_parameters(ss=__sample_size,
                             bud=__budget,
                             pn=__positive_num,
                             rp=__rand_probability,
                             ub=__uncertain_bit)
     self.opt.clear()  # clear optimization model
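For reference, the constructor above touches only a handful of NAS_CONFIG keys. The sketch below shows the configuration shape it assumes; the key names come straight from the code, while the values are placeholders (the real settings live in the project's 'parameters' folder).

# Minimal sketch of the NAS_CONFIG entries read by the constructor above.
# Key names are taken from the code; the values are placeholders.
NAS_CONFIG = {
    'nas_main': {'pattern': 'Block'},                  # or 'Global'
    'spl': {
        'skip_max_dist': 4,                            # -> _crosslayer_dis = 4 + 1
        'skip_max_num': 2,                             # -> _cross_node_number
        'conv_space': {
            'filter_size': [[32, 64], [64, 128]],      # indexed by block_id in "Block" mode
            'kernel_size': [1, 3, 5],
            'activation': ['relu', 'leakyrelu'],
        },
        'pool_space': {
            'pooling_type': ['max', 'avg'],
            'kernel_size': [2, 3],
        },
    },
    'opt': {
        'sample_size': 5,          # instances sampled per iteration
        'budget': 20000,           # budget in online style
        'positive_num': 2,         # size of PosPop
        'rand_probability': 0.99,  # probability of sampling from the model
        'uncertain_bit': 3,        # dimensions sampled randomly
    },
}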
Code example #3
    def __init__(self, nn):
        # set the structure size
        self.node_number = len(nn.graph_part)
        # read the configuration table to get the probability mapping of operations
        self.setting, self.pros, self.parameters_subscript, = load_conf()
        self.dic_index = self._init_dict()
        #
        # print(self.dic_index)
        # print(len(self.pros))
        self.p = []
        # set the optimization Dimension
        self.dim = Dimension()
        self.dim.set_dimension_size(len(self.pros))
        # set the optimization parameters

        self.dim.set_regions(self.pros, [0 for _ in range(len(self.pros))])
Code example #4
    def __init__(self, nn):

        '''
        :param nn: NetworkUnit
        '''

        # set the structure size
        self.node_number = len(nn.graph_part)
        # read the configuration table to get the mapping of operations
        self.setting, self.pros, self.parameters_subscript_node, = load_conf()
        self.dic_index = self._init_dict()

        # print(len(self.pros))
        self.p = []

        # set the optimization Dimension
        # set the optimization parameters
        self.__region, self.__type = self.opt_parameters()
        self.dim = Dimension()
        self.dim.set_dimension_size(len(self.__region))
        self.dim.set_regions(self.__region, self.__type)
        self.parameters_subscript = []  #
Code example #5
class Sampler_struct:

    def __init__(self, nn):

        '''
        :param nn: NetworkUnit
        '''

        # set the structure size
        self.node_number = len(nn.graph_part)
        # read the configuration table to get the mapping of operations
        self.setting, self.pros, self.parameters_subscript_node, = load_conf()
        self.dic_index = self._init_dict()

        # print(len(self.pros))
        self.p = []

        # set the optimization Dimension
        # set the optimization parameters
        self.__region, self.__type = self.opt_parameters()
        self.dim = Dimension()
        self.dim.set_dimension_size(len(self.__region))
        self.dim.set_regions(self.__region, self.__type)
        self.parameters_subscript = []  #

    # update p
    def renewp(self, newp):
        self.p = newp

    # get the optimization parameters based on the operation mapping
    def opt_parameters(self):
        __type_tmp = []
        __region_tmp = []
        for key in self.dic_index:
            tmp = int(self.dic_index[key][1] - self.dic_index[key][0])
            __region_tmp.append([0, tmp])
            __type_tmp.append(2)

        __region = []
        __type = []
        for i in range(self.node_number):
            __region = __region + __region_tmp
            __type.extend(__type_tmp)
        return __region, __type

    def sample(self):
        res = []
        # per-node search of the structure parameters
        for num in range(self.node_number):
            # take one node's worth of probabilities
            p_node = self.p[num*len(self.dic_index):(num+1)*len(self.dic_index)]
            first = p_node[0]
            tmp = ()
            # the first position decides between conv and pooling
            if first == 0:
                # search the operations under conv
                # use the operation mapping to locate the configuration entry and read its value
                tmp = tmp + ('conv',)
                struct_conv = ['conv filter_size', 'conv kernel_size', 'conv activation']
                for key in struct_conv:
                    tmp = tmp + (self.setting['conv'][key.split(' ')[-1]]['val'][p_node[self.dic_index[key][-1]]],)
            else:
                # search the operations under pooling
                # use the operation mapping to locate the configuration entry and read its value
                tmp = tmp + ('pooling',)
                struct_pooling = ['pooling pooling_type', 'pooling kernel_size']
                for key in struct_pooling:
                    tmp = tmp + (self.setting['pooling'][key.split(' ')[-1]]['val'][p_node[self.dic_index[key][-1]]],)
            res.append(tmp)
        return res

    # get the operation by position from the result of opt.sample()
    def _init_dict(self):
        dic = {}
        dic['conv'] = (0,1,0)
        cnt = 1
        num = 1
        for key in self.setting:
            for k in self.setting[key]:
                tmp = len(self.setting[key][k]['val']) - 1
                dic[key + ' ' + k ] = (cnt, cnt + tmp, num)
                num += 1
                cnt += tmp

        return dic

    def get_dim(self):
        return self.dim

    def get_parametets_subscript(self):
        return self.parameters_subscript

    # log
    def get_cell_log(self, POOL, PATH, date):
        for i, j in enumerate(POOL):
            s = 'nn_param_' + str(i) + '_' + str(date)
            fp = open(PATH + s, "wb")
            # print(s)
            pickle.dump(j.cell_list, fp)
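To make the index bookkeeping above concrete, the standalone snippet below rebuilds the same dictionary for a toy configuration and decodes one node, mirroring the logic of _init_dict and sample; the toy setting values are invented for illustration only.

# Standalone replay of the index layout built by _init_dict above.
# The toy `setting` is invented; only the bookkeeping mirrors the class.
setting = {
    'conv': {'filter_size': {'val': [32, 64, 128]},
             'kernel_size': {'val': [1, 3, 5]},
             'activation': {'val': ['relu']}},
    'pooling': {'pooling_type': {'val': ['avg', 'max']},
                'kernel_size': {'val': [2, 3]}},
}

dic = {'conv': (0, 1, 0)}        # slot 0 decides conv vs pooling
cnt, num = 1, 1
for key in setting:
    for k in setting[key]:
        tmp = len(setting[key][k]['val']) - 1
        dic[key + ' ' + k] = (cnt, cnt + tmp, num)   # (range start, range end, slot index)
        num += 1
        cnt += tmp

# Each node occupies len(dic) slots of the flat vector p; decode one conv node.
p_node = [0, 1, 2, 0, 0, 1]      # slot 0 == 0 -> 'conv'
cell = ('conv',) + tuple(
    setting['conv'][k]['val'][p_node[dic['conv ' + k][-1]]]
    for k in ('filter_size', 'kernel_size', 'activation'))
print(cell)                      # ('conv', 64, 5, 'relu')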
Code example #6
class Sampler:

    def __init__(self, graph_part, block_id):
        """
            Generate adjacency of network topology.
            Sampling network operation based on sampled value(table).
            The sampling value is updated by the optimization module
            based on the output given by the evaluation module.
            Attributes:
                graph_part: a Network Topology(Adjacency table).
                block_id: The stage of neural network search.
                Other important operation information and parameters of optimization module
                are given by folder 'parameters'.
        """
        self._p_table = []  # initializing the table value in Sampler.
        self._graph_part = graph_part
        self._node_number = len(self._graph_part)
        self._pattern = NAS_CONFIG['nas_main']['pattern']  #  Parameter setting based on search method
        self._crosslayer_dis = NAS_CONFIG['spl']['skip_max_dist'] + 1  # dis control
        self._cross_node_number = NAS_CONFIG['spl']['skip_max_num']
        self._graph_part_invisible_node = self._graph_part_add_invisible_node()
        self._tmp_flag = 0
        self._graph_part_invisible_node_flag = [0 for i in range(len(self._graph_part_invisible_node))]
        self._find_main_chain(self._graph_part_invisible_node)
        # print(self._graph_part_invisible_node_flag)
        self._crosslayer = self._get_crosslayer()
        # Read parameter table to get operation dictionary in stage(block_id)
        self._setting = self._init_setting(block_id)
        self._dic_index = self._init_dict()  # check
        # Set parameters of optimization module based on the above results
        self.__region, self.__type = self._opt_parameters()
        self.__dim = Dimension()
        self.__dim.set_dimension_size(len(self.__region))    # 10%
        self.__dim.set_regions(self.__region, self.__type)
        self.__parameters_subscript = []
        self.opt = Optimizer(self.__dim, self.__parameters_subscript)
        opt_para = copy.deepcopy(NAS_CONFIG["opt"])
        __sample_size = opt_para["sample_size"]  # the instance number of sampling in an iteration
        __budget = opt_para["budget"]  # budget in online style
        __positive_num = opt_para["positive_num"]  # the set size of PosPop
        __rand_probability = opt_para["rand_probability"]  # the probability of sample in model
        __uncertain_bit = opt_para["uncertain_bit"]  # the dimension size that is sampled randomly
        self.opt.set_parameters(ss=__sample_size, bud=__budget,
                                pn=__positive_num, rp=__rand_probability, ub=__uncertain_bit)
        self.opt.clear()  # clear optimization model

    def sample(self):
        """
        Get table based on the optimization module sampling,
        update table in Sampler,
        and sample the operation configuration.
        No Args.
        Returns:
            1. cell (1d Cell list)
            2. graph_full (2d int list, as NetworkItem.graph_full)
            3. table (1d int list, depending on dimension)
        """
        table = self.opt.sample()
        cell, graph = self.convert(table)
        return cell, graph, table

    def update_opt_model(self, table, score):
        """
        Optimization of sampling space based on Evaluation and optimization method.
        Args:
            1. table (1d int list, depending on dimension)
            2. score(float, 0 ~ 1.0)
        No returns.
        """
        self.opt.update_model(table, score)  # here "-" represents that we minimize the loss

    def _init_setting(self, block_id):
        _setting_tmp = collections.OrderedDict()
        _setting_tmp = copy.deepcopy(NAS_CONFIG['spl']['space'])

        if NAS_CONFIG['spl']['pool_switch'] == 0 and 'pooling' in _setting_tmp:
            del _setting_tmp['pooling']

        for key in _setting_tmp:
            for op in _setting_tmp[key]:
                if type(_setting_tmp[key][op][0]) is list:
                    _setting_tmp[key][op] = _setting_tmp[key][op][block_id]

        return _setting_tmp

    def _graph_part_add_invisible_node(self):
        graph_part_tmp = []
        for i in self._graph_part:
            if not i:
                graph_part_tmp.append([self._node_number])
            else:
                graph_part_tmp.append(i)
        graph_part_tmp.append([])
        return graph_part_tmp

    def _find_main_chain(self, graph):
        q = Queue()
        q.put([0, 0])
        ma = 0
        while q.empty() is False:
            f = q.get()
            ma = max(f[1], ma)
            for i in self._graph_part_invisible_node[f[0]]:
                q.put([i, f[1]+1])

        self._graph_part_invisible_node_flag[0] = 1
        self._dfs(0, 0, ma)

        return

    def _dfs(self, node_id, cnt, ma):
        if cnt == ma:
            self._tmp_flag = 1
        if self._tmp_flag == 1:
            return
        for i in self._graph_part_invisible_node[node_id]:
            if self._graph_part_invisible_node_flag[i] == 0 and self._tmp_flag == 0:
                self._graph_part_invisible_node_flag[i] = 1
                self._dfs(i, cnt+1, ma)
                if self._tmp_flag == 0:
                    self._graph_part_invisible_node_flag[i] = 0

    def _get_crosslayer(self):
        """
        utilizing breadth-first search to set the possible cross layer
        connection for each node of the network topology.
        """
        cl = []
        for i in range(self._node_number):
            if self._graph_part_invisible_node_flag[i] == 1:
                cl.append(self._bfs(i))
            else:
                cl.append([])
        return cl

    def _bfs(self, node_id):
        res_list = []
        q = Queue()
        q.put([node_id, 0])
        v = []
        for i in range(self._node_number + 1):
            v.append(0)
        v[node_id] = 1

        while q.empty() is False:
            f = q.get()
            if f[1] >= 2:
                if f[1] <= self._crosslayer_dis:
                    res_list.append(f[0])
                else:
                    continue
            # for j in self._graph_part[f[0]]:
            for j in self._graph_part_invisible_node[f[0]]:
                if self._graph_part_invisible_node_flag[j] == 1:
                    if v[j] == 0:
                        q.put([j, f[1] + 1])
                        v[j] = 1

        return res_list
    #
    def _region_cross_type(self, __region_tmp, __type_tmp, i):
        region_tmp = copy.copy(__region_tmp)
        type_tmp = copy.copy(__type_tmp)

        for j in range(self._cross_node_number):
            if self._graph_part_invisible_node_flag[i] == 1:
                region_tmp.append([0, len(self._crosslayer[i])])
                type_tmp.append(2)

        return region_tmp, type_tmp

    def _opt_parameters(self):
        """Get the parameters of optimization module based on parameter document."""
        __type_tmp = []
        __region_tmp = []
        for i in range(len(self._dic_index)):
            __region_tmp.append([0, 0])
        # __region_tmp = [[0, 0] for _ in range(len(self._dic_index))]
        for key in self._dic_index:
            tmp = int(self._dic_index[key][1] - self._dic_index[key][0])
            __region_tmp[self._dic_index[key][-1]] = [0, tmp]
            __type_tmp.append(2)

        __region = []
        __type = []
        for i in range(self._node_number):
            __region_cross, __type_cross = \
                self._region_cross_type(__region_tmp, __type_tmp, i)

            __region = __region + __region_cross
            __type.extend(__type_cross)

        return __region, __type

    def convert(self, table_tmp):
        """Search corresponding operation configuration based on table."""
        self._p_table = copy.deepcopy(table_tmp)
        res = []
        l = 0
        r = 0
        graph_part_sample = copy.deepcopy(self._graph_part)
        for num in range(self._node_number):
            l = r
            r = l + len(self._dic_index)
            if self._graph_part_invisible_node_flag[num] == 1:
                r = r + self._cross_node_number

            p_node = self._p_table[l:r]  # Take the search space of a node
            # print(p_node)
            node_cross_tmp = list(set(copy.deepcopy(p_node[len(self._dic_index):])))
            for i in node_cross_tmp:
                if i != 0:
                    graph_part_sample[num].append(self._crosslayer[num][i - 1])
            if not graph_part_sample[num]:
                graph_part_sample[num].append(self._node_number)

            for cnt, key_type in enumerate(self._setting):
                if p_node[self._dic_index['type'][-1]] == cnt:
                    tmp = (key_type,)

                    item_list = Cell.get_format(key_type)

                    for key_item in item_list:
                        tmp = tmp + (self._setting[key_type][key_item]
                                     [p_node[self._dic_index[key_type + ' ' + key_item][-1]]],)
            # print(tmp)
            tmp = Cell(tmp)
            res.append(tmp)

        return res, graph_part_sample

    def _init_dict(self):
        """Operation space dictionary based on parameter file."""

        dic = {}
        dic['type'] = (0, len(self._setting)-1, 0)
        cnt = 1
        num = 1
        for key in self._setting:

            for k in self._setting[key]:
                tmp = len(self._setting[key][k]) - 1
                dic[key + ' ' + k] = (cnt, cnt + tmp, num)
                num += 1
                cnt += tmp

        return dic

    # log
    def _get_cell_log(self, POOL, PATH, date):
        for i, j in enumerate(POOL):
            s = 'nn_param_' + str(i) + '_' + str(date)
            fp = open(PATH + s, "wb")
            # print(s)
            pickle.dump(j.cell_list, fp)

    def ops2table(self, ops, table_tmp):
        """
        set the table under the output in predictor
        the output in predictor looks like:
        [['64', '7'], ['pooling'], ['64', '3'], ['256', '3'], ['1024', '1'],
        ['1024', '1'], ['1024', '3'], ['1024', '3'], ['1024', '3'], ['512', '1'],
        ['128', '5'], ['64', '3'], ['1024', '1'], ['1024', '1'], ['256', '3']]
        """
        self._p_table = copy.deepcopy(table_tmp)
        table = []
        l = 0
        r = 0
        conv_cnt = 0
        pooling_cnt = 1
        for i, j in enumerate(self._setting):
            if j == 'conv':
                conv_cnt = i
            if j == 'pooling':
                pooling_cnt = i
        for num in range(self._node_number):  # Take the search space of a node
            l = r
            r = l + len(self._dic_index)
            if self._graph_part_invisible_node_flag[num] == 1:
                r = r + self._cross_node_number
            p_node = self._p_table[l:r]  # Sample value of the current node

            if len(ops[num]) != 1:
                p_node = self._p_table[l:r]  # Sample value of the current node
                p_node[self._dic_index['type'][-1]] = conv_cnt
                for j, i in enumerate(self._setting['conv']['filter_size']):
                    if str(i) == ops[num][0]:
                        p_node[self._dic_index['conv filter_size'][-1]] = j
                for j, i in enumerate(self._setting['conv']['kernel_size']):
                    if str(i) == ops[num][1]:
                        p_node[self._dic_index['conv kernel_size'][-1]] = j
                for j, i in enumerate(self._setting['conv']['activation']):
                    if i == 'relu':
                        p_node[self._dic_index['conv activation'][-1]] = j
                table = table + p_node
            else:
                if self._pattern == "Global":
                    p_node[self._dic_index['type'][-1]] = pooling_cnt
                table = table + p_node
        return table
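Pieced together from the docstrings above, the intended use of this Sampler is a sample/evaluate/update loop. The sketch below is hypothetical: graph_part and evaluate() stand in for the project's network topology and evaluation module.

# Hypothetical usage sketch of the sample/evaluate/update loop
# described in the docstrings above; evaluate() is a placeholder.
spl = Sampler(graph_part, block_id=0)
for _ in range(10):
    cell_list, graph_full, table = spl.sample()   # Cell list, adjacency, int table
    score = evaluate(cell_list, graph_full)       # float in 0 ~ 1.0 from the evaluation module
    spl.update_opt_model(table, score)            # feed the score back to the optimizer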
Code example #7
class Sampler:

    def __init__(self, graph_part, crosslayer_dis):

        '''
        :param graph_part: network topology (adjacency table)
        :param crosslayer_dis: maximum distance allowed for cross-layer connections
        '''

        self.graph_part = graph_part
        self.crosslayer_dis = crosslayer_dis
        # set the structure size
        self.node_number = len(self.graph_part)


        # set the possible cross-layer connections for each node
        self.crosslayer = connect(self.graph_part, self.crosslayer_dis)
        # self.crosslayer = self.get_crosslayer()

        # read the configuration table to get the mapping of operations
        self.setting, self.pros, self.parameters_subscript_node, = load_conf()
        #
        del self.setting['dense']

        self.dic_index = self._init_dict()

        self.p = []

        # set the optimization Dimension
        # set the optimization parameters
        self.__region, self.__type = self.opt_parameters()
        self.dim = Dimension()
        self.dim.set_dimension_size(len(self.__region))
        self.dim.set_regions(self.__region, self.__type)
        self.parameters_subscript = []  #

    # update p
    def renewp(self, newp):
        self.p = newp

    def get_crosslayer(self):
        cl = []
        for i in range(len(self.graph_part)):
            cl.append(self.bfs(i))
        return cl

    def bfs(self, i):
        res_list = []
        q = Queue()
        q.put([i, 0])

        v = [0 for ii in range(len(self.graph_part) + 1)]

        while q.empty() is False:
            f = q.get()

            if f[1] >= 2:
                if f[1] <= self.crosslayer_dis:
                    res_list.append(f[0])
                else:
                    continue

            for j in self.graph_part[f[0]]:
                if v[j] == 0:
                    q.put([j, f[1] + 1])
                    v[j] = 1

        return res_list
    #
    def region_cross_type(self, __region_tmp, __type_tmp, i):
        region_tmp = copy.copy(__region_tmp)
        type_tmp = copy.copy(__type_tmp)
        for j in self.crosslayer[i]:
            region_tmp.append([0, 1])
            type_tmp.append(2)

        return region_tmp, type_tmp

    # get the optimization parameters based on the operation mapping
    def opt_parameters(self):
        __type_tmp = []
        __region_tmp = [[0, 0] for _ in range(len(self.dic_index))]
        for key in self.dic_index:
            tmp = int(self.dic_index[key][1] - self.dic_index[key][0])
            __region_tmp[self.dic_index[key][-1]] = [0, tmp]
            __type_tmp.append(2)

        __region = []
        __type = []
        for i in range(self.node_number):

            #
            __region_cross, __type_cross = self.region_cross_type(__region_tmp, __type_tmp, i)

            __region = __region + __region_cross
            __type.extend(__type_cross)
        return __region, __type

    def sample(self):
        res = []
        # per-node search of the structure parameters
        l = 0
        r = 0
        graph_part_sample = copy.deepcopy(self.graph_part)

        for num in range(self.node_number):
            # take one node's worth of probabilities
            l = r
            r = l + len(self.dic_index) + len(self.crosslayer[num])
            p_node = self.p[l:r]

            #
            # print(p_node)
            # print(p_node[len(self.dic_index):])
            for i in range(len(p_node[len(self.dic_index):])):
                if p_node[len(self.dic_index):][i] == 1:
                    graph_part_sample[num].append(self.crosslayer[num][i])
            #

            first = p_node[self.dic_index['conv'][-1]]
            # first = p_node[0]
            tmp = ()
            # the first position decides between conv and pooling
            if first == 0:
                # search the operations under conv
                # use the operation mapping to locate the configuration entry and read its value
                tmp = tmp + ('conv',)
                struct_conv = ['conv filter_size', 'conv kernel_size', 'conv activation']
                for key in struct_conv:
                    tmp = tmp + (self.setting['conv'][key.split(' ')[-1]]['val'][p_node[self.dic_index[key][-1]]],)
            else:
                # search the operations under pooling
                # use the operation mapping to locate the configuration entry and read its value
                tmp = tmp + ('pooling',)
                struct_pooling = ['pooling pooling_type', 'pooling kernel_size']
                for key in struct_pooling:
                    tmp = tmp + (self.setting['pooling'][key.split(' ')[-1]]['val'][p_node[self.dic_index[key][-1]]],)
            res.append(tmp)

        return res, graph_part_sample

    # get the operation by position from the result of opt.sample()
    def _init_dict(self):
        dic = {}
        dic['conv'] = (0, 1, 0)
        cnt = 1
        num = 1
        for key in self.setting:
            for k in self.setting[key]:
                tmp = len(self.setting[key][k]['val']) - 1
                dic[key + ' ' + k] = (cnt, cnt + tmp, num)
                num += 1
                cnt += tmp

        return dic

    def get_dim(self):
        return self.dim

    def get_parametets_subscript(self):
        return self.parameters_subscript

    # log
    def get_cell_log(self, POOL, PATH, date):
        for i, j in enumerate(POOL):
            s = 'nn_param_' + str(i) + '_' + str(date)
            fp = open(PATH + s, "wb")
            # print(s)
            pickle.dump(j.cell_list, fp)

    def init_p(self, op):

        '''
        :param op:
        [['64', '7'], ['pooling'], ['64', '3'], ['256', '3'], ['1024', '1'],
        ['1024', '1'], ['1024', '3'], ['1024', '3'], ['1024', '3'], ['512', '1'],
        ['128', '5'], ['64', '3'], ['1024', '1'], ['1024', '1'], ['256', '3']]
        :return:
        '''

        table = []
        l = 0
        r = 0
        for num in range(self.node_number):
            # take one node's worth of probabilities
            l = r
            r = l + len(self.dic_index) + len(self.crosslayer[num])
            p_node = self.p[l:r]
            # print('--'*20)
            # print(p_node)
            # print(op[num])
            if len(op[num]) != 1:
                # struct_conv = ['conv filter_size', 'conv kernel_size', 'conv activation']
                a = -1
                b = -1
                c = -1
                for j, i in enumerate(self.setting['conv']['filter_size']['val']):
                    if str(i) == op[num][0]:
                        a = j
                        # p_node[self.dic_index['conv filter_size'][-1]] = j
                        # print(j, '##', self.dic_index['conv filter_size'][-1])

                for j, i in enumerate(self.setting['conv']['kernel_size']['val']):
                    if str(i) == op[num][1]:
                        b = j
                        # p_node[self.dic_index['conv kernel_size'][-1]] = j
                        # print(j, '##', self.dic_index['conv kernel_size'][-1])

                for j, i in enumerate(self.setting['conv']['activation']['val']):
                    if i == 'relu':
                        c = j
                        # p_node[self.dic_index['conv activation'][-1]] = j
                        # print(j, '##', self.dic_index['conv activation'][-1])
                # print(a,b,c)
                p_node[self.dic_index['conv'][-1]] = 0
                if a != -1:
                    p_node[self.dic_index['conv filter_size'][-1]] = a
                if b != -1:
                    p_node[self.dic_index['conv kernel_size'][-1]] = b
                if c != -1:
                    p_node[self.dic_index['conv activation'][-1]] = c

                table = table + p_node
                # print(p_node)

                    # tmp = tmp + (self.setting['conv'][key.split(' ')[-1]]['val'][p_node[self.dic_index[key][-1]]],)
            else:
                p_node[self.dic_index['conv'][-1]] = 1
                table = table + p_node

        return table
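The bfs() method above collects, for every node, the nodes that are reachable at a graph distance between 2 and crosslayer_dis and are therefore skip-connection candidates. The self-contained snippet below replays that search on a small hand-written adjacency table (a toy example, not taken from the project).

from queue import Queue

# Toy adjacency table: node i lists its successor nodes (a 6-node chain).
graph_part = [[1], [2], [3], [4], [5], []]
crosslayer_dis = 3   # maximum skip distance, as in the class above

def bfs(i):
    # Collect nodes at distance 2..crosslayer_dis from node i (mirrors Sampler.bfs).
    res_list = []
    q = Queue()
    q.put([i, 0])
    v = [0 for _ in range(len(graph_part) + 1)]   # +1 leaves room for a virtual end node
    while not q.empty():
        f = q.get()
        if f[1] >= 2:
            if f[1] <= crosslayer_dis:
                res_list.append(f[0])
            else:
                continue
        for j in graph_part[f[0]]:
            if v[j] == 0:
                q.put([j, f[1] + 1])
                v[j] = 1
    return res_list

crosslayer = [bfs(i) for i in range(len(graph_part))]
print(crosslayer)   # [[2, 3], [3, 4], [4, 5], [5], [], []]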
Code example #8
class Sampler_tree:
    def __init__(self, nn):
        # set the structure size
        self.node_number = len(nn.graph_part)
        # read the configuration table to get the probability mapping of operations
        self.setting, self.pros, self.parameters_subscript, = load_conf()
        self.dic_index = self._init_dict()
        #
        # print(self.dic_index)
        # print(len(self.pros))
        self.p = []
        # set the optimization Dimension
        self.dim = Dimension()
        self.dim.set_dimension_size(len(self.pros))
        # set the optimization parameters

        self.dim.set_regions(self.pros, [0 for _ in range(len(self.pros))])


    # update p
    def renewp(self, newp):
        self.p = newp

    def sample(self, node_number):
        res = []
        # per-node search of the structure parameters
        for num in range(node_number):
            # take the probabilities
            p_node = self.p
            first = self.range_sample(p_node, self.dic_index['conv'])
            tmp = ()
            # the first position decides between conv and pooling
            if first == 0:
                # search the operations under conv
                # take the corresponding probability interval and sample from it
                tmp = tmp + ('conv',)
                struct_conv = ['conv filter_size', 'conv kernel_size', 'conv activation']
                for key in struct_conv:
                    tmp = tmp + (self.setting['conv'][key.split(' ')[-1]]['val'][
                                     self.range_sample(p_node,self.dic_index[key])],)
            else:
                # search the operations under pooling
                # take the corresponding probability interval and sample from it
                tmp = tmp + ('pooling',)
                struct_pooling = ['pooling pooling_type', 'pooling kernel_size']
                for key in struct_pooling:
                    tmp = tmp + (self.setting['pooling'][key.split(' ')[-1]]['val'][
                        self.range_sample(p_node, self.dic_index[key])],)
            res.append(tmp)
        return res

    # sample uniformly over the intervals given by the result of opt.sample()
    def range_sample(self, p_node, range_index):
        k = p_node[range_index[0]:range_index[1]]
        k.append(1-np.array(k).sum())
        # print(k)
        # print(k)
        r = random.random()
        # print(r)
        for j, i in enumerate(k):
            if r <= i:
                return j
            r = r - i

    # build the mapping from operations to probability intervals from the configuration table
    def _init_dict(self):
        dic = {}
        dic['conv'] = (0,1,)
        cnt = 1
        for key in self.setting:
            for k in self.setting[key]:
                tmp = len(self.setting[key][k]['val']) - 1
                dic[key + ' ' + k ] = (cnt,cnt + tmp,)
                cnt += tmp

        return dic

    def get_dim(self):
        return self.dim

    def get_parametets_subscript(self):
        return self.parameters_subscript

    # log
    def get_cell_log(self, POOL, PATH, date):
        for i, j in enumerate(POOL):
            s = 'nn_param_' + str(i) + '_' + str(date)
            fp = open(PATH + s, "wb")
            # print(s)
            pickle.dump(j.cell_list, fp)
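range_sample above turns a slice of the probability vector into a categorical draw: the slice holds the probabilities of all options except the last, the remainder 1 - sum(k) is appended for the last option, and a uniform random number is walked through those intervals. A self-contained replay with made-up probabilities (a small round-off guard is added at the end):

import random
import numpy as np

def range_sample(p_node, range_index):
    # Draw an option index from the probability interval
    # p_node[range_index[0]:range_index[1]] (mirrors Sampler_tree.range_sample).
    k = p_node[range_index[0]:range_index[1]]
    k.append(1 - np.array(k).sum())   # probability mass of the last option
    r = random.random()
    for j, i in enumerate(k):
        if r <= i:
            return j
        r = r - i
    return len(k) - 1                 # guard against floating-point round-off

# Made-up interval with two explicit probabilities; the implicit third
# option gets the remaining mass 0.3.
p_node = [0.2, 0.5]
counts = [0, 0, 0]
for _ in range(10000):
    counts[range_sample(p_node, (0, 2))] += 1
print(counts)   # roughly [2000, 5000, 3000]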