Example #1
def count_neucon2(filelist):
    """Count each layer's size and the maximum fan-out of any source neuron.

    Every entry in filelist is a pickled list of (src, dst, ...) connection
    tuples under info.get_output_path(); a layer's size is the number of
    distinct destinations in its file.
    """
    layers = []
    max_con = []
    assert len(filelist) != 0
    f = open(os.path.join(info.get_output_path(), filelist[0]), 'rb')
    cons = pk.load(f, encoding='iso-8859-1')

    dst = set()
    for con in cons:
        dst.add(con[1])
    layers.append(len(dst))
    # print(layers)
    f.close()

    for file in filelist[1:]:
        f = open(os.path.join(info.get_output_path(), file), 'rb')
        cons = pk.load(f, encoding='iso-8859-1')
        print(len(cons))
        dst = set()
        maxsou = -1
        for con in cons:
            dst.add(con[1])
            if con[0] > maxsou:
                maxsou = int(con[0])

        layers.append(len(dst))
        # Count outgoing connections per source neuron and keep the maximum.
        cnt_con = np.zeros(maxsou + 1)
        for con in cons:
            cnt_con[int(con[0])] += 1
        max_con.append(int(np.max(cnt_con)))
        f.close()
    max_con.append(1)
    return layers, max_con
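
A minimal usage sketch for count_neucon2 (the second file name is hypothetical; each pickle is assumed to hold a list of (src, dst, ...) tuples under info.get_output_path()):

# Hypothetical connection files, ordered from the input layer onward.
conn_files = ['./connection/input_conv1.pkl', './connection/conv1_fc1.pkl']
layers, max_con = count_neucon2(conn_files)
print(layers)   # distinct destination count per file, i.e. each layer's size
print(max_con)  # maximum fan-out of any source neuron; the final entry is fixed to 1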
Example #2
def count_neucon(filelist):
    """Count each layer's size and the average fan-out per source neuron."""
    layers = []
    avg_con = []
    assert len(filelist) != 0

    f = open(os.path.join(info.get_output_path(), filelist[0]),
             'rb')  # one-to-one
    cons = pk.load(f)  # deserialize the pickled connection list into a Python object
    dst = set()  # set of distinct destination neuron ids
    for con in cons:
        dst.add(con[1])
    layers.append(len(dst))
    f.close()
    for file in filelist[1:]:
        f = open(os.path.join(info.get_output_path(), file), 'rb')
        cons = pk.load(f)
        print(len(cons))
        dst = set()
        for con in cons:
            dst.add(con[1])
        avg_con.append(int(math.ceil(len(cons) / float(layers[-1]))))
        layers.append(len(dst))

        f.close()
    avg_con.append(1)
    return layers, avg_con
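
A similar sketch for count_neucon (file names hypothetical): avg_con[k] is ceil(number of connections in file k+1 / size of layer k), and the last entry is fixed to 1.

conn_files = ['./connection/input_conv1.pkl', './connection/conv1_fc1.pkl']
layers, avg_con = count_neucon(conn_files)
print(layers, avg_con)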
Example #3
def neuron_link(connfiles, netDepth, layers, nodes):
    """Build node-level links from neuron-level connections.

    Each layer's neurons are mapped evenly onto its nodes; a link is recorded
    from a source node to a destination node whenever any neuron placed on the
    source node connects to a neuron placed on the destination node.
    """
    neuron_id = []  # node index assigned to each neuron
    neuron_dst = []  # destination neurons of each neuron
    node_link = []
    # link_to = []
    for i in range(netDepth - 2):
        x_loop = []
        for j in range(nodes[i]):
            x_loop.append([])
        node_link.append(x_loop)

    # for i in range(netDepth - 2):
    #     x_loop = []
    #     for j in range(nodes[i + 1]):
    #         x_loop.append([])
    #     link_to.append(x_loop)
    # link_to.append([[]])
    # print(link_to)
    for i in range(netDepth - 1):
        neuron_num = int(math.ceil(layers[i] / float(nodes[i])))  # neurons per node in layer i
        # print('neuron_num:',neuron_num)
        neuron_ii = []
        layer_dst = []
        for j in range(layers[i]):
            neuron_ii.append(j // neuron_num)
            layer_dst.append([])
        neuron_id.append(neuron_ii)
        neuron_dst.append(layer_dst)
        # print(neuron_id)

        if i != netDepth - 2:
            f = open(os.path.join(info.get_output_path(), connfiles[i + 1]),
                     'rb')
            # print(f)
            # conn = np.array(pk.load(f))
            conn = np.array(pk.load(f, encoding='iso-8859-1'))
            for line in conn:
                src = int(line[0])
                dst = int(line[1])
                neuron_dst[i][src].append(dst)

    for i in range(netDepth - 2):
        # neuron_num = int(math.ceil(layers[i+1] / float(nodes[i+1])))  # next layer
        for j in range(layers[i]):
            src_id = neuron_id[i][j]
            for k in range(len(neuron_dst[i][j])):
                dst = neuron_dst[i][j][k]
                dst_id = neuron_id[i + 1][dst]

                if dst_id not in node_link[i][src_id]:
                    node_link[i][src_id].append(dst_id)
                # if src_id not in link_to[i][dst_id]:
                #     link_to[i][dst_id].append(src_id)

    # for i in range(nodes[-1]):
    #     node_link[netDepth - 2][i].append(0)
    # link_to[netDepth - 2][0].append(i)
    return node_link
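
A usage sketch for neuron_link, with hypothetical layer sizes and node counts. Note that connfiles[0] is never read here; connfiles[i + 1] is expected to hold the connections from layer i to layer i + 1, and layers/nodes need netDepth - 1 entries.

connfiles = ['./connection/input_conv1.pkl',   # unused by neuron_link
             './connection/conv1_fc1.pkl',     # layer 0 -> layer 1 (hypothetical)
             './connection/fc1_out.pkl']       # layer 1 -> layer 2 (hypothetical)
layers = [784, 256, 10]   # neurons per layer (hypothetical)
nodes = [4, 2, 1]         # nodes assigned to each layer (hypothetical)
node_link = neuron_link(connfiles, netDepth=4, layers=layers, nodes=nodes)
# node_link[i][src_node] lists the destination nodes reached from layer i's src_node.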
Example #4
def gen_inputdata(cons,
                  spikes,
                  input_node_map,
                  maxtime,
                  filename="input.txt",
                  row_file="row.txt",
                  grid_size=64):
    '''
    cons: connection file
    spikes: input spike file
    input_node_map: how the first-layer neurons are distributed over nodes
    maxtime: run time (number of time steps)
    return: input.txt, row.txt
    '''
    f = open(os.path.join(info.get_output_path(), filename), "w")
    inputnodes = []
    for k in input_node_map:
        inputnodes.append(
            node_input(k, cons, input_node_map[k], grid_size=grid_size))
    connections = change_format(cons)
    times = spike_time(spikes, maxtime)
    count = 0
    res = []
    in_head = '40000'
    for i in range(1, maxtime + 1):
        res.append(count)
        neu_set = times[i]
        for node in inputnodes:
            tmp = node.gen_input(neu_set, connections)
            tmp2 = node.get_headpack()
            body_pack_head = node.get_bodypackhead()
            tail_pack_head = node.get_tailpackhead()
            ss = "%011x" % tmp2  # head
            f.write(in_head + ss + '\n')
            count += 1
            flag = 0
            sig = 0
            if len(tmp) != 0:
                # Pack entries two per body word: the first of each pair is
                # shifted into the upper bits, the second fills the low 16 bits.
                for wt in tmp[:len(tmp) - 1]:
                    if flag == 0:
                        flag = 1
                        sig = wt
                    else:
                        flag = 0
                        ss = "%011x" % (((sig << 16) | wt) + body_pack_head)
                        f.write(in_head + ss + '\n')
                        count += 1
                if flag == 1:
                    ss = "%011x" % (((sig << 16) | tmp[-1]) + tail_pack_head)
                    f.write(in_head + ss + '\n')
                    count += 1
                else:
                    sig = tmp[-1]
                    ss = "%011x" % (((sig << 16) | 0) + tail_pack_head)
                    f.write(in_head + ss + '\n')
                    count += 1
            else:
                ss = "%011x" % tail_pack_head
                f.write(in_head + ss + '\n')
                count += 1
    res.append(count)
    f.close()
    frow = open(os.path.join(info.get_output_path(), row_file), "w")
    ttt = 1
    for i in res[1:]:
        frow.write(str(hex(i))[2:] + '\n')  # cumulative packet count (hex) after each time step
        # frow.write(str(hex(i))[2:]+'  '+str(ttt)+'\n')    #rownum  time
        ttt += 1
    frow.close()
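
A call sketch for gen_inputdata, reusing the pickles loaded in the __main__ block of Example #5; the input_node_map contents and maxtime value are hypothetical, and the per-node value is assumed to be the list of first-layer neuron ids that node_input expects.

fpkl = open(os.path.join(info.get_output_path(), './connection/input_conv1.pkl'), 'rb')
cons = pk.load(fpkl, encoding='iso-8859-1')
fpkl.close()
fspk = open(os.path.join(info.get_output_path(), './input/spikeTrains.pkl'), 'rb')
spikes = pk.load(fspk, encoding='iso-8859-1')
fspk.close()
# Hypothetical: every first-layer neuron mapped onto node 0.
input_node_map = {0: list(range(784))}
gen_inputdata(cons, spikes, input_node_map, maxtime=100)
# Produces input.txt (packet stream) and row.txt (cumulative packet counts per time step).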
Example #5
    ttt = 1
    for i in res[1:]:
        row_list.append(str(hex(i))[2:])
        # frow.write(str(hex(i))[2:]+'  '+str(ttt)+'\n')    #rownum  time
        ttt += 1

    return input_list, row_list


if __name__ == '__main__':
    import pickle
    import numpy as np

    # load the input-layer connections
    fpkl = open(
        os.path.join(info.get_output_path(), './connection/input_conv1.pkl'),
        'rb')
    in_conv1 = pickle.load(fpkl, encoding='iso-8859-1')
    fpkl.close()
    # load the spike trains
    fspk = open(
        os.path.join(info.get_output_path(), './input/spikeTrains.pkl'), 'rb')
    spikes = pickle.load(fspk, encoding='iso-8859-1')
    fspk.close()

    input_node_map = {}

    # node allocation lists
    layer = []
    node_list = []
    for x in range(8):
Example #6
    def gen_config_file(self, filename, leaksign=0):
        """Write every node's configuration packet stream to `filename` and a
        register-only copy (without the MMC memory writes) to re_config.txt."""
        f = open(filename, 'w')
        rf = open(os.path.join(info.get_output_path(), "re_config.txt"), "w")
        # f.write(str+'\n')
        for node in self.nodes:
            """
            set registers and memory
            """
            # print(node.get_packlength())
            # print(node,node.x,node.y,node.zero_ref)
            tmp = node.get_headpack()
            # print("%011x" % tmp)
            body_pack_head = node.get_bodypackhead()
            tail_pack_head = node.get_tailpackhead()
            con_head = '40000'
            ss = "%011x" % tmp  # head
            f.write(con_head + ss + '\n')
            rf.write(con_head + ss + '\n')
            # set neunum first and then clear; all register-setting words are
            # written as body packets to both the config file and re_config.
            for tmp in (node.setr_neuronnum(), node.setr_status(),
                        node.setr_vth(), node.setr_leak(),
                        node.setr_mode(leaksign=leaksign),
                        node.setr_linker_baddr(), node.setr_packet_baddr(),
                        node.setr_nizeroref()):
                for t in tmp:
                    ss = "%011x" % (t + body_pack_head)
                    f.write(con_head + ss + '\n')
                    rf.write(con_head + ss + '\n')
            # ni-delay words: f only gets the tail flag here when no MMC data
            # follows, while rf always ends this node's packet at this point.
            tmp = node.setr_nidelay()
            for index, t in enumerate(tmp):
                if index == len(tmp) - 1 and not self.mmc:
                    ss = "%011x" % (t + tail_pack_head)
                else:
                    ss = "%011x" % (t + body_pack_head)

                f.write(con_head + ss + '\n')
            for index, t in enumerate(tmp):
                if index == len(tmp) - 1:
                    ss = "%011x" % (t + tail_pack_head)
                else:
                    ss = "%011x" % (t + body_pack_head)
                rf.write(con_head + ss + '\n')
            if self.mmc:
                # Append the linker and data memory words computed by cal_addr().
                linker, data = node.cal_addr()
                for d in linker:
                    h = (1 << 31) | (node.npu_m << 29) | d[0]
                    ss = "%011x" % (h + body_pack_head)
                    f.write(con_head + ss + '\n')
                    ss = "%011x" % (d[1] + body_pack_head)
                    f.write(con_head + ss + '\n')
                for index, d in enumerate(data):
                    h = (1 << 31) | (node.npu_m << 29) | d[0]
                    ss = "%011x" % (h + body_pack_head)
                    f.write(con_head + ss + '\n')
                    if index == len(data) - 1:
                        ss = "%011x" % (d[1] + tail_pack_head)
                    else:
                        ss = "%011x" % (d[1] + body_pack_head)
                    f.write(con_head + ss + '\n')
            # tmp=node.setr_nizeroref()
            # ss="%011x" % (tmp[0]+body_pack_head)
            # f.write(ss+'\n')
            # ss="%011x" % (tmp[1]+tail_pack_head)
            # f.write(ss+'\n')
        if self.enable:
            for node in self.nodes:
                # print("aaaaaaaaaaaaa")
                tmp = node.get_headpack()
                body_pack_head = node.get_bodypackhead()
                tail_pack_head = node.get_tailpackhead()
                ss = "%011x" % tmp  # head
                f.write(con_head + ss + '\n')
                rf.write(con_head + ss + '\n')
                tmp = node.setr_status(1, 1)
                ss = "%011x" % (tmp[0] + body_pack_head)
                f.write(con_head + ss + '\n')
                rf.write(con_head + ss + '\n')
                ss = "%011x" % (tmp[1] + tail_pack_head)
                f.write(con_head + ss + '\n')
                rf.write(con_head + ss + '\n')
        f.close()
        rf.close()
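
A hedged call sketch: net below stands for an already-constructed instance of the (not shown) class that defines gen_config_file, with its nodes, mmc, and enable attributes set up elsewhere.

# 'net' is hypothetical; only the method call itself is taken from the code above.
net.gen_config_file(os.path.join(info.get_output_path(), 'config.txt'), leaksign=0)
# config.txt receives the full packet stream; re_config.txt receives the
# register packets without the MMC memory writes.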