コード例 #1
0
ファイル: cff.py プロジェクト: tgebhart/pt_activation
    def compute_dynamic_filtration_no_inf(self,
                                          x,
                                          hiddens,
                                          percentile=0,
                                          return_nm=False):
        """Build a dionysus filtration induced by input ``x`` and the
        per-layer activations in ``hiddens``.

        Layer stack assumed by the indexing below: one conv layer (l=0)
        followed by two linear layers (l=1, 2) -- TODO confirm against
        ``self.params``.  Unlike the plain variant, every first-layer
        vertex is also coned off to a dummy root vertex (-1) at weight 0,
        so no homology class survives to infinity (the "no_inf" behavior).

        Args:
            x: single input sample tensor; first dimension indexes channels.
            hiddens: per-layer hidden activation tensors, indexed by layer.
            percentile: percentile used to derive per-layer pruning
                thresholds for low-weight simplices.
            return_nm: if True, also return the node-hash -> vertex-id map.

        Returns:
            The sorted filtration ``f``, or ``(f, nm)`` when ``return_nm``.
        """
        next_id = 0  # next dense vertex id handed out by _collect
        f = dion.Filtration()
        # Dummy root vertex; every layer-0 node is attached to it below.
        f.append(dion.Simplex([-1], 0))
        # nm maps spec_hash node keys -> dense vertex ids for dionysus.
        nm = {(-1, 0, 0): -1}
        params = self.params
        percentiles = np.zeros((len(params)))

        def _collect(res):
            """Fold (nodes, weight) enumerations into the filtration,
            assigning a fresh vertex id on first sight of a node hash."""
            nonlocal next_id
            for nodes, weight in res:
                if len(nodes) == 1:  # vertex
                    if nodes[0] not in nm:
                        nm[nodes[0]] = next_id
                        next_id += 1
                    f.append(dion.Simplex([nm[nodes[0]]], weight))
                    # Cone the vertex off to the root at weight 0 so its
                    # class dies instead of living forever.
                    f.append(dion.Simplex([nm[nodes[0]], -1], 0))
                elif len(nodes) == 2:  # edge
                    for node in nodes:
                        if node not in nm:
                            nm[node] = next_id
                            next_id += 1
                    f.append(dion.Simplex([nm[nodes[0]], nm[nodes[1]]],
                                          weight))

        # --- conv layer 0: one matrix-filtration per input channel ---
        x = x.cpu().detach().numpy()
        num_channels = x.shape[0]
        l = 0
        percentiles[l] = np.percentile(
            np.absolute(hiddens[l].cpu().detach().numpy()), percentile)
        hn = hiddens[l].cpu().detach().numpy()
        # Activations per output channel of the conv layer.
        nlc = hn.reshape((hn.shape[0], -1)).shape[1]
        stride = 1
        for c in range(num_channels):
            p = params[l].weight.data[:, c, :, :]
            mat = conv_layer_as_matrix(p, x[c], stride)
            m1, h0_births, h1_births = conv_filtration_fast2(
                x[c], mat, l, c, nlc, percentile=percentiles[l])
            # NOTE(review): thresholded against the raw `percentile` arg
            # here, not percentiles[l] as in later layers -- confirm intent.
            enums = [([spec_hash(
                (l + 1, i[0] // nlc, i[0] % nlc))], h1_births[i].item())
                     for i in np.argwhere(h1_births > percentile)]
            _collect(enums)

        # --- linear layer 1 ---
        h1 = hiddens[l].cpu().detach().numpy()
        l = 1
        percentiles[l] = np.percentile(
            np.absolute(hiddens[l].cpu().detach().numpy()), percentile)
        p = params[l]
        m1, h0_births, h1_births = linear_filtration_fast2(
            h1, p, l, 0, percentile=percentiles[l])
        # BUG FIX: start a fresh enumeration here.  The previous version
        # did `enums += m1` on the last conv channel's list, re-adding
        # simplices that _collect had already consumed in the loop above.
        enums = list(m1)
        # Threshold a node's birth against the smaller of the two adjacent
        # layers' percentiles.
        comp_percentile = min(percentiles[l - 1], percentiles[l])
        # NOTE(review): `c` is the stale loop index from the conv loop
        # (num_channels - 1); the sibling implementation hashes with 0
        # here -- confirm which is intended before changing.
        enums += [([spec_hash((l, c, i[0]))], h0_births[i])
                  for i in np.argwhere(h0_births > comp_percentile)]

        # --- linear layer 2 (output layer) ---
        h1 = hiddens[l].cpu().detach().numpy()
        l = 2
        percentiles[l] = np.percentile(
            np.absolute(hiddens[l].cpu().detach().numpy()), percentile)
        p = params[l]
        m1, h0_births, h1_births_2 = linear_filtration_fast2(
            h1, p, l, 0, percentile=percentiles[l])
        enums += m1

        # A layer-2 node's birth is the max of its birth as an output of
        # layer 1 (h1_births) and as an input of layer 2 (h0_births).
        max1 = np.maximum.reduce([h0_births, h1_births])
        comp_percentile = min(percentiles[l - 1], percentiles[l])
        enums += [([spec_hash((l, 0, i[0]))], max1[i])
                  for i in np.argwhere(max1 > comp_percentile)]
        enums += [([spec_hash((l + 1, 0, i[0]))], h1_births_2[i])
                  for i in np.argwhere(h1_births_2 > percentiles[l])]

        _collect(enums)

        print('filtration size', len(f))
        # Sort simplices by decreasing birth weight.
        f.sort(reverse=True)
        if return_nm:
            return f, nm
        return f
コード例 #2
0
ファイル: ccff.py プロジェクト: tgebhart/pt_activation
    def compute_dynamic_filtration2(self,
                                    x,
                                    hiddens,
                                    percentile=0,
                                    return_nm=False,
                                    absolute_value=True,
                                    input_layer=False):
        """Build a dionysus filtration induced by ``x`` and the activations.

        Layer stack assumed by the indexing below: conv (l=0), conv (l=1),
        linear (l=2), linear (l=3) -- TODO confirm against ``self.params``.

        Args:
            x: single input sample tensor; first dimension indexes channels.
            hiddens: per-layer hidden activation tensors, indexed by layer.
            percentile: percentile used to derive per-layer pruning
                thresholds (damped by 1/(l+1) for deeper layers; the two
                linear layers recompute theirs from weighted activations).
            return_nm: if True, also return the node-id map ``nm`` and the
                edge activation-weight map ``wm``.
            absolute_value: forwarded to the per-layer filtration helpers.
            input_layer: if True, also emit vertices for the raw input layer.

        Returns:
            The sorted filtration ``f``, or ``(f, nm, wm)`` when ``return_nm``.
        """
        id = 0  # next dense vertex id (shadows builtin `id`; pre-existing)
        f = dion.Filtration()
        nm = {}  # spec_hash node key -> dense vertex id
        wm = {}  # (node, node) edge -> raw activation weight
        params = self.params
        percentiles = np.zeros((len(params)))
        # Per-layer thresholds, damped by 1/(l+1) so deeper layers prune less.
        for l in range(len(params)):
            percentiles[l] = (1 / (l + 1)) * np.percentile(
                np.absolute(hiddens[l].cpu().detach().numpy()), percentile)

        def collect_result(res):
            # Fold (nodes, weights) enumerations into the filtration,
            # assigning a fresh vertex id on first sight of a node hash.
            nonlocal id
            nonlocal f
            nonlocal nm
            nonlocal wm
            for enum in res:
                nodes = enum[0]
                weight = enum[1][0]  # filtration (birth) weight
                if len(nodes) == 1:  # vertex
                    if nodes[0] not in nm:
                        nm[nodes[0]] = id
                        id += 1
                        f.append(dion.Simplex([nm[nodes[0]]], weight))
                    else:
                        f.append(dion.Simplex([nm[nodes[0]]], weight))
                if len(nodes) == 2:  # edge
                    act_weight = enum[1][1]  # raw activation, recorded in wm
                    if nodes[0] not in nm:
                        nm[nodes[0]] = id
                        id += 1
                    if nodes[1] not in nm:
                        nm[nodes[1]] = id
                        id += 1
                    wm[(nodes[0], nodes[1])] = act_weight
                    f.append(dion.Simplex([nm[nodes[0]], nm[nodes[1]]],
                                          weight))

        # --- conv layer 0: one matrix-filtration per input channel ---
        x = x.cpu().detach().numpy()
        num_channels = x.shape[0]
        l = 0
        hn = hiddens[l].cpu().detach().numpy()
        nlc = hn.shape[0]  # number of output channels
        nls = hn.shape[1] * hn.shape[2]  # spatial size per channel
        stride = 1
        enums = []
        for c in range(num_channels):
            p = params[l].weight.data[:, c, :, :]
            mat = conv_layer_as_matrix(p, x[c], stride)
            m1, h0_births, h1_births = conv_filtration_fast2(
                x[c],
                mat,
                l,
                c,
                nlc,
                nls,
                percentile=percentiles[l],
                absolute_value=absolute_value)
            if input_layer:
                enums += m1
                enums += [([spec_hash((l, c, i[0]))], [h0_births[i].item()])
                          for i in np.argwhere(h0_births > percentiles[l])]
            # NOTE(review): the flat index is split as (// nlc, % nls),
            # i.e. divide by channel count but mod by spatial size --
            # confirm this matches the flattened layout of h1_births.
            enums += [([spec_hash(
                (l + 1, i[0] // nlc, i[0] % nls))], [h1_births[i].item()])
                      for i in np.argwhere(h1_births > percentiles[l])]
        collect_result(enums)

        # --- conv layer 1 ---
        h1 = hiddens[l].cpu().detach().numpy()
        num_channels = h1.shape[0]
        l = 1
        hn = hiddens[l].cpu().detach().numpy()
        nlc = hn.reshape((hn.shape[0], -1)).shape[1]  # activations per channel
        nls = hn.shape[0]  # number of output channels
        stride = 1
        enums = []
        for c in range(num_channels):
            p = params[l].weight.data[:, c, :, :]
            mat = conv_layer_as_matrix(p, h1[c], stride)
            m1, h0_births, h1_births = conv_filtration_fast2(
                h1[c],
                mat,
                l,
                c,
                nlc,
                nls,
                percentile=percentiles[l],
                absolute_value=absolute_value)
            enums += m1
            # Threshold a node's birth against the smaller of the two
            # adjacent layers' percentiles.
            comp_percentile = percentiles[
                l -
                1] if percentiles[l - 1] < percentiles[l] else percentiles[l]
            enums += [([spec_hash((l, c, i[0]))], [h0_births[i].item()])
                      for i in np.argwhere(h0_births > comp_percentile)]
            enums += [([spec_hash((l + 1, 0, i[0]))], [h1_births[i].item()])
                      for i in np.argwhere(h1_births > percentiles[l])]
        collect_result(enums)

        # --- linear layer 2 (threshold recomputed on weighted activations) ---
        h1 = hiddens[l].cpu().detach().numpy()
        l = 2
        p = params[l]
        percentiles[l] = np.percentile(
            np.absolute(h1 * p.weight.data.cpu().detach().numpy()), percentile)
        m1, h0_births, h1_births = linear_filtration_fast2(
            h1,
            p,
            l,
            0,
            percentile=percentiles[l],
            absolute_value=absolute_value)
        enums = m1
        comp_percentile = percentiles[
            l - 1] if percentiles[l - 1] < percentiles[l] else percentiles[l]
        enums += [([spec_hash((l, 0, i[0]))], [h0_births[i]])
                  for i in np.argwhere(h0_births > comp_percentile)]

        # --- linear layer 3 (output layer) ---
        h1 = hiddens[l].cpu().detach().numpy()
        l = 3
        p = params[l]
        percentiles[l] = np.percentile(
            np.absolute(h1 * p.weight.data.cpu().detach().numpy()), percentile)
        m1, h0_births, h1_births_2 = linear_filtration_fast2(
            h1,
            p,
            l,
            0,
            percentile=percentiles[l],
            absolute_value=absolute_value)
        enums += m1

        # A layer-3 node's birth is the max of its birth as an output of
        # layer 2 (h1_births) and as an input of layer 3 (h0_births).
        max1 = np.maximum.reduce([h0_births, h1_births])
        comp_percentile = percentiles[
            l - 1] if percentiles[l - 1] < percentiles[l] else percentiles[l]
        enums += [([spec_hash((l, 0, i[0]))], [max1[i]])
                  for i in np.argwhere(max1 > comp_percentile)]
        enums += [([spec_hash((l + 1, 0, i[0]))], [h1_births_2[i]])
                  for i in np.argwhere(h1_births_2 > percentiles[l])]

        collect_result(enums)

        print('filtration size', len(f))

        # Sort simplices by decreasing birth weight.
        f.sort(reverse=True)
        if return_nm:
            return f, nm, wm
        else:
            return f
コード例 #3
0
ファイル: alexnet.py プロジェクト: tgebhart/pt_activation
def compute_induced_filtration_parallel(x,
                                        hiddens,
                                        params,
                                        percentile=0,
                                        stride=1,
                                        return_nm=True,
                                        absolute_value=True):
    """Build the activation-induced filtration for an AlexNet-style stack.

    Layers 0-7 (convs and max-pools) are enumerated in parallel: each input
    channel is dispatched to a worker process (``first_layer``, ``mid_conv``,
    ``max_pool_layer``, ``last_pool`` -- defined elsewhere in this module),
    and ``collect_result`` folds worker output into the module-global
    filtration state in the parent process.  Layers 8-10 (linear) run
    serially at the end.

    NOTE(review): state is passed via module globals (``id``, ``nm``,
    ``wm``, ``f``) because ``apply_async`` callbacks cannot close over
    local state here; workers must be picklable top-level functions.
    NOTE(review): ``absolute_value`` is accepted but never forwarded to
    any worker -- presumably a leftover; confirm.

    Args:
        x: single input sample tensor; first dimension indexes channels.
        hiddens: per-layer hidden activation tensors, indexed by layer 0-10.
        params: per-layer modules/weights, indexed the same way.
        percentile: percentile used to derive per-layer pruning thresholds.
        stride: conv stride forwarded to the conv workers.
        return_nm: if True, also return the node-id and edge-weight maps.
        absolute_value: see NOTE above.

    Returns:
        The sorted filtration ``f``, or ``(f, nm, wm)`` when ``return_nm``.
    """

    pool = mp.Pool(mp.cpu_count())
    print('cpu count: {}'.format(mp.cpu_count()))

    # Globals shared with the collect_result callback (runs in this
    # process after each worker finishes).
    global nm
    global id
    global f
    global wm

    id = 0  # next dense vertex id
    nm = {}  # spec_hash node key -> dense vertex id
    wm = {}  # (node, node) edge -> raw activation weight
    f = dion.Filtration()

    # Per-layer thresholds, damped by 1/(l+1) for deeper layers; several
    # are overwritten below for pool/linear layers.
    percentiles = np.zeros((len(params)))
    for l in range(len(params)):
        percentiles[l] = (1 / (l + 1)) * np.percentile(
            np.absolute(hiddens[l].cpu().detach().numpy()), percentile)

    # --- layer 0: first conv, one worker per input channel ---
    x = x.cpu().detach().numpy()
    num_channels = x.shape[0]
    l = 0
    print('layer: {}'.format(l))
    # percentiles[l] = np.percentile(np.absolute(hiddens[l].cpu().detach().numpy()), percentile)
    hn = hiddens[l].cpu().detach().numpy()
    nlc = hn.reshape((hn.shape[0], -1)).shape[1]  # activations per channel
    nls = hn.shape[1] * hn.shape[2]  # spatial size per channel
    for c in range(num_channels):
        p = params[l].weight.data[:, c, :, :]
        r = pool.apply_async(first_layer,
                             args=(x[c], p, l, c, percentiles[l], stride, nlc,
                                   nls),
                             callback=collect_result)
    pool.close()
    pool.join()

    # --- layer 1: max-pool (threshold recomputed on pre-pool activations) ---
    h = hiddens[l].cpu().detach().numpy()
    num_channels = h.shape[0]
    l = 1
    percentiles[l] = np.percentile(np.absolute(h), percentile)
    hn = hiddens[l].cpu().detach().numpy()
    # percentiles[l] = np.percentile(np.absolute(hiddens[l].cpu().detach().numpy()), percentile)
    print('layer: {}'.format(l))
    # A fresh pool per layer: the previous one was closed and joined so
    # all of layer l-1 is collected before layer l starts.
    pool = mp.Pool(mp.cpu_count())
    for c in range(num_channels):
        h1 = h[c, :, :]
        hn2 = hn[c, :, :]
        p = params[l]
        r = pool.apply_async(max_pool_layer,
                             args=(h1, p, l, c, percentiles, hn2),
                             callback=collect_result)
    pool.close()
    pool.join()

    # --- layer 2: conv ---
    h = hiddens[l].cpu().detach().numpy()
    num_channels = h.shape[0]
    l = 2
    # percentiles[l] = np.percentile(np.absolute(hiddens[l].cpu().detach().numpy()), percentile)
    hn = hiddens[l].cpu().detach().numpy()
    nlc = hn.reshape((hn.shape[0], -1)).shape[1]
    nls = hn.shape[1] * hn.shape[2]
    print('layer: {}'.format(l))
    pool = mp.Pool(mp.cpu_count())
    for c in range(num_channels):
        p = params[l].weight.data[:, c, :, :]
        h1 = h[c, :, :]
        r = pool.apply_async(mid_conv,
                             args=(h1, p, l, c, percentiles, stride, nlc, nls),
                             callback=collect_result)
    pool.close()
    pool.join()

    # --- layer 3: max-pool ---
    h = hiddens[l].cpu().detach().numpy()
    num_channels = h.shape[0]
    l = 3
    hn = hiddens[l].cpu().detach().numpy()
    percentiles[l] = np.percentile(np.absolute(h), percentile)
    # percentiles[l] = np.percentile(np.absolute(hiddens[l].cpu().detach().numpy()), percentile)
    print('layer: {}'.format(l))
    pool = mp.Pool(mp.cpu_count())
    for c in range(num_channels):
        h1 = h[c, :, :]
        hn2 = hn[c, :, :]
        p = params[l]
        r = pool.apply_async(max_pool_layer,
                             args=(h1, p, l, c, percentiles, hn2),
                             callback=collect_result)
    pool.close()
    pool.join()

    # --- layer 4: conv ---
    h = hiddens[l].cpu().detach().numpy()
    num_channels = h.shape[0]
    l = 4
    # percentiles[l] = np.percentile(np.absolute(hiddens[l].cpu().detach().numpy()), percentile)
    hn = hiddens[l].cpu().detach().numpy()
    nlc = hn.reshape((hn.shape[0], -1)).shape[1]
    nls = hn.shape[1] * hn.shape[2]
    print('layer: {}'.format(l))
    pool = mp.Pool(mp.cpu_count())
    for c in range(num_channels):
        p = params[l].weight.data[:, c, :, :]
        h1 = h[c, :, :]
        r = pool.apply_async(mid_conv,
                             args=(h1, p, l, c, percentiles, stride, nlc, nls),
                             callback=collect_result)
    pool.close()
    pool.join()

    # --- layer 5: conv ---
    h = hiddens[l].cpu().detach().numpy()
    num_channels = h.shape[0]
    l = 5
    # percentiles[l] = np.percentile(np.absolute(hiddens[l].cpu().detach().numpy()), percentile)
    hn = hiddens[l].cpu().detach().numpy()
    nlc = hn.reshape((hn.shape[0], -1)).shape[1]
    nls = hn.shape[1] * hn.shape[2]
    print('layer: {}'.format(l))
    pool = mp.Pool(mp.cpu_count())
    for c in range(num_channels):
        p = params[l].weight.data[:, c, :, :]
        h1 = h[c, :, :]
        r = pool.apply_async(mid_conv,
                             args=(h1, p, l, c, percentiles, stride, nlc, nls),
                             callback=collect_result)
    pool.close()
    pool.join()

    # --- layer 6: conv ---
    h = hiddens[l].cpu().detach().numpy()
    num_channels = h.shape[0]
    l = 6
    # percentiles[l] = np.percentile(np.absolute(hiddens[l].cpu().detach().numpy()), percentile)
    hn = hiddens[l].cpu().detach().numpy()
    nlc = hn.reshape((hn.shape[0], -1)).shape[1]
    nls = hn.shape[1] * hn.shape[2]
    print('layer: {}'.format(l))
    pool = mp.Pool(mp.cpu_count())
    for c in range(num_channels):
        p = params[l].weight.data[:, c, :, :]
        h1 = h[c, :, :]
        r = pool.apply_async(mid_conv,
                             args=(h1, p, l, c, percentiles, stride, nlc, nls),
                             callback=collect_result)
    pool.close()
    pool.join()

    # --- layer 7: final max-pool before the linear stack ---
    h = hiddens[l].cpu().detach().numpy()
    num_channels = h.shape[0]
    l = 7
    percentiles[l] = np.percentile(np.absolute(h), percentile)
    # percentiles[l] = np.percentile(np.absolute(hiddens[l].cpu().detach().numpy()), percentile)
    print('layer: {}'.format(l))
    pool = mp.Pool(mp.cpu_count())
    for c in range(num_channels):
        h1 = h[c, :, :]
        p = params[l]
        r = pool.apply_async(last_pool,
                             args=(h1, p, l, c, percentiles),
                             callback=collect_result)
    pool.close()
    pool.join()

    # --- layers 8-10: linear layers, computed serially in this process ---
    # Seed the enumeration with the flattened pool output's vertices.
    h1 = hiddens[l].cpu().detach().numpy()
    enums = [([spec_hash((l + 1, 0, i[0]))], [h1[i]])
             for i in np.argwhere(h1 > percentiles[l])]
    l = 8
    print('layer: {}'.format(l))
    p = params[l]
    # Linear-layer thresholds are recomputed on weighted activations.
    percentiles[l] = np.percentile(
        np.absolute(h1 * p.weight.data.cpu().detach().numpy()), percentile)
    m1, h0_births, h1_births = linear_filtration_fast2(
        h1, p, l, 0, percentile=percentiles[l])
    enums += m1
    # Threshold node births against the smaller of the two adjacent
    # layers' percentiles.
    comp_percentile = percentiles[
        l - 1] if percentiles[l - 1] < percentiles[l] else percentiles[l]
    enums += [([spec_hash((l, 0, i[0]))], [h0_births[i]])
              for i in np.argwhere(h0_births > comp_percentile)]

    h1 = hiddens[l].cpu().detach().numpy()
    l = 9
    # percentiles[l] = np.percentile(np.absolute(hiddens[l].cpu().detach().numpy()), percentile)
    print('layer: {}'.format(l))
    p = params[l]
    percentiles[l] = np.percentile(
        np.absolute(h1 * p.weight.data.cpu().detach().numpy()), percentile)
    m1, h0_births, h1_births_9 = linear_filtration_fast2(
        h1, p, l, 0, percentile=percentiles[l])
    enums += m1

    # A layer-9 node's birth is the max of its birth as an output of
    # layer 8 (h1_births) and as an input of layer 9 (h0_births).
    max1 = np.maximum.reduce([h0_births, h1_births])
    comp_percentile = percentiles[
        l - 1] if percentiles[l - 1] < percentiles[l] else percentiles[l]
    enums += [([spec_hash((l, 0, i[0]))], [max1[i]])
              for i in np.argwhere(max1 > comp_percentile)]

    h1 = hiddens[l].cpu().detach().numpy()
    l = 10
    print('layer: {}'.format(l))
    # percentiles[l] = np.percentile(np.absolute(hiddens[l].cpu().detach().numpy()), percentile)
    p = params[l]
    percentiles[l] = np.percentile(
        np.absolute(h1 * p.weight.data.cpu().detach().numpy()), percentile)
    m1, h0_births, h1_births_10 = linear_filtration_fast2(
        h1, p, l, 0, percentile=percentiles[l])
    enums += m1

    # Same max rule for layer 10, plus output-layer (l+1) vertices.
    max1 = np.maximum.reduce([h0_births, h1_births_9])
    comp_percentile = percentiles[
        l - 1] if percentiles[l - 1] < percentiles[l] else percentiles[l]
    enums += [([spec_hash((l, 0, i[0]))], [max1[i]])
              for i in np.argwhere(max1 > comp_percentile)]
    enums += [([spec_hash((l + 1, 0, i[0]))], [h1_births_10[i]])
              for i in np.argwhere(h1_births_10 > percentiles[l])]

    # Fold the serial linear-layer enumeration into the global filtration.
    collect_result(enums)

    print('percentiles:', percentiles)

    # with open('quick_dump.txt', 'w') as fp:
    #     for k, v in nm.items():
    #         fp.write('{}, {}\n'.format(k,v))

    print('creating filtration object...')
    print('filtration size', len(f))
    print('Sorting filtration...')
    # Sort simplices by decreasing birth weight.
    f.sort(reverse=True)
    if return_nm:
        return f, nm, wm
    else:
        return f