Example #1
0
def last_pool(h1, p, l, c, percentiles):
    """Build filtration entries for the final max-pooling layer of channel c.

    Delegates to ``max_pooling_filtration`` and then appends extra entries
    for the h0/h1 births that exceed the layer thresholds.
    """
    entries, h0_births, h1_births = max_pooling_filtration(
        h1, p, l, c, percentile=percentiles[l])
    # h0 births are compared against the smaller of this layer's and the
    # previous layer's thresholds.
    comp_percentile = min(percentiles[l - 1], percentiles[l])
    for idx in np.argwhere(h0_births > comp_percentile):
        entries.append(([spec_hash((l, c, idx[0] % c))],
                        h0_births[idx].item()))
    # h1 births index into the next layer, offset by this channel's span.
    offset = c * h1_births.shape[0]
    for idx in np.argwhere(h1_births > percentiles[l]):
        entries.append(([spec_hash((l + 1, 0, idx[0] + offset))],
                        h1_births[idx].item()))
    return entries
Example #2
0
def first_layer(x, p, l, c, percentile, stride, nlc):
    """Build filtration entries for the first conv layer on input channel c.

    The conv kernel is expanded into a matrix and passed through
    ``conv_filtration_fast2``; births above ``percentile`` yield extra
    entries for this layer (h0) and the next layer (h1).
    """
    weight_mat = conv_layer_as_matrix(p, x, stride)
    entries, h0_births, h1_births = conv_filtration_fast2(
        x, weight_mat, l, c, nlc, percentile=percentile)
    for idx in np.argwhere(h0_births > percentile):
        entries.append(([spec_hash((l, c, idx[0]))],
                        h0_births[idx].item()))
    for idx in np.argwhere(h1_births > percentile):
        # Flat h1 index decomposes into (channel, position) of the next layer.
        flat = idx[0]
        entries.append(([spec_hash((l + 1, flat // nlc, flat % nlc))],
                        h1_births[idx].item()))
    return entries
Example #3
0
def mid_conv(h1, p, l, c, percentiles, stride, nlc):
    """Build filtration entries for an intermediate conv layer, channel c.

    Same flow as the first layer, except h0 births are thresholded with
    the smaller of this layer's and the previous layer's percentiles.
    """
    weight_mat = conv_layer_as_matrix(p, h1, stride)
    entries, h0_births, h1_births = conv_filtration_fast2(
        h1, weight_mat, l, c, nlc, percentile=percentiles[l])
    comp_percentile = min(percentiles[l - 1], percentiles[l])
    for idx in np.argwhere(h0_births > comp_percentile):
        entries.append(([spec_hash((l, c, idx[0]))],
                        h0_births[idx].item()))
    for idx in np.argwhere(h1_births > percentiles[l]):
        # Flat h1 index decomposes into (channel, position) of the next layer.
        flat = idx[0]
        entries.append(([spec_hash((l + 1, flat // nlc, flat % nlc))],
                        h1_births[idx].item()))
    return entries
Example #4
0
def compute_induced_filtration_parallel(x,
                                        hiddens,
                                        params,
                                        percentile=0,
                                        stride=1):
    """Compute the induced filtration of a fixed 11-layer network in parallel.

    Conv and max-pool layers (0-7) are fanned out across a process pool,
    one task per channel; results are gathered through the module-global
    ``collect_result`` callback. The final linear layers (8-10) are
    processed serially in this process.

    Args:
        x: Input tensor for layer 0 (converted to a numpy array here).
        hiddens: Per-layer hidden activation tensors; indices 0-10 are read.
        params: Per-layer parameters (conv weight modules or pooling params);
            ``len(params)`` determines how many per-layer thresholds exist.
        percentile: Percentile (0-100) of ``|activations|`` used as the
            birth threshold for each layer.
        stride: Convolution stride forwarded to the conv workers.

    Returns:
        Tuple ``(f, nm)``: the filtration (sorted in descending order) and
        the name map, both module-level globals populated via
        ``collect_result``.
    """
    pool = mp.Pool(mp.cpu_count())
    print('cpu count: {}'.format(mp.cpu_count()))

    # NOTE(review): state is accumulated in module-level globals mutated by
    # collect_result; `id` shadows the builtin of the same name.
    global enums
    global nm
    global id
    global f

    percentiles = np.zeros((len(params)))

    # ---- layer 0: first convolution, one task per input channel ----
    x = x.cpu().detach().numpy()
    num_channels = x.shape[0]
    l = 0
    print('layer: {}'.format(l))
    percentiles[l] = np.percentile(
        np.absolute(hiddens[l].cpu().detach().numpy()), percentile)
    hn = hiddens[l].cpu().detach().numpy()
    # nlc = flattened size of one channel of this layer's output.
    nlc = hn.reshape((hn.shape[0], -1)).shape[1]
    for c in range(num_channels):
        p = params[l].weight.data[:, c, :, :]
        r = pool.apply_async(first_layer,
                             args=(x[c], p, l, c, percentiles[l], stride, nlc),
                             callback=collect_result)
    pool.close()
    pool.join()

    # ---- layer 1: max pooling ----
    h = hiddens[l].cpu().detach().numpy()
    num_channels = h.shape[0]
    l = 1
    percentiles[l] = np.percentile(
        np.absolute(hiddens[l].cpu().detach().numpy()), percentile)
    print('layer: {}'.format(l))
    pool = mp.Pool(mp.cpu_count())
    for c in range(num_channels):
        h1 = h[c, :, :]
        p = params[l]
        r = pool.apply_async(max_pool_layer,
                             args=(h1, p, l, c, percentiles),
                             callback=collect_result)
    pool.close()
    pool.join()

    # ---- layer 2: convolution ----
    h = hiddens[l].cpu().detach().numpy()
    num_channels = h.shape[0]
    l = 2
    percentiles[l] = np.percentile(
        np.absolute(hiddens[l].cpu().detach().numpy()), percentile)
    hn = hiddens[l].cpu().detach().numpy()
    nlc = hn.reshape((hn.shape[0], -1)).shape[1]
    print('layer: {}'.format(l))
    pool = mp.Pool(mp.cpu_count())
    for c in range(num_channels):
        p = params[l].weight.data[:, c, :, :]
        h1 = h[c, :, :]
        r = pool.apply_async(mid_conv,
                             args=(h1, p, l, c, percentiles, stride, nlc),
                             callback=collect_result)
    pool.close()
    pool.join()

    # ---- layer 3: max pooling ----
    h = hiddens[l].cpu().detach().numpy()
    num_channels = h.shape[0]
    l = 3
    percentiles[l] = np.percentile(
        np.absolute(hiddens[l].cpu().detach().numpy()), percentile)
    print('layer: {}'.format(l))
    pool = mp.Pool(mp.cpu_count())
    for c in range(num_channels):
        h1 = h[c, :, :]
        p = params[l]
        r = pool.apply_async(max_pool_layer,
                             args=(h1, p, l, c, percentiles),
                             callback=collect_result)
    pool.close()
    pool.join()

    # ---- layer 4: convolution ----
    h = hiddens[l].cpu().detach().numpy()
    num_channels = h.shape[0]
    l = 4
    percentiles[l] = np.percentile(
        np.absolute(hiddens[l].cpu().detach().numpy()), percentile)
    hn = hiddens[l].cpu().detach().numpy()
    nlc = hn.reshape((hn.shape[0], -1)).shape[1]
    print('layer: {}'.format(l))
    pool = mp.Pool(mp.cpu_count())
    for c in range(num_channels):
        p = params[l].weight.data[:, c, :, :]
        h1 = h[c, :, :]
        r = pool.apply_async(mid_conv,
                             args=(h1, p, l, c, percentiles, stride, nlc),
                             callback=collect_result)
    pool.close()
    pool.join()

    # ---- layer 5: convolution ----
    h = hiddens[l].cpu().detach().numpy()
    num_channels = h.shape[0]
    l = 5
    percentiles[l] = np.percentile(
        np.absolute(hiddens[l].cpu().detach().numpy()), percentile)
    hn = hiddens[l].cpu().detach().numpy()
    nlc = hn.reshape((hn.shape[0], -1)).shape[1]
    print('layer: {}'.format(l))
    pool = mp.Pool(mp.cpu_count())
    for c in range(num_channels):
        p = params[l].weight.data[:, c, :, :]
        h1 = h[c, :, :]
        r = pool.apply_async(mid_conv,
                             args=(h1, p, l, c, percentiles, stride, nlc),
                             callback=collect_result)
    pool.close()
    pool.join()

    # ---- layer 6: convolution ----
    h = hiddens[l].cpu().detach().numpy()
    num_channels = h.shape[0]
    l = 6
    percentiles[l] = np.percentile(
        np.absolute(hiddens[l].cpu().detach().numpy()), percentile)
    hn = hiddens[l].cpu().detach().numpy()
    nlc = hn.reshape((hn.shape[0], -1)).shape[1]
    print('layer: {}'.format(l))
    pool = mp.Pool(mp.cpu_count())
    for c in range(num_channels):
        p = params[l].weight.data[:, c, :, :]
        h1 = h[c, :, :]
        r = pool.apply_async(mid_conv,
                             args=(h1, p, l, c, percentiles, stride, nlc),
                             callback=collect_result)
    pool.close()
    pool.join()

    # ---- layer 7: final max pooling ----
    h = hiddens[l].cpu().detach().numpy()
    num_channels = h.shape[0]
    l = 7
    percentiles[l] = np.percentile(
        np.absolute(hiddens[l].cpu().detach().numpy()), percentile)
    print('layer: {}'.format(l))
    pool = mp.Pool(mp.cpu_count())
    for c in range(num_channels):
        h1 = h[c, :, :]
        p = params[l]
        r = pool.apply_async(last_pool,
                             args=(h1, p, l, c, percentiles),
                             callback=collect_result)
    pool.close()
    pool.join()

    # ---- layer 8: first linear layer (serial) ----
    h1 = hiddens[l].cpu().detach().numpy()
    l = 8
    print('layer: {}'.format(l))
    percentiles[l] = np.percentile(
        np.absolute(hiddens[l].cpu().detach().numpy()), percentile)
    p = params[l]
    m1, h0_births, h1_births = linear_filtration_fast2(
        h1, p, l, 0, percentile=percentiles[l])
    enums += m1
    comp_percentile = percentiles[
        l - 1] if percentiles[l - 1] < percentiles[l] else percentiles[l]
    enums += [([spec_hash((l, c, i[0]))], h0_births[i])
              for i in np.argwhere(h0_births > comp_percentile)]

    # ---- layer 9: second linear layer ----
    h1 = hiddens[l].cpu().detach().numpy()
    l = 9
    percentiles[l] = np.percentile(
        np.absolute(hiddens[l].cpu().detach().numpy()), percentile)
    print('layer: {}'.format(l))
    p = params[l]
    m1, h0_births, h1_births_9 = linear_filtration_fast2(
        h1, p, l, 0, percentile=percentiles[l])
    enums += m1

    # BUG FIX: was `h1_bireths` (undefined name -> NameError). Mirrors the
    # layer-10 block below: combine this layer's h0 births with the previous
    # layer's h1 births (layer 8's `h1_births`).
    max1 = np.maximum.reduce([h0_births, h1_births])
    comp_percentile = percentiles[
        l - 1] if percentiles[l - 1] < percentiles[l] else percentiles[l]
    enums += [([spec_hash((l, 0, i[0]))], max1[i])
              for i in np.argwhere(max1 > comp_percentile)]

    # ---- layer 10: final linear layer ----
    h1 = hiddens[l].cpu().detach().numpy()
    l = 10
    print('layer: {}'.format(l))
    percentiles[l] = np.percentile(
        np.absolute(hiddens[l].cpu().detach().numpy()), percentile)
    p = params[l]
    m1, h0_births, h1_births_10 = linear_filtration_fast2(
        h1, p, l, 0, percentile=percentiles[l])
    enums += m1

    max1 = np.maximum.reduce([h0_births, h1_births_9])
    comp_percentile = percentiles[
        l - 1] if percentiles[l - 1] < percentiles[l] else percentiles[l]
    enums += [([spec_hash((l, 0, i[0]))], max1[i])
              for i in np.argwhere(max1 > comp_percentile)]
    enums += [([spec_hash((l + 1, 0, i[0]))], h1_births_10[i])
              for i in np.argwhere(h1_births_10 > percentiles[l])]

    collect_result(enums)

    # with open('quick_dump.txt', 'w') as fp:
    #     for k, v in nm.items():
    #         fp.write('{}, {}\n'.format(k,v))

    print('creating filtration object...')
    # f = dion.Filtration(enums)
    print('filtration size', len(f))
    print('Sorting filtration...')
    f.sort(reverse=True)

    return f, nm