import csv
import itertools
import math
import random
import time

import matplotlib.pyplot as plt
import numpy as np

import pyscan  # scan-statistics extension module exercised by these examples
import utils   # project helper module providing disc_to_func


def run_power_test(actual_mx, disc, red, blue, n, s, annuli, count,
                   two_level_sample):

    r_count = len(red)
    power_count = 0
    for _ in range(count):
        # Permutation test: pool the points, reshuffle, and re-split them into
        # groups of the original sizes to simulate the null hypothesis.
        pts = red + blue
        random.shuffle(pts)
        new_red = pts[:r_count]
        new_blue = pts[r_count:]
        m_sample = pyscan.my_sample(new_red, s)
        b_sample = pyscan.my_sample(new_blue, s)

        if two_level_sample:
            net_set1 = pyscan.my_sample(m_sample, n)
            net_set2 = pyscan.my_sample(b_sample, n)
        else:
            net_set1 = m_sample
            net_set2 = b_sample
            n = s

        net_set = net_set1 + net_set2
        m_sample = pyscan.to_weighted(m_sample)
        b_sample = pyscan.to_weighted(b_sample)

        reg, mx = pyscan.max_annuli_scale(net_set, m_sample, b_sample, annuli,
                                          disc)
        if mx > actual_mx:
            power_count += 1
    return power_count / count
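
# A usage sketch (illustrative; `red`/`blue`, the observed score `actual_mx`,
# the sizes `n`/`s`, and the `annuli` argument of pyscan.max_annuli_scale are
# assumed to come from one of the frameworks below):
#
#     disc = utils.disc_to_func("disc")
#     power = run_power_test(actual_mx, disc, red, blue, n, s, annuli,
#                            count=200, two_level_sample=True)
#     print("estimated power:", power)
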
def testing_flux_framework(output_file,
                           red,
                           blue,
                           l_s,
                           h_s,
                           count,
                           region_name="disk",
                           two_level_sample=True,
                           ham_sample=False,
                           max_time=None):

    fieldnames = [
        "disc", "region", "n", "s", "time", "m_disc", "m_disc_approx"
    ]
    with open(output_file, 'w') as f:
        writer = csv.DictWriter(f, fieldnames=fieldnames)
        writer.writeheader()

        for i in np.logspace(l_s, h_s, count):

            eps = i
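            # The net size n scales like 1/eps and the sample size s like
            # 1/(2 eps^2); the + .1 below guards against floating-point
            # round-down when truncating to int.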
            n = 1 / eps
            s = 1 / (2 * eps * eps)
            n = int(round(n) + .1)
            s = int(round(s) + .1)

            disc = utils.disc_to_func("disc")
            start_time = time.time()

            m_sample = [
                pyscan.WPoint(1.0, p[0], p[1], 1.0)
                for p in pyscan.my_sample(red, s)
            ]
            b_sample = [
                pyscan.WPoint(1.0, p[0], p[1], 1.0)
                for p in pyscan.my_sample(blue, s)
            ]

            if two_level_sample:
                net_set1 = pyscan.my_sample(m_sample, n)
                net_set2 = pyscan.my_sample(b_sample, n)
                if ham_sample:
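                    # Ham-tree (halving) sampling uses a smaller sample,
                    # ~ 1/(2 eps^(4/3)) * log(1/eps)^(2/3), instead of the
                    # 1/(2 eps^2) size used above.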
                    s = int(1 / (2 * eps**(4.0 / 3)) *
                            math.log(1 / eps)**(2 / 3.0))

                    m_sample = pyscan.ham_tree_sample(m_sample, s)
                    b_sample = pyscan.ham_tree_sample(b_sample, s)
            else:
                net_set1 = [pyscan.Point(p[0], p[1], p[2]) for p in m_sample]
                net_set2 = [pyscan.Point(p[0], p[1], p[2]) for p in b_sample]
                n = s

            net_set1 = [pyscan.Point(p[0], p[1], p[2]) for p in net_set1]
            net_set2 = [pyscan.Point(p[0], p[1], p[2]) for p in net_set2]
            net_set = net_set1 + net_set2

            if region_name == "halfplane":
                reg, mx = pyscan.max_halfplane(net_set, m_sample, b_sample,
                                               disc)
            elif region_name == "disk":
                reg, mx = pyscan.max_disk(net_set, m_sample, b_sample, disc)
            elif region_name == "rectangle":
                grid = pyscan.Grid(n, m_sample, b_sample)
                s1 = pyscan.max_subgrid_linear(grid, -1.0, 1.0)
                s2 = pyscan.max_subgrid_linear(grid, 1.0, -1.0)
                if s1.fValue() > s2.fValue():
                    reg = grid.toRectangle(s1)
                    mx = s1.fValue()
                else:
                    reg = grid.toRectangle(s2)
                    mx = s2.fValue()
            else:
                return
            end_time = time.time()

            st = time.time()
            actual_mx = pyscan.evaluate_range(reg, red, blue, disc)
            et = time.time()
            print("Time to evaluate region {}".format(et - st))

            row = {
                "disc": "disc",
                "region": region_name,
                "n": n,
                "s": s,
                "time": end_time - start_time,
                "m_disc_approx": mx,
                "m_disc": actual_mx
            }
            writer.writerow(row)
            f.flush()
            print(row)
            if max_time is not None and end_time - start_time > max_time:
                return
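
# Example invocation (illustrative values; `red`/`blue` would be point sets
# prepared elsewhere). l_s, h_s and count feed np.logspace, so this sweeps eps
# over [1e-3, 1e-1]:
#
#     testing_flux_framework("flux_disk.csv", red, blue, l_s=-3, h_s=-1,
#                            count=20, region_name="disk",
#                            two_level_sample=True)
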
def testing_full_framework(
        trajectories,
        l_s, h_s, count,
        vparam="eps",
        eps=.01,
        r=.04,
        p=0.5,
        q=.2,
        alpha=.01,
        planted_points=None,
        actual_mx=None,
        max_disk_r=.1,
        min_disk_r=.05,
        disc_name="disc",
        input_size=10000):

    """
    How do I convert the trajectories over?
    1) Just sample evenly from the length.
    2) Choose points evenly
    3) Choose
    :param trajectories:
    :param l_s:
    :param h_s:
    :param count:
    :param vparam:
    :param eps:
    :param eps_r:
    :param r:
    :param q:
    :param disc_name:
    :param region_name:
    :param input_size:
    :return:
    """
    output_file = "{0}_multi_disk_{1:.2f}_{2:.2f}_full_discrepancy.csv".format(disc_name, min_disk_r, max_disk_r)

    fieldnames = ["vparam", "input_size", "region", "disc", "n", "s", "r", "p", "q", "alpha", "time",
                  "m_disc",
                  "m_disc_approx"]
    with open(output_file, 'w') as f:
        writer = csv.DictWriter(f, fieldnames=fieldnames)
        writer.writeheader()

        for i in np.logspace(l_s, h_s, count):

            if vparam == "eps":
                eps = i
            elif vparam == "r":
                r = i
            elif vparam == "q":
                q = i
            n = 1 / eps
            s = 1 / (2 * eps * eps)
            n = int(round(n) + .1)
            s = int(round(s) + .1)

            disc = utils.disc_to_func(disc_name)


            st = time.time()
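            # Plant a disk-shaped region of radius r (its contrast against the
            # background is controlled by p and q) unless pre-planted red/blue
            # sets were passed in.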
            if planted_points is None:
                red, blue, _, actual_mx = pyscan.plant_full_disk(trajectories, r, p, q, disc)
            else:
                red, blue = planted_points

            red_sample = pyscan.my_sample(red, s)
            blue_sample = pyscan.my_sample(blue, s)
            red_net = pyscan.my_sample(red, n)
            blue_net = pyscan.my_sample(blue, n)
            net = red_net + blue_net

            et = time.time()
            print("Time to plant region {}".format(et - st))

            start_time = time.time()
            reg, mx = pyscan.max_disk_traj_grid(net, [pyscan.WTrajectory(1.0, traj) for traj in red_sample],
                                                    [pyscan.WTrajectory(1.0, traj) for traj in blue_sample], min_disk_r, max_disk_r, disc)
            end_time = time.time()

            row = {"vparam": vparam,
                   "input_size": input_size,
                   "disc": disc_name,
                   "region": "multi_disk",
                   "n": n, "s": s,
                   "r": r, "q": q,"p":p,
                   "alpha":alpha,
                   "time": end_time - start_time,
                   "m_disc_approx": mx,
                   "m_disc": actual_mx}
            writer.writerow(row)
            f.flush()
            print(row)
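
# Example sweep over the planted rate q while eps stays fixed (illustrative;
# `trajectories` loaded elsewhere):
#
#     testing_full_framework(trajectories, l_s=-2, h_s=-0.3, count=10,
#                            vparam="q", eps=.01, r=.04, p=.5)
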
def testing_partial_framework(red,
                              blue,
                              output_file,
                              l_s,
                              h_s,
                              count,
                              r=.04,
                              p=0.5,
                              q=.2,
                              error_thresh=3,
                              two_level_sample=True,
                              ham_sample=True,
                              disc_name="disc",
                              region_name="disk",
                              sample_method="block",
                              max_time=None):
    """
    How do I convert the trajectories over?
    1) Just sample evenly from the length.
    2) Choose points evenly
    3) Choose 
    :param trajectories:
    :param l_s:
    :param h_s:
    :param count:
    :param vparam:
    :param eps:
    :param eps_r:
    :param r:
    :param q:
    :param disc_name:
    :param region_name:
    :param input_size:
    :return:
    """
    fieldnames = [
        "disc", "region", "n", "s", "r", "p", "q", "time", "m_disc",
        "m_disc_approx", "sample_method"
    ]
    with open(output_file, 'w') as f:
        writer = csv.DictWriter(f, fieldnames=fieldnames)
        writer.writeheader()

        disc = utils.disc_to_func(disc_name)
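        # A large fixed "ground truth" sample: s_prime = 10^(2 * error_thresh)
        # points keeps the sampling error of the final evaluation near
        # 10^(-error_thresh).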
        s_prime = int(10**(2 * error_thresh) + .5)
        m_sample_prime = pyscan.uniform_sample(red, s_prime, False)
        b_sample_prime = pyscan.uniform_sample(blue, s_prime, False)

        for i in np.logspace(l_s, h_s, count):
            eps = i
            n = 1 / eps
            s = 1 / (2 * eps * eps)
            n = int(round(n) + .1)
            s = int(round(s) + .1)

            start_time = time.time()
            if sample_method == "block":
                f_sample = pyscan.block_sample
            elif sample_method == "even":
                f_sample = pyscan.even_sample
            elif sample_method == "uniform":
                f_sample = pyscan.uniform_sample

            m_sample = f_sample(red, s, False)
            b_sample = f_sample(blue, s, False)
            m_sample = pyscan.to_weighted(m_sample)
            b_sample = pyscan.to_weighted(b_sample)

            if two_level_sample:
                net_set1 = pyscan.my_sample(m_sample, n)
                net_set2 = pyscan.my_sample(b_sample, n)
                if ham_sample:
                    s = int(1 / (2 * eps**(4.0 / 3)) *
                            math.log(1 / eps)**(2 / 3.0))

                    m_sample = pyscan.ham_tree_sample(m_sample, s)
                    b_sample = pyscan.ham_tree_sample(b_sample, s)
            else:
                net_set1 = [pyscan.Point(p[0], p[1], p[2]) for p in m_sample]
                net_set2 = [pyscan.Point(p[0], p[1], p[2]) for p in b_sample]
                n = s

            net_set1 = [pyscan.Point(p[0], p[1], p[2]) for p in net_set1]
            net_set2 = [pyscan.Point(p[0], p[1], p[2]) for p in net_set2]
            net_set = net_set1 + net_set2

            if region_name == "halfplane":
                reg, mx = pyscan.max_halfplane(net_set, m_sample, b_sample,
                                               disc)
            elif region_name == "disk":
                reg, mx = pyscan.max_disk(net_set, m_sample, b_sample, disc)
            elif region_name == "rectangle":
                grid = pyscan.Grid(n, m_sample, b_sample)
                s1 = pyscan.max_subgrid_linear(grid, -1.0, 1.0)
                s2 = pyscan.max_subgrid_linear(grid, 1.0, -1.0)
                if s1.fValue() > s2.fValue():
                    reg = grid.toRectangle(s1)
                    mx = s1.fValue()
                else:
                    reg = grid.toRectangle(s2)
                    mx = s2.fValue()
            else:
                return

            end_time = time.time()

            actual_mx = pyscan.evaluate_range(
                reg, pyscan.to_weighted(m_sample_prime),
                pyscan.to_weighted(b_sample_prime), disc)

            row = {
                "disc": disc_name,
                "region": region_name,
                "n": n,
                "s": s,
                "r": r,
                "q": q,
                "p": p,
                "time": end_time - start_time,
                "m_disc_approx": mx,
                "m_disc": actual_mx,
                "sample_method": sample_method
            }
            writer.writerow(row)
            print(row)
            f.flush()
            if max_time is not None and end_time - start_time > max_time:
                return
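
# Example invocation (illustrative; `red`/`blue` prepared elsewhere):
#
#     testing_partial_framework(red, blue, "partial_disk.csv", l_s=-3, h_s=-1,
#                               count=20, region_name="disk",
#                               sample_method="block")
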
def testing_bandwidth_framework(pts,
                                output_file,
                                l_s,
                                h_s,
                                count,
                                r=.04,
                                p=0.8,
                                q=.5,
                                eps=.02,
                                annuli_eps=.1,
                                two_level_sample=True,
                                algorithm="annuli",
                                statistic="k",
                                power_test=None,
                                max_time=None,
                                seed=None):
    if seed is not None:
        random.seed(a=seed)
    else:
        seed = random.randint(0, 10000000)
        random.seed(a=seed)
    #seed = 9704595

    red, blue, bandwidth_orig, center_pt = pyscan.plant_kernel_disk_region(
        pts, r, p, q)

    actual_mx = pyscan.disc_bernoulli_kern(red, blue, p, q, bandwidth_orig,
                                           center_pt)

    try:
        with open(output_file, "r") as fh:
            exists = True
    except FileNotFoundError:
        exists = False
    with open(output_file, 'a+') as f:
        writer = None

        for i in np.logspace(l_s, h_s, count):

            n = 1 / eps
            s = 1 / (2 * eps * eps)
            n = int(round(n) + .1)
            s = int(round(s) + .1)
            print(i)
            bandwidth = bandwidth_orig * i
            start_time = time.time()

            m_sample = pyscan.my_sample(red, s)
            b_sample = pyscan.my_sample(blue, s)

            if two_level_sample:
                net_set1 = pyscan.my_sample(m_sample, n)
                net_set2 = pyscan.my_sample(b_sample, n)
            else:
                net_set1 = m_sample
                net_set2 = b_sample
                n = s

            net_set = net_set1 + net_set2
            m_sample = pyscan.to_weighted(m_sample)
            b_sample = pyscan.to_weighted(b_sample)
            # kern = pyscan.gaussian_kernel(bandwidth)
            # disc = pyscan.Bernoulli_K(kern)

            r_g = bandwidth * math.sqrt(math.e) * eps * 5
            disk_r = bandwidth * math.sqrt(math.log(1 / eps))
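            # disk_r is a truncation radius: at distance bandwidth * sqrt(log(1/eps))
            # a Gaussian of this bandwidth has decayed to roughly eps, so farther
            # points contribute negligibly; r_g sets the eps-scaled search
            # resolution (our reading of the formulas, not stated by pyscan).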

            if algorithm == "kernel":
                reg, mx = pyscan.max_kernel_slow(m_sample, b_sample, r_g,
                                                 disk_r, bandwidth)
            elif algorithm == "kernel2":
                reg, mx = pyscan.max_kernel_slow2(m_sample, b_sample, r_g,
                                                  disk_r, bandwidth)
            elif algorithm == "kernel_adaptive":
                reg, mx = pyscan.max_kernel_adaptive(m_sample, b_sample, r_g,
                                                     disk_r, bandwidth)
            elif algorithm == "kernel_prune":
                disk_r = bandwidth * math.sqrt(
                    math.log((len(m_sample) + len(b_sample)) / eps))
                reg, mx = pyscan.max_kernel_prune_far(m_sample, b_sample, r_g,
                                                      disk_r, bandwidth)
            elif algorithm == "kernel_fast":
                reg, mx = pyscan.max_kernel(m_sample, b_sample, r_g, disk_r,
                                            bandwidth)

            end_time = time.time()
            print(reg, mx)

            (p_m, q_m, mx) = pyscan.measure_kernel(reg.get_origin(),
                                                   pyscan.to_weighted(red),
                                                   pyscan.to_weighted(blue),
                                                   bandwidth_orig)

            row = {
                "disc": "bernoulli",
                "n": n,
                "s": s,
                "r": r,
                "q": q,
                "p": p,
                "p_m": p_m,
                "q_m": q_m,
                "time": end_time - start_time,
                "m_disc_approx": mx,
                "m_disc": actual_mx,
                "center_distance": reg.get_origin().dist(center_pt),
                "kl_div": Kl_divergence(pts, bandwidth_orig, center_pt,
                                        reg.get_origin()),
                "l2_dist": l2_dist(pts, bandwidth_orig, center_pt,
                                   reg.get_origin()),
                "angular_dist": angular_dist(pts, bandwidth_orig, center_pt,
                                             reg.get_origin()),
                "jc": extended_jc(pts, bandwidth_orig, center_pt,
                                  reg.get_origin()),
                "power": None,
                "bandwidth": bandwidth_orig,
                "seed": seed,
                "scale": i
            }
            if writer is None:
                writer = csv.DictWriter(f, fieldnames=list(row.keys()))
                if not exists:
                    writer.writeheader()

            writer.writerow(row)
            print(row)
            f.flush()
            if max_time is not None and end_time - start_time > max_time:
                return
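
# Example bandwidth sweep (illustrative; `pts` loaded as in the script further
# below). With l_s=-1 and h_s=1 the planted bandwidth is scaled from 0.1x to
# 10x:
#
#     testing_bandwidth_framework(pts, "bandwidth_sweep.csv", l_s=-1, h_s=1,
#                                 count=9, algorithm="kernel_fast", seed=1234)
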
def testing_partial_framework(pts,
                              output_file,
                              l_s,
                              h_s,
                              count,
                              r=.04,
                              p=0.8,
                              q=.5,
                              annuli_eps=.1,
                              two_level_sample=True,
                              algorithm="annuli",
                              statistic="k",
                              power_test=None,
                              max_time=None,
                              seed=None,
                              planted_reg=None):
    if seed is not None:
        random.seed(a=seed)
    else:
        seed = random.randint(0, 10000000)
        random.seed(a=seed)
    #seed = 9704595

    if planted_reg is None:
        red, blue, bandwidth, center_pt = pyscan.plant_kernel_disk_region(
            pts, r, p, q)
    else:
        red, blue, bandwidth, center_pt = planted_reg

    bernoulli_sample = [(pt, 1) for pt in red] + [(pt, 0) for pt in blue]
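    # Pool of labeled points used by the "bs" statistic below: label 1 = red
    # (measured), label 0 = blue (baseline).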
    print(bandwidth, center_pt)

    actual_mx = pyscan.disc_bernoulli_kern(red, blue, p, q, bandwidth,
                                           center_pt)

    try:
        with open(output_file, "r") as fh:
            exists = True
    except FileNotFoundError:
        exists = False
    with open(output_file, 'a+') as f:
        writer = None

        for i in np.logspace(l_s, h_s, count):
            eps = i
            n = 1 / eps
            s = 1 / (2 * eps * eps)
            n = int(round(n) + .1)
            s = int(round(s) + .1)

            start_time = time.time()
            if statistic == "bs":
                baseline_sample = pyscan.my_sample(bernoulli_sample, s)
                m_sample = [pt for (pt, id) in baseline_sample if id]
                b_sample = [pt for (pt, _) in baseline_sample]
                print(len(m_sample))
                print(len(baseline_sample))
                print("here")
            else:
                m_sample = pyscan.my_sample(red, s)
                b_sample = pyscan.my_sample(blue, s)

            if two_level_sample:
                net_set1 = pyscan.my_sample(m_sample, n)
                net_set2 = pyscan.my_sample(b_sample, n)
            else:
                net_set1 = m_sample
                net_set2 = b_sample
                n = s

            net_set = net_set1 + net_set2
            m_sample = pyscan.to_weighted(m_sample)
            b_sample = pyscan.to_weighted(b_sample)
            # kern = pyscan.gaussian_kernel(bandwidth)
            # disc = pyscan.Bernoulli_K(kern)

            r_g = bandwidth * math.sqrt(math.e) * eps * 5
            disk_r = bandwidth * math.sqrt(math.log(1 / eps))

            if algorithm == "kernel":
                reg, mx = pyscan.max_kernel_slow(m_sample, b_sample, r_g,
                                                 disk_r, bandwidth)
            elif algorithm == "kernel2":
                reg, mx = pyscan.max_kernel_slow2(m_sample, b_sample, r_g,
                                                  disk_r, bandwidth)
            elif algorithm == "kernel_adaptive":
                reg, mx = pyscan.max_kernel_adaptive(m_sample, b_sample, r_g,
                                                     disk_r, bandwidth)
            elif algorithm == "kernel_prune":
                disk_r = bandwidth * math.sqrt(
                    math.log((len(m_sample) + len(b_sample)) / eps))
                reg, mx = pyscan.max_kernel_prune_far(m_sample, b_sample, r_g,
                                                      disk_r, bandwidth)
            elif algorithm == "kernel_fast":
                reg, mx = pyscan.max_kernel(m_sample, b_sample, r_g, disk_r,
                                            bandwidth)

            elif algorithm == "combinatorial" or "satscan" in algorithm:
                if statistic == "k":
                    disc_f = pyscan.RKULLDORF
                elif statistic == "b":
                    disc_f = pyscan.bernoulli(0)
                elif statistic == "bs":
                    disc_f = pyscan.rbernoulli(eps * .0001)
                    #disc_f = pyscan.bernoulli(len(red), len(blue), eps / 2)
                elif statistic == "d":
                    disc_f = pyscan.DISC

            if algorithm == "combinatorial":
                reg, mx = pyscan.max_disk(net_set, m_sample, b_sample, disc_f)
            elif algorithm == "satscan_grid":
                reg, mx = pyscan.satscan_grid(m_sample, b_sample, r_g, disk_r,
                                              disc_f)
            elif algorithm == "satscan_points":
                reg, mx = pyscan.satscan_points(m_sample, b_sample, disc_f)

            end_time = time.time()
            print("Finished this run.")
            print(reg, mx)

            (p_m, q_m, mx) = pyscan.measure_kernel(reg.get_origin(),
                                                   pyscan.to_weighted(red),
                                                   pyscan.to_weighted(blue),
                                                   bandwidth)

            if power_test is not None:
                power = run_power_test(mx, disc, red, blue, n, s, annuli,
                                       power_test, two_level_sample)
            else:
                power = None

            # r_g = bandwidth * math.sqrt(math.e) * eps * 5
            # disk_r = bandwidth * math.sqrt(math.log((len(m_sample) + len(b_sample)) / eps))
            # centers = pyscan.kernel_centers(m_sample, b_sample, r_g, disk_r, bandwidth)
            #
            # fig, ax = plt.subplots(figsize=(20,20))
            #
            # bx = Mapping.get_bx(pts)
            # #ax, _ = Mapping.map_trajectories([], [], bx)
            # pyscan.plot_points(ax, m_sample, "r")
            # pyscan.plot_points(ax, b_sample, "b")
            # #pyscan.plot_points(ax, net_set, "k")
            # #pyscan.plot_kernel(ax, pts, center_pt, bandwidth, res=50)
            # #pyscan.plot_points(ax, centers, "k")
            # print(reg.get_origin())
            # pyscan.plot_kernel(ax, pts, reg.get_origin(), bandwidth, res=50)
            # plt.axis('off')

            # plt.show()
            #kl_val = Kl_divergence(pts, bandwidth, center_pt, reg.get_origin())
            kl_val = 0
            print("Finished kl div")
            l2 = l2_dist(pts, bandwidth, center_pt, reg.get_origin())
            print("Finished l2")
            angle = 0  #angular_dist(pts, bandwidth, center_pt, reg.get_origin())
            print("Finished Angle")
            ejc = extended_jc(pts, bandwidth, center_pt, reg.get_origin())
            print("Finished EJC")
            row = {
                "disc": "bernoulli",
                "n": n,
                "s": s,
                "r": r,
                "q": q,
                "p": p,
                "p_m": p_m,
                "q_m": q_m,
                "time": end_time - start_time,
                "m_disc_approx": mx,
                "m_disc": actual_mx,
                "center_distance": reg.get_origin().dist(center_pt),
                "kl_div": kl_val,
                "l2_dist": l2,
                "angular_dist": angle,
                "jc": ejc,
                "power": power,
                "bandwidth": bandwidth,
                "seed": seed
            }
            if writer is None:
                writer = csv.DictWriter(f, fieldnames=list(row.keys()))
                if not exists:
                    writer.writeheader()

            writer.writerow(row)
            print("Run time {}".format(time.time() - start_time))
            print(row)
            f.flush()
            if max_time is not None and end_time - start_time > max_time:
                return
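
# Example run of the combinatorial disk scan with the Kulldorff statistic
# (illustrative; `pts` loaded as in the script below):
#
#     testing_partial_framework(pts, "partial_kulldorff.csv", l_s=-2, h_s=-1,
#                               count=10, algorithm="combinatorial",
#                               statistic="k")
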


import paths

eps = .1
s = 800
r = .08
p = .5
q = .8

pts = paths.load_philly()
red, blue, bandwidth, center_pt = pyscan.plant_kernel_disk_region(pts, r, p, q)

bx = get_bx(pts)

m_sample = pyscan.my_sample(red, s)
b_sample = pyscan.my_sample(blue, s)

# r_g = bandwidth * math.sqrt(math.e) * eps * 5
# disk_r = bandwidth * math.sqrt(math.log(1 / eps))
#reg, mx = pyscan.max_kernel_slow(pyscan.to_weighted(m_sample), pyscan.to_weighted(b_sample), r_g, disk_r, bandwidth)

ax, _ = map_trajectories([], [], bx)
plot_points(ax, m_sample, "r", transform=projection)
plot_points(ax, b_sample, "b", transform=projection)

pyscan.plot_kernel(ax, pts, center_pt, bandwidth, res=50, transform=projection)
#pyscan.plot_kernel(ax, pts, reg.get_origin(), bandwidth, res=50, transform=projection)
#plt.show()
plt.savefig("philly.pdf", bbox_inches='tight')
def plot_trajectory_set(trajectories, sample):
    ax = plt.subplot()
    for traj in pyscan.my_sample(trajectories, sample):
        plot_line(ax, traj, "r")
    plt.show()
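
# e.g. plot a random subset of trajectories (the second argument is the
# subsample size handed to pyscan.my_sample):
#
#     plot_trajectory_set(trajectories, 200)
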
def testing_full_framework(
        red, blue,
        output_file,
        l_s, h_s, count,
        vparam="eps",
        eps=.01,
        alpha=.01,
        max_disk_r=None,
        min_disk_r=None,
        disc_name="disc",
        region_name="halfplane",
        sample_method="halfplane",
        fast_disk=True,
        two_level_sample=True,
        max_time=None):

    """
    How do I convert the trajectories over?
    1) Just sample evenly from the length.
    2) Choose points evenly
    3) Choose
    :param trajectories:
    :param l_s:
    :param h_s:
    :param count:
    :param vparam:
    :param eps:
    :param eps_r:
    :param r:
    :param q:
    :param disc_name:
    :param region_name:
    :param input_size:
    :return:
    """

    fieldnames = ["vparam", "disc", "region", "n", "s", "n_pts", "m_pts", "b_pts", "alpha", "time",
                  "m_disc", "m_disc_approx", "sample_method"]
    with open(output_file, 'w') as f:
        writer = csv.DictWriter(f, fieldnames=fieldnames)
        writer.writeheader()

        for i in np.logspace(l_s, h_s, count):

            if vparam == "eps":
                eps = i
            elif vparam == "alpha":
                alpha = i
            n = 1 / eps
            s = 1 / (2 * eps * eps)
            n = int(round(n) + .1)
            s = int(round(s) + .1)


            disc = utils.disc_to_func(disc_name)

            red_sample = pyscan.my_sample(red, s)
            blue_sample = pyscan.my_sample(blue, s)
            if two_level_sample:
                red_net = pyscan.my_sample(red, n)
                blue_net = pyscan.my_sample(blue, n)
            else:
                red_net = red_sample
                blue_net = blue_sample

            net = red_net + blue_net

            print("Running: {} {}".format(n, s))

            start_time = time.time()

            if region_name == "multiscale_disk":
                if max_disk_r is not None and alpha > max_disk_r:
                    print("Max Disk Radius is greater than alpha")
                    continue
                reg, mx = multiscale_disk(min_disk_r, max_disk_r, alpha, red_sample, blue_sample, net, disc, fast_disk)
                m_sample, b_sample, net_set = [], [], []
            else:
                if sample_method == "halfplane":
                    m_sample = [pyscan.halfplane_kernel([pyscan.Point(pt[0], pt[1], 1.0) for pt in traj], alpha) for traj in red_sample]
                    b_sample = [pyscan.halfplane_kernel([pyscan.Point(pt[0], pt[1], 1.0) for pt in traj], alpha) for traj in blue_sample]
                    pt_net = [pyscan.halfplane_kernel([pyscan.Point(pt[0], pt[1], 1.0) for pt in traj], alpha) for traj in net]
                elif sample_method == "dp":
                    m_sample = [pyscan.dp_compress(traj, alpha) for traj in red_sample]
                    b_sample = [pyscan.dp_compress(traj, alpha) for traj in blue_sample]
                    pt_net = [pyscan.dp_compress(traj, alpha) for traj in net]
                elif sample_method == "hull":
                    m_sample = [pyscan.convex_hull([pyscan.Point(pt[0], pt[1], 1.0) for pt in traj]) for traj in red_sample]
                    b_sample = [pyscan.convex_hull([pyscan.Point(pt[0], pt[1], 1.0) for pt in traj]) for traj in blue_sample]
                    pt_net = [pyscan.convex_hull([pyscan.Point(pt[0], pt[1], 1.0) for pt in traj]) for traj in net]
                elif sample_method is None:
                    #just takes the waypoints.
                    m_sample = [[pyscan.Point(pt[0], pt[1], 1.0) for pt in traj] for traj in red_sample]
                    b_sample = [[pyscan.Point(pt[0], pt[1], 1.0) for pt in traj] for traj in blue_sample]
                    pt_net = [[pyscan.Point(pt[0], pt[1], 1.0) for pt in traj] for traj in net]
                elif sample_method == "grid":
                    m_sample = [pyscan.grid_kernel(traj, alpha) for traj in red_sample]
                    b_sample = [pyscan.grid_kernel(traj, alpha) for traj in blue_sample]
                    pt_net = [pyscan.grid_kernel(traj, alpha) for traj in net]
                elif sample_method == "lifting":
                    m_sample = [pyscan.lifting_kernel(traj, alpha) for traj in red_sample]
                    b_sample = [pyscan.lifting_kernel(traj, alpha) for traj in blue_sample]
                    pt_net = [pyscan.lifting_kernel(traj, alpha) for traj in net]
                elif sample_method == "grid_direc":

                    if max_disk_r is not None and alpha > max_disk_r:
                        print("Max Disk Radius is greater than alpha")
                        continue
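                    # chord_l is (up to a constant) the chord length of a disk of
                    # radius max(min_disk_r, alpha) with sagitta alpha, i.e. the
                    # spacing needed for the directional grid kernel to stay
                    # within alpha of the smallest scanned disk (our reading of
                    # the formula).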
                    chord_l = math.sqrt(4 * alpha * max(min_disk_r, alpha) - 2 * alpha * alpha)
                    m_sample = [pyscan.grid_direc_kernel(pyscan.dp_compress(traj, alpha), chord_l, alpha) for traj in
                                red_sample]
                    b_sample = [pyscan.grid_direc_kernel(pyscan.dp_compress(traj, alpha), chord_l, alpha) for traj in
                                blue_sample]
                    pt_net = [pyscan.grid_direc_kernel(pyscan.dp_compress(traj, alpha), chord_l, alpha) for traj in net]
                elif sample_method == "even":
                    m_sample = [pyscan.even_sample_error(traj, alpha, False) for traj in red_sample]
                    b_sample = [pyscan.even_sample_error(traj, alpha, False) for traj in blue_sample]
                    pt_net = [pyscan.even_sample_error(traj, alpha, False) for traj in net]
                else:
                    return

                if region_name == "multiscale_disk_fixed":
                    m_sample = list(pyscan.trajectories_to_labels(m_sample))
                    b_sample = list(pyscan.trajectories_to_labels(b_sample))
                    net_set = list(pyscan.trajectories_to_labels(pt_net))
                    reg, mx = multiscale_disk_fixed(min_disk_r, max_disk_r, m_sample, b_sample, net_set, disc, fast_disk)
                else:
                    m_sample = list(pyscan.trajectories_to_labels(m_sample))
                    b_sample = list(pyscan.trajectories_to_labels(b_sample))
                    net_set = list(itertools.chain.from_iterable(pt_net))
                    if region_name == "halfplane":
                        reg, mx = pyscan.max_halfplane_labeled(net_set, m_sample, b_sample, disc)
                    elif region_name == "disk":
                        reg, mx = pyscan.max_disk_labeled(net_set, m_sample, b_sample, disc)
                    elif region_name == "rectangle":
                        reg, mx = pyscan.max_rect_labeled(n, 2 * max_disk_r, m_sample, b_sample, disc)
                    elif region_name == "rectangle_scale":
                        reg, mx = pyscan.max_rect_labeled_scale(n, 2 * max_disk_r, alpha, net_set, m_sample, b_sample, disc)
                    else:
                        return

            end_time = time.time()
            actual_mx = pyscan.evaluate_range_trajectory(reg, red, blue, disc)
            row = {"vparam": vparam,
                   "disc": disc_name,
                   "region": region_name,
                   "n": n, "s": s,
                   "n_pts": len(net_set), "m_pts":len(m_sample), "b_pts":len(b_sample),
                   "alpha":alpha,
                   "time": end_time - start_time,
                   "m_disc_approx": mx,
                   "m_disc": actual_mx,
                   "sample_method": sample_method}
            writer.writerow(row)
            f.flush()
            print(row)
            if max_time is not None and end_time - start_time > max_time:
                return
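
# Example sweep over eps with DP-simplified trajectories and disk regions
# (illustrative; `red`/`blue` are trajectory sets prepared elsewhere):
#
#     testing_full_framework(red, blue, "traj_disk.csv", l_s=-3, h_s=-1.2,
#                            count=10, region_name="disk", sample_method="dp",
#                            alpha=.01)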