Example 1
def make_plot(surf, random=False, append=False):
    from util.plotly import Plot
    import numpy as np
    mult = 5
    fun = lambda x: np.cos(x[0] * mult) + np.sin(x[1] * mult)
    np.random.seed(0)
    p = Plot()
    low = -0.1
    upp = 1.1
    dim = 2
    plot_points = 2000
    N = 30
    if random:
        x = np.random.random(size=(N, dim))
    else:
        N = int(round(N**(1 / dim)))
        x = np.array([
            r.flatten() for r in np.meshgrid(np.linspace(low, upp, N),
                                             np.linspace(low, upp, N))
        ]).T
    y = np.array([fun(v) for v in x])
    # x = np.array([[0.5,0.5], [0.2,0.2], [0.2,0.8], [0.8,0.8], [0.8,0.2]])
    # y = np.array([1.0, 2.0, 3.0, 4.0, 5.0])
    p.add("Training Points", *x.T, y)
    surf.fit(x, y)
    p.add_func("VMesh", surf, *([(low, upp)] * dim), plot_points=plot_points)
    p.plot(file_name="vmesh.html", append=append)
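# A minimal usage sketch (not part of the original snippet). It assumes a
# surface model exposing the fit(x, y) / callable interface that make_plot
# expects, such as the VoronoiMesh used in Example 10.
if __name__ == "__main__":
    from vmesh import VoronoiMesh
    make_plot(VoronoiMesh(), random=True)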
Example 2
def minimize(objective,
             solution,
             bounds=None,
             args=tuple(),
             max_time=DEFAULT_MAX_TIME_SEC,
             min_steps=DEFAULT_MIN_STEPS,
             max_steps=DEFAULT_MAX_STEPS,
             min_improvement=DEFAULT_MIN_IMPROVEMENT,
             display=False,
             method=DiRect,
             checkpoint=False,
             checkpoint_file=CHECKPOINT_FILE):
    # Convert the solution into a float array (if it's not already)
    solution = np.asarray(solution, dtype=float)

    # Generate default bounds centered on the initial solution
    if bounds is None:
        upper = solution + np.ones((len(solution), )) * DEFAULT_SEARCH_SIZE
        lower = solution - np.ones((len(solution), )) * DEFAULT_SEARCH_SIZE
        bounds = list(zip(lower, upper))

    # Initialize a tracker for halting the optimization
    t = Tracker(objective, max_time, min_steps, min_improvement, display,
                checkpoint, checkpoint_file)
    # Get the initial objective value of the provided solution.
    t.check(solution, *args)

    # Call the optimization function and get the best solution
    method(t.check, t.done, bounds, solution, args=args)

    if display:
        print()
        try:
            from util.plotly import Plot
            name = objective.__name__.title()
            p = Plot(f"Minimization Performance on Objective '{name}'",
                     "Trial Number", "Objective Value")
            trial_numbers = [n for (o, n, s) in t.record]
            obj_values = [o for (o, n, s) in t.record]
            p.add("",
                  trial_numbers,
                  obj_values,
                  color=p.color(1),
                  mode="lines+markers")
            p.show(show_legend=False)
        except Exception:
            pass

    # Remove the checkpoint file
    if os.path.exists(checkpoint_file): os.remove(checkpoint_file)
    return t.best_sol
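# A minimal usage sketch (assumed, not part of the original module): minimize a
# simple quadratic objective from a starting point, using only the signature
# defined above.
if __name__ == "__main__":
    import numpy as np
    def quadratic(x):
        # Simple convex objective with its minimum at the all-ones vector.
        return float(np.sum((np.asarray(x) - 1.0)**2))
    best = minimize(quadratic, np.zeros(3), bounds=[(-5.0, 5.0)]*3)
    print("Best solution found:", best)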
Example 3
from util.plotly import Plot

p = Plot("","System Paramter","File I/O Throughput (kb/s) Mean")
n1 = "Config 1"
n2 = "Config 2"
n3 = "Config 3"
p.add_node(n1, 0, 0, color=p.color(0), size=10)
p.add_node(n2, 1, 1, color=p.color(1), size=10)
p.add_node(n3, 2, .3, color=p.color(2), size=10)
p.add_edge([n1,n2,n3])
p.graph(file_name="interpolating_values.html", show_titles=True,
        y_range=[-.15,1.15], x_range=[-.2,2.2])

p = Plot("","System Paramter","File I/O Throughput (kb/s) Distribution")
n1 = "Config 1"
n2 = "Config 2"
n3 = "Config 3"
p.add_node(n1, 0, 0, color=p.color(0), size=10, label=True, label_y_offset=-.08)
p.add_node(n2, 1, 1, color=p.color(1), size=10, label=True, label_y_offset=.08)
p.add_node(n3, 2, .3, color=p.color(2), size=10, label=True, label_y_offset=-.08)
p.add_edge([n1,n2,n3])
p.graph(file_name="interpolating_functions.html", show_titles=True,
        y_range=[-.15,1.15], x_range=[-.2,2.2])
Example 4
            # the defining bound (by closeness to max / min).
            # If an overtake is about to occur,
            #  raise the Lipschitz constant estimate.
            # If the distance to the closest bound is dominating,
            #  raise the Lipschitz constant estimate.

            response.append((max(self.y - distances * min_l) +
                             min(self.y + distances * min_l)) / 2)
        return np.array(response)

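# A minimal standalone sketch (illustrative names, not from the original
# module) of the prediction rule above: the estimate at a new point is the
# midpoint of the tightest lower and upper Lipschitz bounds implied by the
# known (x, y) pairs and a Lipschitz constant estimate.
import numpy as np
x = np.array([0.0, 0.5, 1.0])
y = np.array([0.0, 1.0, 0.5])
lipschitz = 2.0                              # assumed constant estimate
x_new = 0.25
distances = np.abs(x - x_new)
lower = np.max(y - distances * lipschitz)    # tightest lower bound
upper = np.min(y + distances * lipschitz)    # tightest upper bound
print((lower + upper) / 2)                   # midpoint prediction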

if __name__ == "__main__":
    from util.plotly import Plot
    f = lambda od: (max(0, (od + .2) if (od < -.1) else
                        (-od)) + max(0, (od) if (od < .1) else (.2 - od)))
    p = Plot()
    p.add_func("", f, [-1, 1])
    _ = p.show()
    exit()

    # from util.approximate import Delaunay as model
    # model = LipschitzMedian
    model = MinimumLipschitz

    p, _, _ = test_plot(model, N=3, D=1, random=False)

    # Generate a test plot showing this algorithm
    # p = test_plot(model, N=90, D=2, plot_points=2000, random=False,
    #               low=-.5, upp=1.5)

    p.show(file_name="test_plot.html")
Example 5
def turn(addition, turn_name=TURN_NAME):
    # Change the type of addition to match necessary image type
    addition = np.asarray(addition, dtype=np.uint8)
    # Reshape the addition to be the same shape as the image
    addition = addition.reshape(ADVERSARIAL_IMAGE_SIZE)
    # Collect all the deltas for this addition
    all_delta = []
    # print(addition.shape)
    addition = Image.fromarray(addition)
    # addition.save("initial_addition.png")
    if USE_RANDOM_TRANSFORMATIONS:
        # Cycle N random transformations
        for transform in range(NUM_TRANSFORMATIONS):
            print("[{:s}>{:s}]".format("-"*int(round(PROGRESS_LEN*transform/NUM_TRANSFORMATIONS)),
                                       " "*int(PROGRESS_LEN - round(PROGRESS_LEN*transform/NUM_TRANSFORMATIONS))),end="\r")
            # Train on the ability to turn
            transformed_addition = generate_transformed_addition(addition)
            if TRAIN_ON_REAL_IMAGES:
                # Cycle through all of the real training images
                random.shuffle(IMAGES_AND_STEERING)
                for (original, sa) in IMAGES_AND_STEERING[:NUM_IMAGES]:
                    original = original.reshape(IMAGE_SHAPE)
                    # Combine the original image with the transformed addition
                    img = original + np.where(transformed_addition >= NEUTRAL_VALUE, 
                                              transformed_addition - NEUTRAL_VALUE, 0)
                    img -= np.where(transformed_addition < NEUTRAL_VALUE,
                                    transformed_addition, 0)
                    # Identify the change in turning angle provided by the image
                    turn = float(MODEL.predict(np.array([img]), batch_size=1))
                    delta = turn - sa
                    all_delta.append(delta)
                    if SAVE_ALL_IMAGES:
                        img = Image.fromarray(np.asarray(img.reshape(IMAGE_SHAPE), dtype=np.uint8))
                        img_name = "{turn}_adv_imgs/({angle:+.2f})_{turn}_adversarial_{num:03d}-({orig:+.2f}).png".format(
                            turn=turn_name, angle=turn, num=transform, orig=sa)
                        img.save(img_name)
                        print("Saved '%s'"%img_name)
            else:
                # Identify the change in turning angle provided by the image
                turn = float(MODEL.predict(np.array([transformed_addition]), batch_size=1))
                all_delta.append(turn)
                if SAVE_ALL_IMAGES:
                    img = Image.fromarray(np.asarray(transformed_addition.reshape(IMAGE_SHAPE), dtype=np.uint8))
                    img_name = "{turn}_adv_imgs/({angle:+.2f})_{turn}_adversarial_{num:03d}_NEUTRAL.png".format(
                        turn=turn_name, angle=turn, num=transform)
                    img.save(img_name)
                    print("Saved '%s'"%img_name)
        print()
    else:
        # Do not use random transformations, just optimize over a
        # single image that takes up the entire view field of the car.
        addition = addition.resize(IMAGE_SHAPE[:-1])
        adv_img = np.array(addition).reshape((1,)+IMAGE_SHAPE)
        all_delta = [float(MODEL.predict(adv_img))]

    if SAVE_ALL_IMAGES: 
        print("Done saving first round of images.")
        exit()

    if PLOT_DELTA_DISTRIBUTION:
        print(all_delta)
        p = Plot("Randomly Transformed Adversarial {:s} Turn Image (100 bins)".format(turn_name.title()), 
                 "Normalized Turning Angle", "Probability")
        p.add_histogram("Turn Angles", all_delta)
        p.plot(show=False, file_name="{}_adversarial_turn_angles.html".format(turn_name),show_legend=False)

    # Flip the sign if we are optimizing for right turns
    avg_delta = sum(all_delta) / len(all_delta)
    if TURN_NAME == "right": avg_delta = -avg_delta
    # Return the average delta achieved by all transformations on all images
    return avg_delta
Example 6
if NEUTRAL_IMAGE_TEST:
    fake_img = np.random.randint(0,255,size=ADVERSARIAL_IMAGE_SIZE, dtype=np.uint8).flatten()
    fake_img[:] = 125
    print(fake_img.shape)
    print(turn(fake_img))

# ====================================
#      Adversarial Image Analysis     
# ====================================
if ANALYZE_EXISTING_IMAGE:
    # with open("{}_random_solution_{}.pkl".format(TURN_NAME, SOLUTION_NUMBER), "rb") as f:
    #     output = pickle.load(f)
    output = np.array(initial_solution)

    p = Plot()
    p.add_histogram("Adversarial Image", output)
    p.plot(file_name="{}_random_solution_{}.html".format(TURN_NAME, SOLUTION_NUMBER), show=False)

    print("Turn prouduced:", turn(output))
    img = Image.fromarray(np.asarray(output.reshape(ADVERSARIAL_IMAGE_SIZE), dtype=np.uint8))
    img.save("{}_random_solution_{}.png".format(TURN_NAME, SOLUTION_NUMBER))
    exit()

# ======================================
#      Adversarial Image Generation     
# ======================================
if GENERATE_OPTIMIZED_ADVERSARIAL_IMAGES:
    sample_img = np.ones(ADVERSARIAL_IMAGE_SIZE, dtype=np.uint8).flatten()*NEUTRAL_VALUE
    print("Starting optimization.")
    bounds = [(0,255)]*np.prod(ADVERSARIAL_IMAGE_SIZE)
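    # Plausible continuation (assumed, not part of the original excerpt): hand
    # the turn() objective from Example 5 and the pixel bounds to the
    # minimize() routine of Example 2 (imported in Example 10 as
    # "from util.optimize import minimize"), starting from the neutral image.
    solution = minimize(turn, sample_img, bounds=bounds)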
Example 7
    for h in header:
        unique_elements = np.unique(data[h])
        if len(unique_elements) < 100:
            unique[h] = sorted(unique_elements)
            print(("%"+str(h_width)+"s")%h,unique[h])
        else:
            print(("%"+str(h_width)+"s")%h,len(unique_elements),"elements values.")
    print()

    if PLOT_AGGREGATE_HISTOGRAMS:
        print("Generating histograms for similarity checks...")
        first = True
        for test in unique["Test"]:
            subset = data[data["Test"] == test]
            machines = sorted(np.unique(subset["Machine"]))
            p1 = Plot("Throughputs by machine for '%s' test"%test,
                      "Throughput", "Probability Mass")
            p2 = Plot("Runtime by machine for '%s' test"%test,
                      "Runtime", "Probability Mass")
            print("","processing test '%s'"%test)
            for m in machines:
                print("","","machine '%s'"%m)
                p1.add_histogram(m,subset[subset["Machine"] == m]["Throughput"],
                                 group=m)
                p2.add_histogram(m,subset[subset["Machine"] == m]["Runtime"],
                                 group=m, show_in_legend=False)

            multiplot([[p1],[p2]], append=(not first), file_name=TEST_HIST_FILE)
            first = False

if SHOW_COUNT_SUMMARY:
    count_array = np.array(list(map(len, counts.values())))
Example 8
            if sum(weights) > 0:
                guess = sum(np.array(weights)*self.values)/sum(weights)
            else:
                guess = 0
            predictions.append( guess )
        # print("weights: ",weights)
        # print("predictions: ",predictions)
        return predictions


if __name__ == "__main__":
    from util.plotly import Plot
    import numpy as np
    mult = 5
    fun = lambda x: np.cos(x[0]*mult) + np.sin(x[1]*mult)
    np.random.seed(0)
    p = Plot()
    low = 0
    upp = 1
    dim = 2
    plot_points = 2000
    N = 4
    random = True
    if random:
        x = np.random.random(size=(N,dim))
    else:
        N = int(round(N ** (1/dim)))
        x = np.array([r.flatten() for r in np.meshgrid(np.linspace(low,upp,N),
                                                       np.linspace(low,upp,N))]).T
    y = np.array([fun(v) for v in x])
    p.add("Training Points", *x.T, y)
    
    surf = Linn()
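    # Plausible continuation (assumed, mirroring Example 1): fit the surface
    # model to the training points and render it over the plotting window.
    surf.fit(x, y)
    p.add_func("Surface", surf, *([(low, upp)]*dim), plot_points=plot_points)
    p.plot(file_name="surface.html")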
Example 9
# layout = kwargs.get("layout",{})
# layout.update(dict(boxmode="group"))
# local_kwargs = kwargs.copy()
# local_kwargs["layout"] = layout
# print("Generating Mean_Dim plot..")
# p.plot(y_range=[-5,5], file_name="Mean_Dim.html", **local_kwargs)
# print("Done")

# VARIANCE PLOT
print("Collecting Var_Dim data..")
brief_title = "Predicting I/O Throughput Var"
error_col = "Relative_Mean_Error"
dimensions = [1, 2, 3, 4]
# Initialize the plot
y_axis = "Signed Relative Error in Predicted System Throughput"
p = Plot(brief_title, "", y_axis)
for alg in algorithms:
    alg_data = var_data[(var_data["Algorithm"] == alg)]
    box_values = []
    box_locations = []
    for (dim) in dimensions:
        # Reduce to a local set of data
        set_data = alg_data[(dim == alg_data["Dimension"])]
        x_axis = "%i Dimension" % (dim) + ("s" if dim > 1 else "")
        box_values += list(set_data[error_col])
        box_locations += [x_axis] * len(set_data)
    p.add_box(alg, box_values, box_locations)
layout = kwargs.get("layout", {})
layout.update(dict(boxmode="group"))
local_kwargs = kwargs.copy()
local_kwargs["layout"] = layout
Example 10
from util.plotly import Plot
# from util.approximate import minimize
from util.optimize import minimize
import numpy as np
from vmesh import VoronoiMesh
import time

p = Plot()
# for n in range(1000,10001, 1000):
#     for d in range(2,13,1):
#         v = VoronoiMesh()
#         points = np.random.random((n,d))
#         start = time.time()
#         v.fit( points, np.ones((n,)) )
#         v(np.array([[0.5,0.5]]))
#         total = time.time() - start
#         print("[%i, %i, %f]"%(n,d,total))
        
pts = np.array([
    [0, 2, 0],
    [0, 3, 0],
    [0, 4, 0],
    [0, 5, 0],
    [0, 6, 0],
    [0, 7, 0],
    [0, 8, 0],
    [0, 9, 0],
    [0, 10, 0],
    [0, 11, 0],
    [0, 12, 0],
    [1000, 2, 0.133740],
Example 11
# Generate an interesting random set of points
points = np.array([
    [0.48, 0.52],
    [0.40, 0.24],
    [0.65, 0.93],
    [0.16, 0.56],
    [0.93, 0.68],
    [0.70, 0.16],
])

# ============================
#      Iterative Box Mesh
# ============================

p = Plot()
# Define an additive matrix for shifting the corners of boxes outwards
shift = np.array([
    [-1, 1],
    [1, 1],
    [1, -1],
    [-1, -1],
], dtype=float)
# Shifts per box
shift_0 = shift.copy()
shift_5 = shift.copy()
shift_1 = shift.copy()
# Corners of the box
box_0 = np.ones((4, 2)) * points[0] + (.05 * shift)
box_5 = np.ones((4, 2)) * points[5] + (.05 * shift)
box_1 = np.ones((4, 2)) * points[1] + (.05 * shift)