Example #1
0
def _save_img_list(img_list, save_path, config):
    """Save a sequence of generator images as an animated GIF.

    Args:
        img_list: iterable of images in CHW (channels-first) layout;
            each is transposed to HWC for ``imshow``.
        save_path: directory into which ``img_list.gif`` is written.
        config: dict whose ``"save_every"`` entry is the number of
            training steps between saved frames (used only for titles).
    """
    metadata = dict(title='generator images', artist='Matplotlib',
                    comment='Movie support!')
    writer = ImageMagickWriter(fps=1, metadata=metadata)
    # imshow expects height x width x channels.
    frames = [np.transpose(img, (1, 2, 0)) for img in img_list]
    fig, ax = plt.subplots()
    # Third positional argument of saving() is dpi.
    with writer.saving(fig, "%s/img_list.gif" % save_path, 500):
        for step, frame in enumerate(frames):
            ax.imshow(frame)
            ax.set_title("step {}".format(step * config["save_every"]))
            writer.grab_frame()
Example #2
0
    def save_animation(self, fps=5, time=5, *, filename="robot_animation.gif"):
        """Render the stored joint-space path as an animated GIF.

        Args:
            fps: target frames per second of the output animation.
            time: target duration of the animation in seconds.
            filename: output GIF path (keyword-only).

        ``self.theta`` is mutated while rendering but restored before
        returning, so the robot's pose is unchanged afterwards.
        """
        original_position = self.theta
        number_of_frames = self.path.shape[1]

        frames_to_animate = fps * time
        if number_of_frames < frames_to_animate:
            # Too few path columns for the requested rate: show every
            # column and lower fps so the clip still lasts ~`time` seconds.
            # BUGFIX: the original computed `time // number_of_frames`,
            # which inverts the ratio (almost always 0, clamped to 1) and
            # stretched short paths to `number_of_frames` seconds.
            step = 1
            fps = max(1, number_of_frames // time)
        else:
            step = number_of_frames // frames_to_animate

        fig = plt.figure()
        robot_animation = ImageMagickWriter(fps=fps)

        with robot_animation.saving(fig, filename, dpi=150):
            for column in np.arange(start=0, stop=number_of_frames, step=step):
                self.theta = np.array(self.path[:, column])
                self.plot(show=False)
                robot_animation.grab_frame()
                # Clear and re-apply axis options so frames don't stack.
                fig.clear()
                self._set_plot_options()

        # Restore the pose mutated during rendering.
        self.theta = original_position
        plt.close()
Example #3
0
           train_x[:, 2],
           s=5,
           c=create_color_list(train_y))
# Plot the current neuron weight vectors as black triangles on the same axes.
scat = ax.scatter(weights[:, 0],
                  weights[:, 1],
                  weights[:, 2],
                  c='k',
                  marker='^')

# Training loop — presumably a Kohonen self-organizing map, judging by the
# output file name; verify against the rest of the file.
max_iterations = 100
learning_rate = 0.005
old_weights = np.zeros(weights.shape)  # NOTE(review): assigned but never read in this excerpt
writer = ImageMagickWriter(fps=4)
file_name = "kohonens.gif"
try:
    with writer.saving(fig, file_name, dpi=100):
        for k in range(max_iterations):
            # 4 presumably marks "no class yet"; winners get the sample's
            # label below — confirm class labels are 0..3.
            neuron_class = 4 * np.ones(weight_size)
            print(k)
            random.shuffle(train_set)
            for x, y in train_set:
                # Euclidean distance from sample x to every neuron's weights.
                d = np.zeros(weight_size)
                for i in range(weight_size):
                    d[i] = np.linalg.norm(x - weights[i, :])
                # Best-matching unit: smallest distance wins.
                winner_index = np.argmin(d)
                neuron_class[winner_index] = y
                # Neighborhood strengths around the winner at iteration k
                # (neighbor() is defined elsewhere in the file).
                h = neighbor(k, winner_index)
                # Pull every neuron toward x, weighted by its neighborhood value.
                for j in range(weight_size):
                    delta_w = learning_rate * h[j] * (x - weights[j])
                    weights[j] += delta_w
# NOTE(review): the except/finally matching this `try` lies beyond this excerpt.