    def save(self, path):
        """Save the model to `path`: parameters pickle, weights, and a text summary.

        :param path: folder in which to save the model (created if needed)
        :return: None
        """
        path = EPath(path)
        # shutil.rmtree(path.as_posix(), ignore_errors=True)
        path.mkdir(exist_ok=True, parents=True)
        with open(path / 'MyNN.p', 'wb') as dump_file:
            pickle.dump(
                dict(model_id=self.model_id,
                     input_param=self.input_param,
                     opt_param=self.opt_param,
                     step_length=self.step_length,
                     model_options=self.model_options,
                     loss_options=self.loss_options,
                     mono=self.mono), dump_file)
        self.save_weights(path=path)
        self.save_checkpoint_weights(path=path)
        summary.summarize(
            # Function parameters
            path=path,
            title='My NN',
            # Summary parameters
            model_id=self.model_id,
            **self.model_options,
            **self.opt_param,
            **self.loss_options)
    def save_weights(self, path):
        """

        :param path:
        :return:
        """
        path = EPath(path)
        path.mkdir(exist_ok=True, parents=True)
        with open(path / 'weights.p', 'wb') as dump_file:
            pickle.dump(dict(weights=self.model.get_weights()), dump_file)
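    # Hedged sketch (not from the source): the weights pickled above could be
    # restored symmetrically, e.g.:
    #
    #   with open(path / 'weights.p', 'rb') as f:
    #       self.model.set_weights(pickle.load(f)['weights'])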
    @staticmethod
    def get_checkpoint_path():
        """Reserve and return a free checkpoint path in the 'temp' folder.

        A token .txt file is written to claim index i, so concurrent runs
        cannot pick the same checkpoint slot.

        :return: path 'temp/checkpoint_weights_{i}.p' for the first free i
        """
        checkpoint_path = EPath('temp')
        checkpoint_path.mkdir(exist_ok=True, parents=True)
        i = 0
        while (checkpoint_path / f'token_checkpoint_weights_{i}.txt').exists() \
                or (checkpoint_path / f'checkpoint_weights_{i}.p').exists():
            i += 1
        token_path = checkpoint_path / f'token_checkpoint_weights_{i}.txt'
        with open(token_path.as_posix(), 'w') as f:
            f.write('token file')
        return checkpoint_path / f'checkpoint_weights_{i}.p'
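    # Hedged usage sketch (not from the source): the token file reserves an
    # index so that concurrent runs never claim the same slot, e.g.:
    #
    #   ckpt_path = self.get_checkpoint_path()  # temp/checkpoint_weights_{i}.p
    #   with open(ckpt_path, 'wb') as f:
    #       pickle.dump(dict(weights=self.model.get_weights()), f)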
def style_transfert(content_path,
                    style_path,
                    extractor,
                    optimizers,
                    image_start='content'):
    image_couple = images.load_content_style_img(content_path.as_posix(),
                                                 style_path.as_posix(),
                                                 plot_it=True)
    image = image_couple.get_start_image(image_start=image_start)

    results_folder = EPath('results') / content_path.stem / style_path.stem
    results_folder.mkdir(exist_ok=True, parents=True)
    train_step = create_train_step(extractor=extractor,
                                   optimizers=optimizers,
                                   image_couple=image_couple)
    bar_epoch = loadbar.ColorBar(color=loadbar.Colors.cyan,
                                 max=var.p.epochs,
                                 title='Epoch',
                                 show_eta=False)
    bar_epoch.start()
    for n in range(var.p.epochs):
        # pb = ProgressBar(max_iteration=(n + 1) * var.p.steps_per_epoch, title=f'Epoch {n + 1}/{var.p.epochs}')
        bar_epoch.update(step=n, end='\n')

        bar_step = loadbar.LoadBar(max=(n + 1) * var.p.steps_per_epoch,
                                   title='Step')
        bar_step.start()
        for m in range((n + 1) * var.p.steps_per_epoch):
            train_step(image=image,
                       content_image=image_couple.content_image,
                       style_image=image_couple.style_image)
            bar_step.update()
        bar_step.end()
        plot.display(image)
        # Cumulative steps so far: steps_per_epoch * (1 + 2 + ... + (n + 1))
        file_name = results_folder / f'{image_start}_step_{(n + 1) * (n + 2) * var.p.steps_per_epoch // 2}.png'
        images.tensor_to_image(image).save(file_name.str)
    bar_epoch.end()
    del image_couple, image, train_step
    gc.collect()
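# Hedged usage sketch (not from the source): `extractor` and `optimizers` are
# assumed to be built elsewhere in the project, e.g.:
#
#   content = EPath('data') / 'content' / 'cat.jpg'
#   style = EPath('data') / 'style' / 'wave.jpg'
#   style_transfert(content, style, extractor=extractor,
#                   optimizers=optimizers, image_start='content')
#
# Intermediate images land in results/<content stem>/<style stem>/.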
def save_img(array, path):
    """

    :param array: shape (nb_instruments, 128, nb_steps, 2)
    :return:
    """
    activations = array[:, :, :, 0]  # (nb_instruments, 128, nb_steps)
    np.place(activations, 0.5 <= activations, 1)
    np.place(activations, activations < 0.5, 0)
    path = EPath(path)
    path.mkdir(exist_ok=True, parents=True)
    for i in range(len(activations)):
        save_path = (path / 'inst({0}).jpg'.format(i)).as_posix()
        # Flip vertically so that low pitches end up at the bottom of the image
        img = Image.fromarray(
            (255 * np.flip(activations[i], axis=0)).astype(np.uint8), mode='L')
        img.save(save_path)
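# Hedged usage sketch (not from the source): a random piano-roll tensor with
# the documented shape, one image written per instrument:
#
#   demo = np.random.rand(2, 128, 64, 2)  # (nb_instruments, 128, nb_steps, 2)
#   save_img(demo, 'demo_images')  # writes inst(0).jpg and inst(1).jpg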
    def __init__(self,
                 sequence,
                 nb_steps_per_file=100,
                 batch_size=None,
                 dataset_folder_path=None):
        """


        :param sequence: Sequence to copy (Keras Sequence)
        :param nb_steps_per_file:
        :param batch_size:

        Attributs:

        :var folder_path: path to the folder containing the npy files
        :var token_path: path to the .txt file which is used as a token
        :var batch_size: the batch size
        :var nb_steps_per_file: number of steps saved in a .npy file
        :var nb_steps: total number of steps in this dataset
        :var npy_loaded: number of the file already loaded
        :var x_loaded: inputs array already loaded
        :var y_loaded: outputs array already loaded
        :var has_mask = if the sequence is supposed to have a mask in the inputs
        :var mask_loaded = masks array already loaded


        """
        # Create the temp folder in the dataset path
        if dataset_folder_path is None:
            # Find it from the given sequence
            data_temp_folder = sequence.path.parent / 'temp'
        else:
            data_temp_folder = EPath(dataset_folder_path) / 'temp'
        data_temp_folder.mkdir(exist_ok=True, parents=True)
        self.folder_path, self.token_path = self.get_token_path_file(
            data_temp_folder)

        # Set up batch size number
        self.batch_size = sequence.batch_size if batch_size is None else batch_size

        # Set up nb_steps_per_file number
        old_batch_size = sequence.batch_size
        sequence.change_batch_size(1)
        self.nb_steps_per_file = len(
            sequence) if nb_steps_per_file is None else nb_steps_per_file
        self.nb_steps = len(sequence)
        sequence.change_batch_size(old_batch_size)

        # The sequence provides a mask if the inputs contain one more array
        # than the outputs
        self.has_mask = len(sequence[0][0]) == len(sequence[0][1]) + 1

        # ----------------------------------------
        # Create the replicated dataset
        self.replicate_dataset(sequence)

        # ----------------------------------------
        # Create the variables for lazy loading
        self.npy_loaded = None
        self.x_loaded = None
        self.y_loaded = None
        self.mask_loaded = None
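    # Hedged usage sketch (not from the source): `TrainSequence` and
    # `SequenceReplicator` are hypothetical names; any Keras Sequence exposing
    # `path`, `batch_size` and `change_batch_size` would work, e.g.:
    #
    #   seq = TrainSequence(path=EPath('data') / 'dataset', batch_size=4)
    #   replicated = SequenceReplicator(seq, nb_steps_per_file=100)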