Example #1
def extract_data():
    """
    Construct the data files.
    :return: two lists of EPath: the content files and the style files
    """

    cp = EPath('content')
    sp = EPath('style')
    if cp.exists() and sp.exists():
        # Check if I have to reconstruct it or not
        cp_list = cp.listdir(concat=True)
        sp_list = sp.listdir(concat=True)
        if len(cp_list) > 0 and len(sp_list) > 0:
            # Data is already there
            return cp_list, sp_list
        else:
            # Remove everything to do it again
            cp.rmdir()
            sp.rmdir()

    EPath('content').mkdir()
    EPath('style').mkdir()
    possible_parents = [
        '.', '..'
    ]  # The working folder (my computer) or the parent folder (colab)
    data_found = False
    for possible_parent in possible_parents:
        cp_zip = EPath(possible_parent, 'content.zip')
        sp_zip = EPath(possible_parent, 'style.zip')
        if cp_zip.exists() and sp_zip.exists() and not data_found:
            data_found = True
            # the zip files are provided in the project folder
            with ZipFile(cp_zip, 'r') as zip_ref:
                zip_ref.extractall('./')
            with ZipFile(sp_zip, 'r') as zip_ref:
                zip_ref.extractall('./')
    if not data_found:
        # no image provided
        cp = tf.keras.utils.get_file(
            'YellowLabradorLooking_new.jpg',
            'https://storage.googleapis.com/download.tensorflow.org/example_images/YellowLabradorLooking_new.jpg'
        )
        sp = tf.keras.utils.get_file(
            'kandinsky5.jpg',
            'https://storage.googleapis.com/download.tensorflow.org/example_images/Vassily_Kandinsky%2C_1913_-_Composition_7.jpg'
        )
        shutil.move(cp, 'content/YellowLabradorLooking_new.jpg')
        shutil.move(sp, 'style/kandinsky5.jpg')
    content_path_list = EPath('content').listdir(concat=True)
    style_path_list = EPath('style').listdir(concat=True)
    return content_path_list, style_path_list
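
A brief usage sketch for the example above (hypothetical driver code, not from the original project); it only relies on behaviour shown in the function: the content and style folders are reused when they already contain files, otherwise they are rebuilt from the zip archives or from the downloaded sample images.

# Hypothetical usage sketch for extract_data() above.
content_files, style_files = extract_data()
print(f'{len(content_files)} content image(s), {len(style_files)} style image(s)')
for path in content_files:
    print('content image:', path.stem)  # EPath exposes pathlib attributes such as .stem
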
Example #2
def get_data():
    """
    Construct the data files.
    :return: two lists of EPath: the content files and the style files
    """

    cp = EPath('content')
    sp = EPath('style')
    if cp.exists() and sp.exists():
        cp_list = cp.listdir(concat=True)
        sp_list = sp.listdir(concat=True)
        if len(cp_list) > 0 and len(sp_list) > 0:
            # Data is already there
            return cp_list, sp_list
    return extract_data()
Example #3
    def cleanup(self):
        account = self.e1.get()
        password = self.e2.get()
        # access = 'tim'
        path = EPath('data', f'{account}.txt')

        if not path.exists():
            self.attempts += 1
            if self.attempts == 5:
                window.quit()
            # self.e.delete(0, 'end')
            messagebox.showerror(
                'Incorrect Account',
                'Incorrect Account, attempts remaining: ' +
                str(5 - self.attempts))
        else:
            with open(path, 'r') as f:
                stored_password = f.readline()
            if password == stored_password:
                self.loop = True
                self.top.destroy()
                window.deiconify()
            else:
                self.attempts += 1
                if self.attempts == 5:
                    window.quit()
                # self.e1.delete(0, 'end')
                messagebox.showerror(
                    'Incorrect Password',
                    'Incorrect password, attempts remaining: ' +
                    str(5 - self.attempts))
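
For reference, a hypothetical setup sketch for the login dialog above; the account name 'tim' and the password are made up, only the data/<account>.txt layout comes from the code itself.

# Hypothetical setup sketch: create the credentials file cleanup() expects.
from epicpath import EPath

data_folder = EPath('data')
if not data_folder.exists():
    data_folder.mkdir()
with open(EPath('data', 'tim.txt'), 'w') as f:
    # No trailing newline: cleanup() compares the typed password against f.readline() as-is.
    f.write('my_password')
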
Example #4
    def delete_token(self):
        """
        Delete the token associated with the model
        :return:
        """
        if self.full_name_i is not None:
            token_path = EPath('temp', f'token_{self.full_name}')
            if token_path.exists():
                token_path.unlink()
Example #5
    def delete_token_midis_path(self):
        """

        :return:
        """
        if self._save_midis_path_i is not None:
            path = EPath(
                'temp',
                f'token_{self.full_name}-generation({self._save_midis_path_i}).txt'
            )
            if path.exists():
                path.unlink()
Example #6
def get_next_files(content_path_list, style_path_list):
    """

    :param content_path_list: List of the content_path
    :param style_path_list: List of the style_path
    :return:
    """
    for content_path in content_path_list:
        for style_path in style_path_list:
            result_path = EPath('results', content_path.stem, style_path.stem)
            if not result_path.exists():
                return content_path, style_path, result_path, var.p.image_start[
                    0]
            else:
                files = result_path.listdir(t='str')
                for img_start in var.p.image_start:
                    if not functools.reduce(
                            lambda x, y: x or y.startswith(img_start), files,
                            False):
                        return content_path, style_path, result_path, img_start
    return None, None, None, None
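
The functools.reduce call above is a fold that answers "does any existing file start with this prefix?". A hypothetical equivalent written with any(), shown only to clarify the logic:

# Hypothetical equivalent of the functools.reduce(...) test in the example above.
def already_generated(files, img_start):
    """Return True if any existing file name starts with the img_start prefix."""
    return any(name.startswith(img_start) for name in files)
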
Example #7
def get_next_files(content_path_list,
                   style_path_list,
                   image_start=options.image_start,
                   data_path=None):
    """

    :param content_path_list: List of the content_path
    :param style_path_list: List of the style_path
    :return:
    """

    for content_path in content_path_list:
        for style_path in style_path_list:
            result_path = EPath('results', content_path.stem, style_path.stem)
            start_path_list = get_start_path_list(content_path=content_path,
                                                  style_path=style_path,
                                                  image_start=image_start,
                                                  data_path=data_path)
            if not result_path.exists():
                return FileCombination(content_path=content_path,
                                       style_path=style_path,
                                       start_path=start_path_list[0],
                                       n=0)
            else:
                files = result_path.listdir(
                    t='str')  # existing files in the result folder
                for start_path in start_path_list:
                    for p in range(param.length):
                        file_combination = FileCombination(
                            content_path=content_path,
                            style_path=style_path,
                            start_path=start_path,
                            n=p)
                        if not functools.reduce(
                                lambda x, y: x or y.startswith(
                                    file_combination.result_stem), files,
                                False):
                            return file_combination
    return None
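
A hypothetical driver loop around the version above; run_style_transfer is a made-up placeholder for the project's actual training step (a real instance of that step appears in Example #14), and the loop only relies on the None sentinel returned when every combination has been generated.

# Hypothetical driver loop around get_next_files() above.
def process_all(content_path_list, style_path_list, run_style_transfer):
    # Keep asking for the next unprocessed combination until the None sentinel comes back.
    while True:
        combination = get_next_files(content_path_list, style_path_list)
        if combination is None:
            break
        run_style_transfer(combination)
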
Example #8
def main(args):
    """

    :param args:
    :return:
    """
    if not args.midi:
        shutil.rmtree(path='generated_midis', ignore_errors=True)
    if not args.hp:
        shutil.rmtree(path='hp_search', ignore_errors=True)
    if not args.model:
        shutil.rmtree(path='saved_models', ignore_errors=True)
    if not args.tensorboard:
        shutil.rmtree(path='tensorboard', ignore_errors=True)
    if not args.temp:
        shutil.rmtree(path='temp', ignore_errors=True)
    if not args.data_temp:
        data_temp = EPath(g.path.get_data_folder_path(args)) / 'temp'
        shutil.rmtree(path=data_temp, ignore_errors=True)
    if not args.zip:
        zip_path = EPath('my_zip.zip')
        if zip_path.exists():
            zip_path.unlink()
    print('Done cleaning')
Example #9
import sys
from epicpath import EPath
import json

from . import utils

img_size_nn = 512  # Max size of the input of the nn
img_size_hd = 1024  # TODO: change it to 4096
nb_offsets = 4  # number of offsets to do overlapping

colab = 'google.colab' in sys.modules

if not colab:
    # on my pc
    img_size_nn = 64
    img_size_hd = 128
    nb_offsets = 2

json_path = EPath('style_transfert_global_variables.json')

if json_path.exists():
    # If a json file exists, then take the variables
    with open(json_path, 'r') as json_file:
        data = json.load(json_file)
        img_size_nn = utils.get_key(data, 'img_size_nn', img_size_nn)
        img_size_hd = utils.get_key(data, 'img_size_hd', img_size_hd)
        nb_offsets = utils.get_key(data, 'nb_offsets', nb_offsets)

ratio_size = img_size_hd // img_size_nn
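
A hypothetical sketch of how the override file read by this module could be produced; the key names come from the utils.get_key calls above, the values are arbitrary. Since ratio_size is an integer division, img_size_hd is expected to be a multiple of img_size_nn.

# Hypothetical sketch: write the override file read by the module above.
import json

overrides = {'img_size_nn': 256, 'img_size_hd': 1024, 'nb_offsets': 4}
with open('style_transfert_global_variables.json', 'w') as f:
    json.dump(overrides, f, indent=2)
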
Example #10
class CheckPoint(KerasCallback):
    """

    """
    def __init__(self, filepath):
        """

        :param filepath:
        """
        self.filepath = EPath(filepath)
        super(CheckPoint, self).__init__()

        self.best_acc = -np.Inf
        self.best_loss = np.Inf

    def on_epoch_end(self, epoch, logs=None):
        """

        :param epoch:
        :param logs:
        :return:
        """
        logs = logs or {}
        if self.greater_result(logs):
            self.best_acc = self.get_val_acc_mean(logs)
            self.best_loss = self.get_val_loss(logs)
            if self.filepath.exists():
                self.filepath.unlink()
            # self.model.save_weights(self.filepath.as_posix(), overwrite=True)
            # tf.keras.experimental.export_saved_model(self.model, self.filepath.as_posix())
            with open(self.filepath, 'wb') as dump_file:
                pickle.dump(dict(weights=self.model.get_weights()), dump_file)

    def greater_result(self, logs):
        """

        :param logs:
        :return:
        """
        acc = self.get_val_acc_mean(logs)
        return np.greater(
            acc, self.best_acc) or (np.equal(acc, self.best_acc) and np.less(
                self.get_val_loss(logs), self.best_loss))

    @staticmethod
    def get_val_acc_mean(logs):
        """

        :param logs:
        :return:
        """
        i = 0
        res = 0
        while f'val_Output_{i}_acc' in logs:
            res += logs.get(f'val_Output_{i}_acc')
            i += 1
        return res / i

    @staticmethod
    def get_val_loss(logs):
        """

        :param logs:
        :return:
        """
        return logs.get('val_loss')
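
A hypothetical restore helper matching the pickle format written by CheckPoint.on_epoch_end above (a dict with a 'weights' entry); load_checkpoint_weights is a made-up name, and the model is assumed to have the same architecture as the one that was saved.

# Hypothetical restore helper for the pickle format written by CheckPoint above.
import pickle

def load_checkpoint_weights(model, filepath):
    """Load weights dumped by CheckPoint.on_epoch_end into a compatible Keras model."""
    with open(filepath, 'rb') as dump_file:
        saved = pickle.load(dump_file)
    model.set_weights(saved['weights'])
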
Example #11
import sys
from epicpath import EPath
import json

from . import utils
from ..STMode import STMode

st_mode = STMode.Noise.value  # Mode of the style transfer

image_start = ['content']  #, 'style']  # all_content, style, all_style, all

colab = 'google.colab' in sys.modules
if not colab:
    pass

options_json_path = EPath('style_transfer_options.json')

if options_json_path.exists():
    with open(options_json_path, 'r') as json_file:
        data = json.load(json_file)

        st_mode = utils.get_key(data, 'st_mode', st_mode)
        image_start = utils.get_key(data, 'image_start', image_start)
Example #12
]

p.loss = 'l1'

colab = 'google.colab' in sys.modules

if not colab:
    # on my pc
    p.img_size = 128
    p.img_size_nn = 64
    p.epochs = 4
    p.steps_per_epoch = 5

style_transfert_parameters_path = EPath('style_transfer_parameters.json')

if style_transfert_parameters_path.exists():
    # Then update the default parameters
    with open(style_transfert_parameters_path, 'r') as json_file:
        data = json.load(json_file)
        for key, value in data.items():
            if value is not None:
                p.update(key, value)

style_transfert_parameters_list_path = EPath(
    'style_transfer_parameters_list.json')
style_transfert_parameters_grid_path = EPath(
    'style_transfer_parameters_grid.json')

if style_transfert_parameters_list_path.exists():
    p.list_mode()
Example #13
def main(args):
    # To have colors from the first script
    # ----------------------------------------
    # Check that all the folders are available
    # ----------------------------------------
    if not args.in_place:
        if args.from_checkpoint is not None:
            if len(args.from_checkpoint.split('-')) == 1:
                name = 'bayesian_opt'
            else:
                name = f'bayesian_opt_{"_".join([str(s) for s in args.from_checkpoint.split("-")[:-1]])}'
            i_start = int(str(args.from_checkpoint).split('-')[-1]) + 1
        else:
            i_start = 0
            print('bo name', args.bo_name)
            name = 'bayesian_opt' if args.bo_name is None else f'bayesian_opt_{args.bo_name}'
        cprint('This script will need the next paths to be available:', 'red')
        for i in range(i_start, i_start + args.nscripts):
            path = EPath('hp_search', name + f'_{i}')
            cprint('path ' + path.as_posix(), 'yellow')
            if path.exists():
                raise FileExistsError(f'The folder "{path}" already exists')

    # ----------------------------------------
    # For the first Bayesian Optimization run, we just copy all the arguments except nscripts
    # ----------------------------------------
    print(f'Script 1/{args.nscripts}')
    s = 'python bayesian-opt.py'
    for k, value in vars(args).items():
        if k not in ['nscripts', 'from_checkpoint', 'bo_name']:
            if k not in [
                    'in_place', 'no_eager', 'pc', 'no_pc_arg', 'debug',
                    'seq2np', 'fast_seq', 'memory_seq', 'mono',
                    'from_checkpoint'
            ]:
                s += f' --{k.replace("_", "-")} {value}'
            elif value:
                s += f' --{k.replace("_", "-")}'
    if args.from_checkpoint is not None:
        s += f' --from-checkpoint {args.from_checkpoint}'
    if args.bo_name is not None:
        s += f' --bo-name {args.bo_name}'
    os.system(s)

    # ----------------------------------------
    # For all the others
    # ----------------------------------------
    if args.from_checkpoint:
        cp_id = args.from_checkpoint.split('-')
        id_saved_folder_base = '' if len(cp_id) == 1 else cp_id[0]
    elif args.bo_name:
        id_saved_folder_base = args.bo_name + '-'
    else:
        id_saved_folder_base = ''
    i_cp = int(args.from_checkpoint.split('-')
               [-1]) if args.from_checkpoint is not None else 0
    if not args.in_place and args.from_checkpoint is not None:
        i_cp += 1

    for script in range(1, args.nscripts):
        print(f'Script {script + 1}/{args.nscripts}')
        s = 'python bayesian-opt.py'
        # We only need to pass the number of calls and the checkpoint
        # And if in_place is set, we keep it
        s += f' --n-calls {args.n_calls}'
        s += f' --from-checkpoint {id_saved_folder_base}{i_cp}'
        if args.in_place:
            s += ' --in-place'
        else:
            i_cp += 1
        os.system(s)
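
A side note on the os.system(s) calls above: they ignore the exit code of each chained run. A hypothetical alternative (run_script is a made-up helper) that stops the chain as soon as one bayesian-opt.py run fails:

# Hypothetical alternative to os.system(s): raise if a chained run fails.
import shlex
import subprocess

def run_script(command_line):
    subprocess.run(shlex.split(command_line), check=True)
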
Example #14
if file_combination is not None:
    # image_couple = src.images.load_content_style_img(content_path.as_posix(), style_path.as_posix(), plot_it=True)
    src.st.var.param.n = file_combination.n

    extractor = src.st.StyleContentModel(
        style_layers=src.st.var.param.style_layers.value,
        content_layers=src.st.var.param.content_layers.value,
        content_gram_layers=src.st.var.param.content_gram_layers.value)

    if src.st.var.param.length > 1:
        print(f'param: {src.st.var.param.n}')

    optimizers = src.st.Optimizers(shape=(1, ), lr=src.st.var.param.lr.value)

    file_combination.results_folder.mkdir()
    parameters_path = EPath('results/parameters.txt')
    if not parameters_path.exists():
        src.st.var.param.save_all_txt(parameters_path)
    p_path = EPath(f'results/p{src.st.var.param.n}.txt')
    if not p_path.exists():
        src.st.var.param.save_current_txt(p_path)

    src.st.train.style_transfer(
        file_combination=file_combination,
        extractor=extractor,
        optimizers=optimizers,
        epochs=src.st.var.param.epochs.value,
        steps_per_epoch=src.st.var.param.steps_per_epoch.value)
else:
    print('No result_path left...')
Example #15
def main(args):
    """
        Entry point
    """

    if args.pc:
        data_path = EPath(os.path.join('../Dataset', args.data))
    else:
        data_path = EPath(
            os.path.join('../../../../../../storage1/valentin', args.data))
    data_checked_path = EPath(data_path.as_posix() + '_checked')
    if data_checked_path.exists():  # Delete the folder of the transformed data
        shutil.rmtree(data_checked_path.as_posix())
    shutil.copytree(src=data_path.as_posix(), dst=data_checked_path)

    # Stat
    max_notes_range, min_notes_range = 0, int(args.notes_range[1]) - int(
        args.notes_range[0])
    nb_correct_files = 0
    nb_measures = 0

    print(
        '\t',
        colored('Check_dataset with instruments : ', 'cyan', 'on_white') +
        colored('{0}'.format(args.instruments), 'magenta', 'on_white'), '\n')

    all_midi_paths = Midi.open.all_midi_files(data_checked_path.as_posix(),
                                              False)
    nb_files = len(all_midi_paths)
    print('note_range:', colored(args.notes_range, 'magenta'))
    for i in range(nb_files):
        midi_path = EPath(all_midi_paths[i])
        checked_file_name = EPath(
            midi_path.parent, midi_path.stem + '_checked' + midi_path.suffix)
        checked_file_name_image = EPath(midi_path.parent,
                                        midi_path.stem + '_checked.jpg')
        print(
            colored(
                "-- {0}/{1} ----- : ----- Checking {2} ----------".format(
                    i + 1, nb_files, midi_path), 'white', 'on_blue'))
        if args.bach:
            matrix_midi = Midi.open.midi_to_matrix_bach(
                filename=midi_path.as_posix(),
                print_instruments=True,
                notes_range=args.notes_range,
                transpose=not args.no_transpose)
        else:
            matrix_midi = Midi.open.midi_to_matrix(
                filename=midi_path.as_posix(),
                instruments=args.instruments,
                print_instruments=True,
                notes_range=args.notes_range,
            )  # (nb_args.instruments, 128, nb_steps, 2)
        if matrix_midi is None:
            continue
        if args.mono:
            matrix_midi = Midi.open.to_mono_matrix(matrix_midi)

        # Update stats
        matrix_bound_min, matrix_bound_max = Midi.common.range_notes_in_matrix(
            matrix_midi, mono=args.mono)
        min_notes_range = min(min_notes_range, matrix_bound_min)
        max_notes_range = max(max_notes_range, matrix_bound_max)
        nb_correct_files += 1
        nb_measures += Midi.common.nb_measures(matrix_midi)

        # matrix_midi = np.transpose(matrix_midi, , 3))
        output_notes = Midi.create.matrix_to_midi(matrix_midi,
                                                  instruments=args.instruments,
                                                  notes_range=args.notes_range,
                                                  mono=args.mono)
        Midi.create.save_midi(output_notes, args.instruments,
                              checked_file_name.as_posix())
        Images.pianoroll.save_array_as_pianoroll(array=matrix_midi,
                                                 folder_path=midi_path.parent,
                                                 name=midi_path.stem +
                                                 '_checked',
                                                 seed_length=0,
                                                 mono=args.mono or args.bach)
    min_notes_range += args.notes_range[0]
    max_notes_range += args.notes_range[0]
    print('----------', colored('Checking is done', 'white', 'on_green'),
          '----------')
    print('Number of correct files :', colored(nb_correct_files, 'magenta'),
          '-- nb measures :', colored(nb_measures, 'magenta'))
    print('Range of notes :', colored(min_notes_range, 'magenta'), ':',
          colored(max_notes_range, 'magenta'))
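
Finally, a hypothetical invocation sketch for the checker above, built with a bare argparse.Namespace because the project's real argument parser is not shown here; every attribute name comes from the args.* accesses in main(), but the values are arbitrary.

# Hypothetical invocation sketch for main() above; the values are arbitrary.
from argparse import Namespace

args = Namespace(pc=True, data='my_dataset', notes_range=(50, 74),
                 instruments=['Piano', 'Trombone'], bach=False,
                 no_transpose=False, mono=True)
main(args)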