def cleanup(self):
    """
    Check the account and password entered in the login window
    """
    account = self.e1.get()
    password = self.e2.get()
    # access = 'tim'
    path = EPath('data', f'{account}.txt')
    if not path.exists():
        self.attempts += 1
        if self.attempts == 5:
            window.quit()
        # self.e.delete(0, 'end')
        messagebox.showerror(
            'Incorrect Account',
            'Incorrect Account, attempts remaining: ' + str(5 - self.attempts))
    else:
        with open(path, 'r') as f:
            # readline() keeps the trailing newline, so strip it before comparing
            stored_password = f.readline().strip()
        if password == stored_password:
            self.loop = True
            self.top.destroy()
            window.deiconify()
        else:
            self.attempts += 1
            if self.attempts == 5:
                window.quit()
            # self.e1.delete(0, 'end')
            messagebox.showerror(
                'Incorrect Password',
                'Incorrect password, attempts remaining: ' + str(5 - self.attempts))
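# cleanup above expects one plaintext file per account under 'data/', holding the
# stored password on its first line; readline() keeps the trailing newline, so the
# comparison strips it. A minimal sketch of the expected file layout, using pathlib
# in place of the repo's EPath (the account name is hypothetical):
from pathlib import Path

Path('data').mkdir(exist_ok=True)
(Path('data') / 'alice.txt').write_text('hunter2\n')  # account 'alice', password 'hunter2'
stored = (Path('data') / 'alice.txt').read_text().splitlines()[0]
assert stored == 'hunter2'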
def save(self, path):
    """
    Save the model parameters and weights in the given folder

    :param path: the folder to save the model in
    :return:
    """
    path = EPath(path)
    # shutil.rmtree(path.as_posix(), ignore_errors=True)
    path.mkdir(exist_ok=True, parents=True)
    with open(path / 'MyNN.p', 'wb') as dump_file:
        pickle.dump(
            dict(model_id=self.model_id,
                 input_param=self.input_param,
                 opt_param=self.opt_param,
                 step_length=self.step_length,
                 model_options=self.model_options,
                 loss_options=self.loss_options,
                 mono=self.mono), dump_file)
    self.save_weights(path=path)
    self.save_checkpoint_weights(path=path)
    summary.summarize(
        # Function parameters
        path=path,
        title='My NN',
        # Summary parameters
        model_id=self.model_id,
        **self.model_options,
        **self.opt_param,
        **self.loss_options)
def eval_songs_folder(path):
    """
    Evaluate all the MIDI songs in a folder and save the results

    :param path: the folder containing the .mid files
    :return:
    """
    path = EPath(path)
    files = list(filter(lambda x: x.suffix == '.mid', path.listdir(concat=True)))
    all_res = {}
    text = ""
    res_seed = 0
    res_created = 0
    for file in files:
        seed_length = 8 if file.rstem[20:] != "redo_song_generate_3" else None
        res = eval_song_midi(file, seed_length)
        all_res[file.rstem] = res
        h_s = res['harmony_seed']
        h_c = res['harmony_created']
        if h_s is not None and not np.isnan(h_s):
            res_seed += h_s
        if h_c is not None and not np.isnan(h_c):
            res_created += h_c
        text += file.rstem + '\n'
        for k in res:
            text += f'\t{k}: {res[k]}\n'
        plots.plot_res((file.parent / file.rstem + '_harmony_measure').with_suffix('.jpg'), res)
    res_seed /= len(files)
    res_created /= len(files)
    text = f'\t\tRes for generation\n\nMean:\n\tharmony_seed: {res_seed}\n\tharmony_created: {res_created}\n' + text
    with open(path / 'res.txt', 'w') as f:
        f.write(text)
    with open(path / 'res.p', 'wb') as dump_file:
        pickle.dump(all_res, dump_file)
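# Note on the averaging above: the loop skips NaN harmony values when summing but
# still divides by len(files), so skipped songs pull the mean toward zero. If the
# intent were a mean over valid songs only, np.nanmean would behave differently.
# A standalone sketch with hypothetical per-song scores (not from the project):
import numpy as np

values = [0.8, np.nan, 0.6]                     # per-song harmony scores, one missing
sum_skipping_nan = sum(v for v in values if not np.isnan(v))
print(sum_skipping_nan / len(values))           # ~0.467: what the loop above computes
print(np.nanmean(values))                       # 0.7: mean over the valid songs only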
def check_args(args):
    """
    Build the data paths from the args and recreate the folder of the transformed data

    :param args:
    :return:
    """
    if args.pc:
        # args.data = 'lmd_matched_mini'
        data_path = os.path.join('../Dataset', args.data)
    else:
        data_path = os.path.join('../../../../../../storage1/valentin', args.data)
    data_transformed_path = data_path + '_transformed'
    if not args.no_transpose:
        data_transformed_path += 'Transposed'
    if args.mono:
        data_transformed_path += 'Mono'
    if os.path.exists(data_transformed_path):
        # Delete the folder of the transformed data
        shutil.rmtree(data_transformed_path)
    if not os.path.exists(data_transformed_path):
        os.mkdir(data_transformed_path)
    return args, EPath(data_path), EPath(data_transformed_path)
def create_token(self):
    """
    Create a token in the temp folder for the save folder

    :return:
    """
    EPath('temp').mkdir(exist_ok=True, parents=True)
    with open(EPath('temp', 'token_' + self.full_name), 'w') as f:
        f.write(f'token for the model {self.full_name}')
def delete_token(self):
    """
    Delete the token associated with the model

    :return:
    """
    if self.full_name_i is not None:
        token_path = EPath('temp', f'token_{self.full_name}')
        if token_path.exists():
            token_path.unlink()
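# The two methods above implement a simple token-file lock: a 'token_<name>' file
# in 'temp/' marks a save folder as claimed, and is removed when the model releases
# it. A minimal standalone sketch of the same lifecycle, using pathlib in place of
# the repo's EPath (function names here are illustrative, not from the project):
from pathlib import Path

def create_token_for(name):
    Path('temp').mkdir(exist_ok=True, parents=True)
    (Path('temp') / f'token_{name}').write_text(f'token for the model {name}')

def delete_token_for(name):
    token = Path('temp') / f'token_{name}'
    if token.exists():
        token.unlink()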
def save_weights(self, path):
    """
    Save the weights of the model in the given folder

    :param path: the folder to save the weights in
    :return:
    """
    path = EPath(path)
    path.mkdir(exist_ok=True, parents=True)
    with open(path / 'weights.p', 'wb') as dump_file:
        pickle.dump(dict(weights=self.model.get_weights()), dump_file)
def __init__(self, filepath):
    """
    :param filepath: where to save the checkpoint
    """
    self.filepath = EPath(filepath)
    super(CheckPoint, self).__init__()
    self.best_acc = -np.inf   # best accuracy seen so far (to maximize)
    self.best_loss = np.inf   # best loss seen so far (to minimize)
def delete_token_midis_path(self):
    """
    Delete the token of the MIDI generation folder

    :return:
    """
    if self._save_midis_path_i is not None:
        path = EPath('temp', f'token_{self.full_name}-generation({self._save_midis_path_i}).txt')
        if path.exists():
            path.unlink()
def create_token_midis_path(self):
    """
    Create the token of the MIDI generation folder

    :return:
    """
    EPath('temp').mkdir(exist_ok=True, parents=True)
    with open(EPath('temp', 'token_' + self.full_name + f'-generation({self.save_midis_path_i}).txt'), 'w') as f:
        f.write(f'token for the generation folder at {self.save_midis_path}')
def get_new_save_midis_path_i(self):
    """
    Set up a new save path for the MIDI files

    :return:
    """
    self.delete_token_midis_path()
    i = 0
    name = self.full_name + '-generation({0})'
    while EPath('generated_midis', name.format(i)).exists() \
            or EPath('temp', ('token_' + name + '.txt').format(i)).exists():
        i += 1
    self.save_midis_path_i = i
    print('new save path for Midi files:', colored(str(self.save_midis_path), 'cyan'))
def get_input_tensorboard_command(names=None):
    """
    Build the comma-separated 'name:path' string expected by TensorBoard

    :param names: list of strings
    :return:
    """
    root = EPath(*['..' for _ in range(3)], 'big_runs')
    if names is None:
        names = os.listdir(root.str)
    inputs = [
        f'{name}:/{(root / name / "saved_models" / os.listdir(root / name / "saved_models")[0] / "tensorboard").as_posix()}'
        for name in names
    ]
    return ','.join(inputs)
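# The string built above follows TensorBoard's 'name:path,name:path' convention; if
# it is meant for the --logdir_spec flag (an assumption, the flag name is not in
# the source), usage would look like:
#
#   tensorboard --logdir_spec=run_a:/path/to/a/tensorboard,run_b:/path/to/b/tensorboard
#
# A tiny standalone sketch of the joining logic with hypothetical run names:
runs = {'run_a': '/logs/a/tensorboard', 'run_b': '/logs/b/tensorboard'}
print(','.join(f'{name}:{path}' for name, path in runs.items()))
# -> run_a:/logs/a/tensorboard,run_b:/logs/b/tensorboard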
def get_new_i(self):
    """
    Set up a new unique full name and the corresponding path to save the trained model

    :return:
    """
    self.delete_tokens()
    i = 0
    full_name = self.full_name_no_i + '-({0})'
    saved_model_path = EPath('saved_models')
    while EPath(saved_model_path, full_name.format(i)).exists() \
            or EPath('temp', 'token_' + full_name.format(i)).exists():
        i += 1
    self.full_name_i = i
    print('Got new full_name:', colored(self.full_name, 'blue'))
    self.get_new_save_midis_path_i()
def get_checkpoint_path():
    """
    Reserve and return a free checkpoint path in the temp folder

    :return:
    """
    checkpoint_path = EPath('temp')
    checkpoint_path.mkdir(exist_ok=True, parents=True)
    i = 0
    while (checkpoint_path / f'token_checkpoint_weights_{i}.txt').exists() \
            or (checkpoint_path / f'checkpoint_weights_{i}.p').exists():
        i += 1
    token_path = checkpoint_path / f'token_checkpoint_weights_{i}.txt'
    with open(token_path.as_posix(), 'w') as f:
        f.write('token file')
    return checkpoint_path / f'checkpoint_weights_{i}.p'
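# get_checkpoint_path, get_new_i and get_new_save_midis_path_i all share the same
# pattern: scan for the first index i whose target path AND token file are both
# free, then drop a token so concurrent runs skip that slot. A minimal standalone
# sketch of the pattern with pathlib (illustrative names, not from the project;
# note the check-then-create is not atomic, matching the originals):
from pathlib import Path

def reserve_slot(folder, stem):
    folder = Path(folder)
    folder.mkdir(exist_ok=True, parents=True)
    i = 0
    while (folder / f'{stem}_{i}.p').exists() or (folder / f'token_{stem}_{i}.txt').exists():
        i += 1
    (folder / f'token_{stem}_{i}.txt').write_text('token file')  # claim the slot
    return folder / f'{stem}_{i}.p'

# reserve_slot('temp', 'checkpoint_weights')  # e.g. temp/checkpoint_weights_0.p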
def recreate_model(self, id, with_weights=True, print_model=True):
    """
    Create a new model with the same options as the saved model and then load
    the weights (if with_weights is True)

    :param id:
    :param with_weights: whether to load the weights of the model
    :param print_model:
    :return:
    """
    name, model_id, total_epochs, indice = id.split('-')
    path_to_load = EPath('saved_models', f'{name}-m({model_id})-e({total_epochs})-({indice})')
    with open(str(path_to_load / 'infos.p'), 'rb') as dump_file:
        d = pickle.load(dump_file)
    # Model
    self.model_id = d['model_id']
    self.work_on = d['work_on']
    self.input_param = d['input_param']
    self.name = d['name']
    self.predict_offset = d['predict_offset']
    # Data
    self.instruments = d['instruments']
    self.notes_range = d['notes_range']
    self.mono = d['mono']
    self.data_transformed_path = d['data_transformed_path']
    # Logistic
    self.total_epochs = d['epochs'] if with_weights else 0
    if with_weights:
        self.full_name_i = d['i']
    else:
        # get_new_i() sets self.full_name_i itself; its None return value
        # must not overwrite it
        self.get_new_i()
    self.keras_nn = KerasNeuralNetwork()
    self.keras_nn.recreate((path_to_load / 'MyNN').as_posix(), with_weights=with_weights)
    if print_model:
        self.print_model()
def save_generated_arrays_cross_images(self, generated_arrays, folder_path, name,
                                       replicate=False, titles=None, subtitles=None):
    """
    Plot all the generated arrays in one subplot to give an easier way to compare them

    :param subtitles:
    :param titles:
    :param replicate:
    :param generated_arrays:
    :param folder_path:
    :param name:
    """
    folder_path = EPath(folder_path)
    # file_name = self.get_unique_path(folder_path / (name + '.jpg'))      # From MGLogistic
    file_name = (folder_path / (name + '.jpg')).get_unique()
    pianoroll.save_arrays_as_pianoroll_subplot(
        arrays=generated_arrays,
        file_name=file_name,
        seed_length=self.nb_steps * self.step_length,
        mono=self.mono,
        replicate=replicate,
        titles=titles,
        subtitles=subtitles,
        notes_range=self.notes_range,
        step_length=self.step_length)
def recreate(self, path, with_weights=True):
    """
    Recreate the model from its saved parameters and optionally load its weights

    :param with_weights:
    :param path:
    :return:
    """
    path = EPath(path)
    with open(str(path / 'MyNN.p'), 'rb') as dump_file:
        d = pickle.load(dump_file)
    model_id = d['model_id']
    input_param = d['input_param']
    opt_param = d['opt_param']
    step_length = d['step_length']
    model_options = d['model_options']
    loss_options = d['loss_options']
    self.mono = d['mono']
    self.new_model(model_id=model_id,
                   input_param=input_param,
                   opt_param=opt_param,
                   step_length=step_length,
                   model_options=model_options,
                   loss_options=loss_options)
    if with_weights:
        self.load_weights(path=path)
def get_folder_path(id=None, name=None):
    """
    :param id: The id of an existing folder
    :param name: The name of the future saved folder
    :return: the path to the folder to save the results
    """
    if id is None:
        # Then it has to be a new folder
        name_str = f'_{name}' if name is not None else ''
        return EPath('hp_search', f'bayesian_opt{name_str}').get_unique(always_ext=True)
    else:
        id_list = id.split('-')
        id_str = '_' + '_'.join([str(s) for s in id_list])
        return EPath('hp_search', f'bayesian_opt{id_str}')
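# EPath.get_unique presumably returns the first non-colliding variant of a path by
# appending an index (an assumption about the helper, inferred from how it is used
# above and in save_generated_arrays_cross_images). A pathlib sketch of that behavior:
from pathlib import Path

def get_unique(path):
    path = Path(path)
    if not path.exists():
        return path
    i = 0
    candidate = path.with_name(f'{path.stem}_{i}{path.suffix}')
    while candidate.exists():
        i += 1
        candidate = path.with_name(f'{path.stem}_{i}{path.suffix}')
    return candidate

# get_unique('hp_search/bayesian_opt')  # -> hp_search/bayesian_opt_0 if taken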
def bayesian_opt(args):
    """
    Preprocess the args for the file bayesian-opt.py

    :param args:
    :return:
    """
    if args.pc and not args.no_pc_arg:
        args.epochs = 1
        args.n_calls = 2
    if args.in_place and args.from_checkpoint is None:
        warnings.warn('The arg "in-place" is set to "True" while the arg "from-checkpoint" is "None"')
    if args.from_checkpoint is not None:
        # It means the bayesian optimization continues a previous one; hence, some args
        # must stay the same so that the optimization stays coherent
        with open(EPath('hp_search',
                        f'bayesian_opt_{"_".join([str(s) for s in args.from_checkpoint.split("-")])}',
                        'checkpoint', 'args.p'), 'rb') as dump_file:
            d = pickle.load(dump_file)
        saved_args = d['args']
        for k, value in vars(saved_args).items():
            # if k not in ['in_place', 'from_checkpoint', 'n_calls', 'gpu', 'debug']:
            if k not in ['from_checkpoint']:
                setattr(args, k, value)
    return args
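# The vars()/setattr loop above copies every field of the saved argparse namespace
# onto the current one, except 'from_checkpoint'. A minimal standalone sketch with
# hypothetical values (not from the project):
from argparse import Namespace

saved_args = Namespace(epochs=50, n_calls=20, from_checkpoint=None)
args = Namespace(epochs=1, n_calls=2, from_checkpoint='0-3')
for k, value in vars(saved_args).items():
    if k not in ['from_checkpoint']:
        setattr(args, k, value)
print(args)  # Namespace(epochs=50, n_calls=20, from_checkpoint='0-3')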
def hp_summary(args):
    """
    Build the full folder path from the given folder id

    :param args:
    :return:
    """
    args.folder = EPath('hp_search', f'bayesian_opt_{args.folder}')
    return args
def __init__(self):
    super().__init__()
    self.logs = []
    # logs = {'loss': 4.495124205389112, 'Output_0_loss': 2.400269569744329,
    #         'Output_1_loss': 2.094854634212782, 'Output_0_acc_act': 0.9934636604502837,
    #         'Output_0_mae_dur': 0.2902308425676854, 'Output_1_acc_act': 0.9946330100062025,
    #         'Output_1_mae_dur': 0.25196381778232657}
    self.current_logs = None
    self.paths = []        # Where the saved models are stored
    self.hparams = []      # The hyperparameters of the model
    self.best_index = None
    EPath('tests_hp').mkdir(parents=True, exist_ok=True)
    i = 0
    while EPath('tests_hp/Summary_test_{0}.txt'.format(i)).exists():
        i += 1
    self.path = EPath('tests_hp/Summary_test_{0}.txt'.format(i))
    with open(self.path.as_posix(), 'w') as f:
        f.write('\n')
def name_to_folder(name):
    """
    :param name: name of the folder of the big run
    :return: the path to that folder
    """
    # Root path where all the run folders are
    path = EPath(*['..' for _ in range(3)], 'big_runs')
    return path / name
def load_data(self, data_transformed_path=None, data_test_transformed_path=None, verbose=1):
    """
    Load the data

    :return:
    """
    self.data_transformed_path = EPath(data_transformed_path) \
        if data_transformed_path is not None else self.data_transformed_path
    self.input_param = {}
    with open(str(self.data_transformed_path / 'infos_dataset.p'), 'rb') as dump_file:
        d = pickle.load(dump_file)
    self.input_param['input_size'] = d['input_size']
    self.input_param['nb_instruments'] = d['nb_instruments']
    self.instruments = d['instruments']
    self.notes_range = d['notes_range']
    self.mono = d['mono']
    self.transposed = d['transposed']
    if verbose == 1:
        # Print the resolved attribute so the default path is shown when the arg is None
        print('data at', colored(self.data_transformed_path, 'grey', 'on_white'), 'loaded')
    self.data_test_transformed_path = EPath(data_test_transformed_path) \
        if data_test_transformed_path is not None else self.data_test_transformed_path
def get_next_files(content_path_list, style_path_list):
    """
    :param content_path_list: List of the content_path
    :param style_path_list: List of the style_path
    :return:
    """
    for content_path in content_path_list:
        for style_path in style_path_list:
            result_path = EPath('results', content_path.stem, style_path.stem)
            if not result_path.exists():
                return content_path, style_path, result_path, var.p.image_start[0]
            else:
                files = result_path.listdir(t='str')
                for img_start in var.p.image_start:
                    # True if some existing file already starts with img_start
                    if not functools.reduce(lambda x, y: x or y.startswith(img_start), files, False):
                        return content_path, style_path, result_path, img_start
    return None, None, None, None
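# The functools.reduce expression above is an any() in disguise: it folds the file
# list with 'or', asking whether any existing file starts with img_start. A
# standalone check of the equivalence (the file names are hypothetical):
import functools

files = ['content_step_10.png', 'style_step_10.png']
img_start = 'noise'
by_reduce = functools.reduce(lambda x, y: x or y.startswith(img_start), files, False)
by_any = any(f.startswith(img_start) for f in files)
assert by_reduce == by_any  # both False: no file starts with 'noise'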
def get_next_files(content_path_list, style_path_list, image_start=options.image_start, data_path=None):
    """
    :param content_path_list: List of the content_path
    :param style_path_list: List of the style_path
    :param image_start:
    :param data_path:
    :return:
    """
    for content_path in content_path_list:
        for style_path in style_path_list:
            result_path = EPath('results', content_path.stem, style_path.stem)
            start_path_list = get_start_path_list(content_path=content_path,
                                                  style_path=style_path,
                                                  image_start=image_start,
                                                  data_path=data_path)
            if not result_path.exists():
                return FileCombination(content_path=content_path,
                                       style_path=style_path,
                                       start_path=start_path_list[0],
                                       n=0)
            else:
                # Existing files in the result folder
                files = result_path.listdir(t='str')
                for start_path in start_path_list:
                    for p in range(param.length):
                        file_combination = FileCombination(content_path=content_path,
                                                           style_path=style_path,
                                                           start_path=start_path,
                                                           n=p)
                        if not functools.reduce(
                                lambda x, y: x or y.startswith(file_combination.result_stem),
                                files, False):
                            return file_combination
    return None
def style_transfert(content_path, style_path, extractor, optimizers, image_start='content'):
    image_couple = images.load_content_style_img(content_path.as_posix(), style_path.as_posix(), plot_it=True)
    image = image_couple.get_start_image(image_start=image_start)
    results_folder = EPath('results') / content_path.stem / style_path.stem
    results_folder.mkdir(exist_ok=True, parents=True)
    train_step = create_train_step(extractor=extractor, optimizers=optimizers, image_couple=image_couple)
    bar_epoch = loadbar.ColorBar(color=loadbar.Colors.cyan, max=var.p.epochs, title='Epoch', show_eta=False)
    bar_epoch.start()
    for n in range(var.p.epochs):
        # pb = ProgressBar(max_iteration=(n + 1) * var.p.steps_per_epoch, title=f'Epoch {n + 1}/{var.p.epochs}')
        bar_epoch.update(step=n, end='\n')
        bar_step = loadbar.LoadBar(max=(n + 1) * var.p.steps_per_epoch, title='Step')
        bar_step.start()
        for m in range((n + 1) * var.p.steps_per_epoch):
            train_step(image=image,
                       content_image=image_couple.content_image,
                       style_image=image_couple.style_image)
            bar_step.update()
        bar_step.end()
        plot.display(image)
        file_name = results_folder / f'{image_start}_step_{(n + 1) * (n + 2) * var.p.steps_per_epoch // 2}.png'
        images.tensor_to_image(image).save(file_name.str)
    bar_epoch.end()
    del image_couple, image, train_step
    gc.collect()
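# The file name index above uses the triangular-number identity: epoch k runs
# (k + 1) * steps_per_epoch steps, so after epoch n the cumulative step count is
# steps_per_epoch * (1 + 2 + ... + (n + 1)) = (n + 1) * (n + 2) * steps_per_epoch // 2,
# which is exactly the expression in the saved file name. A quick standalone check:
steps_per_epoch = 100  # hypothetical value
total = 0
for n in range(5):
    total += (n + 1) * steps_per_epoch
    assert total == (n + 1) * (n + 2) * steps_per_epoch // 2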
def get_data():
    """
    Construct the data files

    :return: 2 List<EPath>: files of content and style
    """
    cp = EPath('content')
    sp = EPath('style')
    if cp.exists() and sp.exists():
        cp_list = cp.listdir(concat=True)
        sp_list = sp.listdir(concat=True)
        if len(cp_list) > 0 and len(sp_list) > 0:
            # Data is already there
            return cp_list, sp_list
    return extract_data()
def load_weights(self, path):
    """
    Load the weights of the model, preferring the checkpoint weights if they exist

    :param path: the folder containing the weights files
    :return:
    """
    path = EPath(path)
    if (path / 'checkpoint_weights.p').exists():
        file_path = path / 'checkpoint_weights.p'
    else:
        file_path = path / 'weights.p'
    with open(file_path, 'rb') as dump_file:
        d = pickle.load(dump_file)
    self.model.set_weights(d['weights'])
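# save_weights and load_weights above round-trip the Keras weights through a pickled
# dict. A minimal standalone sketch of the same round trip, with plain numpy arrays
# standing in for model.get_weights() (illustrative only):
import pickle
import numpy as np

weights = [np.zeros((4, 4)), np.ones(4)]          # stand-in for model.get_weights()
with open('weights.p', 'wb') as dump_file:
    pickle.dump(dict(weights=weights), dump_file)
with open('weights.p', 'rb') as dump_file:
    restored = pickle.load(dump_file)['weights']  # stand-in for model.set_weights(...)
assert all(np.array_equal(a, b) for a, b in zip(weights, restored))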
def __init__(self, path, nb_steps, batch_size=4, work_on=g.mg.work_on, noise=0,
             replicate=False, predict_offset=g.train.predict_offset):
    """
    :param predict_offset:
    :param path: The path to the data
    :param nb_steps: The number of steps in the inputs
    :param work_on: if it is on note/beat/measure
    :param replicate: if true, input = output, else output = last step of input + 1
    """
    # -------------------- Attributes --------------------
    self.predict_offset = 0 if replicate else predict_offset  # The next step as y
    self.path = EPath(path)  # Path to the dataset
    self.npy_path = self.path / 'npy'  # npy folder path
    self.nb_steps = nb_steps  # Number of steps in the x
    self.work_on = work_on  # measure/beat/note
    self.step_size = g.mg.work_on2nb(work_on)  # The size of a step
    self.noise = noise
    self.batch_size = batch_size
    self.replicate = replicate  # Boolean, if True, y = x

    self.nb_songs = None  # Number of songs in the dataset
    self.all_shapes = None  # All the shapes of the npy arrays:
    # List(nb_files, nb_song_in_file)[(length, nb_instruments, input_size, channels)]
    self.nb_songs_per_npy_file = None  # Maximum number of songs in a .npy file
    self.nb_instruments = None  # Number of instruments
    self.file_loaded = None  # The .npy file currently loaded in memory
    self.npy_loaded = None  # The value of the .npy file currently loaded in memory

    self._nb_elements_available = None  # Number of steps in the whole dataset
    self._nb_elements_available_per_file = None  # All the lengths of the songs: List(nb_files)[int]
    self._nb_elements_available_per_song = None  # All the lengths of the songs: List(nb_files, nb_songs_per_file)[int]

    # -------------------- Technical attributes --------------------
    with open(self.path / 'infos_dataset.p', 'rb') as dump_file:
        d = pickle.load(dump_file)
    self.nb_songs = d['nb_files']  # Number of available files in the dataset
    self.all_shapes = d['all_shapes']  # [nb_files, nb_song_in_file], (length, nb_instruments, input_size, 2)
    self.nb_songs_per_npy_file = d['nb_files_per_npy']
    self.nb_instruments = d['nb_instruments']
    self.file_loaded = None  # Number of the .npy file already loaded
    self.npy_loaded = None  # npy file already loaded
def load_weights(self, id):
    """
    :param id: id of the model to load
    :return: load the weights of a model
    """
    name, model_id, total_epochs, i = id.split('-')
    self.total_epochs = int(total_epochs)
    self.get_new_i()
    path_to_load = EPath('saved_models', self.full_name)
    self.keras_nn.load_weights(str(path_to_load / 'MyNN'))
    self.print_model()
    print('Weights of the', colored(f'{id}', 'white', 'on_blue'), 'model loaded')