def clean():
    """Remove every file and directory produced by the project pipeline."""
    import src.utils.path as pth
    import src.utils.logger as log
    import src.parser.toml as tml
    from shutil import rmtree

    log.info("Cleaning project")

    # Generated data files.
    pth.__remove_file(tml.value('json', section='data', subkey='fname'))
    pth.__remove_file(tml.value('numpy', section='data', subkey='fname'))

    # Demo directories, then the downloaded fx file.
    demo_dirs = tml.value('dnames', section='demo')
    for key in ('input', 'output'):
        if pth.__exists(demo_dirs[key]):
            rmtree(demo_dirs[key])
    pth.__remove_file(tml.value('fx_name', section='demo'))

    # Neural-net output directories.
    nn_dirs = tml.value('dnames', section='neuralnet')
    for key in ('predicted_labels', 'expected_labels', 'original_data'):
        if pth.__exists(nn_dirs[key]):
            rmtree(nn_dirs[key])
def demo():
    """Download the demo dataset and fx sample, then run the demo pipeline."""
    import src.utils.logger as log
    import src.utils.path as pth
    import src.parser.toml as tml
    from src.utils.tools import download, extract

    # Resolve URLs and target paths from the TOML configuration.
    dry_url = tml.value('urls', section='demo', subkey='dry')
    fx_url = tml.value('urls', section='demo', subkey='fx')
    dry_dpath = tml.value('dnames', section='demo', subkey='input')
    fx_fname = tml.value('fx_name', section='demo')

    log.info("Downloading and extracting dataset and fx")

    # The fx sample is always downloaded and renamed to its configured name.
    fx_fpath = download(fx_url)
    pth.__rename_file(fx_fpath, fx_fname)

    # The dataset is only fetched when its directory is absent.
    if pth.__exists(dry_dpath):
        log.warning("\"{0}\" already exist, skipping dataset downloading".format(dry_dpath))
    else:
        dry_fpath = download(dry_url)
        extract(dry_fpath, dry_dpath)

    run(dry_dpath, fx_fname, tml.value('dnames', section='demo', subkey='output'))
def rfname(path='.', prefix=''):
    """Return a path under <path> to a file that does not exist yet.

    The file name is <prefix> followed by a random string; new random
    names are drawn until an unused one is found.
    """
    while True:
        candidate = pth.__join_path(path, prefix + rstr())
        if not pth.__exists(candidate):
            return candidate
def generate_dataset(dry_dpath, fx_fpath, output_dpath=None, func=None):
    """Generate a dataset of wet samples from the dry samples in <dry_dpath>.

    Each dry audio file is processed through the fx loaded from <fx_fpath>
    (optionally via <func>) and written as a .wav file in <output_dpath>.
    A random output directory is created when none is given; a missing one
    is created.  A JSON file which matches dry and wet sample paths is
    dumped every <save_steps> samples and once more at the end.
    """
    # Ensure an output directory exists.
    if not output_dpath:
        output_dpath = mkrdir()
    elif not pth.__exists(output_dpath):
        pth.__make_dir(output_dpath)

    fx = _read(fx_fpath)
    jsn.init()

    save_steps = tml.value('json', section='data', subkey='save_steps')
    mapping = dict()
    for idx, dry_fpath in enumerate(__list_audio_files(dry_dpath)):
        wet_signal = apply_fx(_read(dry_fpath), fx, func)
        wet_fpath = pth.__with_extension(
            rfname(path=output_dpath, prefix='{0}_'.format(idx)), '.wav')
        _save(wet_signal, wet_fpath)
        mapping[dry_fpath] = wet_fpath
        if (idx + 1) % save_steps == 0:
            log.debug("{0} samples processed".format(idx + 1))
            jsn.dump(mapping)
    # Final dump catches samples processed since the last periodic dump.
    # NOTE(review): the dict is never cleared between dumps — presumably
    # jsn.dump rewrites the whole file rather than appending; confirm.
    jsn.dump(mapping)
def mkrdir(path='.', prefix=''):
    """Create a random directory at <path>, prefixed by <prefix>.

    Random names are drawn until an unused one is found; the directory
    is created and its path returned.
    """
    while True:
        candidate = pth.__join_path(path, prefix + rstr())
        if not pth.__exists(candidate):
            break
    pth.__make_dir(candidate)
    return candidate
def _save(npy_array, fpath, override=True):
    """Write <npy_array> as a wave file at <fpath>.

    Trailing dots are stripped from the file name and a '.wav' extension
    is enforced before writing at the configured sample rate.  When
    <override> is false, an existing file is left untouched.
    """
    if not override and pth.__exists(fpath):
        return

    # Strip trailing dots so the extension attaches cleanly.
    name = pth.__file_name(fpath)
    while name.endswith('.'):
        fpath = pth.__with_name(fpath, name[:-1])
        name = pth.__file_name(fpath)

    # Force the .wav extension.
    if pth.__file_extension(fpath) != '.wav':
        fpath = pth.__with_extension(fpath, '.wav')

    write(fpath, tml.value('s_rate', section='audio'), npy_array)
def _export(npy_arrays, outdpath=None, override=True):
    """Save every numpy array in <npy_arrays> as a wave file under <outdpath>.

    A random directory is created when <outdpath> is None; a missing
    directory is created.  When <outdpath> is an existing file, a critical
    error is logged.
    """
    if outdpath is None:
        outdpath = mkrdir()
    elif pth.__is_file(outdpath):
        # NOTE(review): execution falls through to the save loop after this
        # call unless log.critical aborts the process — confirm its semantics.
        log.critical(
            "Can't export songs in {0} since it is a file".format(outdpath))
    elif not pth.__exists(outdpath):
        pth.__make_dir(outdpath)

    # One file per array, indexed prefix keeps the original ordering visible.
    for idx, arr in enumerate(npy_arrays):
        _save(arr, rfname(path=outdpath, prefix='{0}_'.format(idx)), override)