Example #1
 def __init__(self, datadir, transform=None, data='mfcc', splits=[.8, .1, .1], shuffle_files=True, train='train'):
     self.data = data
     # Metadata and raw
     self.data_files = []
     # Spectral transforms
     self.spectral_files = []
     # Retrieve list of files
     tmp_files = sorted(glob.glob(datadir + '/raw/*.npz'))
     trans_files = sorted(glob.glob(datadir + '/' + data + '/*.npy'))
     self.data_files.extend(tmp_files)
     self.spectral_files.extend(trans_files)
     # Analyze dataset
     self.analyze_dataset()
     # Create splits
     self.create_splits(splits, shuffle_files)
     # Compute mean and std of dataset
     self.compute_normalization()
     # Now we can create the appropriate transform
     self.transform = transform
     if (transform is None):
         tr = []
         if (self.data != 'mfcc'):
             tr.append(LogTransform(clip=1e-3))
         tr.append(NormalizeTensor(self.mean, self.var))
         tr.append(transforms.RandomApply([NoiseGaussian(factor=1e-2)], p=0.333))
         #tr.append(transforms.RandomApply([OutliersZeroRandom(factor=.1)], p=0.333))
         self.transform = transforms.Compose(tr)
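A minimal usage sketch for the constructor above, assuming it belongs to a PyTorch Dataset subclass; the class name SpectralDataset and the path are placeholders, not taken from the source:

    # Hypothetical usage of the constructor shown above
    dataset = SpectralDataset('/path/to/dataset', data='mel')
    # With data != 'mfcc', the default pipeline built above is:
    #   LogTransform(clip=1e-3) -> NormalizeTensor(mean, var)
    #   -> RandomApply([NoiseGaussian(factor=1e-2)], p=0.333)
    sample = dataset[0]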
Example #2
 def switch_set(self, name):
     if (name == 'test'):
         self.features_files = self.test_files[0]
     if (name == 'valid'):
         self.features_files = self.valid_files[0]
     # Rebuild the transform for evaluation: normalization only, no augmentation
     tr = []
     tr.append(NormalizeTensor(self.mean, self.var))
     self.transform = transforms.Compose(tr)
     # Release the cached splits once the active set has been selected
     self.test_files = None
     self.valid_files = None
     return self
Example #3
 def switch_set(self, name):
     if (name == 'test'):
         self.data_files = self.test_files[0]
         self.spectral_files = self.test_files[1]
         self.metadata = self.test_files[2]
     if (name == 'valid'):
         self.data_files = self.valid_files[0]
         self.spectral_files = self.valid_files[1]
         self.metadata = self.valid_files[2]
     # Rebuild the transform for evaluation (no noise augmentation)
     tr = []
     if (self.data != 'mfcc'):
         tr.append(LogTransform(clip=1e-3))
     tr.append(NormalizeTensor(self.mean, self.var))
     self.transform = transforms.Compose(tr)
     # Release the cached splits once the active set has been selected
     self.test_files = None
     self.valid_files = None
     return self
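Both switch_set variants above follow the same pattern; a hedged usage sketch (the dataset object itself is assumed to exist already):

    # Hypothetical: point an existing dataset object at its validation split;
    # the transform is rebuilt without the training-time noise augmentation.
    valid_set = dataset.switch_set('valid')
    # Note: the method sets test_files and valid_files to None afterwards,
    # so a given instance can only switch splits once.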
Example #4
 def __init__(self, datadir, mean, var, transform=None, data='mfcc', splits=[.8, .1, .1], shuffle_files=True, train='train'):
     self.data = data.split("_")  # e.g. ["mel", "mfcc"]
     # Metadata and raw
     self.data_files = []
     # Retrieve list of files
     tmp_files = sorted(glob.glob(datadir + '/*.wav'))
     self.data_files.extend(tmp_files)
     # Build one normalization transform per feature instead of a single self.transform
     self.transforms = []
     # Save normalization statistics (dictionaries keyed by feature name)
     self.means = mean
     self.vars = var
     for d in self.data:
         tr = []
         if (d != 'mfcc'):
             tr.append(LogTransform(clip=1e-3))
         tr.append(NormalizeTensor(self.means[d], self.vars[d]))
         self.transforms.append(transforms.Compose(tr))
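A sketch of the inputs this multi-feature variant expects; the class name MultiFeatureDataset and the numeric statistics below are placeholders:

    # Hypothetical: one mean/variance entry per feature named in `data`
    mean = {'mel': 0.0, 'mfcc': 0.0}
    var = {'mel': 1.0, 'mfcc': 1.0}
    dataset = MultiFeatureDataset('/path/to/wav_dir', mean, var, data='mel_mfcc')
    # dataset.transforms now holds one Compose per feature:
    #   'mel'  -> LogTransform(clip=1e-3) + NormalizeTensor(mean['mel'], var['mel'])
    #   'mfcc' -> NormalizeTensor(mean['mfcc'], var['mfcc'])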
Example #5
 def __init__(self,
              datadir,
              args,
              transform=None,
              splits=[.8, .1, .1],
              shuffle_files=True,
              train='train'):
     self.args = args
     # Metadata and raw
     self.data_files = []
     # Spectral transforms
     self.features_files = []
     # Construct set of extractors
     self.construct_extractors(args)
     # Construct the FFT extractor
     self.multi_fft = MultiscaleFFT(args.scales)
     # Retrieve list of files
     tmp_files = sorted(glob.glob(datadir + '/raw/*.wav'))
     self.data_files.extend(tmp_files)
     # Create the feature cache and preprocess if no .npy files exist yet
     if (not os.path.exists(datadir + '/data')
             or len(glob.glob(datadir + '/data/*.npy')) == 0):
         # exist_ok avoids failing when the directory exists but is empty
         os.makedirs(datadir + '/data', exist_ok=True)
         self.preprocess_dataset(datadir)
     feat_files = sorted(glob.glob(datadir + '/data/*.npy'))
     self.features_files.extend(feat_files)
     # Analyze dataset
     self.analyze_dataset()
     # Create splits
     self.create_splits(splits, shuffle_files)
     # Compute mean and std of dataset
     self.compute_normalization()
     # Now we can create the normalization / augmentation transform
     self.transform = transform
     # Otherwise create a basic normalization / augmentation transform
     if (transform is None):
         tr = []
         # Normalize amplitude
         tr.append(NormalizeTensor(self.mean, self.var))
         # Augment with some random noise (p = .333)
         tr.append(
             transforms.RandomApply([NoiseGaussian(factor=1e-3)], p=0.333))
         self.transform = transforms.Compose(tr)
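A minimal sketch of how this constructor might be driven; only args.scales is known to be required by the code above, so every concrete value below (and the class name) is a placeholder:

    # Hypothetical argument object; the scale values are placeholders
    from argparse import Namespace
    args = Namespace(scales=[2048, 1024, 512, 256, 128, 64])
    dataset = WaveformDataset('/path/to/dataset', args)
    # On first use, raw/*.wav files are preprocessed into data/*.npy;
    # later runs reuse the cached .npy feature files.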
Example #6
 def __init__(self, datadir, spectral_files, transform=None, data_type='mel', stats=None, set_type=None):
     self.data_type = data_type
     # Spectral transforms
     # np.str_ replaces the np.unicode_ alias removed in recent NumPy versions
     self.spectral_files = np.array(spectral_files, dtype=np.str_)
     self.datadir = datadir
     # Compute mean and std of dataset (or reuse the provided statistics)
     if stats is None:  # statistics over the entire set
         self.compute_normalization()
     else: #train, test, valid sets
         self.mean, self.var = stats
     # Now we can create the appropriate transform
     self.transform = transform
     if (transform is None):
         tr = []
         if (self.data_type != 'mfcc'):
             tr.append(LogTransform(clip=1e-3))
         tr.append(NormalizeTensor(self.mean, self.var))
         if set_type == "train": # apply noise to train sets
             tr.append(transforms.RandomApply([NoiseGaussian(factor=1e-2)], p=0.333))
         #tr.append(transforms.RandomApply([OutliersZeroRandom(factor=.1)], p=0.333))
         self.transform = transforms.Compose(tr)      
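A hedged sketch of how the stats and set_type arguments might be combined so that the validation and test splits reuse the training statistics (the class name and file lists are hypothetical):

    # Hypothetical: normalize every split with the train-split statistics
    train_set = SpectralFileDataset(datadir, train_files, set_type='train')
    stats = (train_set.mean, train_set.var)
    valid_set = SpectralFileDataset(datadir, valid_files, stats=stats, set_type='valid')
    test_set = SpectralFileDataset(datadir, test_files, stats=stats, set_type='test')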
Example #7
    def __init__(self,
                 datadir,
                 args,
                 transform=None,
                 splits=[.8, .1, .1],
                 shuffle_files=True,
                 train='train'):
        self.args = args
        # Metadata and raw
        self.data_files = []
        self.data_dir = datadir
        # Spectral transforms
        self.features_files = []
        # Construct set of extractors
        self.construct_extractors(args)
        # Construct the FFT extractor
        self.multi_fft = MultiscaleFFT(args.scales)
        # Retrieve list of files
        tmp_files = sorted(glob.glob(datadir + '/raw/*.wav'))
        violin_train_meta = pd.read_csv(f'{datadir}/violin_training.csv')
        violin_test_meta = pd.read_csv(f'{datadir}/violin_test.csv')
        violin_validation_meta = pd.read_csv(
            f'{datadir}/violin_validation.csv')

        # Combine the three metadata splits into a single table
        # (pd.concat replaces the DataFrame.append API removed in pandas 2.0)
        violin_meta = pd.concat(
            [violin_train_meta, violin_test_meta, violin_validation_meta],
            ignore_index=True)

        if (not os.path.exists(datadir + '/data')):
            os.mkdir(datadir + '/data')

        # feat_files = glob.glob(datadir + '/data/*.npy')
        if not h5py.is_hdf5(datadir + '/data/dataset.hdf5'):
            self.dataset_file = h5py.File(datadir + '/data/dataset.hdf5', 'a')
            self.preprocess_parallel(violin_meta)
            self.dataset_file.flush()
        else:
            self.dataset_file = h5py.File(datadir + '/data/dataset.hdf5', 'r+')

        self.features_files = sorted(
            list(filter(lambda x: x != 'gv', self.dataset_file.keys())))

        if 'gv' not in self.dataset_file:
            self.compute_normalization()
        else:
            self.mean = self.dataset_file['gv'][0]
            self.var = self.dataset_file['gv'][1]

        # Analyze dataset
        self.analyze_dataset()
        # Create splits
        self.create_splits(splits, shuffle_files)
        # Mean and std were already computed / loaded from the 'gv' entry above

        # Now we can create the normalization / augmentation transform
        self.transform = transform
        # Otherwise create a basic normalization / augmentation transform
        if (transform is None):
            tr = []
            # Normalize amplitude
            tr.append(NormalizeTensor(self.mean, self.var))
            # Augment with some random noise (p = .333)
            tr.append(
                transforms.RandomApply([NoiseGaussian(factor=1e-3)], p=0.333))
            self.transform = transforms.Compose(tr)
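A sketch of reading the cached features and statistics back out of the HDF5 file created above; the per-key layout (one array per non-'gv' key) is an assumption beyond what the constructor shows:

    # Hypothetical read-back of the HDF5 cache built by the constructor above
    import h5py
    with h5py.File(datadir + '/data/dataset.hdf5', 'r') as f:
        keys = sorted(k for k in f.keys() if k != 'gv')
        first = f[keys[0]][()]              # first cached feature array
        mean, var = f['gv'][0], f['gv'][1]  # global mean / variance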