Example #1
 model : string, optional
     The name of the trained model
 scale_factor : float, optional
     Scale the magnitude of the files to be separated with this factor
 batch_size : int, optional
     The number of examples in a batch (see LargeDataset in dataset.py)  
 batch_memory : int, optional
     The number of batches to load in memory at once (see LargeDataset in dataset.py)
 time_context : int, optional
     The time context modeled by the network
 overlap : int, optional
     The number of overlapping frames between adjacent segments (see LargeDataset in dataset.py)
 nprocs : int, optional
     The number of CPUs to use when loading the data in parallel: the more, the faster (see LargeDataset in dataset.py)
 """
 if len(sys.argv) > -1:  # always true, since len(sys.argv) >= 1
     climate.add_arg('--db', help="the ikala dataset path")
     climate.add_arg('--model', help="the name of the model to test/save")
     climate.add_arg('--nepochs', help="number of epochs to train the net")
     climate.add_arg(
         '--time_context',
         help="number of frames for the recurrent/lstm/conv net")
     climate.add_arg('--batch_size', help="batch size for training")
     climate.add_arg('--batch_memory',
                     help="number of big batches to load into memory")
     climate.add_arg('--overlap', help="overlap time context for training")
     climate.add_arg('--nprocs',
                     help="number of processor to parallelize file reading")
     climate.add_arg('--scale_factor', help="scale factor for the data")
     climate.add_arg('--feature_path',
                     help="the path where to load the features from")
     db = None
Example #2
    You should have received a copy of the GNU General Public License
    along with DeepConvSep.  If not, see <http://www.gnu.org/licenses/>.
 """

import os, sys
import transform
import util
from transform import transformFFT
import numpy as np
import re
from scipy.signal import blackmanharris
import climate

if __name__ == "__main__":
    if len(sys.argv) > -1:
        climate.add_arg('--db', help="the dataset path")
        climate.add_arg('--feature_path',
                        help="the path where to save the features")
    kwargs = climate.parse_args()
    db = None
    if getattr(kwargs, 'db'):
        db = kwargs.db
    # else:
    #     db = '/home/marius/Documents/Database/iKala/'
    # validate the dataset directory before deriving any paths from it
    assert db is not None and os.path.isdir(
        db
    ), "Please input the directory for the iKala dataset with --db path_to_iKala"
    if getattr(kwargs, 'feature_path'):
        feature_path = kwargs.feature_path
    else:
        feature_path = os.path.join(db, 'transforms', 't1')
Example #3
 model : string, optional
     The name of the trained model
 scale_factor : float, optional
     Scale the magnitude of the files to be separated with this factor
 batch_size : int, optional
     The number of examples in a batch (see LargeDataset in dataset.py)
 batch_memory : int, optional
     The number of batches to load in memory at once (see LargeDataset in dataset.py)
 time_context : int, optional
     The time context modeled by the network
 overlap : int, optional
     The number of overlapping frames between adjacent segments (see LargeDataset in dataset.py)
 nprocs : int, optional
     The number of CPUs to use when loading the data in parallel: the more, the faster (see LargeDataset in dataset.py)
 """
 if len(sys.argv) > -1:
     climate.add_arg('--db', help="the Bach10 dataset path")
     climate.add_arg('--dbs', help="the Bach10 Sibelius dataset path")
     climate.add_arg('--output',
                     help="the path where to save the model and the output")
     climate.add_arg('--model', help="the name of the model to test/save")
     climate.add_arg('--nepochs', help="number of epochs to train the net")
     climate.add_arg(
         '--time_context',
         help="number of frames for the recurrent/lstm/conv net")
     climate.add_arg('--batch_size', help="batch size for training")
     climate.add_arg('--batch_memory',
                     help="number of big batches to load into memory")
     climate.add_arg('--overlap', help="overlap time context for training")
     climate.add_arg('--nprocs',
                     help="number of processor to parallelize file reading")
     climate.add_arg('--scale_factor', help="scale factor for the data")
Example #4
import os
import sys
import itertools
import seaborn as sns
import matplotlib
import matplotlib.pyplot as plt
from os import listdir
from os.path import isfile, join
import pandas as pd
import numpy as np
from scipy import stats
from scipy import io
import climate

if __name__ == '__main__':
    if len(sys.argv) > -1:
        climate.add_arg('--db', help="the path to the results directory")

        kwargs = climate.parse_args()
        if getattr(kwargs, 'db'):
            db = kwargs.db
        else:
            db = '/Volumes/Macintosh HD 2/Documents/Database/Bach10/results_paper/'

        methods = []
        for d in sorted(os.listdir(db)):
            if not os.path.isfile(os.path.join(db, d)):
                methods.append(d)

        sns.set()
        sns.set_context("notebook", font_scale=1.4)
        sns.set_palette(sns.cubehelix_palette(8, start=.5, rot=-.75))
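
A hedged sketch of how per-method results might be drawn with the seaborn palette configured above (the DataFrame and its columns are invented for illustration):

import numpy as np
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt

# hypothetical results: one SDR score per (method, song) pair
rng = np.random.default_rng(0)
df = pd.DataFrame({
    'method': np.repeat(['fft', 'cqt', 'deep'], 10),
    'SDR': rng.normal(5.0, 1.0, 30),
})

sns.set()
sns.set_context("notebook", font_scale=1.4)
sns.set_palette(sns.cubehelix_palette(8, start=.5, rot=-.75))
sns.boxplot(x='method', y='SDR', data=df)  # one box per separation method
plt.show()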
Example #5
import climate
import numpy as np
import sklearn.metrics
import theanets

import models

climate.add_arg('--codebook', metavar='FILE', help='decode classes from numpy FILE')


def main(args):
    X = np.load(args.dataset, mmap_mode='r')
    cut = int(0.9 * len(X))

    D = np.load(args.codebook, mmap_mode='r')
    print('D:', D.shape)
    K = sklearn.metrics.pairwise.euclidean_distances(
        X, D, (D * D).sum(axis=1), squared=True).argmin(axis=1)

    def batch(Z):
        def create():
            inputs = np.zeros((args.batch_size, args.time_steps, len(D)), 'f')
            outputs = np.zeros((args.batch_size, args.time_steps), 'i')
            for b in range(args.batch_size):
                o = np.random.randint(len(Z) - args.time_steps - 1)
                # one-hot encode the codeword index at each time step
                inputs[b, np.arange(args.time_steps), Z[o:o+args.time_steps]] = 1
                outputs[b] = Z[o+1:o+1+args.time_steps]
            return [inputs, outputs]
        return create

    net = models.train(
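
main above quantizes every row of X to the index of its nearest codebook row before building one-hot batches. A runnable sketch of just that vector-quantization step, with random stand-ins for the memory-mapped arrays:

import numpy as np
from sklearn.metrics.pairwise import euclidean_distances

# hypothetical stand-ins for the dataset and the codebook
X = np.random.randn(1000, 64).astype('f')  # frames
D = np.random.randn(256, 64).astype('f')   # codewords

# index of the nearest codeword for every frame, as in the snippet above
K = euclidean_distances(X, D, squared=True).argmin(axis=1)
print(K.shape)  # (1000,), values in [0, 256)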
Example #6
    along with DeepConvSep.  If not, see <http://www.gnu.org/licenses/>.
 """

import os, sys
import transform
import util
from transform import transformFFT
import numpy as np
import re
import itertools as it
from scipy.signal import blackmanharris
import climate

if __name__ == "__main__":
    if len(sys.argv) > -1:
        climate.add_arg('--db', help="the Bach10 Sibelius dataset path")
        climate.add_arg('--feature_path',
                        help="the path where to save the features")
        climate.add_arg(
            '--gt',
            help=
            "compute features for the ground truth aligned rendition or the others"
        )
    db = None
    kwargs = climate.parse_args()
    if getattr(kwargs, 'db'):
        db = kwargs.db
    else:
        db = '/home/marius/Documents/Database/Bach10/Source separation/'
        # db='/Volumes/Macintosh HD 2/Documents/Database/Bach10/Source separation/'
    if getattr(kwargs, 'feature_path'):
Example #7
            audio[:,0] = np.sum(audio[:,1:len(self.sources)+1],axis=1)

            # note: str.encode('base64', 'strict') works only on Python 2
            tt.compute_transform(audio,os.path.join(feature_path,f,self.style[s],f+'_'+str(c).encode('base64','strict')+'_'+str(chnk)+'.data'),phase=False)

            audio = None
            melody = None
          except GetOutOfLoop:
            pass

class GetOutOfLoop(Exception):
  pass


if __name__ == "__main__":
  if len(sys.argv) > -1:
    climate.add_arg('--db', help="the Bach10 Sibelius dataset path")
    climate.add_arg('--rwc', help="the rwc instrument sound path with mat and wav subfolders")
    climate.add_arg('--chunk_size', help="the chunk size to split the midi")
    climate.add_arg('--sample_size', help="sample this number of combinations of possible cases")
    climate.add_arg('--nprocs', help="the number of processors")
    climate.add_arg('--feature_path', help="the path where to save the features")
    climate.add_arg('--original', help="compute features for the original score or ground truth aligned score")
    kwargs = climate.parse_args()
    if getattr(kwargs, 'db'):
      db = kwargs.db
    else:
      db = '/home/marius/Documents/Database/Bach10/Source separation/'
      # db='/Volumes/Macintosh HD 2/Documents/Database/Bach10/Source separation/'

    if getattr(kwargs, 'rwc'):
      rwc_path = kwargs.rwc
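
GetOutOfLoop above is the usual Python trick for breaking out of nested loops: raise a private exception in the innermost loop and catch it outside the outermost one. A minimal sketch of the same pattern:

class GetOutOfLoop(Exception):
    pass

def first_pair_summing_to(values, target):
    try:
        for i, a in enumerate(values):
            for b in values[i + 1:]:
                if a + b == target:
                    # abandon both loops at once
                    raise GetOutOfLoop((a, b))
    except GetOutOfLoop as hit:
        return hit.args[0]
    return None

print(first_pair_summing_to([1, 3, 5, 7], 10))  # (3, 7)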
Example #8
'''This module contains command line flags.'''

import climate

climate.add_arg('--help-activation', action='store_true',
                help='show available activation functions')
climate.add_arg('--help-optimize', action='store_true',
                help='show available optimization algorithms')

g = climate.add_group('Architecture')
g.add_argument('-n', '--layers', nargs='+', type=int, metavar='N',
               help='construct a network with layers of size N1, N2, ...')
g.add_argument('-g', '--hidden-activation', default='logistic', metavar='FUNC',
               help='function for hidden unit activations')
g.add_argument('--output-activation', default='linear', metavar='FUNC',
               help='function for output unit activations')
g.add_argument('-t', '--tied-weights', action='store_true',
               help='tie encoding and decoding weights')
g.add_argument('--decode-from', type=int, default=1, metavar='N',
               help='decode from the final N layers of the net')

g = climate.add_group('Training')
g.add_argument('-O', '--optimize', default=(), nargs='+', metavar='ALGO',
               help='train with the given optimization algorithm(s)')
g.add_argument('-p', '--patience', type=int, default=4, metavar='N',
               help='stop training if less than --min-improvement for N validations')
g.add_argument('-v', '--validate-every', type=int, default=10, metavar='N',
               help='validate the model every N updates')
g.add_argument('-b', '--batch-size', type=int, default=64, metavar='N',
               help='train with mini-batches of size N')
g.add_argument('-B', '--train-batches', type=int, metavar='N',
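
climate.add_group above returns an argparse-style argument group, so related flags can be documented under one heading while still landing in the single parsed namespace (Example #13 below spells the same call climate.add_arg_group, apparently an alias from another climate version). A small sketch assuming that argparse-like behavior:

import climate

g = climate.add_group('Training')
g.add_argument('-b', '--batch-size', type=int, default=64, metavar='N',
               help='train with mini-batches of size N')
g.add_argument('-p', '--patience', type=int, default=4, metavar='N',
               help='stop after N validations without improvement')

if __name__ == '__main__':
    args = climate.parse_args()
    # grouped flags are still plain attributes on the one namespace
    print(args.batch_size, args.patience)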
Example #9
import climate
import json
import numpy as np
import numpy.random as rng
import os

logging = climate.get_logger(__name__)

climate.add_arg('--dataset', metavar='FILE', help='load dataset from FILE')
climate.add_arg('--save-model', metavar='FILE', help='save trained model to FILE')
climate.add_arg('--load-model', metavar='FILE', help='load model from FILE for training')
climate.add_arg('--layers', nargs='+', type=int, metavar='N', help='use hidden layers N...')
climate.add_arg('--layer-json', metavar='FILE', help='load hidden layers from FILE')
climate.add_arg('--input-noise', type=float, default=0, metavar='R', help='add R input noise')
climate.add_arg('--input-dropout', type=float, default=0, metavar='R', help='drop R input units')
climate.add_arg('--hidden-noise', type=float, default=0, metavar='R', help='add R hidden noise')
climate.add_arg('--hidden-dropout', type=float, default=0, metavar='R', help='drop R hidden units')
climate.add_arg('--hidden-l1', type=float, default=0, metavar='R', help='penalize hidden activation by R')
climate.add_arg('--algo', default='nag', help='learning algorithm')
climate.add_arg('--learning-rate', type=float, default=0.0001, metavar='A', help='learning rate A')
climate.add_arg('--nesterov', action='store_true', help='use nesterov momentum')
climate.add_arg('--momentum', type=float, default=0.9, metavar='M', help='momentum M')
climate.add_arg('--min-improvement', type=float, default=0, metavar='R', help='minimum patience improvement R')
climate.add_arg('--patience', type=int, default=10, metavar='N', help='wait for N failed validations')
climate.add_arg('--batch-size', type=int, default=64, metavar='N', help='batches contain N examples')
climate.add_arg('--time-steps', type=int, default=128, metavar='T', help='recurrent time steps')


def train(args, Model, train, valid, **kwargs):
    batch = train() if callable(train) else train
    I = O = batch[0].shape[-1]
Example #10
import climate
import numpy as np
import numpy.random as rng
import theanets

climate.add_arg('--dataset', metavar='FILE', help='load dataset from FILE')
climate.add_arg('--save-model', metavar='FILE', help='save trained model to FILE')
climate.add_arg('--load-model', metavar='FILE', help='load model from FILE for training')
climate.add_arg('--window', metavar='N', type=int, default=20, help='train on windows of length N')


def slicer(dataset, batch_size, window_size):
    def call():
        batch = np.zeros((batch_size, len(dataset[0]) * window_size), 'f')
        for b in range(batch_size):
            i = rng.randint(len(dataset) - window_size)
            batch[b] = dataset[i:i+window_size].ravel()
        return [batch]
    return call


def main(args):
    data = np.load(args.dataset, mmap_mode='r')
    cut = int(0.9 * len(data))
    n = len(data[0]) * args.window

    e = theanets.Experiment(theanets.Autoencoder, layers=[n] + args.layers + [n])

    if args.load_model:
        e.network.load_params(args.load_model)
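
This example and Example #5 both feed theanets a zero-argument callable that assembles one randomly sampled batch per call and returns it as a list of arrays; theanets invokes the callable whenever it needs fresh data. A sketch of the same idea on a toy autoencoder, assuming the theanets 0.x API used throughout these examples:

import numpy as np
import numpy.random as rng
import theanets

data = np.random.randn(5000, 10).astype('f')

def sample_batch():
    # one random mini-batch per call, wrapped in a list as theanets expects
    idx = rng.randint(len(data), size=64)
    return [data[idx]]

net = theanets.Autoencoder([10, 5, 10])
net.train(sample_batch, algo='nag', learning_rate=1e-3, momentum=0.9)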
Example #11
 model : string, optional
     The name of the trained model
 scale_factor : float, optional
     Scale the magnitude of the files to be separated with this factor
 batch_size : int, optional
     The number of examples in a batch (see LargeDataset in dataset.py)
 batch_memory : int, optional
     The number of batches to load in memory at once (see LargeDataset in dataset.py)
 time_context : int, optional
     The time context modeled by the network
 overlap : int, optional
     The number of overlapping frames between adjacent segments (see LargeDataset in dataset.py)
 nprocs : int, optional
     The number of CPUs to use when loading the data in parallel: the more, the faster (see LargeDataset in dataset.py)
 """
 if len(sys.argv) > -1:
     climate.add_arg('--db', help="the DSD100 dataset path")
     climate.add_arg('--output',
                     help="the path where to save the model and the output")
     climate.add_arg('--model', help="the name of the model to test/save")
     climate.add_arg('--nepochs', help="number of epochs to train the net")
     climate.add_arg(
         '--time_context',
         help="number of frames for the recurrent/lstm/conv net")
     climate.add_arg('--batch_size', help="batch size for training")
     climate.add_arg('--batch_memory',
                     help="number of big batches to load into memory")
     climate.add_arg('--overlap', help="overlap time context for training")
     climate.add_arg('--nprocs',
                     help="number of processor to parallelize file reading")
     climate.add_arg('--scale_factor', help="scale factor for the data")
     climate.add_arg('--feature_path',
Example #12
#!/usr/bin/env python

# Source: https://github.com/EmbodiedCognition/py-c3d
'''A simple OpenGL viewer for C3D files.'''

import c3d
import climate
import collections
import contextlib
import numpy as np
import pyglet

from pyglet.gl import *

climate.add_arg('inputs',
                nargs='+',
                metavar='FILE',
                help='show these c3d files')

BLACK = (0, 0, 0)
WHITE = (1, 1, 1)
RED = (1, 0.2, 0.2)
YELLOW = (1, 1, 0.2)
ORANGE = (1, 0.7, 0.2)
GREEN = (0.2, 0.9, 0.2)
BLUE = (0.2, 0.3, 0.9)
COLORS = (WHITE, RED, YELLOW, GREEN, BLUE, ORANGE)


@contextlib.contextmanager
def gl_context(scale=None, translate=None, rotate=None, mat=None):
    glPushMatrix()
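
A gl_context manager like the one above normally pairs glPushMatrix() with glPopMatrix() in a finally block so the matrix stack unwinds even if drawing raises. A hedged sketch of that shape (the mat branch is omitted because it relies on the file's vec helper, which is not shown):

import contextlib

from pyglet.gl import (glPushMatrix, glPopMatrix, glRotatef,
                       glScalef, glTranslatef)

@contextlib.contextmanager
def gl_context(scale=None, translate=None, rotate=None):
    '''Temporarily apply a model-view transform, restoring it on exit.'''
    glPushMatrix()
    try:
        if scale is not None:
            glScalef(*scale)
        if translate is not None:
            glTranslatef(*translate)
        if rotate is not None:
            glRotatef(*rotate)
        yield
    finally:
        glPopMatrix()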
Example #13
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

'''This file contains command line flags.'''

import argparse
import climate

climate.add_arg('--help-activation', action='store_true',
                help='show available activation functions')
climate.add_arg('--help-optimize', action='store_true',
                help='show available optimization algorithms')

g = climate.add_arg_group('Architecture')
g.add_argument('-n', '--layers', nargs='+', type=int, metavar='N',
               help='construct a network with layers of size N1, N2, ...')
g.add_argument('-g', '--activation', default='logistic', metavar='FUNC',
               help='function for hidden unit activations DEPRECATED')
g.add_argument('--hidden-activation', default='logistic', metavar='FUNC',
               help='function for hidden unit activations')
g.add_argument('--output-activation', default='linear', metavar='FUNC',
               help='function for output unit activations')
g.add_argument('-t', '--tied-weights', action='store_true',
               help='tie encoding and decoding weights')
g.add_argument('--decode', type=int, default=1, metavar='N',
Example #14
#!/usr/bin/env python

# Source: https://github.com/EmbodiedCognition/py-c3d
'''A simple OpenGL viewer for C3D files.'''

import c3d
import climate
import collections
import contextlib
import numpy as np
import pyglet

from pyglet.gl import *

climate.add_arg('inputs', nargs='+', metavar='FILE', help='show these c3d files')

BLACK = (0, 0, 0)
WHITE = (1, 1, 1)
RED = (1, 0.2, 0.2)
YELLOW = (1, 1, 0.2)
ORANGE = (1, 0.7, 0.2)
GREEN = (0.2, 0.9, 0.2)
BLUE = (0.2, 0.3, 0.9)
COLORS = (WHITE, RED, YELLOW, GREEN, BLUE, ORANGE)


@contextlib.contextmanager
def gl_context(scale=None, translate=None, rotate=None, mat=None):
    glPushMatrix()
    if mat is not None:
        glMultMatrixf(vec(*mat))
Example #15
import climate
import lmj.plot as plt
import numpy as np
import seaborn as sns
import theanets

logging = climate.get_logger('rica')

import models

climate.add_arg('--codebook', metavar='FILE', help='save codebook to FILE')
climate.add_arg('--frames', type=int, metavar='T', help='train on sequences of T frames')
climate.add_arg('--overcomplete', type=float, default=2, metavar='K',
                help='learn a Kx overcomplete codebook')


def main(args):
    data = np.load(args.dataset, mmap_mode='r')
    N = data.shape[1]
    T = args.frames
    K = int(N * T * args.overcomplete)

    def batches():
        batch = np.zeros((args.batch_size, N * T), 'f')
        for b in range(args.batch_size):
            o = np.random.randint(len(data) - T - 1)
            batch[b] = data[o:o+T].ravel()
        return [batch]

    net = theanets.Autoencoder([N * T, (K, 'linear'), (N * T, 'tied')])
    net.train(batches,
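
With the theanets 0.x API, a net.train call like the one begun above typically passes the batch callable plus algorithm hyperparameters, roughly as below (the specific values are assumptions, not the original script's):

    net.train(batches,
              algo='rmsprop',
              learning_rate=0.0003,
              momentum=0.9,
              hidden_l1=0.001)  # sparsity pressure on the code layer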