Example #1
0
def get_data_maxener(pSize=4,
                     keyInv=True,
                     downBeatInv=False,
                     bars=1,
                     partialbar=1,
                     offset=0):
    """
    Util function for something we do all the time
    Remove the empty patterns
    INPUT:
        - pSize        (default: 16)
        - keyInv       (default: True)
        - downBeatInv  (default: False)
        - bars         (default: 2)
    """
    import data_iterator
    import feats_utils as FU
    import numpy as np
    import time
    # start time
    tstart = time.time()
    # get maltab files
    allfiles = FU.get_all_matfiles('.')
    print len(allfiles), ' .mat files found'
    # create and set iterator
    data_iter = data_iterator.DataIterator()
    data_iter.setMatfiles(allfiles)  # set matfiles
    if bars > 0:
        data_iter.useBars(bars)  # a pattern spans 'bars' bars
        if partialbar < 1:
            assert (bars == 1)
            data_iter.usePartialBar(partialbar)
    else:
        data_iter.useBars(0)  # important to set it to zero!
        data_iter.setFeatsize(pSize)  # a pattern is a num. of beats
    if offset > 0:
        data_iter.setOffset(offset)
    data_iter.stopAfterOnePass(True)  # stop after one full iteration
    # get features
    featsNorm = [
        FU.normalize_pattern_maxenergy(p, pSize, keyInv,
                                       downBeatInv).flatten()
        for p in data_iter
    ]
    print 'found ', len(featsNorm), ' patterns before removing empty ones'
    # make it an array
    featsNorm = np.array(featsNorm)
    # remove empyt patterns
    res = [np.sum(r) > 0 for r in featsNorm]
    res2 = np.where(res)
    featsNorm = featsNorm[res2]
    # time?
    print 'all patterns acquired and normalized in ' + str(time.time() -
                                                           tstart) + 'seconds'
    print 'featsNorm.shape = ', featsNorm.shape
    return featsNorm
Example #2
0
def get_data_maxener(pSize=4,keyInv=True,downBeatInv=False,bars=1,partialbar=1,offset=0):
    """
    Util function for something we do all the time
    Remove the empty patterns
    INPUT:
        - pSize        (default: 16)
        - keyInv       (default: True)
        - downBeatInv  (default: False)
        - bars         (default: 2)
    """
    import data_iterator
    import feats_utils as FU
    import numpy as np
    import time
    # start time
    tstart = time.time()
    # get maltab files
    allfiles = FU.get_all_matfiles('.')
    print len(allfiles),' .mat files found'
    # create and set iterator
    data_iter = data_iterator.DataIterator()
    data_iter.setMatfiles(allfiles) # set matfiles
    if bars > 0:
        data_iter.useBars( bars )            # a pattern spans 'bars' bars
        if partialbar < 1:
            assert(bars==1)
            data_iter.usePartialBar( partialbar )
    else:
        data_iter.useBars(0)                 # important to set it to zero!
        data_iter.setFeatsize( pSize )       # a pattern is a num. of beats
    if offset > 0:
        data_iter.setOffset(offset)
    data_iter.stopAfterOnePass(True)# stop after one full iteration
    # get features
    featsNorm = [FU.normalize_pattern_maxenergy(p,pSize,keyInv,downBeatInv).flatten() for p in data_iter]
    print 'found ', len(featsNorm),' patterns before removing empty ones'
    # make it an array
    featsNorm = np.array(featsNorm)
    # remove empyt patterns
    res = [np.sum(r) > 0 for r in featsNorm]
    res2 = np.where(res)
    featsNorm = featsNorm[res2]
    # time?
    print 'all patterns acquired and normalized in ' + str(time.time()-tstart) + 'seconds'
    print 'featsNorm.shape = ',featsNorm.shape
    return featsNorm
Example #3
0
def get_all_barfeats():
    """
    Returns all barfeats, we assume we're at the top of beatFeats dir
    """
    import data_iterator
    import feats_utils as FU
    import numpy as np
    import time

    # get maltab files
    allfiles = FU.get_all_matfiles('.')

    # create and set iterator
    data_iter = data_iterator.DataIterator()
    data_iter.setMatfiles(allfiles)  # set matfiles
    data_iter.useBars(2)  # a pattern spans two bars
    data_iter.stopAfterOnePass(True)  # stop after one full iteration

    # get all feats
    tstart = time.time()
    featsNorm = [
        FU.normalize_pattern(np.array(p), 16, True, True).flatten()
        for p in data_iter
    ]
    print 'all patterns acquired and normalized in ' + str(time.time() -
                                                           tstart) + 'seconds'

    # some stats
    print 'number of matfiles: ' + str(len(allfiles))
    print 'number of patterns: ' + str(len(featsNorm))

    # get one nice big matrix
    featsNorm = np.array(featsNorm)

    # and... we're done, let's launch the algo!

    import scipy.cluster
    import scipy.cluster.vq as SCVQ

    # run a 20 iteration, looking for a codebook of size 10
    tstart = time.time()
    codebook, distortion = SCVQ.kmeans2(featsNorm, 10, 20, minit='points')
    print 'kmeans performed in ' + str(time.time() - tstart) + 'seconds'
Example #4
0
def get_all_barfeats():
    """
    Returns all barfeats, we assume we're at the top of beatFeats dir
    """
    import data_iterator
    import feats_utils as FU
    import numpy as np
    import time

    # get maltab files
    allfiles = FU.get_all_matfiles('.')

    # create and set iterator
    data_iter = data_iterator.DataIterator()
    data_iter.setMatfiles(allfiles) # set matfiles
    data_iter.useBars(2)            # a pattern spans two bars
    data_iter.stopAfterOnePass(True)# stop after one full iteration

    # get all feats
    tstart = time.time()
    featsNorm = [FU.normalize_pattern(np.array(p),16,True,True).flatten() for p in data_iter]
    print 'all patterns acquired and normalized in ' + str(time.time()-tstart) + 'seconds'

    # some stats
    print 'number of matfiles: ' + str(len(allfiles))
    print 'number of patterns: ' + str(len(featsNorm))

    # get one nice big matrix
    featsNorm = np.array(featsNorm)

    # and... we're done, let's launch the algo!


    import scipy.cluster
    import scipy.cluster.vq as SCVQ

    # run a 20 iteration, looking for a codebook of size 10
    tstart = time.time()
    codebook, distortion = SCVQ.kmeans2(featsNorm,10,20,minit='points')
    print 'kmeans performed in ' + str(time.time()-tstart) + 'seconds'
Example #5
0
                        x2 = min(x2, self.currfeats.shape[1])
                    return self.currfeats[:, startidx:x2]

    def __iter__(self):
        """ Returns itself, part of the python iterator interface """
        # Iteration state lives on the instance; presumably next() is
        # implemented elsewhere in this class — confirm against the
        # full class body.
        return self


# debugging: quick manual exercise of DataIterator (truncated below)
if __name__ == '__main__':

    print 'debugging iterator'
    dt = DataIterator()

    # find all files from current dir
    allfiles = FU.get_all_matfiles('.')

    # SET THE DATA ITERATOR
    dt.setMatfiles(allfiles)  # set the matlab files
    dt.useBars(2)  # work based on bars, 2 bars at a time
    featsize = 4
    dt.setFeatsize(featsize)  # useless because we work on bars
    dt.stopAfterOnePass(True)  # stop after one pass, otherwise never stops

    print dt.stats()  # print basic stats

    # loop counters for the iteration below (loop body not shown here)
    cnt = 0
    maxIter = 500000
    #bestPattern = np.random.rand(12,featsize)
    #maxP = -1000
    #minP = 1000
Example #6
0
            

    def __iter__(self):
        """ Returns itself, part of the python iterator interface """
        # The object is its own iterator; presumably next() is defined
        # elsewhere in this class — confirm against the full class body.
        return self
        


# debugging: quick manual exercise of DataIterator (truncated below)
if __name__ == '__main__' :

    print 'debugging iterator'
    dt = DataIterator()

    # find all files from current dir
    allfiles = FU.get_all_matfiles('.')

    # SET THE DATA ITERATOR
    dt.setMatfiles(allfiles) # set the matlab files
    dt.useBars(2)            # work based on bars, 2 bars at a time
    featsize = 4
    dt.setFeatsize(featsize) # useless because we work on bars
    dt.stopAfterOnePass(True)# stop after one pass, otherwise never stops
    
    print dt.stats()         # print basic stats

    # loop counters for the iteration below (loop body not shown here)
    cnt = 0
    maxIter = 500000
    #bestPattern = np.random.rand(12,featsize)
    #maxP = -1000
    #minP = 1000