Example #1
import logging

import zounds

logger = logging.getLogger(__name__)


def hamming_index(snd_cls, recent_id=None, writeonly=False):
    # HammingIndexPath is a project-local helper (defined outside this
    # snippet) that maps an id to an on-disk index location
    if recent_id:
        path = str(HammingIndexPath(recent_id))
    else:
        path = HammingIndexPath.most_recent_index()

    duration_cache = dict()
    web_url_cache = dict()

    # memoized per-document lookup of the source web url
    def web_url(doc, ts):
        try:
            url = web_url_cache[doc._id]
        except KeyError:
            url = doc.meta['web_url']
            web_url_cache[doc._id] = url

        return url

    # memoized per-document total duration, in seconds
    def total_duration(doc, ts):
        try:
            duration = duration_cache[doc._id]
        except KeyError:
            duration = doc.geom.dimensions[0].end / zounds.Seconds(1)
            duration_cache[doc._id] = duration
        return duration

    logger.debug(
        'loading hamming index from {path} in writeonly={writeonly} mode'
        .format(**locals()))

    return zounds.HammingIndex(snd_cls,
                               snd_cls.hashed,
                               version='1',
                               path=path,
                               listen=False,
                               writeonly=writeonly,
                               web_url=web_url,
                               total_duration=total_duration)
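
# A usage sketch, not part of the original snippet: `Sound` stands in for
# any zounds document class exposing a binary `hashed` feature (compare
# Example #4 below); random_search() is the query API exercised there.
if __name__ == '__main__':
    index = hamming_index(Sound)
    for result in index.random_search(n_results=10, sort=True):
        print(result)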
Example #2
import numpy as np
import zounds


# the original snippet begins mid-definition; this class header and feature
# assignment are inferred from the references to WithCodes and bark_kmeans
# later in the example
class WithCodes(WithOnsets):

    bark_kmeans = zounds.VariableRateTimeSeriesFeature(
        zounds.Learned,
        # this feature will be computed using the learned K-Means clusters
        learned=BarkKmeans(),
        needs=WithOnsets.bark,
        store=True)

    pooled = zounds.VariableRateTimeSeriesFeature(
        zounds.Pooled,
        needs=(bark_kmeans, WithOnsets.slices),
        op=np.max,
        axis=0,
        store=True)


if __name__ == '__main__':
    index = zounds.HammingIndex(WithCodes, WithCodes.pooled, listen=True)

    zounds.ingest(
        zounds.PhatDrumLoops(),
        WithOnsets,
        multi_threaded=True)

    # learn K-Means centroids from the drum hits
    if not BarkKmeans.exists():
        print('learning K-Means clusters')
        BarkKmeans.process(docs=(wo.bark for wo in WithOnsets))

    # bark_kmeans = BarkKmeans()

    # force the new pooled feature to be computed
    for doc in WithCodes:
        # merely accessing the feature computes and stores it
        doc.pooled
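
# An illustrative numpy-only sketch (not the zounds API) of the pooling step
# configured above: op=np.max with axis=0 collapses the per-frame K-Means
# codes falling within one onset slice into a single code vector.
import numpy as np

frame_codes = np.random.binomial(1, 0.5, (10, 128))  # 10 frames, 128 clusters
slice_code = np.max(frame_codes, axis=0)  # one pooled 128-dim vector
assert slice_code.shape == (128,)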
Example #3
import os
from urllib.parse import urlparse

import featureflow as ff
import requests
import zounds


def download_zip_archive():
    url = 'https://archive.org/download/FlavioGaete/FlavioGaete22.zip'
    filename = os.path.split(urlparse(url).path)[-1]

    if not os.path.exists(filename):
        resp = requests.get(url, stream=True)

        print('Downloading {url} -> {filename}...'.format(**locals()))

        with open(filename, 'wb') as f:
            for chunk in resp.iter_content(chunk_size=1000000):
                f.write(chunk)

    return filename

if __name__ == '__main__':
    # WithCodes and WithTimbre are document classes defined in the portion
    # of this example that was cut off above
    index = zounds.HammingIndex(
        WithCodes, WithCodes.bfcc_kmeans_pooled, listen=True)

    zip_filename = download_zip_archive()

    print('Processing Audio...')
    for zf in ff.iter_zip(zip_filename):

        # skip macOS metadata ('._') entries in the archive
        if '._' in zf.filename:
            continue

        try:
            print('processing {zf.filename}'.format(**locals()))
            WithTimbre.process(
                _id=zf.filename, meta=zf, raise_if_exists=True)
        except ff.ModelExistsError as e:
            print(e)
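
    # A sketch borrowing the backfill pattern from Example #4: documents
    # processed before the index was constructed are not seen by the
    # listen=True subscription, so add them explicitly once.
    if not len(index):
        index.add_all()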
Example #4
class Sound(BaseModel):
    # BaseModel and produce_fake_hash are defined in the portion of this
    # example that was cut off above; fake_hash attaches one synthetic
    # binary hash code to each FFT frame
    fake_hash = zounds.ArrayWithUnitsFeature(produce_fake_hash,
                                             needs=BaseModel.fft,
                                             store=True)


if __name__ == '__main__':

    zounds.ingest(zounds.InternetArchive('Kevin_Gates_-_By_Any_Means-2014'),
                  Sound,
                  multi_threaded=True)

    def web_url(doc, ts):
        return doc.meta['web_url']

    def total_duration(doc, ts):
        return doc.fake_hash.dimensions[0].end / zounds.Seconds(1)

    index = zounds.HammingIndex(Sound,
                                Sound.fake_hash,
                                path='fake_hash_index',
                                web_url=web_url,
                                total_duration=total_duration)

    if not len(index):
        index.add_all()

    for _ in range(1000):
        list(index.random_search(n_results=50, sort=True))
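
    # A stdlib-only sketch: the loop above discards its results, so it reads
    # like a latency benchmark; wrapping it with wall-clock timing makes the
    # measurement explicit.
    import time

    start = time.time()
    for _ in range(1000):
        list(index.random_search(n_results=50, sort=True))
    print('1000 random searches took {:.2f}s'.format(time.time() - start))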