Example #1
# Imports needed to run this example on its own; the neurovault module path
# below is assumed to be nilearn's nilearn.datasets.neurovault.
import json
import os
import tempfile

import numpy as np
import pytest

from nilearn.datasets import neurovault


def test_neurosynth_words_vectorized():
    n_im = 5
    with tempfile.TemporaryDirectory() as temp_dir:
        # One words file per image, each giving weight 1 to a single word.
        words_files = [
            os.path.join(temp_dir, 'words_for_image_{0}.json'.format(i))
            for i in range(n_im)
        ]
        words = [str(i) for i in range(n_im)]
        for i, file_name in enumerate(words_files):
            word_weights = np.zeros(n_im)
            word_weights[i] = 1
            words_dict = {
                'data': {
                    'values': dict(zip(words, word_weights))
                }
            }
            with open(file_name, 'wb') as words_file:
                words_file.write(json.dumps(words_dict).encode('utf-8'))
        # Each image contributes exactly one word, so the frequency matrix is
        # square and every column sums to 1.
        freq, voc = neurovault.neurosynth_words_vectorized(words_files)
        assert freq.shape == (n_im, n_im)
        assert (freq.sum(axis=0) == np.ones(n_im)).all()
        # A missing words file should only trigger a warning, not an error.
        with pytest.warns(UserWarning):
            neurovault.neurosynth_words_vectorized(
                (os.path.join(temp_dir, 'no_words_here.json'), ))
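To make the fixture data concrete, here is a small standalone sketch of the JSON payload one iteration of the loop above writes out (shown for i == 0 with n_im == 5); it simply re-runs the dictionary-building lines from the example.

import json

import numpy as np

# Rebuild the payload written for the first image: one weight per word,
# nested under 'data' -> 'values', with weight 1 on word '0'.
words = [str(i) for i in range(5)]
word_weights = np.zeros(5)
word_weights[0] = 1
payload = {'data': {'values': dict(zip(words, word_weights))}}
print(json.dumps(payload))
# {"data": {"values": {"0": 1.0, "1": 0.0, "2": 0.0, "3": 0.0, "4": 0.0}}}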
Example #2
def test_neurosynth_words_vectorized():
    n_im = 5
    # Nose-style variant of Example #1: _TestTemporaryDirectory, assert_equal
    # and assert_warns are helpers defined or imported in the surrounding
    # test module.
    with _TestTemporaryDirectory() as temp_dir:
        words_files = [
            os.path.join(temp_dir, 'words_for_image_{0}.json'.format(i))
            for i in range(n_im)]
        words = [str(i) for i in range(n_im)]
        for i, file_name in enumerate(words_files):
            word_weights = np.zeros(n_im)
            word_weights[i] = 1
            words_dict = {
                'data': {
                    'values': dict(zip(words, word_weights))
                }
            }
            with open(file_name, 'wb') as words_file:
                words_file.write(json.dumps(words_dict).encode('utf-8'))
        freq, voc = neurovault.neurosynth_words_vectorized(words_files)
        assert_equal(freq.shape, (n_im, n_im))
        assert (freq.sum(axis=0) == np.ones(n_im)).all()
        # As in Example #1: a missing words file should emit a UserWarning.
        assert_warns(UserWarning, neurovault.neurosynth_words_vectorized,
                     (os.path.join(temp_dir, 'no_words_here.json'),))
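Both examples end by checking that a missing words file only triggers a warning. Below is a hedged, standalone sketch of that behavior using the standard warnings module; the nilearn.datasets.neurovault import path is an assumption (the examples only show the neurovault name), and per the assertions above the call is expected to warn rather than raise.

import os
import tempfile
import warnings

from nilearn.datasets import neurovault  # assumed module path, as above

with tempfile.TemporaryDirectory() as temp_dir:
    missing_file = os.path.join(temp_dir, 'no_words_here.json')
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always')
        # Passing a path that does not exist should emit a UserWarning.
        neurovault.neurosynth_words_vectorized((missing_file,))
    print(any(issubclass(w.category, UserWarning) for w in caught))  # expected: True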