Example #1
import glob

# Project-level helpers (load_data_meta, extract_sent_embed) are assumed to be defined or imported elsewhere.

def load_exp23(data_dir):
    # Infer the experiment id from the second-to-last path component
    # (last character of its first '_'-separated token)
    exp_id = int((data_dir.split('/')[-2]).split('_')[0][-1])
    assert exp_id in (2, 3)
    fld = data_dir
    # Collect all files for one participant
    data_files = sorted(glob.glob(fld + '/*'))
    # Group the sorted files into (data file, metadata file) pairs
    dt_fls_grouped = [tuple(data_files[i:i + 2]) for i in
                      range(0, len(data_files), 2)]

    # For every (data, meta) pair: load it and compute one embedding per sentence
    for data_group in dt_fls_grouped:
        print('\t{}'.format(data_group))
        data_dict, metadata = load_data_meta(data_group)
        word_dict = dict()
        for sent in data_dict:
            word_dict[sent] = extract_sent_embed(sent)
        yield data_group[0], data_dict, word_dict, metadata
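
The loader is a generator, so it can be consumed lazily, one (data, meta) pair at a time. A minimal usage sketch follows; the directory names are invented for illustration and only chosen so that the second-to-last path component starts with a token ending in '2', which makes the exp_id assertion pass:

# Hypothetical paths; adjust to the actual dataset layout.
participant_dir = './data/exp2_pilot/participant_01'

for data_file, data_dict, word_dict, metadata in load_exp23(participant_dir):
    # word_dict maps each sentence to the embedding returned by extract_sent_embed
    print('{}: {} sentence embeddings'.format(data_file, len(word_dict)))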
Example #2
import glob

# Project-level helpers (load_pickle, load_data_meta, disc_pr) are assumed to be defined or imported elsewhere.

def load_exp1(data_dir):
    # Pre-computed word2vec embeddings for the stimulus words
    w2vec_dict = load_pickle('./stimuli/word2vec.pkl')
    # Infer the experiment id from the second-to-last path component (as in load_exp23)
    exp_id = int((data_dir.split('/')[-2]).split('_')[0][-1])
    assert exp_id == 1
    fld = data_dir
    # Collect all files for one participant
    data_files = sorted(glob.glob(fld + '/*'))
    # Group the sorted files into (data file, metadata file) pairs
    dt_fls_grouped = [tuple(data_files[i:i + 2]) for i in
                      range(0, len(data_files), 2)]
    print(fld)
    disc_pr()

    # For every (data, meta) pair (word-cloud pictures and sentence cases),
    # look up the word2vec vector of every stimulus word
    for data_group in dt_fls_grouped:
        data_dict, metadata = load_data_meta(data_group)
        word_dict = dict()
        for word in data_dict:
            word_dict[word] = w2vec_dict[word]
        yield data_group[0], data_dict, word_dict, metadata
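
The word-level loader can be driven the same way. Note that w2vec_dict[word] is a plain dict lookup, so any stimulus word missing from word2vec.pkl raises a KeyError. Again a hedged sketch with invented directory names, chosen so the parsed exp_id is 1:

# Hypothetical paths; the second-to-last component must yield exp_id == 1 (e.g. 'exp1_pilot').
participant_dir = './data/exp1_pilot/participant_01'

for data_file, data_dict, word_dict, metadata in load_exp1(participant_dir):
    # word_dict maps each stimulus word to its word2vec vector
    print('{}: {} word2vec vectors'.format(data_file, len(word_dict)))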