        # Advance both input files and go to the next iteration.
        line_f = f.readline()
        line_g = g.readline()
        continue

    # Hold out the last 20% of disliked posts for testing; the first 80% go
    # into the training file.
    train_dislikes = dislike_posts[0:int(cnt_dislikes * 0.8)]
    test_dislikes = dislike_posts[int(cnt_dislikes * 0.8):]
    for dislike in train_dislikes:
        dislike_text_posts.write(dislike)
    dislike_text_posts.close()

    # Per-user output directory for the recommendation model.
    if not os.path.isdir('../OK_recommend/user' + user_id):
        mkdir('../OK_recommend/user' + user_id)

    # Build a word graph and train a separate topic model for the liked and
    # for the disliked texts.
    build_words_graph.build_graph(
        'like_texts.txt',
        '../OK_recommend/user' + user_id + '/words_degrees_like.txt')
    training.train_small('like_texts.txt', 'like')
    build_words_graph.build_graph(
        'dislike_texts.txt',
        '../OK_recommend/user' + user_id + '/words_degrees_dislike.txt')
    training.train_small('dislike_texts.txt', 'dislike')

    types = ['like', 'dislike']
    W_like = [0] * K
    W_dislike = [0] * K
    score_like = dict()
    score_dislike = dict()
    topic_of_words_like = dict()
    topic_of_words_dislike = dict()
    bad_user = False
    for typ in types:
        topic_score, word_in_topic, word_score, word_ids, id_words, newK = \
            extract_text_topics.extract_text_topics(
                typ + '_texts.txt', typ, K, user_id, model)
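# Neither build_words_graph nor training is shown in this listing. Judging by
# the module name and the words_degrees*.txt output files, build_graph most
# likely constructs a word co-occurrence graph over the collected texts and
# writes every word together with its degree. The following is only a sketch
# under that assumption (tokenization, co-occurrence window and output format
# are guesses, not the project's actual implementation):
from collections import defaultdict
from itertools import combinations


def build_graph(texts_path, degrees_path):
    """Hypothetical stand-in for build_words_graph.build_graph."""
    neighbours = defaultdict(set)            # word -> words it co-occurs with
    with open(texts_path, encoding='utf-8') as texts:
        for line in texts:
            words = set(line.split())        # assumed whitespace tokenization
            for a, b in combinations(sorted(words), 2):
                neighbours[a].add(b)
                neighbours[b].add(a)
    with open(degrees_path, 'w', encoding='utf-8') as out:
        for word, adjacent in sorted(neighbours.items()):
            out.write('%s %d\n' % (word, len(adjacent)))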
    likes = likes.split("|")
    # Skip users with fewer than 20 likes.
    if len(likes) < 20:
        continue

    # Collect the texts of all liked posts that are still available.
    g = open("texts.txt", "w", encoding="utf-8")
    cnt_existing_posts = 0
    for like in likes:
        Id = like.split(", ")
        group_id = Id[0]
        post_id = Id[1]
        text = read_post(group_id, post_id)
        if text == "-1":
            continue
        cnt_existing_posts += 1
        g.write(text)
    g.close()
    if cnt_existing_posts < 15:
        continue

    # Per-user output directory for the interest profile.
    if not os.path.isdir("../OK_results/user" + user_id):
        mkdir("../OK_results/user" + user_id)

    # Build the word graph, train the topic model and extract the user's topics.
    build_words_graph.build_graph(
        "texts.txt",
        "../OK_results/user" + user_id + "/words_degrees.txt")
    training.train_small("texts.txt")
    extract_text_topics.extract_text_topics("texts.txt", K, user_id, model)
    cnt += 1

print("For %d users an interest profile was built" % cnt)
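# The collection loop above relies on a read_post(group_id, post_id) helper
# that returns the post text, or "-1" when the post is no longer available.
# A minimal sketch, assuming the crawled posts are cached on disk as
# ../OK_posts/group<group_id>/post<post_id>.txt (the path layout is a guess):
import os


def read_post(group_id, post_id):
    path = "../OK_posts/group" + group_id + "/post" + post_id + ".txt"
    if not os.path.isfile(path):
        return "-1"                     # same sentinel the calling code checks
    with open(path, encoding="utf-8") as post_file:
        return post_file.read()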