def filter_assignments(assignments, book_only):
    """Flag which assignment rows to keep.

    Adds an 'include' column to `assignments`: a row is kept only if
    (when `book_only` is set) every hex on its gameboard is a book
    question ("ch_"/"ch-i" prefix), its group has at least one student,
    and at least one of those students has recorded attempt data.
    """
    # query = "select id, gameboard_id, group_id, owner_user_id, creation_date from assignments order by creation_date asc"
    assignments["include"] = True
    print(assignments.shape)
    gb_question_map = make_gb_question_map()
    meta = get_meta_data()  # currently unused below
    for ix in range(assignments.shape[0]):
        include = True
        gr_id = assignments.loc[ix, "group_id"]
        if book_only:
            # Exclude the assignment if any hex on its gameboard is not a book question.
            gb_id = assignments.loc[ix, "gameboard_id"]
            hexes = gb_question_map[gb_id]
            for hx in hexes:
                hx = hx.split("|")[0]
                if not (hx.startswith("ch_") or hx.startswith("ch-i")):
                    include = False
                    break
        if include:
            # Exclude the assignment if its group has no students.
            students = get_student_list([gr_id])
            if students.empty:
                include = False
        if include:
            # Keep the assignment only if at least one student has recorded attempts.
            include = False
            for psi in list(students["user_id"]):
                # print("checking", psi)
                atts = get_attempts_from_db(psi)
                if not atts.empty:
                    # print("OK")
                    include = True
                    break
        if not include:
            assignments.loc[ix, "include"] = False
    # assignments = assignments[assignments["include"]==True]
    print(assignments.shape)
    return assignments
import ast

# NOTE: make_gb_question_map and get_student_list (used by filter_assignments
# above) are assumed to live in hwgen.common alongside init_objects; adjust
# this import if they are defined elsewhere in the codebase.
from hwgen.common import init_objects, get_meta_data, make_gb_question_map, get_student_list
from hwgen.concept_extract import concept_extract, page_to_concept_map
from hwgen.model import make_model
from hwgen.profiler import profile_student, get_attempts_from_db

base = "../../../isaac_data_files/"

# need to build a softmax classifier to recommend questions...
# this would have qn output nodes
n_users = -1
cats, cat_lookup, all_qids, users, diffs, levels, cat_ixs, cat_page_lookup, lev_page_lookup, all_page_ids = init_objects(
    n_users)

# Collect the full set of concepts referenced by the question metadata.
hwdf = get_meta_data()
concepts_all = set()
hwdf.index = hwdf["question_id"]
hwdf["related_concepts"] = hwdf["related_concepts"].map(str)
for concepts_raw in hwdf["related_concepts"]:
    print(concepts_raw)
    if concepts_raw != "nan":  # missing values were stringified to "nan" above
        concepts = ast.literal_eval(concepts_raw)  # safer than eval() for parsing the stringified list
        if concepts is not None:
            concepts_all.update(concepts)
concepts_all = list(concepts_all)

asst_fname = base + "assignments.txt"  # pkl
con_page_lookup = page_to_concept_map()
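
# Illustrative sketch (not part of the original pipeline): one way the pieces
# above might be tied together. The tab separator and column names for
# assignments.txt are assumptions based on the commented-out SQL query in
# filter_assignments; adjust them to match the real export format.
if __name__ == "__main__":
    import pandas as pd  # assumed available; the rest of the module already relies on pandas frames

    assignments = pd.read_csv(
        asst_fname,
        sep="\t",  # assumed delimiter
        names=["id", "gameboard_id", "group_id", "owner_user_id", "creation_date"],  # assumed columns
    )
    filtered = filter_assignments(assignments, book_only=True)
    print(filtered["include"].value_counts())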