import os
import string


def collect_result(file_path, save_dir, user_name, user_pwd, keywords_set,
                   problems_num, submission_url):
    # Load the student roster, then log in and fetch every submission.
    st_dict = get_st_dict(file_path)
    crawler = PAT_crawler(user_name, user_pwd, submission_url)
    print "crawler start"
    st_dict = crawler.get_submissions(st_dict)
    print "crawler finished"

    # Problems are labelled 'A', 'B', ...; map each label to its keyword list.
    problems = list(string.ascii_uppercase[:problems_num])
    keywords_set = dict(zip(problems, keywords_set))
    print keywords_set

    # Accumulate each student's total score and flag submissions whose
    # source code passes the keyword filter.
    for key in st_dict:
        for problem_id in problems:
            if problem_id in st_dict[key].submissions:
                st_dict[key].score += st_dict[key].submissions[problem_id].score
                if crawler.code_filter(st_dict[key].submissions[problem_id].code_url,
                                       keywords_set[problem_id]):
                    st_dict[key].submissions[problem_id].pass_filter = True
                    print key, problem_id
    print "filter finished"

    # Export the score sheet and save every student's code under save_dir/code.
    write_xls(save_dir, st_dict, problems)
    print "write xls finished"
    save_codes(crawler, st_dict, os.path.join(save_dir, "code"))
    print "save_code finished"
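# A sketch of how collect_result might be invoked; the file names, account,
# keyword lists, and submission URL below are hypothetical placeholders, not
# values taken from this project:
#
#   collect_result("students.txt", "results", "teacher01", "password",
#                  [['cin', 'cout'], ['class']], 2,
#                  "https://example.com/submissions")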
def main():
    st_dict = get_st_dict("norm2rand.txt")

    # Keep prompting until the crawler has logged in successfully.
    tmp = get_name_pwd_from_console()
    crawler = PAT_crawler(tmp[0], tmp[1])
    while not crawler.logined():
        print "login error! check your account and password and try again!\n"
        tmp = get_name_pwd_from_console()
        crawler.login(tmp[0], tmp[1])

    st_dict = crawler.get_submissions(st_dict)

    # Keyword lists for problems 1001-1008, in order; map each problem id
    # to its keyword list.
    keywords_set = [
        ['cin', 'cout'],
        ['class'],
        ['friend', 'class'],
        ['operator', 'class'],
        ['template'],
        ['cout'],
        ['class', 'virtual'],
        ['class', 'virtual'],
    ]
    problems = [str(x + 1001) for x in range(8)]
    keywords_set = dict(zip(problems, keywords_set))
    print keywords_set

    # Accumulate scores and flag submissions that pass the keyword filter.
    for key in st_dict:
        for problem_id in problems:
            if problem_id in st_dict[key].submissions:
                st_dict[key].score += st_dict[key].submissions[problem_id].score
                if crawler.code_filter(st_dict[key].submissions[problem_id].code_url,
                                       keywords_set[problem_id]):
                    st_dict[key].submissions[problem_id].pass_filter = True

    write_xls(st_dict)
    save_codes(crawler, st_dict)
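# Standard entry-point guard; an addition, assuming this module is meant to
# be run directly as a script (main() matches the original call pattern).
if __name__ == "__main__":
    main()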