def POST(self, courseid, groupid):
    """ Edit a group """
    course, _ = get_course_and_check_rights(courseid, allow_all_staff=False)
    student_list, tutor_list = self.get_user_lists(course, groupid)
    group = get_database().groups.find_one({"_id": ObjectId(groupid), "course_id": courseid})

    error = ""
    try:
        data = web.input(group_tutor=[], group_student=[])
        if "delete" in data:
            get_database().groups.remove({"_id": ObjectId(groupid)})
            raise web.seeother("/admin/" + courseid + "/students")
        else:
            # Keep only tutors and students that are actually available for this group
            data["group_tutor"] = [tutor for tutor in data["group_tutor"] if tutor in tutor_list]
            data["group_student"] = [student for student in data["group_student"] if student in student_list]

            if len(data["group_student"]) > int(data["size"]):
                error = "Too many students for the given group size."
            elif data["description"]:
                group = get_database().groups.find_one_and_update(
                    {"_id": ObjectId(groupid)},
                    {"$set": {"description": data["description"],
                              "users": data["group_student"],
                              "tutors": data["group_tutor"],
                              "size": abs(int(data["size"]))}},
                    return_document=ReturnDocument.AFTER)
            else:
                error = "No group description given."
    except (KeyError, ValueError):
        # Only catch form errors here, so that the redirect raised above still propagates
        error = "User returned an invalid form."

    return renderer.course_admin.edit_group(course, student_list, tutor_list, group, error, True)
def init_backend_interface(plugin_manager):
    """ Inits everything that makes the backend work """

    # Update the submissions that still have a jobid with the status error, as the server restarted
    get_database().submissions.update(
        {'jobid': {"$exists": True}},
        {"$unset": {'jobid': ""},
         "$set": {'status': 'error', 'grade': 0.0, 'text': 'Internal error. Server restarted'}},
        multi=True)

    # Create the job manager
    get_job_manager.job_manager = JobManager(
        INGIniousConfiguration["docker_instances"],
        INGIniousConfiguration["containers"],
        INGIniousConfiguration["tasks_directory"],
        INGIniousConfiguration.get("callback_managers_threads", 1),
        INGIniousConfiguration.get("slow_pool_size", 4),
        INGIniousConfiguration.get("fast_pool_size", 4),
        INGIniousConfiguration.get("containers_hard", []),
        plugin_manager)
def _job_done_callback(submissionid, task, job):
    """ Callback called by the JobManager when a job is done.
        Updates the submission in the database with the data returned after the completion of the job. """
    submission = get_submission(submissionid, False)
    submission = get_input_from_submission(submission)

    job = _parse_text(task, job)

    data = {
        "status": ("done" if job["result"] == "success" or job["result"] == "failed" else "error"),  # error only if the error was made by INGInious
        "result": job["result"],
        "grade": job["grade"],
        "text": job.get("text", None),
        "tests": job.get("tests", None),
        "problems": (job["problems"] if "problems" in job else {}),
        "archive": (get_gridfs().put(base64.b64decode(job["archive"])) if "archive" in job else None)
    }

    # Store additional data
    dont_dump = ["task", "course", "input"]
    for index in job:
        if index not in data and index not in dont_dump:
            data[index] = job[index]

    # Save submission to database
    get_database().submissions.update({"_id": submission["_id"]}, {"$set": data})

    for username in submission["username"]:
        UserData(username).update_stats(submission, job)

    PluginManager.get_instance().call_hook("submission_done", submission=submission, job=job)
def add_job(task, inputdata, debug=False):
    """ Add a job in the queue and return a submission id.
        task is a Task instance and inputdata is the input as a dictionary.
        If debug is true, more debug data will be saved. """
    if not User.is_logged_in():
        raise Exception("A user must be logged in to submit an object")

    username = User.get_username()
    course = FrontendCourse(task.get_course_id())

    obj = {
        "courseid": task.get_course_id(),
        "taskid": task.get_id(),
        "input": get_gridfs().put(json.dumps(inputdata)),
        "status": "waiting",
        "submitted_on": datetime.now()}

    # In a group course, the submission is shared with the whole group of the user
    if course.is_group_course() and username not in course.get_staff(True):
        group = get_database().groups.find_one({"course_id": task.get_course_id(), "users": username})
        obj.update({"username": group["users"]})
    else:
        obj.update({"username": [username]})

    submissionid = get_database().submissions.insert(obj)

    PluginManager.get_instance().call_hook("new_submission", submissionid=submissionid, submission=obj, inputdata=inputdata)

    get_job_manager().new_job(task, inputdata, (lambda job: _job_done_callback(submissionid, task, job)),
                              "Frontend - {}".format(username), debug)

    return submissionid
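# Hedged usage sketch (not part of the original source): how a web.py handler might
# call add_job() above. The handler class name, the URL layout and the way form data
# is turned into inputdata are assumptions made only for this example.
class ExampleTaskSubmit(object):
    def POST(self, courseid, taskid):
        course = FrontendCourse(courseid)
        task = course.get_task(taskid)
        inputdata = dict(web.input())      # raw form fields, e.g. {"q1": "my answer"}
        submissionid = add_job(task, inputdata, debug=False)
        return str(submissionid)           # the client can poll this submission id later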
def is_user_registered(self, username, check_group=False):
    """ Returns True if the user is registered """
    has_group = (not check_group) or \
                (get_database().groups.find_one({"users": username, "course_id": self.get_id()}) is not None)
    is_registered = get_database().registration.find_one(
        {"username": username, "courseid": self.get_id()}) is not None
    # Staff members are considered registered even without an explicit registration entry
    return (is_registered and has_group) or username in self.get_staff()
def drop_batch_job(batch_job_id):
    """ Delete a **finished** batch job from the database """
    job = get_database().batch_jobs.find_one({"_id": ObjectId(batch_job_id)})
    if "result" not in job:
        raise Exception("Batch job is still running, cannot delete it")
    get_database().batch_jobs.remove({"_id": ObjectId(batch_job_id)})
    if "file" in job["result"]:
        get_gridfs().delete(job["result"]["file"])
def register_user(self, username, password=None, force=False):
    """ Register a user to the course. Returns True if the registration succeeded, False otherwise. """
    if not force:
        if not self.is_registration_possible(username):
            return False
        if self.is_password_needed_for_registration() and self._registration_password != password:
            return False
    if self.is_open_to_user(username):
        return False  # already registered?
    get_database().registration.insert({"username": username, "courseid": self.get_id(), "date": datetime.now()})
    return True
def view_task(self, courseid, taskid):
    """ Set in the database that the user has viewed this task """
    # Insert a new entry if none exists yet
    get_database().user_tasks.update(
        {"username": self.username, "courseid": courseid, "taskid": taskid},
        {"$setOnInsert": {"username": self.username, "courseid": courseid, "taskid": taskid,
                          "tried": 0, "succeeded": False, "grade": 0.0}},
        upsert=True)
def init(plugin_manager):
    """ Inits everything that makes the backend work """

    # Update the submissions that are still waiting with the status error, as the server restarted
    get_database().submissions.update(
        {"status": "waiting"},
        {"$unset": {"jobid": ""},
         "$set": {"status": "error", "grade": 0.0, "text": "Internal error. Server restarted"}},
        multi=True)

    # Update all batch jobs still running
    get_database().batch_jobs.update(
        {"result": {"$exists": False}},
        {"$set": {"result": {"retval": -1, "stderr": "Internal error. Server restarted"}}},
        multi=True)

    # Create the job manager
    backend_type = INGIniousConfiguration.get("backend", "local")
    if backend_type == "local":
        get_job_manager.job_manager = LocalJobManager(
            INGIniousConfiguration.get("containers",
                                       {"default": "ingi/inginious-c-default", "sekexe": "ingi/inginious-c-sekexe"}),
            INGIniousConfiguration.get("local_agent_tmp_dir", "/tmp/inginious_agent"),
            plugin_manager)
    elif backend_type == "remote":
        get_job_manager.job_manager = RemoteDockerJobManager(
            INGIniousConfiguration.get("docker_daemons", []),
            INGIniousConfiguration.get("containers",
                                       {"default": "ingi/inginious-c-default", "sekexe": "ingi/inginious-c-sekexe"}),
            plugin_manager)
    elif backend_type == "remote_manual":
        get_job_manager.job_manager = RemoteManualAgentJobManager(
            INGIniousConfiguration.get("agents", [{"host": "localhost", "port": 5001}]),
            INGIniousConfiguration.get("containers",
                                       {"default": "ingi/inginious-c-default", "sekexe": "ingi/inginious-c-sekexe"}),
            plugin_manager)
    else:
        raise Exception("Unknown backend {}".format(backend_type))
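# Hedged example (assumption, not taken from the original source): a minimal
# INGIniousConfiguration content that init() above would accept for the default
# "local" backend. Key names and values mirror the .get() calls and defaults in
# init(); real deployments will differ.
EXAMPLE_CONFIGURATION = {
    "backend": "local",
    "containers": {
        "default": "ingi/inginious-c-default",
        "sekexe": "ingi/inginious-c-sekexe",
    },
    "local_agent_tmp_dir": "/tmp/inginious_agent",
}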
def get_user_submissions(task):
    """ Get all the user's submissions for a given task """
    if not User.is_logged_in():
        raise Exception("A user must be logged in to get his submissions")
    cursor = get_database().submissions.find({"username": User.get_username(),
                                              "taskid": task.get_id(),
                                              "courseid": task.get_course_id()})
    cursor.sort([("submitted_on", -1)])
    return list(cursor)
def add_job(task, inputdata, debug=False):
    """ Add a job in the queue and return a submission id.
        task is a Task instance and inputdata is the input as a dictionary.
        If debug is true, more debug data will be saved. """
    if not User.is_logged_in():
        raise Exception("A user must be logged in to submit an object")

    username = User.get_username()
    jobid = get_job_manager().new_job_id()

    obj = {
        "username": username,
        "courseid": task.get_course_id(),
        "taskid": task.get_id(),
        "input": get_gridfs().put(json.dumps(inputdata)),
        "status": "waiting",
        "jobid": jobid,
        "submitted_on": datetime.now()}

    submissionid = get_database().submissions.insert(obj)

    PluginManager.get_instance().call_hook("new_submission", submissionid=submissionid, submission=obj,
                                           jobid=jobid, inputdata=inputdata)

    get_job_manager().new_job(task, inputdata, job_done_callback, "Frontend - {}".format(username), jobid, debug)

    return submissionid
def page(self, course, task):
    """ Get all data and display the page """
    data = list(get_database().user_tasks.find({"courseid": course.get_id(),
                                                "taskid": task.get_id(),
                                                "username": {"$in": course.get_registered_users()}}))
    data = [dict(f.items() + [("url", self.submission_url_generator(course, task, f))]) for f in data]
    if "csv" in web.input():
        return make_csv(data)
    return renderer.course_admin.task_info(course, task, data)
def download_course(self, course, include_old_submissions=False):
    """ Download all submissions for a course """
    submissions = list(get_database().submissions.find(
        {"courseid": course.get_id(),
         "username": {"$in": course.get_registered_users()},
         "status": {"$in": ["done", "error"]}}))
    if not include_old_submissions:
        submissions = self._keep_best_submission(submissions)
    return self.download_submission_set(submissions, '_'.join([course.get_id()]) + '.tgz', ['username', 'taskid'])
def download_student_task(self, course, username, taskid, include_old_submissions=True):
    """ Download all submissions of a user for a given task """
    submissions = list(get_database().submissions.find(
        {"username": username,
         "courseid": course.get_id(),
         "taskid": taskid,
         "status": {"$in": ["done", "error"]}}))
    if not include_old_submissions:
        submissions = self._keep_best_submission(submissions)
    return self.download_submission_set(submissions, '_'.join([username, course.get_id(), taskid]) + '.tgz', [])
def page(self, course, username, task):
    """ Get all data and display the page """
    data = list(get_database().submissions.find({"username": username,
                                                 "courseid": course.get_id(),
                                                 "taskid": task.get_id()})
                .sort([("submitted_on", pymongo.DESCENDING)]))
    data = [dict(f.items() + [("url", self.submission_url_generator(course, str(f["_id"])))]) for f in data]
    if "csv" in web.input():
        return make_csv(data)
    return renderer.course_admin.student_task(course, username, task, data)
def get_registered_users(self, with_admins=True):
    """ Get all the usernames that are registered to this course (in no particular order) """
    usernames = [entry['username'] for entry in
                 get_database().registration.find({"courseid": self.get_id()}, {"username": True, "_id": False})]
    if with_admins:
        return list(set(usernames + self.get_staff()))
    else:
        return usernames
def POST(self, courseid):
    """ POST request """
    course, _ = get_course_and_check_rights(courseid)

    if not course.is_group_course():
        raise web.notfound()

    error = ""
    try:
        data = web.input()
        if not data['group_description']:
            error = 'No group description given.'
        else:
            get_database().groups.insert({"course_id": courseid,
                                          "users": [],
                                          "tutors": [],
                                          "size": 2,
                                          "description": data['group_description']})
    except (KeyError, ValueError):
        # Only catch form errors; anything else should propagate
        error = 'User returned an invalid form.'

    return self.page(course, error, True)
def GET(self, courseid, groupid):
    """ Edit a group """
    course, _ = get_course_and_check_rights(courseid, allow_all_staff=False)
    student_list, tutor_list = self.get_user_lists(course, groupid)
    group = get_database().groups.find_one({"_id": ObjectId(groupid), "course_id": courseid})

    if group:
        return renderer.course_admin.edit_group(course, student_list, tutor_list, group, "", False)
    else:
        raise web.notfound()
def get_user_last_submissions(query, limit, one_per_task=False):
    """ Get the last submissions of a user """
    if not User.is_logged_in():
        raise Exception("A user must be logged in to get his submissions")

    request = query.copy()
    request.update({"username": User.get_username()})

    # We only want the last x tasks tried; restrict the request accordingly
    if one_per_task is True:
        data = get_database().submissions.aggregate([
            {"$match": request},
            {"$sort": {"submitted_on": pymongo.DESCENDING}},
            {"$group": {"_id": {"courseid": "$courseid", "taskid": "$taskid"},
                        "orig_id": {"$first": "$_id"},
                        "submitted_on": {"$first": "$submitted_on"}}},
            {"$sort": {"submitted_on": pymongo.DESCENDING}},
            {"$limit": limit}
        ])
        request = {"_id": {"$in": [d["orig_id"] for d in list(data)]}}

    cursor = get_database().submissions.find(request)
    cursor.sort([("submitted_on", -1)]).limit(limit)
    return list(cursor)
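# Hedged usage sketch (assumption, for illustration only): fetching the five most
# recent submissions of the logged-in user in one course, keeping at most one
# submission per task, as get_user_last_submissions() above allows.
def example_recent_activity(courseid):
    return get_user_last_submissions({"courseid": courseid}, 5, one_per_task=True)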
def _batch_job_done_callback(batch_job_id, result):
    """ Called when the batch job with id batch_job_id has finished.
        result is a dictionary containing:
        - {"retval": 0, "stdout": "...", "stderr": "...", "file": "..."}
            if everything went well (file is a tgz file containing the content of the /output folder of the container)
        - {"retval": "...", "stdout": "...", "stderr": "..."}
            if the container crashed (retval is an int != 0)
        - {"retval": -1, "stderr": "the error message"}
            if the container failed to start
    """

    # If there is a tgz file to save, put it in GridFS
    if "file" in result:
        result["file"] = get_gridfs().put(result["file"].read())

    # Save the result to the database
    get_database().batch_jobs.update(
        {"_id": batch_job_id},
        {"$set": {"result": result}}
    )
def init(app, session_test=None):
    """ Init the session. Should be called before starting the web.py server.
        session_test is specified to emulate a session (used for tests) """
    if session_test is None:
        if web.config.get('_session') is None:
            get_session.session = web.session.Session(app, MongoStore(get_database(), 'sessions'))
            web.config._session = get_session.session  # pylint: disable=protected-access
        else:
            get_session.session = web.config._session  # pylint: disable=protected-access
    else:
        get_session.session = AttrDict(copy.deepcopy(session_test))
def page(self, course, task):
    """ Get all data and display the page """
    groups = []
    if course.is_group_course():
        groups = get_database().groups.find({"course_id": course.get_id()})
        groups = sorted(groups, key=lambda item: item["description"])
        groups.insert(0, {"_id": 0, "users": course.get_staff(), "description": "Course staff", "tutors": {}})

    results = list(get_database().user_tasks.find({"courseid": course.get_id(),
                                                   "taskid": task.get_id(),
                                                   "username": {"$in": course.get_registered_users()}}))

    data = {}
    for user in results:
        user["url"] = self.submission_url_generator(course, task, user)
        data[user["username"]] = user

    user_csv = [dict(data[key].items()) for key in data.keys()]
    if "csv" in web.input():
        return make_csv(user_csv)

    return renderer.course_admin.task_info(course, task, data, groups)
def get_all_batch_jobs_for_course(course_id):
    """ Returns all the batch jobs for the course with id course_id.
        Batch jobs are dicts in the form
        {"courseid": "...", "container_name": "...", "submitted_on": "..."}
        if the job is still ongoing, and
        {"courseid": "...", "container_name": "...", "submitted_on": "...", "result": {...}}
        if the job is done.
        The result dict can be either:
        - {"retval": 0, "stdout": "...", "stderr": "...", "file": "..."}
            if everything went well (file is a GridFS id to a tgz file)
        - {"retval": "...", "stdout": "...", "stderr": "..."}
            if the container crashed (retval is an int != 0)
        - {"retval": -1, "stderr": "the error message"}
            if the container failed to start
    """
    return list(get_database().batch_jobs.find({"courseid": course_id}))
def get_user_lists(self, course, groupid):
    """ Get the available student and tutor lists for editing the group """
    student_list, tutor_list = course.get_registered_users(False), course.get_staff(False)

    # Remove users that are already in another group from the list available to this group
    grouped_users = get_database().groups.aggregate([
        {"$match": {"_id": {"$ne": ObjectId(groupid)}}},
        {"$group": {"_id": "$_id", "gusers": {"$addToSet": "$users"}}}])

    for result in grouped_users:
        for users in result["gusers"]:
            student_list = [student for student in student_list if student not in users]

    return student_list, tutor_list
def get_batch_job_status(batch_job_id):
    """ Returns the batch job with id batch_job_id.
        Batch jobs are dicts in the form
        {"courseid": "...", "container_name": "..."}
        if the job is still ongoing, and
        {"courseid": "...", "container_name": "...", "result": {...}}
        if the job is done.
        The result dict can be either:
        - {"retval": 0, "stdout": "...", "stderr": "...", "file": "..."}
            if everything went well (file is a GridFS id to a tgz file)
        - {"retval": "...", "stdout": "...", "stderr": "..."}
            if the container crashed (retval is an int != 0)
        - {"retval": -1, "stderr": "the error message"}
            if the container failed to start
    """
    return get_database().batch_jobs.find_one({"_id": ObjectId(batch_job_id)})
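# Hedged helper sketch (not in the original source): summarises a batch job returned
# by get_batch_job_status() above, following the result shapes described in its
# docstring. The function name is an assumption made for this example.
def example_describe_batch_job(batch_job_id):
    job = get_batch_job_status(batch_job_id)
    if job is None:
        return "unknown batch job"
    if "result" not in job:
        return "still running"
    result = job["result"]
    if result.get("retval") == 0:
        return "done" + (" (output file available)" if "file" in result else "")
    return "failed: " + result.get("stderr", "unknown error")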
def page(self, course):
    """ Get all data and display the page """
    data = list(get_database().user_tasks.aggregate(
        [
            {"$match": {
                "courseid": course.get_id(),
                "username": {"$in": course.get_registered_users()}
            }},
            {"$group": {
                "_id": "$taskid",
                "viewed": {"$sum": 1},
                "attempted": {"$sum": {"$cond": [{"$ne": ["$tried", 0]}, 1, 0]}},
                "attempts": {"$sum": "$tried"},
                "succeeded": {"$sum": {"$cond": ["$succeeded", 1, 0]}}
            }}
        ]))

    # Load the tasks and keep track of the ones that fail to load
    files = get_readable_tasks(course.get_id())
    output = {}
    errors = []
    for task in files:
        try:
            output[task] = course.get_task(task)
        except Exception as inst:
            errors.append({"taskid": task, "error": str(inst)})
    tasks = OrderedDict(sorted(output.items(), key=lambda t: t[1].get_order()))

    # Now load additional information
    result = OrderedDict()
    for taskid in tasks:
        result[taskid] = {"name": tasks[taskid].get_name(), "viewed": 0, "attempted": 0,
                          "attempts": 0, "succeeded": 0,
                          "url": self.submission_url_generator(course, taskid)}
    for entry in data:
        if entry["_id"] in result:
            result[entry["_id"]]["viewed"] = entry["viewed"]
            result[entry["_id"]]["attempted"] = entry["attempted"]
            result[entry["_id"]]["attempts"] = entry["attempts"]
            result[entry["_id"]]["succeeded"] = entry["succeeded"]

    if "csv" in web.input():
        return make_csv(result)

    return renderer.course_admin.task_list(course, result, errors)
def get_course_data_for_users(cls, courseid, users=None):
    """ Returns data of users for a specific course. users is a list of usernames.
        If users is None, data for all users will be returned.
        The returned value is a dict:
        {"username": {"task_tried": 0, "total_tries": 0, "task_succeeded": 0,
                      "task_grades": {"task_1": 100.0, "task_2": 0.0, ...}}}
        Note that only the tasks that have been viewed at least once appear in task_grades.
    """
    from frontend.custom.courses import FrontendCourse
    course = FrontendCourse(courseid)

    match = {"courseid": courseid}
    if users is not None:
        match["username"] = {"$in": users}

    tasks = course.get_tasks()
    taskids = tasks.keys()
    match["taskid"] = {"$in": taskids}

    data = list(get_database().user_tasks.aggregate(
        [
            {"$match": match},
            {"$group": {
                "_id": "$username",
                "task_tried": {"$sum": {"$cond": [{"$ne": ["$tried", 0]}, 1, 0]}},
                "total_tries": {"$sum": "$tried"},
                "task_succeeded": {"$addToSet": {"$cond": ["$succeeded", "$taskid", False]}},
                "task_grades": {"$addToSet": {"taskid": "$taskid", "grade": "$grade"}}
            }}
        ]))

    if len(data) == 0:
        return {}

    return_data = {}
    for result in data:
        username = result["_id"]
        # Keep only the tasks that are visible to this user
        user_tasks = set([taskid for taskid, task in tasks.iteritems() if task.is_visible_by_user(username)])
        result["total_tasks"] = len(user_tasks)
        result["task_succeeded"] = len(set(result["task_succeeded"]).intersection(user_tasks))
        result["task_grades"] = {dg["taskid"]: dg["grade"] for dg in result["task_grades"] if dg["taskid"] in user_tasks}
        del result["_id"]
        return_data[username] = result
    return return_data
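# Hedged usage sketch (assumption, for illustration only): computing each user's
# average grade over the tasks they have already viewed, from the dict returned by
# get_course_data_for_users() above (assumed here to be exposed as a classmethod of
# UserData, as the cls parameter suggests).
def example_average_grades(courseid):
    averages = {}
    for username, stats in UserData.get_course_data_for_users(courseid).items():
        grades = stats["task_grades"].values()
        averages[username] = sum(grades) / len(grades) if grades else 0.0
    return averages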
def page(self, course, error="", post=False): """ Get all data and display the page """ groups = [] if course.is_group_course(): groups = get_database().groups.find({"course_id": course.get_id()}) groups = sorted(groups, key=lambda item: item["description"]) groups.insert(0, {"_id": 0, "users": course.get_staff(), "description": "Course staff", "tutors": {}}) user_data = UserData.get_course_data_for_users(course.get_id(), course.get_registered_users()) for user in user_data.keys(): user_data[user]["url"] = self.submission_url_generator(course, user) users_csv = [dict(f.items() + [("username", username)]) for username, f in user_data.iteritems()] if "csv" in web.input(): return make_csv(users_csv) return renderer.course_admin.student_list(course, user_data, groups, error, post)
def page(self, course, username):
    """ Get all data and display the page """
    data = list(get_database().user_tasks.find({"username": username, "courseid": course.get_id()}))
    tasks = course.get_tasks()

    result = OrderedDict()
    for taskid in tasks:
        result[taskid] = {"name": tasks[taskid].get_name(), "submissions": 0, "status": "notviewed",
                          "url": self.submission_url_generator(course, username, taskid)}

    for taskdata in data:
        if taskdata["taskid"] in result:
            result[taskdata["taskid"]]["submissions"] = taskdata["tried"]
            if taskdata["tried"] == 0:
                result[taskdata["taskid"]]["status"] = "notattempted"
            elif taskdata["succeeded"]:
                result[taskdata["taskid"]]["status"] = "succeeded"
            else:
                result[taskdata["taskid"]]["status"] = "failed"
            result[taskdata["taskid"]]["grade"] = taskdata["grade"]

    if "csv" in web.input():
        return make_csv(result)

    return renderer.course_admin.student(course, username, result)
def add_batch_job(course, container_name, inputdata, launcher_name=None, skip_permission=False):
    """ Add a job in the queue and return a batch job id.
        inputdata is a dict containing all the keys of get_batch_container_metadata(container_name)[2],
        except the keys "course" and "submissions" if their type is "file"
        (the content of the course and of the submissions is added automatically by this function).
        The associated values are file-like objects for "file" types and strings for "text" types.
    """
    if not skip_permission:
        if not User.is_logged_in():
            raise Exception("A user must be logged in to submit an object")
        username = User.get_username()
        launcher_name = launcher_name or username
        if username not in course.get_admins():
            raise Exception("The user must be an administrator to start a batch job")

    if container_name not in INGIniousConfiguration.get("batch_containers", []):
        raise Exception("This batch container is not allowed to be started")

    container_args = get_job_manager().get_batch_container_metadata(container_name)[2]
    if container_args is None:
        raise Exception("This batch container is not available")

    # Download the course content and the submissions and add them to the input
    if "course" in container_args and container_args["course"]["type"] == "file" and "course" not in inputdata:
        inputdata["course"] = _get_course_data(course)
    if "submissions" in container_args and container_args["submissions"]["type"] == "file" and "submissions" not in inputdata:
        inputdata["submissions"] = _get_submissions_data(course)

    obj = {"courseid": course.get_id(), "container_name": container_name, "submitted_on": datetime.now()}
    batch_job_id = get_database().batch_jobs.insert(obj)

    launcher_name = launcher_name or "plugin"
    get_job_manager().new_batch_job(container_name, inputdata,
                                    lambda r: _batch_job_done_callback(batch_job_id, r),
                                    launcher_name="Frontend - {}".format(launcher_name))

    return batch_job_id
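# Hedged usage sketch (not in the original source): launching a batch job from an
# admin page. The container name and the "yourname" text field are assumptions for
# this example; a real container declares its expected fields through
# get_batch_container_metadata(), and the "course"/"submissions" files are filled
# in automatically by add_batch_job() above.
def example_launch_batch_job(course):
    inputdata = {"yourname": "Course staff"}
    return add_batch_job(course, "ingi/inginious-b-example", inputdata)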