def post_grade_to_lms(header, body):
    '''
    Send grading results back to LMS
        header:  JSON-serialized xqueue_header (string)
        body:    grader reply (string)

    Returns:
        success: Flag indicating successful exchange (Boolean)
    '''
    header_dict = json.loads(header)
    lms_callback_url = header_dict['lms_callback_url']

    payload = {'xqueue_header': header, 'xqueue_body': body}

    # Quick kludge retries to fix prod problem with 6.00x push graders. We're
    # seeing abrupt disconnects when servers are taken out of the ELB, causing
    # in flight lms_ack requests to fail. This just tries five times before
    # giving up.
    attempts = 0
    success = False
    while (not success) and attempts < 5:
        (success, lms_reply) = _http_post(lms_callback_url, payload, settings.REQUESTS_TIMEOUT)
        attempts += 1

    if success:
        statsd.increment('xqueue.consumer.post_grade_to_lms.success')
    else:
        log.error("Unable to return to LMS: lms_callback_url: {0}, payload: {1}, lms_reply: {2}".format(lms_callback_url, payload, lms_reply))
        statsd.increment('xqueue.consumer.post_grade_to_lms.failure')

    return success
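# Illustrative usage sketch only (not part of the original module): the header is
# the JSON-serialized xqueue_header that arrived with the submission, and the body
# is the grader's JSON reply. The URL, key, and scores below are made up.
example_header = json.dumps({
    'lms_callback_url': 'https://lms.example.com/xqueue/update_score/',
    'lms_key': 'abc123',
    'queue_name': 'test-pull',
})
example_body = json.dumps({'correct': True, 'score': 1, 'msg': '<p>Good job!</p>'})
posted_ok = post_grade_to_lms(example_header, example_body)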
def purchased_callback(self):
    """
    When purchased, this should enroll the user in the course.  We are assuming that
    course settings for enrollment date are configured such that only if the (user.email, course_id) pair
    is found in CourseEnrollmentAllowed will the user be allowed to enroll.  Otherwise requiring payment
    would in fact be quite silly since there's a clear back door.
    """
    try:
        course_loc = CourseDescriptor.id_to_location(self.course_id)
        course_exists = modulestore().has_item(self.course_id, course_loc)
    except ValueError:
        raise PurchasedCallbackException(
            "The customer purchased Course {0}, but that course doesn't exist!".format(self.course_id))

    if not course_exists:
        raise PurchasedCallbackException(
            "The customer purchased Course {0}, but that course doesn't exist!".format(self.course_id))

    CourseEnrollment.enroll(user=self.user, course_id=self.course_id, mode=self.mode)

    log.info("Enrolled {0} in paid course {1}, paid ${2}".format(self.user.email, self.course_id, self.line_cost))
    org, course_num, run = self.course_id.split("/")
    statsd.increment("shoppingcart.PaidCourseRegistration.purchased_callback.enrollment",
                     tags=["org:{0}".format(org), "course:{0}".format(course_num), "run:{0}".format(run)])
def post_services(args):
    """
    Sample Request data:
    {
        "name": NAME,
        "data": {
            "terms": [
                {
                    "policy_name": POLICY_NAME,
                    "policy_desc": POLICY_DESC,
                    "policy_values": POLICY_OPTIONS
                },
            ],
            "full_terms_url": URL
        }
    }
    """
    data = request.get_json()
    try:
        result = g.firebase.put('/services',
                                args.get('name', None),
                                args.get('data', {}),
                                params={'print': 'silent'},
                                headers={'X_FANCY_HEADER': 'VERY FANCY'})
        statsd.increment('firebase.services.put')
    except Exception:
        # Write failures are swallowed here, so the caller always receives
        # success=True even if the Firebase put did not happen.
        pass
    return jsonify({'success': True})
def get_single_qitem(queue_name):
    '''
    Retrieve a single queued item, if one exists, from the named queue

    Returns (success, qitem):
        success: Flag whether retrieval is successful (Boolean)
                 If no items in the queue, then return False
        qitem:   Retrieved item
    '''
    queue_name = str(queue_name)

    # Pull a single submission (if one exists) from the named queue
    credentials = pika.PlainCredentials(settings.RABBITMQ_USER, settings.RABBITMQ_PASS)
    connection = pika.BlockingConnection(pika.ConnectionParameters(
        heartbeat_interval=5,
        credentials=credentials,
        host=settings.RABBIT_HOST,
        virtual_host=settings.RABBIT_VHOST))
    channel = connection.channel()
    channel.queue_declare(queue=queue_name, durable=True)

    # qitem is the item from the queue
    method, header, qitem = channel.basic_get(queue=queue_name)

    if method is None or method.NAME == 'Basic.GetEmpty':  # Got nothing
        connection.close()
        return (False, '')
    else:
        channel.basic_ack(method.delivery_tag)
        connection.close()

    statsd.increment('xqueue.consumer.get_single_qitem',
                     tags=['queue:{0}'.format(queue_name)])

    return (True, qitem)
def reset_skipped_subs():
    """
    Reset submissions marked skipped to return them to the queue.
    """
    # Mihara: There's no reason not to do that which I can see.
    counter = Submission.objects.filter(
        state=SubmissionState.skipped,
        posted_results_back_to_queue=False
    ).update(state=SubmissionState.waiting_to_be_graded)

    # Mihara: Seriously, why did they write it like that?
    #counter=0
    #unique_locations=[x['location'] for x in Submission.objects.all().values('location').distinct()]
    #for location in unique_locations:
    #    subs_pending_total= Submission.objects.filter(
    #        location=location,
    #        state=SubmissionState.skipped
    #    ).order_by('-date_created')
    #    for sub in subs_pending_total:
    #        sub.state=SubmissionState.waiting_to_be_graded
    #        counter+=1

    if counter > 0:
        statsd.increment("open_ended_assessment.grading_controller.expire_submissions.reset_skipped_subs",
                         tags=["counter:{0}".format(counter)])
        log.debug("Reset {0} submission from skipped state".format(counter))
def get_services():
    """ Get the collection of all services """
    result = g.firebase.get('/services', None)
    statsd.increment('firebase.services.get')
    return jsonify(dict(services=result))
def _process(self, submission_id, on_done):
    log.info("Processing submission from queue_name: {0}, submission_id: {1}".format(self.queue_name, submission_id))
    try:
        with transaction.atomic():
            submission = self._get_submission(submission_id)
            if submission is None:
                statsd.increment('xqueue.consumer.consumer_callback.submission_does_not_exist',
                                 tags=['queue:{0}'.format(self.queue_name)])
                log.error("Queued pointer refers to nonexistent entry in Submission DB: queue_name: {0}, submission_id: {1}".format(self.queue_name, submission_id))

            # if item has been retired, skip grading
            if submission and not submission.retired:
                self._deliver_submission(submission)
    except Exception as e:
        # catch and acknowledge the message if something goes wrong
        statsd.increment('xqueue.consumer.consumer_callback.unknown_error',
                         tags=['queue:{0}'.format(self.queue_name)])
        log.error("Error processing submission_id: {0} on queue_name: {1}, {2}".format(
            submission_id,
            self.queue_name,
            e,
        ))
    finally:
        # acknowledge that the message was processed
        on_done()
def finalize_expired_submission(sub):
    """
    Expire submissions by posting back to LMS with error message.
    Input:
        timed_out_list from check_if_expired method
    Output:
        Success code.
    """
    grader_dict = {
        'score': 0,
        'feedback': error_template.format(errors="Error scoring submission."),
        'status': GraderStatus.failure,
        'grader_id': "0",
        'grader_type': sub.next_grader_type,
        'confidence': 1,
        'submission_id': sub.id,
    }

    sub.state = SubmissionState.finished
    sub.save()

    grade = create_grader(grader_dict, sub)

    statsd.increment("open_ended_assessment.grading_controller.expire_submissions.finalize_expired_submission",
                     tags=[
                         "course:{0}".format(sub.course_id),
                         "location:{0}".format(sub.location),
                         'grader_type:{0}'.format(sub.next_grader_type)
                     ])

    return True
def _process(self, submission_id, on_done):
    log.info("Processing submission from queue_name: {0}, submission_id: {1}".format(self.queue_name, submission_id))
    try:
        submission = self._get_submission(submission_id)
        if submission is None:
            statsd.increment('xqueue.consumer.consumer_callback.submission_does_not_exist',
                             tags=['queue:{0}'.format(self.queue_name)])
            log.error("Queued pointer refers to nonexistent entry in Submission DB: queue_name: {0}, submission_id: {1}".format(
                self.queue_name,
                submission_id
            ))

        # if item has been retired, skip grading
        if submission and not submission.retired:
            self._deliver_submission(submission)

        # close transaction
        transaction.commit()
    except Exception as e:
        # We need a wide catch here to correctly rollback the
        # transaction and acknowledge the message if something
        # goes wrong
        statsd.increment('xqueue.consumer.consumer_callback.unknown_error',
                         tags=['queue:{0}'.format(self.queue_name)])
        log.error("Error processing submission_id: {0} on queue_name: {1}, {2}".format(
            submission_id,
            self.queue_name,
            e,
        ))
        transaction.rollback()
    finally:
        # acknowledge that the message was processed
        on_done()
def publish(event):
    """A function that allows XModules to publish events. This only supports grade changes right now."""
    if event.get('event_name') != 'grade':
        return

    # Construct the key for the module
    key = KeyValueStore.Key(
        scope=Scope.user_state,
        student_id=user.id,
        block_scope_id=descriptor.location,
        field_name='grade'
    )

    student_module = field_data_cache.find_or_create(key)
    # Update the grades
    student_module.grade = event.get('value')
    student_module.max_grade = event.get('max_value')
    # Save all changes to the underlying KeyValueStore
    student_module.save()

    # Bin score into range and increment stats
    score_bucket = get_score_bucket(student_module.grade, student_module.max_grade)
    org, course_num, run = course_id.split("/")

    tags = [
        "org:{0}".format(org),
        "course:{0}".format(course_num),
        "run:{0}".format(run),
        "score_bucket:{0}".format(score_bucket)
    ]

    if grade_bucket_type is not None:
        tags.append('type:%s' % grade_bucket_type)

    statsd.increment("lms.courseware.question_answered", tags=tags)
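# Illustrative only: the shape of the 'grade' event that this publish() callback
# handles. 'value' is the earned score and 'max_value' the maximum possible score;
# the numbers are made up. Events with any other event_name are ignored.
example_grade_event = {
    'event_name': 'grade',
    'value': 7,
    'max_value': 10,
}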
def grade_essays():
    """
    Polls grading controller for essays to grade and tries to grade them.
    """
    controller_session = util.controller_login()

    log.info(' [*] Polling grading controller...')

    try:
        # See if there are any submissions waiting
        success, pending_count = ml_grader.get_pending_length_from_controller(controller_session)
        while success and pending_count > 0:
            success = ml_grader.handle_single_item(controller_session)
            transaction.commit()
    except Exception as err:
        log.exception("Error getting submission: {0}".format(err))
        statsd.increment("open_ended_assessment.grading_controller.call_ml_grader",
                         tags=["success:Exception"])

    db.reset_queries()

    # Log out of the controller session, which deletes the database row.
    util.controller_logout(controller_session)
    transaction.commit()
def publish(event):
    if event.get('event_name') != 'grade':
        return

    student_module, created = StudentModule.objects.get_or_create(
        course_id=course_id,
        student=user,
        module_type=descriptor.location.category,
        module_state_key=descriptor.location.url(),
        defaults={'state': '{}'},
    )
    student_module.grade = event.get('value')
    student_module.max_grade = event.get('max_value')
    student_module.save()

    # Bin score into range and increment stats
    score_bucket = get_score_bucket(student_module.grade, student_module.max_grade)
    org, course_num, run = course_id.split("/")

    tags = [
        "org:{0}".format(org),
        "course:{0}".format(course_num),
        "run:{0}".format(run),
        "score_bucket:{0}".format(score_bucket)
    ]

    if grade_bucket_type is not None:
        tags.append('type:%s' % grade_bucket_type)

    statsd.increment("lms.courseware.question_answered", tags=tags)
def reset_ml_subs_to_in():
    """
    Reset submissions marked ML to instructor if there are not enough instructor submissions to grade
    This happens if the instructor skips too many submissions
    """
    counter = 0
    unique_locations = [x['location'] for x in list(Submission.objects.values('location').distinct())]
    for location in unique_locations:
        subs_graded, subs_pending = staff_grading_util.count_submissions_graded_and_pending_instructor(location)
        subs_pending_total = Submission.objects.filter(
            location=location,
            state=SubmissionState.waiting_to_be_graded,
            preferred_grader_type="ML"
        ).order_by('-date_created')[:settings.MIN_TO_USE_ML]
        if (subs_graded + subs_pending) < settings.MIN_TO_USE_ML and subs_pending_total.count() > subs_pending:
            for sub in subs_pending_total:
                if sub.next_grader_type == "ML" and sub.get_unsuccessful_graders().count() == 0:
                    staff_grading_util.set_ml_grading_item_back_to_instructor(sub)
                    counter += 1
                if (counter + subs_graded + subs_pending) > settings.MIN_TO_USE_ML:
                    break

    if counter > 0:
        statsd.increment("open_ended_assessment.grading_controller.expire_submissions.reset_ml_subs_to_in",
                         tags=["counter:{0}".format(counter)])
        log.debug("Reset {0} submission from ML to IN".format(counter))
def reset_timed_out_submissions():
    """
    Check if submissions have timed out, and reset them to the appropriate state if they have.
    Output: status code indicating success
    """
    now = timezone.now()
    min_time = datetime.timedelta(seconds=settings.RESET_SUBMISSIONS_AFTER)

    # have to split into 2 queries now, because we are giving some finished submissions to peer graders when
    # there's nothing to grade
    reset_waiting_count = (Submission.objects
                           .filter(date_modified__lt=now - min_time,
                                   state=SubmissionState.being_graded,
                                   posted_results_back_to_queue=False)
                           .update(state=SubmissionState.waiting_to_be_graded))
    reset_finished_count = (Submission.objects
                            .filter(date_modified__lt=now - min_time,
                                    state=SubmissionState.being_graded,
                                    posted_results_back_to_queue=True)
                            .update(state=SubmissionState.finished))
    reset_count = reset_waiting_count + reset_finished_count

    if reset_count > 0:
        statsd.increment("open_ended_assessment.grading_controller.expire_submissions.reset_timed_out_submissions",
                         tags=["counter:{0}".format(reset_count)])
        log.debug("Reset {0} submissions that had timed out in their current grader. {1}->W {2}->F"
                  .format(reset_count, reset_waiting_count, reset_finished_count))

    return True
def send_nearby_carts(lat_long, category="Anything"):
    if category not in TAGS_BY_TRUCK:
        return jsonify(data="Valid categories: %s" % str(TAGS_BY_TRUCK.keys()))
    lat_long = lat_long.split(",")
    latitude = float(lat_long[0])
    longitude = float(lat_long[1])
    unsorted_result = []
    unsorted_result_lat_long = []
    result_feet = []
    result_miles = []
    for index in find_nearby_carts(longitude, latitude, IDX):
        if index in TAGS_BY_TRUCK[category]:
            unsorted_result.append(CARTS[index])
            unsorted_result_lat_long.append(CARTS[index]["latitude"] + "," + CARTS[index]["longitude"])
    distances, addresses = get_distances_and_addresses(lat_long[0] + "," + lat_long[1], unsorted_result_lat_long)
    if len(distances) == len(unsorted_result):
        for i in range(len(unsorted_result)):
            unsorted_result[i]["distance"] = distances[i]
            unsorted_result[i]["address"] = addresses[i]
            if "ft" in unsorted_result[i]["distance"]:
                result_feet.append(unsorted_result[i])
            else:
                result_miles.append(unsorted_result[i])
        result_feet.sort(key=operator.itemgetter("distance"))
        result_miles.sort(key=operator.itemgetter("distance"))
        result = result_feet + result_miles
    else:
        result = unsorted_result
    statsd.increment("cart_api.requests", tags=["support", "page:nearby_carts"])
    return jsonify(data=result)
def register():
    statsd.increment('api_calls.register')
    form = SignupForm(request.form)
    logger.info(request.form)
    if not form.validate():
        msg = {
            'success': False,
            'msg': form.errors}
        return jsonify(msg)

    user = session.query(Person).\
        filter(Person.email == form.email.data).first()
    if user:
        msg = {
            'success': False,
            'msg': user.email + ' is already registered!',
            'parameter': 'email',
        }
        return jsonify(msg)

    u = Person(form)
    session.add(u)
    session.commit()
    try:
        pm = PostMonkey(apikey=MC_APIKEY, timeout=10)
        pm.listSubscribe(
            id=MC_LISTID,
            email_address=form.email.data)
    except MailChimpException, e:
        app.logger.error(str(e))
def get_service(service_id):
    """ Get a single service's data """
    result = g.firebase.get('/services', service_id)
    statsd.increment('firebase.services.get')
    return jsonify(dict(service=result))
def score():
    statsd.increment('api_calls.score.%s' % request.method)
    if request.method == 'GET':
        row = session.query(Score).filter(
            Score.person_id == g.user._id)
        res = []
        for pt in row:
            res.append(pt.json_data())
        return jsonify({
            'msg': res,
            'success': False})

    '''post function'''
    logger.info(request.form)
    form = ScoreForm(request.form)
    if not form.validate():
        msg = {
            'success': False,
            'msg': form.errors}
        return jsonify(msg)

    session.add(Score(form, g.user._id))
    session.commit()
    msg = {
        'success': True,
        'msg': 'Added',
        'id': form.id.data,
    }
    return jsonify(msg)
def reset_timed_out_submissions(subs):
    """
    Check if submissions have timed out, and reset them to waiting to grade state if they have
    Input: subs - A QuerySet of submissions
    Output: status code indicating success
    """
    now = timezone.now()
    min_time = datetime.timedelta(seconds=settings.RESET_SUBMISSIONS_AFTER)
    timed_out_subs = subs.filter(date_modified__lt=now - min_time)
    timed_out_sub_count = timed_out_subs.count()
    count = 0

    for i in xrange(0, timed_out_sub_count):
        sub = subs[i]
        if sub.state == SubmissionState.being_graded:
            sub.state = SubmissionState.waiting_to_be_graded
            sub.save()
            count += 1

    if count > 0:
        statsd.increment("open_ended_assessment.grading_controller.expire_submissions.reset_timed_out_submissions",
                         tags=["counter:{0}".format(count)])
        log.debug("Reset {0} submissions that had timed out in their current grader.".format(count))

    return True
def check_if_grading_finished_for_duplicates():
    duplicate_submissions = Submission.objects.filter(
        preferred_grader_type="PE",
        is_duplicate=True,
        posted_results_back_to_queue=False,
    )
    log.info(duplicate_submissions)
    counter = 0
    for sub in duplicate_submissions:
        if sub.duplicate_submission_id is not None:
            try:
                original_sub = Submission.objects.get(id=sub.duplicate_submission_id)
                if original_sub.state == SubmissionState.finished:
                    finalize_grade_for_duplicate_peer_grader_submissions(sub, original_sub)
                    counter += 1
                    log.debug("Finalized one duplicate submission: Original: {0} Duplicate: {1}".format(original_sub, sub))
            except:
                log.error("Could not finalize grade for submission with id {0}".format(sub.duplicate_submission_id))

    statsd.increment("open_ended_assessment.grading_controller.expire_submissions.check_if_duplicate_grading_finished",
                     tags=[
                         "counter:{0}".format(counter),
                     ])
    log.info("Finalized {0} duplicate submissions".format(counter))
    return True
def showIndex():
    options = {
        "/location/<lat_long>": 'Pass comma separated latitude,longitude value to get info for nearby carts.Values returned are in the format:{"data": [list of carts containing address, applicant, distance, facilitytype, fooditems, latitude, and longitude]}',
        "/location/<lat_long>/<category>": "Returns all carts near comma separated latitude longitude matching a particular category. List of available categories can be found using the /categories option. Results formatted the same as /location/<lat_long>",
        "/categories": 'Returns categories in format "data"=[list of categories]',
    }
    statsd.increment("cart-api.requests", tags=["support", "page:options"])
    return jsonify(options)
def onChannelHangup(self, ev):
    statsd.increment('freeswitch.channels.finished')
    if (ev.Hangup_Cause in config.freeSwitch.normalHangupCauses):
        statsd.increment('freeswitch.channels.finished.normally')
        statsd.increment('freeswitch.channels.finished.normally.' + ev.Hangup_Cause.lower())
    else:
        statsd.increment('freeswitch.channels.finished.abnormally')
        statsd.increment('freeswitch.channels.finished.abnormally.' + ev.Hangup_Cause.lower())
def onChannelHangupComplete(self, ev):
    try:
        statsd.histogram('freeswitch.rtp.skipped_packet.in', ev.variable_rtp_audio_in_skip_packet_count)
        statsd.histogram('freeswitch.rtp.skipped_packet.out', ev.variable_rtp_audio_out_skip_packet_count)
    except:
        log.msg("Unable to read variable_rtp_audio_in_skip_packet_count and / or variable_rtp_audio_out_skip_packet_count ")
    statsd.increment('freeswitch.caller.context.' + ev.Caller_Context)
    statsd.increment('freeswitch.caller.source.' + ev.Caller_Source)
    self.g729_metrics()
def main(request, path):
    ''' Search for a long link matching the `path` and redirect '''

    if len(path) and path[-1] == '/':
        # Removing trailing slash so "/jobs/" and "/jobs" redirect identically
        path = path[:-1]

    link = Link.find_by_hash(path)

    if link is None:
        # Try to find a matching short link by removing valid "catchall" suffixes
        path_prefix, redirect_suffix = suffix_catchall.get_hash_from(path)
        if redirect_suffix is not None:
            # If we found a suffix, we try to find a link again with the prefix
            link = Link.find_by_hash(path_prefix)
    else:
        redirect_suffix = None

    # Instrumentation
    prefix_tag = 'prefix:' + link.prefix if link else 'Http404'
    statsd.increment('workforus.clicks', tags=[prefix_tag])
    statsd.set('workforus.unique_links', link.hash if link else 'Http404', tags=[prefix_tag])
    statsd.set('workforus.unique_ips', get_client_ip(request), tags=['browser:' + get_browser(request)])

    # 404 if link not found or register a click if the DB is not in readonly mode
    if link is None:
        raise Http404
    elif not settings.SITE_READ_ONLY:
        link.click()

    # Tweak the redirection link based on the query string, redirection suffix, etc.
    # FIXME: Handle multiple parameters with the same name in the `url`
    query = request.GET.copy()
    if redirect_suffix is not None:
        query[REDIRECT_PARAM_NAME] = redirect_suffix
    if bool(query) and REF_PARAM_NAME not in query:
        # If we specify a non empty query, indicate that the shortener tweaked the url
        query[REF_PARAM_NAME] = REF_PARAM_DEFAULT_VALUE

    target_url = url_append_parameters(
        link.long_url,
        params_to_replace=query,
        defaults={REF_PARAM_NAME: REF_PARAM_DEFAULT_VALUE}
    )

    # Either redirect the user, or load the target page and display it directly
    return (proxy if link.act_as_proxy else redirect)(target_url)
def main_page():
    statsd.increment('web.page_count', tags=['page:main'])
    # time diff for histograms
    start_time = time.time()
    # putting randomly to sleep to generate delays.
    time.sleep(random.randint(1, 10))
    duration = time.time() - start_time
    # paging data for histogram
    statsd.histogram('page.load.hist_timer', duration, tags=['type:support', 'page:main'])
    return "App Main Page"
def leaderboard():
    statsd.increment('api_calls.leaderboard')
    msg = {
        'success': True,
        'msg': 'leaderboards',
        'all_time': _alltime_leaderboard(),
        'month': _get_tops(30),
        'week': _get_tops(7),
        'day': _get_tops(1),
    }
    return jsonify(msg)
def inapp_products():
    statsd.increment('api_calls.inapp_products')
    prods = session.query(InAppProducts)
    prod_array = []
    for prod in prods:
        prod_array.append(prod.json_data())
    msg = {
        'success': True,
        'msg': prod_array,
    }
    return jsonify(msg)
def main(request, path):
    """Search for a long link matching the `path` and redirect"""
    path = _extract_valid_path(path)
    link = Link.find_by_hash(path)
    redirect_suffix = None
    if link is None:
        # Try to find a matching prefix
        parts = path.split('/', 1)
        if len(parts) == 2:
            path_prefix, redirect_suffix = parts
            # If there was a prefix, we try to find a link again
            link = Link.find_by_hash(path_prefix)

    # Instrumentation
    prefix_tag = 'prefix:' + link.prefix if link else 'Http404'
    statsd.increment('workforus.clicks', tags=[prefix_tag])
    statsd.set('workforus.unique_links', link.hash if link else 'Http404', tags=[prefix_tag])
    statsd.set('workforus.unique_ips', get_client_ip(request), tags=['browser:' + get_browser(request)])

    # 404 if link not found or register a click if the DB is not in readonly mode
    if link is None:
        raise Http404
    elif mongoengine_is_primary():
        link.click()

    # Tweak the redirection link based on the query string, redirection suffix, etc.
    # FIXME: Handle multiple parameters with the same name in the `url`
    query = request.GET.copy()
    if redirect_suffix is not None:
        query[REDIRECT_PARAM_NAME] = redirect_suffix
    if bool(query) and REF_PARAM_NAME not in query:
        # If we specify a non empty query, indicate that the shortener tweaked the url
        query[REF_PARAM_NAME] = REF_PARAM_DEFAULT_VALUE
    target_url = url_append_parameters(
        link.long_url,
        params_to_replace=query,
        defaults={REF_PARAM_NAME: REF_PARAM_DEFAULT_VALUE}
    )

    # Either redirect the user, or load the target page and display it directly
    if link.act_as_proxy:
        return proxy(target_url)
    return redirect(target_url, permanent=True)
def expire_submissions_task():
    flag = True
    log.debug("Starting check for expired subs.")
    # Sleep for some time to allow other processes to get ahead/behind
    time_sleep_value = random.uniform(0, 100)
    time.sleep(time_sleep_value)
    try:
        gc.collect()
        db.reset_queries()
        transaction.commit()

        # Comment out submission expiration for now.  Not really needed while testing.
        expire_submissions.reset_timed_out_submissions()
        """
        expired_list = expire_submissions.get_submissions_that_have_expired(subs)
        if len(expired_list) > 0:
            success = grader_util.finalize_expired_submissions(expired_list)
            statsd.increment("open_ended_assessment.grading_controller.remove_expired_subs",
                             tags=["success:{0}".format(success)])
        """
        try:
            expire_submissions.reset_ml_subs_to_in()
            transaction.commit()
        except Exception:
            log.exception("Could not reset ml to in!")

        try:
            expire_submissions.reset_skipped_subs()
            transaction.commit()
        except Exception:
            log.exception("Could not reset skipped submissions!")

        try:
            generate_student_metrics.regenerate_student_data()
            transaction.commit()
        except Exception:
            log.exception("Could not regenerate student data!")

        try:
            # Remove old ML grading models
            expire_submissions.remove_old_model_files()
            transaction.commit()
        except Exception:
            log.exception("Could not remove ml grading models!")

        log.debug("Finished looping through.")

    except Exception as err:
        log.exception("Could not get submissions to expire! Error: {0}".format(err))
        statsd.increment("open_ended_assessment.grading_controller.remove_expired_subs",
                         tags=["success:Exception"])

    util.log_connection_data()
    transaction.commit()
def user_scores():
    statsd.increment('api_calls.user_scores')
    scrs = session.query(Score).\
        filter(Score.person_id == g.user.id)
    scr_ary = []
    for scr in scrs:
        scr_ary.append(scr.json_data())
    msg = {
        'success': True,
        'msg': 'user\'s scores',
        'scores': scr_ary}
    return jsonify(msg)
def main(request, path):
    ''' Search for a long link matching the `path` and redirect '''

    if len(path) and path[-1] == '/':
        # Removing trailing slash so "/jobs/" and "/jobs" redirect identically
        path = path[:-1]

    link = Link.find_by_hash(path)

    if link is None:
        # Try to find a matching short link by removing valid "catchall" suffixes
        path_prefix, redirect_suffix = suffix_catchall.get_hash_from(path)
        if redirect_suffix is not None:
            # If we found a suffix, we try to find a link again with the prefix
            link = Link.find_by_hash(path_prefix)
    else:
        redirect_suffix = None

    # Instrumentation
    prefix_tag = 'prefix:' + link.prefix if link else 'Http404'
    statsd.increment('workforus.clicks', tags=[prefix_tag])
    statsd.set('workforus.unique_links', link.hash if link else 'Http404', tags=[prefix_tag])
    statsd.set('workforus.unique_ips', get_client_ip(request), tags=['browser:' + get_browser(request)])

    # 404 if link not found or register a click if the DB is not in readonly mode
    if link is None:
        raise Http404
    elif mongoengine_is_primary():
        link.click()

    # Tweak the redirection link based on the query string, redirection suffix, etc.
    # FIXME: Handle multiple parameters with the same name in the `url`
    query = request.GET.copy()
    if redirect_suffix is not None:
        query[REDIRECT_PARAM_NAME] = redirect_suffix
    if bool(query) and REF_PARAM_NAME not in query:
        # If we specify a non empty query, indicate that the shortener tweaked the url
        query[REF_PARAM_NAME] = REF_PARAM_DEFAULT_VALUE

    target_url = url_append_parameters(
        link.long_url,
        params_to_replace=query,
        defaults={REF_PARAM_NAME: REF_PARAM_DEFAULT_VALUE}
    )

    # Either redirect the user, or load the target page and display it directly
    return (proxy if link.act_as_proxy else redirect)(target_url)
def forgot_password():
    statsd.increment('api_calls.forgot_password')
    if request.method == 'POST':
        logger.info(request.form)
        form = EmailForm(request.form)
        if not form.validate():
            msg = {
                'success': False,
                'msg': form.errors,
            }
            return jsonify(msg)

        if secret_key != base64.b64decode(form.sig.data):
            msg = {
                'success': False,
                'msg': 'invalid signature'}
            return jsonify(msg)

        u = session.query(Person).\
            filter(Person.email == form.email.data).\
            first()
        if not u:
            msg = {
                'success': False,
                'msg': 'Email not registered!'}
            return jsonify(msg)

        m = Message(
            "Reset Password",
            recipients=[form.email.data])
        content = (
            'Click <a href="http://' + request.headers['Host'] +
            '/forgot_password?' +
            'key=' + urllib.quote(u.pw_hash) +
            '&email=' + urllib.quote(form.email.data) +
            '">HERE</a>' +
            ' to reset your password')
        m.html = content
        mail.send(m)
        msg = {
            'success': True,
            'msg': 'Mail Sent!'}
        return jsonify(msg)

    logger.info(request.args)
    key = urllib.unquote(request.args['key'])
    email = urllib.unquote(request.args['email'])
    u = session.query(Person).\
        filter(Person.email == email).first()
    if u and key == u.pw_hash:
        response = redirect(url_for('reset_page'))
        response.set_cookie('email', value=email)
        response.set_cookie('key', value=urllib.quote(key))
        return response
    return 'Invalid key!'
def showDistance(lat_long):
    start_point = lat_long
    end_point = "37.7841316511211,-122.39591339799"
    distance_url = (
        "http://maps.googleapis.com/maps/api/distancematrix/json?origins=" + start_point +
        "&destinations=" + end_point +
        "&mode=walking&language=en-EN&sensor=false&units=imperial&key=" + API_KEY
    )
    result = json.load(urllib.urlopen(distance_url))
    statsd.increment("cart-api.requests", tags=["support", "page:distance_test"])
    return jsonify(result)
def report(self, metric_type, value, **kwargs):
    if not statsd_installed:
        return

    if not self.stats_connected:
        statsd.connect(self.host, self.port)
        self.stats_connected = True

    key = "spike.test"
    tags = ["%s:%s" % (k, v) for k, v in kwargs.iteritems()]

    if "postfix" in kwargs:
        key = ".".join([key, kwargs["postfix"]])
        del kwargs["postfix"]

    if metric_type == "counter":
        statsd.increment(key, value, tags=tags)
    elif metric_type == "gauge":
        statsd.gauge(key, value, tags=tags)
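# Illustrative usage only (the reporter instance and keyword arguments are made up):
# every keyword argument becomes a "key:value" tag, and a 'postfix' kwarg is also
# appended to the metric name, yielding "spike.test.latency" for the first call.
reporter.report("counter", 1, postfix="latency", env="staging")
reporter.report("gauge", 0.42, env="staging")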
def post_one_submission_back_to_queue(submission, xqueue_session):
    (success, msg) = util.post_results_to_xqueue(
        xqueue_session,
        submission["xqueue_header"],
        submission["xqueue_body"],
    )
    statsd.increment("open_ended_assessment.grading_controller.post_to_xqueue",
                     tags=["success:{0}".format(success)])

    if success:
        log.debug("Successful post back to xqueue! Success: {0} Message: {1} Xqueue Header: {2} Xqueue body: {3}".format(
            success, msg, submission["xqueue_header"], submission["xqueue_body"]))
    else:
        log.warning("Could not post back. Error: {0}".format(msg))
def reset_subs_in_basic_check():
    # Reset submissions that are stuck in basic check state
    subs_stuck_in_basic_check = Submission.objects.filter(
        next_grader_type="BC",
        state__in=[SubmissionState.waiting_to_be_graded, SubmissionState.being_graded]
    )
    count = 0
    for sub in subs_stuck_in_basic_check:
        handle_submission(sub)
        count += 1

    if count > 0:
        statsd.increment("open_ended_assessment.grading_controller.expire_submissions.reset_subs_in_basic_check",
                         tags=["counter:{0}".format(count)])
        log.debug("Reset {0} basic check subs properly.".format(count))
    return True
def post_failure_to_lms(header):
    '''
    Send notification to the LMS (and the student) that the submission has failed,
    and that the problem should be resubmitted
    '''

    # This is the only part of the XQueue that assumes knowledge of
    # the external grader message format.
    # TODO: Make the notification message-format agnostic
    msg = '<div class="capa_alert">'
    msg += 'Your submission could not be graded. '
    msg += 'Please recheck your submission and try again. '
    msg += 'If the problem persists, please notify the course staff.'
    msg += '</div>'
    failure_msg = {'correct': None,
                   'score': 0,
                   'msg': msg}
    statsd.increment('xqueue.consumer.post_failure_to_lms')
    return post_grade_to_lms(header, json.dumps(failure_msg))
def process_item(self, content, queue=None):
    try:
        statsd.increment('xqueuewatcher.process-item')
        body = content['xqueue_body']
        files = content['xqueue_files']

        # Delivery from the lms
        body = json.loads(body)
        student_response = body['student_response']
        payload = body['grader_payload']

        # try:
        #     grader_config = json.loads(payload)
        # except ValueError as err:
        #     # If parsing json fails, erroring is fine--something is wrong in the content.
        #     # However, for debugging, still want to see what the problem is
        #     statsd.increment('xqueuewatcher.grader_payload_error')
        #     self.log.debug("error parsing: '{0}' -- {1}".format(payload, err))
        #     raise

        # self.log.debug("Processing submission, grader payload: {0}".format(payload))
        # relative_grader_path = grader_config['grader']
        # grader_path = (self.grader_root / relative_grader_path).abspath()

        start = time.time()
        # results = self.grade(grader_path, grader_config, student_response)
        results = self.grade(payload, files, student_response)

        statsd.histogram('xqueuewatcher.grading-time', time.time() - start)

        # Make valid JSON message
        reply = {'correct': results['correct'],
                 'score': results['score'],
                 'msg': self.render_results(results)}

        statsd.increment('xqueuewatcher.replies (non-exception)')
    except Exception as e:
        self.log.exception("process_item")
        if queue:
            queue.put(e)
        else:
            raise
    else:
        if queue:
            queue.put(reply)
        return reply
def reset_failed_subs_in_basic_check():
    # Reset submissions that are stuck in basic check state
    subs_failed_basic_check = Submission.objects.filter(
        grader__grader_type="BC",
        grader__status_code=GraderStatus.failure,
        state=SubmissionState.waiting_to_be_graded,
    ).exclude(grader__status_code=GraderStatus.success)
    count = 0
    for sub in subs_failed_basic_check:
        handle_submission(sub)
        count += 1

    if count > 0:
        statsd.increment("open_ended_assessment.grading_controller.expire_submissions.reset_subs_failed_basic_check",
                         tags=["counter:{0}".format(count)])
        log.debug("Reset {0} basic check failed subs properly.".format(count))
    return True
def create_message(message_dict):
    """
    Creates a message object.
    Input:
        Dictionary with keys specified below
    Output:
        Boolean true/false, message id or error message
    """
    for tag in ['grader_id', 'originator', 'submission_id', 'message', 'recipient', 'message_type', 'score']:
        if not message_dict.has_key(tag):
            return False, "Needed tag '{0}' missing".format(tag)

    grade = Grader.objects.get(id=message_dict['grader_id'])
    submission = Submission.objects.get(id=message_dict['submission_id'])

    msg = Message(
        grader=grade,
        message=message_dict['message'],
        originator=message_dict['originator'],
        recipient=message_dict['recipient'],
        message_type=message_dict['message_type'],
        score=message_dict['score']
    )

    try:
        msg.save()
    except:
        error = "Could not save the message"
        log.exception(error)
        return False, error

    statsd.increment("open_ended_assessment.grading_controller.create_message",
                     tags=[
                         "course:{0}".format(submission.course_id),
                         "location:{0}".format(submission.location),
                         "grader_type:{0}".format(submission.previous_grader_type),
                         "grade:{0}".format(grade.score),
                         "message_type:{0}".format(message_dict['message_type']),
                         "message_score:{0}".format(message_dict['score'])
                     ])

    return True, msg.id
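# Illustrative only: a message_dict carrying every required key (the ids and text
# are made up). create_message() returns (True, message_id) on success, or
# (False, error_message) when a key is missing or the save fails.
example_message_dict = {
    'grader_id': 42,
    'originator': 'anonymous_student_id_abc',
    'submission_id': 1234,
    'message': 'This grade seems too low.',
    'recipient': 'controller',
    'message_type': 'feedback',
    'score': 2,
}
created_ok, message_id_or_error = create_message(example_message_dict)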
def post_one_submission_back_to_queue(submission, xqueue_session):
    xqueue_header, xqueue_body = util.create_xqueue_header_and_body(submission)
    (success, msg) = util.post_results_to_xqueue(
        xqueue_session,
        json.dumps(xqueue_header),
        json.dumps(xqueue_body),
    )
    statsd.increment("open_ended_assessment.grading_controller.post_to_xqueue",
                     tags=["success:{0}".format(success)])

    if success:
        log.debug("Successful post back to xqueue! Success: {0} Message: {1} Xqueue Header: {2} Xqueue body: {3}".format(
            success, msg, xqueue_header, xqueue_body))
        submission.posted_results_back_to_queue = True
        submission.save()
    else:
        log.warning("Could not post back. Error: {0}".format(msg))
def reset_in_subs_to_ml():
    count = 0
    in_subs = Submission.objects.filter(
        state=SubmissionState.waiting_to_be_graded,
        next_grader_type="IN",
        preferred_grader_type="ML"
    )
    for sub in in_subs:
        # If an instructor checks out a submission after ML grading has started,
        # this resets it to ML if the instructor times out
        success = ml_grading_util.check_for_all_model_and_rubric_success(sub.location)
        if sub.next_grader_type == "IN" and success:
            sub.next_grader_type = "ML"
            sub.save()
            count += 1

    if count > 0:
        statsd.increment("open_ended_assessment.grading_controller.expire_submissions.reset_in_subs_to_ml",
                         tags=["counter:{0}".format(count)])
        log.debug("Reset {0} instructor subs to ML".format(count))
    return True
def pull_from_single_grading_queue(queue_name, controller_session, xqueue_session, post_url, status_url):
    try:
        # Get and parse queue objects
        success, queue_length = get_queue_length(queue_name, xqueue_session)

        # Check to see if the grading_controller server is up so that we can post to it
        (is_alive, status_string) = util._http_get(
            controller_session,
            urlparse.urljoin(settings.GRADING_CONTROLLER_INTERFACE['url'], status_url))

        # Only post while we were able to get a queue length from the xqueue, there are items
        # in the queue, and the grading controller is up for us to post to.
        while success and queue_length > 0 and is_alive:
            # Sleep for some time to allow other pull_from_xqueue processes to get behind/ahead
            time_sleep_value = random.uniform(0, .1)
            time.sleep(time_sleep_value)

            success, queue_item = get_from_queue(queue_name, xqueue_session)
            success, content = util.parse_xobject(queue_item, queue_name)

            # Post to grading controller here!
            if success:
                # Post to controller
                log.debug("Trying to post.")
                post_data = util._http_post(
                    controller_session,
                    urlparse.urljoin(settings.GRADING_CONTROLLER_INTERFACE['url'], post_url),
                    content,
                    settings.REQUESTS_TIMEOUT,
                )
                log.debug(post_data)
                statsd.increment("open_ended_assessment.grading_controller.pull_from_xqueue",
                                 tags=["success:True", "queue_name:{0}".format(queue_name)])
            else:
                log.info("Error getting queue item or no queue items to get.")
                statsd.increment("open_ended_assessment.grading_controller.pull_from_xqueue",
                                 tags=["success:False", "queue_name:{0}".format(queue_name)])

            success, queue_length = get_queue_length(queue_name, xqueue_session)
    except Exception as err:
        log.debug("Error getting submission: {0}".format(err))
        statsd.increment("open_ended_assessment.grading_controller.pull_from_xqueue",
                         tags=["success:Exception", "queue_name:{0}".format(queue_name)])
def do_POST(data):
    statsd.increment('xserver.post-requests')

    # This server expects jobs to be pushed to it from the queue
    xpackage = json.loads(data)
    body = xpackage['xqueue_body']
    files = xpackage['xqueue_files']

    # Delivery from the lms
    files = files and json.loads(files) or {}
    body = json.loads(body)
    student_response = body['student_response']
    payload = body['grader_payload']
    try:
        grader_config = json.loads(payload)
    except ValueError as err:
        # If parsing json fails, erroring is fine--something is wrong in the content.
        # However, for debugging, still want to see what the problem is
        statsd.increment('xserver.grader_payload_error')
        log.debug("error parsing: '{0}' -- {1}".format(payload, err))
        raise

    log.debug("Processing submission, grader payload: {0}".format(payload))
    relative_grader_path = grader_config['grader']
    grader_path = os.path.join(settings.GRADER_ROOT, relative_grader_path)
    start = time()
    results = grade.grade(grader_path, grader_config, student_response, sandbox, files)
    statsd.histogram('xserver.grading-time', time() - start)

    # Make valid JSON message
    reply = {'correct': results['correct'],
             'score': results['score'],
             'msg': render_results(results)}

    statsd.increment('xserver.post-replies (non-exception)')

    return json.dumps(reply)
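# A minimal sketch of the JSON payload do_POST() expects (all values here are
# hypothetical): xqueue_body and xqueue_files are themselves JSON-encoded strings,
# and grader_payload names the grader script to run relative to GRADER_ROOT.
example_xpackage = json.dumps({
    'xqueue_body': json.dumps({
        'student_response': 'print "hello"',
        'grader_payload': json.dumps({'grader': 'hw1/grade_hello.py'}),
    }),
    'xqueue_files': json.dumps({}),
})
example_reply = do_POST(example_xpackage)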
def submit_message(request):
    """
    Submits a message to the grading controller.
    """
    if request.method != 'POST':
        return util._error_response("'submit_message' must use HTTP POST", _INTERFACE_VERSION)

    reply_is_valid, header, body = _is_valid_reply_message(request.POST.copy())

    if not reply_is_valid:
        log.error("Invalid xqueue object added: request_ip: {0} request.POST: {1}".format(
            util.get_request_ip(request),
            request.POST,
        ))
        statsd.increment("open_ended_assessment.grading_controller.controller.xqueue_interface.submit_message",
                         tags=["success:Exception"])
        return util._error_response('Incorrect format', _INTERFACE_VERSION)

    message = body['feedback']
    message = util.sanitize_html(message)
    grader_id = body['grader_id']
    submission_id = body['submission_id']
    originator = body['student_info']['anonymous_student_id']

    try:
        if 'score' in body:
            score = int(body['score'])
        else:
            score = None
    except Exception:
        # Report the raw value from the body; referring to `score` here would raise a
        # NameError because the assignment above is what failed.
        error_message = "Score was not an integer, received \"{0}\" instead.".format(body.get('score'))
        log.exception(error_message)
        return util._error_response(error_message, _INTERFACE_VERSION)

    try:
        grade = Grader.objects.get(id=grader_id)
    except Exception:
        error_message = "Could not find a grader object for message from xqueue"
        log.exception(error_message)
        return util._error_response(error_message, _INTERFACE_VERSION)

    try:
        submission = Submission.objects.get(id=submission_id)
    except Exception:
        error_message = "Could not find a submission object for message from xqueue"
        log.exception(error_message)
        return util._error_response(error_message, _INTERFACE_VERSION)

    if grade.submission.id != submission.id:
        error_message = "Grader id does not match submission id that was passed in"
        log.exception(error_message)
        return util._error_response(error_message, _INTERFACE_VERSION)

    if originator not in [submission.student_id, grade.grader_id]:
        error_message = "Message originator is not the grader, or the person being graded"
        log.exception(error_message)
        return util._error_response(error_message, _INTERFACE_VERSION)

    if grade.grader_type in ["ML", "IN"]:
        recipient_type = "controller"
        recipient = "controller"
    else:
        recipient_type = "human"

    if recipient_type != 'controller':
        if originator == submission.student_id:
            recipient = grade.grader_id
        elif originator == grade.grader_id:
            recipient = submission.student_id

    if recipient not in [submission.student_id, grade.grader_id, 'controller']:
        error_message = "Message recipient is not the grader, the person being graded, or the controller"
        log.exception(error_message)
        return util._error_response(error_message, _INTERFACE_VERSION)

    if originator == recipient:
        error_message = "Message recipient is the same as originator"
        log.exception(error_message)
        return util._error_response(error_message, _INTERFACE_VERSION)

    message_dict = {
        'grader_id': grader_id,
        'originator': originator,
        'submission_id': submission_id,
        'message': message,
        'recipient': recipient,
        'message_type': "feedback",
        'score': score
    }

    success, error = message_util.create_message(message_dict)

    if not success:
        return util._error_response(error, _INTERFACE_VERSION)

    return util._success_response({'message_id': error}, _INTERFACE_VERSION)
def login_user(request, error=""):
    ''' AJAX request to log in the user. '''
    if 'email' not in request.POST or 'password' not in request.POST:
        return HttpResponse(json.dumps({'success': False,
                                        'value': 'There was an error receiving your login information. Please email us.'}))  # TODO: User error message

    email = request.POST['email']
    password = request.POST['password']
    try:
        user = User.objects.get(email=email)
    except User.DoesNotExist:
        log.warning(u"Login failed - Unknown user email: {0}".format(email))
        return HttpResponse(json.dumps({'success': False,
                                        'value': 'Email or password is incorrect.'}))  # TODO: User error message

    username = user.username
    user = authenticate(username=username, password=password)
    if user is None:
        log.warning(u"Login failed - password for {0} is invalid".format(email))
        return HttpResponse(json.dumps({'success': False,
                                        'value': 'Email or password is incorrect.'}))

    if user is not None and user.is_active:
        try:
            login(request, user)
            if request.POST.get('remember') == 'true':
                request.session.set_expiry(604800)
                log.debug("Setting user session to never expire")
            else:
                request.session.set_expiry(0)
        except Exception as e:
            log.critical("Login failed - Could not create session. Is memcached running?")
            log.exception(e)

        log.info(u"Login success - {0} ({1})".format(username, email))

        try_change_enrollment(request)

        statsd.increment("common.student.successful_login")

        response = HttpResponse(json.dumps({'success': True}))

        # set the login cookie for the edx marketing site
        # we want this cookie to be accessed via javascript
        # so httponly is set to None
        if request.session.get_expire_at_browser_close():
            max_age = None
            expires = None
        else:
            max_age = request.session.get_expiry_age()
            expires_time = time.time() + max_age
            expires = cookie_date(expires_time)

        response.set_cookie(settings.EDXMKTG_COOKIE_NAME,
                            'true',
                            max_age=max_age,
                            expires=expires,
                            domain=settings.SESSION_COOKIE_DOMAIN,
                            path='/',
                            secure=None,
                            httponly=None)

        return response

    log.warning(u"Login failed - Account not active for user {0}, resending activation".format(username))

    reactivation_email_for_user(user)
    not_activated_msg = "This account has not been activated. We have " + \
                        "sent another activation message. Please check your " + \
                        "e-mail for the activation instructions."
    return HttpResponse(json.dumps({'success': False,
                                    'value': not_activated_msg}))
def create_account(request, post_override=None):
    '''
    JSON call to create new edX account.
    Used by form in signup_modal.html, which is included into navigation.html
    '''
    js = {'success': False}

    post_vars = post_override if post_override else request.POST

    # if doing signup for an external authorization, then get email, password, name from the eamap
    # don't use the ones from the form, since the user could have hacked those
    DoExternalAuth = 'ExternalAuthMap' in request.session
    if DoExternalAuth:
        eamap = request.session['ExternalAuthMap']
        email = eamap.external_email
        name = eamap.external_name
        password = eamap.internal_password
        post_vars = dict(post_vars.items())
        post_vars.update(dict(email=email, name=name, password=password))
        log.debug('extauth test: post_vars = %s' % post_vars)

    # Confirm we have a properly formed request
    for a in ['username', 'email', 'password', 'name']:
        if a not in post_vars:
            js['value'] = "Error (401 {field}). E-mail us.".format(field=a)
            js['field'] = a
            return HttpResponse(json.dumps(js))

    if post_vars.get('honor_code', 'false') != u'true':
        js['value'] = "To enroll, you must follow the honor code.".format(field=a)
        js['field'] = 'honor_code'
        return HttpResponse(json.dumps(js))

    if post_vars.get('terms_of_service', 'false') != u'true':
        js['value'] = "You must accept the terms of service.".format(field=a)
        js['field'] = 'terms_of_service'
        return HttpResponse(json.dumps(js))

    # Confirm appropriate fields are there.
    # TODO: Check e-mail format is correct.
    # TODO: Confirm e-mail is not from a generic domain (mailinator, etc.)? Not sure if
    # this is a good idea
    # TODO: Check password is sane
    for a in ['username', 'email', 'name', 'password', 'terms_of_service', 'honor_code']:
        if len(post_vars[a]) < 2:
            error_str = {
                'username': '******',
                'email': 'A properly formatted e-mail is required.',
                'name': 'Your legal name must be a minimum of two characters long.',
                'password': '******',
                'terms_of_service': 'Accepting Terms of Service is required.',
                'honor_code': 'Agreeing to the Honor Code is required.'
            }
            js['value'] = error_str[a]
            js['field'] = a
            return HttpResponse(json.dumps(js))

    try:
        validate_email(post_vars['email'])
    except ValidationError:
        js['value'] = "Valid e-mail is required.".format(field=a)
        js['field'] = 'email'
        return HttpResponse(json.dumps(js))

    try:
        validate_slug(post_vars['username'])
    except ValidationError:
        js['value'] = "Username should only consist of A-Z and 0-9, with no spaces.".format(field=a)
        js['field'] = 'username'
        return HttpResponse(json.dumps(js))

    # Ok, looks like everything is legit.  Create the account.
    ret = _do_create_account(post_vars)
    if isinstance(ret, HttpResponse):  # if there was an error then return that
        return ret
    (user, profile, registration) = ret

    d = {'name': post_vars['name'],
         'key': registration.activation_key,
         }

    # composes activation email
    subject = render_to_string('emails/activation_email_subject.txt', d)
    # Email subject *must not* contain newlines
    subject = ''.join(subject.splitlines())
    message = render_to_string('emails/activation_email.txt', d)

    try:
        if settings.MITX_FEATURES.get('REROUTE_ACTIVATION_EMAIL'):
            dest_addr = settings.MITX_FEATURES['REROUTE_ACTIVATION_EMAIL']
            message = ("Activation for %s (%s): %s\n" % (user, user.email, profile.name) +
                       '-' * 80 + '\n\n' + message)
            send_mail(subject, message, settings.DEFAULT_FROM_EMAIL, [dest_addr], fail_silently=False)
        elif not settings.GENERATE_RANDOM_USER_CREDENTIALS:
            res = user.email_user(subject, message, settings.DEFAULT_FROM_EMAIL)
    except:
        log.warning('Unable to send activation email to user', exc_info=True)
        js['value'] = 'Could not send activation e-mail.'
        return HttpResponse(json.dumps(js))

    # Immediately after a user creates an account, we log them in. They are only
    # logged in until they close the browser. They can't log in again until they click
    # the activation link from the email.
    login_user = authenticate(username=post_vars['username'], password=post_vars['password'])
    login(request, login_user)
    request.session.set_expiry(0)

    try_change_enrollment(request)

    if DoExternalAuth:
        eamap.user = login_user
        eamap.dtsignup = datetime.datetime.now()
        eamap.save()
        log.debug('Updated ExternalAuthMap for %s to be %s' % (post_vars['username'], eamap))

        if settings.MITX_FEATURES.get('BYPASS_ACTIVATION_EMAIL_FOR_EXTAUTH'):
            log.debug('bypassing activation email')
            login_user.is_active = True
            login_user.save()

    statsd.increment("common.student.account_created")

    js = {'success': True}
    HttpResponse(json.dumps(js), mimetype="application/json")

    response = HttpResponse(json.dumps({'success': True}))

    # set the login cookie for the edx marketing site
    # we want this cookie to be accessed via javascript
    # so httponly is set to None
    if request.session.get_expire_at_browser_close():
        max_age = None
        expires = None
    else:
        max_age = request.session.get_expiry_age()
        expires_time = time.time() + max_age
        expires = cookie_date(expires_time)

    response.set_cookie(settings.EDXMKTG_COOKIE_NAME,
                        'true',
                        max_age=max_age,
                        expires=expires,
                        domain=settings.SESSION_COOKIE_DOMAIN,
                        path='/',
                        secure=None,
                        httponly=None)

    return response
    Output:
        Returns status code indicating success (0) or failure (1) and message
    '''
    if request.method != 'POST':
        return util._error_response("'submit' must use HTTP POST", _INTERFACE_VERSION)
    else:
        # Minimal parsing of reply
        reply_is_valid, header, body = _is_valid_reply(request.POST.copy())

        if not reply_is_valid:
            log.error("Invalid xqueue object added: request_ip: {0} request.POST: {1}".format(
                util.get_request_ip(request),
                request.POST,
            ))
            statsd.increment("open_ended_assessment.grading_controller.controller.xqueue_interface.submit",
                             tags=["success:Exception"])
            return util._error_response('Incorrect format', _INTERFACE_VERSION)
        else:
            try:
                # Retrieve individual values from xqueue body and header.
                prompt = util._value_or_default(body['grader_payload']['prompt'], "")
                rubric = util._value_or_default(body['grader_payload']['rubric'], "")
                student_id = util._value_or_default(body['student_info']['anonymous_student_id'])
                location = util._value_or_default(body['grader_payload']['location'])
                course_id = util._value_or_default(body['grader_payload']['course_id'])
                problem_id = util._value_or_default(body['grader_payload']['problem_id'], location)
                grader_settings = util._value_or_default(body['grader_payload']['grader_settings'], "")
                student_response = util._value_or_default(body['student_response'])
                student_response = util.sanitize_html(student_response)
                xqueue_submission_id = util._value_or_default(header['submission_id'])
                xqueue_submission_key = util._value_or_default(header['submission_key'])
def change_enrollment(request):
    """
    Modify the enrollment status for the logged-in user.

    The request parameter must be a POST request (other methods return 405)
    that specifies course_id and enrollment_action parameters. If course_id or
    enrollment_action is not specified, if course_id is not valid, if
    enrollment_action is something other than "enroll" or "unenroll", if
    enrollment_action is "enroll" and enrollment is closed for the course, or
    if enrollment_action is "unenroll" and the user is not enrolled in the
    course, a 400 error will be returned. If the user is not logged in, 403
    will be returned; it is important that only this case return 403 so the
    front end can redirect the user to a registration or login page when this
    happens. This function should only be called from an AJAX request or as a
    post-login/registration helper, so the error messages in the responses
    should never actually be user-visible.
    """
    if request.method != "POST":
        return HttpResponseNotAllowed(["POST"])

    user = request.user
    if not user.is_authenticated():
        return HttpResponseForbidden()

    action = request.POST.get("enrollment_action")
    course_id = request.POST.get("course_id")
    if course_id is None:
        return HttpResponseBadRequest("Course id not specified")

    if action == "enroll":
        # Make sure the course exists
        # We don't do this check on unenroll, or a bad course id can't be unenrolled from
        try:
            course = course_from_id(course_id)
        except ItemNotFoundError:
            log.warning("User {0} tried to enroll in non-existent course {1}".format(user.username, course_id))
            return HttpResponseBadRequest("Course id is invalid")

        if not has_access(user, course, 'enroll'):
            return HttpResponseBadRequest("Enrollment is closed")

        org, course_num, run = course_id.split("/")
        statsd.increment("common.student.enrollment",
                         tags=["org:{0}".format(org),
                               "course:{0}".format(course_num),
                               "run:{0}".format(run)])

        try:
            enrollment, created = CourseEnrollment.objects.get_or_create(user=user, course_id=course.id)
        except IntegrityError:
            # If we've already created this enrollment in a separate transaction,
            # then just continue
            pass
        return HttpResponse()

    elif action == "unenroll":
        try:
            enrollment = CourseEnrollment.objects.get(user=user, course_id=course_id)
            enrollment.delete()

            org, course_num, run = course_id.split("/")
            statsd.increment("common.student.unenrollment",
                             tags=["org:{0}".format(org),
                                   "course:{0}".format(course_num),
                                   "run:{0}".format(run)])

            return HttpResponse()
        except CourseEnrollment.DoesNotExist:
            return HttpResponseBadRequest("You are not enrolled in this course")
    else:
        return HttpResponseBadRequest("Enrollment action is invalid")