Example #1
File: consumer.py Project: mitocw/xqueue
    def _process(self, submission_id, on_done):
        log.info("Processing submission from queue_name: {0}, submission_id: {1}".format(self.queue_name, submission_id))
        try:
            submission = self._get_submission(submission_id)

            if submission is None:
                statsd.increment('xqueue.consumer.consumer_callback.submission_does_not_exist',
                                 tags=['queue:{0}'.format(self.queue_name)])
                log.error("Queued pointer refers to nonexistent entry in Submission DB: queue_name: {0}, submission_id: {1}".format(
                    self.queue_name,
                    submission_id
                ))

            # if item has been retired, skip grading
            if submission and not submission.retired:
                self._deliver_submission(submission)

            # close transaction
            transaction.commit()
        except Exception as e:
            # We need a wide catch here to correctly rollback the
            # transaction and acknowledge the message if something
            # goes wrong
            statsd.increment('xqueue.consumer.consumer_callback.unknown_error',
                             tags=['queue:{0}'.format(self.queue_name)])
            log.error("Error processing submission_id: {0} on queue_name: {1}, {2}" .format(
                submission_id,
                self.queue_name,
                e,
            ))
            transaction.rollback()
        finally:
            # acknowledge that the message was processed
            on_done()
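The listing does not show how _process is invoked. Presumably the surrounding consumer class registers a RabbitMQ callback that decodes the queued payload and acknowledges the delivery through on_done; a minimal sketch under that assumption (the payload format and the method name are illustrative, not taken from the project):

    # Illustrative only: how a pika basic_consume callback might drive _process.
    # The assumption that the queued body is the submission id is not confirmed
    # by this listing.
    def _consume(self, channel, method, properties, body):
        submission_id = int(body)
        # Acknowledge the delivery once _process has committed or rolled back
        self._process(submission_id,
                      lambda: channel.basic_ack(delivery_tag=method.delivery_tag))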
Example #2
File: consumer.py Project: mitocw/xqueue
def post_grade_to_lms(header, body):
    '''
    Send grading results back to LMS
        header:  JSON-serialized xqueue_header (string)
        body:    grader reply (string)

    Returns:
        success: Flag indicating successful exchange (Boolean)
    '''
    header_dict = json.loads(header)
    lms_callback_url = header_dict['lms_callback_url']

    payload = {'xqueue_header': header, 'xqueue_body': body}

    # Quick kludge retries to fix prod problem with 6.00x push graders. We're
    # seeing abrupt disconnects when servers are taken out of the ELB, causing
    # in flight lms_ack requests to fail. This just tries five times before
    # giving up.
    attempts = 0
    success = False
    while (not success) and attempts < 5:
        (success, lms_reply) = _http_post(lms_callback_url,
                                          payload,
                                          settings.REQUESTS_TIMEOUT)
        attempts += 1

    if success:
        statsd.increment('xqueue.consumer.post_grade_to_lms.success')
    else:
        log.error("Unable to return to LMS: lms_callback_url: {0}, payload: {1}, lms_reply: {2}".format(lms_callback_url, payload, lms_reply))
        statsd.increment('xqueue.consumer.post_grade_to_lms.failure')

    return success
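post_grade_to_lms relies on an _http_post helper that is not reproduced in this listing. A minimal sketch of such a helper, assuming it simply wraps requests.post and reports failures as a (False, reason) pair:

import logging
import requests

log = logging.getLogger(__name__)


def _http_post(url, data, timeout):
    """Sketch only: POST form data to `url` and return (success, reply_text)."""
    try:
        response = requests.post(url, data=data, timeout=timeout)
    except requests.exceptions.RequestException as err:
        log.error('Could not connect to server at %s: %s', url, err)
        return (False, 'cannot connect to server')

    if response.status_code != 200:
        return (False, 'unexpected HTTP status code [%d]' % response.status_code)
    return (True, response.text)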
Example #3
File: consumer.py Project: mitocw/xqueue
def get_single_qitem(queue_name):
    '''
    Retrieve a single queued item, if one exists, from the named queue

    Returns (success, qitem):
        success: Flag whether retrieval is successful (Boolean)
                 If no items in the queue, then return False
        qitem:   Retrieved item
    '''
    queue_name = str(queue_name)

    # Pull a single submission (if one exists) from the named queue
    credentials = pika.PlainCredentials(settings.RABBITMQ_USER,
                                        settings.RABBITMQ_PASS)

    connection = pika.BlockingConnection(pika.ConnectionParameters(
        heartbeat_interval=5,
        credentials=credentials, host=settings.RABBIT_HOST,
        virtual_host=settings.RABBIT_VHOST))
    channel = connection.channel()
    channel.queue_declare(queue=queue_name, durable=True)

    # qitem is the item from the queue
    method, header, qitem = channel.basic_get(queue=queue_name)

    if method is None or method.NAME == 'Basic.GetEmpty':  # Got nothing
        connection.close()
        return (False, '')
    else:
        channel.basic_ack(method.delivery_tag)
        connection.close()
        statsd.increment('xqueue.consumer.get_single_qitem',
                         tags=['queue:{0}'.format(queue_name)])
        return (True, qitem)
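In contrast to the push-style consumer in Example #1, basic_get is a synchronous pull; a caller might use this helper as follows (the queue name is illustrative, and interpreting the raw queued string is left to the caller):

success, qitem = get_single_qitem('test-pull')
if success:
    handle_submission(qitem)  # hypothetical handler for the raw queued string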
Example #4
    def purchased_callback(self):
        """
        When purchased, this should enroll the user in the course.  We are assuming that
        course settings for enrollment date are configured such that only if the (user.email, course_id) pair is found
        in CourseEnrollmentAllowed will the user be allowed to enroll.  Otherwise requiring payment
        would in fact be quite silly since there's a clear back door.
        """
        try:
            course_loc = CourseDescriptor.id_to_location(self.course_id)
            course_exists = modulestore().has_item(self.course_id, course_loc)
        except ValueError:
            raise PurchasedCallbackException(
                "The customer purchased Course {0}, but that course doesn't exist!".format(self.course_id))

        if not course_exists:
            raise PurchasedCallbackException(
                "The customer purchased Course {0}, but that course doesn't exist!".format(self.course_id))

        CourseEnrollment.enroll(user=self.user, course_id=self.course_id, mode=self.mode)

        log.info("Enrolled {0} in paid course {1}, paid ${2}".format(self.user.email, self.course_id, self.line_cost))
        org, course_num, run = self.course_id.split("/")
        statsd.increment("shoppingcart.PaidCourseRegistration.purchased_callback.enrollment",
                         tags=["org:{0}".format(org),
                               "course:{0}".format(course_num),
                               "run:{0}".format(run)])
Example #5
def send_nearby_carts(lat_long, category="Anything"):
    if category not in TAGS_BY_TRUCK:
        return jsonify(data="Valid categories: %s" % str(TAGS_BY_TRUCK.keys()))
    lat_long = lat_long.split(",")
    latitude = float(lat_long[0])
    longitude = float(lat_long[1])
    unsorted_result = []
    unsorted_result_lat_long = []
    result_feet = []
    result_miles = []
    for index in find_nearby_carts(longitude, latitude, IDX):
        if index in TAGS_BY_TRUCK[category]:
            unsorted_result.append(CARTS[index])
            unsorted_result_lat_long.append(CARTS[index]["latitude"] + "," + CARTS[index]["longitude"])
    distances, addresses = get_distances_and_addresses(lat_long[0] + "," + lat_long[1], unsorted_result_lat_long)
    if len(distances) == len(unsorted_result):
        for i in range(len(unsorted_result)):
            unsorted_result[i]["distance"] = distances[i]
            unsorted_result[i]["address"] = addresses[i]
            if "ft" in unsorted_result[i]["distance"]:
                result_feet.append(unsorted_result[i])
            else:
                result_miles.append(unsorted_result[i])
        result_feet.sort(key=operator.itemgetter("distance"))
        result_miles.sort(key=operator.itemgetter("distance"))
        result = result_feet + result_miles
    else:
        result = unsorted_result
    statsd.increment("cart_api.requests", tags=["support", "page:nearby_carts"])
    return jsonify(data=result)
Example #6
def get_service(service_id):
    """ Get a single service's data
    """

    result = g.firebase.get('/services', service_id)
    statsd.increment('firebase.services.get')
    return jsonify(dict(service=result))
Example #7
def get_services():
    """ Get the collection of all services
    """
    
    result = g.firebase.get('/services', None)
    statsd.increment('firebase.services.get')
    return jsonify(dict(services=result))
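Both get_service/get_services above and post_services in Example #13 below read a client from g.firebase, which is never set up in this listing. One possible wiring, assuming the python-firebase client and a Flask before_request hook (the application URL is a placeholder):

from firebase import firebase
from flask import Flask, g

app = Flask(__name__)


@app.before_request
def attach_firebase():
    # Attach an unauthenticated FirebaseApplication client for this request
    g.firebase = firebase.FirebaseApplication('https://<your-app>.firebaseio.com', None)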
Example #8
    def publish(event):
        if event.get('event_name') != 'grade':
            return

        student_module, created = StudentModule.objects.get_or_create(
            course_id=course_id,
            student=user,
            module_type=descriptor.location.category,
            module_state_key=descriptor.location.url(),
            defaults={'state': '{}'},
        )
        student_module.grade = event.get('value')
        student_module.max_grade = event.get('max_value')
        student_module.save()

        # Bin score into range and increment stats
        score_bucket = get_score_bucket(student_module.grade, student_module.max_grade)
        org, course_num, run = course_id.split("/")

        tags = ["org:{0}".format(org),
                "course:{0}".format(course_num),
                "run:{0}".format(run),
                "score_bucket:{0}".format(score_bucket)]

        if grade_bucket_type is not None:
            tags.append('type:%s' % grade_bucket_type)

        statsd.increment("lms.courseware.question_answered", tags=tags)
Example #9
File: api.py Project: dhilipsiva/moback
def register():
    statsd.increment('api_calls.register')
    form = SignupForm(request.form)
    logger.info(request.form)
    if not form.validate():
        msg = {
            'success': False,
            'msg': form.errors}
        return jsonify(msg)
    user = session.query(Person).\
        filter(Person.email == form.email.data
               ).first()
    if user:
        msg = {
            'success': False,
            'msg': user.email + ' is already registered!',
            'parameter': 'email', }
        return jsonify(msg)
    u = Person(form)
    session.add(u)
    session.commit()
    try:
        pm = PostMonkey(apikey=MC_APIKEY, timeout=10)
        pm.listSubscribe(
            id=MC_LISTID, email_address=form.email.data)
    except MailChimpException as e:
        app.logger.error(str(e))
Example #10
File: api.py Project: dhilipsiva/moback
def score():
    statsd.increment('api_calls.score.%s' % request.method)
    if request.method == 'GET':
        row = session.query(Score).filter(
            Score.person_id == g.user._id)
        res = []
        for pt in row:
            res.append(pt.json_data())
        return jsonify({
            'msg': res,
            'success': False})

    '''post function'''
    logger.info(request.form)
    form = ScoreForm(request.form)
    if not form.validate():
        msg = {
            'success': False,
            'msg': form.errors}
        return jsonify(msg)
    session.add(Score(form, g.user._id))
    session.commit()
    msg = {
        'success': True,
        'msg': 'Added',
        'id': form.id.data, }
    return jsonify(msg)
Example #11
def finalize_expired_submission(sub):
    """
    Expire a submission by posting an error message back to the LMS.
    Input:
        sub - the Submission to finalize
    Output:
        Success code.
    """

    grader_dict = {
        'score': 0,
        'feedback': error_template.format(errors="Error scoring submission."),
        'status': GraderStatus.failure,
        'grader_id': "0",
        'grader_type': sub.next_grader_type,
        'confidence': 1,
        'submission_id' : sub.id,
        }

    sub.state = SubmissionState.finished
    sub.save()

    grade = create_grader(grader_dict,sub)

    statsd.increment("open_ended_assessment.grading_controller.expire_submissions.finalize_expired_submission",
                     tags=[
                         "course:{0}".format(sub.course_id),
                         "location:{0}".format(sub.location),
                         'grader_type:{0}'.format(sub.next_grader_type)
                     ])

    return True
Example #12
def reset_skipped_subs():
    """
    Reset submissions marked skipped to return them to the queue.
    """
    
    # Mihara: There's no reason not to do that which I can see.    
    counter = Submission.objects.filter(
        state=SubmissionState.skipped,
        posted_results_back_to_queue=False
        ).update(state=SubmissionState.waiting_to_be_graded)
    
    # Mihara: Seriously, why did they write it like that?
    #counter=0
    #unique_locations=[x['location'] for x in Submission.objects.all().values('location').distinct()]
    #for location in unique_locations:
    #    subs_pending_total= Submission.objects.filter(
    #        location=location,
    #        state=SubmissionState.skipped
    #    ).order_by('-date_created')
    #    for sub in subs_pending_total:
    #        sub.state=SubmissionState.waiting_to_be_graded
    #        counter+=1
    if counter>0:
        statsd.increment("open_ended_assessment.grading_controller.expire_submissions.reset_skipped_subs",
            tags=["counter:{0}".format(counter)])
        log.debug("Reset {0} submission from skipped state".format(counter))
Example #13
def post_services(args):
    """
    Sample Request data:

    {
      "name": NAME,
      "data": {
        "terms": [
          {
            "policy_name" : POLICY_NAME,
            "policy_desc": POLICY_DESC,
            "policy_values": POLICY_OPTIONS
          },
        ],
        "full_terms_url": URL
      }
    }
    """
    
    data = request.get_json()
    
    try:
        result = g.firebase.put('/services',
                                args.get('name', None),
                                args.get('data', {}),
                                params={'print': 'silent'},
                                headers={'X_FANCY_HEADER': 'VERY FANCY'})
        statsd.increment('firebase.services.put')
    except:
        pass
    
    return jsonify({'success': True})
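For reference, a client could exercise post_services with the documented payload along these lines (the route and host are assumptions, since the URL mapping is not shown in this listing):

import json
import requests

payload = {
    "name": "example-service",
    "data": {
        "terms": [
            {
                "policy_name": "data_sharing",
                "policy_desc": "Whether collected data may be shared",
                "policy_values": ["yes", "no"],
            },
        ],
        "full_terms_url": "http://example.com/terms",
    },
}
requests.post("http://localhost:5000/services",
              data=json.dumps(payload),
              headers={"Content-Type": "application/json"})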
Example #14
def reset_ml_subs_to_in():
    """
    Reset submissions marked ML to instructor if there are not enough instructor submissions to grade.
    This happens if the instructor skips too many submissions.
    """
    counter=0
    unique_locations=[x['location'] for x in list(Submission.objects.values('location').distinct())]
    for location in unique_locations:
        subs_graded, subs_pending = staff_grading_util.count_submissions_graded_and_pending_instructor(location)
        subs_pending_total= Submission.objects.filter(
            location=location,
            state=SubmissionState.waiting_to_be_graded,
            preferred_grader_type="ML"
        ).order_by('-date_created')[:settings.MIN_TO_USE_ML]
        if ((subs_graded+subs_pending) < settings.MIN_TO_USE_ML and subs_pending_total.count() > subs_pending):
            for sub in subs_pending_total:
                if sub.next_grader_type=="ML" and sub.get_unsuccessful_graders().count()==0:
                    staff_grading_util.set_ml_grading_item_back_to_instructor(sub)
                    counter+=1
                if (counter+subs_graded + subs_pending)> settings.MIN_TO_USE_ML:
                    break
    if counter>0:
        statsd.increment("open_ended_assessment.grading_controller.expire_submissions.reset_ml_subs_to_in",
            tags=["counter:{0}".format(counter)])
        log.debug("Reset {0} submission from ML to IN".format(counter))
Example #15
    def publish(event):
        """A function that allows XModules to publish events. This only supports grade changes right now."""
        if event.get("event_name") != "grade":
            return

        # Construct the key for the module
        key = KeyValueStore.Key(
            scope=Scope.user_state, student_id=user.id, block_scope_id=descriptor.location, field_name="grade"
        )

        student_module = field_data_cache.find_or_create(key)
        # Update the grades
        student_module.grade = event.get("value")
        student_module.max_grade = event.get("max_value")
        # Save all changes to the underlying KeyValueStore
        student_module.save()

        # Bin score into range and increment stats
        score_bucket = get_score_bucket(student_module.grade, student_module.max_grade)
        org, course_num, run = course_id.split("/")

        tags = [
            "org:{0}".format(org),
            "course:{0}".format(course_num),
            "run:{0}".format(run),
            "score_bucket:{0}".format(score_bucket),
        ]

        if grade_bucket_type is not None:
            tags.append("type:%s" % grade_bucket_type)

        statsd.increment("lms.courseware.question_answered", tags=tags)
Example #16
def check_if_grading_finished_for_duplicates():
    duplicate_submissions = Submission.objects.filter(
        preferred_grader_type = "PE",
        is_duplicate= True,
        posted_results_back_to_queue=False,
    )
    log.info(duplicate_submissions)
    counter=0
    for sub in duplicate_submissions:
        if sub.duplicate_submission_id is not None:
            try:
                original_sub=Submission.objects.get(id=sub.duplicate_submission_id)
                if original_sub.state == SubmissionState.finished:
                    finalize_grade_for_duplicate_peer_grader_submissions(sub, original_sub)
                    counter+=1
                    log.debug("Finalized one duplicate submission: Original: {0} Duplicate: {1}".format(original_sub,sub))
            except:
                log.error("Could not finalize grade for submission with id {0}".format(sub.duplicate_submission_id))

    statsd.increment("open_ended_assessment.grading_controller.expire_submissions.check_if_duplicate_grading_finished",
        tags=[
            "counter:{0}".format(counter),
        ])
    log.info("Finalized {0} duplicate submissions".format(counter))
    return True
Example #17
def reset_timed_out_submissions(subs):
    """
    Check if submissions have timed out, and reset them to waiting to grade state if they have
    Input:
        subs - A QuerySet of submissions
    Output:
        status code indicating success
    """
    now = timezone.now()
    min_time = datetime.timedelta(seconds=settings.RESET_SUBMISSIONS_AFTER)
    timed_out_subs = subs.filter(date_modified__lt=now - min_time)
    count = 0

    # Iterate over the timed-out submissions (not the full queryset) and reset
    # any that are still marked as being graded.
    for sub in timed_out_subs:
        if sub.state == SubmissionState.being_graded:
            sub.state = SubmissionState.waiting_to_be_graded
            sub.save()
            count += 1

    if count>0:
        statsd.increment("open_ended_assessment.grading_controller.expire_submissions.reset_timed_out_submissions",
            tags=["counter:{0}".format(count)])
        log.debug("Reset {0} submissions that had timed out in their current grader.".format(count))

    return True
Example #18
def reset_timed_out_submissions():
    """
    Check if submissions have timed out, and reset them to the waiting-to-be-graded state if they have.
    Input:
        None - queries the Submission table directly
    Output:
        status code indicating success
    """
    now = timezone.now()
    min_time = datetime.timedelta(seconds=settings.RESET_SUBMISSIONS_AFTER)

    # have to split into 2 queries now, because we are giving some finished submissions to peer graders when
    # there's nothing to grade

    reset_waiting_count = (Submission.objects
                           .filter(date_modified__lt=now-min_time,
                                   state=SubmissionState.being_graded,
                                   posted_results_back_to_queue=False)
                           .update(state=SubmissionState.waiting_to_be_graded))

    reset_finished_count = (Submission.objects
                            .filter(date_modified__lt=now-min_time,
                                    state=SubmissionState.being_graded,
                                    posted_results_back_to_queue=True)
                            .update(state=SubmissionState.finished))

    reset_count = reset_waiting_count + reset_finished_count
    if reset_count>0:
        statsd.increment("open_ended_assessment.grading_controller.expire_submissions.reset_timed_out_submissions",
            tags=["counter:{0}".format(reset_count)])
        log.debug("Reset {0} submissions that had timed out in their current grader.  {1}->W {2}->F"
                  .format(reset_count, reset_waiting_count, reset_finished_count))

    return True
Example #19
    def onChannelHangup(self, ev):
        statsd.increment('freeswitch.channels.finished')
        if (ev.Hangup_Cause in config.freeSwitch.normalHangupCauses):
            statsd.increment('freeswitch.channels.finished.normally')
            statsd.increment('freeswitch.channels.finished.normally.' + ev.Hangup_Cause.lower())
        else:
            statsd.increment('freeswitch.channels.finished.abnormally')
            statsd.increment('freeswitch.channels.finished.abnormally.' + ev.Hangup_Cause.lower())
Example #20
def showIndex():
    options = {
        "/location/<lat_long>": 'Pass comma separated latitude,longitude value to get info for nearby carts.Values returned are in the format:{"data": [list of carts containing address, applicant, distance, facilitytype, fooditems, latitude, and longitude]}',
        "/location/<lat_long>/<category>": "Returns all carts near comma separated latitude longitude matching a particular category. List of available categories can be found using the /categories option. Results formatted the same as /location/<lat_long>",
        "/categories": 'Returns categories in format "data"=[list of categories]',
    }
    statsd.increment("cart-api.requests", tags=["support", "page:options"])
    return jsonify(options)
Example #21
    def onChannelHangupComplete(self, ev):
        try:
            statsd.histogram('freeswitch.rtp.skipped_packet.in', ev.variable_rtp_audio_in_skip_packet_count)
            statsd.histogram('freeswitch.rtp.skipped_packet.out', ev.variable_rtp_audio_out_skip_packet_count)
        except:
            log.msg("Unable to read variable_rtp_audio_in_skip_packet_count and/or variable_rtp_audio_out_skip_packet_count")
        statsd.increment('freeswitch.caller.context.' + ev.Caller_Context)
        statsd.increment('freeswitch.caller.source.' + ev.Caller_Source)
        self.g729_metrics()
Example #22
File: api.py Project: dhilipsiva/moback
def leaderboard():
    statsd.increment('api_calls.leaderboard')
    msg = {
        'success': True,
        'msg': 'leaderboards',
        'all_time': _alltime_leaderboard(),
        'month': _get_tops(30),
        'week': _get_tops(7),
        'day': _get_tops(1), }
    return jsonify(msg)
Example #23
File: api.py Project: dhilipsiva/moback
def inapp_products():
    statsd.increment('api_calls.inapp_products')
    prods = session.query(InAppProducts)
    prod_array = []
    for prod in prods:
        prod_array.append(prod.json_data())
    msg = {
        'success': True,
        'msg': prod_array, }
    return jsonify(msg)
Example #24
def main_page():
    statsd.increment('web.page_count', tags=['page:main'])
    #time diff for histograms
    start_time = time.time()
    #putting randomly to sleep to generate delays.
    time.sleep(random.randint(1, 10))
    duration = time.time() - start_time
    #paging data for histogram
    statsd.histogram('page.load.hist_timer', duration, tags=['type:support', 'page:main'])
    return "App Main Page"
Example #25
def main(request, path):
    """Search for a long link matching the `path` and redirect"""

    path = _extract_valid_path(path)

    link = Link.find_by_hash(path)

    redirect_suffix = None

    if link is None:
        # Try to find a matching prefix
        parts = path.split('/', 1)

        if len(parts) == 2:
            path_prefix, redirect_suffix = parts

            # If there was a prefix, we try to find a link again
            link = Link.find_by_hash(path_prefix)

    # Instrumentation
    prefix_tag = 'prefix:' + link.prefix if link else 'Http404'

    statsd.increment('workforus.clicks', tags=[prefix_tag])
    statsd.set('workforus.unique_links', link.hash if link else 'Http404', tags=[prefix_tag])
    statsd.set('workforus.unique_ips', get_client_ip(request), tags=['browser:' + get_browser(request)])

    # 404 if link not found or register a click if the DB is not in readonly mode
    if link is None:
        raise Http404
    elif mongoengine_is_primary():
        link.click()

    # Tweak the redirection link based on the query string, redirection suffix, etc.
    # FIXME: Handle multiple parameters with the same name in the `url`
    query = request.GET.copy()

    if redirect_suffix is not None:
        query[REDIRECT_PARAM_NAME] = redirect_suffix

    if bool(query) and REF_PARAM_NAME not in query:
        # If we specify a non empty query, indicate that the shortener tweaked the url
        query[REF_PARAM_NAME] = REF_PARAM_DEFAULT_VALUE

    target_url = url_append_parameters(
        link.long_url,
        params_to_replace=query,
        defaults={REF_PARAM_NAME: REF_PARAM_DEFAULT_VALUE}
    )

    # Either redirect the user, or load the target page and display it directly
    if link.act_as_proxy:
        return proxy(target_url)

    return redirect(target_url, permanent=True)
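url_append_parameters is not part of this listing. A rough sketch of the merging behaviour implied by the call above, assuming standard-library URL handling (Python 2 modules, to match the rest of the examples):

import urllib
import urlparse


def url_append_parameters(url, params_to_replace, defaults):
    """Sketch only: rebuild `url` with merged query parameters."""
    parts = list(urlparse.urlparse(url))
    merged = dict(defaults)                      # lowest priority
    merged.update(urlparse.parse_qsl(parts[4]))  # keep parameters already on the URL
    for key in params_to_replace:                # highest priority: the view's query
        merged[key] = params_to_replace[key]
    parts[4] = urllib.urlencode(merged)
    return urlparse.urlunparse(parts)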
Example #26
File: tasks.py Project: kursitet/edx-ora
def expire_submissions_task():
    flag = True
    log.debug("Starting check for expired subs.")
    #Sleep for some time to allow other processes to get ahead/behind
    time_sleep_value = random.uniform(0, 100)
    time.sleep(time_sleep_value)
    try:
        gc.collect()
        db.reset_queries()
        transaction.commit()

        #Comment out submission expiration for now.  Not really needed while testing.
        expire_submissions.reset_timed_out_submissions()
        """
        expired_list = expire_submissions.get_submissions_that_have_expired(subs)
        if len(expired_list) > 0:
            success = grader_util.finalize_expired_submissions(expired_list)
            statsd.increment("open_ended_assessment.grading_controller.remove_expired_subs",
                tags=["success:{0}".format(success)])
        """

        try:
            expire_submissions.reset_ml_subs_to_in()
            transaction.commit()
        except Exception:
            log.exception("Could not reset ml to in!")

        try:
            expire_submissions.reset_skipped_subs()
            transaction.commit()
        except Exception:
            log.exception("Could not reset skipped submissions!")

        try:
            generate_student_metrics.regenerate_student_data()
            transaction.commit()
        except Exception:
            log.exception("Could not regenerate student data!")

        try:
            #Remove old ML grading models
            expire_submissions.remove_old_model_files()
            transaction.commit()
        except Exception:
            log.exception("Could not remove ml grading models!")

        log.debug("Finished looping through.")

    except Exception as err:
        log.exception("Could not get submissions to expire! Error: {0}".format(err))
        statsd.increment("open_ended_assessment.grading_controller.remove_expired_subs",
                         tags=["success:Exception"])
    util.log_connection_data()
    transaction.commit()
Example #27
def main(request, path):
    '''
    Search for a long link matching the `path` and redirect
    '''

    if len(path) and path[-1] == '/':
        # Removing trailing slash so "/jobs/" and "/jobs" redirect identically
        path = path[:-1]

    link = Link.find_by_hash(path)

    if link is None:
        # Try to find a matching short link by removing valid "catchall" suffixes
        path_prefix, redirect_suffix = suffix_catchall.get_hash_from(path)

        if redirect_suffix is not None:
            # If we found a suffix, we try to find a link again with the prefix
            link = Link.find_by_hash(path_prefix)
    else:
        redirect_suffix = None

    # Instrumentation
    prefix_tag = 'prefix:' + link.prefix if link else 'Http404'

    statsd.increment('workforus.clicks', tags=[prefix_tag])
    statsd.set('workforus.unique_links', link.hash if link else 'Http404', tags=[prefix_tag])
    statsd.set('workforus.unique_ips', get_client_ip(request), tags=['browser:' + get_browser(request)])

    # 404 if link not found or register a click if the DB is not in readonly mode
    if link is None:
        raise Http404
    elif mongoengine_is_primary():
        link.click()

    # Tweak the redirection link based on the query string, redirection suffix, etc.
    # FIXME: Handle multiple parameters with the same name in the `url`
    query = request.GET.copy()

    if redirect_suffix is not None:
        query[REDIRECT_PARAM_NAME] = redirect_suffix

    if bool(query) and REF_PARAM_NAME not in query:
        # If we specify a non empty query, indicate that the shortener tweaked the url
        query[REF_PARAM_NAME] = REF_PARAM_DEFAULT_VALUE

    target_url = url_append_parameters(
        link.long_url,
        params_to_replace=query,
        defaults={REF_PARAM_NAME: REF_PARAM_DEFAULT_VALUE}
    )

    # Either redirect the user, or load the target page and display it directly
    return (proxy if link.act_as_proxy else redirect)(target_url)
Example #28
File: api.py Project: dhilipsiva/moback
def user_scores():
    statsd.increment('api_calls.user_scores')
    scrs = session.query(Score).\
        filter(Score.person_id == g.user.id)
    scr_ary = []
    for scr in scrs:
        scr_ary.append(scr.json_data())
    msg = {
        'success': True,
        'msg': 'user\'s scores',
        'scores': scr_ary}
    return jsonify(msg)
Example #29
def showDistance(lat_long):
    start_point = lat_long
    end_point = "37.7841316511211,-122.39591339799"
    distance_url = (
        "http://maps.googleapis.com/maps/api/distancematrix/json?origins="
        + start_point
        + "&destinations="
        + end_point
        + "&mode=walking&language=en-EN&sensor=false&units=imperial&key="
        + API_KEY
    )
    result = json.load(urllib.urlopen(distance_url))
    statsd.increment("cart-api.requests", tags=["support", "page:distance_test"])
    return jsonify(result)
Example #30
File: api.py Project: dhilipsiva/moback
def forgot_password():
    statsd.increment('api_calls.forgot_password')
    if request.method == 'POST':
        logger.info(request.form)
        form = EmailForm(request.form)
        if not form.validate():
            msg = {
                'success': False,
                'msg': form.errors, }
            return jsonify(msg)
        if secret_key != base64.b64decode(form.sig.data):
            msg = {
                'success': False,
                'msg': 'invalid signature'}
            return jsonify(msg)
        u = session.query(Person).\
            filter(Person.email == form.email.data).\
            first()
        if not u:
            msg = {
                'success': False,
                'msg': 'Email not registered!'}
            return jsonify(msg)
        m = Message(
            "Reset Password",
            recipients=[form.email.data])
        content = (
            'Click <a href="http://' +
            request.headers['Host'] + '/forgot_password?' +
            'key=' + urllib.quote(u.pw_hash) + '&email=' +
            urllib.quote(form.email.data) + '">HERE</a>' +
            ' to reset your password')
        m.html = content
        mail.send(m)
        msg = {
            'success': True,
            'msg': 'Mail Sent!'}
        return jsonify(msg)

    logger.info(request.args)
    key = urllib.unquote(request.args['key'])
    email = urllib.unquote(request.args['email'])
    u = session.query(Person).\
        filter(Person.email == email).first()
    if u and key == u.pw_hash:
        response = redirect(url_for('reset_page'))
        response.set_cookie('email', value=email)
        response.set_cookie('key', value=urllib.quote(key))
        return response
    return 'Invalid key!'