Example #1
    def register_operations_wrapped(self, import_key: str, operation: str,
                                    *pks: Any) -> int:
        """
        This is a wrapper around the register_operations method that validates its main parameters.
        It should be called from inner functions.
        :param import_key: A key returned by the ClickHouseModel.get_import_key() method
        :param operation: One of 'insert', 'update', 'delete'
        :param pks: Primary keys used to find records in the main database. Must be serializable with str().
        :return: Number of registered operations
        """
        if operation not in {'insert', 'update', 'delete'}:
            raise ValueError(
                'operation must be one of [insert, update, delete]')

        statsd_key = "%s.sync.%s.register_operations" % (config.STATSD_PREFIX,
                                                         import_key)
        statsd.incr(statsd_key + '.%s' % operation, len(pks))
        with statsd.timer(statsd_key):
            ops_count = self.register_operations(import_key, operation, *pks)

        statsd_key = "%s.sync.%s.queue" % (config.STATSD_PREFIX, import_key)
        statsd.gauge(statsd_key, ops_count, delta=True)
        logger.debug(
            'django-clickhouse: registered %s on %d items (%s) to storage' %
            (operation, len(pks), import_key))

        return ops_count
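A minimal usage sketch for the wrapper above, assuming the django-clickhouse pattern described in the docstring (the model name and primary key values here are hypothetical):

# Hypothetical ClickHouseModel subclass; get_storage()/get_import_key() come from django-clickhouse.
storage = ClickHouseUserVisit.get_storage()
import_key = ClickHouseUserVisit.get_import_key()

# Register three inserted rows by primary key; returns the number of registered operations.
registered = storage.register_operations_wrapped(import_key, 'insert', 1, 2, 3)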
Example #2
    def parse(self, stream, media_type=None, parser_context=None):
        """
        Parse the CAP XML body of the request and return a list of alert dicts.
        """
        body_text = stream.read()
        data = []
        try:
            alerts = CAPParser(body_text, recover=True).as_dict()
        except Exception:
            statsd.incr('api.CAPXMLParser.parse.error')
            logging.error(str(body_text))
            raise ParseError

        for alert in alerts:
            alert_obj = dict()

            cap_slug = '%030x' % random.randrange(16**30)
            alert_obj['cap_slug'] = cap_slug

            for alert_key, alert_value in alert.items():
                if alert_key in SIMPLE_CAP_TYPES:
                    if 'cap_sent' == alert_key:
                        alert_value = dparser.parse(str(alert_value))
                    alert_obj[alert_key] = str(alert_value)

                elif alert_key == 'cap_info':
                    item_obj_list = []
                    for item_obj in alert['cap_info']:
                        # TODO run as background task
                        processed = self.process_item_obj(item_obj)
                        item_obj_list.append(processed)
                    alert_obj['info_set'] = item_obj_list
            data.append(alert_obj)

        return data
Example #3
def signup(request):
    template = get_template("polls/signup.html")

    stdlogger.info("In signup page")

    statsd.incr('fitcycle.signup', 1)
    foo_timer = statsd.timer('signupTimer')

    if request.method == 'POST':
        form = PostForm(request.POST)
        if form.is_valid():
            firstname = request.POST.get('firstname', '')
            lastname = request.POST.get('lastname', '')
            email = request.POST.get('email', '')
            stdlogger.info("creating object for saving to db")
            prospect_obj = prospect(firstname=firstname, lastname=lastname, email=email)
            try:
                stdlogger.info("About to save")
                foo_timer.start()
                prospect_obj.save()
                foo_timer.stop()
            except Exception as e:
                stdlogger.error("Error in saving: %s" % e)

            return HttpResponseRedirect(reverse('index'))
Example #4
def heartbeat(request):
    all_checks = checks_registry.get_checks(include_deployment_checks=not settings.DEBUG)

    details = {}
    statuses = {}
    level = 0

    for check in all_checks:
        detail = heartbeat_check_detail(check)
        statuses[check.__name__] = detail['status']
        level = max(level, detail['level'])
        if detail['level'] > 0:
            details[check.__name__] = detail

    if level < checks_messages.WARNING:
        res_status = status.HTTP_200_OK
        statsd.incr('heartbeat.pass')
    else:
        res_status = status.HTTP_500_INTERNAL_SERVER_ERROR
        statsd.incr('heartbeat.fail')

    return Response({
        'status': heartbeat_level_to_text(level),
        'checks': statuses,
        'details': details,
    }, status=res_status)
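The heartbeat view above depends on a heartbeat_level_to_text helper that is not shown. A minimal sketch of what it might look like, assuming it maps Django system-check levels to the status strings returned in the payload:

from django.core import checks as checks_messages


def heartbeat_level_to_text(level):
    # Assumed mapping from Django check levels (0, DEBUG, INFO, WARNING, ERROR, CRITICAL)
    # to a human-readable status string.
    statuses = {
        0: 'ok',
        checks_messages.DEBUG: 'debug',
        checks_messages.INFO: 'info',
        checks_messages.WARNING: 'warning',
        checks_messages.ERROR: 'error',
        checks_messages.CRITICAL: 'critical',
    }
    return statuses.get(level, 'unknown')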
Example #5
def heartbeat(request):
    all_checks = checks_registry.get_checks(include_deployment_checks=not settings.DEBUG)

    details = {}
    statuses = {}
    level = 0

    for check in all_checks:
        detail = heartbeat_check_detail(check)
        statuses[check.__name__] = detail['status']
        level = max(level, detail['level'])
        if detail['level'] > 0:
            details[check.__name__] = detail

    if level < checks_messages.WARNING:
        res_status = status.HTTP_200_OK
        statsd.incr('heartbeat.pass')
    else:
        res_status = status.HTTP_500_INTERNAL_SERVER_ERROR
        statsd.incr('heartbeat.fail')

    return Response({
        'status': heartbeat_level_to_text(level),
        'checks': statuses,
        'details': details,
    }, status=res_status)
Example #6
    def parse(self, stream, media_type=None, parser_context=None):
        """
        Parse the CAP XML body of the request and return a list of alert dicts.
        """
        body_text = stream.read()
        data = []
        try:
            alerts = CAPParser(body_text, recover=True).as_dict()
        except Exception:
            statsd.incr('api.CAPXMLParser.parse.error')
            logging.error(str(body_text))
            raise ParseError

        for alert in alerts:
            alert_obj = dict()

            cap_slug = '%030x' % random.randrange(16**30)
            alert_obj['cap_slug'] = cap_slug

            for alert_key, alert_value in alert.items():
                if alert_key in SIMPLE_CAP_TYPES:
                    if 'cap_sent' == alert_key:
                        alert_value = dparser.parse(str(alert_value))
                    alert_obj[alert_key] = str(alert_value)

                elif alert_key == 'cap_info':
                    item_obj_list = []
                    for item_obj in alert['cap_info']:
                        # TODO run as background task
                        processed = self.process_item_obj(item_obj)
                        item_obj_list.append(processed)
                    alert_obj['info_set'] = item_obj_list
            data.append(alert_obj)

        return data
Example #7
 def pre_sync(self, import_key, **kwargs):
     # Block the process so the sync is single threaded. The default lock timeout
     # is 10 * the sync delay; it can be changed per model by passing a
     # `lock_timeout` argument to pre_sync.
     lock = self.get_lock(import_key, **kwargs)
     lock_pid_key = self.REDIS_KEY_LOCK_PID.format(import_key=import_key)
     try:
         lock.acquire()
         self._redis.set(lock_pid_key, os.getpid())
     except RedisLockTimeoutError:
         statsd.incr('%s.sync.%s.lock.timeout' %
                     (config.STATSD_PREFIX, import_key))
         # The lock is busy. But if the process holding it has been killed, there is
         # no point in waiting. Check whether that pid still exists.
         pid = int(self._redis.get(lock_pid_key) or 0)
         if pid and not check_pid(pid):
             statsd.incr('%s.sync.%s.lock.hard_release' %
                         (config.STATSD_PREFIX, import_key))
             logger.warning(
                 'django-clickhouse: hard releasing lock "%s" locked by pid %d (process is dead)'
                 % (import_key, pid))
             self._redis.delete(lock_pid_key)
             lock.hard_release()
             self.pre_sync(import_key, **kwargs)
         else:
             raise
Example #8
    def sync_batch_from_storage(cls):
        """
        Gets one batch from storage and syncs it.
        :return:
        """
        import_key = cls.get_import_key()
        storage = cls.get_storage()
        statsd_key = "%s.sync.%s.{0}" % (config.STATSD_PREFIX, import_key)

        try:
            with statsd.timer(statsd_key.format('total')):
                with statsd.timer(statsd_key.format('steps.pre_sync')):
                    storage.pre_sync(import_key, lock_timeout=cls.get_lock_timeout())

                with statsd.timer(statsd_key.format('steps.get_operations')):
                    operations = storage.get_operations(import_key, cls.get_sync_batch_size())
                    statsd.incr(statsd_key.format('operations'), len(operations))

                if operations:
                    with statsd.timer(statsd_key.format('steps.get_sync_objects')):
                        import_objects = cls.get_sync_objects(operations)
                else:
                    import_objects = []

                statsd.incr(statsd_key.format('import_objects'), len(import_objects))

                if import_objects:
                    batches = {}
                    with statsd.timer(statsd_key.format('steps.get_insert_batch')):
                        def _sub_model_func(model_cls):
                            model_statsd_key = "%s.sync.%s.{0}" % (config.STATSD_PREFIX, model_cls.__name__)
                            with statsd.timer(model_statsd_key.format('steps.get_insert_batch')):
                                # NOTE: The generator pattern is not used here, as it would move all
                                # the time into the insert step, making it hard to see in monitoring
                                # where the real problem is.
                                batch = tuple(model_cls.get_insert_batch(import_objects))
                                return model_cls, batch

                        res = exec_multi_arg_func(_sub_model_func, cls.sub_models, threads_count=len(cls.sub_models))
                        batches = dict(res)

                    with statsd.timer(statsd_key.format('steps.insert')):
                        def _sub_model_func(model_cls):
                            model_statsd_key = "%s.sync.%s.{0}" % (config.STATSD_PREFIX, model_cls.__name__)
                            with statsd.timer(model_statsd_key.format('steps.insert')):
                                model_cls.insert_batch(batches[model_cls])

                        exec_multi_arg_func(_sub_model_func, cls.sub_models, threads_count=len(cls.sub_models))

                with statsd.timer(statsd_key.format('steps.post_sync')):
                    storage.post_sync(import_key)

        except RedisLockTimeoutError:
            pass  # skip this sync round if lock is acquired by another thread
        except Exception as ex:
            with statsd.timer(statsd_key.format('steps.post_sync')):
                storage.post_sync_failed(import_key)
            raise ex
Example #9
    def get(self, request):
        """
        Perform geospatial search for alerts. Also, respond to pubsubhubbub challenges.

        Geospatial searches require lat and lng query parameters, both in WGS84 format. For instance:

        /api/v1/alerts/?lat=-33.5,lng=151

        """

        # This is to allow PubSubHubbub
        if 'hub.challenge' in request.QUERY_PARAMS:
            # This is just normal HttpResponse so it doesn't have quotes
            return HttpResponse(str(request.QUERY_PARAMS['hub.challenge']))

        try:
            lat = float(request.QUERY_PARAMS['lat'])
            lng = float(request.QUERY_PARAMS['lng'])
        except (KeyError, ValueError):
            raise Http404

        if 'cap_date_received' in request.QUERY_PARAMS:
            cap_date_received = str(request.QUERY_PARAMS['cap_date_received'])
        else:
            cap_date_received = None

        pnt = Point(lng, lat)

        if cap_date_received is not None:
            alert = Alert.objects.filter(
                cap_date_received__gte=cap_date_received
            )
        else:
            alert = Alert.objects.all()

        alert = alert.filter(
            info__area__geom__dwithin=(pnt, 0.02)
        ).filter(
            info__cap_expires__gte=datetime.now()
        ).select_related('info')

        """
        alert = alert.filter(
            info__area__geom__dwithin=(pnt, 0.02)
        ).filter(
            info__cap_expires__gte=datetime.now()
        )
        """

        if len(alert) > 0:
            serializer = AlertSerializer(alert)
            statsd.incr('api.AlertListAPI.get.success')
            return Response(serializer.data)
        else:
            statsd.incr('api.AlertListAPI.get.failure')
            raise Http404
Example #10
def increment(metric, request=None, ignore_staff=True, ignore_anonymous=False):
    if request and ignore_staff and request.user.is_authenticated and request.user.is_staff:
        return

    if request and ignore_anonymous and not request.user.is_authenticated:
        return

    statsd.incr("platform.{}".format(metric))
    if DATADOG_METRICS:
        datadog_statsd.increment("platform.{}".format(metric))
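Hypothetical call sites for the increment() helper above (the metric names are made up for illustration):

# Inside a view: counted for regular users, skipped for staff by default.
increment('search.performed', request=request)

# Count only authenticated users for this metric.
increment('profile.updated', request=request, ignore_anonymous=True)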
Example #11
 def rest_error(self, post_data, errors, log_errors=True):
     statsd.incr('heartbeat.error')
     if log_errors:
         log_error(post_data, errors)
     return rest_framework.response.Response(status=400,
                                             data={
                                                 'msg':
                                                 'bad request; see errors',
                                                 'errors': errors
                                             })
Example #12
        def _wrapped(request, *args, **kwargs):
            already_limited = getattr(request, 'limited', False)
            ratelimited = is_ratelimited(
                request=request, group=rulename,
                key=keyfun, rate=rate, method=['POST'],
                increment=True)

            if not already_limited and ratelimited:
                statsd.incr('throttled.' + rulename)

            return fn(request, *args, **kwargs)
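The _wrapped closure above pulls rulename, keyfun, rate and fn from an enclosing decorator factory that is not shown. A minimal sketch of such a factory, assuming django-ratelimit's is_ratelimited and a django-statsd style client (import paths vary by project and library version):

import functools

from django_statsd.clients import statsd  # assumed statsd client
from ratelimit.utils import is_ratelimited  # older django-ratelimit import path


def ratelimit_post(rulename, keyfun, rate):
    """Hypothetical factory producing the _wrapped decorator shown above."""
    def decorator(fn):
        @functools.wraps(fn)
        def _wrapped(request, *args, **kwargs):
            already_limited = getattr(request, 'limited', False)
            ratelimited = is_ratelimited(
                request=request, group=rulename,
                key=keyfun, rate=rate, method=['POST'],
                increment=True)

            # Count a throttle event only the first time this request gets limited.
            if not already_limited and ratelimited:
                statsd.incr('throttled.' + rulename)

            return fn(request, *args, **kwargs)
        return _wrapped
    return decorator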
Example #13
 def rest_error(self, post_data, errors, log_errors=True):
     statsd.incr('heartbeat.error')
     if log_errors:
         log_error(post_data, errors)
     return rest_framework.response.Response(
         status=400,
         data={
             'msg': 'bad request; see errors',
             'errors': errors
         }
     )
Example #14
        def _wrapped(request, *args, **kwargs):
            already_limited = getattr(request, 'limited', False)
            ratelimited = is_ratelimited(request=request,
                                         group=rulename,
                                         key=keyfun,
                                         rate=rate,
                                         method=['POST'],
                                         increment=True)

            if not already_limited and ratelimited:
                statsd.incr('throttled.' + rulename)

            return fn(request, *args, **kwargs)
Example #15
    def allow_request(self, request, view):
        already_limited = getattr(request, 'limited', False)
        ratelimited = is_ratelimited(
            request=request, group=self.rulename,
            key=self.keyfun, rate=self.rate, method=self.methods,
            increment=True)

        if ratelimited:
            if not already_limited:
                statsd.incr('throttled.' + self.rulename)
            return self.throttle_failure()

        # Did not trigger rate-limiting, so this request is allowed.
        return self.throttle_success()
Example #16
    def post(self, request):
        """
        Create a new alert (POST). This endpoint accepts Common Alerting Protocol (CAP) 1.1 and 1.2, but does NOT accept
        ATOM/RSS feeds. In general, simply POST the entire XML message as the content of your request.

        """

        statsd.incr('api.AlertListAPI.post')
        timer = statsd.timer('api.AlertListAPI.post')
        timer.start()
        data = request.DATA
        try:
            for item in data:
                item['contributor'] = request.user.pk
        except Exception as e:
            logging.error(e)
Example #17
    def allow_request(self, request, view):
        already_limited = getattr(request, 'limited', False)
        ratelimited = is_ratelimited(request=request,
                                     group=self.rulename,
                                     key=self.keyfun,
                                     rate=self.rate,
                                     method=self.methods,
                                     increment=True)

        if ratelimited:
            if not already_limited:
                statsd.incr('throttled.' + self.rulename)
            return self.throttle_failure()

        # Did not trigger rate-limiting, so this request is allowed.
        return self.throttle_success()
Example #18
    def post(self, request, format=None):
        statsd.incr('api.LocationList.post')
        data = request.DATA

        data['user'] = request.user.pk

        if data['source'] == "current":
            loc, created = Location.objects.get_or_create(source='current', user=request.user)
            serializer = LocationSerializer(loc, data=request.DATA)
        else:
            serializer = LocationSerializer(data=data)

        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
Example #19
    def post(self, request):
        """
        Create a new alert (POST). This endpoint accepts Common Alerting Protocol (CAP) 1.1 and 1.2, but does NOT accept
        ATOM/RSS feeds. In general, simply POST the entire XML message as the content of your request.

        """

        statsd.incr('api.AlertListAPI.post')
        timer = statsd.timer('api.AlertListAPI.post')
        timer.start()
        data = request.data
        try:
            for item in data:
                item['contributor'] = request.user.pk
        except Exception as e:
            logging.error(e)
Example #20
    def post(self, request, format=None):
        statsd.incr('api.LocationList.post')
        data = request.data

        data['user'] = request.user.pk

        if data['source'] == "current":
            loc, created = Location.objects.get_or_create(source='current',
                                                          user=request.user)
            serializer = LocationSerializer(loc, data=request.data)
        else:
            serializer = LocationSerializer(data=data)

        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
Example #21
def request_stop(metrics, response):
    metrics['Request-Timer'].stop()
    metrics['Request-Metric-Millis'] = metrics['Request-Timer'].ms

    statsd.incr(metrics['Request-Metric-ID'])
    statsd.incr("{}.{}".format(metrics['Request-Metric-ID'],
                               response.status_code))

    if DATADOG_METRICS:
        datadog_statsd.increment(metrics['Request-Metric-ID'])
        datadog_statsd.increment("{}.{}".format(metrics['Request-Metric-ID'],
                                                response.status_code))

        datadog_statsd.timing(metrics['Request-Metric-ID'],
                              metrics['Request-Timer'].ms)

    metrics.pop('Request-Timer')

    for name, value in metrics.items():
        response._headers[name] = (name, str(value))
Example #22
def fix_oldandroid(request):
    """Fixes old android requests

    Old versions of Firefox for Android have an in-product feedback form that
    generates POSTS directly to Input and is always "sad" or "idea". The POST
    data matches what the old Input used to do. The new Input doesn't have a
    ``_type`` field and doesn't have "idea" feedback, so we switch "idea" to be
    "sad", put it in the right field and then additionally tag the feedback
    with a source if it doesn't already have one.

    FIXME: Measure usage of this and nix it when we can. See bug #964292.

    :arg request: a Request object

    :returns: a fixed Request object

    """
    # Firefox for Android only sends up sad and idea responses, but it
    # uses the old `_type` variable from old Input. Tweak the data to do
    # what FfA means, not what it says.

    # Make `request.POST` mutable.
    request.POST = request.POST.copy()

    # For _type, 1 is happy, 2 is sad, 3 is idea. We convert that so
    # that _type = 1 -> happy = 1 and everything else -> happy = 0.
    if request.POST.get('_type') == '1':
        happy = 1
    else:
        happy = 0
    request.POST['happy'] = happy

    # If there's no source, then we tag it with a source so we can distinguish
    # these from other feedback and know when we can remove this code.
    if not ('src' in request.GET or 'utm_source' in request.GET):
        request.GET = request.GET.copy()
        request.GET['utm_source'] = 'oldfennec-in-product'

    statsd.incr('feedback.oldandroid')

    return request
Example #23
def fix_oldandroid(request):
    """Fixes old android requests

    Old versions of Firefox for Android have an in-product feedback form that
    generates POSTS directly to Input and is always "sad" or "idea". The POST
    data matches what the old Input used to do. The new Input doesn't have a
    ``_type`` field and doesn't have "idea" feedback, so we switch "idea" to be
    "sad", put it in the right field and then additionally tag the feedback
    with a source if it doesn't already have one.

    FIXME: Measure usage of this and nix it when we can. See bug #964292.

    :arg request: a Request object

    :returns: a fixed Request object

    """
    # Firefox for Android only sends up sad and idea responses, but it
    # uses the old `_type` variable from old Input. Tweak the data to do
    # what FfA means, not what it says.

    # Make `request.POST` mutable.
    request.POST = request.POST.copy()

    # For _type, 1 is happy, 2 is sad, 3 is idea. We convert that so
    # that _type = 1 -> happy = 1 and everything else -> happy = 0.
    if request.POST.get('_type') == '1':
        happy = 1
    else:
        happy = 0
    request.POST['happy'] = happy

    # If there's no source, then we tag it with a source so we can distinguish
    # these from other feedback and know when we can remove this code.
    if not ('src' in request.GET or 'utm_source' in request.GET):
        request.GET = request.GET.copy()
        request.GET['utm_source'] = 'oldfennec-in-product'

    statsd.incr('feedback.oldandroid')

    return request
Example #24
def request_stop(metrics, response):
    metrics['Request-Timer'].stop()
    metrics['Request-Metric-Millis'] = metrics['Request-Timer'].ms

    statsd.incr(metrics['Request-Metric-ID'])
    statsd.incr(f"{metrics['Request-Metric-ID']}.{response.status_code}")

    if DATADOG_METRICS:
        datadog_statsd.increment(metrics['Request-Metric-ID'],
                                 tags=DATADOG_TAGS)
        datadog_statsd.increment(
            f"{metrics['Request-Metric-ID']}.{response.status_code}",
            tags=DATADOG_TAGS)

        datadog_statsd.timing(metrics['Request-Metric-ID'],
                              metrics['Request-Timer'].ms,
                              tags=DATADOG_TAGS)

    metrics.pop('Request-Timer')

    for name, value in metrics.items():
        response._headers[name] = (name, str(value))
Example #25
        def gen():
            buf = BytesIO()
            buf.write(query_enc)
            buf.write(tuple_to_csv(first_tuple).encode('utf-8'))

            # Collect lines in batches of batch_size
            lines = 1
            for t in tuples_iterator:
                buf.write(tuple_to_csv(t).encode('utf-8'))

                lines += 1
                if batch_size is not None and lines >= batch_size:
                    # Return the current batch of lines
                    statsd.incr(statsd_key, lines)
                    yield buf.getvalue()
                    # Start a new batch
                    buf = BytesIO()
                    buf.write(query_enc)
                    lines = 0

            # Return any remaining lines in partial batch
            if lines:
                statsd.incr(statsd_key, lines)
                yield buf.getvalue()
Example #26
    def translate(self, instance, src_lang, src_field, dst_lang, dst_field):
        # If gengosystem is disabled, we just return immediately. We
        # can backfill later.
        if not waffle.switch_is_active('gengosystem'):
            return

        text = getattr(instance, src_field)
        metadata = {
            'tier': self.gengo_tier,
            'locale': instance.locale,
            'length': len(text),
            'body': text[:50].encode('utf-8')
        }

        gengo_api = FjordGengo()

        # Guess the language. If we can't guess the language, then we
        # don't create a GengoJob.
        try:
            lc_src = gengo_api.guess_language(text)
            if lc_src not in gengo_api.get_languages():
                raise GengoUnsupportedLanguage(
                    'unsupported language: {0}'.format(lc_src))

        except GengoUnknownLanguage as exc:
            # FIXME: This might be an indicator that this response is
            # spam. At some point, we can write code to account for
            # that.
            self.log_error(instance, action='guess-language', msg=unicode(exc),
                           metadata=metadata)
            statsd.incr('translation.{0}.unknown'.format(self.name))
            return

        except GengoUnsupportedLanguage as exc:
            # FIXME: This is a similar boat to GengoUnknownLanguage
            # where for now, we're just going to ignore it because I'm
            # not sure what to do about it and I'd like more data.
            self.log_error(instance, action='translate', msg=unicode(exc),
                           metadata=metadata)
            statsd.incr('translation.{0}.unsupported'.format(self.name))
            return

        # If the locale doesn't equal the guessed language, then
        # that's interesting since the user is writing feedback in a
        # language other than what the ui is showing. We want to log
        # that for metrics purposes.
        if not locale_equals_language(instance.locale, lc_src):
            self.log_error(
                instance,
                action='guess-language',
                msg='locale "{0}" != guessed language "{1}"'.format(
                    instance.locale, lc_src),
                metadata=metadata)

        # If the source language is English, we just copy it over and
        # we're done.
        if locale_equals_language(dst_lang, lc_src):
            setattr(instance, dst_field, text)
            instance.save()
            self.log_info(
                instance, action='translate',
                msg=u'lc_src == dst_lang, so we copy src to dst',
                metadata=metadata)
            return

        if ((self.gengo_check_supported_machine_lc_dst
             and lc_src in GENGO_UNSUPPORTED_MACHINE_LC_SRC)):
            return

        # If src/dst isn't a supported pair, log an issue for metrics
        # purposes and move on.
        if ((self.gengo_check_supported_language_pair
             and (lc_src, dst_lang) not in gengo_api.get_language_pairs())):
            self.log_error(
                instance, action='translate',
                msg=u'(lc_src {0}, dst_lang {1}) not supported'.format(
                    lc_src, dst_lang),
                metadata=metadata)
            return

        job = GengoJob(
            tier=self.gengo_tier,
            content_object=instance,
            src_lang=lc_src,
            src_field=src_field,
            dst_lang=dst_lang,
            dst_field=dst_field
        )
        job.save()
Example #27
def heartbeat_failed_handler(sender, level, **kwargs):
    statsd.incr("heartbeat.fail")
Example #28
def heartbeat_passed_handler(sender, level, **kwargs):
    statsd.incr("heartbeat.pass")
Example #29
def post_save(sender, instance, **kwargs):
    statsd.incr('%s.sync.post_save' % config.STATSD_PREFIX, 1)
    if issubclass(sender, ClickHouseSyncModel):
        instance.post_save(kwargs.get('created', False),
                           using=kwargs.get('using'))
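A minimal wiring sketch for the receiver above, assuming it is connected to Django's post_save signal for all models (the receiver itself filters on ClickHouseSyncModel subclasses):

from django.db.models.signals import post_save as django_post_save

# Connect globally; non-ClickHouseSyncModel senders only increment the counter.
django_post_save.connect(post_save)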
Example #30
    def translate(self, instance, src_lang, src_field, dst_lang, dst_field):
        # If gengosystem is disabled, we just return immediately. We
        # can backfill later.
        if not waffle.switch_is_active('gengosystem'):
            return

        text = getattr(instance, src_field)
        metadata = {
            'tier': self.gengo_tier,
            'locale': instance.locale,
            'length': len(text),
            'body': text[:50].encode('utf-8')
        }

        gengo_api = FjordGengo()

        # Guess the language. If we can't guess the language, then we
        # don't create a GengoJob.
        try:
            lc_src = gengo_api.guess_language(text)
            if lc_src not in gengo_api.get_languages():
                raise GengoUnsupportedLanguage(
                    'unsupported language: {0}'.format(lc_src))

        except GengoUnknownLanguage as exc:
            # FIXME: This might be an indicator that this response is
            # spam. At some point, we can write code to account for
            # that.
            self.log_error(instance,
                           action='guess-language',
                           msg=unicode(exc),
                           metadata=metadata)
            statsd.incr('translation.{0}.unknown'.format(self.name))
            return

        except GengoUnsupportedLanguage as exc:
            # FIXME: This is a similar boat to GengoUnknownLanguage
            # where for now, we're just going to ignore it because I'm
            # not sure what to do about it and I'd like more data.
            self.log_error(instance,
                           action='translate',
                           msg=unicode(exc),
                           metadata=metadata)
            statsd.incr('translation.{0}.unsupported'.format(self.name))
            return

        # If the locale doesn't equal the guessed language, then
        # that's interesting since the user is writing feedback in a
        # language other than what the ui is showing. We want to log
        # that for metrics purposes.
        if not locale_equals_language(instance.locale, lc_src):
            self.log_error(instance,
                           action='guess-language',
                           msg='locale "{0}" != guessed language "{1}"'.format(
                               instance.locale, lc_src),
                           metadata=metadata)

        # If the source language is English, we just copy it over and
        # we're done.
        if locale_equals_language(dst_lang, lc_src):
            setattr(instance, dst_field, text)
            instance.save()
            self.log_info(instance,
                          action='translate',
                          msg=u'lc_src == dst_lang, so we copy src to dst',
                          metadata=metadata)
            return

        if ((self.gengo_check_supported_machine_lc_dst
             and lc_src in GENGO_UNSUPPORTED_MACHINE_LC_SRC)):
            return

        # If src/dst isn't a supported pair, log an issue for metrics
        # purposes and move on.
        if ((self.gengo_check_supported_language_pair
             and (lc_src, dst_lang) not in gengo_api.get_language_pairs())):
            self.log_error(
                instance,
                action='translate',
                msg=u'(lc_src {0}, dst_lang {1}) not supported'.format(
                    lc_src, dst_lang),
                metadata=metadata)
            return

        job = GengoJob(tier=self.gengo_tier,
                       content_object=instance,
                       src_lang=lc_src,
                       src_field=src_field,
                       dst_lang=dst_lang,
                       dst_field=dst_field)
        job.save()
Example #31
def feedback_router(request,
                    product=None,
                    version=None,
                    channel=None,
                    *args,
                    **kwargs):
    """Determine a view to use, and call it.

    If product is given, reference `product_routes` to look up a view.
    If `product` is not passed, or isn't found in `product_routes`,
    assume the user is either a stable desktop Firefox or a stable
    mobile Firefox based on the parsed UA, and serve them the
    appropriate page. This is to handle the old formname way of doing
    things. At some point P, we should measure usage of the old
    formnames and deprecate them.

    This also handles backwards-compatibility with the old Firefox for
    Android form which can't have a CSRF token.

    .. Note::

       1. We never want to cache this view

       2. Pages returned from this view will get an::

              X-Frame-Options: DENY

          HTTP header. That's important because these pages have magic
          powers and should never be used in frames. Please do not
          change this!

    """
    view = None

    if '_type' in request.POST:
        # Checks to see if `_type` is in the POST data and if so this
        # is coming from Firefox for Android which doesn't know
        # anything about csrf tokens. If that's the case, we send it
        # to a view specifically for FfA. Otherwise we pass it to one
        # of the normal views, which enforces CSRF. Also, nix the
        # product just in case we're crossing the streams and
        # confusing new-style product urls with old-style backwards
        # compatibility for the Android form.
        #
        # FIXME: Remove this harebrained monstrosity when we don't need to
        # support the method that Firefox for Android currently uses to
        # post feedback which worked with the old input.mozilla.org.
        view = android_about_feedback
        product = None

        # This lets us measure how often this section of code kicks
        # off and thus how often old android stuff is happening. When
        # we're not seeing this anymore, we can nix all the old
        # android stuff.
        statsd.incr('feedback.oldandroid')

        return android_about_feedback(request, request.locale)

    # FIXME - validate these better
    product = smart_str(product, fallback=None)
    version = smart_str(version)
    channel = smart_str(channel).lower()

    if product == 'fxos' or request.BROWSER.browser == 'Firefox OS':
        # Firefox OS gets shunted to a different form which has
        # different Firefox OS specific questions.
        view = firefox_os_stable_feedback
        product = 'fxos'

    elif product in PRODUCT_OVERRIDE:
        # If the product is really a form name, we use that
        # form specifically.
        view = PRODUCT_OVERRIDE[product]
        product = None

    elif (product is None
          or product not in models.Product.objects.get_product_map()):

        picker_products = models.Product.objects.filter(enabled=True,
                                                        on_picker=True)
        return render(request, 'feedback/picker.html',
                      {'products': picker_products})

    product = models.Product.objects.from_slug(product)

    if view is None:
        view = generic_feedback

    return view(request, request.locale, product, version, channel, *args,
                **kwargs)
Example #32
 def rest_success(self):
     statsd.incr('heartbeat.success')
     return rest_framework.response.Response(
         status=201,
         data={'msg': 'success!'})
Example #33
def _handle_feedback_post(request, locale=None, product=None,
                          version=None, channel=None):
    """Saves feedback post to db accounting for throttling

    :arg request: request we're handling the post for
    :arg locale: locale specified in the url
    :arg product: None or the Product
    :arg version: validated and sanitized version specified in the url
    :arg channel: validated and sanitized channel specified in the url

    """
    if getattr(request, 'limited', False):
        # If we're throttled, then return the thanks page, but don't
        # add the response to the db.
        return HttpResponseRedirect(reverse('thanks'))

    # Get the form and run is_valid() so it goes through the
    # validation and cleaning machinery. We don't really care if it's
    # valid, though, since we will take what we got and do the best we
    # can with it. Error validation is now in JS.
    form = ResponseForm(request.POST)
    form.is_valid()

    get_data = request.GET.copy()

    data = form.cleaned_data

    description = data.get('description', u'').strip()
    if not description:
        # If there's no description, then there's nothing to do here,
        # so thank the user and move on.
        return HttpResponseRedirect(reverse('thanks'))

    opinion = models.Response(
        # Data coming from the user
        happy=data['happy'],
        url=clean_url(data.get('url', u'').strip()),
        description=description,

        # Pulled from the form data or the url
        locale=data.get('locale', locale),

        # Data from mobile devices which is probably only
        # applicable to mobile devices
        manufacturer=data.get('manufacturer', ''),
        device=data.get('device', ''),
    )

    # Add user_agent and inferred data.
    user_agent = request.META.get('HTTP_USER_AGENT', '')
    if user_agent:
        browser = request.BROWSER

        opinion.browser = browser.browser[:30]
        opinion.browser_version = browser.browser_version[:30]
        bp = browser.platform
        if bp == 'Windows':
            bp += (' ' + browser.platform_version)
        opinion.browser_platform = bp[:30]
        opinion.user_agent = user_agent[:config.USER_AGENT_LENGTH]

    # source is src or utm_source
    source = (
        get_data.pop('src', [u''])[0] or
        get_data.pop('utm_source', [u''])[0]
    )
    if source:
        opinion.source = source[:100]

    campaign = get_data.pop('utm_campaign', [u''])[0]
    if campaign:
        opinion.campaign = campaign[:100]

    # If they sent "happy=1"/"happy=0" in the querystring, it will get
    # picked up by the javascript in the form and we can just drop it
    # here.
    get_data.pop('happy', None)

    platform = u''

    if product:
        # If we have a product at this point, then it came from the
        # url and it's a Product instance and we need to turn it into
        # the product.db_name which is a string.
        product_db_name = product.db_name
    else:
        # Check the POST data for the product.
        product_db_name = data.get('product', '')

    # For the version, we try the url data, then the POST data.
    version = version or data.get('version', '')

    # At this point, we have a bunch of values, but we might be
    # missing some values, too. We're going to cautiously infer data
    # from the user agent where we're very confident it's appropriate
    # to do so.
    if request.BROWSER != UNKNOWN:
        # If we don't have a product, try to infer that from the user
        # agent information.
        if not product_db_name:
            product_db_name = models.Response.infer_product(request.BROWSER)

        # If we have a product and it matches the user agent browser,
        # then we can infer the version and platform from the user
        # agent if they're missing.
        if product_db_name:
            product = models.Product.objects.get(db_name=product_db_name)
            if product.browser and product.browser == request.BROWSER.browser:
                if not version:
                    version = request.BROWSER.browser_version
                if not platform:
                    platform = models.Response.infer_platform(
                        product_db_name, request.BROWSER)

    # Make sure values are at least empty strings--no Nones.
    opinion.product = (product_db_name or u'')[:30]
    opinion.version = (version or u'')[:30]
    opinion.channel = (channel or u'')[:30]
    opinion.platform = (platform or u'')[:30]

    opinion.save()

    # If there was an email address, save that separately.
    if data.get('email_ok') and data.get('email'):
        e = models.ResponseEmail(email=data['email'], opinion=opinion)
        e.save()
        statsd.incr('feedback.emaildata.optin')

    # If there's browser data, save that separately.
    if data.get('browser_ok'):
        # This comes in as a JSON string. Because we're using
        # JSONObjectField, we need to convert it back to Python and
        # then save it. This is kind of silly, but it does guarantee
        # we have valid JSON.
        try:
            browser_data = data['browser_data']
            browser_data = json.loads(browser_data)

        except ValueError:
            # Handles empty string and any non-JSON value.
            statsd.incr('feedback.browserdata.badvalue')

        except KeyError:
            # Handles the case where it's missing from the data
            # dict. If it's missing, we don't want to do anything
            # including metrics.
            pass

        else:
            # If browser_data isn't an empty dict, then save it.
            if browser_data:
                rti = models.ResponsePI(
                    data=browser_data, opinion=opinion)
                rti.save()
                statsd.incr('feedback.browserdata.optin')

    if get_data:
        # There was extra context in the query string, so we grab that
        # with some restrictions and save it separately.
        slop = {}

        # We capture at most the first 20 key/val pairs
        get_data_items = sorted(get_data.items())[:20]

        for key, val in get_data_items:
            # Keys can be at most 20 characters long.
            key = key[:20]
            if len(val) == 1:
                val = val[0]

            # Values can be at most 100 characters long.
            val = val[:100]
            slop[key.encode('utf-8')] = val.encode('utf-8')

        context = models.ResponseContext(data=slop, opinion=opinion)
        context.save()
        statsd.incr('feedback.contextdata.optin')

    if data['happy']:
        statsd.incr('feedback.happy')
    else:
        statsd.incr('feedback.sad')

    request.session['response_id'] = opinion.id

    return HttpResponseRedirect(reverse('thanks'))
Example #34
 def _es_error_statsd(*args, **kwargs):
     try:
         return fun(*args, **kwargs)
     except ElasticsearchException:
         statsd.incr('elasticsearch.error')
         raise
Example #35
def say_hello(request):
    statsd.incr('hello.requests.total')
    return HttpResponse('Hello, World!')
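A minimal URL wiring sketch for the view above (module names are hypothetical):

from django.urls import path

from . import views  # assumed module containing say_hello

urlpatterns = [
    path('hello/', views.say_hello),
]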
Example #36
 def on_task_revoked(event):
     task = _get_task(event)
     statsd_client.incr(f"celery.{task.name}.revoked")
Example #37
 def on_worker_offline(event):
     statsd_client.incr(f'celery.workers.{event["hostname"]}.offline')
Example #38
def _handle_feedback_post(request, locale=None, product=None,
                          version=None, channel=None):
    """Saves feedback post to db accounting for throttling

    :arg request: request we're handling the post for
    :arg locale: locale specified in the url
    :arg product: None or the Product
    :arg version: validated and sanitized version specified in the url
    :arg channel: validated and sanitized channel specified in the url

    """
    if getattr(request, 'limited', False):
        # If we're throttled, then return the thanks page, but don't
        # add the response to the db.
        return HttpResponseRedirect(reverse('thanks'))

    # Get the form and run is_valid() so it goes through the
    # validation and cleaning machinery. We don't really care if it's
    # valid, though, since we will take what we got and do the best we
    # can with it. Error validation is now in JS.
    form = ResponseForm(request.POST)
    form.is_valid()

    get_data = request.GET.copy()

    data = form.cleaned_data

    description = data.get('description', u'').strip()
    if not description:
        # If there's no description, then there's nothing to do here,
        # so thank the user and move on.
        return HttpResponseRedirect(reverse('thanks'))

    opinion = models.Response(
        # Data coming from the user
        happy=data['happy'],
        url=clean_url(data.get('url', u'').strip()),
        description=description,

        # Pulled from the form data or the url
        locale=data.get('locale', locale),

        # Data from mobile devices which is probably only
        # applicable to mobile devices
        manufacturer=data.get('manufacturer', ''),
        device=data.get('device', ''),
    )

    # Add user_agent and inferred data.
    user_agent = request.META.get('HTTP_USER_AGENT', '')
    if user_agent:
        browser = request.BROWSER

        opinion.browser = browser.browser[:30]
        opinion.browser_version = browser.browser_version[:30]
        bp = browser.platform
        if bp == 'Windows':
            bp += (' ' + browser.platform_version)
        opinion.browser_platform = bp[:30]
        opinion.user_agent = user_agent[:config.USER_AGENT_LENGTH]

    # source is src or utm_source
    source = (
        get_data.pop('src', [u''])[0] or
        get_data.pop('utm_source', [u''])[0]
    )
    if source:
        opinion.source = source[:100]

    campaign = get_data.pop('utm_campaign', [u''])[0]
    if campaign:
        opinion.campaign = campaign[:100]

    # If they sent "happy=1"/"happy=0" in the querystring, it will get
    # picked up by the javascript in the form and we can just drop it
    # here.
    get_data.pop('happy', None)

    platform = u''

    if product:
        # If we have a product at this point, then it came from the
        # url and it's a Product instance and we need to turn it into
        # the product.db_name which is a string.
        product_db_name = product.db_name
    else:
        # Check the POST data for the product.
        product_db_name = data.get('product', '')

    # For the version, we try the url data, then the POST data.
    version = version or data.get('version', '')

    # At this point, we have a bunch of values, but we might be
    # missing some values, too. We're going to cautiously infer data
    # from the user agent where we're very confident it's appropriate
    # to do so.
    if request.BROWSER != UNKNOWN:
        # If we don't have a product, try to infer that from the user
        # agent information.
        if not product_db_name:
            product_db_name = models.Response.infer_product(request.BROWSER)

        # If we have a product and it matches the user agent browser,
        # then we can infer the version and platform from the user
        # agent if they're missing.
        if product_db_name:
            product = models.Product.objects.get(db_name=product_db_name)
            if product.browser and product.browser == request.BROWSER.browser:
                if not version:
                    version = request.BROWSER.browser_version
                if not platform:
                    platform = models.Response.infer_platform(
                        product_db_name, request.BROWSER)

    # Make sure values are at least empty strings--no Nones.
    opinion.product = (product_db_name or u'')[:30]
    opinion.version = (version or u'')[:30]
    opinion.channel = (channel or u'')[:30]
    opinion.platform = (platform or u'')[:30]

    opinion.save()

    # If there was an email address, save that separately.
    if data.get('email_ok') and data.get('email'):
        e = models.ResponseEmail(email=data['email'], opinion=opinion)
        e.save()
        statsd.incr('feedback.emaildata.optin')

    # If there's browser data, save that separately.
    if data.get('browser_ok'):
        # This comes in as a JSON string. Because we're using
        # JSONObjectField, we need to convert it back to Python and
        # then save it. This is kind of silly, but it does guarantee
        # we have valid JSON.
        try:
            browser_data = data['browser_data']
            browser_data = json.loads(browser_data)

        except ValueError:
            # Handles empty string and any non-JSON value.
            statsd.incr('feedback.browserdata.badvalue')

        except KeyError:
            # Handles the case where it's missing from the data
            # dict. If it's missing, we don't want to do anything
            # including metrics.
            pass

        else:
            # If browser_data isn't an empty dict, then save it.
            if browser_data:
                rti = models.ResponsePI(
                    data=browser_data, opinion=opinion)
                rti.save()
                statsd.incr('feedback.browserdata.optin')

    if get_data:
        # There was extra context in the query string, so we grab that
        # with some restrictions and save it separately.
        slop = {}

        # We capture at most the first 20 key/val pairs
        get_data_items = sorted(get_data.items())[:20]

        for key, val in get_data_items:
            # Keys can be at most 20 characters long.
            key = key[:20]
            if len(val) == 1:
                val = val[0]

            # Values can be at most 100 characters long.
            val = val[:100]
            slop[key.encode('utf-8')] = val.encode('utf-8')

        context = models.ResponseContext(data=slop, opinion=opinion)
        context.save()
        statsd.incr('feedback.contextdata.optin')

    if data['happy']:
        statsd.incr('feedback.happy')
    else:
        statsd.incr('feedback.sad')

    request.session['response_id'] = opinion.id

    return HttpResponseRedirect(reverse('thanks'))
Example #39
def feedback_router(request, product=None, version=None, channel=None,
                    *args, **kwargs):
    """Determine a view to use, and call it.

    If product is given, reference `product_routes` to look up a view.
    If `product` is not passed, or isn't found in `product_routes`,
    assume the user is either a stable desktop Firefox or a stable
    mobile Firefox based on the parsed UA, and serve them the
    appropriate page. This is to handle the old formname way of doing
    things. At some point P, we should measure usage of the old
    formnames and deprecate them.

    This also handles backwards-compatibility with the old Firefox for
    Android form which can't have a CSRF token.

    .. Note::

       1. We never want to cache this view

       2. Pages returned from this view will get an::

              X-Frame-Options: DENY

          HTTP header. That's important because these pages have magic
          powers and should never be used in frames. Please do not
          change this!

    """
    view = None

    if '_type' in request.POST:
        # Checks to see if `_type` is in the POST data and if so this
        # is coming from Firefox for Android which doesn't know
        # anything about csrf tokens. If that's the case, we send it
        # to a view specifically for FfA. Otherwise we pass it to one
        # of the normal views, which enforces CSRF. Also, nix the
        # product just in case we're crossing the streams and
        # confusing new-style product urls with old-style backwards
        # compatibility for the Android form.
        #
        # FIXME: Remove this harebrained monstrosity when we don't need to
        # support the method that Firefox for Android currently uses to
        # post feedback which worked with the old input.mozilla.org.
        view = android_about_feedback
        product = None

        # This lets us measure how often this section of code kicks
        # off and thus how often old android stuff is happening. When
        # we're not seeing this anymore, we can nix all the old
        # android stuff.
        statsd.incr('feedback.oldandroid')

        return android_about_feedback(request, request.locale)

    # FIXME - validate these better
    product = smart_str(product, fallback=None)
    version = smart_str(version)
    channel = smart_str(channel).lower()

    if product == 'fxos' or request.BROWSER.browser == 'Firefox OS':
        # Firefox OS gets shunted to a different form which has
        # different Firefox OS specific questions.
        view = firefox_os_stable_feedback
        product = 'fxos'

    elif product in PRODUCT_OVERRIDE:
        # If the product is really a form name, we use that
        # form specifically.
        view = PRODUCT_OVERRIDE[product]
        product = None

    elif (product is None
          or product not in models.Product.objects.get_product_map()):

        picker_products = models.Product.objects.on_picker()
        return render(request, 'feedback/picker.html', {
            'products': picker_products
        })

    product = models.Product.objects.from_slug(product)

    if view is None:
        view = generic_feedback

    return view(request, request.locale, product, version, channel,
                *args, **kwargs)
Example #40
    def post(self, request):
        post_data = dict(request.data)

        # If it's an empty packet, bail immediately with a nicer message.
        if not post_data:
            statsd.incr('heartbeat.emptypacket')
            return self.rest_error(post_data, 'empty packet')

        # Stopgap fix for 1195747 where the hb client is sending
        # "unknown" which fails validation because the column has
        # max_length 4.
        if post_data.get('country', '') == 'unknown':
            post_data['country'] = 'UNK'

        serializer = AnswerSerializer(data=post_data)
        if not serializer.is_valid():
            statsd.incr('heartbeat.invaliddata')
            return self.rest_error(post_data, serializer.errors)

        valid_data = serializer.validated_data

        try:
            # Pin to master db to avoid replication lag issues and stale data.
            pin_this_thread()

            # Try to save it and if it kicks up an integrity error, then
            # we already have this object and we should update it with the
            # existing stuff.
            #
            # Note: This is like get_or_create(), but does it in the
            # reverse order so as to eliminate the race condition by
            # having the db enforce integrity.
            try:
                with transaction.atomic():
                    serializer.save()
                    return self.rest_success()
            except IntegrityError:
                pass

            # Failing the save() above means there's an existing Answer,
            # so we fetch the existing answer to update.
            ans = Answer.objects.get(
                person_id=valid_data['person_id'],
                survey_id=valid_data['survey_id'],
                flow_id=valid_data['flow_id']
            )

            # Check the updated timestamp. If it's the same or older, we
            # throw an error and skip it.
            if post_data['updated_ts'] <= ans.updated_ts:
                statsd.incr('heartbeat.oldtimestamp')
                return self.rest_error(
                    post_data,
                    {'updated_ts': ('updated timestamp is same or older than '
                                    'existing data')},
                    log_errors=False
                )

            # Everything is valid, so we update the Answer and save it.

            # Go through all the fields we want to save except
            # survey--that's already all set.
            for field in Answer._meta.fields:
                field_name = field.name
                if field_name in ('id', 'survey_id'):
                    continue

                if field_name in post_data:
                    setattr(ans, field_name, post_data[field_name])

            ans.save()

        finally:
            unpin_this_thread()

        return self.rest_success()
Example #41
    def generate_translation_jobs(self, system=None):
        """Returns a list of tuples, one for each translation job

        If the locale of this response is English, then we just copy over
        the description and we're done.

        If the product of this response isn't set up for
        auto-translation and no translation system was specified in
        the arguments, then we're done.

        If we already have a response with this text that's
        translated, we copy the most recent translation over.

        Otherwise we generate a list of jobs to be done.

        """
        if self.translated_description:
            return []

        # If the text is coming from an English-speaking locale, we
        # assume it's also in English and just copy it over. We do
        # this regardless of whether auto-translation is enabled or
        # not for this product.
        if self.locale and self.locale.startswith('en'):
            self.translated_description = self.description
            self.save()
            return []

        if not system:
            try:
                prod = Product.objects.get(db_name=self.product)
                system = prod.translation_system
            except Product.DoesNotExist:
                # If the product doesn't exist, then I don't know
                # what's going on. Regardless, we shouldn't create any
                # translation jobs.
                return []

        if not system:
            # If this product isn't set up for translation, don't
            # translate it.
            return []

        try:
            # See if this text has been translated already--if so, use
            # the most recent translation.
            existing_translation = (Response.objects.filter(
                description=self.description).filter(
                    locale=self.locale).exclude(
                        translated_description__isnull=True).exclude(
                            translated_description=u'').values_list(
                                'translated_description').latest('id'))
            self.translated_description = existing_translation[0]
            self.save()
            statsd.incr('feedback.translation.used_existing')
            return []
        except Response.DoesNotExist:
            pass

        return [
            # key, system, src language, src field, dst language, dst field
            (instance_to_key(self), system, self.locale, 'description', u'en',
             'translated_description')
        ]
Example #42
 def on_task_failed(event):
     task = _get_task(event)
     statsd_client.incr(f"celery.{task.name}.failed")
Example #43
 def on_worker_heartbeat(event):
     statsd_client.incr(f'celery.workers.{event["hostname"]}.heartbeat')
Example #44
    def generate_translation_jobs(self, system=None):
        """Returns a list of tuples, one for each translation job

        If the locale of this response is English, then we just copy over
        the description and we're done.

        If the product of this response isn't set up for
        auto-translation and no translation system was specified in
        the arguments, then we're done.

        If we already have a response with this text that's
        translated, we copy the most recent translation over.

        Otherwise we generate a list of jobs to be done.

        """
        if self.translated_description:
            return []

        # If the text is coming from an English-speaking locale, we
        # assume it's also in English and just copy it over. We do
        # this regardless of whether auto-translation is enabled or
        # not for this product.
        if self.locale and self.locale.startswith('en'):
            self.translated_description = self.description
            self.save()
            return []

        if not system:
            try:
                prod = Product.objects.get(db_name=self.product)
                system = prod.translation_system
            except Product.DoesNotExist:
                # If the product doesn't exist, then I don't know
                # what's going on. Regardless, we shouldn't create any
                # translation jobs.
                return []

        if not system:
            # If this product isn't set up for translation, don't
            # translate it.
            return []

        try:
            # See if this text has been translated already--if so, use
            # the most recent translation.
            existing_translation = (
                Response.objects
                .filter(description=self.description)
                .filter(locale=self.locale)
                .exclude(translated_description__isnull=True)
                .exclude(translated_description=u'')
                .values_list('translated_description')
                .latest('id')
            )
            self.translated_description = existing_translation[0]
            self.save()
            statsd.incr('feedback.translation.used_existing')
            return []
        except Response.DoesNotExist:
            pass

        return [
            # key, system, src language, src field, dst language, dst field
            (instance_to_key(self), system, self.locale, 'description',
             u'en', 'translated_description')
        ]
Example #45
 def on_task_succeeded(event):
     task = _get_task(event)
     task_info = task.info()
     statsd_client.incr(f"celery.{task.name}.succeeded")
     statsd_client.timing(f"celery.{task.name}.runtime", int(task_info["runtime"] * 1000))
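The Celery event handlers in the examples above (on_task_succeeded, on_task_failed, on_task_revoked, on_worker_heartbeat, on_worker_offline) are typically wired into an events Receiver. A minimal capture-loop sketch, assuming a configured Celery app and a _get_task helper backed by app.events.State as in those handlers:

def run_monitor(app):
    # Keep task state in memory so _get_task(event) can resolve task names and runtimes.
    state = app.events.State()

    with app.connection() as connection:
        recv = app.events.Receiver(connection, handlers={
            'task-succeeded': on_task_succeeded,
            'task-failed': on_task_failed,
            'task-revoked': on_task_revoked,
            'worker-heartbeat': on_worker_heartbeat,
            'worker-offline': on_worker_offline,
            '*': state.event,  # keep the state object up to date for all other events
        })
        recv.capture(limit=None, timeout=None, wakeup=True)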