Example #1
    def setUp(self, switch_is_active):
        switch_is_active.return_value = True

        super(KarmaAPITests, self).setUp()

        try:
            self.mgr = KarmaManager()
            redis_client('karma').flushdb()
        except RedisError:
            raise SkipTest

        self.user1 = user(save=True)
        self.user2 = user(save=True)
        self.user3 = user(save=True)

        TestAction1(user=self.user1).save()
        TestAction2(user=self.user2).save()
        TestAction2(user=self.user2).save()
        TestAction1(user=self.user3).save()
        TestAction1(user=self.user3).save()
        TestAction1(user=self.user3).save()
        self.mgr.update_top()

        self.client.login(username=self.user1.username, password='******')
        add_permission(self.user1, models.Title, 'view_dashboard')
Example #2
    def setUp(self, switch_is_active):
        switch_is_active.return_value = True

        super(KarmaManagerTests, self).setUp()

        try:
            self.mgr = KarmaManager()
            redis_client('karma').flushdb()
        except RedisError:
            raise SkipTest

        self.user1 = user(save=True)
        self.user2 = user(save=True)
        self.user3 = user(save=True)

        today = date.today()

        # user1 actions (3 + 3 + 7):
        TestAction1(user=self.user1, day=today).save()
        TestAction1(user=self.user1, day=today).save()
        TestAction2(user=self.user1, day=today).save()

        # user2 actions (3 + 7 + 7):
        TestAction1(user=self.user2, day=today - timedelta(days=8)).save()
        TestAction2(user=self.user2, day=today - timedelta(days=32)).save()
        TestAction2(user=self.user2, day=today - timedelta(days=360)).save()

        # user3 actions (3 + 3 + 3 + 7):
        TestAction1(user=self.user3, day=today - timedelta(days=10)).save()
        TestAction1(user=self.user3, day=today - timedelta(days=40)).save()
        TestAction1(user=self.user3, day=today - timedelta(days=190)).save()
        TestAction2(user=self.user3, day=today - timedelta(days=3)).save()
Example #3
 def setUp(self):
     super(KarmaActionTests, self).setUp()
     self.user = user(save=True)
     try:
         self.mgr = KarmaManager()
         redis_client('karma').flushdb()
     except RedisError:
         raise SkipTest
Example #4
def reindex_with_scoreboard(mapping_type_names):
    """Reindex all instances of a given mapping type with celery tasks.

    This will use Redis to keep track of outstanding tasks so nothing
    gets screwed up by two jobs running at once.
    """
    # TODO: If this gets fux0rd, then it's possible this could be
    # non-zero and we really want to just ignore it. Need the ability
    # to ignore it.
    try:
        client = redis_client('default')
        val = client.get(OUTSTANDING_INDEX_CHUNKS)
        if val is not None and int(val) > 0:
            raise ReindexError('There are %s outstanding chunks.' % val)

        # We don't know how many chunks we're building, but we do want
        # to make sure another reindex request doesn't slide in here
        # and kick off a bunch of chunks.
        #
        # There is a race condition here.
        client.set(OUTSTANDING_INDEX_CHUNKS, 1)
    except RedisError:
        log.warning('Redis not running. Can not check if there are '
                    'outstanding tasks.')

    batch_id = create_batch_id()

    # Break up all the things we want to index into chunks. This
    # chunkifies by class then by chunk size. Also generate
    # reconcile_tasks.
    chunks = []
    for cls, indexable in get_indexable(mapping_types=mapping_type_names):
        chunks.extend(
            (cls, chunk) for chunk in chunked(indexable, CHUNK_SIZE))

        reconcile_task.delay(cls.get_index(), batch_id,
                             cls.get_mapping_type_name())

    chunks_count = len(chunks)

    try:
        client = redis_client('default')
        client.set(OUTSTANDING_INDEX_CHUNKS, chunks_count)
    except RedisError:
        log.warning('Redis not running. Can\'t denote outstanding tasks.')

    for chunk in chunks:
        index = chunk[0].get_index()
        index_chunk_task.delay(index, batch_id, chunk)
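The TODO and the comments above admit a race between the GET and the SET on the scoreboard key. One conventional way to close it is an atomic SETNX guard on a separate lock key; the sketch below is hypothetical (claim_reindex_lock() and the 'search:reindex-lock' key are illustrative, not part of the project):

def claim_reindex_lock(client, timeout=60 * 60):
    # Hypothetical sketch: SETNX creates the key only if it does not already
    # exist, so exactly one caller wins; EXPIRE keeps a crashed job from
    # holding the lock forever.
    if not client.setnx('search:reindex-lock', 1):
        raise ReindexError('Another reindex is already in progress.')
    client.expire('search:reindex-lock', timeout)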
Example #5
 def setUp(self):
     super(TestDocumentLocking, self).setUp()
     try:
         self.redis = redis_client('default')
         self.redis.flushdb()
     except RedisError:
         raise SkipTest
Example #6
def get_bundle(request):
    if 'locale' not in request.GET or 'product' not in request.GET:
        return HttpResponseBadRequest(BAD_REQUEST, mimetype='application/json')

    locale = request.GET['locale']
    product = request.GET['product']
    if locale.lower() not in settings.LANGUAGES_DICT:
        return HttpResponseNotFound(INVALID_LOCALE,
                                    mimetype='application/json')

    name = redis_bundle_name(locale, product)
    try:
        redis = redis_client('default')
    except RedisError:
        return HttpResponse('not available yet', status=503)
    else:
        bundle = redis.hget(name, 'bundle')
        bundle_hash = redis.hget(name, 'hash')

    if bundle is None:
        return HttpResponseNotFound(NOT_FOUND, mimetype='application/json')

    response = HttpResponse(bundle, mimetype='application/json')
    response['Content-Length'] = len(bundle)
    response['X-Content-Hash'] = bundle_hash
    response['Access-Control-Expose-Headers'] = \
        'Content-Length, X-Content-Hash'

    return response
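redis_bundle_name() itself is not among these examples, but the offline admin view later in this list (Example #13) parses keys of the form osumo:locale~product, so a plausible reconstruction (an assumption inferred from that parsing, not verified source) is:

def redis_bundle_name(locale, product):
    # Assumed shape, inferred from the 'osumo:locale~product' key schema
    # that offline_admin() splits apart; lowercasing makes lookups
    # case-insensitive.
    return 'osumo:%s~%s' % (locale.lower(), product.lower())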
Example #7
def redis_info(request):
    """Admin view that displays redis INFO+CONFIG output for all backends."""
    redis_info = {}
    for key in django_settings.REDIS_BACKENDS.keys():
        redis_info[key] = {}
        client = redis_client(key)
        redis_info[key]['connection'] = django_settings.REDIS_BACKENDS[key]
        try:
            cfg = client.config_get()
            redis_info[key]['config'] = [{
                'key': k,
                'value': cfg[k]
            } for k in sorted(cfg)]
            info = client.info()
            redis_info[key]['info'] = [{
                'key': k,
                'value': info[k]
            } for k in sorted(info)]
        except ConnectionError:
            redis_info[key]['down'] = True

    return render_to_response('kadmin/redis.html', {
        'redis_info': redis_info,
        'title': 'Redis Information'
    }, RequestContext(request, {}))
Example #8
    def test_creator_nums_redis(self, switch_is_active):
        """Test creator_num_* pulled from karma data."""
        try:
            KarmaManager()
            redis_client('karma').flushdb()
        except RedisError:
            raise SkipTest

        switch_is_active.return_value = True
        a = answer(save=True)

        AnswerAction(a.creator).save()
        AnswerAction(a.creator).save()
        SolutionAction(a.creator).save()

        eq_(a.creator_num_solutions, 1)
        eq_(a.creator_num_answers, 3)
Example #9
 def setUp(self):
     super(TopUnhelpfulArticlesCronTests, self).setUp()
     self.REDIS_KEY = settings.HELPFULVOTES_UNHELPFUL_KEY
     try:
         self.redis = redis_client('helpfulvotes')
         self.redis.flushdb()
     except RedisError:
         raise SkipTest
Example #10
def init_karma():
    """Flushes the karma redis backend and populates with fresh data.

    Goes through all questions/answers/votes and save karma actions for them.
    """
    if not waffle.switch_is_active('karma'):
        return

    redis_client('karma').flushdb()

    questions = Question.objects.all()
    for chunk in chunked(questions.values_list('pk', flat=True), 200):
        _process_question_chunk.apply_async(args=[chunk])

    votes = AnswerVote.objects.all()
    for chunk in chunked(votes.values_list('pk', flat=True), 1000):
        _process_answer_vote_chunk.apply_async(args=[chunk])
Example #11
 def __init__(self, redis=None):
     if not redis:
         try:
             redis = redis_client(name='karma')
         except RedisError as e:
             statsd.incr('redis.error')
             log.error('Redis error: %s' % e)
     self.redis = redis
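On RedisError this constructor only logs, so self.redis is left as None and every method on the manager has to tolerate a dead backend. A hypothetical guard, where count() and the key format are illustrative rather than the project's API:

 def count(self, user, action_type):
     # Hypothetical sketch: degrade gracefully when the manager was built
     # without a live Redis connection.
     if self.redis is None:
         return None
     return self.redis.hget('karma:user:%d' % user.id, action_type)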
Example #12
class UnhelpfulReadout(Readout):
    title = _lazy(u'Unhelpful Documents')

    short_title = _lazy(u'Unhelpful', 'document')
    details_link_text = _lazy(u'All unhelpful articles...')
    slug = 'unhelpful'
    column3_label = _lazy(u'Total Votes')
    column4_label = _lazy(u'Helpfulness')
    modes = []
    default_mode = None

    # This class is a namespace and doesn't get instantiated.
    key = settings.HELPFULVOTES_UNHELPFUL_KEY
    try:
        hide_readout = redis_client('helpfulvotes').llen(key) == 0
    except RedisError as e:
        log.error('Redis error: %s' % e)
        hide_readout = True

    def rows(self, max=None):
        REDIS_KEY = settings.HELPFULVOTES_UNHELPFUL_KEY
        try:
            redis = redis_client('helpfulvotes')
            length = redis.llen(REDIS_KEY)
            max_get = max or length
            output = redis.lrange(REDIS_KEY, 0, max_get)
        except RedisError as e:
            log.error('Redis error: %s' % e)
            output = []

        data = []
        for r in output:
            row = self._format_row(r)
            if row:
                data.append(row)

        return data

    def _format_row(self, strresult):
        result = strresult.split('::')

        # Filter by product
        if self.product:
            doc = Document.objects.filter(products__in=[self.product],
                                          slug=result[5])
            if not doc.count():
                return None

        helpfulness = Markup('<span title="%+.1f%%">%.1f%%</span>' %
                             (float(result[3]) * 100, float(result[2]) * 100))
        return dict(title=result[6].decode('utf-8'),
                    url=reverse('wiki.document_revisions',
                                args=[unicode(result[5], "utf-8")],
                                locale=self.locale),
                    visits=int(float(result[1])),
                    custom=True,
                    column4_data=helpfulness)
Example #13
def offline_admin(request):
    redis = redis_client('default')

    action = request.POST.get('action')
    if action == 'generate_all':
        log.info('Requested regenerating all bundles.')
        build_kb_bundles()
        messages.add_message(request, messages.SUCCESS,
                             'Bundles regenerated!')
    elif action == 'delete_all':
        if redis.delete(*redis.keys('osumo:*')):
            messages.add_message(request, messages.SUCCESS,
                                 'Deleted all bundles!')
        else:
            messages.add_message(request, messages.ERROR,
                                 'Bundle deleting failed.')

    keys = redis.keys('osumo:*')
    bundles = []
    totalsize = 0
    for key in keys:
        bundle = {}
        # reverse operation to redis_bundle_name, the schema is:
        # osumo:locale~product
        tmp = key.split(':')[1].split('~')

        locale, bundle['product'] = tuple(tmp)
        # to get the non .lower()'ed version.
        locale = settings.LANGUAGE_URL_MAP[locale]
        bundle['locale'] = settings.LOCALES[locale].english

        bundle['hash'] = redis.hget(key, 'hash')

        updated = redis.hget(key, 'updated')
        if updated is not None:
            updated = float(redis.hget(key, 'updated'))
            updated = datetime.datetime.fromtimestamp(updated)
            bundle['updated'] = updated.strftime('%Y-%m-%d %H:%M:%S')
        else:
            bundle['updated'] = 'N/A'

        bundle['size'] = round(len(redis.hget(key, 'bundle')) / 1024.0, 2)
        totalsize += bundle['size']

        bundles.append(bundle)

    # Sorting by locale and then product
    bundles.sort(key=lambda x: x['locale'] + x['product'])

    totalsize /= 1024
    totalsize = round(totalsize, 2)

    return render(request,
                  'admin/offline.html',
                  {'title': 'Offline SUMO Administration',
                   'bundles': bundles,
                   'totalsize': totalsize})
Example #14
def reindex_with_scoreboard(mapping_type_names):
    """Reindex all instances of a given mapping type with celery tasks.

    This will use Redis to keep track of outstanding tasks so nothing
    gets screwed up by two jobs running at once.
    """
    # TODO: If this gets fux0rd, then it's possible this could be
    # non-zero and we really want to just ignore it. Need the ability
    # to ignore it.
    try:
        client = redis_client('default')
        val = client.get(OUTSTANDING_INDEX_CHUNKS)
        if val is not None and int(val) > 0:
            raise ReindexError('There are %s outstanding chunks.' % val)

        # We don't know how many chunks we're building, but we do want
        # to make sure another reindex request doesn't slide in here
        # and kick off a bunch of chunks.
        #
        # There is a race condition here.
        client.set(OUTSTANDING_INDEX_CHUNKS, 1)
    except RedisError:
        log.warning('Redis not running. Can not check if there are '
                    'outstanding tasks.')

    batch_id = create_batch_id()

    # Break up all the things we want to index into chunks. This
    # chunkifies by class then by chunk size.
    chunks = []
    for cls, indexable in get_indexable(mapping_types=mapping_type_names):
        chunks.extend((cls, chunk) for chunk in chunked(indexable, CHUNK_SIZE))

    chunks_count = len(chunks)

    try:
        client = redis_client('default')
        client.set(OUTSTANDING_INDEX_CHUNKS, chunks_count)
    except RedisError:
        log.warning('Redis not running. Can\'t denote outstanding tasks.')

    for chunk in chunks:
        index = chunk[0].get_index()
        index_chunk_task.delay(index, batch_id, chunk)
Example #15
def landing(request):
    """Customer Care Landing page."""

    # Get a redis client
    redis = None
    try:
        redis = redis_client(name="default")
    except RedisError as e:
        log.error("Redis error: %s" % e)

    contributor_stats = redis and redis.get(settings.CC_TOP_CONTRIB_CACHE_KEY)
    if contributor_stats:
        contributor_stats = json.loads(contributor_stats)

    twitter_user = None
    if request.twitter.authed:
        try:
            credentials = request.twitter.api.verify_credentials()
        except (TwythonError, TwythonAuthError):
            # Bad oauth token. Create a new session so user re-auths.
            request.twitter = twitter.Session()
        else:
            twitter_user = credentials["screen_name"]

    yesterday = datetime.now() - timedelta(days=1)

    recent_replied_count = _count_answered_tweets(since=yesterday)

    return render(
        request,
        "customercare/landing.html",
        {
            "contributor_stats": contributor_stats,
            "canned_responses": get_common_replies(request.LANGUAGE_CODE),
            "tweets": _get_tweets(locale=request.LANGUAGE_CODE,
                                  filter="unanswered",
                                  https=request.is_secure()),
            "authed": request.user.is_authenticated and request.twitter.authed,
            "twitter_user": twitter_user,
            "filters": FILTERS,
            "filter": "unanswered",
            "time": datetime.now(),
            "goal": settings.CC_REPLIES_GOAL,
            "recent_replied_count": recent_replied_count,
        },
    )
Example #16
    def handle(self, **options):
        REDIS_KEY = settings.HELPFULVOTES_UNHELPFUL_KEY

        old_formatted = _get_old_unhelpful()
        final = _get_current_unhelpful(old_formatted)

        if final == {}:
            return

        def _mean(vals):
            """Argument: List of floats"""
            if len(vals) == 0:
                return None
            return sum(vals) / len(vals)

        def _bayes_avg(C, m, R, v):
            # Bayesian Average
            # C = mean vote, v = number of votes,
            # R = mean rating, m = minimum votes to list in topranked
            return (C * m + R * v) / (m + v)

        mean_perc = _mean(
            [float(final[key]['currperc']) for key in final.keys()])
        mean_total = _mean(
            [float(final[key]['total']) for key in final.keys()])

        #  TODO: Make this into namedtuples
        sorted_final = [(key, final[key]['total'], final[key]['currperc'],
                         final[key]['diffperc'],
                         _bayes_avg(mean_perc, mean_total,
                                    final[key]['currperc'],
                                    final[key]['total']))
                        for key in final.keys()]
        sorted_final.sort(key=lambda entry: entry[4])  # Sort by Bayesian Avg

        redis = redis_client('helpfulvotes')

        redis.delete(REDIS_KEY)

        max_total = max([b[1] for b in sorted_final])

        for entry in sorted_final:
            doc = Document.objects.get(pk=entry[0])
            redis.rpush(
                REDIS_KEY,
                (
                    u'%s::%s::%s::%s::%s::%s::%s' % (
                        entry[0],  # Document ID
                        entry[1],  # Total Votes
                        entry[2],  # Current Percentage
                        entry[3],  # Difference in Percentage
                        1 - (entry[1] / max_total),  # Graph Color
                        doc.slug,  # Document slug
                        doc.title,  # Document title
                    )))
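_bayes_avg() blends a document's own helpfulness R (over v votes) with the global mean C, weighted by the mean vote count m, so thinly-voted documents rank near the global mean instead of dominating the list. A worked example with illustrative numbers (C = 0.5 mean rating, m = 40.0 mean vote count):

print(_bayes_avg(0.5, 40.0, 0.2, 4))    # (20 + 0.8) / 44  = ~0.47: 4 votes, stays near C
print(_bayes_avg(0.5, 40.0, 0.2, 400))  # (20 + 80) / 440  = ~0.23: 400 votes, trusts R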
Example #17
def _process_answer_vote_chunk(data):
    """Save karma data for a chunk of answer votes."""
    redis = redis_client(name='karma')
    v_qs = AnswerVote.objects.select_related('answer')
    for vote in v_qs.filter(pk__in=data):
        if vote.helpful:
            action_class = AnswerMarkedHelpfulAction
        else:
            action_class = AnswerMarkedNotHelpfulAction
        action_class(vote.answer.creator_id, vote.created).save(async=False,
                                                                redis=redis)
Example #18
    def rows(self, max=None):
        REDIS_KEY = settings.HELPFULVOTES_UNHELPFUL_KEY
        try:
            redis = redis_client('helpfulvotes')
            length = redis.llen(REDIS_KEY)
            max_get = max or length
            output = redis.lrange(REDIS_KEY, 0, max_get)
        except RedisError as e:
            log.error('Redis error: %s' % e)
            output = []

        return [self._format_row(r) for r in output]
Example #19
def cache_most_unhelpful_kb_articles():
    """Calculate and save the most unhelpful KB articles in the past month."""

    REDIS_KEY = settings.HELPFULVOTES_UNHELPFUL_KEY

    old_formatted = _get_old_unhelpful()
    final = _get_current_unhelpful(old_formatted)

    if final == {}:
        return

    def _mean(vals):
        """Argument: List of floats"""
        if len(vals) == 0:
            return None
        return sum(vals) / len(vals)

    def _bayes_avg(C, m, R, v):
        # Bayesian Average
        # C = mean vote, v = number of votes,
        # R = mean rating, m = minimum votes to list in topranked
        return (C * m + R * v) / (m + v)

    mean_perc = _mean([float(final[key]['currperc']) for key in final.keys()])
    mean_total = _mean([float(final[key]['total']) for key in final.keys()])

    #  TODO: Make this into namedtuples
    sorted_final = [(key,
                     final[key]['total'],
                     final[key]['currperc'],
                     final[key]['diffperc'],
                     _bayes_avg(mean_perc, mean_total,
                                final[key]['currperc'],
                                final[key]['total']))
                    for key in final.keys()]
    sorted_final.sort(key=lambda entry: entry[4])  # Sort by Bayesian Avg

    redis = redis_client('helpfulvotes')

    redis.delete(REDIS_KEY)

    max_total = max([b[1] for b in sorted_final])

    for entry in sorted_final:
        doc = Document.objects.get(pk=entry[0])
        redis.rpush(REDIS_KEY, (u'%s::%s::%s::%s::%s::%s::%s' %
                                  (entry[0],  # Document ID
                                   entry[1],  # Total Votes
                                   entry[2],  # Current Percentage
                                   entry[3],  # Difference in Percentage
                                   1 - (entry[1] / max_total),  # Graph Color
                                   doc.slug,  # Document slug
                                   doc.title)))  # Document title
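Each entry pushed above is a single '::'-joined string; UnhelpfulReadout._format_row() in Example #12 splits it back apart by position. The field layout, with illustrative values:

# Illustrative values only, following the push order above:
row = u'42::120::0.25::-0.05::0.4::some-slug::Some Title'
(doc_id, total_votes, curr_perc, diff_perc,
 graph_color, slug, title) = row.split('::')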
Example #20
def _document_lock_check(document_id):
    """Check for a lock on a document.

    Returns the username of the user that has the page locked, or ``None`` if
    no user has a lock.
    """
    try:
        redis = redis_client(name='default')
        key = _document_lock_key.format(id=document_id)
        return redis.get(key)
    except RedisError as e:
        statsd.incr('redis.error')
        log.error('Redis error: %s' % e)
        return None
Example #21
def index_chunk_task(write_index, batch_id, chunk):
    """Index a chunk of things.

    :arg write_index: the name of the index to index to
    :arg batch_id: the name for the batch this chunk belongs to
    :arg chunk: a (class, id_list) of things to index
    """
    # Need to import Record here to prevent circular import
    from kitsune.search.models import Record

    cls, id_list = chunk

    task_name = '{0} {1} -> {2}'.format(cls.get_mapping_type_name(),
                                        id_list[0], id_list[-1])

    rec = Record.objects.create(
        starttime=datetime.datetime.now(),
        text=u'Batch: {0} Task: {1}: Reindexing into {2}'.format(
            batch_id, task_name, write_index))

    try:
        # Pin to master db to avoid replication lag issues and stale
        # data.
        pin_this_thread()

        index_chunk(cls, id_list, reraise=True)

    except Exception:
        rec.text = u'{0}: Errored out {1} {2}'.format(
            rec.text, sys.exc_type,
            sys.exc_value)[:255]  # Truncate at 255 chars.

        log.exception('Error while indexing a chunk')

        # Some exceptions aren't pickleable and we need this to throw
        # things that are pickleable.
        raise IndexingTaskError()

    finally:
        unpin_this_thread()
        rec.endtime = datetime.datetime.now()
        rec.save()

        try:
            client = redis_client('default')
            client.decr(OUTSTANDING_INDEX_CHUNKS, 1)
        except RedisError:
            # If Redis isn't running, then we just log that the task
            # was completed.
            log.info('Index task %s completed.', task_name)
Example #22
    def test_stored_in_redis(self):
        key = settings.CC_TOP_CONTRIB_CACHE_KEY
        try:
            redis = redis_client(name='default')
            # Other tests are lame and don't clean up after themselves.
            # This also verifies that Redis is alive and well.
            redis.delete(key)
        except RedisError:
            raise SkipTest

        get_customercare_stats()

        blob = redis.get(key)
        stats = json.loads(blob)
        eq_(len(stats), 2)
Example #23
def handle_reset(request):
    """Resets the redis scoreboard we use

    Why? The reason you'd want to reset it is if the system gets
    itself into a hosed state where the redis scoreboard says there
    are outstanding tasks, but there aren't. If you enter that state,
    this lets you reset the scoreboard.
    """
    try:
        client = redis_client('default')
        client.set(OUTSTANDING_INDEX_CHUNKS, 0)
    except RedisError:
        log.warning('Redis not running. Can not check if there are '
                    'outstanding tasks.')
    return HttpResponseRedirect(request.path)
Example #24
def _document_lock_steal(document_id, user_name, expire_time=60 * 15):
    """Lock a document for a user.

    Note that this does not check if the page is already locked, and simply
    sets the lock on the page.
    """
    try:
        redis = redis_client(name='default')
        key = _document_lock_key.format(id=document_id)
        it_worked = redis.set(key, user_name)
        redis.expire(key, expire_time)
        return it_worked
    except RedisError as e:
        statsd.incr('redis.error')
        log.error('Redis error: %s' % e)
        return False
Example #25
def landing(request):
    """Customer Care Landing page."""

    # Get a redis client
    redis = None
    try:
        redis = redis_client(name="default")
    except RedisError as e:
        statsd.incr("redis.errror")
        log.error("Redis error: %s" % e)

    contributor_stats = redis and redis.get(settings.CC_TOP_CONTRIB_CACHE_KEY)
    if contributor_stats:
        contributor_stats = json.loads(contributor_stats)
        statsd.incr("customercare.stats.contributors.hit")
    else:
        statsd.incr("customercare.stats.contributors.miss")

    twitter_user = None
    if request.twitter.authed:
        try:
            credentials = request.twitter.api.verify_credentials()
        except (TwythonError, TwythonAuthError):
            # Bad oauth token. Create a new session so user re-auths.
            request.twitter = twitter.Session()
        else:
            twitter_user = credentials["screen_name"]

    yesterday = datetime.now() - timedelta(days=1)

    recent_replied_count = _count_answered_tweets(since=yesterday)

    return render(
        request,
        "customercare/landing.html",
        {
            "contributor_stats": contributor_stats,
            "canned_responses": get_common_replies(request.LANGUAGE_CODE),
            "tweets": _get_tweets(locale=request.LANGUAGE_CODE, filter="unanswered", https=request.is_secure()),
            "authed": request.user.is_authenticated() and request.twitter.authed,
            "twitter_user": twitter_user,
            "filters": FILTERS,
            "filter": "unanswered",
            "goal": settings.CC_REPLIES_GOAL,
            "recent_replied_count": recent_replied_count,
        },
    )
Example #26
def _process_question_chunk(data):
    """Save karma data for a chunk of questions."""
    redis = redis_client(name='karma')
    q_qs = Question.objects.select_related('solution').defer('content')
    for question in q_qs.filter(pk__in=data):
        first = True
        a_qs = question.answers.order_by('created').select_related('creator')
        for answer in a_qs.values_list('creator', 'created'):
            AnswerAction(answer[0], answer[1]).save(async=False, redis=redis)
            if first:
                FirstAnswerAction(answer[0], answer[1]).save(async=False,
                                                             redis=redis)
                first = False
        soln = question.solution
        if soln:
            SolutionAction(soln.creator, soln.created).save(async=False,
                                                            redis=redis)
Example #27
def landing(request):
    """Customer Care Landing page."""

    # Get a redis client
    redis = None
    try:
        redis = redis_client(name='default')
    except RedisError as e:
        statsd.incr('redis.error')
        log.error('Redis error: %s' % e)

    contributor_stats = redis and redis.get(settings.CC_TOP_CONTRIB_CACHE_KEY)
    if contributor_stats:
        contributor_stats = json.loads(contributor_stats)
        statsd.incr('customercare.stats.contributors.hit')
    else:
        statsd.incr('customercare.stats.contributors.miss')

    twitter_user = None
    if request.twitter.authed:
        try:
            credentials = request.twitter.api.verify_credentials()
        except (TwythonError, TwythonAuthError):
            # Bad oauth token. Create a new session so user re-auths.
            request.twitter = twitter.Session()
        else:
            twitter_user = credentials['screen_name']

    yesterday = datetime.now() - timedelta(days=1)

    recent_replied_count = _count_answered_tweets(since=yesterday)

    return render(request, 'customercare/landing.html', {
        'contributor_stats': contributor_stats,
        'canned_responses': get_common_replies(request.LANGUAGE_CODE),
        'tweets': _get_tweets(locale=request.LANGUAGE_CODE,
                              filter='unanswered',
                              https=request.is_secure()),
        'authed': request.user.is_authenticated() and request.twitter.authed,
        'twitter_user': twitter_user,
        'filters': FILTERS,
        'filter': 'unanswered',
        'time': datetime.now(),
        'goal': settings.CC_REPLIES_GOAL,
        'recent_replied_count': recent_replied_count})
Example #28
    def rows(self, max=None):
        REDIS_KEY = settings.HELPFULVOTES_UNHELPFUL_KEY
        try:
            redis = redis_client('helpfulvotes')
            length = redis.llen(REDIS_KEY)
            max_get = max or length
            output = redis.lrange(REDIS_KEY, 0, max_get)
        except RedisError as e:
            log.error('Redis error: {0!s}'.format(e))
            output = []

        data = []
        for r in output:
            row = self._format_row(r)
            if row:
                data.append(row)

        return data
Example #29
def redis_info(request):
    """Admin view that displays redis INFO+CONFIG output for all backends."""
    redis_info = {}
    for key in django_settings.REDIS_BACKENDS.keys():
        redis_info[key] = {}
        client = redis_client(key)
        redis_info[key]["connection"] = django_settings.REDIS_BACKENDS[key]
        try:
            cfg = client.config_get()
            redis_info[key]["config"] = [{"key": k, "value": cfg[k]} for k in sorted(cfg)]
            info = client.info()
            redis_info[key]["info"] = [{"key": k, "value": info[k]} for k in sorted(info)]
        except ConnectionError:
            redis_info[key]["down"] = True

    return render_to_response(
        "kadmin/redis.html", {"redis_info": redis_info, "title": "Redis Information"}, RequestContext(request, {})
    )
Example #30
    def rows(self, max=None):
        REDIS_KEY = settings.HELPFULVOTES_UNHELPFUL_KEY
        try:
            redis = redis_client("helpfulvotes")
            length = redis.llen(REDIS_KEY)
            max_get = max or length
            output = redis.lrange(REDIS_KEY, 0, max_get)
        except RedisError as e:
            log.error("Redis error: %s" % e)
            output = []

        data = []
        for r in output:
            row = self._format_row(r)
            if row:
                data.append(row)

        return data
Example #31
def build_kb_bundles(products=('firefox-os', 'firefox', 'mobile')):
    redis = redis_client('default')

    if not redis:
        raise IOError('Redis not available. Cannot generate offline bundles.')

    start_time = time.time()
    size = 0

    products = [Product.objects.get(slug=p) for p in products]
    with statsd.timer('offline.build_kb_bundles.time_elapsed'):
        for locale in settings.SUMO_LANGUAGES:
            for product in products:
                with uselocale(locale):
                    bundle = merge_bundles(bundle_for_product(product, locale))

                size += len(
                    insert_bundle_into_redis(redis, product.slug, locale,
                                             bundle)[0])

    time_taken = time.time() - start_time
    log.info('Generated all offline bundles. '
             'Size: {0}. Took {1} seconds'.format(size, time_taken))
Example #32
def _document_lock_clear(document_id, user_name):
    """Remove a lock from a document.

    This would be used to indicate the given user no longer wants the page
    locked, so the lock should be cleared.

    If the `user` parameter does not match the current lock, the lock remains
    in place.

    Returns true if the lock was removed, false otherwise.
    """
    try:
        redis = redis_client(name='default')
        key = _document_lock_key.format(id=document_id)
        locked_by = redis.get(key)
        if locked_by == user_name:
            return redis.delete(key)
        else:
            return False
    except RedisError as e:
        statsd.incr('redis.error')
        log.error('Redis error: %s' % e)
        return False
Example #33
def bundle_meta(request):
    """This view is responsible for update checking."""
    if 'locale' not in request.GET or 'product' not in request.GET:
        return HttpResponseBadRequest(BAD_REQUEST, mimetype='application/json')

    locale = request.GET['locale']
    product = request.GET['product']

    name = redis_bundle_name(locale, product)
    try:
        redis = redis_client('default')
    except RedisError:
        return HttpResponse('{"error": "no bundles available"}',
                            mimetype='application/json',
                            status=503)

    bundle_hash = redis.hget(name, 'hash')

    if bundle_hash:
        u = {'hash': bundle_hash}
        return HttpResponse(json.dumps(u), mimetype='application/json')
    else:
        return HttpResponseNotFound(NOT_FOUND, mimetype='application/json')
Example #34
def redis_info(request):
    """Admin view that displays redis INFO+CONFIG output for all backends."""
    redis_info = {}
    for key in list(django_settings.REDIS_BACKENDS.keys()):
        redis_info[key] = {}
        client = redis_client(key)
        redis_info[key]["connection"] = django_settings.REDIS_BACKENDS[key]
        try:
            cfg = client.config_get()
            redis_info[key]["config"] = [
                {"key": k, "value": cfg[k]} for k in sorted(cfg)
            ]
            info = client.info()
            redis_info[key]["info"] = [
                {"key": k, "value": info[k]} for k in sorted(info)
            ]
        except ConnectionError:
            redis_info[key]["down"] = True

    return render_to_response(
        "kadmin/redis.html",
        {"redis_info": redis_info, "title": "Redis Information"},
        RequestContext(request, {}),
    )
Example #35
def get_customercare_stats():
    """
    Generate customer care stats from the Replies table.

    This gets cached in Redis as a sorted list of contributors, stored as JSON.

    Example Top Contributor data:

    [
        {
            'twitter_username': '******',
            'avatar': 'http://twitter.com/path/to/the/avatar.png',
            'avatar_https': 'https://twitter.com/path/to/the/avatar.png',
            'all': 5211,
            '1m': 230,
            '1w': 33,
            '1d': 3,
        },
        { ... },
        { ... },
    ]
    """
    if settings.STAGE:
        print ('Skipped get_customercare_stats(). '
               'Set settings.STAGE to False to run it for real.')
        return

    contributor_stats = {}

    now = datetime.now()
    one_month_ago = now - timedelta(days=30)
    one_week_ago = now - timedelta(days=7)
    yesterday = now - timedelta(days=1)

    for chunk in chunked(Reply.objects.all(), 2500, Reply.objects.count()):
        for reply in chunk:
            user = reply.twitter_username
            if user not in contributor_stats:
                raw = json.loads(reply.raw_json)
                if 'from_user' in raw:  # For tweets collected using v1 API
                    user_data = raw
                else:
                    user_data = raw['user']

                contributor_stats[user] = {
                    'twitter_username': user,
                    'avatar': user_data['profile_image_url'],
                    'avatar_https': user_data['profile_image_url_https'],
                    'all': 0, '1m': 0, '1w': 0, '1d': 0,
                }
            contributor = contributor_stats[reply.twitter_username]

            contributor['all'] += 1
            if reply.created > one_month_ago:
                contributor['1m'] += 1
                if reply.created > one_week_ago:
                    contributor['1w'] += 1
                    if reply.created > yesterday:
                        contributor['1d'] += 1

    sort_key = settings.CC_TOP_CONTRIB_SORT
    limit = settings.CC_TOP_CONTRIB_LIMIT
    # Sort by whatever is in settings, break ties with 'all'
    contributor_stats = sorted(contributor_stats.values(),
                               key=lambda c: (c[sort_key], c['all']),
                               reverse=True)[:limit]

    try:
        redis = redis_client(name='default')
        key = settings.CC_TOP_CONTRIB_CACHE_KEY
        redis.set(key, json.dumps(contributor_stats))
    except RedisError as e:
        statsd.incr('redis.error')
        log.error('Redis error: %s' % e)

    return contributor_stats
Example #36
    def handle(self, **options):
        """
        This gets cached in Redis as a sorted list of contributors, stored as JSON.

        Example Top Contributor data:

        [
            {
                'twitter_username': '******',
                'avatar': 'http://twitter.com/path/to/the/avatar.png',
                'avatar_https': 'https://twitter.com/path/to/the/avatar.png',
                'all': 5211,
                '1m': 230,
                '1w': 33,
                '1d': 3,
            },
            { ... },
            { ... },
        ]
        """
        if settings.STAGE:
            return

        contributor_stats = {}

        now = datetime.now()
        one_month_ago = now - timedelta(days=30)
        one_week_ago = now - timedelta(days=7)
        yesterday = now - timedelta(days=1)

        for chunk in chunked(Reply.objects.all(), 2500, Reply.objects.count()):
            for reply in chunk:
                user = reply.twitter_username
                if user not in contributor_stats:
                    raw = json.loads(reply.raw_json)
                    if "from_user" in raw:  # For tweets collected using v1 API
                        user_data = raw
                    else:
                        user_data = raw["user"]

                    contributor_stats[user] = {
                        "twitter_username": user,
                        "avatar": user_data["profile_image_url"],
                        "avatar_https": user_data["profile_image_url_https"],
                        "all": 0,
                        "1m": 0,
                        "1w": 0,
                        "1d": 0,
                    }
                contributor = contributor_stats[reply.twitter_username]

                contributor["all"] += 1
                if reply.created > one_month_ago:
                    contributor["1m"] += 1
                    if reply.created > one_week_ago:
                        contributor["1w"] += 1
                        if reply.created > yesterday:
                            contributor["1d"] += 1

        sort_key = settings.CC_TOP_CONTRIB_SORT
        limit = settings.CC_TOP_CONTRIB_LIMIT
        # Sort by whatever is in settings, break ties with 'all'
        contributor_stats = sorted(
            list(contributor_stats.values()),
            key=lambda c: (c[sort_key], c["all"]),
            reverse=True,
        )[:limit]

        try:
            redis = redis_client(name="default")
            key = settings.CC_TOP_CONTRIB_CACHE_KEY
            redis.set(key, json.dumps(contributor_stats))
        except RedisError as e:
            log.error("Redis error: %s" % e)

        return contributor_stats
Example #37
    except ES_EXCEPTIONS:
        pass

    try:
        write_stats = get_doctype_stats(write_index())
    except ES_EXCEPTIONS:
        pass

    try:
        indexes = get_indexes()
        indexes.sort(key=lambda m: m[0])
    except ES_EXCEPTIONS as e:
        error_messages.append('Error: {0}'.format(repr(e)))

    try:
        client = redis_client('default')
        outstanding_chunks = int(client.get(OUTSTANDING_INDEX_CHUNKS))
    except (RedisError, TypeError):
        pass

    recent_records = Record.uncached.order_by('-starttime')[:20]

    return render(
        request, 'admin/search_maintenance.html', {
            'title': 'Search',
            'es_deets': es_deets,
            'doctype_stats': stats,
            'doctype_write_stats': write_stats,
            'indexes': indexes,
            'read_index': read_index(),
            'write_index': write_index(),
Example #38
def handle_reindex(request):
    """Caculates and kicks off indexing tasks"""
    # This is truthy if the user wants us to delete and recreate
    # the index first.
    delete_index_first = bool(request.POST.get('delete_index'))

    if delete_index_first:
        # Coming from the delete form, so we reindex all models.
        mapping_types_to_index = None
    else:
        # Coming from the reindex form, so we reindex whatever we're
        # told.
        mapping_types_to_index = [
            name.replace('check_', '') for name in request.POST.keys()
            if name.startswith('check_')
        ]

    # TODO: If this gets fux0rd, then it's possible this could be
    # non-zero and we really want to just ignore it. Need the ability
    # to ignore it.
    try:
        client = redis_client('default')
        val = client.get(OUTSTANDING_INDEX_CHUNKS)
        if val is not None and int(val) > 0:
            raise ReindexError('There are %s outstanding chunks.' % val)

        # We don't know how many chunks we're building, but we do want
        # to make sure another reindex request doesn't slide in here
        # and kick off a bunch of chunks.
        #
        # There is a race condition here.
        client.set(OUTSTANDING_INDEX_CHUNKS, 1)
    except RedisError:
        log.warning('Redis not running. Can not check if there are '
                    'outstanding tasks.')

    batch_id = create_batch_id()

    # Break up all the things we want to index into chunks. This
    # chunkifies by class then by chunk size.
    chunks = []
    for cls, indexable in get_indexable(mapping_types=mapping_types_to_index):
        chunks.extend((cls, chunk) for chunk in chunked(indexable, CHUNK_SIZE))

    if delete_index_first:
        # The previous lines do a lot of work and take some time to
        # execute.  So we wait until here to wipe and rebuild the
        # index. That reduces the time that there is no index by a little.
        recreate_index()

    chunks_count = len(chunks)

    try:
        client = redis_client('default')
        client.set(OUTSTANDING_INDEX_CHUNKS, chunks_count)
    except RedisError:
        log.warning('Redis not running. Can\'t denote outstanding tasks.')

    for chunk in chunks:
        index_chunk_task.delay(write_index(), batch_id, chunk)

    return HttpResponseRedirect(request.path)
Example #39
def get_customercare_stats():
    """
    Generate customer care stats from the Replies table.

    This gets cached in Redis as a sorted list of contributors, stored as JSON.

    Example Top Contributor data:

    [
        {
            'twitter_username': '******',
            'avatar': 'http://twitter.com/path/to/the/avatar.png',
            'avatar_https': 'https://twitter.com/path/to/the/avatar.png',
            'all': 5211,
            '1m': 230,
            '1w': 33,
            '1d': 3,
        },
        { ... },
        { ... },
    ]
    """
    if settings.STAGE:
        return

    contributor_stats = {}

    now = datetime.now()
    one_month_ago = now - timedelta(days=30)
    one_week_ago = now - timedelta(days=7)
    yesterday = now - timedelta(days=1)

    for chunk in chunked(Reply.objects.all(), 2500, Reply.objects.count()):
        for reply in chunk:
            user = reply.twitter_username
            if user not in contributor_stats:
                raw = json.loads(reply.raw_json)
                if 'from_user' in raw:  # For tweets collected using v1 API
                    user_data = raw
                else:
                    user_data = raw['user']

                contributor_stats[user] = {
                    'twitter_username': user,
                    'avatar': user_data['profile_image_url'],
                    'avatar_https': user_data['profile_image_url_https'],
                    'all': 0,
                    '1m': 0,
                    '1w': 0,
                    '1d': 0,
                }
            contributor = contributor_stats[reply.twitter_username]

            contributor['all'] += 1
            if reply.created > one_month_ago:
                contributor['1m'] += 1
                if reply.created > one_week_ago:
                    contributor['1w'] += 1
                    if reply.created > yesterday:
                        contributor['1d'] += 1

    sort_key = settings.CC_TOP_CONTRIB_SORT
    limit = settings.CC_TOP_CONTRIB_LIMIT
    # Sort by whatever is in settings, break ties with 'all'
    contributor_stats = sorted(contributor_stats.values(),
                               key=lambda c: (c[sort_key], c['all']),
                               reverse=True)[:limit]

    try:
        redis = redis_client(name='default')
        key = settings.CC_TOP_CONTRIB_CACHE_KEY
        redis.set(key, json.dumps(contributor_stats))
    except RedisError as e:
        statsd.incr('redis.error')
        log.error('Redis error: %s' % e)

    return contributor_stats
Example #54
def handle_reindex(request):
    """Caculates and kicks off indexing tasks"""
    # This is truthy if the user wants us to delete and recreate
    # the index first.
    delete_index_first = bool(request.POST.get('delete_index'))

    if delete_index_first:
        # Coming from the delete form, so we reindex all models.
        mapping_types_to_index = None
    else:
        # Coming from the reindex form, so we reindex whatever we're
        # told.
        mapping_types_to_index = [name.replace('check_', '')
                                  for name in request.POST.keys()
                                  if name.startswith('check_')]

    # TODO: If this gets fux0rd, then it's possible this could be
    # non-zero and we really want to just ignore it. Need the ability
    # to ignore it.
    try:
        client = redis_client('default')
        val = client.get(OUTSTANDING_INDEX_CHUNKS)
        if val is not None and int(val) > 0:
            raise ReindexError('There are %s outstanding chunks.' % val)

        # We don't know how many chunks we're building, but we do want
        # to make sure another reindex request doesn't slide in here
        # and kick off a bunch of chunks.
        #
        # There is a race condition here.
        client.set(OUTSTANDING_INDEX_CHUNKS, 1)
    except RedisError:
        log.warning('Redis not running. Cannot check if there are '
                    'outstanding tasks.')

    batch_id = create_batch_id()

    # Break up all the things we want to index into chunks. This
    # chunkifies by class then by chunk size.
    chunks = []
    for cls, indexable in get_indexable(mapping_types=mapping_types_to_index):
        chunks.extend(
            (cls, chunk) for chunk in chunked(indexable, CHUNK_SIZE))

    if delete_index_first:
        # The previous lines do a lot of work and take some time to
        # execute.  So we wait until here to wipe and rebuild the
        # index. That reduces the time that there is no index by a little.
        recreate_index()

    chunks_count = len(chunks)

    try:
        client = redis_client('default')
        client.set(OUTSTANDING_INDEX_CHUNKS, chunks_count)
    except RedisError:
        log.warning("Redis not running. Can't record outstanding tasks.")

    for chunk in chunks:
        index_chunk_task.delay(write_index(), batch_id, chunk)

    return HttpResponseRedirect(request.path)
Example #55
def search(request):
    """Render the admin view containing search tools"""
    if not request.user.has_perm('search.reindex'):
        raise PermissionDenied

    error_messages = []
    stats = {}

    if 'reset' in request.POST:
        try:
            return handle_reset(request)
        except ReindexError as e:
            error_messages.append(u'Error: %s' % e.message)

    if 'reindex' in request.POST:
        try:
            return handle_reindex(request)
        except ReindexError as e:
            error_messages.append(u'Error: %s' % e.message)

    if 'recreate_index' in request.POST:
        try:
            return handle_recreate_index(request)
        except ReindexError as e:
            error_messages.append(u'Error: %s' % e.message)

    if 'delete_index' in request.POST:
        try:
            return handle_delete(request)
        except DeleteError as e:
            error_messages.append(u'Error: %s' % e.message)
        except ES_EXCEPTIONS as e:
            error_messages.append('Error: {0}'.format(repr(e)))

    stats = None
    write_stats = None
    es_deets = None
    indexes = []
    outstanding_chunks = None

    try:
        # TODO: SUMO has a single ES_URL and that's the ZLB and does
        # the balancing. If that ever changes and we have multiple
        # ES_URLs, then this should get fixed.
        es_deets = requests.get(settings.ES_URLS[0]).json()
    except requests.exceptions.RequestException:
        pass

    stats = {}
    for index in all_read_indexes():
        try:
            stats[index] = get_doctype_stats(index)
        except ES_EXCEPTIONS:
            stats[index] = None

    write_stats = {}
    for index in all_write_indexes():
        try:
            write_stats[index] = get_doctype_stats(index)
        except ES_EXCEPTIONS:
            write_stats[index] = None

    try:
        indexes = get_indexes()
        indexes.sort(key=lambda m: m[0])
    except ES_EXCEPTIONS as e:
        error_messages.append('Error: {0}'.format(repr(e)))

    try:
        client = redis_client('default')
        outstanding_chunks = int(client.get(OUTSTANDING_INDEX_CHUNKS))
    except (RedisError, TypeError):
        pass

    recent_records = Record.uncached.order_by('-starttime')[:100]

    outstanding_records = (Record.uncached.filter(
        endtime__isnull=True).order_by('-starttime'))

    index_groups = set(settings.ES_INDEXES.keys())
    index_groups |= set(settings.ES_WRITE_INDEXES.keys())

    index_group_data = [[group, read_index(group),
                         write_index(group)] for group in index_groups]

    return render(
        request, 'admin/search_maintenance.html', {
            'title': 'Search',
            'es_deets': es_deets,
            'doctype_stats': stats,
            'doctype_write_stats': write_stats,
            'indexes': indexes,
            'index_groups': index_groups,
            'index_group_data': index_group_data,
            'read_indexes': all_read_indexes,
            'write_indexes': all_write_indexes,
            'error_messages': error_messages,
            'recent_records': recent_records,
            'outstanding_records': outstanding_records,
            'outstanding_chunks': outstanding_chunks,
            'now': datetime.now(),
            'read_index': read_index,
            'write_index': write_index,
        })
Example #56
def setUp(self):
    super(OfflineViewTests, self).setUp()
    try:
        redis_client('default').flushdb()
    except RedisError:
        raise SkipTest
Example #57
def monitor(request):
    """View for services monitor."""
    status = {}

    # Note: To add a new component to the services monitor, do your
    # testing and then add a name -> list of output tuples map to
    # status.

    # Check memcached.
    memcache_results = []
    try:
        for cache_name, cache_props in settings.CACHES.items():
            result = True
            backend = cache_props['BACKEND']
            location = cache_props['LOCATION']

            # LOCATION can be a string or a list of strings
            if isinstance(location, basestring):
                location = location.split(';')

            if 'memcache' in backend:
                for loc in location:
                    # TODO: this doesn't handle unix: variant
                    ip, port = loc.split(':')
                    result = test_memcached(ip, int(port))
                    memcache_results.append(
                        (INFO, '%s:%s %s' % (ip, port, result)))

        if not memcache_results:
            memcache_results.append((ERROR, 'memcache is not configured.'))

        elif len(memcache_results) < 2:
            memcache_results.append(
                (ERROR, ('You should have at least 2 memcache servers. '
                         'You have %s.' % len(memcache_results))))

        else:
            memcache_results.append((INFO, 'memcached servers look good.'))

    except Exception as exc:
        memcache_results.append(
            (ERROR, 'Exception while looking at memcached: %s' % str(exc)))

    status['memcached'] = memcache_results

    # Check Libraries and versions
    libraries_results = []
    try:
        Image.new('RGB', (16, 16)).save(StringIO.StringIO(), 'JPEG')
        libraries_results.append((INFO, 'PIL+JPEG: Got it!'))
    except Exception as exc:
        libraries_results.append((ERROR, 'PIL+JPEG: Probably missing: '
                                  'Failed to create a jpeg image: %s' % exc))

    status['libraries'] = libraries_results

    # Check file paths.
    msg = 'We want read + write.'
    filepaths = (
        (settings.USER_AVATAR_PATH, os.R_OK | os.W_OK, msg),
        (settings.IMAGE_UPLOAD_PATH, os.R_OK | os.W_OK, msg),
        (settings.THUMBNAIL_UPLOAD_PATH, os.R_OK | os.W_OK, msg),
        (settings.GALLERY_IMAGE_PATH, os.R_OK | os.W_OK, msg),
        (settings.GALLERY_IMAGE_THUMBNAIL_PATH, os.R_OK | os.W_OK, msg),
        (settings.GALLERY_VIDEO_PATH, os.R_OK | os.W_OK, msg),
        (settings.GALLERY_VIDEO_THUMBNAIL_PATH, os.R_OK | os.W_OK, msg),
        (settings.GROUP_AVATAR_PATH, os.R_OK | os.W_OK, msg),
    )

    filepath_results = []
    for path, perms, notes in filepaths:
        path = os.path.join(settings.MEDIA_ROOT, path)
        path_exists = os.path.isdir(path)
        path_perms = os.access(path, perms)

        if path_exists and path_perms:
            filepath_results.append(
                (INFO,
                 '%s: %s %s %s' % (path, path_exists, path_perms, notes)))
        else:
            # Report bad paths too so problems surface in the monitor.
            filepath_results.append(
                (ERROR,
                 '%s: %s %s %s' % (path, path_exists, path_perms, notes)))

    status['filepaths'] = filepath_results

    # Check RabbitMQ.
    rabbitmq_results = []
    try:
        rabbit_conn = establish_connection(connect_timeout=5)
        rabbit_conn.connect()
        rabbitmq_results.append((INFO, 'Successfully connected to RabbitMQ.'))
    except (socket.error, IOError) as exc:
        rabbitmq_results.append(
            (ERROR, 'Error connecting to RabbitMQ: %s' % str(exc)))

    except Exception as exc:
        rabbitmq_results.append(
            (ERROR, 'Exception while looking at RabbitMQ: %s' % str(exc)))

    status['RabbitMQ'] = rabbitmq_results

    # Check ES.
    es_results = []
    try:
        es_utils.get_doctype_stats(es_utils.all_read_indexes()[0])
        es_results.append(
            (INFO, ('Successfully connected to ElasticSearch and index '
                    'exists.')))

    except es_utils.ES_EXCEPTIONS as exc:
        es_results.append((ERROR, 'ElasticSearch problem: %s' % str(exc)))

    except Exception as exc:
        es_results.append(
            (ERROR, 'Exception while looking at ElasticSearch: %s' % str(exc)))

    status['ElasticSearch'] = es_results

    # Check Celery.
    # start = time.time()
    # pong = celery.task.ping()
    # rabbit_results = r = {'duration': time.time() - start}
    # status_summary['rabbit'] = pong == 'pong' and r['duration'] < 1

    # Check Redis.
    redis_results = []
    if hasattr(settings, 'REDIS_BACKENDS'):
        for backend in settings.REDIS_BACKENDS:
            try:
                redis_client(backend)
                redis_results.append((INFO, '%s: Pass!' % backend))
            except RedisError:
                redis_results.append((ERROR, '%s: Fail!' % backend))
    status['Redis'] = redis_results

    status_code = 200

    status_summary = {}
    for component, output in status.items():
        if ERROR in [item[0] for item in output]:
            status_code = 500
            status_summary[component] = False
        else:
            status_summary[component] = True

    return render(request,
                  'services/monitor.html', {
                      'component_status': status,
                      'status_summary': status_summary
                  },
                  status=status_code)
Example #58
from django.conf import settings

from kitsune.customercare.cron import get_customercare_stats
from kitsune.sumo.redis_utils import redis_client, RedisError

try:
    print "Removing old data"
    redis = redis_client(name='default')
    redis.delete(settings.CC_TOP_CONTRIB_CACHE_KEY)

    print "Collecting new data."
    get_customercare_stats()

    print "Done"
except RedisError:
    print "This migration requires Redis to run."