def update_visitors_metric():
    """Get new visitor data from webtrends and save."""
    # Start updating the day after the last updated.
    latest_metric = _get_latest_metric(VISITORS_METRIC_CODE)
    if latest_metric is not None:
        latest_metric_date = latest_metric.start
    else:
        latest_metric_date = date(2011, 1, 1)
    start = latest_metric_date + timedelta(days=1)

    # Collect up until yesterday.
    end = date.today() - timedelta(days=1)

    # Get the visitor data from webtrends.
    visitors = Webtrends.visits(start, end)

    # Create the metrics.
    metric_kind = MetricKind.objects.get(code=VISITORS_METRIC_CODE)
    for date_str, visits in visitors.items():
        day = datetime.strptime(date_str, '%Y-%m-%d').date()
        Metric.objects.create(
            kind=metric_kind,
            start=day,
            end=day + timedelta(days=1),
            value=visits)
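# _get_latest_metric() is referenced above but not shown in these snippets. A
# minimal sketch of what it is assumed to do, modeled on the inline query in
# the try/except variant of update_visitors_metric() below: return the most
# recent Metric row of the given kind, or None when no metrics exist yet.
def _get_latest_metric(metric_code):
    """Return the latest Metric of the given kind, or None if there are none."""
    try:
        return Metric.objects.filter(
            kind__code=metric_code).order_by('-start')[0]
    except IndexError:
        return None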
def update_visitors_metric():
    """Get new visitor data from webtrends and save."""
    try:
        # Get the latest metric value.
        last_metric = Metric.objects.filter(
            kind__code=VISITORS_METRIC_CODE).order_by('-start')[0]
        # Start updating the day after the last updated.
        start = last_metric.start + timedelta(days=1)
    except IndexError:
        # There are no metrics yet, start from 2011-01-01.
        start = date(2011, 1, 1)

    # Collect up until yesterday.
    end = date.today() - timedelta(days=1)

    # Get the visitor data from webtrends.
    visitors = Webtrends.visits(start, end)

    # Create the metrics.
    metric_kind = MetricKind.objects.get(code=VISITORS_METRIC_CODE)
    for date_str, visits in visitors.items():
        day = datetime.strptime(date_str, '%Y-%m-%d').date()
        Metric.objects.create(
            kind=metric_kind,
            start=day,
            end=day + timedelta(days=1),
            value=visits)
def test_visits(self, key_metrics):
    """Test Webtrends.visits()."""
    key_metrics.return_value = KEY_METRICS_JSON_RESPONSE
    visits = Webtrends.visits(date(2012, 1, 1), date(2012, 1, 7))
    eq_(7, len(visits))
    eq_(495974, visits['2012-01-01'])
    eq_(529301, visits['2012-01-07'])
def test_visits_by_locale(self, request):
    """Test Webtrends.visits_by_locale()."""
    request.return_value = L10N_METRICS_JSON_RESPONSE
    visits = Webtrends.visits_by_locale(date(2012, 2, 11),
                                        date(2012, 3, 11))
    eq_(77, len(visits))
    eq_(7561779.0, visits['en-US'])
    eq_(815609.0, visits['es'])
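# For reference, the return shapes the two tests above rely on (illustrative
# values copied from the assertions, not from the Webtrends client itself):
# Webtrends.visits() maps ISO date strings to visit counts, and
# Webtrends.visits_by_locale() maps locale codes to visit counts.
EXAMPLE_VISITS = {'2012-01-01': 495974, '2012-01-07': 529301}
EXAMPLE_VISITS_BY_LOCALE = {'en-US': 7561779.0, 'es': 815609.0}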
def update_l10n_metric():
    """Calculate new l10n coverage numbers and save.

    L10n coverage is a measure of the amount of translations that are
    up to date, weighted by the number of visits for each locale.

    The "algorithm" (see Bug 727084):
    SUMO visits = Total SUMO visits for the last 30 days;
    Total up to date = 0;

    For each locale {
        Total up to date = Total up to date +
            ((Number of up to date articles from the en-US top 50 visited) / 50) *
            (Visitors for that locale / SUMO visits);
    }

    An up to date article is any of the following:
    * An en-US article (by definition it is always up to date)
    * The latest en-US revision has been translated
    * There are only new revisions with TYPO_SIGNIFICANCE not translated
    * There is only one revision of MEDIUM_SIGNIFICANCE not translated
    """
    # Get the top 60 visited articles. We will only use the top 50
    # but a handful aren't localizable so we get some extras.
    top_60_docs = _get_top_docs(60)

    # Get the visits to each locale in the last 30 days.
    end = date.today() - timedelta(days=1)  # yesterday
    start = end - timedelta(days=30)
    locale_visits = Webtrends.visits_by_locale(start, end)

    # Total visits.
    total_visits = sum(locale_visits.itervalues())

    # Calculate the coverage.
    coverage = 0
    for locale, visits in locale_visits.iteritems():
        if locale == settings.WIKI_DEFAULT_LANGUAGE:
            num_docs = MAX_DOCS_UP_TO_DATE
            up_to_date_docs = MAX_DOCS_UP_TO_DATE
        else:
            up_to_date_docs, num_docs = _get_up_to_date_count(
                top_60_docs, locale)

        if num_docs and total_visits:
            coverage += ((float(up_to_date_docs) / num_docs) *
                         (float(visits) / total_visits))

    # Save the value to Metric table.
    metric_kind = MetricKind.objects.get(code=L10N_METRIC_CODE)
    day = date.today()
    Metric.objects.create(
        kind=metric_kind,
        start=day,
        end=day + timedelta(days=1),
        value=int(coverage * 100))  # Store as a % int.
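# _get_up_to_date_count() is called above but not shown in these snippets. The
# variant of update_l10n_metric() further down inlines equivalent logic; this
# sketch simply mirrors that loop (assuming MAX_DOCS_UP_TO_DATE == 50): walk
# the top documents, cap at 50 localizable ones, and count translations whose
# current revision is at least as new as the latest localizable en-US revision.
def _get_up_to_date_count(top_60_docs, locale):
    up_to_date_docs = 0
    num_docs = 0
    for doc in top_60_docs:
        if num_docs == MAX_DOCS_UP_TO_DATE:
            # Stop at the top 50 localizable documents.
            break
        if not doc.is_localizable:
            # Skip non-localizable documents.
            continue
        num_docs += 1
        translation = doc.translated_to(locale)
        if (translation and translation.current_revision_id and
                translation.current_revision_id >=
                doc.latest_localizable_revision_id):
            up_to_date_docs += 1
    return up_to_date_docs, num_docs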
def update_l10n_metric():
    """Calculate new l10n coverage numbers and save.

    L10n coverage is a measure of the amount of translations that are
    up to date, weighted by the number of visits for each locale.

    The "algorithm" (see Bug 727084):
    SUMO visits = Total non-en-US SUMO visits for the last 3 months;
    Total translated = 0;

    For each locale other than en-US {
        Total translated = Total translated +
            ((Number of updated articles from the en-US top 50 visited) / 50) *
            (Visitors for that locale / SUMO visits);
    }
    """
    # Get the top 60 visited articles. We will only use the top 50
    # but a handful aren't localizable so we get some extras.
    top_60_docs = _get_top_docs(60)

    # Get the visits to each locale in the last 90 days.
    end = date.today() - timedelta(days=1)  # yesterday
    start = end - timedelta(days=90)
    locale_visits = Webtrends.visits_by_locale(start, end)

    # Discard en-US.
    locale_visits.pop('en-US')

    # Total non-en-US visits.
    total_visits = sum(locale_visits.itervalues())

    # Calculate the coverage.
    coverage = 0
    for locale, visits in locale_visits.iteritems():
        up_to_date_docs = 0
        num_docs = 0
        for doc in top_60_docs:
            if num_docs == 50:
                # Stop at 50 documents.
                break

            if not doc.is_localizable:
                # Skip non-localizable documents.
                continue

            num_docs += 1

            cur_rev_id = doc.latest_localizable_revision_id
            translation = doc.translated_to(locale)
            if (translation and translation.current_revision_id and
                    translation.current_revision_id >= cur_rev_id):
                up_to_date_docs += 1

        if num_docs and total_visits:
            coverage += ((float(up_to_date_docs) / num_docs) *
                         (float(visits) / total_visits))

    # Save the value to Metric table.
    metric_kind = MetricKind.objects.get(code=L10N_METRIC_CODE)
    day = date.today()
    Metric.objects.create(
        kind=metric_kind,
        start=day,
        end=day + timedelta(days=1),
        value=int(coverage * 100))  # Store as a % int.
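# Worked example of the weighting in the docstring above, with made-up numbers
# (two non-en-US locales sharing 1,000,000 non-en-US visits):
#   de: 40 of its 50 counted articles up to date, 600,000 visits
#       -> contributes (40/50) * (600000/1000000) = 0.48
#   es: 25 of 50 up to date, 400,000 visits
#       -> contributes (25/50) * (400000/1000000) = 0.20
#   coverage = 0.48 + 0.20 = 0.68, saved to the Metric table as the integer 68.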
def json_for(cls, period):
    """Return the JSON-formatted WebTrends stats for the given period."""
    start, end = period_dates()[period]
    return Webtrends.wiki_report(start, end)
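# period_dates() is not shown in these snippets; all json_for() relies on is
# that it maps a period name to a (start, end) date pair. A hypothetical sketch
# of that shape (the period names and ranges here are assumptions, not the real
# mapping):
def period_dates():
    today = date.today()
    return {
        'last_week': (today - timedelta(days=7), today),
        'last_month': (today - timedelta(days=30), today),
    }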