Example no. 1
    def test_tags_generation(self, mock_process):
        tags = {'tenant': 'test', 'type': 'order'}
        fields = {'amount': 100}

        timestamp = timezone.now()
        queue_analytics_record(timestamp=timestamp, tags=tags, fields=fields)
        mock_process.assert_called_with(timestamp, tags, fields)
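The `mock_process` argument above is injected by a `mock.patch` decorator that the excerpt does not show. A minimal sketch of that setup, assuming a hypothetical patch target and import path for `queue_analytics_record`:

from unittest import mock
from django.test import TestCase

from analytics.tasks import queue_analytics_record  # hypothetical import path

class QueueAnalyticsRecordTestCase(TestCase):
    @mock.patch('analytics.tasks.process')  # hypothetical patch target
    def test_tags_generation(self, mock_process):
        ...  # body as in the example above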
Example no. 2
    def store_engagement_aggregated_data(self, aggregated_engagement_data):
        tags = {'type': 'engagement_number_aggregate'}

        fields = {'start_date': self.start_date.isoformat(),
                  'end_date': self.end_date.isoformat()
                  }

        for item in self.engagement_parameters:
            fields[item] = aggregated_engagement_data[item]

        fields['engagement_number'] = aggregated_engagement_data['total_engaged'] + \
            aggregated_engagement_data['donations_anonymous']

        if getattr(properties, 'CELERY_RESULT_BACKEND', None):
            queue_analytics_record.delay(timestamp=self.end_date, tags=tags, fields=fields)
        else:
            queue_analytics_record(timestamp=self.end_date, tags=tags, fields=fields)
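All of these examples share the same dispatch idiom: push the record through Celery with .delay() when a result backend is configured, otherwise run the task synchronously. A self-contained sketch of that pattern (the `properties` stand-in and the task body are placeholders, not the project's actual code):

from types import SimpleNamespace
from celery import shared_task

# Stand-in for the project's tenant-aware settings object.
properties = SimpleNamespace(CELERY_RESULT_BACKEND=None)

@shared_task
def queue_analytics_record(timestamp, tags, fields):
    # Placeholder body; the real task hands the record to the analytics exporter.
    print(timestamp, tags, fields)

def record(timestamp, tags, fields):
    if getattr(properties, 'CELERY_RESULT_BACKEND', None):
        # Asynchronous path: enqueue the task on the Celery broker.
        queue_analytics_record.delay(timestamp=timestamp, tags=tags, fields=fields)
    else:
        # Synchronous fallback: run the task inline in the current process.
        queue_analytics_record(timestamp=timestamp, tags=tags, fields=fields)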
Example no. 3
    def update_status_stats(cls, tenant):
        logger.info('Updating Project Status Stats: {}'.format(tenant.name))
        timestamp = datetime.datetime.today().replace(hour=0, minute=0, second=0, microsecond=0)
        for status in ProjectPhase.objects.all():
            # TODO: Should we count statuses only where the project phase status is active?
            count = Project.objects.filter(status=status).count()
            logger.info('status: {}, count: {}'.format(status.name, count))
            tags = {
                'type': 'project_status_daily',
                'status': status.name,
                'status_slug': status.slug,
                'tenant': tenant.client_name,
            }
            fields = {
                'total': count,
            }
            if getattr(properties, 'CELERY_RESULT_BACKEND', None):
                queue_analytics_record.delay(timestamp=timestamp, tags=tags, fields=fields)
            else:
                queue_analytics_record(timestamp=timestamp, tags=tags, fields=fields)
Example no. 4
    def store_engagement_tenant_data(self):
        engagement_data = self.generate_engagement_data()
        aggregated_engagement_data = defaultdict(int)

        for client_name, data in engagement_data.iteritems():
            tags = {'type': 'engagement_number_tenant', 'tenant': client_name}
            fields = {'start_date': self.start_date.isoformat(),
                      'end_date': self.end_date.isoformat()
                      }
            for item in self.engagement_parameters:
                fields[item] = data[item]
                aggregated_engagement_data[item] += data[item]

            fields['engagement_number'] = data['total_engaged'] + data['donations_anonymous']

            if getattr(properties, 'CELERY_RESULT_BACKEND', None):
                queue_analytics_record.delay(timestamp=self.end_date, tags=tags, fields=fields)
            else:
                queue_analytics_record(timestamp=self.end_date, tags=tags, fields=fields)

        return aggregated_engagement_data
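Example no. 4 returns the per-parameter totals it accumulates, which Example no. 2's `store_engagement_aggregated_data` then writes as a single platform-wide record. The driver that ties them together is not shown; a plausible sketch (the method name is hypothetical):

    def store_engagement_data(self):
        # One record per tenant; returns the accumulated totals.
        aggregated = self.store_engagement_tenant_data()
        # One additional platform-wide record built from those totals.
        self.store_engagement_aggregated_data(aggregated)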
Example no. 5
    def test_log_file_generated(self):
        timestamp = datetime(2016, 12, 31, 23, 59, 59, 123456)
        queue_analytics_record(timestamp=timestamp,
                               tags=self.tags,
                               fields=self.fields)

        log_path = os.path.join(
            self.log_dir, '{}.log'.format(timestamp.strftime('%Y-%m-%d')))
        self.assertTrue(os.path.exists(log_path))

        # Get last line from log
        with open(log_path) as infile:
            for line in infile:
                if not line.strip('\n'):
                    continue
                last_line = line

        json_logs = json.loads(last_line)
        log = json_logs[0]
        self.assertEqual(len(json_logs), 1)
        self.assertEqual(log['fields'], self.fields)
        self.assertEqual(log['tags'], self.tags)
        self.assertEqual(log['time'], 1483228799123456)
        self.assertEqual(log['measurement'], 'saas')
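The asserted `time` value follows from the test's timestamp: the exporter appears to store epoch time in microseconds, treating the naive datetime as UTC. A quick check of that arithmetic:

import calendar
from datetime import datetime

ts = datetime(2016, 12, 31, 23, 59, 59, 123456)
# Seconds since the epoch (timetuple treated as UTC), shifted to microseconds.
micros = calendar.timegm(ts.timetuple()) * 10**6 + ts.microsecond
assert micros == 1483228799123456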
Example no. 6
    def test_file_exporter(self, mock_process):
        timestamp = timezone.now()
        queue_analytics_record(timestamp=timestamp,
                               tags=self.tags,
                               fields=self.fields)
        mock_process.assert_called_with(timestamp, self.tags, self.fields)