def reduce_hpo_date_metric_counts_to_database_buckets(reducer_key, reducer_values, version_id=None):
  """Emits a metrics bucket with counts for metrics for a given hpoId + date to SQL.

  Args:
     reducer_key: hpoId|date ('*' for hpoId for cross-HPO counts)
     reducer_values: list of participant_type|metric|count strings
     version_id: metrics version to stamp the bucket with; when None it is read
       from the running mapreduce's mapper params.
  """
  # defaultdict(int) is the idiomatic zero-initialized counter (same behavior
  # as the former `lambda: 0`).
  metrics_dict = collections.defaultdict(int)
  (hpo_id, date_str) = parse_tuple(reducer_key)
  if hpo_id == '*':
    # An empty hpoId marks the cross-HPO aggregate bucket.
    hpo_id = ''
  date = datetime.strptime(date_str, DATE_FORMAT)
  for reducer_value in reducer_values:
    (participant_type, metric_key, count) = parse_tuple(reducer_value)
    if metric_key == PARTICIPANT_KIND:
      # Overall participant totals only count registered participants.
      if participant_type == _REGISTERED_PARTICIPANT:
        metrics_dict[metric_key] += int(count)
    else:
      # Other metrics are namespaced by participant kind, e.g. "Kind.metric".
      kind = FULL_PARTICIPANT_KIND if participant_type == _FULL_PARTICIPANT else PARTICIPANT_KIND
      metrics_dict['%s.%s' % (kind, metric_key)] += int(count)

  version_id = version_id or context.get().mapreduce_spec.mapper.params.get('version_id')
  bucket = MetricsBucket(metricsVersionId=version_id,
                         date=date,
                         hpoId=hpo_id,
                         metrics=json.dumps(metrics_dict))
  # Use upsert here; when reducer shards retry, we will just replace any metrics bucket that was
  # written before, rather than failing.
  MetricsBucketDao().upsert(bucket)
 def post(self):
     """Return the active metrics buckets between two required dates.

     The JSON request body must carry non-empty 'start_date' and 'end_date'
     strings in DATE_FORMAT, no more than DAYS_LIMIT days apart.
     """
     resource = request.get_data()
     # Guard clause: reject an empty body up front instead of nesting the
     # whole handler under `if resource:`.
     if not resource:
         raise BadRequest("Request data is empty")

     resource_json = json.loads(resource)
     start_date_str = resource_json.get('start_date')
     end_date_str = resource_json.get('end_date')
     if not (start_date_str and end_date_str):
         raise BadRequest("Start date and end date should not be empty")

     try:
         start_date = datetime.datetime.strptime(
             start_date_str, DATE_FORMAT).date()
     except ValueError:
         raise BadRequest("Invalid start date: %s" % start_date_str)
     try:
         end_date = datetime.datetime.strptime(
             end_date_str, DATE_FORMAT).date()
     except ValueError:
         raise BadRequest("Invalid end date: %s" % end_date_str)

     # abs() so the limit applies regardless of which bound comes first.
     if abs((end_date - start_date).days) > DAYS_LIMIT:
         raise BadRequest("Difference between start date and end date "
                          "should not be greater than %s days" % DAYS_LIMIT)

     dao = MetricsBucketDao()
     buckets = dao.get_active_buckets(start_date, end_date)
     if buckets is None:
         return []
     return [dao.to_client_json(bucket) for bucket in buckets]
 def setUp(self):
     """Create the metrics DAOs and the expected bucket fixtures.

     Three client-JSON bucket fixtures are prepared: two dated today (one
     cross-HPO, one for PITT) and one dated tomorrow.
     """
     super(MetricsApiTest, self).setUp()
     self.version_dao = MetricsVersionDao()
     self.bucket_dao = MetricsBucketDao()
     self.today = datetime.date.today()
     self.tomorrow = self.today + datetime.timedelta(days=1)

     def make_bucket(day, entries, hpo_id=None):
         # Assemble the expected client-JSON shape for one bucket.
         facets = {'date': day.isoformat()}
         if hpo_id is not None:
             facets['hpoId'] = hpo_id
         return {'facets': facets, 'entries': entries}

     self.expected_bucket_1 = make_bucket(self.today, {'x': 'a'})
     self.expected_bucket_2 = make_bucket(self.today, {'x': 'b'}, hpo_id='PITT')
     self.expected_bucket_3 = make_bucket(self.tomorrow, {'y': 'c'})
# Exemplo n.º 4  (scrape-artifact separator, commented out to keep the file parseable)
# 0
 def post(self):
   """Return active metrics buckets, optionally bounded by start/end dates.

   Both 'start_date' and 'end_date' are optional in the JSON request body;
   an absent or empty body leaves both bounds as None for the DAO query.

   Raises:
      BadRequest: if a supplied date string does not match DATE_FORMAT.
   """
   dao = MetricsBucketDao()
   resource = request.get_data()
   start_date = None
   end_date = None
   if resource:
     resource_json = json.loads(resource)
     start_date_str = resource_json.get('start_date')
     end_date_str = resource_json.get('end_date')
     if start_date_str:
       try:
         start_date = datetime.datetime.strptime(start_date_str, DATE_FORMAT).date()
       except ValueError:
         raise BadRequest("Invalid start date: %s" % start_date_str)
     if end_date_str:
       try:
         end_date = datetime.datetime.strptime(end_date_str, DATE_FORMAT).date()
       except ValueError:
         # Fixed copy-paste bug: this error previously said "Invalid start date".
         raise BadRequest("Invalid end date: %s" % end_date_str)
   buckets = dao.get_active_buckets(start_date, end_date)
   if buckets is None:
     return []
   return [dao.to_client_json(bucket) for bucket in buckets]
# Exemplo n.º 5  (scrape-artifact separator, commented out to keep the file parseable)
# 0
 def setUp(self):
     """Run base-class setup, then build the metrics DAOs used by the tests."""
     super(MetricsDaoTest, self).setUp()
     # The two DAO constructions are independent of each other.
     self.metrics_bucket_dao = MetricsBucketDao()
     self.metrics_version_dao = MetricsVersionDao()