Example #1
def getCounters(self, from_time, until_time):
  result = {}
  # Round the window onto the counter slot grid; start at the first full
  # slot after from_time.
  start = counter_names.getEpochRounded(
      from_time + datetime.timedelta(0, config.MIN_SLOT_SIZE))
  end = counter_names.getEpochRounded(until_time)
  stats = memcache.get_stats()
  for key in stats:
    counter = "AppEngine.Memcache." + key
    if counter not in result:
      result[counter] = {}
    # Memcache stats are point-in-time values, so repeat the current
    # reading for every slot in the requested window.
    current = start
    while current <= end:
      result[counter][current] = stats[key]
      current += config.MIN_SLOT_SIZE
  return result
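For context, Example #5 below calls this method as memcache_export.MemcacheExport().getCounters(last_export_date, this_export_date). A minimal usage sketch, assuming the ProdEagle counter_names/config modules and the App Engine memcache API are importable (the one-hour window is an arbitrary choice):

    import datetime

    until = datetime.datetime.utcnow()
    since = until - datetime.timedelta(hours=1)
    counters = memcache_export.MemcacheExport().getCounters(since, until)
    # counters looks like:
    # {"AppEngine.Memcache.hits": {<slot epoch>: <value>, ...}, ...}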
Example #2
def append(self, end_time, key, value, result):
  if key not in result:
    result[key] = {}
  # Sum all values that fall into the same time slot.
  slot = counter_names.getEpochRounded(end_time)
  if slot not in result[key]:
    result[key][slot] = 0
  result[key][slot] += value
Example #3
def append(self, end_time, key, value, result):
    if key not in result:
        result[key] = {}
    # Collect the individual values per time slot instead of summing them.
    slot = counter_names.getEpochRounded(end_time)
    if slot not in result[key]:
        result[key][slot] = []
    result[key][slot].append(value)
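Examples #2 and #3 differ only in how they accumulate values for a slot: the first sums them, the second keeps every raw value (useful if averages or percentiles are computed later). A minimal sketch of the two resulting structures, assuming appender is an instance of the defining class and both timestamps round to a hypothetical slot 1200:

    result = {}
    appender.append(t1, "Requests.Latency", 30, result)
    appender.append(t2, "Requests.Latency", 50, result)
    # Example #2 (summing): {"Requests.Latency": {1200: 80}}
    # Example #3 (list):    {"Requests.Latency": {1200: [30, 50]}}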
Example #4
def incrBatch(counters, save_stats=config.SAVE_PRODEAGLE_STATS):
  try:
    cnm = counter_names.getDefaultCounterNamesManager()
    slot = counter_names.getEpochRounded()
    # Prefix every counter with the current time slot so that values from
    # different slots never collide in memcache.
    prefixed_counters = {}
    for name in counters:
      prefixed_counters[str(slot) + name] = counters[name]
    save_in_between_name = None
    if config.SAVE_IN_BETWEEN:
      save_in_between_name = ("save_in_between_%d" %
                              counter_names.getEpochRounded(utc_datetime=None,
                                  slot_size=config.SAVE_IN_BETWEEN))
      prefixed_counters[save_in_between_name] = 1
    existing = memcache.offset_multi(prefixed_counters,
                                     namespace=cnm.namespace,
                                     initial_value=0)
    # A counter whose value after the offset equals its increment started at
    # zero, i.e. it is new in this slot; remember it so its name gets saved.
    new_counter_names = []
    for name in counters:
      if counters[name] == existing[str(slot) + name]:
        new_counter_names.append(name)
    (data_store_access, n_added_names) = cnm.addIfNew(new_counter_names)
    if config.SAVE_IN_BETWEEN and existing[save_in_between_name] == 1:
      try:
        taskqueue.Task(url=config.PRODEAGLE_HARVEST_URL,
                       params={"save_in_between": "1"},
                       countdown=config.SAVE_IN_BETWEEN,
                       name="prodeagle-" + save_in_between_name).add()
      except Exception:
        # A task with this name may already exist; duplicates are fine.
        pass
    if save_stats:
      # Use a fresh Batch for the meta-counters instead of rebinding the
      # "counters" argument, which the except clause below still reports.
      stats_batch = Batch()
      if data_store_access:
        stats_batch.incr("ProdEagle.Datastore.ReadAccess")
      if n_added_names:
        stats_batch.incr("ProdEagle.NewNames", n_added_names)
        stats_batch.incr("ProdEagle.Datastore.WriteAccess")
      if config.SAVE_IN_BETWEEN and existing[save_in_between_name] == 1:
        stats_batch.incr("ProdEagle.SaveInBetween")
      stats_batch.commit(save_stats=False)
  except Exception:
    logging.warning("Couldn't increase the following counters: %s"
                    % ", ".join(counters.keys()))
Example #5
    def createReport(self, production_call=False):
        namespace = namespace_manager.get_namespace()
        try:
            cnm = counter_names.getDefaultCounterNamesManager()
            namespace_manager.set_namespace(cnm.namespace)

            last_export_date, this_export_date = self.getAndSetExporDates()

            slot = counter_names.getEpochRounded(
                last_export_date -
                datetime.timedelta(0, config.MAX_CLOCK_SKEW))
            result = {
                "time": int(time.mktime(this_export_date.timetuple())),
                "counters": {},
                "ms_of_data_lost": 0,
                "version": 1.0
            }
            result["all_data_inaccurate"] = self.wasDataLostSinceLastHarvest(
                cnm.namespace, slot, True)
            all_keys = cnm.all(force_reload=True)
            while slot <= counter_names.getEpochRounded(this_export_date):
                gap = time.time()
                slot_updates = memcache.get_multi(all_keys,
                                                  key_prefix=str(slot),
                                                  namespace=cnm.namespace)
                # NOTE(andrin): Between get_multi & delete_multi we lose all updates!
                memcache.delete_multi(slot_updates.keys(),
                                      key_prefix=str(slot),
                                      namespace=cnm.namespace)
                result["ms_of_data_lost"] = max(
                    int((time.time() - gap) * 1000), result["ms_of_data_lost"])
                for counter in slot_updates:
                    if slot_updates[counter]:
                        self.addCounterToResult(counter, slot,
                                                slot_updates[counter],
                                                result["counters"])
                slot += config.MIN_SLOT_SIZE

            result["all_data_inaccurate"] |= self.wasDataLostSinceLastHarvest(
                cnm.namespace, slot)

            if config.APPSTATS_ENABLE:
                appstats = appstats_export.AppStatsExport().getCounters(
                    last_export_date, this_export_date)
                self.addCountersToResult(appstats, result["counters"])
            if config.ERROR_LOG_EXPORT_ENABLE or config.LOG_EXPORT_ENABLE:
                logexport = logservice_export.LogServiceExport()
                if config.ERROR_LOG_EXPORT_ENABLE:
                    self.addCountersToResult(
                        logexport.getErrorLogCounters(last_export_date,
                                                      this_export_date),
                        result["counters"])
                if config.LOG_EXPORT_ENABLE:
                    self.addCountersToResult(
                        logexport.getLogCounters(last_export_date,
                                                 this_export_date),
                        result["counters"])
            if config.MEMCACHE_EXPORT_ENABLE:
                self.addCountersToResult(
                    memcache_export.MemcacheExport().getCounters(
                        last_export_date, this_export_date),
                    result["counters"])

            if not production_call:
                save = CountersSavedInBetween()
                save.counters = simplejson.dumps(result["counters"])
                save.all_data_inaccurate = result["all_data_inaccurate"]
                save.ms_of_data_lost = result["ms_of_data_lost"]
                save.put()
                result = {
                    "time": result["time"],
                    "counters": {},
                    "ms_of_data_lost": 0,
                    "all_data_inaccurate": False,
                    "version": result["version"]
                }
                logging.info("Saved counters in between!")
            if self.request.get("save_in_between"):
                return

            for saved in CountersSavedInBetween.all():
                saved_counters = simplejson.loads(saved.counters)
                self.addCountersToResult(saved_counters, result["counters"])
                result["ms_of_data_lost"] += saved.ms_of_data_lost
                result["all_data_inaccurate"] |= saved.all_data_inaccurate
                if production_call:
                    saved.delete()

            if production_call or self.request.get("json"):
                self.response.headers[
                    'Content-Type'] = "text/plain; charset=utf-8"
                self.response.out.write(
                    simplejson.dumps(result, sort_keys=True, indent=2))
            else:
                slot = counter_names.getEpochRounded()
                for key in all_keys:
                    self.addCounterToResult(key, slot, 0, result["counters"])
                self.response.out.write("<h3>Data since last export</h3>")
                self.response.out.write(
                    "<a href='http://www.prodeagle.com'>Go to ProdEagle dashboard</a>"
                )
                self.response.out.write(
                    "<br><br><a href='%s'>Logout</a>" %
                    users.create_logout_url(self.request.url))
                for counter in sorted(result["counters"].keys()):
                    self.response.out.write(
                        "<br/><b>%s</b>: %d" %
                        (counter, sum(result["counters"][counter].values())))
        finally:
            namespace_manager.set_namespace(namespace)
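For context, createReport is meant to run inside a request handler (it uses self.request and self.response) reachable at config.PRODEAGLE_HARVEST_URL, the URL the task in Example #4 enqueues. A minimal wiring sketch for the old webapp framework; the Harvest handler name is a hypothetical stand-in for whatever class defines createReport:

    from google.appengine.ext import webapp

    # Hypothetical handler class whose get() calls self.createReport(...).
    application = webapp.WSGIApplication(
        [(config.PRODEAGLE_HARVEST_URL, Harvest)], debug=False)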