def get(self):
    # Serve the cached metrics archive; rebuild it on a cache miss.
    # Requires `import time` and the App Engine memcache service.
    datak = memcache.get('metrics')
    if datak is None:
        tdata = []
        measurements = Measurement.query()
        for m in measurements:
            mdict = m.to_dict()
            # Datetimes are not JSON-serializable, so render them as strings.
            mdict['timestamp'] = mdict['timestamp'].strftime("%Y-%m-%d %H:%M:%S")
            if mdict['updated']:
                mdict['updated'] = mdict['updated'].strftime("%Y-%m-%d %H:%M:%S")
            tdata.append(mdict)
        datak = self._to_blobstore(tdata)
    self.response.headers['Content-Type'] = 'application/zip'
    # Name the download after the epoch time of the last rebuild.
    stamp = str(int(time.mktime(memcache.get('updated').timetuple())))
    self.send_blob(datak, save_as='metrics-{}.zip'.format(stamp))
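# _to_blobstore is referenced above but not shown. Below is a minimal sketch
# (a method on the same handler class), assuming the data is serialized to
# JSON, zipped, and written via the deprecated App Engine Files API; the
# archive name is made up, and only the cache keys ('metrics', 'updated')
# are taken from the reads in get() above.
import json
import zipfile
from datetime import datetime
from StringIO import StringIO

from google.appengine.api import files, memcache


def _to_blobstore(self, tdata):
    # Serialize the measurements and compress them into a zip archive.
    buf = StringIO()
    archive = zipfile.ZipFile(buf, 'w', zipfile.ZIP_DEFLATED)
    archive.writestr('metrics.json', json.dumps(tdata))
    archive.close()
    # Write the archive to the blobstore via the Files API.
    file_name = files.blobstore.create(mime_type='application/zip')
    with files.open(file_name, 'a') as f:
        f.write(buf.getvalue())
    files.finalize(file_name)
    blob_key = files.blobstore.get_blob_key(file_name)
    # Cache the key and a freshness stamp so get() can serve without rebuilding.
    memcache.set('metrics', blob_key)
    memcache.set('updated', datetime.now())
    return blob_key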
def get(self):
    # Lower bound: only return data from the last seven days.
    lower_bound = datetime.now() + timedelta(days=-7)
    dosables = Dosable.query(Dosable.timestamp > lower_bound).fetch()
    measurements = (Measurement.query(Measurement.timestamp > lower_bound)
                    .fetch())
    parallel_switches = (ParallelSwitch
                         .query(ParallelSwitch.timestamp > lower_bound)
                         .fetch())
    # itertools.groupby only groups consecutive items, so sort each
    # category by type first.
    for data_group in [dosables, measurements, parallel_switches]:
        data_group.sort(key=lambda d: d.type)
    ret = {
        category: {type_: [instance.to_json() for instance in instances]
                   for type_, instances in itertools.groupby(
                       data_group, key=lambda x: x.type)}
        for category, data_group in [('dosables', dosables),
                                     ('measurements', measurements),
                                     ('parallel_switches', parallel_switches)]
    }
    self.out_json(ret)
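# out_json is not shown either. A plausible helper, assuming a webapp2-style
# handler (this implementation is an assumption, not confirmed by the source):
import json


def out_json(self, data):
    # Serialize to JSON and label the response accordingly.
    self.response.headers['Content-Type'] = 'application/json'
    self.response.write(json.dumps(data))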
def _get_count(self):
    # Total number of Measurement entities in the datastore.
    query = Measurement.query()
    return query.count()
def _get_earliest(self):
    # Timestamp of the oldest Measurement; assumes at least one entity
    # exists (get() returns None on an empty result set).
    query = Measurement.query()
    return query.order(Measurement.timestamp).get().timestamp
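# A minimal sketch of how the two helpers above might be exposed; the handler
# class, route, and response shape here are hypothetical.
import json

import webapp2


class StatsHandler(webapp2.RequestHandler):
    # Assumes _get_count and _get_earliest (defined above) live on this class.
    def get(self):
        stats = {
            'count': self._get_count(),
            'earliest': self._get_earliest().strftime("%Y-%m-%d %H:%M:%S"),
        }
        self.response.headers['Content-Type'] = 'application/json'
        self.response.write(json.dumps(stats))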