def poll_loop(s):
    """ Periodically update DB """
    while s._active:
        # Record activity only while the last checkin is within one period;
        # otherwise the user is idle and we skip the write.
        idle = timestamp.now() - s._last_active > s._period
        if not idle:
            s.poll()
        time.sleep(s._period)
def record(key, value, stamp=False):
    """Fire a stats event into prospective search matching.

    Args:
        key: event name to record.
        value: event payload.
        stamp: optional timestamp; any falsy value (the default) is replaced
            with timestamp.now().
    """
    # Renamed from `record` — the old local shadowed this function's own name.
    entry = StatsRecord(event=key, value=value)
    if not stamp:
        stamp = timestamp.now()
    entry.timestamp = stamp
    # Lazy %-args: the message is only formatted if INFO logging is enabled.
    logging.info("Firing stats off. Event: %s", key)
    prospective_search.match(entry, result_task_queue='stats')
def record(key, value, stamp=False):
    """Fire a stats event into prospective search matching.

    Args:
        key: event name to record.
        value: event payload.
        stamp: optional timestamp; any falsy value (the default) is replaced
            with timestamp.now().
    """
    # Renamed from `record` — the old local shadowed this function's own name.
    entry = StatsRecord(event=key, value=value)
    if not stamp:
        stamp = timestamp.now()
    entry.timestamp = stamp
    # Lazy %-args: the message is only formatted if INFO logging is enabled.
    logging.info("Firing stats off. Event: %s", key)
    prospective_search.match(entry, result_task_queue="stats")
def tokenValid(self):
    """Return True if a token is present and has not yet expired.

    A token expiring exactly now still counts as valid (>= semantics,
    matching the original < comparison).
    """
    # Idiom fix: compare against None with `is`, not `==`.
    if self.token is None or self.token_expiration is None:
        return False
    return self.token_expiration >= timestamp.now()
def get(self):
    """Write a JSON countdown (readable string + total seconds) until the
    next midnight to the response."""
    from datetime import timedelta  # local import: file-level imports not visible here

    stamp = timestamp.now()
    # BUG FIX: the old code used stamp.replace(day=stamp.day + 1, ...), which
    # raises ValueError on the last day of a month; timedelta arithmetic
    # rolls over month/year boundaries correctly.
    reset = stamp.replace(hour=0, minute=0, second=0, microsecond=0) + timedelta(days=1)
    countdown = reset - stamp
    response = {}
    response["readable"] = str(countdown)
    # timedelta.seconds only covers the sub-day remainder; add whole days back.
    response["seconds"] = countdown.seconds + (countdown.days * 24 * 3600)
    self.response.out.write(simplejson.dumps(response))
def getQuota():
    """Return the current QuotaData entity.

    Lookup order: memcache, then the most recent datastore row, then a brand
    new zero-amount entity (which is persisted before being returned).
    """
    date = timestamp.now()
    quota = memcache.get("quota")
    if quota is None:
        quota = QuotaData.all().order("-date").get()
        if quota is None:
            # BUG FIX: the old code assigned the result of .put() (a db.Key)
            # to `quota`, so callers and the cache received a Key instead of
            # the entity. Create, persist, and keep the entity itself.
            quota = QuotaData(amount=0, date=date)
            quota.put()
        memcache.set("quota", quota)
    return quota
def getPushshiftData(query):
    """Fetch up to 1000 r/india submissions whose title matches `query`,
    created before now, from the Pushshift API.

    Returns the list under the response's 'data' key.
    """
    before = timestamp.now()
    # BUG FIX: the old concatenation produced "&size=1000&&before=..." (a
    # doubled ampersand). NOTE(review): `query` is interpolated unescaped —
    # presumably callers pass URL-safe text; urlencode it if not.
    url = ('https://api.pushshift.io/reddit/search/submission/?title=' +
           str(query) + '&size=1000' + '&before=' + str(before) +
           '&subreddit=india')
    print(url)
    r = requests.get(url)
    data = json.loads(r.text)
    return data['data']
def save(self):
    """Persist this payment, then grant the buyer immunity until the next
    midnight and emit a 'payment' stats event."""
    from datetime import timedelta  # local import: file-level imports not visible here

    self.put()
    stamp = timestamp.now()
    # BUG FIX: replace(day=stamp.day + 1) raises ValueError on the last day
    # of a month; timedelta arithmetic handles month/year rollover.
    stamp = stamp.replace(hour=0, minute=0, second=0, microsecond=0) + timedelta(days=1)
    self.date = stamp
    self.user.setExplicitImmunity(stamp)
    stats.record("payment", simplejson.dumps({
        "user": self.user.user.email(),
        "item": self.item
    }))
def get(self):
    """Write a JSON countdown (readable string + total seconds) until the
    next midnight to the response."""
    from datetime import timedelta  # local import: file-level imports not visible here

    stamp = timestamp.now()
    # BUG FIX: the old code used stamp.replace(day=stamp.day + 1, ...), which
    # raises ValueError on the last day of a month; timedelta arithmetic
    # rolls over month/year boundaries correctly.
    reset = stamp.replace(hour=0, minute=0, second=0, microsecond=0) + timedelta(days=1)
    countdown = reset - stamp
    response = {}
    response['readable'] = str(countdown)
    # timedelta.seconds only covers the sub-day remainder; add whole days back.
    response['seconds'] = countdown.seconds + (countdown.days * 24 * 3600)
    self.response.out.write(simplejson.dumps(response))
def fetch_thread(thread_id, params, after_id):
    """Fetch one thread page from the Viafoura v2 API via its JSONP endpoint
    and return the result for the current request number."""
    ts = now()
    callback = f'Zepto{ts}'
    # NOTE(review): `request_num` is a free name — presumably a module-level
    # counter; confirm it is defined elsewhere in this file.
    query = {
        'json': build_thread_params(thread_id, request_num, params, after_id),
        'callback': callback,
        '_': ts,
    }
    raw = requests.get('https://api.viafoura.co/v2/', params=query)
    parsed = parse_response(raw.text, callback)
    return parsed['responses'][str(request_num)]['result']
def post(self):
    """Record an unconfirmed payment for the signed-in user."""
    user = auth.getCurrentUser()
    response = {}
    if not user:
        return
    # First purchase? Lazily create the user record.
    try:
        user_data = models.getUser(user)
    except models.UserDoesNotExistError:
        user_data = models.UserData(user=user).save()
    models.PaymentData(
        date=timestamp.now(),
        user=user_data,
        item=self.request.get("item_id"),
        order_number=self.request.get("order_number"),
        status="unconfirmed").save()
def __init__(s, software, user, db_path=os.path.expanduser("~/timesheet.db")):
    """Open (creating if needed) the timesheet database and initialise poll
    state for the given software/user pair."""
    # Create database and set its structure
    s.db = db.DB(db_path)
    # Set variables
    s._active = False  # Keep polling? Stop?
    s._period = timestamp.MINUTE * 5  # Poll how often?
    s._last_active = timestamp.now()  # Last checkin
    # Context recorded with each poll; path/note start empty until set.
    s._note = ""
    s._software = software
    s._user = user
    s._path = ""
def post(self):
    # Matched-stats callback: prospective_search delivers the StatsRecord that
    # matched one or more subscriptions; update each subscriber's day/hour
    # datapoints and push the fresh numbers to the dashboard.
    record = prospective_search.get_document(self.request)
    record_value = simplejson.loads(record.value)
    # NOTE(review): relies on Python 2 map() returning a list — db.get() and
    # zip() below both iterate subscriber_keys.
    subscriber_keys = map(db.Key, self.request.get_all("id"))
    subscribers = db.get(subscriber_keys)
    datapoints = []
    stats_json = []
    for subscriber_key, subscriber in zip(subscriber_keys, subscribers):
        if not subscriber:
            # Subscriber entity is gone; drop its stale subscription.
            prospective_search.unsubscribe(stats.StatsRecord, subscriber_key)
        else:
            datapoints.append(models.getStats(subscriber.datapoint, record.timestamp, duration="day"))
            datapoints.append(models.getStats(subscriber.datapoint, record.timestamp, duration="hour"))
    for datapoint in datapoints:
        if datapoint.datapoint == "active_users":
            try:
                user = models.getUser(record_value["user"], False)
            except models.UserDoesNotExistError:
                continue
            last_seen = user.last_seen
            new_last_seen = timestamp.now()
            if datapoint.duration == "day":
                # Compare at day granularity so a user counts once per day.
                last_seen = last_seen.replace(hour=0, minute=0, second=0, microsecond=0)
                new_last_seen = new_last_seen.replace(hour=0, minute=0, second=0, microsecond=0)
            if last_seen < new_last_seen:
                user.updateLastSeen(new_last_seen)
                user.save()
            else:
                # Already counted in this window; skip the increment below.
                continue
        if datapoint.datapoint == "quota":
            # Quota mirrors an absolute level rather than accumulating.
            datapoint.count = models.getQuota().amount
        else:
            datapoint.increment()
        # NOTE(review): strftime "%M" is minutes, not month — this format
        # string looks suspect; confirm the intended display format.
        json = {
            "datapoint": datapoint.datapoint,
            "value": datapoint.count,
            "date": datapoint.date.strftime("%D, %M %d %y"),
            "datestamp": int(time.mktime(datapoint.date.date().timetuple())) * 1000,
            "hour": datapoint.date.hour,
        }
        if datapoint.duration == "day":
            json["hour"] = "total"
        stats_json.append(json)
    db.put(datapoints)
    # Push the fresh numbers to the web dashboard over the XMPP channel.
    push = channels.Channel("[email protected]/Web", False)
    push.message = {"stats": stats_json}
    push.send()
    logging.debug(simplejson.dumps(stats_json))
def post(self):
    """Record an unconfirmed payment for the signed-in user."""
    user = auth.getCurrentUser()
    response = {}
    if not user:
        return
    # First purchase? Lazily create the user record.
    try:
        user_data = models.getUser(user)
    except models.UserDoesNotExistError:
        user_data = models.UserData(user=user).save()
    models.PaymentData(
        date=timestamp.now(),
        user=user_data,
        item=self.request.get("item_id"),
        order_number=self.request.get("order_number"),
        status="unconfirmed").save()
def save(self):
    """Persist this payment, then grant the buyer immunity until the next
    midnight and emit a 'payment' stats event."""
    from datetime import timedelta  # local import: file-level imports not visible here

    self.put()
    stamp = timestamp.now()
    # BUG FIX: replace(day=stamp.day + 1) raises ValueError on the last day
    # of a month; timedelta arithmetic handles month/year rollover.
    stamp = stamp.replace(hour=0, minute=0, second=0, microsecond=0) + timedelta(days=1)
    self.date = stamp
    self.user.setExplicitImmunity(stamp)
    stats.record(
        "payment",
        simplejson.dumps({
            "user": self.user.user.email(),
            "item": self.item
        }))
def getStats(datapoint, date=False, duration="day"):
    """Return the StatsData row for (datapoint, date, duration), creating a
    zero-count row if none exists.

    Args:
        datapoint: stat name, e.g. 'active_users' or 'quota'.
        date: optional datetime; falsy (the default) means now. Truncated to
            the start of the hour or day to match `duration`.
        duration: 'day' or 'hour'; anything else returns None.
    """
    if not date:
        date = timestamp.now()
    if duration == 'day':
        date = date.replace(hour=0, minute=0, second=0, microsecond=0)
    elif duration == 'hour':
        date = date.replace(minute=0, second=0, microsecond=0)
    else:
        return
    # Hoisted: the same cache key was built twice in the original.
    cache_key = "stats_%s_%s_%s" % (datapoint, date, duration)
    stats = memcache.get(cache_key)
    if stats is None:  # idiom fix: compare to None with `is`, not `==`
        stats = (StatsData.all().filter("datapoint =", datapoint)
                 .filter("date =", date).filter("duration =", duration).get())
        if stats is None:
            stats = StatsData(datapoint=datapoint, date=date, count=0,
                              duration=duration)
        if datapoint == 'quota':
            # Quota mirrors the live amount and is persisted, never cached.
            stats.count = getQuota().amount
            stats.put()
        else:
            memcache.set(cache_key, stats)
    return stats
def getStats(datapoint, date=False, duration="day"):
    """Return the StatsData row for (datapoint, date, duration), creating a
    zero-count row if none exists.

    Args:
        datapoint: stat name, e.g. 'active_users' or 'quota'.
        date: optional datetime; falsy (the default) means now. Truncated to
            the start of the hour or day to match `duration`.
        duration: 'day' or 'hour'; anything else returns None.
    """
    if not date:
        date = timestamp.now()
    if duration == 'day':
        date = date.replace(hour=0, minute=0, second=0, microsecond=0)
    elif duration == 'hour':
        date = date.replace(minute=0, second=0, microsecond=0)
    else:
        return
    # Hoisted: the same cache key was built twice in the original.
    cache_key = "stats_%s_%s_%s" % (datapoint, date, duration)
    stats = memcache.get(cache_key)
    if stats is None:  # idiom fix: compare to None with `is`, not `==`
        stats = (StatsData.all().filter("datapoint =", datapoint).filter(
            "date =", date).filter("duration =", duration).get())
        if stats is None:
            stats = StatsData(datapoint=datapoint, date=date, count=0,
                              duration=duration)
        if datapoint == 'quota':
            # Quota mirrors the live amount and is persisted, never cached.
            stats.count = getQuota().amount
            stats.put()
        else:
            memcache.set(cache_key, stats)
    return stats
def setRelativeImmunity(self, offset):
    # Grant immunity for `offset` past now (presumably a timedelta — confirm
    # against callers), then persist the change.
    self.immunity = timestamp.now() + offset
    self.save()
def poll(s, period, user, software, file, status, note=""):
    """ Poll the database to show activity """
    # One activity row per call. NOTE(review): the first positional argument
    # to write() is None here — presumably an auto-filled column; confirm
    # against the db writer's signature. `s` is used as a context manager,
    # presumably for transaction/lock scoping.
    with s:
        return s.write(None, timestamp.now(), UUID, period, user, software,
                       file, status, note)
def set_path(s, val):
    # Update the path recorded with each poll; routed through set_var rather
    # than assigned directly (presumably so the change is synchronised).
    s.set_var("_path", val)


def get_path(s):
    # Current file path being tracked.
    return s._path


def get_status(s):
    # True while the poll loop is running.
    return s._active


if __name__ == '__main__':
    # Smoke test: run a one-second monitor, check in once, then verify two
    # rows were written (one active, one idle).
    import test
    import os
    with test.temp(".db") as tmp:
        os.unlink(tmp)
        mon = Monitor("python", "ME!", tmp)
        mon._period = 1  # speed period to one second
        mon.set_note("HI THERE")
        mon.set_path("path/to/file")
        print("Polling please wait...")
        mon.start()
        time.sleep(1)  # One active
        mon.checkin()
        time.sleep(2)  # One active, one idle
        mon.stop()
        # NOTE(review): curr - 10 implies timestamp.now() supports integer
        # subtraction (epoch seconds?) — confirm.
        curr = timestamp.now()
        res = list(mon.query(curr - 10, curr))
        assert len(res) == 2
        assert res[0]["file"] == "path/to/file"
        assert res[0]["note"] == "HI THERE"
import re

import cmd  # BUG FIX: cmd.run is called below but was never imported here
import timestamp

# Run speedtest-cli and append "time, download, upload" to speedLog.txt.
# Example output parsed:
#   Download: 38.87 Mbits/s
#   Upload: 5.29 Mbits/s
stats = cmd.run('speedtest-cli')

download = re.compile(r'Download: ([0-9]*\.[0-9]*.*)\n').findall(stats)
download = download[0] if len(download) > 0 else "ERROR"

upload = re.compile(r'Upload: ([0-9]*\.[0-9]*.*)\n').findall(stats)
upload = upload[0] if len(upload) > 0 else "ERROR"

# NOTE(review): assumes timestamp.now() returns a string here — confirm.
time = timestamp.now()
string = time + ', ' + download + ', ' + upload

# BUG FIX: the old code wrote the undefined name `s` instead of `string`;
# also use a context manager so the file is closed even on error.
with open("speedLog.txt", 'a') as f:
    f.write(string + "\n")
def updateQuota(level):
    """Persist a new quota level, prime the memcache copy, and emit a
    'quota_updated' stats event. Returns the stored entity."""
    entity = QuotaData(amount=int(level), date=timestamp.now())
    entity.put()
    memcache.set("quota", entity)
    stats.record("quota_updated", str(entity.amount))
    return entity
def updateToken(self, token):
    # Store a fresh auth token; tokens expire two hours from now.
    self.token = token
    self.token_expiration = timestamp.now() + timedelta(hours=2)
    self.save()
def post(self):
    # Matched-stats callback: prospective_search delivers the StatsRecord that
    # matched one or more subscriptions; update each subscriber's day/hour
    # datapoints and push the fresh numbers to the dashboard.
    record = prospective_search.get_document(self.request)
    record_value = simplejson.loads(record.value)
    # NOTE(review): relies on Python 2 map() returning a list — db.get() and
    # zip() below both iterate subscriber_keys.
    subscriber_keys = map(db.Key, self.request.get_all('id'))
    subscribers = db.get(subscriber_keys)
    datapoints = []
    stats_json = []
    for subscriber_key, subscriber in zip(subscriber_keys, subscribers):
        if not subscriber:
            # Subscriber entity is gone; drop its stale subscription.
            prospective_search.unsubscribe(stats.StatsRecord, subscriber_key)
        else:
            datapoints.append(
                models.getStats(subscriber.datapoint, record.timestamp,
                                duration='day'))
            datapoints.append(
                models.getStats(subscriber.datapoint, record.timestamp,
                                duration='hour'))
    for datapoint in datapoints:
        if datapoint.datapoint == 'active_users':
            try:
                user = models.getUser(record_value['user'], False)
            except models.UserDoesNotExistError:
                continue
            last_seen = user.last_seen
            new_last_seen = timestamp.now()
            if datapoint.duration == "day":
                # Compare at day granularity so a user counts once per day.
                last_seen = last_seen.replace(hour=0, minute=0, second=0,
                                              microsecond=0)
                new_last_seen = new_last_seen.replace(hour=0, minute=0,
                                                      second=0, microsecond=0)
            if last_seen < new_last_seen:
                user.updateLastSeen(new_last_seen)
                user.save()
            else:
                # Already counted in this window; skip the increment below.
                continue
        if datapoint.datapoint == 'quota':
            # Quota mirrors an absolute level rather than accumulating.
            datapoint.count = models.getQuota().amount
        else:
            datapoint.increment()
        # NOTE(review): strftime "%M" is minutes, not month — this format
        # string looks suspect; confirm the intended display format.
        json = {
            'datapoint': datapoint.datapoint,
            'value': datapoint.count,
            'date': datapoint.date.strftime("%D, %M %d %y"),
            'datestamp': int(time.mktime(datapoint.date.date().timetuple())) * 1000,
            'hour': datapoint.date.hour
        }
        if datapoint.duration == "day":
            json['hour'] = "total"
        stats_json.append(json)
    db.put(datapoints)
    # Push the fresh numbers to the web dashboard over the XMPP channel.
    push = channels.Channel("[email protected]/Web", False)
    push.message = {"stats": stats_json}
    push.send()
    logging.debug(simplejson.dumps(stats_json))
def immune(self):
    """Return True while an immunity window is set and still in the future."""
    # Short-circuits on a missing window, exactly like the original's
    # compound condition, so timestamp.now() is only consulted when needed.
    return self.immunity is not None and self.immunity > timestamp.now()
def updateLastSeen(self, newtimestamp=None):
    """Record when the user was last active; defaults to right now."""
    self.last_seen = timestamp.now() if newtimestamp is None else newtimestamp
def checkin(s):
    """ Check in to show activity with software """
    # Reset the idle clock; the poll loop stops recording once now() drifts
    # more than one period past this mark.
    s._last_active = timestamp.now()
import re

import cmd
import timestamp

# Run speedtest-cli and append "time, download, upload" to speedLog.txt.
# Example output parsed:
#   Download: 38.87 Mbits/s
#   Upload: 5.29 Mbits/s
stats = cmd.run('speedtest-cli')

download = re.compile(r'Download: ([0-9]*\.[0-9]*.*)\n').findall(stats)
download = download[0] if len(download) > 0 else "ERROR"

upload = re.compile(r'Upload: ([0-9]*\.[0-9]*.*)\n').findall(stats)
upload = upload[0] if len(upload) > 0 else "ERROR"

# NOTE(review): assumes timestamp.now() returns a string here — confirm.
time = timestamp.now()
string = time + ', ' + download + ', ' + upload

# BUG FIX: the old code wrote the undefined name `s` instead of `string`;
# also use a context manager so the file is closed even on error.
with open("speedLog.txt", 'a') as f:
    f.write(string + "\n")