def update(self, frequency=4.0):
    """Flip the pygame display, rate-limited to at most *frequency* flips/s."""
    min_interval = 1. / frequency
    if unixtime() - self.update_time <= min_interval:
        return  # too soon since the last flip
    self.surface.unlock()
    pygame.display.flip()
    pygame.display.set_icon(self.surface)
    self.update_time = unixtime()
def editdraft(draftid):
    """Edit an existing draft: preview it, save it, or publish it as a post.

    Admin-only (requires ``'logged_in'`` in the session; 403 otherwise).
    GET renders the edit form with the draft loaded into ``g.blog``.
    """
    if 'logged_in' not in session:
        abort(403)
    if request.method == "POST":
        blogpost = request.form['blogpost']
        title = request.form['title']
        if 'Preview Draft' in request.form.values():
            # Render the preview without touching the database.
            return render_template("editdraft.html", preview="1",
                                   renderedblog=render_bbcode(blogpost),
                                   title=title, blogpost=blogpost, recover=1)
        elif 'Submit Draft' in request.form.values():
            g.db.execute("""
                UPDATE draft
                SET title=?, text=?, lastedit=?
                WHERE id=?
                """, [title, blogpost, unixtime(), draftid])
            g.db.commit()
            flash("You have saved your draft", "message")
            return redirect(url_for('drafts'))
        elif 'Publish Draft' in request.form.values():
            error = 0
            if title == "":
                error = 1
                flash("You must make a title", "error")
            if blogpost == "":
                error = 1
                flash("You must make the blogpost", "error")
            if error:
                return render_template('editdraft.html', title=title,
                                       blogpost=blogpost, recover=1)
            time_var = unixtime()
            cur = g.db.execute("""
                INSERT INTO post (title, text, removed,unixtime,views)
                VALUES (?,?,0,?,0)
                """, (title, blogpost, time_var))
            g.db.commit()
            # BUG FIX: the new post id used to be re-read with
            # ``SELECT id FROM post WHERE unixtime=?`` which picks the wrong
            # row when two posts share a timestamp; sqlite3 cursors expose
            # the inserted rowid directly.
            blogid = cur.lastrowid
            g.db.execute("""
                DELETE FROM draft
                WHERE id=?
                """, [draftid])
            g.db.commit()
            flash("You have published a draft", "message")
            return redirect(url_for('blogpost', blogid=blogid))
    g.blog = query_db("""
        SELECT *
        FROM draft
        WHERE id=? AND removed=0
        """, [draftid], True, False)
    return render_template("editdraft.html")
def blogedit(blogid):
    """Edit an existing blog post (admin only).

    POST: validates title/body, supports an in-place preview, and updates
    the post row. GET: loads the post into ``g.blog`` and shows the form.
    """
    if 'logged_in' not in session:
        abort(403)
    if request.method == 'POST':
        title = request.form['title'].strip()
        text = request.form['blogpost'].strip()
        error = 0
        if title == "":
            error = 1
            flash("You must make a title", "error")
        if text == "":
            error = 1
            flash("You must make the blogpost", "error")
        # Preview takes precedence: render without persisting (any error
        # flashes queued above are shown alongside the preview).
        if 'Preview Blog' in request.form.values():
            renderedblog = render_bbcode(text)
            return render_template("blogedit.html", blogid=blogid, title=title,
                                   blogpost=text, recover=1, preview="1",
                                   renderedblog=renderedblog)
        if error:
            # Re-show the form with the submitted values preserved.
            return render_template('blogedit.html', blogid=blogid, title=title,
                                   blogpost=text, recover=1)
        g.db.execute("""
            UPDATE post
            SET title=?, text=?, lastedit=?
            WHERE id=?
            """, (title, text, unixtime(), blogid))
        g.db.commit()
        flash("You successfully changed your blogpost", "message")
        return redirect(url_for('blogpost', blogid=blogid))
    # GET: load the post for the edit form.
    g.blog = query_db("""
        SELECT *
        FROM post
        WHERE id=?
        """, [str(blogid)], True, False)
    return render_template('blogedit.html', blogid=blogid)
def delete(self, key, seconds=0, namespace=None):
    """Remove *key* from the cache.

    Returns ``True`` when a live (non-expired) entry was removed, and
    ``False`` when the key was absent or the stored entry had already
    expired. ``seconds``/``namespace`` are accepted for API compatibility.
    """
    cutoff = int(unixtime())
    self.lock.acquire(1)
    try:
        entry = self.items.pop(key, None)
    finally:
        self.lock.release()
    if entry is None:
        return False
    return entry.expires >= cutoff
def get(self, key, namespace=None):
    """Return the cached value for *key*, or ``None`` when missing/expired.

    An expired entry is purged as a side effect of the lookup.
    """
    cutoff = int(unixtime())
    self.lock.acquire(1)
    try:
        entry = self.items.get(key)
        if entry is None:
            return None
        if entry.expires < cutoff:
            del self.items[key]  # lazy eviction on read
            return None
        return entry.value
    finally:
        self.lock.release()
def muc_online(self, nick, jid, role, affiliation):
    """Record a participant joining the MUC room and notify all listeners."""
    record = {
        'nick': nick,
        'jid': jid,
        'role': role,
        'affiliation': affiliation,
        'time': unixtime(),  # join timestamp
    }
    self.participants[nick] = record
    for observer in self.listeners:
        observer.muc_online(nick=nick, jid=jid, role=role,
                            affiliation=affiliation)
def commitblog():
    """Create a new blog post from the submitted form (admin only).

    Supports a preview round-trip via the add_blog view; validates that
    both title and body are non-empty before inserting.
    """
    if 'logged_in' in session:
        title = request.form['title'].strip()
        blogpost = request.form['blogpost'].strip()
        if 'Preview Blog' in request.form.values():
            return redirect(url_for('add_blog', preview=1, title=title,
                                    blogpost=blogpost, recover=1))
        error = 0
        if title == "":
            error = 1
            flash("You must make a title", "error")
        if blogpost == "":
            error = 1
            flash("You must make the blogpost", "error")
        if error:
            return redirect(url_for('add_blog', title=title,
                                    blogpost=blogpost, recover=1))
        time_var = unixtime()
        cur = g.db.execute("""
            INSERT INTO post (title, text, removed,unixtime,views)
            VALUES (?,?,0,?,0)
            """, (title, blogpost, time_var))
        g.db.commit()
        # BUG FIX: avoid re-selecting the row by timestamp
        # (``SELECT id ... WHERE unixtime=?`` collides when two posts share
        # the same unixtime); the sqlite3 cursor exposes the new rowid.
        blogid = cur.lastrowid
    else:
        return abort(403)
    return redirect(url_for('blogpost', blogid=blogid))
def deploy(request, path, msg):
    """Stage a deployment request in DynamoDB and ask the user to confirm.

    ``path`` is the template to deploy; ``msg`` is the human-readable name
    used in the spoken responses. When ``path`` is None the request could
    not be prepared and the session is ended with an error message.
    """
    if path is None:
        return alexa.respond(
            message="Could not prepare the deployment request to create the %s" % msg,
            end_session=True)
    # Persist the pending deployment keyed by the Alexa session id.
    # NOTE(review): here ``unixtime`` is called with a timetuple, so it is
    # presumably time.mktime (seconds since epoch) — confirm at the import.
    # NOTE(review): 'expires' stores the literal 3600, not now+3600; verify
    # how the consumer/TTL interprets this attribute.
    response = dclient.put_item(
        TableName=DYNAMO_TABLE,
        Item={
            'sessionId': {'S': request.session_id()},
            'templatePath': {'S': path},
            'message': {'S': msg},
            'time': {'N': str(int(unixtime(datetime.now().timetuple())))},
            'expires': {'N': '3600'}
        })
    # ``response`` is unused; put_item raises on failure.
    return alexa.respond(
        message="Please confirm you would like to deploy the %s" % msg)
def run():
    """Main sensor loop: measure water level, temperature/humidity, light and
    soil humidity every ~11 s and push the readings to the database.

    Runs forever; spawns the temperature and water-pump control threads first.
    """
    print('start')
    temp_control_thread_start()
    water_pump_control_thread_start()
    while True:
        GPIO.output(PIN_TRIGGER, GPIO.LOW)
        # print("Waiting for sensor to settle")
        time.sleep(1)
        print("Calculating distance")
        # 10 µs trigger pulse for the HC-SR04-style ultrasonic sensor.
        GPIO.output(PIN_TRIGGER, GPIO.HIGH)
        time.sleep(0.00001)
        GPIO.output(PIN_TRIGGER, GPIO.LOW)
        # BUG FIX: initialise both timestamps before the wait loops; if a
        # loop body never executes (echo already high/low), the old code
        # raised NameError on an unbound variable.
        pulse_start_time = pulse_end_time = time.time()
        while GPIO.input(PIN_ECHO) == 0:
            pulse_start_time = time.time()
        while GPIO.input(PIN_ECHO) == 1:
            pulse_end_time = time.time()
        pulse_duration = pulse_end_time - pulse_start_time
        # 17150 cm/s ~= half the speed of sound (round trip -> one way).
        distance = round(pulse_duration * 17150)  # , 2
        humidity, temperature = get_humidity_temperature()
        now = int(unixtime())
        lux = mcp.read_adc(0)
        soil_humidity = mcp.read_adc(1)
        data = {
            "date": now,
            "humidity": humidity,
            "temperature": temperature,
            "water_level": distance,
            "soil_humidity": soil_humidity,
            "lux": lux
        }
        print(data)
        # push to database
        db.child(db_sensor_data_loc).push(data)
        time.sleep(10)
def timeval(self, name, default):
    """Resolve the time-offset keyword argument *name*.

    Accepts either a plain number of seconds or ``<float><unit>`` (e.g.
    ``'2d'``). Returns ``(utimestamp, label)`` where utimestamp is "now
    minus the offset" converted for trac 0.12; falls back to *default*
    (a ``(value, label)`` pair) when the argument is absent.
    Raises TracError on an unparsable value.
    """
    if name not in self.kwargs:
        defval, deftext = default
        ndef = to_utimestamp(to_datetime(defval))
        return (ndef, deftext)
    try:
        val = self.kwargs[name]
        try:
            val = int(val)
            text = \
                str(val) + self.tunits_name['s'] + ['s', ''][val == 1]
        except (TypeError, ValueError):
            unit = val[-1].lower()
            val = float(val[:-1])
            # BUG FIX: '%g' drops a trailing '.0' cleanly; the old
            # str(val).strip('.0') corrupted values (10.0 -> '1', 0.5 -> '5').
            text = \
                ('%g' % val) + self.tunits_name[unit] \
                + ['s', ''][val == 1]
            val = int(val * self.tunits[unit])
        val = int(unixtime()) - val  # mod for trac 0.12
        nval = to_utimestamp(to_datetime(val))
    except Exception:
        raise TracError("Invalid value '%s' for argument '%s'! "
                        % (self.kwargs[name], name))
    return (nval, text)
def timeval(self, name, default):
    """Resolve the time-offset keyword argument *name*.

    Accepts either a plain number of seconds or ``<float><unit>`` (e.g.
    ``'2d'``). Returns ``(utimestamp, label)`` where utimestamp is "now
    minus the offset" converted for trac 0.12; falls back to *default*
    (a ``(value, label)`` pair) when the argument is absent.
    Raises TracError on an unparsable value.
    """
    if name not in self.kwargs:
        defval, deftext = default
        ndef = to_utimestamp(to_datetime(defval))
        return (ndef, deftext)
    try:
        val = self.kwargs[name]
        try:
            val = int(val)
            text = \
                str(val) + self.tunits_name['s'] + ['s', ''][val == 1]
        except (TypeError, ValueError):
            unit = val[-1].lower()
            val = float(val[:-1])
            # BUG FIX: '%g' drops a trailing '.0' cleanly; the old
            # str(val).strip('.0') corrupted values (10.0 -> '1', 0.5 -> '5').
            text = \
                ('%g' % val) + self.tunits_name[unit] \
                + ['s', ''][val == 1]
            val = int(val * self.tunits[unit])
        val = int(unixtime()) - val  # mod for trac 0.12
        nval = to_utimestamp(to_datetime(val))
    except Exception:
        raise TracError("Invalid value '%s' for argument '%s'! "
                        % (self.kwargs[name], name))
    return (nval, text)
def store_multi(self, mapping, time=0, key_prefix='', op=0):
    """Store several key/value pairs in one locked pass.

    ``op``: 0 = set, 1 = add (fail if a live entry exists),
    2 = replace (fail if no live entry exists).
    Returns the list of (unprefixed) keys that could not be stored.
    Entries with a bounded lifetime are also registered in the current
    expire bucket; when the bucket rolls over, its stale keys are purged.
    """
    now = int(unixtime())
    time = expires(now, time)
    items = self.items
    keys_failed = []
    succeeded = []
    self.lock.acquire(1)
    try:
        for k, value in iteritems(mapping):
            key = key_prefix + k
            try:
                entry = items[key]
                if entry.expires < now:
                    del items[key]  # stale entry: purge, then store below
                elif op == 1:  # add: a live entry already exists
                    keys_failed.append(k)
                    continue
            except KeyError:
                if op == 2:  # replace: nothing to replace
                    keys_failed.append(k)
                    continue
            items[key] = CacheItem(key, value, time)
            succeeded.append((key, time))
    finally:
        self.lock.release()
    if time < 0x7FFFFFFF and succeeded:
        expired_keys = None
        bucket_id = int((now % self.period) / self.interval)
        bucket_lock, bucket_items = self.expire_buckets[bucket_id - 1]
        bucket_lock.acquire(1)
        try:
            if self.last_expire_bucket_id != bucket_id:
                self.last_expire_bucket_id = bucket_id
                expired_keys = find_expired(bucket_items, now)
            bucket_items.extend(succeeded)
        finally:
            bucket_lock.release()
        if expired_keys:
            # BUG FIX: the old log line referenced the loop variables
            # ``key``/``entry``, which are stale here and are unbound
            # (NameError) when every stored key was new; log the keys that
            # actually expired instead.
            _logger.warning(u"缓存删除过期key:" +
                            u",".join(u"%s" % ek for ek in expired_keys))
            self.delete_multi(expired_keys)
    return keys_failed
def __init__(self):
    """Initialise worker state and install a SIGINT handler."""
    self._stop = False          # set to stop the main loop
    self._stats = {}            # per-key counters
    self._status = {
        'last': unixtime(),     # last status-line timestamp
        'interval': 2,          # seconds between status lines
    }
    signal.signal(signal.SIGINT, self._signal_handler)
def muc_online(self, jid, nick, role, affiliation, **keywords):
    """Track a newly joined MUC participant (extra keywords are ignored)."""
    entry = dict(jid=jid,
                 nick=nick,
                 role=role,
                 affiliation=affiliation,
                 time=unixtime())  # join timestamp
    self.participants[nick] = entry
def store_multi(self, mapping, time=0, key_prefix='', op=0): """ There is item in cached that expired >>> c = MemoryCache() >>> c.items['k'] = CacheItem('k', 'v', 1) >>> c.store_multi({'k': 'v'}, 100) [] There is item in expire_buckets that expired >>> c = MemoryCache() >>> i = int((int(unixtime()) % c.period) ... / c.interval) - 1 >>> c.expire_buckets[i] = (allocate_lock(), [('x', 10)]) >>> c.store_multi({'k': 'v'}, 100) [] """ now = int(unixtime()) time = expires(now, time) items = self.items keys_failed = [] succeeded = [] self.lock.acquire(1) try: for k, value in iteritems(mapping): key = key_prefix + k try: entry = items[key] if entry.expires < now: del items[key] elif op == 1: # add keys_failed.append(k) continue except KeyError: if op == 2: # replace keys_failed.append(k) continue items[key] = CacheItem(key, value, time) succeeded.append((key, time)) finally: self.lock.release() if time < 0x7FFFFFFF and succeeded: expired_keys = None bucket_id = int((now % self.period) / self.interval) bucket_lock, bucket_items = self.expire_buckets[bucket_id - 1] bucket_lock.acquire(1) try: if self.last_expire_bucket_id != bucket_id: self.last_expire_bucket_id = bucket_id expired_keys = find_expired(bucket_items, now) bucket_items.extend(succeeded) finally: bucket_lock.release() if expired_keys: _logger.warning(u"缓存删除key:"+key+u",entry.expires:"+str(entry.expires)+u">=now:"+str(now)) self.delete_multi(expired_keys) return keys_failed
def WriteIndexQuote(self, quote, submitter):
    """Append *quote* by *submitter* to the quote DB with the next
    sequential index and persist the DB.

    Each row is ``[index, quote, submitter, unix_date]``.
    """
    date = int(unixtime())
    # Continue after the last index, or start at zero for an empty DB.
    # (Was a bare ``except`` around the -1 lookup, which also swallowed
    # real errors such as a corrupt row.)
    if self.quoteDB:
        index_nr = self.quoteDB[-1][0] + 1
    else:
        index_nr = 0
    self.quoteDB.append([index_nr, quote, submitter, date])
    self.WriteDB()
def set(self, key, value, time=0, namespace=None):
    """Serialise *value* (a 2-item sequence) plus its absolute expiry
    into a comma-joined string and store it under *key* in Redis."""
    now = int(unixtime())
    time = memory.expires(now, time)  # absolute expiry timestamp
    parts = list(value)
    parts.append(time)
    parts[0] = str(parts[0])
    parts[2] = str(parts[2])
    wrap_value = ','.join(parts)
    _logger.debug(u"redis设置:key=" + key + u",value=" + wrap_value)
    return self.r.set(key, wrap_value)
def set(self, key, value, time=0, namespace=None):
    """Serialise *value* (a 2-item sequence) plus its absolute expiry
    into a comma-joined string and store it under *key* in Redis."""
    now = int(unixtime())
    time = memory.expires(now, time)  # absolute expiry timestamp
    parts = list(value)
    parts.append(time)
    parts[0] = str(parts[0])
    parts[2] = str(parts[2])
    wrap_value = ','.join(parts)
    _logger.debug(u"redis设置:key=" + key + u",value=" + wrap_value)
    return self.r.set(key, wrap_value)
def delete(what, whatid):
    """Soft-delete a blogpost, comment or draft (admin only).

    Sets ``removed=1`` plus a removal timestamp so the action can be
    undone via the recover view (an "Undo deletion" link is flashed).
    The optional ``returnto_*`` query args control where a comment
    deletion redirects back to.
    """
    if 'logged_in' not in session:
        abort(403)
    returnto_ip = request.args.get('returnto_ip')
    returnto_manage_comments = request.args.get('returnto_manage_comments')
    if what == "blogpost":
        g.db.execute("""
            UPDATE post
            SET removed=1, timeofremoval=?
            WHERE id=?
            """, [unixtime(), whatid])
        g.db.commit()
        flash("The blogpost has been deleted <a class='undo_recover' href='" + url_for("recover", what="blogpost", whatid=whatid) + "'>Undo deletion</a>", "message")
        return redirect(url_for("frontpage"))
    elif what == "comment":
        # Remember which post the comment belongs to for the redirect.
        commentpage = query_db("""
            SELECT commentpage
            FROM comment
            WHERE id=?
            """, [whatid])[0]["commentpage"]
        g.db.execute("""
            UPDATE comment
            SET removed=1, timeofremoval=?
            WHERE id=?
            """, [unixtime(), whatid])
        g.db.commit()
        if not returnto_ip and not returnto_manage_comments:
            # Default: back to the blogpost the comment was on.
            flash("The comment has been deleted <a class='undo_recover' href='" + url_for("recover", what="comment", whatid=whatid) + "'>Undo deletion</a>", "message")
            return redirect(url_for("blogpost", blogid=commentpage))
        elif returnto_manage_comments:
            # Came from the per-post comment management view.
            flash("The comment has been deleted <a class='undo_recover' href='" + url_for("recover", what="comment", whatid=whatid, returnto_manage_comments=returnto_manage_comments) + "'>Undo deletion</a>", "message")
            return redirect(url_for('comments_for', blogpost=returnto_manage_comments))
        else:
            # Came from the per-IP comment listing.
            flash("The comment has been deleted <a class='undo_recover' href='" + url_for("recover", what="comment", whatid=whatid, returnto_ip=returnto_ip) + "'>Undo deletion</a>", "message")
            return redirect(url_for("comments_by", ipaddress=returnto_ip))
    elif what == "draft":
        g.db.execute("""
            UPDATE draft
            SET removed=1, timeofremoval=?
            WHERE id=?
            """, [unixtime(), whatid])
        g.db.commit()
        flash("The draft has been removed <a class='undo_recover' href='" + url_for("recover", what="draft", whatid=whatid) + "'>Undo deletion</a>", "message")
        return redirect(url_for('drafts'))
    else:
        # Unknown object kind.
        abort(404)
def show_status(self):
    """ Display a summary line """
    # NOTE: Python 2 module (print statement below).
    # Prints the per-key counters at most once every
    # ``_status['interval']`` seconds, then resets all counters to zero.
    status = self._status
    stats = self._stats
    now = unixtime()
    if (now - status['last']) > status['interval']:
        status['last'] = now
        print 'now:', ' | '.join(['%s:%d' % (key, value)
                                  for key, value in stats.items()])
        # Reset counters but keep the known keys.
        self._stats = {key: 0 for key in stats.keys()}
def run(self, wakeup_interval=1):
    """Expire messages every *wakeup_interval* seconds until the stop
    event fires; returns the stop event's payload after resetting it."""
    stop = self._stopevent
    while not stop.ready():
        eventlet.sleep(wakeup_interval)
        self.tick(unixtime())
    result = stop.wait()
    stop.reset()
    return result
def run():
    """Sample temperature/humidity every 10 s and push each reading to the
    database; runs forever. Starts the fan-control thread first."""
    print('start')
    fan_control_thread_start()
    while True:
        time.sleep(10)
        humidity, temperature = get_humidity_temperature()
        reading = {
            "date": int(unixtime()),
            "humidity": humidity,
            "temperature": temperature,
        }
        print(reading)
        # push to database
        db.child(db_sensor_data_loc).push(reading)
def adddraft():
    """Persist a new draft from the submitted form (admin only)."""
    if 'logged_in' not in session:
        abort(403)
    form = request.form
    draft_title = form['title']
    draft_text = form['blogpost']
    g.db.execute("""
        INSERT INTO draft(title,text,removed,lastedit)
        VALUES(?,?,0,?)
        """, [draft_title, draft_text, unixtime()])
    g.db.commit()
    flash("You saved a new draft.", "message")
    return redirect(url_for('drafts'))
def tick(self, now=None):
    """Remove expired messages from the Tracker.

    Walks ``_estor`` and, for every entry whose deadline has passed,
    drops it from both stores and fails its waiter with
    MessageTimeoutError. The ``break`` relies on ``_estor`` iterating
    in expiry order (as the original code did) — TODO confirm.
    """
    if now is None:
        now = unixtime()
    # BUG FIX: snapshot the items first — deleting from a dict while
    # iterating its live .items() view raises RuntimeError on Python 3.
    for k, item in list(self._estor.items()):
        uid, expires, event = item
        if expires > now:
            break  # remaining entries are still live
        del self._estor[k]
        del self._kstor[uid]
        event.send_exception(MessageTimeoutError())
def store(self, key, value, time=0, op=0):
    """Store ``value`` under ``key``.

    ``op``: 0 = set, 1 = add (fail if a live entry exists),
    2 = replace (fail if no live entry exists). Returns True on success.

    There is item in cached that expired

        >>> c = MemoryCache()
        >>> c.items['k'] = CacheItem('k', 'v', 1)
        >>> c.store('k', 'v', 100)
        True

    There is item in expire_buckets that expired

        >>> c = MemoryCache()
        >>> i = int((int(unixtime()) % c.period)
        ...         / c.interval) - 1
        >>> c.expire_buckets[i] = (allocate_lock(), [('x', 10)])
        >>> c.store('k', 'v', 100)
        True
    """
    now = int(unixtime())
    time = expires(now, time)  # absolute expiry timestamp
    items = self.items
    self.lock.acquire(1)
    try:
        try:
            entry = items[key]
            if entry.expires < now:
                # Stale entry: purge it, then fall through to store.
                del items[key]
            elif op == 1:  # add
                return False
        except KeyError:
            if op == 2:  # replace
                return False
        items[key] = CacheItem(key, value, time)
    finally:
        self.lock.release()
    # Entries with a bounded lifetime are registered in the current
    # expire bucket; the first store after a bucket rollover sweeps the
    # previous cycle's entries of that bucket.
    if time < 0x7FFFFFFF:
        expired_keys = None
        bucket_id = int((now % self.period) / self.interval)
        bucket_lock, bucket_items = self.expire_buckets[bucket_id - 1]
        bucket_lock.acquire(1)
        try:
            if self.last_expire_bucket_id != bucket_id:
                self.last_expire_bucket_id = bucket_id
                expired_keys = find_expired(bucket_items, now)
            bucket_items.append((key, time))
        finally:
            bucket_lock.release()
        if expired_keys:
            self.delete_multi(expired_keys)
    return True
def get_comm_time(self, in_cat_pri_string):
    """Return the stored comm_time when this user's current task matches
    *in_cat_pri_string*; otherwise return the current unix time."""
    query = ("SELECT cur_task, comm_time FROM Users"
             " WHERE userid = " + str(self.get_userid()))
    self.cur.execute(query)
    cur_task, comm_time = self.cur.fetchone()
    if cur_task == in_cat_pri_string:
        return comm_time
    # Task mismatch: treat "now" as the reference time.
    return unixtime()
def commitcomment(blogid):
    """Validate and store a visitor comment on post *blogid*.

    A logged-in admin bypasses the captcha; everyone else is checked
    against the external captchator.com service. On any validation error
    the user is redirected back with the submitted values preserved.
    NOTE: Python 2 module (urllib2).
    """
    error = 0
    website = request.form['website'].strip()
    nickname = request.form['nickname'].strip()
    comment = request.form['comment'].strip()
    email = request.form['email'].strip()
    # KeyError/unknown values raise here, so ispublic is always 0 or 1.
    ispublic = {'public': 1, 'nopublic': 0}[request.form['ispublic']]
    if 'logged_in' in session:
        admin = 1
    else:
        admin = 0
    answer = request.form['captchaanswer']
    # External captcha check over plain HTTP; returns "1"/"0".
    correct = int(urllib2.urlopen('http://captchator.com/captcha/check_answer/' + session['session_id'] + '/' + answer).read(100))
    # NOTE(review): dead check — the dict lookup above already guarantees
    # ispublic is 0 or 1, so this branch can never fire.
    if ispublic not in (0, 1):
        ispublic = 0
    ip = request.remote_addr
    if admin or correct:
        if nickname == "":
            error = 1
            flash('You must fill in your name!', 'error')
        elif len(nickname) > 50:
            error = 1
            flash("Your name may be no longer than 50 characters long.", "error")
        if comment == "":
            error = 1
            flash('You must make a comment!', 'error')
        elif len(comment) > 1500:
            error = 1
            flash('You may not write a comment longer than 1500 characters. The one you submitted has ' + str(len(comment)) + " characters.", "error")
        # NOTE(review): pattern should be a raw string (r'.*@.*\..*');
        # '\.' happens to work but relies on an invalid-escape fallback.
        if email != "" and not re.match('.*@.*\..*', email):
            error = 1
            flash("Please only submit a valid email address. (email is optional)", "error")
        if not error:
            flash("You made a comment!", 'message')
            g.db.execute("""
                INSERT INTO comment
                (commentpage, commenttext,nickname,website,email,removed,unixtime,ip,publicemail,isadmin)
                VALUES(?, ?, ?, ?, ?, 0, ?, ?, ?, ?)
                """, (blogid, comment, nickname, website, email, unixtime(), ip, ispublic, admin))
            g.db.commit()
    else:
        error = 1
        flash('Your answer to the image test was incorrect.', 'error')
    if error:
        # Redirect back with the form contents so nothing is lost.
        return redirect(url_for('blogpost', blogid=blogid, recover=1, website=website, nickname=nickname, comment=comment, email=email))
    else:
        return redirect(url_for('blogpost', blogid=blogid))
def highlight_regions(self):
    """
    Highlight new inserted content so user knows what was modified.
    Also remove that highlight after short time so it won't bother user.
    """
    clear_after_ms = 750
    # For each inserted line generate unique key
    regions_key = 'timesheets-{}'.format(unixtime())
    # Highlight regions
    self.view.add_regions(regions_key, self.regions_to_highlight,
                          'text', 'dot', sublime.DRAW_NO_FILL)
    self.regions_to_highlight.clear()
    # Schedule regions clear
    sublime.set_timeout(lambda: self.view.erase_regions(regions_key),
                        clear_after_ms)
def repack(self, progress=True, path=None, **kwargs):
    """Rebuild the IFS archive: pack the file tree, build the KBinXML
    manifest (with md5/size of the packed data) and write header +
    manifest + data to *path* (defaults to ``self.ifs_out``)."""
    if path is None:
        path = self.ifs_out
    # open first in case path is bad
    ifs_file = open(path, 'wb')
    self.data_blob = BytesIO()
    self.manifest = KBinXML(etree.Element('imgfs'))
    manifest_info = etree.SubElement(self.manifest.xml_doc, '_info_')
    # the important bit: serialise the tree; also populates the manifest.
    data = self._repack_tree(progress, **kwargs)
    # Record checksum and size of the packed payload in the manifest.
    data_md5 = etree.SubElement(manifest_info, 'md5')
    data_md5.attrib['__type'] = 'bin'
    data_md5.attrib['__size'] = '16'
    data_md5.text = hashlib.md5(data).hexdigest()
    data_size = etree.SubElement(manifest_info, 'size')
    data_size.attrib['__type'] = 'u32'
    data_size.text = str(len(data))
    manifest_bin = self.manifest.to_binary()
    manifest_hash = hashlib.md5(manifest_bin).digest()
    # Fixed-layout file header. The version field is stored together with
    # its bitwise complement (integrity check).
    head = ByteBuffer()
    head.append_u32(SIGNATURE)
    head.append_u16(self.file_version)
    head.append_u16(self.file_version ^ 0xFFFF)
    head.append_u32(int(unixtime()))  # creation timestamp
    head.append_u32(self.manifest.mem_size)
    # Offset where the manifest ends / payload begins (+4 for the
    # manifest_end field itself; +16 for the hash on newer versions).
    manifest_end = len(manifest_bin) + head.offset + 4
    if self.file_version > 1:
        manifest_end += 16
    head.append_u32(manifest_end)
    if self.file_version > 1:
        head.append_bytes(manifest_hash)
    ifs_file.write(head.data)
    ifs_file.write(manifest_bin)
    ifs_file.write(data)
    ifs_file.close()
def get(self, key, namespace=None):
    """Fetch *key* from Redis; returns None when missing, and deletes and
    returns None when the embedded expiry timestamp has passed."""
    wrap_value = self.r.get(key)
    if not wrap_value:
        return None
    _logger.debug(u"redis获取:key=" + key + u",wrap_value=" + wrap_value)
    lvalue = wrap_value.split(',')
    time = int(lvalue[2])  # absolute expiry timestamp (last field)
    value = lvalue[:2]
    _logger.debug(u"redis获取:key=" + key + u",value=" + ','.join(value))
    value[0] = int(value[0])
    now = int(unixtime())
    if now >= time:
        _logger.debug(u"redis:key=" + key + u",时间超时,清除")
        self.r.delete(key)
        return None
    return value
def set_comm_time(self, in_cat_pri_string, seconds_ago=None):
    """Set this user's comm_time (optionally back-dated) and current task.

    Returns the unix-seconds value written.
    Raises ValueError when *in_cat_pri_string* is not a known task key,
    TypeError when *seconds_ago* is not an int.
    """
    if not self.exists(in_cat_pri_string):
        raise ValueError('Cannot set comm_time without pri_key match')
    seconds = int(unixtime())
    if seconds_ago:
        # Set to a time in the past for testing. Was
        # ``assert(type(seconds_ago) == type(100))`` — asserts are
        # stripped under -O, so validate explicitly instead.
        if not isinstance(seconds_ago, int):
            raise TypeError('seconds_ago must be an int')
        seconds -= seconds_ago
    # NOTE(review): query built by string concatenation; if
    # in_cat_pri_string can carry untrusted input this is an SQL
    # injection vector — switch to this driver's parameterized queries.
    query = "UPDATE Users SET comm_time = " + str(seconds)
    query += ", cur_task = '" + in_cat_pri_string
    query += "' WHERE userid = " + str(self.get_userid())
    self.cur.execute(query)
    self.db.commit()
    pr('query executed is:::::::: ' + query)
    pr('string is :::::::::::::::: ' + in_cat_pri_string)
    return seconds
def get(self, key, namespace=None):
    """Fetch *key* from Redis; returns None when missing, and deletes and
    returns None when the embedded expiry timestamp has passed."""
    wrap_value = self.r.get(key)
    if not wrap_value:
        return None
    _logger.debug(u"redis获取:key=" + key + u",wrap_value=" + wrap_value)
    lvalue = wrap_value.split(',')
    time = int(lvalue[2])  # absolute expiry timestamp (last field)
    value = lvalue[:2]
    _logger.debug(u"redis获取:key=" + key + u",value=" + ','.join(value))
    value[0] = int(value[0])
    now = int(unixtime())
    if now >= time:
        _logger.debug(u"redis:key=" + key + u",时间超时,清除")
        self.r.delete(key)
        return None
    return value
def _handle(self, data):
    """
    Grunt work, wrapper for the 'process' method. Handles re-queueing
    of items which couldn't be processed.
    """
    self.incr_stats('popped')
    # NOTE(review): marshal.loads can also raise EOFError/TypeError on
    # truncated input, which would propagate — confirm intended.
    try:
        record = marshal.loads(data)
    except ValueError:
        record = None
    if record is None:
        self.incr_stats('invalid')
        return
    is_processed = False
    try:
        is_processed = self.process(record)
    except Exception:
        LOG.error("Failed to process", exc_info=True)
    # Failed processing for some reason
    if not is_processed:
        # Put the CDR back in queue for processing if process fails;
        # ttl counts the number of attempts so far.
        record['ttl'] = record.get('ttl', 0) + 1
        if record['ttl'] > 3:
            # But only 3-4 times... then it's 'f****d'
            # XXX: how do we handle 'f****d' items?
            self.redis.rpush('queue_fucked', json.dumps(record))
            self.incr_stats('f****d')
        else:
            self.redis.rpush('queue', json.dumps(record))
            self.incr_stats('retry')
        self.incr_stats('redis.ops.rpush')
        self.incr_stats('redis.ops')
    else:
        # TODO: insert the 'cost' of processing this record
        # Publish the completion timestamp under the record id, kept for
        # only 2 seconds (short-lived ack for any waiter).
        self.redis.rpush(record['id'], unixtime())
        self.redis.expire(record['id'], 2)
        self.incr_stats('processed')
        self.incr_stats('redis.ops.rpush')
        self.incr_stats('redis.ops.expire')
        self.incr_stats('redis.ops', 2)
def saveBMP(self, name=None):
    """Dump the pygame surface to a 24-bit bottom-up BMP file.

    ``name`` defaults to a timestamped file under /tmp. NOTE: Python 2
    module (xrange, str written to a binary file).
    """
    # BUG FIX: the default filename used to be computed in the ``def``
    # line, i.e. once at import time — every default-named call overwrote
    # the same file. Compute it per call instead.
    if name is None:
        name = '/tmp/window-' + str(unixtime()) + '.bmp'
    width = self.surface.get_width()
    height = self.surface.get_height()
    # fix size to be multiple of 4 ; BMP doesn't like it any other way
    width -= width % 4
    height -= height % 4
    # ``with`` guarantees the file is closed even on error; the
    # try/finally guarantees the surface is unlocked again.
    with open(name, 'wb') as out:
        out.write('BM' + struct.pack('<QIIHHHH',
                                     width * height * 3 + 26, 26, 12,
                                     width, height, 1, 24))
        self.surface.lock()
        try:
            # BMP stores rows bottom-up, pixels as BGR triplets.
            for y in xrange(height - 1, -1, -1):
                for x in xrange(width):
                    v = self.surface.get_at((x, y))
                    out.write(struct.pack('BBB', v[2], v[1], v[0]))
        finally:
            self.surface.unlock()
def incr(self, key, delta=1, namespace=None, initial_value=None):
    """Atomically add *delta* to the value stored under *key*.

    Returns the new value, or None when the key is absent (or expired)
    and no *initial_value* was given. With *initial_value*, a missing
    key is first seeded with that value and then incremented.
    """
    now = int(unixtime())
    self.lock.acquire(1)
    try:
        entry = self.items.get(key)
        if entry is not None and entry.expires < now:
            del self.items[key]  # stale entry: treat as missing
            entry = None
        if entry is None:
            if initial_value is None:
                return None
            entry = self.items[key] = CacheItem(
                key, initial_value, expires(now, 0))
        entry.value += delta
        return entry.value
    finally:
        self.lock.release()
def editabout():
    """Edit the site's about page (admin only), with optional preview.

    NOTE(review): the UPDATE has no WHERE clause, so it rewrites every
    row of ``about`` — presumably a single-row table; confirm the schema.
    """
    if "logged_in" not in session:
        abort(403)
    if request.method == "POST":
        blogpost = request.form['blogpost']
        if 'Preview Aboutpage' in request.form.values():
            # Render the preview without persisting anything.
            return render_template("editabout.html", preview="1",
                                   renderedblog=render_bbcode(blogpost),
                                   blogpost=blogpost, recover=1)
        g.db.execute("""
            UPDATE about
            SET text=?, unixtime=?
            """, [blogpost, unixtime()])
        g.db.commit()
        flash("You have successfully edited the aboutpage", "message")
        return redirect(url_for("about"))
    # GET: load the current text for the edit form.
    g.orig_about = query_db("""
        SELECT text
        FROM about
        LIMIT 1
        """, (), True)['text']
    return render_template("editabout.html")
def incr(self, key, delta=1, namespace=None, initial_value=None):
    """Atomically add *delta* to the value stored under *key*.

    Returns the new value, or None when the key is absent (or expired)
    and no *initial_value* was given. With *initial_value*, a missing
    key is first seeded with that value and then incremented.
    """
    now = int(unixtime())
    self.lock.acquire(1)
    try:
        entry = self.items.get(key)
        if entry is not None and entry.expires < now:
            del self.items[key]  # stale entry: treat as missing
            entry = None
        if entry is None:
            if initial_value is None:
                return None
            entry = self.items[key] = CacheItem(
                key, initial_value, expires(now, 0))
        entry.value += delta
        return entry.value
    finally:
        self.lock.release()
def tonight(bot, trigger):
    """IRC command: list tonight's (17:00 onwards) whitelisted-network TV
    schedule, using a 10-minute in-memory cache of the tvrage data.

    NOTE(review): assumes ``_sched_cache`` is pre-seeded with 'time' and
    'data' keys — confirm the module-level initialisation.
    """
    global _sched_cache
    t0 = unixtime()
    # Serve from cache when it is fresh (< 10 min) and non-empty.
    if t0 - _sched_cache['time'] < 60 * 10 and _sched_cache['data']:
        sched_data = _sched_cache['data']
    else:
        api_key = bot.config.tvrage.api_key
        sched_data = tvrage.schedule(api_key)
        _sched_cache['data'] = sched_data
        _sched_cache['time'] = t0
    # Header
    rows = [render(items=[
        EntityGroup([Entity("Schedule")]),
        EntityGroup([Entity(sched_data['date'])])
    ])]
    for hour, shows in sched_data['hours'].items():
        dt = tvrage.parse_hour(hour)
        if dt.hour < 17:
            continue  # only evening slots
        for show in shows:
            if show['network'].lower() not in network_whitelist:
                continue
            items = [
                EntityGroup([Entity(hour)]),
                EntityGroup([Entity(show['name'])]),
                EntityGroup([
                    Entity(show['ep']),
                    Entity(show['title']),
                    Entity(show['network'])
                ])
            ]
            # Append a countdown when the show has not aired yet.
            # NOTE: ``time`` here is a project helper module (it provides
            # format_time_delta), not the stdlib time module.
            time_delta = show['airs_in'].total_seconds()
            if time_delta > 0:
                time_parts = time.format_time_delta(time_delta)
                time_msg = '{} Hours {} Mins'.format(time_parts[0],
                                                     time_parts[1])
                items.append(EntityGroup([Entity("Airs", time_msg)]))
            rows.append(render(items=items))
    for row in rows:
        bot.write(("PRIVMSG", trigger.nick), row)
def timeval(self, name, default):
    """Resolve the time-offset keyword argument *name*.

    Accepts either a plain number of seconds or ``<float><unit>`` (e.g.
    ``'2d'``). Returns ``(now - offset, label)``; returns *default*
    unchanged when the argument is absent. Raises TracError on an
    unparsable value.
    """
    if name not in self.kwargs:
        return default
    try:
        val = self.kwargs[name]
        try:
            val = int(val)
            text = \
                str(val) + self.tunits_name['s'] + ['s', ''][val == 1]
        except (TypeError, ValueError):
            unit = val[-1].lower()
            val = float(val[:-1])
            # BUG FIX: '%g' drops a trailing '.0' cleanly; the old
            # str(val).strip('.0') corrupted values (10.0 -> '1', 0.5 -> '5').
            text = \
                ('%g' % val) + self.tunits_name[unit] \
                + ['s', ''][val == 1]
            val = int(val * self.tunits[unit])
        val = int(unixtime()) - val
    except Exception:
        raise TracError("Invalid value '%s' for argument '%s'! "
                        % (self.kwargs[name], name))
    return (val, text)
def timeval(self, name, default):
    """Resolve the time-offset keyword argument *name*.

    Accepts either a plain number of seconds or ``<float><unit>`` (e.g.
    ``'2d'``). Returns ``(now - offset, label)``; returns *default*
    unchanged when the argument is absent. Raises TracError on an
    unparsable value.
    """
    if name not in self.kwargs:
        return default
    try:
        val = self.kwargs[name]
        try:
            val = int(val)
            text = \
                str(val) + self.tunits_name['s'] + ['s', ''][val == 1]
        except (TypeError, ValueError):
            unit = val[-1].lower()
            val = float(val[:-1])
            # BUG FIX: '%g' drops a trailing '.0' cleanly; the old
            # str(val).strip('.0') corrupted values (10.0 -> '1', 0.5 -> '5').
            text = \
                ('%g' % val) + self.tunits_name[unit] \
                + ['s', ''][val == 1]
            val = int(val * self.tunits[unit])
        val = int(unixtime()) - val
    except Exception:
        raise TracError("Invalid value '%s' for argument '%s'! "
                        % (self.kwargs[name], name))
    return (val, text)
def get_multi(self, keys, key_prefix='', namespace=None):
    """Bulk lookup: return a dict of {key: value} for every live entry.

    Missing keys are omitted; expired entries are purged and omitted.
    Keys are prefixed with *key_prefix* for the internal lookup but the
    result is keyed by the original (unprefixed) names.
    """
    cutoff = int(unixtime())
    found = {}
    self.lock.acquire(1)
    try:
        for short_key in keys:
            full_key = key_prefix + short_key
            entry = self.items.get(full_key)
            if entry is None:
                continue
            if entry.expires < cutoff:
                del self.items[full_key]  # lazy eviction on read
            else:
                found[short_key] = entry.value
    finally:
        self.lock.release()
    return found
def update_seconds(self):
    """Accrue elapsed time onto this user's current task.

    Reads ``cur_task``/``comm_time`` for the user; when ``cur_task``
    names an existing task key, adds (now - comm_time) seconds to that
    task's accumulated total. No-op when cur_task is NULL or unknown.
    """
    query = "SELECT cur_task, comm_time FROM Users"
    query += " WHERE userid = " + str(self.get_userid())
    self.cur.execute(query)
    cur_task, comm_time = self.cur.fetchone()
    if cur_task is None:  # was ``== None``; identity test is the idiom
        # No action taken because there is no current task recorded.
        return
    if not self.exists(cur_task):
        # Stale key that no longer matches a task: ignore.
        return
    key = Key.Key(cur_task)
    cat = key.get_cat()
    pri = key.get_pri()
    delta_seconds = int(unixtime()) - comm_time
    # NOTE(review): queries are assembled by string concatenation; safe
    # while cat/pri/userid are ints, but parameterized queries would be
    # more robust for this driver.
    qu = "UPDATE Tasks SET seconds = seconds + "
    qu += str(delta_seconds)
    qu += " WHERE category = " + str(cat) + ' AND '
    qu += "priority = " + str(pri) + ' AND '
    qu += "userid = " + str(self.get_userid())
    self.cur.execute(qu)
    self.db.commit()
self.height = int( width / aspect ) self.xmin = float( xmin ) self.xmax = float( xmax ) self.ymin = float( ymin ) if ymin == ymax: self.ymax = float( ymin + (xmax-xmin) / aspect ) else: self.ymax = float( ymax ) self.xstep = float( (self.xmax-self.xmin)/self.width ) self.ystep = float( (self.ymax-self.ymin)/self.height ) pygame.init() #self.surface = pygame.display.set_mode( (self.width, self.height), pygame.constants.RESIZABLE ) self.surface = pygame.display.set_mode( (self.width, self.height) ) self.surface.fill( (0, 0, 0) ) self.update_time = unixtime()-100.0 self.update() self.button_status = ( False, False, False ) self.event = pygame.event.poll() def set_title( self, title ): pygame.display.set_caption( title ) def plot( self, pixel, rgb = (1.0, 1.0, 1.0) ): self.surface.set_at( pixel, rgb ) def update( self, frequency=4.0 ): if unixtime() - self.update_time > 1./frequency: self.surface.unlock() pygame.display.flip()
def me_view(request, template_name='dtr5app/me.html'):
    """Show a settings page for auth user's profile.

    Walks an ordered checklist of profile-completeness conditions; the
    first unmet one swaps in the matching signup-step template. Order
    matters: each step assumes the previous ones are satisfied.
    """
    if not request.user.is_authenticated():
        return redirect(settings.OAUTH_REDDIT_REDIRECT_AUTH_ERROR)
    # Context shared by the settings page and all signup-step templates.
    ctx = {'sex_choices': settings.SEX,
           'lookingfor_choices': settings.LOOKINGFOR,
           'unixtime': unixtime(),
           'timeleft': request.session['expires'] - unixtime(),
           'USER_MAX_PICS_COUNT': settings.USER_MAX_PICS_COUNT}
    LK = settings.USER_MIN_LINK_KARMA
    CK = settings.USER_MIN_COMMENT_KARMA
    # Check if the user has filled in the basics of their profile. If
    # they haven't, show special pages for it.
    if not request.user.subs.all():
        # the user has no subs in their profile, offer to load them
        template_name = 'dtr5app/step_2.html'
        request.session['view_post_signup'] = True
    elif not request.user.profile.created:
        # the user reddit profile is incomplete, download it again
        # template_name = 'dtr5app/step_3_something.html'
        template_name = 'dtr5app/step_2.html'
        request.session['view_post_signup'] = True
    elif not (request.user.profile.link_karma >= LK or
              request.user.profile.comment_karma >= CK):
        # if they don't have sufficient karma, they can't sign up
        template_name = 'dtr5app/step_3_err_karma.html'
        # request.user.is_active = False
        # request.user.save()
    elif ((datetime.now().date() - request.user.profile.created) <
          timedelta(settings.USER_MIN_DAYS_REDDIT_ACCOUNT_AGE)):
        # if the account isn't old enough, they can't sign up
        template_name = 'dtr5app/step_3_err_account_age.html'
        # request.user.is_active = False
        # request.user.save()
    elif not (request.user.profile.lat and request.user.profile.lng):
        # geolocation missing, offer to auto-set it
        template_name = 'dtr5app/step_3.html'
        request.session['view_post_signup'] = True
    elif not (request.user.profile.dob and
              request.user.profile.sex and request.user.profile.about):
        # required manually input profile data is missing
        template_name = 'dtr5app/step_4.html'
        request.session['view_post_signup'] = True
        # Bounds for the date-of-birth picker: 18 to 118 years old.
        ctx['dob_min'] = '{}-{}-{}'.format(
            date.today().year - 118, date.today().month, date.today().day)
        ctx['dob_max'] = '{}-{}-{}'.format(
            date.today().year - 18, date.today().month, date.today().day)
    elif len(request.user.profile.pics) == 0:
        # no pics yet, ask to link one picture
        template_name = 'dtr5app/step_5.html'
        request.session['view_post_signup'] = True
    elif not (request.user.profile.f_distance):
        # no search settings found, ask user to chose search settings
        template_name = 'dtr5app/step_6.html'
        request.session['view_post_signup'] = True
    elif (request.session.get('view_post_signup', False)):
        # user just set at least one required item. now show them the "all
        # done" page to make display of the first search result less abrupt
        template_name = 'dtr5app/step_7.html'
        request.session['view_post_signup'] = False
    return render_to_response(template_name, ctx,
                              context_instance=RequestContext(request))
class Player:
    """A named participant in the game; tracks matches won."""
    def __init__(self, name, number):
        self.name = name
        self.matches_won = 0
        self.number = number


"""
The rules of Rock, Paper, Scissors are as follows:
    Rock eliminates Scissors
    Paper eliminates Rock
    Scissors eliminates Paper
"""

player1 = Player(input("Player 1: Please enter a name: "), 1)
player2 = Player(input("Player 2: Please enter a name: "), 2)
started = unixtime()


def getInput(player):
    """Prompt until the player types rock/paper/scissors; return it lowercased.

    BUGFIX: the original recursed on invalid input but returned the first
    (invalid) string -- the recursive call's result was discarded. A loop
    keeps re-prompting and only ever returns a valid choice.
    """
    prefix = "{} (Player {})".format(player.name, player.number)
    while True:
        inp = input("{}: Please input 'Rock', 'Paper' or 'Scissors': ".format(prefix)).lower()
        if inp in ["rock", "paper", "scissors"]:
            return inp
        print("{}: Try again.".format(prefix))


# Main game loop (truncated at the chunk boundary).
while True:
    p1_inp = getInput(player1)
    sys("cls")
    p2_inp = getInput(player2)
def get_unix_time_ms() -> int:
    """Return the current Unix time in whole milliseconds (truncated)."""
    seconds = unixtime()
    return int(seconds * 1000)
def ListOfWikiPages(self, formatter, content):
    """
    == Description ==
    Website: http://trac-hacks.org/wiki/ListOfWikiPagesMacro

    `$Id$`

    The macro `ListOfWikiPages` prints a table of all (user generated,
    i.e. non-trac-default) wiki pages with last changed date and author
    as requested in Request-a-Hack th:#2427. Version 0.2 provides also a
    long format which also includes the newest version number and links
    to the difference and the history as well as the last comment. This
    was requested by th:#4717.

    The second macro provided by this package is `LastChangesBy` which
    prints the last changes made by the given user or the logged-in user
    if no username is given.

    == Usage ==
    You can use the `ListOfWikiPages` macro like this:
    {{{
    [[ListOfWikiPages]]              # default format as configured in the config file
    [[ListOfWikiPages(format=short)]] # short format
    [[ListOfWikiPages(format=long)]]  # long format (new v0.2)
    }}}
    which prints a table of all wiki pages, or with a list of wiki pages:
    {{{
    [[ListOfWikiPages(ThatWikiPage,ThisWikiPage,AnotherWikiPage,format=...)]]
    }}}

    Since v0.3 the optional arguments `from` and `to` can be used to
    specify a time/date range as requested by th:#5344. The values of
    this arguments are taken as negative offsets to the current time
    (i.e. the time the wiki page is displayed). Allowed is a number
    followed by a unit which can be `s`,`m`,`h`,`d`,`w`,`o`,`y` for
    seconds, minutes, hours, days, weeks, month and years. If the unit
    is missing seconds are assumed.
    {{{
    [[ListOfWikiPages(from=3d)]]          # displays all wiki pages changed in the last three days
    [[ListOfWikiPages(to=15m)]]           # displays all wiki pages was where changed longer than 15 minutes ago
    [[ListOfWikiPages(from=4.5w,to=15h)]] # displays all wiki pages was where changed between 4 1/2 week and 15 hours ago
    }}}

    A headline can be given using a `headline` argument:
    {{{
    [[ListOfWikiPages(headline=Headline text without any comma)]]  # sets a table headline, may not contain ','
    }}}

    The order can be reversed, i.e. list the oldest wikis first, using:
    {{{
    [[ListOfWikiPages(order=reverse)]]
    }}}

    Unwanted wiki ranges (e.g. `Trac*`) can be excluded by the
    `exclude=pattern` option which can be given multiple times. The
    wildcards '*' (matches everything) and '?' (matches a single
    character) can be used in the pattern. (Requested by #6074)
    {{{
    [[ListOfWikiPages(exclude=Trac*,exclude=abc?)]]
    }}}
    """
    largs, kwargs = parse_args(content, multi=['exclude'])
    self.href = formatter.req.href
    # Macro argument overrides the configured default format.
    long_format = self.default_format.lower() == 'long'
    if 'format' in kwargs:
        long_format = kwargs['format'].lower() == 'long'
    self.long_format = long_format
    db = self.env.get_db_cnx()
    cursor = db.cursor()
    #cursor.log = self.env.log
    # Optional include/exclude filters become SQL fragments.
    sql_wikis = ''
    if largs:
        sql_wikis = self._get_sql_include(largs)
    sql_exclude = ''
    if 'exclude' in kwargs:
        sql_exclude = self._get_sql_exclude(kwargs['exclude'])
    self.kwargs = kwargs
    # `from`/`to` are negative offsets from now; default covers all time.
    dfrom, fromtext = self.timeval('from', (0, ''))
    dto, totext = self.timeval('to', (int(unixtime()), ''))
    if 'from' in kwargs or 'to' in kwargs:
        sql_time = " time BETWEEN %d AND %d AND " % (dfrom, dto)
    else:
        sql_time = ''
    if kwargs.get('order', 'normal') == 'reverse':
        order = " "
    else:
        order = " DESC "
    # Latest version of each page only, newest first (unless reversed).
    sqlcmd = \
        "SELECT name,time,author,version,comment FROM wiki AS w1 WHERE " \
        + sql_time + \
        "author NOT IN ('%s') " % "','".join( self.ignore_users ) + sql_wikis + sql_exclude + \
        "AND version=(SELECT MAX(version) FROM wiki AS w2 WHERE w1.name=w2.name) ORDER BY time " + \
        order
    cursor.execute(sqlcmd)
    rows = [ self.formatrow(n, name, time, version, comment, author)
             for n, [name, time, author, version, comment] in enumerate(cursor) ]
    if self.long_format:
        cols = ("WikiPage", "Last Changed At", "By", "Version", "Diff",
                "History", "Comment")
    else:
        cols = ("WikiPage", "Last Changed At", "By")
    # Optional single-cell headline row spanning all columns.
    if 'headline' in kwargs:
        headlinetag = tag.tr(tag.th(kwargs['headline'], colspan=len(cols)))
    else:
        headlinetag = tag()
    head = tag.thead( headlinetag,
                      tag.tr( map(lambda x: tag.th(x, class_=x.replace(" ", "").lower()),
                                  cols)))
    table = tag.table(head, rows, class_='listofwikipages')
    self.href = None
    return table
def get_token(request, refresh=False):
    """
    Return an access_token, either from session storage or get a fresh
    one from the Reddit API. If there is a "code" parameter in the
    request GET values, then refresh the cached access_token value.
    Call with refresh=True to refresh an existing access_token.
    """
    logger.info("Find access_token for session '%s'",
                request.session.session_key)
    api_url = "https://ssl.reddit.com/api/v1/access_token"
    # Token counts as expired once the stored expiry timestamp has passed.
    is_expired = request.session.get("expires", 0) < int(unixtime())
    headers = settings.OAUTH_REDDIT_BASE_HEADERS
    client_auth = requests.auth.HTTPBasicAuth(settings.OAUTH_REDDIT_CLIENT_ID,
                                              settings.OAUTH_REDDIT_CLIENT_SECRET)
    if is_expired and request.GET.get("code", None):
        logger.info("Using authorization_code for first time auth.")
        # Received an access code to get a new access_token. Use
        # this above anything else.
        post_data = {
            "grant_type": "authorization_code",
            "code": request.GET.get("code"),
            "redirect_uri": settings.OAUTH_REDDIT_REDIRECT_URI,
        }
        response = requests.post(api_url, auth=client_auth,
                                 headers=headers, data=post_data)
        t = response.json()
        # Cache every field of the token response in the session.
        request.session["access_token"] = t.get("access_token", "")
        request.session["refresh_token"] = t.get("refresh_token", "")
        request.session["token_type"] = t.get("token_type", "")
        request.session["expires"] = int(unixtime()) + int(t.get("expires_in", 0))
        request.session["scope"] = t.get("scope", "")
        logger.info("Initial access_token acquired.")
    elif (refresh or is_expired) and request.session.get("refresh_token", False):
        logger.info("Using refresh_token to acquire new access_token.")
        # The previous access_token is expired, use refresh_token to
        # get a new one.
        post_data = {"grant_type": "refresh_token",
                     "refresh_token": request.session.get("refresh_token")}
        response = requests.post(api_url, auth=client_auth,
                                 headers=headers, data=post_data)
        t = response.json()
        # Refresh responses carry no new refresh_token; keep the old one.
        request.session["access_token"] = t.get("access_token", "")
        request.session["token_type"] = t.get("token_type", "")
        request.session["expires"] = int(unixtime()) + int(t.get("expires_in", 0))
        request.session["scope"] = t.get("scope", "")
        logger.info("New access_token acquired.")
    else:
        if request.session.get("access_token", False):
            logger.info("Re-using cached access_token.")
        else:
            logger.info("No access_token found anywhere!")
    # If there is an access_token now, return it. Or wipe session vals.
    if request.session.get("access_token", False):
        logger.info("Returning access_token: '%s'",
                    request.session.get("access_token"))
        return request.session.get("access_token")
    else:
        logger.info("Deleting all related session values.")
        request.session["access_token"] = None
        request.session["refresh_token"] = None
        request.session["token_type"] = None
        request.session["expires"] = 0
        request.session["scope"] = None
        return False
def make_cookie_hash(login, passwd, ip):
    """SHA-1 digest binding login, password, client IP and current time."""
    return aux.sha1(f"{login}:{passwd}:{ip}:{unixtime()}")
def LastChangesBy(self, formatter, content):
    """
    This macro prints a table similar to the `[[ListOfWikiPages]]` only
    with the ''By'' column missing and the author name in the table head.
    {{{
    [[LastChangesBy(martin_s)]]       # the last 5 changes by user `martin_s`
    [[LastChangesBy(martin_s,10)]]    # the last 10 changes by user `martin_s`
    [[LastChangesBy]] # or [[LastChangesBy()]]
                                      # the last 5 changes by the current user
                                      # (i.e. every user sees it's own changes, if logged-on)
    [[LastChangesBy(,12)]]            # the last 12 changes by the current user
    [[LastChangesBy(...,format=...]]  # Selects `long` or `short` table format
    [[LastChangesBy(...,from=..,to=..]] # Selects `from` and `to` time/date range
    [[LastChangesBy(...,headline=...]]  # Overwrites headline, may not contain ','
    [[LastChangesBy(...,order=reverse]] # Lists the wikis in reverse order.
                                        # Only really useful with few wikis or with `to`/`from`.
    [[LastChangesBy(..,exclude=pattern]] # Excludes wikis matching `pattern`.
                                         # Wildcards `*` and `?` are supported.
    }}}
    """
    largs, kwargs = parse_args(content)
    #self.base_path = formatter.req.base_path
    self.href = formatter.req.href
    section = 'listofwikipages'
    # Macro argument overrides the configured default format.
    long_format = self.env.config.get(section, 'default_format',
                                      'short').lower() == 'long'
    if 'format' in kwargs:
        long_format = kwargs['format'].lower() == 'long'
    self.long_format = long_format
    self.kwargs = kwargs
    # `from`/`to` are negative offsets from now; default covers all time.
    dfrom, fromtext = self.timeval('from', (0, ''))
    dto, totext = self.timeval('to', (int(unixtime()), ''))
    if 'from' in kwargs or 'to' in kwargs:
        sql_time = " AND time BETWEEN %d AND %d " % (dfrom, dto)
    else:
        sql_time = ''
    sql_exclude = ''
    if 'exclude' in kwargs:
        sql_exclude = self._get_sql_exclude(kwargs['exclude'])
    # Positional args: [author [, count]]; author defaults to the
    # logged-in user, count to 5.
    author = len(largs) > 0 and largs[0] or formatter.req.authname
    count = len(largs) > 1 and largs[1] or 5
    try:
        count = int(count)
        if count < 1:
            # Bare raise (no active exception) deliberately trips the
            # except below so both bad-int and <1 give the same error.
            raise
    except:
        raise TracError("Second list argument must be a positive integer!")
    db = self.env.get_db_cnx()
    cursor = db.cursor()
    #cursor.log = self.env.log
    if kwargs.get('order', 'normal') == 'reverse':
        order = " "
    else:
        order = " DESC "
    # Latest version of each page by this author, newest first
    # (unless reversed), capped at `count` rows.
    cursor.execute("""
        SELECT name,time,version,comment FROM wiki AS w1 WHERE author = %s
        """ + sql_time + sql_exclude + """
        AND version=(SELECT MAX(version) FROM wiki AS w2 WHERE w1.name=w2.name)
        ORDER BY time """ + order + " LIMIT 0,%d " % count, (author, ))
    rows = [ self.formatrow(n, name, time, version, comment)
             for n, [name, time, version, comment] in enumerate(cursor)
             if n < count ]
    # Headline grammar: "Last 5 changes by" vs. "Last change by".
    if count == 1:
        count = ''
        s = ''
    else:
        s = 's'
    if self.long_format:
        cols = ("WikiPage", "Last Changed At", "Version", "Diff",
                "History", "Comment")
    else:
        cols = ("WikiPage", "Last Changed At")
    headline = "Last %s change%s by " % (count, s)
    # Describe the selected time range in the headline, if any.
    if sql_time:
        if fromtext:
            if totext:
                timetag = " between %s and %s ago" % (fromtext, totext)
            else:
                timetag = " in the last %s" % fromtext
        else:
            if totext:
                timetag = " before the last %s" % totext
            else:
                timetag = ""
    else:
        timetag = ''
    if 'headline' in kwargs:
        headlinetag = tag.tr(tag.th(kwargs['headline'], colspan=len(cols)))
    else:
        headlinetag = tag.tr( tag.th(headline, tag.strong(author), timetag,
                                     colspan=len(cols)))
    head = tag.thead( headlinetag,
                      tag.tr( map(lambda x: tag.th(x, class_=x.replace(" ", "").lower()),
                                  cols)))
    table = tag.table(head, rows, class_='lastchangesby')
    self.href = None
    return table
def me_view(request, template_name="dtr5app/me.html"):
    """Show a settings page for auth user's profile.

    Walks an ordered checklist of profile-completeness conditions; the
    first unmet one swaps in the matching signup-step template. Order
    matters: each step assumes the previous ones are satisfied.
    """
    if not request.user.is_authenticated():
        return redirect(settings.OAUTH_REDDIT_REDIRECT_AUTH_ERROR)
    # Context shared by the settings page and all signup-step templates.
    ctx = {'sex_choices': settings.SEX,
           'lookingfor_choices': settings.LOOKINGFOR,
           'unixtime': unixtime(),
           'timeleft': request.session['expires'] - unixtime(),
           'USER_MAX_PICS_COUNT': settings.USER_MAX_PICS_COUNT}
    LK = settings.USER_MIN_LINK_KARMA
    CK = settings.USER_MIN_COMMENT_KARMA
    # Check if the user has filled in the basics of their profile. If
    # they haven't, show special pages for it.
    if not request.user.subs.all():
        # the user has no subs in their profile, offer to load them
        template_name = 'dtr5app/step_2.html'
        request.session['view_post_signup'] = True
    elif not request.user.profile.created:
        # the user reddit profile is incomplete, download it again
        # template_name = 'dtr5app/step_3_something.html'
        template_name = 'dtr5app/step_2.html'
        request.session['view_post_signup'] = True
    elif not (request.user.profile.link_karma >= LK or
              request.user.profile.comment_karma >= CK):
        # if they don't have sufficient karma, they can't sign up.
        # BUGFIX: was a strict '>' comparison, which rejected users with
        # exactly the configured USER_MIN_* karma; '>=' matches the
        # "minimum" semantics and the sibling version of this view.
        template_name = 'dtr5app/step_3_err_karma.html'
        request.user.is_active = False
        request.user.save()
    elif ((datetime.now().date() - request.user.profile.created) <
          timedelta(settings.USER_MIN_DAYS_REDDIT_ACCOUNT_AGE)):
        # if the account isn't old enough, they can't sign up
        template_name = 'dtr5app/step_3_err_account_age.html'
        request.user.is_active = False
        request.user.save()
    elif not (request.user.profile.lat and request.user.profile.lng):
        # geolocation missing, offer to auto-set it
        template_name = 'dtr5app/step_3.html'
        request.session['view_post_signup'] = True
    elif not (request.user.profile.dob and
              request.user.profile.sex and request.user.profile.about):
        # required manually input profile data is missing
        template_name = 'dtr5app/step_4.html'
        request.session['view_post_signup'] = True
    elif len(request.user.profile.pics) == 0:
        # no pics yet, ask to link one picture
        template_name = 'dtr5app/step_5.html'
        request.session['view_post_signup'] = True
    elif not (request.user.profile.f_distance):
        # no search settings found, ask user to chose search settings
        template_name = 'dtr5app/step_6.html'
        request.session['view_post_signup'] = True
    elif (request.session.get('view_post_signup', False)):
        # user just set at least one required item. now show them the "all
        # done" page to make display of the first search result less abrupt
        template_name = 'dtr5app/step_7.html'
        request.session['view_post_signup'] = False
    return render_to_response(template_name, ctx,
                              context_instance=RequestContext(request))
def get_token(request, refresh=False):
    """
    Return an access_token, either from session storage or get a fresh
    one from the Reddit API. If there is a "code" parameter in the
    request GET values, then refresh the cached access_token value.
    Call with refresh=True to refresh an existing access_token.
    """
    api_url = "https://ssl.reddit.com/api/v1/access_token"
    # Token counts as expired once the stored expiry timestamp has passed.
    is_expired = request.session.get("expires", 0) < int(unixtime())
    headers = settings.OAUTH_REDDIT_BASE_HEADERS
    client_auth = requests.auth.HTTPBasicAuth(
        settings.OAUTH_REDDIT_CLIENT_ID, settings.OAUTH_REDDIT_CLIENT_SECRET
    )
    if is_expired and request.GET.get("code", None):
        # Received an access code to get a new access_token. Use
        # this above anything else.
        post_data = {
            "grant_type": "authorization_code",
            "code": request.GET.get("code"),
            "redirect_uri": settings.OAUTH_REDDIT_REDIRECT_URI,
        }
        response = requests.post(
            api_url, auth=client_auth, headers=headers, data=post_data
        )
        t = response.json()
        # Cache every field of the token response in the session.
        request.session["access_token"] = t.get("access_token", "")
        request.session["refresh_token"] = t.get("refresh_token", "")
        request.session["token_type"] = t.get("token_type", "")
        request.session["expires"] = int(unixtime()) + int(t.get("expires_in", 0))
        request.session["scope"] = t.get("scope", "")
        if settings.DEBUG:
            print("Initial access_token acquired.")
    elif (refresh or is_expired) and request.session.get("refresh_token", False):
        # The previous access_token is expired, use refresh_token to
        # get a new one.
        post_data = {
            "grant_type": "refresh_token",
            "refresh_token": request.session.get("refresh_token"),
        }
        response = requests.post(
            api_url, auth=client_auth, headers=headers, data=post_data
        )
        t = response.json()
        # Refresh responses carry no new refresh_token; keep the old one.
        request.session["access_token"] = t.get("access_token", "")
        request.session["token_type"] = t.get("token_type", "")
        request.session["expires"] = int(unixtime()) + int(t.get("expires_in", 0))
        request.session["scope"] = t.get("scope", "")
        if settings.DEBUG:
            print("New access_token acquired.")
    else:
        if settings.DEBUG:
            if request.session.get("access_token", False):
                print("Re-using cached access_token.")
            else:
                print("No access_token found anywhere!")
    # If there is an access_token now, return it. Or wipe session vals.
    if request.session.get("access_token", False):
        return request.session.get("access_token")
    else:
        request.session["access_token"] = None
        request.session["refresh_token"] = None
        request.session["token_type"] = None
        request.session["expires"] = 0
        request.session["scope"] = None
        return False