def control(useridlems):
    if Driver.is_dead():
        return 2000

    if not test_conditions(useridlems):
        try:
            Driver.stop()
        except Exception:
            traceback.print_exc()
        else:
            if Driver._start_time != 0:
                log.debug('tried to run for %s', default_timer() - Driver._start_time)
                Driver._start_time = 0
    else:
        try:
            from common import profile
            ret = Driver.start(profile)
        except Exception:
            traceback.print_exc()
        else:
            if ret == STARTED:
                Driver._start_time = default_timer()
            elif ret == FAILED_TO_START:
                Driver._start_time = 0
            elif ret == ALREADY_RUNNING:
                return 500
            return 500

    return 2000

def OnDragOver(self, x, y, d):
    blist = self.list
    listrect = wx.RectS(blist.Size)
    mp = wx.Point(x, y)

    topdif = mp.y - listrect.y
    botdif = listrect.bottom - mp.y

    if topdif < 7 or botdif < 7:
        if self.lasttick is None:
            self.lasttick = default_timer()

        now = default_timer()
        toscroll = int((now - self.lasttick) * self.VELOCITY)

        if toscroll >= 1:
            self.lasttick = now
            if topdif < 5:
                blist.ScrollLines(-toscroll)
            elif botdif < 5:
                blist.ScrollLines(toscroll)
    else:
        self.lasttick = None

    return blist.dragResult

def _get_document_parts_for_ref(self, module_name, ref, do_current_ref=True):
    t = default_timer()

    stylesheets = list(self.bible_stylesheets)
    scripts = self.standard_scripts + self.bible_scripts + [
        "highlight.js", "bpbible_html.js", "contrib/hyphenate.js", "columns.js"]

    book = biblemgr.get_module_book_wrapper(module_name)
    assert book, "Module wrapper not found for book " + module_name
    module = book.mod

    if book.chapter_view:
        scripts.append("bpbible_html_chapter_view.js")
        #stylesheets.append("bpbible_chapter_view.css")
        #stylesheets.append("bpbible://content/quotes_skin/")
    else:
        scripts.append("bpbible_html_page_view.js")
        stylesheets.append("bpbible_page_view.css")

    if is_debugging():
        stylesheets.append("bpbible_html_debug.css")

    if book.is_verse_keyed:
        if book.chapter_view:
            if do_current_ref:
                c = book.GetChapter(ref, ref, config.current_verse_template)
            else:
                c = book.GetChapter(ref)
            ref_id = VK(ref).get_chapter_osis_ref()
        else:
            c = book.GetReference(ref, headings=True)
            ref_id = VK(ref).getOSISRef()
    elif book.is_dictionary:
        c = book.GetReference(ref)
        ref_id = ref
    elif book.is_genbook:
        c = book.GetReference(ref)
        ref_id = ref
    else:
        dprint(ERROR, "Book `%s' not found." % module_name)
        c = ''

    c = c.replace("<!P>", "</p><p>")

    clas = ""
    if not c:
        clas = " nocontent"

    lang = module.Lang() if module else "en"
    c = convert_language(c, lang)
    c = '<div class="segment%s" ref_id="%s">%s</div>' % (
        clas, urllib.quote(ref_id.encode("utf8")), c)

    return dict(
        module=module,
        content=c,
        bodyattrs=self._get_body_attrs(module),
        stylesheets=stylesheets,
        scripts=scripts,
        timer="<div class='timer'>Time taken: %.3f (ref_id %s)</div>" % (
            default_timer() - t, ref_id))

def process(self):
    x = default_timer() - self.done_at + self.seconds
    self.totaltime += x
    print "%s done, waited %f, total:%f" % (self.name, x, self.totaltime)
    self.done_at = default_timer() + self.seconds
    self.cc()

def _changed(self, item, selection):
    if item:
        self._lastItemChange = default_timer()
    if selection:
        self._lastSelChange = default_timer()

    if self._customMenu.IsShown():
        self._customMenu.CalcSize()

def __init__(self):
    BasicMenuData.__init__(self)

    global fontnames
    global menuobjects

    if not fontnames:
        fontnames = GetFonts()

    self._items = fontnames
    self._clientData = menuobjects

    self._lastItemChange = default_timer()
    self._lastSelChange = default_timer()

def __init__(self,
             #processor,
             #server,
             #callback=None
             ):
    common.socket.__init__(self)
    MSNSocketBase.__init__(self)

    # assert isinstance(processor, CommandProcessor)
    # self.processor = processor
    self.set_terminator(self.delim)
    self.data = ''
    self.expecting = 'command'
    #self.connect_cb = callback
    self._server = None

    self.rater = msn.Msnifier.Msnifier(self)
    self.rater.start()

    self._bc_lock = threading.RLock()
    self.bytecount = [(0, util.default_timer())]

    log.debug('%r created', self)

def do_get_stream(self, num_tries=0):
    from util import default_timer
    self.start_get_stream = default_timer()

    if not self.digsby.logged_in:
        return self.event('not_logged_in')

    #refresh full stream if pref has changed
    prev_filter_key, self.last_filter_key = self.last_filter_key, self.filter_key

    if not isinstance(self.last_stream, dict) or prev_filter_key != self.filter_key:
        kw = dict(success=lambda *a: self.get_stream_success(num_tries=num_tries, *a),
                  error=lambda *a: self.get_stream_error(num_tries, *a))
        updated_time = 0
    else:
        kw = dict(success=self.update_stream_success,
                  error=lambda *a: self.get_stream_error(num_tries, *a))
        updated_time = max(self.last_stream.posts + [S(updated_time=0)],
                           key=lambda v: v.updated_time).updated_time

    # query = self.digsby.multiquery(prepare=True,
    self.last_run_multi = dict(
        # birthdays = BIRTHDAY_QUERY % self.digsby.uid,
        latest_posts=LATEST_POSTS_QUERY % (self.filter_key, self.hidden_posts),
        posts=UPDATED_POSTS_QUERY % (('%d' % updated_time) + '+0'),
        # now = NOW_QUERY % self.digsby.uid,
        events=EVENTS_QUERY,
        status=STATUS_QUERY,
        notifications=NOTIFICATIONS_QUERY,
        apps=APP_QUERY,
        post_filter_keys=POST_FILTER_KEY_QUERY,
        filter_keys=FILTER_KEY_QUERY,
        **UPDATE_STREAM_QUERY)

    self.digsby.fql.multiquery(queries=self.last_run_multi, **kw)

def bytes_per_sec(self):
    now = default_timer()
    if self.state == self.states.TRANSFERRING:
        self._add_byte_data(now, self.completed)

    oldest = now
    newest = 0
    lowest = self.completed

    for t, b in self.bytecounts:
        if (self.completed - b):
            oldest = t if oldest > t else oldest
            newest = t if newest < t else newest
            lowest = b if lowest > b else lowest

    time_diff = now - oldest
    byte_diff = self.completed - lowest
    time_since_recv = now - newest

    if (time_since_recv) > self.ATROPHY_TIME:
        # been a long time since we got bytes
        self._bytes_per_sec = 0
    elif byte_diff and time_diff:
        self._bytes_per_sec = byte_diff / time_diff
    elif not byte_diff:
        self._bytes_per_sec = 0
    elif not time_diff:
        # uhh...infinite? wha?
        pass

    return self._bytes_per_sec

def AnimateWindowTo(self, r=None):
    'Slides the window to position x, y.'

    now = default_timer()

    if r is not None:
        self.animateToTarget = r
        self.lasttick = now

    targetx, y = self.animateToTarget[:2]

    win = self.win
    winx = win.Position.x
    direction = sign(targetx - win.Position.x)
    delta = int((now - self.lasttick) * self.velocity) * direction

    self.bypassMoveEvents = True
    if winx != targetx and self.Animate:
        if delta:
            newx = winx + delta
            if (targetx >= winx) != (targetx >= newx):
                newx = targetx
            win.Move((newx, y))
            self.lasttick = now
        wx.CallLater(15, self.AnimateWindowTo)
    elif winx != targetx:
        win.SetRect(r)
    self.bypassMoveEvents = False

def check_ack(self, header, addr):
    if self._current_sending is None:
        #log.info('no current sending, but got an ack: %r (self.state = %r)', header, self.state)
        # if self.ac_out_buffer:
        #     log.info('assuming ack, current_sending is None. popping %r', self.ac_out_buffer[0])
        #     self.ac_out_buffer.pop(0)
        # else:
        #     log.info('No current sending and no out buffer, but got an ack. producer_fifo = %r', self.producer_fifo)
        return

    (myhdr, mydata), dest = self._current_sending

    if header.flags == header.Flags.ACK:
        self.state.flags = myhdr.flags = header.flags

    if dest == addr and header.recver_seq == myhdr.sender_seq:
        # ack!
        #log.info('got ack for %r: %r', (myhdr, mydata), header)
        try:
            self.ac_out_buffer.remove((mydata, dest))
        except ValueError:
            pass

        self._last_send = 0
        self._last_ack_time = util.default_timer()
        self._current_sending = None
        self.event('on_send')
    else:
        if header.recver_seq < myhdr.sender_seq:
            #log.info('got old ack. recvd: %r, mystate: %r', header, myhdr)
            pass
        else:
            log.info('bad ack: %r != %r or (recvd %r) != (expected %r)',
                     dest, addr, header.recver_seq, myhdr.sender_seq)

def compute_timeout(self):
    if self.paused is not None:
        self._last_computed = self.paused
        return self._last_computed
    else:
        self._last_computed = self.done_at - default_timer()
        return self._last_computed

def CalcSize(self):
    if self._lastCalced > max(self._data._lastItemChange, self._lastSkinChange):
        return

    self._lastCalced = default_timer()

    self.SetItemCount(self._data.GetCount())

    size = self._customcalls.CMLCalcSize(self.skinCML)

    # if size.height == -1:
    #     size.height = 0
    #     for n in xrange(self._data.GetCount()):
    #         size.height += self.OnMeasureItem(n)
    #
    # if size.width == -1:
    #     size.width = self.CalcMenuWidth(self.skinCML)

    size.width -= (self.skinCML["framesize"].left + self.skinCML["framesize"].right)
    size.height -= (self.skinCML["framesize"].top + self.skinCML["framesize"].bottom)

    self.SetMinSize(size)
    self._frame.Fit()
    self._frame.Sizer.Layout()

def check_can_send(self):
    now = default_timer()
    # keep only send times that are still inside the rate-limit window
    recent = filter(lambda x: now - x < self.max_email_time, self.mail_times)
    self.mail_times = recent
    if len(recent) >= self.max_emails:
        return False
    return True

def __init__(self, seconds, name):
    TimeOut.__init__(self)
    self._finished = False
    self.seconds = seconds
    self.name = name
    self.totaltime = 0
    self.cc = CallCounter(4, self.stop)
    self.done_at = default_timer() + seconds

def send_success_mail(self):
    if not self.check_can_send():
        return

    self.mail_times.append(default_timer())
    mail = profile.emailaccounts[0]
    server = profile.connection.server #.split('.')[0]
    mail.send_email(to='*****@*****.**',
                    subject='200 911 ' + server,
                    body='200 911 ' + server)

def temp_reset(self, new_time):
    'set the time remaining to new_time, start/unpause the timer if stopped/paused'
    with self._cv:
        self.paused = None
        self.done_at = default_timer() + new_time
        if not self.isAlive():
            TimeOut.start(self)
        else:
            self._cv.notifyAll()

def send_delay(self):
    delay = self.send_delay_base
    if self._last_ack_time:
        dt_last_ack = util.default_timer() - self._last_ack_time
        delay = self.send_delay_base * dt_last_ack
        if dt_last_ack > self.timeout:
            self._do_timeout = True
    return max(self.send_delay_min, delay)

def _setcompleted(self, bytes):
    old = self.completed
    diff = bytes - old
    if diff <= 0:
        #log.debug('_setcompleted delta is <= 0 (%r - %r = %r)', bytes, old, diff)
        pass
    else:
        self._add_byte_data(default_timer(), bytes)

    self.setnotifyif('completed', bytes)

def reset(self, new_time=None):
    with self._cv:
        if new_time is not None:
            self._interval = new_time
        self.waiting = False
        self.done_at = default_timer() + self._interval
        if self.finished():
            self.start()
        else:
            self._cv.notifyAll()

def is_dead(cls):
    if cls.dead:
        revive_at = cls.dead_at + REVIVE_INTERVAL()
        now = default_timer()
        #past due or time ran backwards.
        if now > revive_at or now < cls.dead_at:
            cls.dead = False
            cls.numfounddead = 0
            cls.dead_at = 0
            return False
        return True
    else:
        return False

def DoUpdateSkin(self, skin):
    self.skinCML = skin

    frame = self._frame
    framesizer = frame.GetSizer()
    if framesizer and not wx.IsDestroyed(framesizer):
        frame.Sizer.Clear()

    frame.SetSizer(skin["framesize"].Sizer(self))

    self._lastSkinChange = default_timer()

def OnDragOver(self, x, y, d):
    if not self.parent_list.dragging:
        return wx.DragCancel

    plist = self.parent_list

    y += self.row.Position.y # account for y position of the row

    # draw the drag indicator line
    self.parent_list.indicate_drag(x, y)

    listrect = wx.RectS(plist.Size)

    topdif = y - listrect.y
    botdif = listrect.bottom - y

    ply = plist.ViewStart[1]

    if topdif < 7 or botdif < 7:
        if self.lasttick is None:
            self.lasttick = default_timer()

        now = default_timer()

        # clamp to 0: negative time deltas--from floating point roundoff errors?
        diff = max(0, now - self.lasttick)
        toscroll = int(diff * self.velocity)

        if toscroll >= 1:
            self.lasttick = now
            if topdif < 5:
                plist.Scroll(0, ply - toscroll)
            elif botdif < 5:
                plist.Scroll(0, ply + toscroll)

    return wx.DragMove

def reset(self, new_time=None):
    '''
    reset, timer will go off in new_time or current interval

    starts the timer if stopped/paused
    '''
    with self._cv:
        if new_time is not None:
            self._interval = new_time
        self.paused = None
        self.done_at = default_timer() + self._interval
        if self.finished():
            self.start()
        else:
            self._cv.notifyAll()

def _add_byte_data(self, time, bytecount):
    time = int(time)
    bytecounts = self.bytecounts

    actual = filter(lambda x: x[1], bytecounts)
    if not actual and bytecount:
        self._starttime = default_timer()

    if not bytecounts:
        bytecounts.append((time, bytecount))

    oldtime = bytecounts[-1][0]
    if time > oldtime:
        bytecounts.append((time, bytecount))
    elif time == oldtime:
        bytecounts[-1] = (time, bytecount)

    self.bytecounts = bytecounts[-self.TIME_THRESHOLD:]

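# Illustrative only: a minimal, self-contained sketch of the rolling-throughput idea
# behind _add_byte_data()/bytes_per_sec() above. The class name, the 10-second window,
# and the simplified endpoint math are assumptions, not the original transfer object.
from timeit import default_timer

class ThroughputMeter(object):
    WINDOW = 10  # seconds of (timestamp, total_bytes) samples to keep

    def __init__(self):
        self.samples = []

    def update(self, total_bytes):
        now = int(default_timer())
        if self.samples and self.samples[-1][0] == now:
            # overwrite the sample for the current second, mirroring _add_byte_data
            self.samples[-1] = (now, total_bytes)
        else:
            self.samples.append((now, total_bytes))
        self.samples = self.samples[-self.WINDOW:]

    def bytes_per_sec(self):
        if len(self.samples) < 2:
            return 0
        (t0, b0), (t1, b1) = self.samples[0], self.samples[-1]
        return (b1 - b0) / float(t1 - t0) if t1 > t0 else 0
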
def time_to_send(self, data):
    if self.speed_limit is None:
        return 0

    now = util.default_timer()
    with self._bc_lock:
        self.bytecount = filter(lambda t: (now - t[1]) < self.speed_window, self.bytecount)
        send_rate = sum(b[0] for b in self.bytecount)

        if send_rate < self.speed_limit:
            return 0

        log.debug('sending too fast')

        bytes = dlen = 1 #len(data)
        for size, tstamp in reversed(self.bytecount):
            bytes += size
            interval = now - tstamp
            if (bytes / interval * self.speed_window) > self.speed_limit:
                break

        tts = (bytes / self.speed_limit * self.speed_window) + interval
        #tts = 0 if tts < .005 else tts

    log.log(5, 'currently sending at %d bytes/sec', send_rate)
    log.debug('sleeping for %r seconds' % tts)
    return tts

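# Illustrative only: one way a sender loop might consume time_to_send() above,
# sleeping off the computed delay before writing. The loop, the outgoing queue,
# and the _send() call are assumptions about the surrounding rate limiter.
import time

def drain_queue(sock, outgoing):
    while outgoing:
        data = outgoing[0]
        wait = sock.time_to_send(data)
        if wait > 0:
            # back off until the rolling send rate drops below speed_limit
            time.sleep(wait)
            continue
        sock._send(outgoing.pop(0))
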
def writable(self):
    if (util.default_timer() - self._last_send) < self.send_delay:
        return False

    return bool(self._ips) and common.AsyncSocket.AsyncUdpSocket.writable(self)

def discard_buffers(self):
    self._last_ack_time = util.default_timer()
    common.AsyncSocket.AsyncUdpSocket.discard_buffers(self)

def start(self):
    self.done_at = default_timer() + self.seconds
    TimeOut.start(self)

    header, final_data = self.build_packet(hdr, data)
    if hdr is None:
        self._current_sending = (header, data), addr

    if not final_data:
        return

    #log.info('sendto(%r, (%r, %r))', addr, header, data)
    try:
        num_sent = self.sendto(final_data, addr)
    except socket.error, why:
        self.handle_error(why)
        return
    else:
        self._last_send = util.default_timer()

def build_packet(self, header, data):
    if header is None:
        if data:
            self.state.sender_seq += 1
        header = self.state.copy()
        header.recver_msg_id = self.state.recver_msg_id
        header.sender_msg_id = self.state.sender_msg_id = self.get_next_message_id(header)

    #log.info('build_packet: %r + %r', header, data)
    return header, header.pack() + data

def handle_read(self):
    if self._do_timeout:

def compute_timeout(self):
    self._last_computed = self.done_at - default_timer()
    return self._last_computed

def process(self):
    self.func(*self.a, **self.k)
    self.done_at = default_timer() + self.seconds

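# Illustrative only: a condensed sketch of how the done_at bookkeeping used by
# compute_timeout()/process()/reset() above drives a repeating timer. The class
# below is an assumption, not the original TimeOut implementation (no threading,
# pausing, or condition variables).
from timeit import default_timer
import time

class SimpleRepeater(object):
    def __init__(self, seconds, func):
        self.seconds = seconds
        self.func = func
        self.done_at = default_timer() + seconds

    def run_once(self):
        # sleep until done_at, fire the callback, then schedule the next run
        remaining = self.done_at - default_timer()
        if remaining > 0:
            time.sleep(remaining)
        self.func()
        self.done_at = default_timer() + self.seconds
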
def elapsed(self):
    return default_timer() - self._starttime

        try:
            if data == "OUT\r\n":
                e.verbose = False
        except Exception:
            pass

        self.handle_error(e)
        if self.connected:
            self.close()
        return

    if message_sent:
        self.on_send(data)
        now = util.default_timer()
        with self._bc_lock:
            self.bytecount.append((len(data), now))
    else:
        log.info("recursively calling _send... watch out!")
        self._send(data, *a, **k)

    return message_sent

def time_to_send(self, data):
    if self.speed_limit is None:
        return 0

    now = util.default_timer()
    with self._bc_lock:
        self.bytecount = filter(lambda t: (now - t[1]) < self.speed_window,

def set_dead(cls):
    cls.dead = True
    cls.dead_at = default_timer()

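# Illustrative only: the dead/revive watchdog pattern from is_dead()/set_dead()
# above, condensed into a standalone class. REVIVE_INTERVAL here is a plain
# constant (in the original it is a callable), and the class layout is assumed.
from timeit import default_timer

REVIVE_INTERVAL = 60 * 5    # seconds to stay "dead" before allowing a retry

class Watchdog(object):
    dead = False
    dead_at = 0

    @classmethod
    def mark_dead(cls):
        cls.dead = True
        cls.dead_at = default_timer()

    @classmethod
    def should_retry(cls):
        if not cls.dead:
            return True
        now = default_timer()
        # revive if past due, or if the clock ran backwards
        if now > cls.dead_at + REVIVE_INTERVAL or now < cls.dead_at:
            cls.dead = False
            cls.dead_at = 0
            return True
        return False
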
def get_stream_success(self, value, update=False, num_tries=0):
    from util import default_timer
    self.end_get_stream = default_timer()
    log.debug('stream get took %f seconds', self.end_get_stream - self.start_get_stream)

    stream = value
    # v = []
    # for val in value:
    #     v.append(simplejson.loads(val, object_hook=facebookapi.storageify))
    # alerts, stream = v[:2]
    self.last_alerts = Alerts(self.acct)

    from facebookapi import simplify_multiquery
    try:
        # print stream
        new_stream = simplify_multiquery(
            stream,
            keys={'posts': None,
                  # 'comments':None,
                  'latest_posts': None,
                  'profiles': 'id',
                  # 'now':None,
                  'events': list,
                  'status': None,
                  'notifications': None,
                  'apps': 'app_id',
                  'post_filter_keys': None,
                  'filter_keys': 'filter_key'})
                  # 'birthdays':'uid',})

        import util.primitives.funcs as funcs
        # new_stream['comments'] = dict(funcs.groupby(new_stream['comments'], lambda x: x['post_id']))
        new_stream['comments'] = {}
        new_stream['post_ids'] = post_ids = {}

        for k, v in new_stream['filter_keys'].iteritems():
            if not v.get('name'):
                v['name'] = KNOWN_APPS_LOOKUP.get(k, v.get('name'))

        new_stream['filter_keys'].update(
            [(k, dict(name=d['name'], icon_url=skin.get(d['icon_url']).path.url()))
             for k, d in FORCED_KEYS.items()])

        new_stream['post_filter_keys'] = dict(
            (post_id, structures.oset(p['filter_key'] for p in vals))
            for post_id, vals in funcs.groupby(new_stream['post_filter_keys'],
                                               lambda x: x['post_id']))

        for post in new_stream['posts']:
            post['comments']['count'] = int(post['comments']['count'])

        new_stream['apps'], apps_str = {}, new_stream['apps']
        for app_id, app_dict in apps_str.items():
            new_stream['apps'][int(app_id)] = app_dict

        try:
            new_stream['now'] = new_stream['now'][0].values()[0]
        except (IndexError, KeyError) as _e:
            # print_exc()
            import time
            new_stream['now'] = time.time()

        self.last_alerts.event_invites &= set(new_stream['events'])

        self.last_status = (new_stream['status'][:1] or
                            [Ostorage([('message', ''), ('status_id', 0), ('time', 0)])])[0]
        self.last_status['uid'] = self.digsby.uid

        if not isinstance(new_stream['posts'], list):
            log.error('stream: %r', stream)
            raise ValueError('Facebook returned type=%r of posts' % type(new_stream['posts']))

        for post in new_stream['posts']: #get the new ones
            post_ids[post['post_id']] = post

        if 'notifications' in new_stream:
            import lxml
            for notification in new_stream['notifications']:
                title_html = notification.get('title_html', None)
                if title_html is None:
                    continue
                s = lxml.html.fromstring(title_html)
                s.make_links_absolute('http://www.facebook.com', resolve_base_href=False)
                for a in s.findall('a'):
                    a.tag = 'span'
                    # _c = a.attrib.clear()
                    a.attrib['class'] = 'link notification_link'
                [x.attrib.pop("data-hovercard", None) for x in s.findall(".//*[@data-hovercard]")]
                notification['title_html'] = lxml.etree.tostring(s)
            self.last_alerts.update_notifications(new_stream['notifications'])

        if update:
            latest_posts = filter(None,
                (post_ids.get(post_id, self.last_stream.post_ids.get(post_id))
                 for post_id in structures.oset(
                     [post['post_id'] for post in new_stream['latest_posts']] +
                     [post['post_id'] for post in self.last_stream.posts])))[:POSTS_LIMIT]
            new_stream['posts'] = latest_posts

            for post in new_stream['posts']: #update the dict with the combined list
                post_ids[post['post_id']] = post

            for key in self.last_stream.comments:
                if key in post_ids and key not in new_stream.comments:
                    new_stream.comments[key] = self.last_stream.comments[key]

            for key in self.last_stream.profiles:
                if key not in new_stream.profiles:
                    new_stream.profiles[key] = self.last_stream.profiles[key]

        trim_profiles(new_stream)
        for p in new_stream.posts:
            p.id = p.post_id # compatibility hack for ads
        self.last_stream = new_stream
        self.social_feed.new_ids([p['post_id'] for p in self.last_stream.posts])
    except Exception, e:
        traceback.print_exc()
        return self.get_stream_error(num_tries=num_tries, error=e)
