def run_ica():
    log('loading data')
    start = util.now()
    voxels, xdim, ydim, zdim = load_data()
    log(' elapsed: {}'.format(util.elapsed(start)))

    log('running independent component analysis')
    start = util.now()
    ica = decomposition.FastICA(n_components=64, max_iter=200)
    sources = ica.fit_transform(voxels)
    sources = to_dataframe(sources, load_subject_ids(), ['X{}'.format(i) for i in range(64)])
    log(' elapsed: {}'.format(util.elapsed(start)))

    log('calculating correlations between voxel and component time courses')
    start = util.now()
    correlations = []
    for voxel in voxels.columns[:32]:
        voxel = voxels[voxel]
        max_correlation = 0
        for source in sources.columns:
            source = sources[source]
            # np.corrcoef returns a 2x2 matrix; take the off-diagonal scalar
            correlation = np.corrcoef(voxel, source)[0, 1]
            if correlation > max_correlation:
                max_correlation = correlation
        correlations.append(max_correlation)
    log(' elapsed: {}'.format(util.elapsed(start)))
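# A minimal sketch (toy arrays, not the project's voxel data) of why the loop
# above indexes the result of np.corrcoef: the call returns a 2x2 correlation
# matrix, and the scalar coefficient between the two inputs sits at [0, 1].
def _corrcoef_demo():
    import numpy as np
    a = np.array([1.0, 2.0, 3.0, 4.0])
    b = np.array([2.0, 4.0, 6.0, 8.0])
    return np.corrcoef(a, b)[0, 1]  # -> 1.0; np.corrcoef(a, b) alone is 2x2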
def get(self):
    hour = util.now().hour
    if hour < 12 or hour > 22:
        self.response.headers['Content-Type'] = 'text/plain'
        self.response.out.write('Delayed.')
        return
    emails = [x for x in model.Email.all().fetch(1000) if x.confirmed]
    phones = [x for x in model.Phone.all().fetch(1000) if x.confirmed]
    count = 0  # number of queued notification events
    now = util.now()
    # send notifications one week ahead
    d1 = now + datetime.timedelta(config.FAR_LIMIT)
    for event in model.Event.gql('WHERE far_sent = :1 AND date < :2 AND date > :3',
                                 False, d1, now).fetch(10):
        count += self.notify(event, emails, phones, True)
        event.far_sent = True
        event.put()
    # send notifications one day ahead
    d1 = now + datetime.timedelta(config.SOON_LIMIT)
    for event in model.Event.gql('WHERE soon_sent = :1 AND date < :2 AND date > :3',
                                 False, d1, now).fetch(10):
        count += self.notify(event, emails, phones, False)
        event.soon_sent = True
        event.far_sent = True
        event.put()
    if count:
        logging.info('Queued %u notifications.' % count)
def get(self, cursor=None, user_id=None, venue_id=None, getall=None, level=None,
        from_time=None, until_time=None, **kwargs):
    red = {'select': 'COUNT(id)',
           'table': 'promotion_redemptions',
           'where': 'promotion_id = promotions.id'}
    promo_qry = {'select': ['id', 'title', 'description', 'passcode', 'start', '[end]',
                            'maximum', 'creator', 'level',
                            '(' + util.query(**red) + ') AS redemptions'],
                 'table': 'promotions',
                 'where': ['venue_id = ?', 'hidden != 1'],
                 'order_by': 'id DESC'}
    if from_time and until_time:
        own_red = {'select': 'COUNT(id)',
                   'table': 'promotion_redemptions',
                   'where': ('promotion_id = promotions.id',
                             'time >= ' + from_time,
                             'time < ' + until_time,
                             'user_id = ' + str(user_id))}
        promo_qry['select'].append('(' + util.query(**own_red) + ') AS own_redemptions')
    if not util.to_bool(getall):
        promo_qry['limit'] = 1
        promo_qry['where'].append(str(util.now()) + ' >= start')
        promo_qry['where'].append('([end] = 0 OR [end] > ' + str(util.now()) + ')')
        promo_qry['where'].append('(maximum = 0 OR (' + util.query(**red) + ') < maximum)')
        promo_qry['where'].append(level + ' >= level')
        promo_qry['order_by'] = 'level DESC, id DESC'
        cursor.execute(util.query(**promo_qry), (venue_id,))
        row = cursor.fetchone()
        if row:
            return {t[0]: val for t, val in zip(cursor.description, row)}
        return None
    cursor.execute(util.query(**promo_qry), (venue_id,))
    return [util.row_to_dict(cursor, row) for row in cursor.fetchall()]
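# util.query itself is not shown in this corpus. The handlers above assume it
# renders a keyword spec into SQL; the following hypothetical, minimal version
# exists only to make the keys used above ('select', 'table', 'where',
# 'order_by', 'limit') concrete -- the real helper may behave differently.
def _query_sketch(select=None, table=None, where=None, order_by=None, limit=None, **kwargs):
    if isinstance(select, (list, tuple)):
        select = ', '.join(select)
    sql = 'SELECT %s FROM %s' % (select, table)
    if where:
        if isinstance(where, (list, tuple)):
            where = ' AND '.join(where)
        sql += ' WHERE %s' % where
    if order_by:
        sql += ' ORDER BY %s' % order_by
    if limit:
        sql += ' LIMIT %d' % limit
    return sql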
def run(self):
    try:
        self.acquire_mask()
        self.session_on = now()
        self.on = True
        self.ar.begin_saving()
        self.cam.begin_saving()
        self.cam.set_flush(True)
        self.start_acq()

        # main loop
        threading.Thread(target=self.deliver_trial).start()
        threading.Thread(target=self.update_eyelid).start()
        while True:
            if self.trial_on or self.paused:
                continue
            if self.session_kill:
                break
            moving = self.determine_motion()
            eyelid = self.determine_eyelid()
            if self.deliver_override or ((now() - self.trial_off > self.min_iti)
                                         and (not moving) and eyelid):
                self.trial_flag = True
                self.deliver_override = False
        self.end()
    except:
        logging.error('Session has encountered an error!')
        raise
def __theano_build__(self):
    start_time = util.now()
    print("[Compiling model graph...]")
    print("[Loading modules]")
    encoder, decoder, ffout = self.modules["encoder"], self.modules["decoder"], self.modules["ffout"]

    x_e = T.ivector('x_e')
    y = T.ivector("y")
    y_d = y[1:]
    y_x = y[:-1]

    print("[Encoder loop]")
    s_e, updates = theano.scan(
        encoder.step,
        sequences=x_e,
        outputs_info=T.zeros(self.hiddenSize))
    vector_rep = s_e[-1]

    def decoder_out(y_x, prev):
        s = decoder.step(y_x, prev)
        o_t = ffout.step(s)
        return o_t, s

    [o_d, s_d], updates = theano.scan(
        decoder_out,
        sequences=y_x,
        outputs_info=[None, dict(initial=vector_rep)])

    def decoder_out_free(y_x, prev):
        s = decoder.step(y_x, prev)
        o_t = ffout.step(s)
        return (T.argmax(o_t), s), theano.scan_module.until(
            T.eq(T.argmax(o_t), T.constant(token2index["END_TOKEN"])))

    print("[Decoder loop]")
    [o_d_free, s_d_free], updates = theano.scan(
        decoder_out_free,
        outputs_info=[dict(initial=T.cast(T.constant(token2index["START_TOKEN"]), "int64")),
                      dict(initial=vector_rep)],
        n_steps=40)

    cost = T.sum(T.nnet.categorical_crossentropy(o_d, y_d))

    print("[Prediction]")
    self.predict_class = theano.function([x_e], o_d_free)
    self.vector_rep = theano.function([x_e], vector_rep)
    self.ce_error = theano.function([x_e, y], cost)

    lr = T.scalar('learning rate')
    updates = ffout.getUpdates(cost, lr) + decoder.getUpdates(cost, lr) + encoder.getUpdates(cost, lr)

    print("[Stochastic Gradient Descent]")
    self.SGD = theano.function([x_e, y, lr], updates=updates)

    print("[Took %d milliseconds]" % (util.now() - start_time))
def load(self, name):
    start_time = util.now()
    print "[Loading %s:%s...]" % (self.filestr, name),
    sys.stdout.flush()
    rtn = self.data[name]
    print "[Took %d milliseconds]" % (util.now() - start_time)
    return rtn
def to_phase(self, ph):
    self.phase_times[self.current_phase][1] = now()
    self.phase_times[ph][0] = now()
    self.current_phase = ph
    self.phase_start = now()
    self.hinted = False

    if ph == self.PHASE_END:
        # sanity check: should have been rewarded only if solely licked on correct side
        if (self.lick_rule_side and self.lick_rule_phase and (not self.licked_early)
                and (not self.multiple_decisions) and self.use_trials):
            assert bool(self.rewarded) == (
                any(self.lick_phase_licks[self.trial["correct"]])
                and (not any(self.lick_phase_licks[-self.trial["correct"] + 1])))
        if not self.rewarded:
            if self.use_trials:
                self.trial_corrects.append(self.trial["correct"])
            else:
                self.trial_corrects.append(self.X)

        # determine trial outcome
        if not self.use_trials:
            if any(self.lick_phase_licks):
                outcome = self.COR
            else:
                outcome = self.INCOR
        elif self.use_trials:
            if self.rewarded and self.lick_rule_side and not self.multiple_decisions:
                outcome = self.COR
            elif self.rewarded and ((not self.lick_rule_side) or self.multiple_decisions):
                lpl_min = np.array([min(i) if len(i) else -1 for i in self.lickph_andon_licks])
                if np.all(lpl_min == -1):
                    outcome = self.NULL
                else:
                    lpl_min[lpl_min == -1] = now()
                    if np.argmin(lpl_min) == self.trial["correct"]:
                        outcome = self.COR
                    else:
                        outcome = self.INCOR
            elif self.trial_kill:
                outcome = self.KILLED
            elif self.licked_early:
                outcome = self.EARLY
            # The BOTH logic no longer works because reward-phase licks are now
            # included in lick_phase_licks; BOTH will never show up, though it can
            # still *rarely* be a cause of trial failure:
            # elif any(self.lick_phase_licks[self.L]) and any(self.lick_phase_licks[self.R]):
            #     outcome = self.BOTH
            elif any(self.lick_phase_licks[-self.trial["correct"] + 1]):
                outcome = self.INCOR
            elif not any(self.lick_phase_licks):
                outcome = self.NULL
        self.trial_outcomes.append(outcome)
def stimulate(self):
    n = len(self.th.trt)
    t0 = now()
    while self.current_phase == PHASE_STIM and self.stim_idx < n:
        dt = now() - t0
        if dt >= self.th.trt['time'][self.stim_idx]:
            # logging.debug(dt - self.th.trt['time'][self.stim_idx])
            self.stimulator.go(self.th.trt['side'][self.stim_idx])
            self.stim_idx += 1
def npz_load(inp, name):
    data = util.loadFile(inp)
    start_time = util.now()
    # end='' keeps the cursor on the same line, matching the Python 2 trailing comma
    print("[Loading %s:%s...]" % (inp, name), end='')
    sys.stdout.flush()
    loaded = data[name]
    print("[Took %d milliseconds]" % (util.now() - start_time))
    return loaded
def _finish_render(self, state, tsrender):
    # finish by adding the current state as the reading
    now = util.now()
    state = self.translate_state(state)
    if self.autoadd:
        self.add(now, state)
    tsrender = dict(tsrender)
    tsrender['Readings'] = [(util.now() * 1000, state)]
    return tsrender
def hold_open(self, side, dur):
    if self.is_open[side]:
        if self.force_next:
            self._close(side)
        elif not self.force_next:
            return
    self._open(side)
    start = now()
    while now() - start < dur:
        pass
    self._close(side)
def _gen_job(self, params):
    job = Job()
    job.name = self._gen_name(params)
    job.command = self._gen_cmd(job, params)
    (job.deps, job.dep_expr) = self._gen_dep(job, params)
    job.set_frequency(interval, unit)
    job.at_time = (now() + at_time_delay).time()
    job.valid_window = (now(), now() + valid_window_len)
    job.run_limit = run_limit
    job.callback = self._gen_callback(job, params)
    return job
def shuffle(a, b):
    start_time = util.now()
    print "[Shuffling...]",
    sys.stdout.flush()
    assert len(a) == len(b), "Length of arrays is not equal."
    combined = np.asarray([[x, y] for x, y in zip(a, b)])
    np.random.shuffle(combined)
    print "[Took %d milliseconds]" % (util.now() - start_time)
    return combined[:, 0], combined[:, 1]
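# An equivalent unison shuffle (a sketch, assuming numpy array inputs): permute
# a shared index instead of packing pairs into one combined array, which avoids
# the copy that the zip-based version above builds.
def _shuffle_sketch(a, b):
    import numpy as np
    assert len(a) == len(b), "Length of arrays is not equal."
    idx = np.random.permutation(len(a))
    return a[idx], b[idx]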
def go(self):
    while self.on:
        while now() - self.t0 < 1. / self.sample_rate:
            pass
        self.t0 = now()
        dat = (0.1 * np.arange(50)).reshape([5, 10]).astype(float) \
            + np.random.normal(0, .5, size=[5, 10])
        if np.random.random() < 0.15:
            dat[0, :] = np.random.choice([4, 5, 6, 7, 8])
        if np.random.random() < 0.15:
            dat[1, :] = np.random.choice([4, 5, 6, 7, 8])
        self.data_q.put([now(), now(), dat])
def hold_open(self, side, dur):
    self.active += 1
    if self.is_open[side]:
        if self.force_next:
            self._close(side)
        elif not self.force_next:
            # balance the counter before the early return
            self.active -= 1
            return
    start = now()
    self._open(side)
    while now() - start < dur:
        pass
    self._close(side)
    self.active -= 1
def divide(a, b, ratio):
    start_time = util.now()
    print "[Dividing len %d data by %f ratio...]" % (len(a), ratio)
    sys.stdout.flush()
    assert len(a) == len(b), "Length of arrays is not equal."
    index = int(round(len(a) * ratio))  # round() returns a float; slicing needs an int
    a_1, a_2 = a[:index], a[index:]
    b_1, b_2 = b[:index], b[index:]
    print "[Division of %d:%d]" % (len(a_1), len(a_2)),
    print "[Took %d milliseconds]" % (util.now() - start_time)
    return a_1, a_2, b_1, b_2
def get(self):
    if now() - self.last_query < 1. / self.query_rate:
        return None
    self.last_query = now()
    self.saver.query_flag.value = True
    frs = []
    idx = 0
    full = mp2np(self.saver.query_queue)
    for r in self.resolution:
        nl = np.product(r)
        fr = full[idx:idx + nl].reshape(r[::-1])
        idx += nl
        frs.append(fr)
    return frs
def run(self):
    now()
    lib = "CLEyeMulticam.dll"
    dll = ctypes.cdll.LoadLibrary(lib)
    dll.CLEyeGetCameraUUID.restype = GUID
    dll.CLEyeCameraGetFrame.argtypes = [c_void_p, c_char_p, c_int]
    dll.CLEyeCreateCamera.argtypes = [GUID, c_int, c_int, c_float]
    self.vc = Ps3Eye(self.idx, self.color_mode, self.resolution_code, self.frame_rate, dll=dll)

    settings = [
        (CLEYE_AUTO_GAIN, self.auto_gain),
        (CLEYE_AUTO_EXPOSURE, self.auto_exposure),
        (CLEYE_AUTO_WHITEBALANCE, self.auto_wbal),
        (CLEYE_GAIN, self.gain),
        (CLEYE_EXPOSURE, self.exposure),
        (CLEYE_WHITEBALANCE_RED, self.wbal_red),
        (CLEYE_WHITEBALANCE_BLUE, self.wbal_blue),
        (CLEYE_WHITEBALANCE_GREEN, self.wbal_green),
        (CLEYE_VFLIP, self.vflip),
        (CLEYE_HFLIP, self.hflip),
    ]
    self.vc.configure(settings)
    self.vc.start()

    if self.save_name is not None:
        self.vid_name = self.save_name + '.avi'
        self.vidts_name = self.save_name + '.tstmp'
        self.vw = cv2.VideoWriter(self.vid_name, 0, self.frame_rate,
                                  frameSize=self.resolution, isColor=False)
        self.vidts_file_temp = open(self.vidts_name, 'a')
        self.offset.value = self.clock_sync_obj.value - now()
        self.vidts_file_temp.write('%0.20f\n' % self.offset.value)
        if not self.vw.isOpened():
            logging.error('Video writer failed to open')
            raise Exception('Video writer failed to open')

    # begin true run
    time.sleep(0.1)
    while self.READING.value:
        val = False
        val, fr = self.vc.get_frame()
        if val:
            self.ts.value = now()  # (time.time(), time.clock(), now())
            self.cS[:] = np.fromstring(fr, np.uint8)
            if self.SAVING.value and self.save_name:
                self.vw.write(mp2np(self.cS).reshape(self.read_dims))
                self.vidts_file_temp.write('%0.20f,' % self.ts.value)

    if self.save_name:
        self.vw.release()
        self.vidts_file_temp.close()
    self.thread_complete.value = 1
def set(self, cursor=None, user_id=None, staff_user_id=None, venue_id=None,
        manager=None, promo_perm=None, delete=None, **kwargs):
    if util.to_bool(delete):
        qry = {'delete': 'venue_staff', 'where': ('user_id = ?', 'venue_id = ?')}
        cursor.execute(util.query(**qry), (staff_user_id, venue_id))
        qry = {'delete': 'venue_managers', 'where': ('user_id = ?', 'venue_id = ?')}
        cursor.execute(util.query(**qry), (staff_user_id, venue_id))
    elif util.to_bool(manager):
        qry = {'select': 'id', 'table': 'venue_managers',
               'where': ('user_id = ?', 'venue_id = ?'), 'order_by': 'id', 'limit': 1}
        cursor.execute(util.query(**qry), (staff_user_id, venue_id))
        res = cursor.fetchone()
        if not res:
            qry = {'delete': 'venue_staff', 'where': ('user_id = ?', 'venue_id = ?')}
            cursor.execute(util.query(**qry), (staff_user_id, venue_id))
            qry = {'insert_into': 'venue_managers', 'columns': ('user_id', 'venue_id', 'time')}
            cursor.execute(util.query(**qry), (staff_user_id, venue_id, util.now()))
    else:
        qry = {'select': 'id', 'table': 'venue_staff',
               'where': ('user_id = ?', 'venue_id = ?'), 'order_by': 'id', 'limit': 1}
        cursor.execute(util.query(**qry), (staff_user_id, venue_id))
        res = cursor.fetchone()
        if not res:
            qry = {'delete': 'venue_managers', 'where': ('user_id = ?', 'venue_id = ?')}
            cursor.execute(util.query(**qry), (staff_user_id, venue_id))
            qry = {'insert_into': 'venue_staff',
                   'columns': ('user_id', 'venue_id', 'time', 'promo_perm')}
            cursor.execute(util.query(**qry),
                           (staff_user_id, venue_id, util.now(),
                            1 if util.to_bool(promo_perm) else 0))
        else:
            qry = {'update': 'venue_staff', 'set_values': ('promo_perm'),
                   'where': ('user_id = ?', 'venue_id = ?')}
            cursor.execute(util.query(**qry),
                           (1 if util.to_bool(promo_perm) else 0, staff_user_id, venue_id))
    return True
def simu(options):
    itera = 0
    #################################
    # Begin simu
    if options.reset:
        if not prims.initDb():
            raise util.SimuException("Market is not opened")
    begin = util.getAvct(None)
    with util.DbConn(const) as dbcon:
        with util.DbCursor(dbcon) as cursor:
            util.writeMaxOptions(cursor, options)
            for user in util.nameUsers():
                prims.createUser(cursor, user)
            if options.CHECKQUALITYOWNERSHIP:
                util.setQualityOwnership(cursor, True)
    ##################################
    if (not scenarii.threadAllowed(options.scenario)) and options.threads > 1:
        raise Exception("This scenario cannot be run in threads")
    _begin = util.now()
    if options.threads == 1:
        user = util.nameUser(0)
        scenarii.simuInt((options, user))
    else:
        # run in threads
        ts = []
        for i in range(options.threads):
            user = util.nameUser(i)
            t = threa.ThreadWithArgs(func=scenarii.simuInt, args=(options, user), name=user)
            t.start()
            ts.append(t)
        for t in ts:
            t.join()
    itera = options.iteration * options.threads
    duree = util.duree(_begin, util.now())
    ##################################
    # Terminate simu
    terminate(begin, duree, itera, options)
    return
def _flush(self, force=False):
    """Send out json-packed report objects to registered listeners.

    :param boolean force: if True, ignore ``MinPeriod``/``MaxPeriod`` and
        force the reporting metadata to disk
    :rtype: a :py:class:`twisted.internet.task.DeferredList` instance which
        will fire when delivery to all subscribers has finished, or errback
        when any fail
    """
    deferList, deleteList = [], []
    for sub in self.subscribers:
        now = util.now()
        if sub.get('ExpireTime', now) < now:
            # remove expired reports
            deleteList.append(sub['uuid'])
        # either we've gone too long without trying and so need to deliver a
        # report, or else we have new data and have waited for at least
        # MinPeriod since the last report
        elif force or sub.deliverable():
            d = defer.maybeDeferred(sub.attempt)
            # no errback needed here: we want to propagate the error and
            # don't need to do any cleanup
            deferList.append(d)
    map(self.del_report, deleteList)
    d = defer.DeferredList(deferList, fireOnOneErrback=True, consumeErrors=True)
    if force:
        d.addBoth(self.save_reports)
    return d
def at_someone(self, wid, uid):
    forward_key = '%s:%s' % (self.FOLLOWS_KEY, wid)
    forward = self.r.sadd(forward_key, uid)
    reverse_key = '%s:%s' % (self.FOLLOWERS_KEY, uid)
    reverse = self.r.zadd(reverse_key, wid, util.now())
    # TODO: the parameters may change in a newer API version!!
    return forward and reverse
def get_feeds(self, uid, until=None, before=None, page=1, page_size=10, limit=20):
    # TODO: make the initial item count dynamic
    '''Fetch someone's feeds.

    @param until: when given, only fetch the newest feeds between `until` and now
    @param before: when given, only fetch feeds from before `before`
    '''
    if not until and not before:
        return self.r.zrevrange(self.FEED_KEY % uid, (page - 1) * page_size,
                                page * page_size, withscores=True)
    elif until and not before:
        # zrevrangebyscore takes (max, min); 'WITHSCORES' must be the keyword
        # argument, not a positional one (which would be `start`)
        return self.r.zrevrangebyscore(self.FEED_KEY % uid, util.now(), until + 1,
                                       withscores=True)
    elif not until and before:
        return self.r.zrevrangebyscore(self.FEED_KEY % uid, max=before - 1, min='-inf',
                                       start=0, num=limit, withscores=True)
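# The feed above is a Redis sorted set keyed per user with unix-time scores.
# A minimal sketch of the read patterns it relies on (hypothetical key and
# members; assumes redis-py >= 3.0, where zadd takes a mapping):
def _feed_demo(r, uid):
    key = 'feed:%s' % uid
    r.zadd(key, {'wish:1': 1700000000, 'wish:2': 1700000100})
    newest_first = r.zrevrange(key, 0, 9, withscores=True)                # latest page
    since = r.zrevrangebyscore(key, '+inf', 1700000050, withscores=True)  # after a timestamp
    return newest_first, since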
def make_wish(self, wish=None):
    try:
        data = {
            'title': xhtml_escape(self.get_argument('title', default='')),
            'content': xhtml_escape(self.get_argument('content', default='')),
            'is_public': 1 if self.get_argument('is_public', default=None) == 'on' else 0,
            'is_anonymous': 1 if self.get_argument('is_anonymous', default=None) == 'on' else 0,
            'has_cometrue': 0,
            'is_share': 1 if self.get_argument('is_share', default=None) == 'on' else 0,
            'poster': self.get_argument('poster', default=None),
            'ctime': util.now(),
            'stat': self.get_argument('stat', default='active'),
            'uid': self.current_user.uid,
        }
        validate(data, self.schema)
        if not wish:
            wish = models.Wish()
        for item in data:
            setattr(wish, item, data[item])
        self.session.add(wish)
        self.session.commit()
        is_active = data['stat'] == 'active'
        self.update_tag(wish, is_active)
        self.update_friends(wish, is_active)
        return wish
    except ValueError, e:
        self.write(e.message)  # TODO: for debugging only
        self.json_write(code='000')
def next_trial(self):
    self.email()
    side = self._next_side()
    self.update_level()
    ratio = self._next_ratio()
    rule = self._next_rule()
    stereo = self._next_stereo()
    manip = self._next_manip()
    dur = self._next_stimphase_dur()
    delay = self._next_delay()
    reward_scale = self._next_reward_scale(side)
    # if this is 0, move leftward (to eliminate left bias)
    self.do_adjust_mp285 = self._check_adjust_mp285()
    self.trt, final_lam = self._generate_trial(side, ratio, dur, stereo)
    final_ratio = final_lam[R] / final_lam[L]
    panda_trt = pd.DataFrame(self.trt)
    panda_trt['trial'] = len(self.trials)
    self.saver.write('trials_timing', panda_trt)
    self.trials.loc[len(self.trials)] = pd.Series(dict(
        start=now(),
        ratio=final_ratio,
        side=side,
        dur=dur,
        nL_intended=np.sum(self.trt['side'] == L),
        nR_intended=np.sum(self.trt['side'] == R),
        condition=self.condition,
        idx=len(self.trials),
        delay=delay,
        rule=rule,
        level=float(self.level),
        manipulation=manip,
        reward_scale=reward_scale,
        draw_p=self.current_draw_p))
def run(self):
    logger.info("Starting matlab engine")
    self.eng = matlab.engine.start_matlab()
    self.eng.workspace['xtmp_codepath'] = str(self.sim.cd_path)
    self.future = Future()
    self.eval("cd(xtmp_codepath);", nargout=0)
    self.sim.t_started = now()
    # TODO: security
    self.future = Future()
    self.feval(self.sim.entry_point, self.sim.init_params, nargout=0,
               prefix='%s = ' % str(self.sim.handle_name))
    while True:
        # t, (action, args, future, persist) = self.sim.q.get()
        data, self.future = self.sim.q.get()
        print "Popped " + repr(data) + " from Q"
        if len(data) == 2:
            statement, nargout = data
            self.eval(statement, nargout=nargout)
        elif len(data) == 3:
            fn_lhs, args, nargout = data
            self.feval(fn_lhs, args, nargout=nargout)
        else:
            assert False
        self.sim.q.task_done()
def worker_main(anime, shared):
    try:
        timeline = shared['timeline']
        # pre-processing
        anchor = create_title_link(generate_uri(anime.search_title))
        filtered = filter_tuple(anchor)
        latest_episode = episodes(filtered)[0]
        logstate(anime, filtered, latest_episode)
        if anime.latest_episode == -1 or latest_episode > anime.latest_episode:
            # update latest episode information
            anime.latest_episode = latest_episode
            # remove all occurrences of this anime from the timeline,
            # then update the shared dictionary
            for index in range(len(timeline) - 1, -1, -1):
                if timeline[index][0].title == anime.title:
                    timeline.pop(index)
            shared['timeline'] = timeline
            # send an email to subscribers
            with io.StringIO() as strbuf:
                print('Transmission DateTime: %s' % nowstr(), file=strbuf)
                print('Anime: %s' % anime.title, file=strbuf)
                print('Latest episode: %s' % latest_episode, file=strbuf)
                print(filtered, file=strbuf)
                print('', file=strbuf)
                sendmail(settings.recipients, anime, strbuf)
    # may fail when the nyaa torrent site is suffering a DDoS
    except Exception:
        with open('{startup}_{filename}'.format(
                startup=now().strftime('%Y%m%d_%H%M%S'),
                filename='fatal.txt'), 'a') as fatal_log:
            traceback.print_exc(file=fatal_log)
def subscribe(self, onReceive, onTimeout, timeFrom=0, timeTo=sys.maxint, minId=0, timeoutSec=0):
    """Subscribe to messages within a specific time span.

    @param onReceive: invoked to notify the subscriber once the messages of
        interest are retrieved.
    @param onTimeout: invoked if the subscriber has waited for more than
        `timeoutSec` seconds.
    @param timeFrom: only retrieve messages after timestamp `timeFrom` (unix time).
    @param timeTo: only retrieve messages before timestamp `timeTo` (unix time).
    @param minId: this is a HACK...
    """
    messages = self._flatten(self.messageQueue[timeFrom:timeTo], minId)
    messages = list(messages)
    if len(messages) != 0 or timeoutSec == 0:
        onReceive(messages)
        return
    waitUntil = now() + timeoutSec
    subscription = (timeFrom, timeTo, onReceive, onTimeout)
    self.subscribers.setdefault(waitUntil, []).append(subscription)
def subscribe(self):
    """Send or re-send the request for data."""
    if self.expire_time:
        self.rpt_obj['ExpireTime'] = util.now() + int(self.expire_time * 1000)
    agent = Agent(reactor)
    d = agent.request('PUT',
                      self.url + '/reports/' + str(self.rpt_obj['uuid']),
                      Headers({'Content-type': ['application/json']}),
                      sjson.AsyncJSON(self.rpt_obj))

    def eb(request):
        self.is_subscribed = False
        log.err("Subscription failed to " + self.url)
        return False

    def sb(request):
        if not isinstance(request, bool) and request.code in [200, 201]:
            log.msg("Successfully subscribed to " + self.url)
            self.is_subscribed = True
            return True
        else:
            return eb(request)

    d.addErrback(eb)
    d.addCallback(sb)
    return d
def update_db(self):
    t = self.get_update_db_time()
    if util.now() - t < 24 * 3600 * 1000:
        return
    url = withUrl("api/problems/all/")
    f = urllib.request.urlopen(url)
    content = f.read().decode('utf-8')
    qlist = json.loads(content)
    try:
        for q in qlist['stat_status_pairs']:
            id = q['stat']['question_id']
            front_id = q['stat']['frontend_question_id']
            if is_int(front_id):
                id = int(front_id)
            level = q['difficulty']['level']
            slug = q['stat']['question__title_slug']
            paid_only = q['paid_only']
            title = self.get_title_with_slug(id, slug, paid_only)
            print("id:", id, level, title)
        self.save_update_db_time()
    except Exception as e:
        print("leetcode update db error:", e)
def set(self, cursor=None, user_id=None, venue_id=None, comment=None, **kwargs):
    qry = {'insert_into': 'venue_comments',
           'columns': ('user_id', 'venue_id', 'time', 'comment')}
    cursor.execute(util.query(**qry), (user_id, venue_id, util.now(), comment))
    return True
def EveryNCallback(self):
    self.last_ts = now()
    with self._data_lock:
        # self.read_data[:] = 0
        self.ReadAnalogF64(self.read_buffer_size, self.timeout,
                           pydaq.DAQmx_Val_GroupByChannel, self.read_data,
                           self.effective_buffer_size, pydaq.byref(self.read), None)
        self._newdata_event.set()
    return 0
def EveryNCallback(self):
    with self._data_lock:
        self.last_ts = now()
        self.ReadAnalogF64(self.read_buffer_size, self.timeout,
                           pydaq.DAQmx_Val_GroupByChannel, self.read_data,
                           self.effective_buffer_size, pydaq.byref(self.read), None)
        self._newdata_event.set()
        self.save()
    return 0
def callback(self, *args):
    """Callback registered to run when the job finishes."""
    stat = 'Success' if args[0] == 0 else 'Fail'
    job = args[2]
    dic = {'status': stat, 'stop_time': now()}
    db.update_runtime(dic, {'job_name': job.name})
    logger.info('finish job %s with status %s' % (job.name, stat))
def preview(self, completed, planned, tags):
    assert cherrypy.request.method.upper() == 'POST'
    today = util.today().toordinal()
    now = util.now()
    post = Post(('<preview>', today, now,
                 completed.decode("utf-8"),
                 planned.decode("utf-8"),
                 tags.decode("utf-8")))
    return render('preview.xhtml', post=post)
def css_class(self):
    classes = ''
    if self.soon_sent:
        classes += ' soon'
    if self.date < util.now():
        classes += ' past'
    return classes.strip()
def addBugReport(self):
    if self.LineBugReport.text() == "":
        return
    bugreport = {
        "author": self.parent.client.login,
        "text": self.LineBugReport.text(),
        "date": datetostr(now()),
        "uid": "%s-%s" % (self.mod.uid,
                          str(len(self.mod.bugreports) + len(self.mod.comments)).zfill(3)),
    }
    self.parent.client.send(dict(command="modvault", type="addbugreport",
                                 moduid=self.mod.uid, bugreport=bugreport))
    c = CommentItem(self, bugreport["uid"])
    c.update(bugreport)
    self.BugReports.addItem(c)
    self.mod.bugreports.append(bugreport)
    self.LineBugReport.setText("")
def set(self, cursor=None, user_id=None, post_id=None, media_id=None, **kwargs):
    qry = {'insert_into': 'post_shares',
           'columns': ('user_id', 'post_id', 'media_id', 'time')}
    cursor.execute(util.query(**qry), (user_id, post_id, media_id, util.now()))
    return True
def __init__(self, url, title, preview, category, body, image=None):
    self.url = url
    self.title = title
    self.body = body
    self.category = category
    self.created = now()
    self.preview = preview
    self.image = image
def generate_timeline(animes):
    tl = []
    nowtime = now()
    weekday = nowtime.weekday()
    for anime in animes:
        # anime.broadcast = weekday, int(hour), int(minute)
        aweekday, ahour, aminute = anime.broadcast
        weekday_diff = aweekday - weekday
        if weekday_diff > 0 or all([weekday_diff == 0,
                                    nowtime.hour < ahour,
                                    nowtime.minute < aminute + 30]):
            base = now().replace(hour=ahour, minute=aminute, second=0, microsecond=0)
            base += timedelta(days=weekday_diff, minutes=30)
            for i in range(24):
                tl.append((anime, base + timedelta(minutes=5 * i)))
            for i in range(1, 24):
                tl.append((anime, base + timedelta(hours=i)))
    tl.sort(key=itemgetter(1))
    return tl
def to_dict(self):
    return {
        'id': self.id,
        'cd_path': self.cd_path,
        'username': self.username,
        'entry_point': self.entry_point,
        'init_params': self.init_params,
        't_started': self.t_started,
        't_elapsed': now() - self.t_started,
    }
def i18n_datestr(then, now=None):
    """Converts a datetime object to a nice string representation."""
    def agohence(n, what, divisor=None):
        if divisor:
            n = n // divisor
        wwhat = "%%s %s" % what
        n_what = i18n.n_(wwhat, wwhat + "s", abs(n)) % abs(n)
        if n < 0:
            return i18n._("%s from now") % n_what
        else:
            return i18n._("%s ago") % n_what

    oneday = 86400   # == 24 * 60 * 60
    onehour = 3600   # == 60 * 60

    if not now:
        now = util.now()
    delta = now - then
    deltaseconds = int(delta.days * oneday + delta.seconds + delta.microseconds * 1e-06)
    deltadays = abs(deltaseconds) // oneday
    if deltaseconds < 0:
        deltadays *= -1  # fix for oddity of floor

    if deltadays:
        if abs(deltadays) < 4:
            return agohence(deltadays, 'day')
        datedict = dict(
            day=then.day,
            monthname=i18n._("%s_month" % then.month),
            month=then.month,
            year=then.year,
        )
        if then.year != now.year or deltadays < 0:
            return i18n._("%(day)s.%(month)s.%(year)s") % datedict
        return i18n._("%(day)s.%(month)s") % datedict

    if int(deltaseconds):
        if abs(deltaseconds) > onehour:
            return agohence(deltaseconds, 'hour', onehour)
        elif abs(deltaseconds) > 60:
            return agohence(deltaseconds, 'minute', 60)
        else:
            return agohence(deltaseconds, 'second')
    return agohence(1, 'second')  # !!! no milli/micro

    # dead code below: unreachable past the early return above
    deltamicroseconds = delta.microseconds
    if delta.days:
        deltamicroseconds = int(delta.microseconds - 1e6)  # datetime oddity
    if abs(deltamicroseconds) > 1000:
        return agohence(deltamicroseconds, 'millisecond', 1000)
    return agohence(deltamicroseconds, 'microsecond')
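# A usage sketch for i18n_datestr (fixed datetimes so the output is
# deterministic; assumes the i18n module passes the format strings through):
def _datestr_demo():
    import datetime
    now = datetime.datetime(2024, 1, 10, 12, 0, 0)
    then = now - datetime.timedelta(hours=3)
    # deltaseconds == 10800 > onehour, so the hour branch fires: 10800 // 3600 == 3
    return i18n_datestr(then, now=now)  # e.g. "3 hours ago"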
def post(self, completed, planned, tags, isedit=False, **kwargs):
    loginid = cherrypy.request.loginid
    assert cherrypy.request.method.upper() == 'POST'
    cur = model.get_cursor()
    cur.execute('''SELECT IFNULL(email, userid) FROM users WHERE userid = ?''', (loginid,))
    email, = cur.fetchone()
    completed = completed or None
    planned = planned or None
    tags = tags or None
    today = util.today().toordinal()
    now = util.now()
    bugs = kwargs_to_buglist(kwargs)
    if isedit:
        cur.execute(
            '''UPDATE posts SET completed = ?, planned = ?, tags = ?, posttime = ?
               WHERE userid = ? AND postdate = (
                   SELECT lastpostdate FROM (
                       SELECT MAX(postdate) AS lastpostdate
                       FROM posts AS p2 WHERE p2.userid = ?
                   ) AS maxq
               )''',
            (completed, planned, tags, now, loginid, loginid))
    else:
        cur.execute(
            '''INSERT INTO posts (userid, postdate, posttime, completed, planned, tags)
               VALUES (?, ?, ?, ?, ?, ?)''',
            (loginid, today, now, completed, planned, tags))
    for bug in bugs:
        model.save_bugstatus(cur, loginid, bug, today)
    allteam, sendnow = model.get_userteam_emails(loginid)
    if isinstance(completed, str):
        completed = completed.decode("utf-8")
    if isinstance(planned, str):
        planned = planned.decode("utf-8")
    if isinstance(tags, str):
        tags = tags.decode("utf-8")
    if len(sendnow):
        mail.sendpost(email, sendnow,
                      model.create_post_with_bugs(
                          (loginid, today, now, completed, planned, tags), None, bugs))
    raise cherrypy.HTTPRedirect(cherrypy.url('/'))
def deliverable(self):
    """Check whether attempt() should be called.

    :rtype boolean: True if a report should be sent
    """
    now = util.now()
    if self.get('Paused', False):
        return False
    return (now - self['LastSuccess'] > self['MaxPeriod']) or \
           (len(self['PendingData']) > 0 and
            (now - self['LastSuccess']) > self['MinPeriod'])
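# A toy walk-through of the deliverable() rule above with plain arguments
# (made-up numbers, not the class): with min_period=30, max_period=300 and a
# last success 45 time units ago, a report with pending data is deliverable
# (45 > 30), while one without pending data is not (45 < 300).
def _deliverable_sketch(elapsed, n_pending, min_period=30, max_period=300, paused=False):
    if paused:
        return False
    return elapsed > max_period or (n_pending > 0 and elapsed > min_period)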
def run(self):
    while not self.sync_flag.value:
        self.sync_val.value = now()
    self.daq = DAQIn(ports=self.ports, read_buffer_size=self.READ_BUF_SIZE,
                     sample_rate=self.daq_sample_rate, **self.daq_kwargs)
    while self._on.value:
        if self._kill_flag.value:
            self.daq.release()
        try:
            ts, ts2, dat = self.daq.data_q.get(timeout=0.5)
        except Queue.Empty:
            if self._kill_flag.value:
                # final dump:
                if self.n_added_to_save_buffer:
                    add_to_saver_buffer(self.saver_obj_buffer, 'analogreader',
                                        self.save_buffer[:, -self.n_added_to_save_buffer:].T.copy(),
                                        ts=self.save_buffer_ts[0, -self.n_added_to_save_buffer:].copy(),
                                        ts2=self.save_buffer_ts[1, -self.n_added_to_save_buffer:].copy(),
                                        columns=self.portnames)
                self._on.value = False
            continue
        if self._kill_flag.value:
            logging.info('Analogreader final flush: {} reads remain.'.format(self.daq.data_q.qsize()))
        dat = dat.reshape((len(self.ports), self.READ_BUF_SIZE))

        # update save buffer with new data
        self.save_buffer = np.roll(self.save_buffer, -self.READ_BUF_SIZE, axis=1)
        self.save_buffer_ts = np.roll(self.save_buffer_ts, -self.READ_BUF_SIZE, axis=1)
        self.save_buffer[:, -self.READ_BUF_SIZE:] = dat[:, :]
        self.save_buffer_ts[:, -self.READ_BUF_SIZE:] = np.array([ts, ts2])[:, None]
        if self._saving.value:
            self.n_added_to_save_buffer += self.READ_BUF_SIZE
            dump = self.n_added_to_save_buffer >= self.save_buffer_size
        else:
            dump = False

        # update accumulator (runtime analysis buffer)
        self.accum = np.roll(self.accum, -self.READ_BUF_SIZE, axis=1)
        self.accum[:, -self.READ_BUF_SIZE:] = dat[:, :]
        self.accum_ts += [ts] * self.READ_BUF_SIZE
        self.accum_q[:] = (self.accum.copy()[self.runtime_ports]).ravel()

        # update experimental logic
        with self.logic_lock:
            _tmp_moving = self.accum[self.runtime_ports[self.movement_port], -self.movement_window:]
            nevents = np.sum(np.abs(np.diff(_tmp_moving)) >= self.movement_magnitude)
            self.moving_.value = nevents > self.movement_thresh

        if dump and self._saving.value:
            if self.n_added_to_save_buffer > self.save_buffer_size:
                warnings.warn('DAQ save buffer size larger than expected: some samples were missed. '
                              'Size={}, Expected={}'.format(self.n_added_to_save_buffer, self.save_buffer_size))
            add_to_saver_buffer(self.saver_obj_buffer, 'analogreader',
                                self.save_buffer.T.copy(),
                                ts=self.save_buffer_ts[0, :].copy(),
                                ts2=self.save_buffer_ts[1, :].copy(),
                                columns=self.portnames)
            self.n_added_to_save_buffer = 0
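# The save buffer above is a fixed-width ring maintained with np.roll: shift
# the whole window left by one read's worth of samples, then overwrite the
# tail columns with the newest read. A minimal sketch with toy sizes:
def _roll_buffer_demo():
    import numpy as np
    buf = np.zeros((2, 8))          # 2 channels, 8-sample history
    new = np.ones((2, 3))           # one read: 3 new samples per channel
    buf = np.roll(buf, -3, axis=1)  # drop the 3 oldest samples
    buf[:, -3:] = new               # append the newest read at the tail
    return buf                      # last 3 columns now hold the new data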
def main():
    host_name = socket.gethostname()
    host_ip = socket.gethostbyname(host_name)
    port_num = util.get_port_number(sys.argv)
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        s.bind((host_ip, port_num))
    except Exception:
        print(util.now() + ': Error binding to port. Choose another port number.')
        exit()
    s.listen()
    print(util.now() + ': Proxy server started. Waiting for connection...')
    while True:
        conn, addr = s.accept()
        print(util.now() + ': Server connected by', addr)
        _thread.start_new_thread(worker, (conn, addr))
async def export(request):
    data = await request.post()
    password = data.get('password', '')
    if password == secrets.EXPORT_PASSWORD:
        filename = '/export/export_%s.xlsx' % str(util.as_log_timezone(util.now())).replace(' ', '_')
        await export_all('data' + filename)
        raise web.HTTPFound(filename)
    else:
        return {'error_message': 'invalid credentials'}
def instantiate(self, job, user=None):
    """Kick off the job and make it active."""
    job.instantiate(callback=self.callback)
    dic = job.runtime_info()
    dic['status'] = 'Pending'
    dic['start_time'] = now()
    dic['owner'] = user if user else 'null'
    db.insert_runtime(dic)
    logger.info('instantiate job: %s' % job.name)
def _add_filelog_handler(self, filelog_config):
    file_path = filelog_config['path']
    if file_path.find('%') >= 0:
        file_path = util.now().strftime(file_path)
    handler = logging.FileHandler(file_path)
    formatter = logging.Formatter("%(asctime)s [%(levelname)s] %(message)s")
    handler.setFormatter(formatter)
    handler.setLevel(self._parse_level(filelog_config['level']))
    self.logger.addHandler(handler)
def set(self, cursor=None, user_id=None, term=None, **kwargs):
    qry = {'select': 'id', 'table': 'user_searches',
           'where': ('user_id = ?', 'term = ?'), 'order_by': 'id', 'limit': 1}
    cursor.execute(util.query(**qry), (user_id, term))
    res = cursor.fetchone()
    if res:
        # refresh the timestamp of this specific term, not all the user's searches
        qry = {'update': 'user_searches', 'set_values': ('time'),
               'where': ('user_id = ?', 'term = ?')}
        cursor.execute(util.query(**qry), (util.now(), user_id, term))
    else:
        qry = {'insert_into': 'user_searches', 'columns': ('user_id', 'term', 'time')}
        cursor.execute(util.query(**qry), (user_id, term, util.now()))
    return True
def set(self, cursor=None, user_id=None, promotion_id=None, **kwargs):
    cnt = {'select': ('COUNT(id)'), 'table': 'promotion_redemptions',
           'where': ('promotion_id = promotions.id')}
    promo = {'select': ('[end]', 'maximum', 'passcode', '(' + util.query(**cnt) + ') AS count'),
             'table': 'promotions', 'where': ('id = ?')}
    cursor.execute(util.query(**promo), (promotion_id,))
    row = cursor.fetchone()
    if int(row.end) != 0 and int(row.end) < util.now():
        return 'time'
    if int(row.maximum) != 0 and int(row.count) >= int(row.maximum):
        return 'number'
    qry = {'insert_into': 'promotion_redemptions',
           'columns': ('user_id', 'promotion_id', 'time')}
    cursor.execute(util.query(**qry), (user_id, promotion_id, util.now()))
    return row.passcode