def checkFilesOnWebsite(self):
    """
    Check whether all files in the current batch have been uploaded to
    the Dropbox cloud.

    Iterates over ``self.filesRemaining`` and queries Dropbox metadata for
    each path; files whose metadata exists are treated as uploaded and are
    removed from ``self.filesRemaining``. Checking stops at the first file
    that is still missing.

    Returns
    -------
    bool
        True when every remaining file was found on Dropbox, False when at
        least one file has not finished syncing yet.
    """
    filesUploadFlag = True
    uploadedFileList = []
    for fileName in self.filesRemaining:
        try:
            # Metadata lookup succeeds only once the file exists on Dropbox.
            self.dbx.files_get_metadata(fileName)
            print('%s Upload successful - %s' % (utils.timestamp(), fileName))
            uploadedFileList.append(fileName)
        except Exception:
            # Narrowed from a bare ``except:`` which would also swallow
            # KeyboardInterrupt/SystemExit. Missing metadata means the
            # upload is still in progress.
            filesUploadFlag = False
            print('%s Upload ongoing - %s' % (utils.timestamp(), fileName))
            break
    # Drop confirmed files after iteration (never mutate a list mid-loop).
    for fileName in uploadedFileList:
        self.filesRemaining.remove(fileName)
    return filesUploadFlag
def save_ignored_relationship(self, uid, tid):
    """Record that *uid* ignored *tid*.

    Keeps, per user, a refresh timestamp and at most the two most
    recently ignored target ids.
    """
    entry = self.__ignored_relationship.get(uid)
    if entry:
        entry[0] = utils.timestamp()
        entry[1].append(tid)
        # Cap the history at the last two ignored ids.
        entry[1] = entry[1][-2:]
    else:
        self.__ignored_relationship[uid] = [utils.timestamp(), [tid]]
def __init__(self, *args, **kwargs):
    """Set up the main window: load the UI file, wire the control buttons
    and build the real-time pressure plot."""
    super(MainWindow, self).__init__(*args, **kwargs)
    # Pressure alarm thresholds and the maximum value read so far.
    self.pmax = 60.0
    self.pmin = 10.0
    self.pmaxRead = 0.0
    # Load the UI Page and populate default widget values.
    uic.loadUi('mainwindow.ui', self)
    self.setInitialData()
    # Button wiring: alarm reset, PLC start ("Marcha") and stop ("Parada").
    self.pushButtonResetAlarms.pressed.connect(self.resetAlarms)
    self.pushButtonMarcha.pressed.connect(self.startPLC)
    self.pushButtonParada.pressed.connect(self.stopPLC)
    # Removed leftover str/bytes encode-decode debug prints that only
    # wrote type information to stdout on every window construction.
    # Pressure plot with a time-formatted x axis.
    self.graphWidget = pg.PlotWidget(
        title="Presión",
        labels={'left': 'Pressure'},
        axisItems={'bottom': TimeAxisItem(orientation='bottom')})
    self.graphWidget.setTitle("Presion vs Volumen")
    self.graphWidget.setYRange(0, 100)
    self.graphWidget.setXRange(timestamp(), timestamp() + 100)
    # Series data, seeded with a single zero sample at "now".
    self.x = [timestamp()]
    self.y = [0]
    self.graphWidget.setBackground('w')
    pen = pg.mkPen(color=(255, 0, 0))
    self.data_line = self.graphWidget.plot(self.x, self.y, pen=pen)
    # Poll for new data every 500 ms.
    self.timer = QtCore.QTimer()
    self.timer.setInterval(500)
    self.timer.timeout.connect(self.update_plot_data)
    self.timer.start()
async def refresh(self):
    """Refresh the rank-derived room list.

    Fetches the room list for every configured rank URL (sleeping between
    requests to stay polite), interleaves the lists so per-rank priority is
    preserved, registers each room, records refresh statistics, then trims
    ``self.dict_rank_rooms`` to the 2000 highest-weight rooms.

    Returns the sorted (by descending weight) list of kept room ids.
    """
    latest_refresh_start = utils.timestamp()
    roomlists = [await UtilsTask.fetch_rooms_from_rank(*self.urls[0])]
    for url, pages_num in self.urls[1:]:
        await asyncio.sleep(1.5)
        roomlists.append(await UtilsTask.fetch_rooms_from_rank(url, pages_num))
    rank_rooms = []
    # Interleave the per-rank lists so that relative priority is preserved.
    for rooms in zip_longest(*roomlists):
        for room in rooms:
            if room and room not in rank_rooms:
                rank_rooms.append(room)
    for real_roomid in rank_rooms:
        self.add2rooms(real_roomid)
    # Per-rank counts plus the deduplicated total, for status reporting.
    latest_refresh_rank_num = [len(rooms) for rooms in roomlists]
    latest_refresh_rank_num.append(len(rank_rooms))
    self.latest_refresh_rank_num = latest_refresh_rank_num
    latest_refresh_end = utils.timestamp()
    self.latest_refresh = f'{latest_refresh_start} to {latest_refresh_end}'
    rooms = [real_roomid for real_roomid in self.dict_rank_rooms.keys()]
    rooms.sort(key=lambda real_roomid: self.dict_rank_rooms[real_roomid].weight, reverse=True)
    rooms = rooms[:2000]  # cap to avoid tracking an unbounded number of rooms
    assert len(rooms) == len(set(rooms))
    self.dict_rank_rooms = {real_roomid: self.dict_rank_rooms[real_roomid] for real_roomid in rooms}
    return rooms
def userLogin(self, userId, userCookie, lastSeen=None, email=None, openid=None,
              firstName=None, lastName=None, cellPhone=None, memberSince=None,
              uType='manual'):
    """Refresh an existing user row on login, or insert a new one.

    Parameters
    ----------
    userId, userCookie : identity and session cookie of the user.
    lastSeen, memberSince : timestamps; default to "now" per call.
    email, openid, firstName, lastName, cellPhone, uType : profile fields.
    """
    # Compute timestamp defaults per call. The original signature used
    # ``lastSeen=utils.timestamp(False)`` which is evaluated once at import
    # time, so every login would reuse that stale timestamp.
    if lastSeen is None:
        lastSeen = utils.timestamp(False)
    if memberSince is None:
        memberSince = utils.timestamp(False)
    cursor = self.__conn.cursor()
    try:
        uid = self.getUser(uid=userId, ck=userCookie)['_id']
        cursor.execute(
            """UPDATE """ + DbWorker.dbCfg['table.user'] + """
                  SET _firstName = %s , _lastName = %s , _cellPhone = %s ,
                      _lastSeen = %s , _type = %s , _cookie = %s
                WHERE _id = %s """,
            # Fixed: the original passed ``lastSeen`` for _lastName, which
            # silently overwrote the stored last name with a timestamp.
            (firstName, lastName, cellPhone, lastSeen, uType, userCookie, uid))
    except RecordNotFound:
        cursor.execute(
            """ INSERT INTO """ + DbWorker.dbCfg['table.user'] + """
                VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s) """,
            (userId, email, openid, firstName, lastName, cellPhone,
             memberSince, lastSeen, uType, userCookie))
    finally:
        self.__conn.commit()
        cursor.close()
def get(gkey, itm=[], start_time=0, stop_time=0, sort='clock asc', groupby=0, page=None):
    """Fetch statistics records for a group.

    Parameters
    ----------
    gkey        group key (suffix of the Redis item-index hash).
    itm         list of item ids; when empty, every record of the group is
                fetched. The caller's list is never modified.
    start_time  start timestamp (defaults to the start of the current day).
    stop_time   end timestamp (defaults to "now").
    sort        ORDER BY clause.
    groupby     grouping mode: 1 -> itemid, 2 -> clock, other truthy ->
                itemid,clock (aggregates val with SUM).
    page        paging parameters {'site': page size, 'num': page number};
                by default all records are returned.

    Returns
    -------
    (0, [item_label_map, rows]) — item_label_map maps item id to name;
    both are empty when the query yields nothing.
    """
    stat_db = mysqldb.get_db()
    rdb = rediswrap.get_redis()
    sql_item = {'fields': '*'}
    r_itmkey = RD_ITM_KEY_PRFX + gkey
    if itm:
        # Copy before adding the marker key: the original appended 'mrk'
        # directly to the caller's list, mutating it as a side effect.
        itm = itm + ['mrk']
        itmids = rdb.hmget(r_itmkey, itm)
        mrk = itmids.pop().split(',')
    else:
        mrk = rdb.hget(r_itmkey, 'mrk')
        itmids = rdb.hvals(r_itmkey)
        itmids.remove(mrk)
        mrk = mrk.split(',')
    ids = [k for k in itmids if k]
    sql_item['table'] = get_hst_name(mrk[1])
    sql_item['where'] = " itemid in (%s) " % ",".join(ids)
    start_time = utils.timestamp(start_time) if start_time else utils.timestamp(0, 'd')
    stop_time = utils.timestamp(stop_time) if stop_time else int(time.time())
    sql_item['where'] += " and clock>=%s and clock <%s" % (start_time, stop_time)
    sql_item['order'] = sort
    if groupby:
        if groupby == 1:
            sql_item['group'] = 'itemid'
        elif groupby == 2:
            sql_item['group'] = 'clock'
        else:
            sql_item['group'] = 'itemid,clock'
        sql_item['fields'] = "itemid,sum(val) as val,clock"
    # Paging: LIMIT offset,count.
    if page:
        s = page['num'] * page['site']
        sql_item['limit'] = "%s,%s" % (s, page['site'])
    res, desc = stat_db.query(sql_item)
    # Resolve item names for labelling.
    item_lab = {}
    if res == 0 and desc:
        itm_tb = "stat_item_" + mrk[0] if mrk[0] else "stat_item"
        rs, ds = stat_db.query("select name,id from %s where id in(%s)" % (itm_tb, ",".join(ids)))
        if rs == 0 and ds:
            for row in ds:
                item_lab[row['id']] = row['name']
        return 0, [item_lab, desc]
    return 0, [{}, []]
def test_event_comment_notification(self):
    """Posting a comment notifies attendants, but never the commenter."""
    to_notify = self.make_user(username='******')
    self.register_for_notifications(user=to_notify)
    self.register_for_notifications()
    event = Event(**{
        u'where': 'test',
        u'when': timestamp(),
        u'what': 'test',
        u'broadcast': False,
        u'posted_from': [37.422834216666665, -122.08536667833332],
        u'creator': self.get_user()[u'username'],
    })
    event.save()
    # An attending user — the one who should receive the notification.
    Attendant(**{
        u'status': status.ATTENDING,
        u'timestamp': timestamp(),
        u'event': event[u'id'],
        u'user': to_notify[u'id'],
    }).save()
    comment = 'the test comment'
    response = self.post('/events/%s/comments/' % event[u'id'], {'comment': comment})
    self.assertEqual(response.status, 200, 'comments POST 200')
    nots = self.get_new_notifications(user=to_notify)
    self.assertEqual(len(nots), 1, 'one new notification')
    notification = nots[0]
    self.assertTrue(u'type' in notification, 'poll response has type')
    self.assertEqual(notification[u'type'], 'comment', 'event has the correct type')
    self.assertTrue(u'event_revision' in notification, 'poll response has event rev')
    self.assertEqual(notification[u'event_revision'], event[u'revision'],
                     'event has the correct revision')
    self.assertTrue(u'comment' in notification, 'poll response has comment')
    self.assertEqual(notification[u'comment'], comment, 'event has the correct comment')
    self.assertTrue(u'commenter' in notification, 'poll response has commenter')
    self.assertEqual(notification[u'commenter'], self.get_user()[u'username'],
                     'event has the correct commenter')
    # The poster must not be notified about their own comment.
    nots = self.get_new_notifications()
    self.assertEqual(len(nots), 0, 'no notification for the poster')
def test_event_list_sort(self):
    """'soon' sort orders by `when` (primary) then `created` (secondary)."""
    t1 = timestamp()
    t2 = timestamp()
    t3 = timestamp()
    # e1: earliest `when` — must sort first.
    e1 = Event(**{
        u'when': t1,
        u'what': 'user2 created, broadcast',
        u'broadcast': True,
        u'creator': self.get_user()[u'username'],
        u'created': t1,
    })
    e1.save()
    # e2/e3: same `when`, differing `created` — exercises the secondary key.
    e2 = Event(**{
        u'when': t2,
        u'what': 'user2 created, broadcast',
        u'broadcast': True,
        u'creator': self.get_user()[u'username'],
        u'created': t3,
    })
    e2.save()
    e3 = Event(**{
        u'when': t2,
        u'what': 'user2 created, broadcast',
        u'broadcast': True,
        u'creator': self.get_user()[u'username'],
        u'created': t2,
    })
    e3.save()
    # e4: latest `when` — must sort last.
    e4 = Event(**{
        u'when': t3,
        u'what': 'user2 created, broadcast',
        u'broadcast': True,
        u'creator': self.get_user()[u'username'],
        u'created': t1,
    })
    e4.save()
    response = self.get('/events/?sort=soon&filter=creator&'
                        'username=%s' % self.get_user()[u'username'])
    self.assertEqual(response.status, 200, 'response OK')
    events = json.loads(response.read())[u'events']
    self.assertEqual(len(events), 4, 'correct number of events returned')
    self.assertEqual(events[0], e1[u'revision'],
                     'event 1 when correct, primary sort')
    # uncomment to test secondary sort
    # self.assertEqual(events[1], e3[u'revision'],
    #                  'event 3 when correct, secondary sort')
    # self.assertEqual(events[2], e2[u'revision'],
    #                  'event 2 when correct, secondary sort')
    self.assertEqual(events[3], e4[u'revision'],
                     'event 4 when correct, primary sort')
def compute_label_cooccurrence(filename, label_to_ind, ind_to_label, singles,
                               skip_probability=0.0):
    """Build a 2x2 contingency table for every ordered label pair.

    Reads an annotations CSV (ImageID,Source,LabelName,Confidence), groups
    consecutive rows by image, and counts for each label pair how many
    images contain both labels. Marginals from *singles* fill the remaining
    contingency cells.

    Parameters
    ----------
    filename : path to the annotations CSV.
    label_to_ind / ind_to_label : label-name <-> index mappings.
    singles : per-label image counts (keyed by label name).
    skip_probability : probability of skipping each row (bootstrapping).

    Returns
    -------
    (count, pair_count, c22_dict, num_images) — the full 2x2x|L|x|L| count
    array, per-pair "both" counts, per-pair 2x2 tables, and the number of
    images processed.
    """
    print("Read label co-ocurrence from [%s] " % colored(filename, 'blue'))
    last_image_id = 'none'
    label_ind_set = set()
    label_names = label_to_ind.keys()
    num_labels = len(label_names)
    count = np.zeros((num_labels, num_labels, 2, 2))
    pair_ind_count = dict()
    print('Shape of count = ' + str(count.shape))
    print('Skip probability %s ' % colored(str(skip_probability), 'green'))
    num_images = 0
    df = pd.read_csv(filename)
    for i_line, row in df.iterrows():
        # Omit lines randomly for bootstrapping.
        if np.random.sample() < skip_probability:
            continue
        if i_line % 100000 == 0:
            print('line=%d %s' % (i_line, utils.timestamp(i_line)))
        # The line format is: ImageID,Source,LabelName,Confidence
        image_id = row['ImageID']
        new_label = row['LabelName']
        if i_line == 1 or i_line == 0:
            last_image_id = image_id
        if image_id == last_image_id:
            # Same image: extend its label set.
            if new_label in label_to_ind:
                label_ind_set.add(label_to_ind[new_label])
        else:
            # New image: flush counts accumulated for the previous image.
            num_images += 1
            count, pair_ind_count, label_ind_set = update_counts(
                label_ind_set, count, pair_ind_count, label_to_ind, new_label)
            last_image_id = image_id
    # Flush the final image's accumulated labels.
    num_images += 1
    count, pair_ind_count, label_ind_set = update_counts(
        label_ind_set, count, pair_ind_count, label_to_ind, 'end-of-file')
    print('Loop over label_pairs, num_labels = ' + str(num_labels))
    pair_count, c22_dict = dict(), dict()
    for pair in itertools.product(range(0, num_labels), repeat=2):
        l1, l2 = pair[0], pair[1]
        # Progress print every 1000 pairs. Fixed precedence: the original
        # ``l1 * num_labels + l2 % 1000 == 0`` evaluated ``l2 % 1000`` first
        # and therefore almost never fired.
        if (l1 * num_labels + l2) % 1000 == 0:
            print(l1 * num_labels + l2, utils.timestamp(l1 * num_labels + l2))
        c1, c2 = singles[ind_to_label[l1]], singles[ind_to_label[l2]]
        both = count[l1][l2][1][1]
        # Fill the remaining contingency cells from the marginals.
        count[l1][l2][0][1] = c2 - both
        count[l1][l2][1][0] = c1 - both
        count[l1][l2][0][0] = num_images - (c1 + c2 - both)
        pair_name = ind_to_label[l1] + ':' + ind_to_label[l2]
        pair_count[pair_name] = count[l1][l2][1][1]
        c22_dict[pair_name] = count[l1, l2, :, :]
    return count, pair_count, c22_dict, num_images
def check_login(self, stage=0):
    """Check pan.baidu.com login state and, if logged out, attempt to log
    in — retrying at most twice via recursion on *stage* (Python 2 code:
    uses ``urllib.urlencode``)."""
    ret = self.api_request("https://pan.baidu.com/api/account/thirdinfo")
    if ret["errno"] == 0:
        logger.debug("Login check success!")
        return True
    # Give up after two failed login attempts.
    if stage >= 2:
        logger.debug("Login check failed!")
        return False
    # Get token
    token = self.get_token()
    # Ask passport whether a captcha (verifycode) is required.
    params = dict(token=token,
                  tpl="netdisk",
                  apiver="v3",
                  tt=utils.timestamp(),
                  username=self.username,
                  isphone="false")
    check_login_url = "https://passport.baidu.com/v2/api/?logincheck&" + urllib.urlencode(
        params)
    ret = self.api_request(check_login_url)
    code_string = ret["data"]["codeString"]
    if code_string:
        logger.debug("Login check require verifycode")
        verifycode = self.get_verifycode(code_string)
    else:
        verifycode = ""
    # try to login
    login_params = dict(
        staticpage=
        "http://pan.baidu.com/res/static/thirdparty/pass_v3_jump.html",
        charset="utf-8",
        token=token,
        tpl="netdisk",
        tt=utils.timestamp(),
        codestring=code_string,
        isPhone="false",
        safeflg=0,
        u="http://pan.baidu.com/",
        username=self.username,
        password=self.password,
        verifycode=verifycode,
        mem_pass="******",
    )
    login_url = "https://passport.baidu.com/v2/api/?login"
    html = self.curl.request(login_url, data=login_params, method="POST")
    # Follow the jump URL embedded in the response page, then re-check.
    url = re.findall(r"encodeURI\('(.*?)'\)", html)[0]
    self.curl.request(url)
    return self.check_login(stage + 1)
def test_event_list_invited(self):
    """filter=invited returns events the user was invited to or is
    attending, plus broadcasts from followed users — but not unrelated
    non-broadcast events."""
    user = self.get_user()
    user2 = self.make_user(username='******')
    user3 = self.make_user(username='******')
    self.follow(user, user2, reciprocal=True)
    self.follow(user3, user, reciprocal=True)
    # event1: explicit invitation (Attendant row, default status).
    event1 = Event(**{
        u'where': 'test',
        u'when': timestamp(),
        u'what': 'user2 created',
        u'broadcast': False,
        u'posted_from': [37.422834216666665, -122.08536667833332],
        u'creator': user2[u'username'],
    })
    event1.save()
    Attendant(user=user[u'id'], event=event1[u'id']).save()
    # event2: user is explicitly attending.
    event2 = Event(**{
        u'where': 'test',
        u'what': 'user3 created',
        u'broadcast': False,
        u'creator': user3[u'username'],
    })
    event2.save()
    Attendant(user=user[u'id'], event=event2[u'id'],
              status=status.ATTENDING).save()
    # event3: broadcast from a followed user — included without invitation.
    event3 = Event(**{
        u'where': 'test',
        u'what': 'user2 created, broadcast',
        u'broadcast': True,
        u'posted_from': [37.422834216666665, -122.08536667833332],
        u'creator': user2[u'username'],
    })
    event3.save()
    # event4: non-broadcast with no invitation — must be excluded.
    event4 = Event(**{
        u'where': 'test',
        u'when': timestamp(),
        u'what': 'user3 created',
        u'broadcast': False,
        u'creator': user3[u'username'],
    })
    event4.save()
    response = self.get('/events/?filter=invited')
    self.assertEqual(response.status, 200, 'response OK')
    events = json.loads(response.read())[u'events']
    self.assertEqual(len(events), 3, 'correct number of events returned')
    self.assertTrue(event1[u'revision'] in events, 'event 1 returned')
    self.assertTrue(event2[u'revision'] in events, 'event 2 returned')
    self.assertTrue(event3[u'revision'] in events, 'event 3 returned')
def check_login(self, stage=0):
    """Duplicate of the other ``check_login``: verify pan.baidu.com login
    state, logging in (with optional captcha) when needed; at most two
    retries via recursion on *stage*. Python 2 (``urllib.urlencode``)."""
    ret = self.api_request("https://pan.baidu.com/api/account/thirdinfo")
    if ret["errno"] == 0:
        logger.debug("Login check success!")
        return True
    # Give up after two failed login attempts.
    if stage >= 2:
        logger.debug("Login check failed!")
        return False
    # Get token
    token = self.get_token()
    # Ask passport whether a captcha (verifycode) is required.
    params = dict(token=token, tpl="netdisk", apiver="v3",
                  tt=utils.timestamp(), username=self.username,
                  isphone="false")
    check_login_url = "https://passport.baidu.com/v2/api/?logincheck&" + urllib.urlencode(params)
    ret = self.api_request(check_login_url)
    code_string = ret["data"]["codeString"]
    if code_string:
        logger.debug("Login check require verifycode")
        verifycode = self.get_verifycode(code_string)
    else:
        verifycode = ""
    # try to login
    login_params = dict(staticpage="http://pan.baidu.com/res/static/thirdparty/pass_v3_jump.html",
                        charset="utf-8",
                        token=token,
                        tpl="netdisk",
                        tt=utils.timestamp(),
                        codestring=code_string,
                        isPhone="false",
                        safeflg=0,
                        u="http://pan.baidu.com/",
                        username=self.username,
                        password=self.password,
                        verifycode=verifycode,
                        mem_pass="******",
                        )
    login_url = "https://passport.baidu.com/v2/api/?login"
    html = self.curl.request(login_url, data=login_params, method="POST")
    # Follow the jump URL embedded in the response page, then re-check.
    url = re.findall(r"encodeURI\('(.*?)'\)", html)[0]
    self.curl.request(url)
    return self.check_login(stage + 1)
def clear_plot(self):
    """Reset the plot series and rescale the x axis.

    In real-time mode the window is centered on "now" and the series are
    emptied; otherwise the window is anchored at the last sample and the
    series are seeded with zeros.
    """
    if self.real_time_x_axis_flag:
        self.setXRange(timestamp() - 50, timestamp() + 50)
        self.x = []
        # NOTE(review): five sub-lists here vs. four in the branch below,
        # while only four data lines are updated — y[4] looks unused;
        # confirm whether a fifth series was intended.
        self.y = [[], [], [], [], []]
    else:
        self.setXRange(self.x[-1] - 48, self.x[-1] + 2)
        self.x = [0, 0]
        self.y = [[0, 0], [0, 0], [0, 0], [0, 0]]
    self.data_line1.setData(self.x, self.y[0])
    self.data_line2.setData(self.x, self.y[1])
    self.data_line3.setData(self.x, self.y[2])
    self.data_line4.setData(self.x, self.y[3])
def list(self, dir="/", page=1, num='15'):
    """Return one page of the file listing for *dir* from the
    pan.baidu.com list API (errors propagate from api_request; callers
    treat None as error)."""
    query = dict(channel='chunlei',
                 clienttype=0,
                 web=1,
                 num=num,
                 t=utils.timestamp(),
                 page=page,
                 dir=dir,
                 _=utils.timestamp())
    response = self.api_request("http://pan.baidu.com/api/list",
                                extra_data=query)
    return response['list']
def getLibrarySymbols():
    """Return the library symbol table, lazily collecting results from the
    background scanner process on first use.

    Uses module-level caches (libSyms/wsSyms/wsLibs). The result queue is
    drained *before* joining the scanner process — joining first can
    deadlock if the queue's pipe buffer is full.
    """
    global libSyms
    global wsSyms
    global wsLibs
    global scannerProcess
    global scanq
    if not libSyms:
        if not scanq:
            # No scan was started: fall back to empty tables.
            libSyms = {}
            wsSyms = {}
            wsLibs = {}
        else:
            utils.timestamp("Getting scan results from queue")
            (libSyms, wsSyms, wsLibs) = scanq.get()
            utils.timestamp("Done queue get")
        # Reap the scanner process now that its output has been consumed.
        if scannerProcess:
            utils.timestamp("Joining scan process")
            scannerProcess.join()
            utils.timestamp("Done join")
            scannerProcess = None
        if scanq:
            scanq.close()
            scanq = None
        # Imported late: symbolscanner is only needed once results exist.
        import symbolscanner
        symbolscanner.setInitialResults(workspacePath, libSyms, wsSyms, wsLibs)
    return libSyms
def getLibrarySymbols():
    """Duplicate of the other ``getLibrarySymbols`` (single-quoted log
    strings): lazily collect scanner-process results into the module-level
    caches and return the library symbol table. Queue is drained before
    the process join to avoid a full-pipe deadlock."""
    global libSyms
    global wsSyms
    global wsLibs
    global scannerProcess
    global scanq
    if not libSyms:
        if not scanq:
            # No scan was started: fall back to empty tables.
            libSyms = {}
            wsSyms = {}
            wsLibs = {}
        else:
            utils.timestamp('Getting scan results from queue')
            (libSyms, wsSyms, wsLibs) = scanq.get()
            utils.timestamp('Done queue get')
        # Reap the scanner process now that its output has been consumed.
        if scannerProcess:
            utils.timestamp('Joining scan process')
            scannerProcess.join()
            utils.timestamp('Done join')
            scannerProcess = None
        if scanq:
            scanq.close()
            scanq = None
        # Imported late: symbolscanner is only needed once results exist.
        import symbolscanner
        symbolscanner.setInitialResults(workspacePath, libSyms, wsSyms, wsLibs)
    return libSyms
def test_event_notification_attending_creator(self):
    """When an invitee switches to ATTENDING, the event creator gets a
    single 'attendant' notification carrying the revision and username.

    (Fixes 'corrrect' -> 'correct' typos in the assertion messages.)
    """
    to_attend = self.make_user(username='******')
    self.register_for_notifications(user=self.get_user())
    event = Event(**{
        u'where': 'test',
        u'when': timestamp(),
        u'what': 'test',
        u'broadcast': False,
        u'posted_from': [37.422834216666665, -122.08536667833332],
        u'creator': self.get_user()[u'username'],
    })
    event.save()
    # this attendant will trigger the notification
    Attendant(**{
        u'status': status.INVITED,
        u'timestamp': timestamp(),
        u'event': event[u'id'],
        u'user': to_attend[u'id'],
    }).save()
    response = self.post('/events/%s/attendants/' % event[u'id'],
                         {u'status': status.ATTENDING},
                         auth_user=to_attend)
    self.assertEqual(response.status, 200, 'attendants POST 200')
    nots = self.get_new_notifications(user=self.get_user())
    self.assertEqual(len(nots), 1, 'one new notification')
    notification = nots[0]
    self.assertTrue(u'type' in notification, 'poll response has type')
    self.assertEqual(notification[u'type'], 'attendant',
                     'event has the correct type')
    self.assertTrue(u'event_revision' in notification,
                    'poll response has event rev')
    self.assertEqual(notification[u'event_revision'], event[u'revision'],
                     'event has the correct revision')
    self.assertTrue(u'attendant' in notification, 'poll response has attendant')
    self.assertEqual(notification[u'attendant'], to_attend[u'username'],
                     'event has the correct attendant')
def read_request_proxy(self, client_conn):
    """Read a request from a client explicitly configured to use this
    proxy, handling the CONNECT handshake for HTTPS tunnels.

    Returns a flow.Request, or None when the client closed the connection.
    """
    line = self.get_line(self.rfile)
    if line == "":
        return None
    if not self.proxy_connect_state:
        connparts = http.parse_init_connect(line)
        if connparts:
            host, port, httpversion = connparts
            headers = self.read_headers(authenticate=True)
            self.wfile.write(
                'HTTP/1.1 200 Connection established\r\n' +
                ('Proxy-agent: %s\r\n'%self.server_version) +
                '\r\n'
            )
            self.wfile.flush()
            self.establish_ssl(client_conn, host, port)
            self.proxy_connect_state = (host, port, httpversion)
            # BUG FIX: the original called ``self.rfile.readline(line)``,
            # passing the previous request line (a str) as readline's size
            # argument, which raises TypeError. Read the tunneled request
            # line the same way the first line was read.
            line = self.get_line(self.rfile)
    if self.proxy_connect_state:
        # Inside an established CONNECT tunnel: origin-form request line.
        r = http.parse_init_http(line)
        if not r:
            raise ProxyError(400, "Bad HTTP request line: %s"%repr(line))
        method, path, httpversion = r
        headers = self.read_headers(authenticate=False)
        host, port, _ = self.proxy_connect_state
        content = http.read_http_body_request(
            self.rfile, self.wfile, headers, httpversion, self.config.body_size_limit
        )
        return flow.Request(
            client_conn, httpversion, host, port, "https", method, path,
            headers, content, self.rfile.first_byte_timestamp, utils.timestamp()
        )
    else:
        # Plain proxy request: absolute-form request line with scheme/host.
        r = http.parse_init_proxy(line)
        if not r:
            raise ProxyError(400, "Bad HTTP request line: %s"%repr(line))
        method, scheme, host, port, path, httpversion = r
        headers = self.read_headers(authenticate=True)
        content = http.read_http_body_request(
            self.rfile, self.wfile, headers, httpversion, self.config.body_size_limit
        )
        return flow.Request(
            client_conn, httpversion, host, port, scheme, method, path,
            headers, content, self.rfile.first_byte_timestamp, utils.timestamp()
        )
def __init__(self, uid, account, amount, transaction_type, name="Transaction",
             description="Short transaction desc."):
    """Build a transaction record against *account*.

    Amounts are normalised to 2-decimal strings; platform/os/timestamp
    metadata is captured at construction time and a summary line is printed.
    """
    self.id = uid
    self.transaction_type = transaction_type.value
    self.name = name
    self.description = description
    self.account_id = account.id
    self.amount = "{:.2f}".format(float(amount))
    _acc_bal, amt = float(account.balance), float(self.amount)
    # NOTE(review): both ternary branches format the *unmodified* balance
    # and ``amt`` is never used — presumably one branch should subtract
    # (PAY) and the other add the amount; confirm intended behavior.
    self.balance_after = "{:.2f}".format(
        _acc_bal
    ) if transaction_type is TransactionType.PAY else "{:.2f}".format(
        _acc_bal)
    self.sign = self._sign()
    # NOTE(review): node() == "localhost" as a "Mobile" check looks
    # environment-specific; confirm.
    self.platform = "Mobile" if platform.node(
    ) == "localhost" else platform.node()
    self.os = platform.platform()
    self.timestamp = timestamp()
    print(
        f"{account.name}'s Transaction no. {self.id}: {self._sign()}{amount}{service.service.get_currency()}"
    )
def handle_request(self, cc):
    """Serve one client request: dispatch to a local WSGI app, let the
    flow channel rewrite or answer it, otherwise forward it upstream and
    read the response (Python 2 except syntax; block is truncated in this
    source view)."""
    try:
        request, err = None, None
        request = self.read_request(cc)
        if request is None:
            return
        cc.requestcount += 1
        # Built-in apps (e.g. the proxy's own pages) short-circuit upstream.
        app = self.server.apps.get(request)
        if app:
            err = app.serve(request, self.wfile)
            if err:
                self.log(cc, "Error in wsgi app.", err.split("\n"))
                return
        else:
            request_reply = self.channel.ask(request)
            if request_reply is None or request_reply == KILL:
                return
            elif isinstance(request_reply, flow.Response):
                # A script answered directly; skip the upstream round-trip.
                request = False
                response = request_reply
                response_reply = self.channel.ask(response)
            else:
                request = request_reply
                if self.config.reverse_proxy:
                    scheme, host, port = self.config.reverse_proxy
                elif self.config.forward_proxy:
                    scheme, host, port = self.config.forward_proxy
                else:
                    scheme, host, port = request.scheme, request.host, request.port
                # If we've already pumped a request over this connection,
                # it's possible that the server has timed out. If this is
                # the case, we want to reconnect without sending an error
                # to the client.
                while 1:
                    sc = self.get_server_connection(cc, scheme, host, port, self.sni)
                    sc.send(request)
                    if sc.requestcount == 1:
                        # add timestamps only for first request (others are not directly affected)
                        request.tcp_setup_timestamp = sc.tcp_setup_timestamp
                        request.ssl_setup_timestamp = sc.ssl_setup_timestamp
                    sc.rfile.reset_timestamps()
                    try:
                        tsstart = utils.timestamp()
                        peername = sc.connection.getpeername()
                        if peername:
                            request.ip = peername[0]
                        httpversion, code, msg, headers, content = http.read_response(
                            sc.rfile, request.method, self.config.body_size_limit)
                    except http.HttpErrorConnClosed, v:
                        # Server closed a reused connection: reconnect once.
                        self.del_server_connection()
                        if sc.requestcount > 1:
                            continue
                        else:
                            raise
                    except http.HttpError, v:
                        raise ProxyError(502, "Invalid server response.")
                    else:
                        # NOTE(review): the block continues beyond the
                        # visible source chunk.
def __init__(self, bag_file=BAG_FILE, model_file=MODEL_FILE, sim_file=SIM_FILE,
             topics=TOPICS, save_folder=SAVE_FOLDER):
    """Initialise an optimisation run: record file locations, allocate
    signal buffers and set the optimiser metaparameters."""
    self.bag_file = bag_file
    self.topic_list = topics
    self.sim_file = sim_file
    self.model_file = model_file
    # Every run writes into a fresh timestamped sub-folder.
    self.save_folder = save_folder + "/" + utils.timestamp() + "/"
    # Recorded actuator/sensor/IMU/motor signals (robot vs. simulation).
    self.act_sig = []
    self.act_t = []
    self.sens_sim_sig = []
    self.sens_rob_sig = []
    self.imu_sim_sig = []
    self.imu_rob_sig = []
    self.mot_sim_sig = []
    self.norm_params = []
    # Optimization metaparameter
    self.params_names = [
        "Front Mass", "Back Mass", "FL Friction mu1", "BR Friction mu1",
        "FL Friction Contact Depth", "BR Friction Contact Depth",
        "FL Stiffness", "BR Stiffness", "FL Compression Tolerance",
        "BR Compression Tolerance"
    ]
    self.params_units = [
        "kg", "kg", "", " ", " ", " ", "N/m", "N/m", "mm", "mm"
    ]
    self.params_unormed = []
    # Initial guess in normalised [0, 1] space, plus per-parameter bounds.
    self.params_normed = [0.1, 0.1, 0.8, 0.8, 0.8, 0.8, 0.2, 0.2, 0.2, 0.2]
    self.params_min = [
        0.1, 0.1, 0.001, 0.001, 0.0001, 0.0001, 50, 50, 0.7, 0.7
    ]
    self.params_max = [0.5, 0.5, 50, 50, 0.01, 0.01, 1000, 1000, 1.2, 1.2]
    # Simulation timing (seconds).
    self.sim_time = 0
    self.sim_timeout = 60
    self.start_time = 20
    self.stop_time = 62.1
    # Optimiser settings.
    self.pool_number = 1
    self.max_iter = 10000
    self.init_var = 0.3
    self.min = 0
    self.max = 1
    self.pop_size = 0
    self.score_method = "av_period_t_inv"
    self.sim_speed = "synchrone"
    # Run-state bookkeeping.
    self.it = 0
    self.pool = 0
    self.score = 0
    self.score_var = 0
    self.t_init = None
    self.t_it_stop = None
    super(Optimization, self).__init__()
def parse_manga_page(source: str, page_url: str):
    """Parse a manga overview page into a title and ordered chapter list.

    Parameters
    ----------
    source : raw HTML of the overview page.
    page_url : page URL, used to resolve relative chapter links.

    Returns
    -------
    dict with 'title' (breadcrumb title, or a timestamp fallback) and
    'chapters' (oldest-first list of {'title', 'url'} dicts).
    """
    # Pin the parser explicitly: without it bs4 picks whichever parser is
    # installed, which varies between environments (and emits
    # GuessedAtParserWarning).
    soup = BeautifulSoup(source, 'html.parser')
    # get the manga title from the third breadcrumb entry
    title = None
    title_divs = soup.select('#breadcrumb li')
    if len(title_divs) >= 3:
        title_div = title_divs[2]
        title = title_div.text
    if title is None:
        # fallback to timestamp
        title = timestamp()
    # get the list of chapters
    anchor_list = soup.find('ul', attrs={'class': 'table-view'}).find_all('a')
    chapters = [{
        'title': a.find('p', attrs={'class': 'text-bold'}).text,
        'url': urljoin(page_url, a.attrs['href']),
    } for a in anchor_list if a.find('p', attrs={'class': 'text-bold'}) is not None]
    # the page lists newest first; callers expect oldest first
    chapters.reverse()
    return {
        'title': title,
        'chapters': chapters,
    }
def test_comment_field_created(self):
    """Comments carry a server-side 'created' field, both on the model and
    in the comments GET payload."""
    user = User(username='******', password='******', number='+16666666666')
    user.save()
    event = Event(**{
        u'where': 'test',
        u'when': timestamp(),
        u'what': 'test',
        u'broadcast': False,
        u'posted_from': [37.422834216666665, -122.08536667833332],
        u'creator': user[u'username'],
    })
    event.save()
    comment = 'the test comment'
    post = self.post('/events/%s/comments/' % event[u'id'], {'comment': comment})
    self.assertEqual(post.status, 200, 'comment POST 200')
    # NOTE(review): this Comment is constructed but never saved — the
    # membership check below exercises the in-memory object only.
    comment = Comment(user=self.get_user()[u'id'], event=event[u'id'])
    self.assertTrue(u'created' in comment, 'comment has created')
    get = self.get('/events/%s/comments/' % event[u'id'])
    self.assertEqual(get.status, 200, 'got comment')
    got_comment = json.loads(get.read())['comments'][0]
    self.assertTrue(u'created' in got_comment, 'comment has created')
def create_pdf(folder, images):
    """Build a timestamped PDF in *folder* containing every image in
    *images* (each placed at 687x809)."""
    target = folder + '/' + 'pdf_' + utils.timestamp() + '.pdf'
    builder = PdfBuilder(target)
    for img in images:
        builder.add_image(img, 687, 809)
    builder.images_on_pdf()
    builder.save()
def _list(self, dir="/", page=1, initialCall=True):
    """Page through the pan.baidu.com list API (100 entries per request),
    recursing until a short page is seen; returns the combined file list
    (errors propagate from api_request; callers treat None as error)."""
    query = dict(channel='chunlei',
                 clienttype=0,
                 web=1,
                 num=100,
                 t=utils.timestamp(),
                 page=page,
                 dir=dir,
                 _=utils.timestamp())
    entries = self.api_request("http://pan.baidu.com/api/list",
                               extra_data=query)['list']
    # A full page implies there may be more entries on the next one.
    if len(entries) == 100:
        entries += self._list(dir, page=page + 1, initialCall=False)
    return entries
def test_event_new_validation(self):
    """Event creation rejects missing 'what' and over-length fields with
    400 + a structured error body."""
    # Missing required 'what' field.
    response = self.post('/events/', {
        u'where': 'TestCase',
        u'when': timestamp(),
        u'broadcast': False,
    })
    self.assertEqual(response.status, 400, 'new event 400')
    body = json.loads(response.read())
    self.assertEqual(body['error'], 'MISSING_FIELDS')
    self.assertEqual(body['field'], 'what')
    # 'where' longer than the 25-character limit.
    response = self.post('/events/', {
        u'what': 'TestCase',
        u'where': '--------------------------',  # len: 26
    })
    self.assertEqual(response.status, 400, 'new event 400')
    body = json.loads(response.read())
    self.assertEqual(body['error'], 'FIELD_LENGTH')
    self.assertEqual(body['field'], 'where')
    # 'what' longer than the 140-character limit.
    response = self.post('/events/', {
        u'what': '-------------------------'
                 '-------------------------'
                 '-------------------------'
                 '-------------------------'
                 '-------------------------'
                 '----------------',  # len: 141
    })
    self.assertEqual(response.status, 400, 'new event 400')
    body = json.loads(response.read())
    self.assertEqual(body['error'], 'FIELD_LENGTH')
    self.assertEqual(body['field'], 'what')
def mergeBuffers(self, undoMessage='Merge Buffers', log=False, timestamp=None):
    """Copy all features from the point/line/polygon edit buffers into
    their target layers, clearing each buffer that copied successfully.

    Parameters
    ----------
    undoMessage : base label for the undo stack entries.
    log : whether to record the copied features in the per-type log layer.
    timestamp : log timestamp; generated when omitted and logging is on.

    Returns
    -------
    bool
        True when every buffer merged cleanly; False when any copy failed
        (failed buffers are left untouched).
    """
    if timestamp is None and self.settings.log:
        timestamp = utils.timestamp()
    merge = True
    # Identical handling for all three geometry types — deduplicated from
    # three copy/clear stanzas into one loop.
    bufferSets = [
        (self.pointsBuffer, self.pointsLayer, self.pointsLog, 'points'),
        (self.linesBuffer, self.linesLayer, self.linesLog, 'lines'),
        (self.polygonsBuffer, self.polygonsLayer, self.polygonsLog, 'polygons'),
    ]
    for bufferLayer, targetLayer, featureLog, label in bufferSets:
        if layers.copyAllFeatures(bufferLayer, targetLayer,
                                  undoMessage + ' - copy ' + label,
                                  log, featureLog, timestamp):
            self._clearBuffer(bufferLayer, undoMessage + ' - delete ' + label)
        else:
            merge = False
    return merge
def _read_request_transparent(self, client_conn, scheme, host, port):
    """
    Read a transparent HTTP request.

    Transparent means that the client isn't aware of proxying. In other
    words, the client request starts with "GET /foo.html HTTP/1.1" rather
    than "CONNECT example.com:80 HTTP/1.1".

    Returns a flow.Request, or None when the client closed the connection.
    """
    # Upgrade to TLS first when the platform resolver says this is HTTPS.
    if scheme.lower() == "https" and not self.ssl_established:
        self.establish_ssl(client_conn, host, port)
    line = self.get_line(self.rfile)
    if line == "":
        return None
    r = http.parse_init_http(line)
    if not r:
        raise ProxyError(400, "Bad HTTP request line: %s"%repr(line))
    method, path, httpversion = r
    headers = self.read_headers(authenticate=False)
    content = http.read_http_body_request(
        self.rfile, self.wfile, headers, httpversion, self.config.body_size_limit
    )
    return flow.Request(
        client_conn, httpversion, host, port, scheme, method, path, headers,
        content, self.rfile.first_byte_timestamp, utils.timestamp()
    )
def _get_formatted_events(self, events):
    """Yield (time, computer, user_name, event_code, ip_address) tuples
    from raw Windows logon event dicts, skipping machine/service accounts
    (names ending in '$')."""
    for D_event in events:
        # Fall back to parent dict if sub-dicts are not available
        network_info = D_event.get('Network Information', D_event)
        account_info = D_event.get('New Logon', D_event)
        # The time format is set in rsyslog
        received_time = datetime.strptime(
            D_event['Received time'], '%Y-%m-%d %H:%M'
        )
        # Shift by the collector's UTC offset — presumably to normalise to
        # UTC; confirm against the rsyslog configuration.
        _time = timestamp(received_time) - self.utcoffset
        computer = network_info.get('Workstation Name')
        if computer is not None:
            computer = (computer + self.domain_suffix).lower()
        # Skip local service accounts ending with $
        user_name = account_info.get('Account Name', '')
        if user_name.endswith('$'):
            continue
        event_code = D_event['Event Code']
        ip_address = network_info.get('Source Network Address', '')
        yield (_time, computer, user_name, event_code, ip_address)
def get_post_data(self, tt, token, codeString, verifycode):
    """Assemble the form fields for the Baidu passport login POST.

    Field order matches the original request payload; `tt` is regenerated
    from utils.timestamp() rather than taken from the argument (as in the
    original implementation).
    """
    fields = {}
    fields['apiver'] = 'v3'
    fields['codestring'] = codeString
    fields['isPhone'] = ''
    fields['logLoginType'] = ' pc_loginDialog'
    fields['loginmerge'] = 'true'
    fields['logintype'] = 'dialogLogin'
    fields['mem_pass'] = '******'
    fields['password'] = self.password
    fields['ppui_logintime'] = '5452'
    fields['quick_user'] = '******'
    fields['safeflg'] = '0'
    fields['splogin'] = '******'
    fields['staticpage'] = 'https://www.baidu.com/cache/user/html/v3Jump.html'
    fields['token'] = token
    fields['tpl'] = 'mn'
    fields['tt'] = str(utils.timestamp())
    fields['u'] = 'https://www.baidu.com/'
    fields['username'] = self.username
    fields['verifycode'] = verifycode
    return fields
def test_sms_notification_comment(self):
    """A comment notification is delivered to an SMS-registered number
    (skipped unless settings.TEST_SMS is enabled)."""
    if not settings.TEST_SMS:
        return
    event = Event(**{
        u'where': 'test',
        u'when': timestamp(),
        u'what': 'test',
        u'broadcast': False,
        u'posted_from': [37.422834216666665, -122.08536667833332],
        u'creator': self.get_user()[u'username'],
    })
    event.save()
    number = '+16475551234'
    name = 'Testy Smoth'
    # Register a non-user phone number against the event.
    reg, out, is_user = SMS.register(event, [{u'number': number, u'name': name}])
    self.assertEqual(len(reg), 1, 'correct ammout registered')
    result = notifications.send(reg[0][u'id'],
                                {u'type': 'comment',
                                 u'event_revision': event[u'revision'],
                                 u'event_id': event[u'id'],
                                 u'comment': 'my awesome comment',
                                 u'commenter': self.make_user()})
    self.assertEqual(len(result), 1, 'the correct number of notifications '
                                     'were sent')
    self.assertTrue(result[0], 'the notification was sent correctly')
def _read_request_origin_form(self, client_conn, scheme, host, port):
    """
    Read a HTTP request with regular (origin-form) request line.

    An example origin-form request line would be:

        GET /foo.html HTTP/1.1

    The request destination is already known from one of the following
    sources:
    1) transparent proxy: destination provided by platform resolver
    2) reverse proxy: fixed destination
    3) regular proxy: known from CONNECT command.

    Returns a flow.Request, or None when the client closed the connection.
    """
    # Upgrade to TLS first when the destination is HTTPS.
    if scheme.lower() == "https" and not self.ssl_established:
        self.establish_ssl(client_conn, host, port)
    line = self.get_line(self.rfile)
    if line == "":
        return None
    r = http.parse_init_http(line)
    if not r:
        raise ProxyError(400, "Bad HTTP request line: %s" % repr(line))
    method, path, httpversion = r
    headers = self.read_headers(authenticate=False)
    content = http.read_http_body_request(self.rfile, self.wfile, headers,
                                          httpversion,
                                          self.config.body_size_limit)
    return flow.Request(client_conn, httpversion, host, port, scheme, method,
                        path, headers, content,
                        self.rfile.first_byte_timestamp, utils.timestamp())
def get_t0(directory):
    """Return the parsed timestamp stored in *directory*/age_zero.

    Reads the first line of the ``age_zero`` file and converts it via
    ``timestamp()``. Returns None when the file is missing or cannot be
    opened.
    """
    filepath = os.path.join(directory, 'age_zero')
    try:
        # BUG FIX: the original leaked the file handle; `with`
        # guarantees it is closed on every path.
        with open(filepath, 'r') as f:
            return timestamp(f.readline())
    except IOError:
        return None
def __init__(self, excelName, sheetName, accessKey, secretKey, chunkSize_MB):
    """
    Store upload configuration, read the job list from an Excel sheet,
    open the S3 upload log, and run the upload pipeline
    (getFileList -> mkdirs -> uploadFiles).

    NOTE(review): the original docstring mentions Dropbox, but the bulk
    of this constructor drives an AWS S3 upload; the Dropbox section at
    the end looks like a copy-paste remnant -- confirm intent.
    """
    self.excelName = excelName
    self.sheetName = sheetName
    # Column names expected in the Excel sheet.
    self.names = ['inputFile', 'bucket', 'outputDir']
    self.accessKey = accessKey
    self.secretKey = secretKey
    # Convert megabytes to bytes for multipart-chunk sizing.
    self.chunkSize = chunkSize_MB * 1024 * 1024
    self.df = pandas.read_excel(self.excelName, sheet_name=self.sheetName,
                                names=self.names)
    print('Stage 2 - Data upload to AWS S3')
    logFile = open('./logs/upload/awsS3.log', 'w')
    logFile.write('%s\tData upload using API\n' % (utils.timestamp()))
    logFile.close()
    self.s3 = boto3.client('s3', aws_access_key_id=self.accessKey,
                           aws_secret_access_key=self.secretKey)
    self.getFileList()
    self.mkdirs()
    self.uploadFiles()
    # NOTE(review): `accessToken` is not defined anywhere in this scope;
    # this line would raise NameError if reached -- verify where the
    # Dropbox token is supposed to come from.
    self.dbx = dropbox.Dropbox(accessToken)
    # NOTE(review): getFileList/mkdirs/uploadFiles are invoked a second
    # time here, and logFile was already closed above (the final close()
    # is a harmless no-op on CPython but redundant) -- this repeated
    # block looks unintentional.
    self.getFileList()
    self.mkdirs()
    self.uploadFiles()
    logFile.close()
def test_attendants_with_event(self):
    """Fetching an event with ?attendants=true must return its
    attendant records, each carrying the full, non-null field set."""
    event = Event(broadcast=True, what='This is a test',
                  creator=self.get_user()[u'username'])
    event.save()
    att_user = self.make_user('att_user')
    attendance = {
        u'status': status.ATTENDING,
        u'timestamp': timestamp(),
        u'event': event[u'id'],
        u'user': att_user[u'id'],
    }
    Attendant(**attendance).save()
    response = self.get('/events/%s/?attendants=true' % event[u'revision'])
    self.assertEqual(response.status, 200, 'event get success')
    body = json.loads(response.read())
    self.assertTrue(u'attendants' in body, 'response has attendants')
    self.assertEqual(len(body['attendants']), 1, 'event has 1 attendant')
    record = body['attendants'][0]
    expected = (
        (u'username', 'att has username'),
        (u'user', 'att has user id'),
        (u'event', 'att has event'),
        (u'status', 'att has status'),
        (u'timestamp', 'att has timestamp'),
    )
    for field, message in expected:
        self.assertTrue(field in record, message)
    for k, v in record.iteritems():
        self.assertTrue(v is not None, '%s is not None' % k)
def quota(self):
    """Query the user's pan.baidu.com storage quota and return the raw
    API response."""
    query = {
        "channel": "chunlei",
        "clienttype": 0,
        "web": 1,
        "t": utils.timestamp(),
    }
    return self.api_request("http://pan.baidu.com/api/quota",
                            extra_data=query)
def get_verify_code(self):
    """
    Ask passport.baidu.com whether a captcha is required for this
    username; if so, download the captcha image to a cache file and
    prompt the user to type the code.

    Returns the user-entered captcha string when a captcha was
    required; falls through (implicitly returns None) otherwise.
    """
    url = 'https://passport.baidu.com/v2/api/?logincheck&'
    data = {
        "callback": "bdPass.api.login._needCodestringCheckCallback",
        "tpl": "mn",
        "charset": "UTF-8",
        "index": 0,
        "username": self.username,
        "time": utils.timestamp()
    }
    data_query = urllib.urlencode(data)
    url = "%s&%s" % (url, data_query)
    req = urllib2.Request(url)
    data = self.opener.open(req).read().strip()
    # Strip the JSONP wrapper: keep only the text between the first '('
    # and the last ')' of callback(...).
    data = data[data.find("(") + 1:data.rfind(")")]
    # SECURITY(review): eval() on a server-controlled response is
    # remote code execution waiting to happen; the Dummy globals dict
    # only maps bare names to themselves. Replace with json.loads().
    data = eval(
        data, type('Dummy', (dict, ), dict(__getitem__=lambda s, n: n))())
    codestring = data["codestring"]
    # A non-empty codestring means the server demands a captcha.
    if codestring != "":
        url = "https://passport.baidu.com/cgi-bin/genimage?%s" % codestring
        req = urllib2.Request(url)
        ret_data = self.opener.open(req).read()
        pic_image = utils.get_cache_file("pic")
        with open(pic_image, "wb") as fp:
            fp.write(ret_data)
        self.loginfo("Verify code pic download ok!")
        return raw_input("piz input code > ").strip()
def uploadFiles(self):
    """
    Uploads files to AWS S3 using boto3 multipart transfers.

    For every entry produced by getFileList(), uploads the local file
    to its S3 bucket/key and appends a line to the upload log.

    Usage:
    -----
    self.uploadFiles()

    Returns:
    -------
    NULL
    """
    config = TransferConfig(multipart_threshold=self.chunkSize,
                            multipart_chunksize=self.chunkSize,
                            max_io_queue=3,
                            use_threads=False)
    for fileName, fileSize, s3Bucket, s3File in zip(
            self.fileNameList, self.fileSizeList, self.s3BucketList,
            self.s3FileList):
        print('Uploading %s\tto\t%s' % (fileName, s3File))
        # BUG FIX: `s3` was an undefined name (NameError at runtime);
        # the client is stored on the instance as self.s3.
        self.s3.upload_file(fileName, s3Bucket, s3File, Config=config)
        # BUG FIX: `bucket` was undefined here; log the actual s3Bucket.
        # `with` guarantees the log file is flushed/closed per entry.
        with open('./logs/upload/awsS3.log', 'a') as logFile:
            logFile.write('%s\t%s\t%s\t%s\t%.6f GB\n' %
                          (utils.timestamp(), fileName, s3Bucket, s3File,
                           fileSize / 1024 / 1024 / 1024))
def get_verify_code(self):
    """
    Check with passport.baidu.com whether this username needs a
    captcha. When one is required, fetch the captcha image, save it to
    a cache file, and prompt the user for the code.

    Returns the stripped user input when a captcha was required;
    otherwise falls off the end (implicitly returns None).
    """
    url = 'https://passport.baidu.com/v2/api/?logincheck&'
    data = {
        "callback": "bdPass.api.login._needCodestringCheckCallback",
        "tpl": "mn",
        "charset": "UTF-8",
        "index": 0,
        "username": self.username,
        "time": utils.timestamp()
    }
    data_query = urllib.urlencode(data)
    url = "%s&%s" % (url, data_query)
    req = urllib2.Request(url)
    data = self.opener.open(req).read().strip()
    # Unwrap the JSONP callback: take the text between the outermost
    # parentheses.
    data = data[data.find("(") + 1: data.rfind(")")]
    # SECURITY(review): eval() on data fetched from the network is
    # dangerous even with the restricted Dummy globals; this should be
    # json.loads() instead.
    data = eval(data,
                type('Dummy', (dict,), dict(__getitem__=lambda s, n: n))())
    codestring = data["codestring"]
    # Non-empty codestring => captcha required.
    if codestring != "":
        url = "https://passport.baidu.com/cgi-bin/genimage?%s" % codestring
        req = urllib2.Request(url)
        ret_data = self.opener.open(req).read()
        pic_image = utils.get_cache_file("pic")
        with open(pic_image, "wb") as fp:
            fp.write(ret_data)
        self.loginfo("Verify code pic download ok!")
        return raw_input("piz input code > ").strip()
def prep_for_response_sms(self):
    """Build the fixtures used by SMS-response tests: a registered
    user, an event they created, and an SMS-only user linked to the
    event through an SMSRegister record.

    Returns (user, event, smsuser).
    """
    user = User(username='******', password='******',
                number='+16666666666')
    user.save()
    event_fields = {
        u'where': 'test',
        u'when': timestamp(),
        u'what': 'test',
        u'broadcast': False,
        u'posted_from': [37.422834216666665, -122.08536667833332],
        u'creator': user[u'username'],
    }
    event = Event(**event_fields)
    event.save()
    smsuser = User(number='+16475555555', display_name='Testy Smoth')
    smsuser.save()
    Attendant(user=user[u'id'], event=event[u'id']).save()
    registration = SMSRegister(contact_number=smsuser[u'number'],
                               twilio_number=settings.TWILIO_NUMBERS[0],
                               event=event[u'id'],
                               expires=event[u'when'],
                               user=user[u'id'])
    registration.save()
    return user, event, smsuser
def read_request_transparent(self, client_conn):
    """
    Read one HTTP request from a transparently-proxied client.

    The real destination is recovered from the platform's redirection
    table via the configured resolver; the scheme is inferred from
    whether the original destination port is listed in "sslports".

    Returns a flow.Request, or None if the client closed the connection
    before sending a request line. Raises ProxyError(502) when the
    original destination cannot be resolved and ProxyError(400) on a
    malformed request line.
    """
    orig = self.config.transparent_proxy["resolver"].original_addr(self.connection)
    if not orig:
        raise ProxyError(502, "Transparent mode failure: could not resolve original destination.")
    self.log(client_conn, "transparent to %s:%s" % orig)
    host, port = orig
    # Ports configured as SSL are treated as TLS: handshake with the
    # client before reading the request line.
    if port in self.config.transparent_proxy["sslports"]:
        scheme = "https"
        if not self.ssl_established:
            self.establish_ssl(client_conn, host, port)
    else:
        scheme = "http"
    line = self.get_line(self.rfile)
    # Empty line: the client disconnected before sending a request.
    if line == "":
        return None
    r = http.parse_init_http(line)
    if not r:
        raise ProxyError(400, "Bad HTTP request line: %s" % repr(line))
    method, path, httpversion = r
    headers = self.read_headers(authenticate=False)
    # Body framing comes from the headers + HTTP version;
    # body_size_limit caps the request body size.
    content = http.read_http_body_request(
        self.rfile, self.wfile, headers, httpversion, self.config.body_size_limit
    )
    # first_byte_timestamp = when the request started arriving;
    # utils.timestamp() = when it was fully read.
    return flow.Request(
        client_conn, httpversion, host, port, scheme, method, path,
        headers, content, self.rfile.first_byte_timestamp,
        utils.timestamp()
    )
def __init__(self, parent=None):
    """Assemble a pressure/error plot with a time axis inside a grid
    layout and prepare empty x/y data buffers."""
    QWidget.__init__(self, parent)
    bottom_axis = {'bottom': TimeAxisItem(orientation='bottom')}
    self.plot = pg.PlotWidget(title="Ошибки в ЖР",
                              labels={'left': 'Количество'},
                              axisItems=bottom_axis)
    self.plot.setYRange(0, 1000)
    # Initial X window: [now, now + 100s].
    self.plot.setXRange(timestamp(), timestamp() + 100)
    self.plot.showGrid(x=True, y=True)
    self.layout = QGridLayout(self)
    self.layout.addWidget(self.plot, 0, 0)
    self.plotCurve = self.plot.plot(pen='y')
    self.plotData = {'x': [], 'y': []}
def POST(self): x = web.input() # check csrf token if (not x.has_key('csrf_token') or x["csrf_token"]!=session["csrf_token"]): print "tokens aren't matching" session["csrf_token"] = uuid4().hex else : # protect using csrf_token if "data" in x : fname = os.path.join("logs","wavs", \ session.dialog_state.id + "_" + utils.timestamp(include_date=False) + ".wav") savefile = open(fname,"wb") savefile.write(x["data"]) savefile.close() # Warn if this is not for the most recent utterance if not x.has_key("utterance_key") or int(session["utterance_key"]) != int(x["utterance_key"]) : print "got recording with wrong utterance_key" print "comparing", x["utterance_key"], session["utterance_key"] uttIndex = int(x["utterance_key"]) for i in range(len(session.dialog_state.recordings), uttIndex+1): session.dialog_state.recordings.append("") session.dialog_state.recordings[uttIndex] = fname session["utterance_key"] = max(uttIndex, session["utterance_key"]) web.header('Content-Type', 'application/json') return json.dumps({ "csrf_token":session["csrf_token"] })
def __init__(self, username, password):
    """
    Initialise a Baidu login session: store credentials, create a
    requests session, record the config-file section/field names used
    to persist cookies, and capture a request timestamp.
    """
    self.username = username
    self.password = password
    self.session = requests.session()
    # https://ss0.bdstatic.com/5LMZfyabBhJ3otebn9fN2DJv/passApi/js/login_tangram_f2e986d5.js
    # self.index_url = 'https://www.baidu.com/'
    # href="https://passport.baidu.com/v2/?login&tpl=mn&u=http%3A%2F%2Fwww.baidu.com%2F"
    # https://www.baidu.com/                 -- home page
    # https://passport.baidu.com/v2/?login   -- login page
    # get token:
    # https://passport.baidu.com/v2/api/?getapi&tpl=pp&apiver=v3&tt=1431305718340&class=login&logintype=basicLogin&callback=bd__cbs__k7txq4
    #
    # NOTE: the home page must be visited first so cookies are saved
    # automatically; only then will sign_url render its content.
    self.sign_url = 'https://passport.baidu.com/v2/api/?login'
    # self.mission_url = 'http://v2ex.com/mission/daily'
    self.config_section = 'baidu'
    '''config-file section name'''
    self.config_cookies = 'cookies'
    '''config-file field name'''
    self.tt = utils.timestamp();
def reportRequest(self, userCookie, startDate, endDate=None, category=None):
    """Return the user's expenses in [startDate, endDate] as JSON.

    Parameters
    ----------
    userCookie : session cookie identifying the user.
    startDate, endDate : inclusive date range; endDate defaults to the
        current time (per call).
    category : optional expense-category filter.

    Returns a JSON array of expense records, or None when the cookie
    does not resolve to a user id.
    """
    # BUG FIX: the default used to be `endDate=utils.timestamp(False)`,
    # which Python evaluates ONCE at function-definition time -- every
    # later call without endDate silently reused the import-time clock.
    # Resolve the default on each call instead.
    if endDate is None:
        endDate = utils.timestamp(False)
    uid = self.getUser(ck=userCookie)['_id']
    if uid is None:
        return
    cursor = self.__conn.cursor(MySQLdb.cursors.DictCursor)
    # Table name comes from trusted config; user data goes through
    # parameterized placeholders.
    if category is None:
        cursor.execute("""
            SELECT _billId, _amount, _date, _category
            FROM """ + DbWorker.dbCfg['table.expense'] + """
            WHERE _userId = %s AND _date >= %s AND _date <= %s
            """, (uid, startDate, endDate))
    else:
        cursor.execute("""
            SELECT _billId, _amount, _date, _category
            FROM """ + DbWorker.dbCfg['table.expense'] + """
            WHERE _userId = %s AND _date >= %s AND _date <= %s AND _category = %s
            """, (uid, startDate, endDate, category))
    result = cursor.fetchall()
    expenses = [self.__transformDateInRec(row) for row in result]
    cursor.close()
    return json.dumps(expenses)
def ios_hosts(host, device_type, *commands):
    """Run each CLI command against an IOS device and write the results
    to a single timestamped Excel workbook, one sheet per command."""
    stamp = utils.timestamp()
    target = host.strip()
    output_file = f"output/{stamp}-{target}.xlsx"
    with pd.ExcelWriter(output_file) as writer:
        for cmd in commands:
            rows = utils.ios_connection(host, device_type, cmd)
            frame = pd.DataFrame(rows)
            frame.to_excel(writer, sheet_name=utils.sheet(cmd), index=False)
def _normalize_sessions(self, sessions):
    """Yield normalized auth-session records.

    A session is skipped when it is missing required keys, is not in
    the AUTHENTICATED state, carries no IP addresses, or reports a MAC
    address in its user field.
    """
    for entry in sessions:
        unusable = (
            not REQUIRED_KEYS.issubset(frozenset(entry.keys()))
            or entry['state'] != 'AUTHENTICATED'
            or not entry.get('ipAddresses', [])
        )
        if unusable:
            continue
        user = entry['adNormalizedUser']
        # Skip MAC addresses reported in the user field
        if MAC_ADDRESS.match(user):
            continue
        record = {
            '_time': timestamp(dt_parse(entry['timestamp'])),
            'Computer': None,
            'TargetUserName': self._normalize_item(user),
            'EventCode': None,
            'ComputerAddress': entry['ipAddresses'][0],
            'ActiveDirectoryDomain':
                self._normalize_item(entry['adUserDomainName']),
        }
        yield record
def flush_data(self, data, now, compress=True):
    # Loops through the collected lines and looks for ones we know
    # how to parse.
    # Example: "Jun 2 21:22:18 10.64.99.199 2016-06-02T21:19:00--5:00
    # 10.64.99.199 CP-GW - Log [[email protected] ...]"
    #
    # NOTE(review): the `now` and `compress` parameters are not used in
    # this body -- confirm whether callers (or an override) rely on them.
    for line in data:
        # Skip lines we can't interpret
        try:
            # Narrow to the bracketed payload: from the start marker to
            # the last ']', then split shell-style into key=value fields.
            i = line.index(self.START_STRING)
            j = line.rindex(']')
            fields = [x.split('=') for x in shlex.split(line[i:j])]
        except ValueError:
            continue
        else:
            # Keep only well-formed key=value pairs.
            D_item = {x[0]: x[1] for x in fields if len(x) == 2}
            # Require all fields to be present
            if any(x not in D_item for x in self.SESSION_FIELDS):
                continue
            # Store the line's data at the associated 10 minute segments
            try:
                # Whitespace-field 5 of the line carries the event time.
                dt = self._get_dt(line.split()[4])
            except ValueError:
                continue
            else:
                D_item['timestamp'] = timestamp(dt)
                # Bucket into the enclosing 10-minute window.
                segment = dt.replace(minute=(dt.minute // 10) * 10,
                                     second=0, microsecond=0)
                self.parsed_data[segment].append(D_item)
def test_event_new_notification(self):
    """Inviting a user to an event must yield exactly one notification
    that carries the event's current revision."""
    invitee = self.make_user(username='******')
    self.register_for_notifications(user=invitee)
    fields = {
        u'where': 'test',
        u'when': timestamp(),
        u'what': 'test',
        u'broadcast': False,
        u'posted_from': [37.422834216666665, -122.08536667833332],
        u'creator': self.get_user()[u'username'],
    }
    event = Event(**fields)
    event.save()
    response = self.post('/events/%s/invites/' % event[u'id'],
                         {'users': [invitee[u'username']]})
    self.assertEqual(response.status, 200, 'user invited')
    fresh = self.get_new_notifications(user=invitee)
    self.assertEqual(len(fresh), 1, 'one new notification')
    note = fresh[0]
    self.assertTrue(u'event_revision' in note,
                    'poll response has event rev')
    self.assertEqual(note[u'event_revision'], event[u'revision'],
                     'event has the corrrect revision')
def make_logfile_name(params=None, net_save_filename=None, timestamp=None, train=True):
    '''
    Returns the name of the logfile for the current training/forward pass run.

    params : parameter dict holding 'train_save_net' / 'output_prefix';
        used when net_save_filename is not given.
    net_save_filename : explicit save prefix; overrides params when set.
    timestamp : optional pre-computed timestamp string (default: now).
    train : True -> "train" log, False -> "forward" log.
    '''
    # Need to specify either a params object, or the net save prefix
    utils.assert_arglist(params, [net_save_filename])
    if params is not None:
        if train:
            _net_save_filename = params['train_save_net']
        else:
            _net_save_filename = params['output_prefix']
    # An explicit filename takes priority over the params-derived one.
    if net_save_filename is not None:
        _net_save_filename = net_save_filename
    save_prefix = os.path.splitext(_net_save_filename)[0]
    save_prefix_valid = len(save_prefix) > 0
    assert(save_prefix_valid)
    if timestamp is None:
        timestamp = utils.timestamp()
    mode = "train" if train else "forward"
    directory_name = os.path.dirname(save_prefix)
    # BUG FIX: os.mkdir('') raises OSError when save_prefix has no
    # directory component, and mkdir cannot create nested directories;
    # guard the empty case and use makedirs for nested paths.
    if directory_name and not os.path.exists(directory_name):
        os.makedirs(directory_name)
    save_filename = "{}_{}_{}.log".format(save_prefix, mode, timestamp)
    return save_filename
def response(code):
    """Build the standard response envelope for *code*.

    Codes below 400 are successes and carry their message in 'alert'
    (looked up in RESPONSE_CODE); codes >= 400 carry theirs in 'error'
    (looked up in ERROR_CODE). The unused field is always ''.

    Returns the envelope dict with keys: response, time, alert, error.
    """
    # Deduplicated: the two branches built identical dicts differing
    # only in the alert/error fields.
    success = code < 400
    return {
        'response': code,
        'time': timestamp(),
        'alert': RESPONSE_CODE[code] if success else '',
        'error': '' if success else ERROR_CODE[code],
    }
def _read_request_origin_form(self, client_conn, scheme, host, port):
    """
    Read a HTTP request with regular (origin-form) request line.
    An example origin-form request line would be:
    GET /foo.html HTTP/1.1
    The request destination is already known from one of the following sources:
    1) transparent proxy: destination provided by platform resolver
    2) reverse proxy: fixed destination
    3) regular proxy: known from CONNECT command.

    Returns a live flow.Request, or None if the client closed the
    connection. Raises ProxyError(400) on a malformed request line.
    """
    # Handshake TLS with the client first when proxying an https target.
    if scheme.lower() == "https" and not self.ssl_established:
        self.establish_ssl(client_conn, host, port)
    line = self.get_line(self.rfile)
    # Empty line: the client closed the connection cleanly.
    if line == "":
        return None
    r = http.parse_init_http(line)
    if not r:
        raise ProxyError(400, "Bad HTTP request line: %s" % repr(line))
    method, path, httpversion = r
    headers = self.read_headers(authenticate=False)
    # Answer "Expect: 100-continue" (if present) before reading the body.
    self.handle_expect_header(headers, httpversion)
    content = http.read_http_body(
        self.rfile, headers, self.config.body_size_limit, True
    )
    r = flow.Request(
        client_conn, httpversion, host, port, scheme, method, path,
        headers, content, self.rfile.first_byte_timestamp,
        utils.timestamp()
    )
    # Attach the live connection streams so the flow can interact with
    # the in-flight request.
    r.set_live(self.rfile, self.wfile)
    return r
def get_result(self):
    """Refresh the rx/tx counters via statistic() and return them
    together with the collection time."""
    self.statistic()
    return {
        'ctime': timestamp(),
        'rx': self.rx,
        'tx': self.tx,
    }
def get_result(self):
    """Refresh the IOPS counters via statistic() and return read/write
    rates together with the collection time."""
    self.statistic()
    snapshot = {'read': self.read_iops, 'write': self.write_iops}
    snapshot['ctime'] = timestamp()
    return snapshot
def _api_defaults(self, kwargs):
    """Inject the mandatory MWS authentication/signature parameters
    into *kwargs* (the dict is mutated in place)."""
    defaults = {
        'AWSAccessKeyId': self.mws.mws_access_key,
        'SellerId': self.mws.merchant_id,
        'SignatureMethod': 'HmacSHA256',
        'SignatureVersion': '2',
        'Timestamp': timestamp(),
    }
    kwargs.update(defaults)
def __init__(self, form):
    """Create a topic from a submitted form: title/content/node fields
    plus creation metadata, resolving the top-level node id from the
    chosen node."""
    self.created_time = timestamp()
    self.clicked = 0
    self.title = form.get('title', '')
    self.content = form.get('content', '')
    self.node_id = int(form.get('node', '0'))
    # NOTE: assumes the submitted node id refers to an existing Node row.
    parent_node = Node.query.filter_by(id=self.node_id).first()
    self.top_node_id = parent_node.top_node_id
def get_token(self, user=None):
    """Insert a session record for *user* (default: the test user) and
    return the new record's id as a string token."""
    if not user:
        user = self.get_user()
    session_doc = {
        u'timestamp': timestamp(),
        u'username': user[u'username'],
    }
    return str(db.objects.session.insert(session_doc))