def create_comment(self, content, user, ip, commit=False):
    ref_name = get_remote_side(self, 'comments')
    cls = get_remote_side_class(self, 'comments')
    # verify that the user has not created any comment for this article
    # within the last 30s
    # TODO: cache this lookup
    last_comment = cls.query.filter_by(ip=hash(ip)).order_by(
        cls.date_created.desc()).first()
    if last_comment:
        time_diff = now_ms() - last_comment.date_created
        limit = self.comment_limit
        if time_diff < limit:
            raise ModelException(
                type='VALIDATION_FAILED',
                message=_(u'Please wait %(time)s seconds before sending new comment',
                          time=int(round((limit - time_diff) / 1000))))
    comment = cls()
    setattr(comment, ref_name, self.id)
    comment.user_id = user.id
    comment.content = content
    comment.ip = hash(ip)
    # also update the comment count
    cache.update_user_comment_count(self, user.id)
    db_session.add(comment)
    if commit:
        db_session.commit()
    return comment
def send_sms(self, phone, template_id='1', datas=None):
    url = '%s/sms/sendtemplate.action' % self.baseurl
    headers = {
        'AppKey': self.appkey,
        'Nonce': '%s%s' % (int(time.time() * 1000), random.randint(100000, 999999)),
        'CurTime': '%s' % (int(time.time())),
        'Content-Type': 'application/x-www-form-urlencoded',
        'charset': 'utf-8'
    }
    headers['CheckSum'] = utils.hash(
        'sha1', '%s%s%s' % (self.appsecret, headers.get('Nonce'), headers.get('CurTime')))
    data = 'templateid=%s&mobiles=["%s"]&params=%s' % (template_id, phone, json.dumps(datas))
    rs = self.send(url, data=data, headers=headers)
    rs_data = rs.read()
    logger.info('NeteaseSMS', 'send sms', headers, data, rs_data)
    rs_data = json.loads(rs_data)

    ##############
    # check the delivery status
    url = '%s/sms/querystatus.action' % self.baseurl
    headers = {
        'AppKey': self.appkey,
        'Nonce': '%s%s' % (int(time.time() * 1000), random.randint(100000, 999999)),
        'CurTime': '%s' % (int(time.time())),
        'Content-Type': 'application/x-www-form-urlencoded',
        'charset': 'utf-8'
    }
    headers['CheckSum'] = utils.hash(
        'sha1', '%s%s%s' % (self.appsecret, headers.get('Nonce'), headers.get('CurTime')))
    data = 'sendid=%s' % rs_data.get('obj')
    rs = self.send(url, data=data, headers=headers)
    logger.debug('NeteaseSMS', 'send sms check', data, rs.read())
    ##############

    if rs_data.get('code') != 200:
        raise RuntimeError(json.dumps(rs_data))
    return True
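# The CheckSum header above follows Netease's scheme: sha1(appsecret + nonce
# + curtime). A minimal standalone sketch, assuming hashlib.sha1 matches
# utils.hash('sha1', ...) and using a placeholder app secret:
import hashlib
import random
import time

app_secret = 'your-app-secret'  # hypothetical placeholder
nonce = '%s%s' % (int(time.time() * 1000), random.randint(100000, 999999))
cur_time = str(int(time.time()))
check_sum = hashlib.sha1(
    ('%s%s%s' % (app_secret, nonce, cur_time)).encode()).hexdigest()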
def create_topic(self, title, content, tags, ip):
    # verify that the user has not created any topic within the last minute
    last_topic = Topic.query.filter_by(ip=hash(ip)).order_by(
        'date_created DESC').first()
    if last_topic:
        time_diff = now_ms() - last_topic.date_created
        limit = 60 * 1000  # ms
        if time_diff < limit:
            raise ModelException(
                type='VALIDATION_FAILED',
                message=_(u'Please wait %(time)s seconds before creating new topic',
                          time=int(round((limit - time_diff) / 1000))))
    topic = Topic()
    topic.title = title
    topic.content = content
    topic.user_id = self.id
    topic.ip = hash(ip)
    db_session.add(topic)
    db_session.commit()
    tags = process_tags_input(tags)
    topic.create_tags(tags, commit=True)
    return topic
def lastpage_extractor_for_blacklist(self):
    print "Extract Last Pages"
    query = self.getSession.query(Comic)
    x = os.path.join(AppFolders.appBlacklistPages(), "lastpage")
    y = os.path.join(AppFolders.appBlacklistPages(), "lastpage_double")
    z = os.path.join(AppFolders.appBlacklistPages(), "firstpage")
    w = os.path.join(AppFolders.appBlacklistPages(), "firstpage_double")
    if not os.path.isdir(x):
        os.makedirs(x)
    if not os.path.isdir(y):
        os.makedirs(y)
    if not os.path.isdir(z):
        os.makedirs(z)
    if not os.path.isdir(w):
        os.makedirs(w)
    c = 0
    for row in query:
        print('Extracting last page from ' + str(c) + ' ' + row.path)
        c += 1
        ca = self.getComicArchive(row.id, row.path)
        # auto convert webp (disable for chunky or fix web book reader)
        image_data = ca.getPage(row.page_count - 1)
        hash = utils.hash(image_data)
        # aspect ratio check
        #im = Image.open(StringIO.StringIO(image_data))
        #w, h = im.size
        #if h > w:
        #    continue
        image_cover = ca.getPage(0)
        image_page2 = ca.getPage(1)
        if image_page2:
            imc = Image.open(StringIO.StringIO(image_cover))
            hash2 = utils.hash(image_cover)
            im2 = Image.open(StringIO.StringIO(image_page2))
            w1, h1 = imc.size
            w2, h2 = im2.size
            if h1 <= w1 and h2 > w2 and not self.checkHashBlacklist(hash2):
                if os.path.isfile(os.path.join(z, str(hash2))):
                    if os.path.isfile(os.path.join(w, str(hash2))):
                        print "Double Already Exists"
                    else:
                        print "Adding Double"
                        file2 = open(os.path.join(w, str(hash2)), "w")
                        file2.write(image_cover)
                        file2.close()
                else:
                    print "Adding Firstpage"
                    file2 = open(os.path.join(z, str(hash2)), "w")
                    #file2.write("1")
                    file2.write(image_cover)
                    file2.close()
def merkle_root(self, transactions):
    hash_transactions = [hash(str(x)) for x in transactions]
    while len(hash_transactions) != 1:
        if len(hash_transactions) % 2 != 0:
            # odd number of hashes: duplicate the last one so all can be paired
            last_transaction = hash_transactions[-1]
            hash_transactions.append(last_transaction)
        transaction_s = []
        for i in range(0, len(hash_transactions), 2):
            transaction_s.append(
                hash(str(hash_transactions[i]) + str(hash_transactions[i + 1])))
        hash_transactions = transaction_s
    merkle = hash_transactions[0]
    return merkle
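# A minimal, self-contained sketch of the pairwise fold above, using
# hashlib.sha256 in place of the builtin hash() the method relies on (an
# assumption; Python's builtin hash() is salted per process, so it is not
# reproducible across runs the way a real Merkle root must be):
import hashlib

def sha256_hex(s):
    return hashlib.sha256(s.encode()).hexdigest()

def merkle_root_demo(leaves):
    layer = [sha256_hex(str(x)) for x in leaves]
    while len(layer) > 1:
        if len(layer) % 2:
            layer.append(layer[-1])  # duplicate the last hash on odd layers
        layer = [sha256_hex(layer[i] + layer[i + 1])
                 for i in range(0, len(layer), 2)]
    return layer[0]

print(merkle_root_demo(['tx1', 'tx2', 'tx3']))  # deterministic 64-char hex root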
async def register():
    username = input("Username:")
    # The password prompt and the compromised-password check were scrubbed
    # from the source; reconstructed here with a hypothetical
    # utils.pwned_count helper that returns the breach count, or None when
    # the lookup itself fails.
    password = getpass.getpass("Password:")
    count = utils.pwned_count(password)
    if count is None:
        print("\nCould not verify if the password is compromised.\n")
    elif count:
        print("\nPassword is compromised. Must use another password.\n")
        raise Exception("Compromised password at registration phase.")
    rpassword = getpass.getpass("Repeat password:")
    if password != rpassword:
        print("\nPasswords are not the same.\n")
        raise Exception("Passwords differ at registration phase.")
    password = utils.hash(password)
    creds = utils.create_json(usr=username, pwd=password)
    if utils.validate_user(creds):
        async with websockets.connect(uri) as websocket:
            await websocket.send(utils.create_json(request="register"))
            resp = await websocket.recv()
            if resp == "ok":
                await websocket.send(creds)
                resp = await websocket.recv()
                if resp == "success":
                    print("registration successful")
                else:
                    raise Exception('\nServer could not register the user.')
            else:
                raise Exception('\nServer could not register the user.')
def text2spvec(self, query):
    """Create a sparse tfidf-weighted word vector from query.

    tfidf = log(tf + 1) * log((N - Nt + 0.5) / (Nt + 0.5))
    """
    # Get hashed ngrams
    words = self.parse(utils.normalize(query))
    wids = [utils.hash(w, self.hash_size) for w in words]

    if len(wids) == 0:
        if self.strict:
            raise RuntimeError('No valid word in: %s' % query)
        else:
            logger.warning('No valid word in: %s' % query)
            return sp.csr_matrix((1, self.hash_size))

    # Count TF
    wids_unique, wids_counts = np.unique(wids, return_counts=True)
    tfs = np.log1p(wids_counts)

    # Count IDF
    Ns = self.doc_freqs[wids_unique]
    idfs = np.log((self.num_docs - Ns + 0.5) / (Ns + 0.5))
    idfs[idfs < 0] = 0

    # TF-IDF
    data = np.multiply(tfs, idfs)

    # One row, sparse csr matrix
    indptr = np.array([0, len(wids_unique)])
    spvec = sp.csr_matrix((data, wids_unique, indptr), shape=(1, self.hash_size))
    return spvec
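# Worked numeric check of the weighting above, with numpy only:
# tf = log1p(count); idf = log((N - Nt + 0.5) / (Nt + 0.5)), clamped at 0.
import numpy as np

num_docs = 1000                    # N: total documents
counts = np.array([2, 1])          # term counts within the query
doc_freqs = np.array([10, 600])    # Nt: documents containing each term
tfs = np.log1p(counts)             # [1.10, 0.69]
idfs = np.log((num_docs - doc_freqs + 0.5) / (doc_freqs + 0.5))
idfs[idfs < 0] = 0                 # the Nt=600 term clamps to 0 (too common)
weights = tfs * idfs               # only the rare (Nt=10) term survives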
def generate_merkle_root(transactions):
    txn_size = len(transactions)
    # if there is only 1 transaction then the merkle root is the transaction id itself
    if txn_size == 1:
        return transactions[0]['transaction_id']
    top_tree_layer = [transaction['transaction_id'] for transaction in transactions]
    tree_layer = []
    while txn_size != 1:
        # if a layer holds an odd number of hashes, double up the last one;
        # this covers the initial transaction list and intermediate layers alike
        if txn_size % 2 != 0:
            top_tree_layer.append(top_tree_layer[-1])
            txn_size += 1
        tree_layer = []
        for i in range(0, txn_size, 2):
            txn_duo = f'{top_tree_layer[i]}{top_tree_layer[i + 1]}'.encode()
            tree_layer.append(utils.hash(txn_duo, serialized=True))
        txn_size = len(tree_layer)
        top_tree_layer = tree_layer
    return tree_layer[0]
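# Usage sketch for generate_merkle_root above (assumes utils.hash from the
# same project is in scope). Each transaction dict is assumed to carry a
# 'transaction_id' digest produced elsewhere; with three leaves, the last
# id is doubled up before pairing.
txns = [{'transaction_id': 'id-a'},
        {'transaction_id': 'id-b'},
        {'transaction_id': 'id-c'}]
root = generate_merkle_root(txns)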
def comicBlacklist(self, comic_id, pagenum):
    #obj = session.query(database.Blacklist).filter(database.Blacklist.comic_id == int(comic_id), database.Blacklist.page == int(pagenum)).first()
    #if obj is None:
    session = self.getSession()
    #self.getComic()
    #x = self.getSession().query(Comic.id, Comic.path, Comic.mod_ts)
    image_data = self.getComicPage(comic_id, pagenum, False)
    hash = utils.hash(image_data)
    #comichash = self.getHashEntity(Blacklist, hash)
    #self.getComic(comic_id).blacklist(comichash)
    obj = self.getSession().query(Blacklist.hash).filter(Blacklist.hash == hash).first()
    if obj is None:
        try:
            blacklist = Blacklist()
            blacklist.hash = hash
            blacklist.detect = len(image_data)
            file = open(os.path.join(AppFolders.appBlacklistPages(), str(blacklist.hash)), "w")
            file.write(image_data)
            file.close()
            #blacklist.comic_id = int(comic_id)
            #blacklist.page = int(pagenum)
            blacklist.ts = datetime.datetime.utcnow()
            session.add(blacklist)
            session.commit()
            session.close()
        except Exception, e:
            print str(e)
            logging.error("Blacklist: Problem blocking page {} on comic {}".format(pagenum, comic_id))
def Login():
    BASEDIR = os.path.dirname(os.path.abspath(__file__))
    try:
        login_options, ok = config.ReadConfig(
            os.path.join(BASEDIR, 'conf.ini'), 'LOGIN')
    except Exception as e:
        return False
    MAX_LOGIN_TIMES = float(login_options['max_login_times'])
    ADMIN_NAME = login_options['admin_name']
    ADMIN_PWD = login_options['admin_pwd']
    is_login = False
    count = 1
    while count <= MAX_LOGIN_TIMES:
        username = InputMsg("Enter the admin username: ")
        if username == ADMIN_NAME:
            count = 1
            while count <= MAX_LOGIN_TIMES:
                # password = InputMsg("Enter the admin password: ")
                password = getpass.getpass("Enter the admin password: ")
                if hash(password) == ADMIN_PWD:
                    SuccMsg("You have logged in successfully!")
                    is_login = True
                    return is_login
                else:
                    WarnMsg("The password you entered is wrong, please try again!")
                    count += 1
        else:
            WarnMsg("The username you entered is wrong, please try again!")
            count += 1
    WarnMsg("You have entered it incorrectly more than three times; the account is locked!")
    is_login = False
    return is_login
def read_config(self):
    self.logger.debug("Loading the config")
    primary_key = None
    context = "global"
    self.logger.debug(f"hunting for {self.primary_keys}")
    try:
        with open(self.filename, "r") as file:
            for line in file:
                # print(f"> {line.strip()}")
                self.logger.debug(f"> {line.strip()}")
                line = line.split("#", 1)[0].strip()
                tokens = line.split(": ", 1)
                if tokens[0] == "master config" and context == "global":
                    self.master_config = tokens[1]
                elif tokens[0] in self.primary_keys:
                    self.logger.debug(f"got one: {tokens[0]} :: {tokens[1]}")
                    context = utils.hash(tokens[1])
                    self.data[context] = {}
                    self.data[context][tokens[0]] = tokens[1]
                elif len(tokens) == 2:
                    # not a "master" or "slave", must be a config option
                    self.set(context, tokens[0], tokens[1])
        self.logger.debug(self.data)
    except BaseException:
        self.logger.exception("Fatal error reading config")
        self.logger.error(f"Confirm {self.filename} is readable")
        sys.exit(1)
def update_view_count(self, ip, user=None, commit=False):
    """Updates the view count of the entry.

    The view count is only updated once every 2 hours to avoid duplication.

    Args:
        ip (str): the IP address of the current user
        user (User): the current user (None if it is a guest)
        commit (bool): whether to commit changes

    Returns:
        True if view_count is updated, False otherwise
    """
    updated = False
    last_view = self._last_ip_view(hash(ip))
    threshold = 2 * 3600 * 1000  # update view_count once every 2 hours
    diff = now_ms() - last_view
    if diff == 0 or diff > threshold:
        cls = self.__class__
        self.query.filter_by(id=self.id).update({cls.view_count: cls.view_count + 1})
        updated = True
        if commit:
            db_session.commit()
    # add the entry to the user log
    if user is not None:
        self._update_user_view_log(user.id)
    return updated
def transform(self, docs: List[str]) -> sp.sparse.csr_matrix:
    docs_tfidfs = []
    # TODO Try to vectorize w/o for loop
    for doc in docs:
        ngrams = list(self.processing_fn([doc]))
        hashes = [hash(ngram, self.hash_size) for ngram in ngrams[0]]
        hashes_unique, q_hashes = np.unique(hashes, return_counts=True)
        tfs = np.log1p(q_hashes)

        # TODO ? revise policy if len(q_hashes) == 0
        if len(q_hashes) == 0:
            return sp.sparse.csr_matrix((1, self.hash_size))

        size = len(self.doc_index)
        Ns = self.term_freqs[hashes_unique]
        idfs = np.log((size - Ns + 0.5) / (Ns + 0.5))
        idfs[idfs < 0] = 0

        tfidf = np.multiply(tfs, idfs)
        indptr = np.array([0, len(hashes_unique)])
        sp_tfidf = sp.sparse.csr_matrix((tfidf, hashes_unique, indptr),
                                        shape=(1, self.hash_size))
        docs_tfidfs.append(sp_tfidf)

    transformed = sp.sparse.vstack(docs_tfidfs)
    return transformed
def mining(self, index, transactions, previous_hash, merkle_root):
    # brute-force a nonce whose block hash starts with four zeros
    for i in range(1, self.max_nounce):
        string = str(index) + str(transactions) + str(i) + str(
            previous_hash) + str(merkle_root)
        current_hash = str(hash(string))
        if current_hash.startswith('0000'):
            return i
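# A self-contained proof-of-work sketch in the spirit of mining() above,
# with hashlib.sha256 standing in (an assumption) for the builtin hash()
# used there; the builtin is salted per process, so a real chain needs a
# stable digest like sha256.
import hashlib

def find_nonce(payload, difficulty='0000', max_nonce=10**7):
    for nonce in range(1, max_nonce):
        digest = hashlib.sha256(('%s%d' % (payload, nonce)).encode()).hexdigest()
        if digest.startswith(difficulty):
            return nonce, digest
    return None, None  # no nonce found within the search bound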
def validate(provided_username, provided_password):
    if provided_username in users:
        return users[provided_username] == utils.hash(provided_password)
    return False
def store(self, raw):
    key = hash(str(uuid.uuid1()))
    upload_token = self.credentials.upload_token(self.bucket_name, key)
    ret, err = put_data(upload_token, key, raw)
    if ret is not None:
        return "%s/%s" % (self.imageServerUrl, ret['key'])
    else:
        logging.error('upload error: %s' % err)
def set_pwd(username, password):
    """Set a password for an existing user."""
    # Hash the raw password.
    password = utils.hash(password)
    return sudo("echo '{}:{}' | chpasswd -e".format(username, password))
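# `chpasswd -e` expects a crypt(3)-style encrypted password, so utils.hash
# must emit that format for the call above to work. A stdlib sketch (note:
# the `crypt` module is Unix-only, deprecated since Python 3.11 and removed
# in 3.13):
import crypt

hashed = crypt.crypt('s3cret', crypt.mksalt(crypt.METHOD_SHA512))
# -> '$6$<salt>$<digest>', the form `chpasswd -e` accepts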
async def login(self, credential):
    user = await self.user_dao.find_by_username(credential['username'])
    if user:
        if utils.hash(credential['password'], user['salt']) == user['password']:
            return user
    raise CredentailInvalid()
async def login(self, credential):
    async with self.engine.acquire() as connection:
        user_dao = UserDAO(connection)
        user = await user_dao.find_by_username(credential['username'])
        if user:
            if utils.hash(credential['password'], user['salt']) == user['password']:
                return user
        raise CredentailInvalid()
async def register(self, user):
    salt = utils.rand()
    user['password'] = utils.hash(user['password'], salt)
    user['salt'] = salt
    result = await self.connection.execute(UserTable.insert().values(**user))
    user['id'] = result.lastrowid
    return user
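# A minimal salted-hash sketch matching the register/login pair above;
# secrets.token_hex and hashlib.sha256 are assumed stand-ins for
# utils.rand and utils.hash(password, salt).
import hashlib
import secrets

def hash_pw(password, salt):
    return hashlib.sha256((salt + password).encode()).hexdigest()

salt = secrets.token_hex(16)
stored = hash_pw('hunter2', salt)            # persisted at registration
assert hash_pw('hunter2', salt) == stored    # recomputed and compared at login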
def create_answer(self, content, user, ip, commit=False):
    answer = Answer()
    answer.question_id = self.id
    answer.ip = hash(ip)
    answer.content = content
    answer.user_id = user.id
    db_session.add(answer)
    if commit:
        db_session.commit()
    return answer
def get_permuted_index(index: int, list_size: int, seed: Hash32) -> int:
    """
    Return `p(index)` in a pseudorandom permutation `p` of `0...list_size-1`
    with ``seed`` as entropy.

    Utilizes 'swap or not' shuffling found in
    https://link.springer.com/content/pdf/10.1007%2F978-3-642-32009-5_1.pdf
    See the 'generalized domain' algorithm on page 3.
    """
    for round in range(SHUFFLE_ROUND_COUNT):
        pivot = bytes_to_int(hash(seed + int_to_bytes1(round))[0:8]) % list_size
        flip = (pivot - index) % list_size
        position = max(index, flip)
        source = hash(seed + int_to_bytes1(round) + int_to_bytes4(position // 256))
        byte = source[(position % 256) // 8]
        bit = (byte >> (position % 8)) % 2
        index = flip if bit else index
    return index
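# Self-contained sanity check of the swap-or-not permutation above. The
# spec helpers are modeled with assumptions (sha256 for hash, little-endian
# int conversions, SHUFFLE_ROUND_COUNT = 90), so digests won't match a real
# client, but the key property is visible: each round swaps index/flip pairs
# consistently, so the whole map is a bijection on 0..list_size-1.
import hashlib

SHUFFLE_ROUND_COUNT = 90  # assumed constant

def permute_demo(index, list_size, seed):
    digest = lambda b: hashlib.sha256(b).digest()
    for rnd in range(SHUFFLE_ROUND_COUNT):
        pivot = int.from_bytes(digest(seed + bytes([rnd]))[0:8], 'little') % list_size
        flip = (pivot - index) % list_size
        position = max(index, flip)
        source = digest(seed + bytes([rnd]) + (position // 256).to_bytes(4, 'little'))
        byte = source[(position % 256) // 8]
        if (byte >> (position % 8)) % 2:
            index = flip
    return index

seed = b'\x01' * 32
assert sorted(permute_demo(i, 100, seed) for i in range(100)) == list(range(100))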
def generate_keys(company, password):
    company_id = str(company.id)
    company_key = utils.generate_id()
    company_key_encrypted = utils.encrypt(text=company_key, password=password)
    public_key = company_id + "_" + utils.generate_id()
    private_key = company_id + "_" + utils.generate_id()
    private_key_hash = utils.hash(string=private_key)
    private_key_encrypted = utils.encrypt(text=private_key, password=company_key)
    password_hash = utils.hash(string=password)
    return dict(public_key=public_key,
                company_key_encrypted=company_key_encrypted,
                private_key_hash=private_key_hash,
                private_key_encrypted=private_key_encrypted,
                password_hash=password_hash)
def create_article(self, title, content, tags, ip):
    article = Article()
    article.title = title
    article.content = content
    article.user_id = self.id
    article.ip = hash(ip)
    db_session.add(article)
    db_session.commit()
    tags = process_tags_input(tags)
    article.create_tags(tags, commit=True)
    return article
def create_question(self, title, content, tags, ip):
    question = Question()
    question.title = title
    question.content = content
    question.user_id = self.id
    question.ip = hash(ip)
    db_session.add(question)
    db_session.commit()
    tags = process_tags_input(tags)
    question.create_tags(tags, commit=True)
    return question
def new_block(self, proof, previous_hash):
    block = {
        'index': len(self.chain) + 1,
        'timestamp': time(),
        'transactions': self.current_transactions,
        'proof': proof,
        'previous_hash': previous_hash or hash(self.chain[-1]),
    }
    self.current_transactions = []
    self.chain.append(block)
    return block
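# Usage sketch: a hypothetical Blockchain container owning `chain` and
# `current_transactions`. A genesis block is seeded with a fixed
# previous_hash; each later block links back via hash(self.chain[-1]).
blockchain = Blockchain()
blockchain.new_block(proof=100, previous_hash='1')      # genesis block
blockchain.new_block(proof=35293, previous_hash=None)   # falls back to hashing the tip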
def score_query(self, input_query, doc_ids):
    """
    query : question + answer
    returns alignment score
    """
    logger.debug('es_search')
    hash_to_ind, freq_table, matrix = self.hash_to_ind, self.freq_table, self.matrix
    tokenizer = self.tokenizer

    # formulate search query
    tokens = list(tokenizer.tokenize(input_query).words())  # globalize tokenize with init
    token_hashes = [utils.hash(token, self.hash_size) for token in tokens]
    valid_hashes = [token_hash for token_hash in token_hashes
                    if token_hash in hash_to_ind]
    # sort hashes by matrix row index so the idf array below stays aligned
    # with the rows of cos_sim_matrix
    valid_hashes.sort(key=lambda token_hash: hash_to_ind[token_hash])
    valid_hash_inds = np.array(
        [hash_to_ind[token_hash] for token_hash in valid_hashes])

    # retrieve matrix
    cos_sim_matrix = matrix[valid_hash_inds[:, None], doc_ids]
    logger.debug('retrieve up matrix')
    # cos_sim_matrix = matrix[:, doc_ids][valid_hash_inds, :]

    # retrieve idfs
    logger.debug('Calculate the rest')
    num_docs = matrix.shape[1]
    Ns = np.array([freq_table[token_hash] for token_hash in valid_hashes])  # doc frequencies, same order as valid_hashes
    idfs = np.log((num_docs - Ns + 0.5) / (Ns + 0.5))
    idfs[idfs < 0] = 0

    assert cos_sim_matrix.shape[0] == idfs.size
    scores = np.dot(idfs, cos_sim_matrix)  # weight each token row, sum over tokens
    topn_scores = scores.argsort()[-self.topn:]
    # logger.info('Done')
    return np.sum(scores[topn_scores])
def new_block(self, proof, pre_hash=None):
    block = {
        'id': len(self.chain) + 1,
        'timestamp': time(),
        'transactions': self.transactions,
        'proof': proof,
        'pre_hash': pre_hash or utils.hash(self.chain[-1])
    }
    self.transactions = []  # clear the pool for the next block
    self.chain.append(block)
    return block
def isBlacklist(self, image, hash=None):
    if hash is None:
        hash = utils.hash(image)
    # should be replaced with database query...
    obj = self.getSession().query(Blacklist.hash).filter(Blacklist.hash == hash).first()
    if obj is not None:
        with open(AppFolders.missingPath("blacklist.png"), 'rb') as fd:
            image_data = fd.read()
        return image_data
    else:
        return image
def shuffle(values: List[Any], seed: Hash32) -> List[Any]:
    """
    Returns the shuffled ``values`` with ``seed`` as entropy.
    """
    values_count = len(values)

    # Entropy is consumed from the seed in 3-byte (24 bit) chunks.
    rand_bytes = 3
    # The highest possible result of the RNG.
    rand_max = 2**(rand_bytes * 8) - 1

    # The range of the RNG places an upper-bound on the size of the list that
    # may be shuffled. It is a logic error to supply an oversized list.
    assert values_count < rand_max

    output = [x for x in values]
    source = seed
    index = 0
    while index < values_count - 1:
        # Re-hash the `source` to obtain a new pattern of bytes.
        source = hash(source)
        # Iterate through the `source` bytes in 3-byte chunks.
        for position in range(0, 32 - (32 % rand_bytes), rand_bytes):
            # Determine the number of indices remaining in `values` and exit
            # once the last index is reached.
            remaining = values_count - index
            if remaining == 1:
                break

            # Read 3-bytes of `source` as a 24-bit big-endian integer.
            sample_from_source = int.from_bytes(
                source[position:position + rand_bytes], 'big')

            # Sample values greater than or equal to `sample_max` will cause
            # modulo bias when mapped into the `remaining` range.
            sample_max = rand_max - rand_max % remaining

            # Perform a swap if the consumed entropy will not cause modulo bias.
            if sample_from_source < sample_max:
                # Select a replacement index for the current index.
                replacement_position = (sample_from_source % remaining) + index
                # Swap the current index with the replacement index.
                (output[index], output[replacement_position]) = (
                    output[replacement_position], output[index])
                index += 1
            else:
                # The sample causes modulo bias. A new sample should be read.
                pass

    return output
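# Quick property check for shuffle() above (sketch): bind the module-level
# hash() it consumes to a 32-byte digest (sha256 here, an assumption) and
# verify the result is a seed-determined permutation of the input.
import hashlib

hash = lambda data: hashlib.sha256(data).digest()  # stand-in digest

items = list(range(20))
seed = bytes(32)
assert shuffle(items, seed) == shuffle(items, seed)   # deterministic in the seed
assert sorted(shuffle(items, seed)) == items          # a true permutation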
def create_block(self, index, transactions, previous_hash):
    merkle_root = self.merkle_root(transactions)
    nonce = self.mining(index, transactions, previous_hash, merkle_root)
    string = str(index) + str(transactions) + str(nonce) + str(
        previous_hash) + str(merkle_root)
    current_hash = hash(string)
    block = {}
    block['index'] = index
    block['transactions'] = transactions
    block['nonce'] = nonce
    block['merkle_root'] = merkle_root
    block['previous_hash'] = previous_hash
    block['current_hash'] = current_hash
    return block
def createBlock(self, pow, prevHash=None):
    block = {
        'id': len(self.chain) + 1,
        'timestamp': time(),
        'transactions': self.transactions,
        'pow': pow,
        'prevHash': prevHash or hash(self.chain[-1])
    }
    # We've created transactions for this block. Clear them so we can start
    # making a new list of transactions for the next block.
    self.transactions = []
    self.chain.append(block)
    return block
def record(self, user, ip, data, reason=None, commit=False):
    ref_name = get_remote_side(self, 'edits')
    cls = get_remote_side_class(self, 'edits')
    edit = cls()
    setattr(edit, ref_name, self.id)
    edit.user_id = user.id
    edit.ip = hash(ip)
    edit.data = data
    edit.reason = reason
    db_session.add(edit)
    if commit:
        db_session.commit()
def vote(self, user, ip, up=True, commit=False):
    ref_name = get_remote_side(self, 'votes')
    cls = get_remote_side_class(self, 'votes')
    data = {
        "user_id": user.id,
        ref_name: self.id,
        "up": up,
        "ip": hash(ip),
        "commit": commit
    }
    vote = cls.create_or_update(**data)
    self._update_vote_value_for_redis(user.id, vote)
    return vote
def authkey_check(self, ident, rhash):
    akrow = self.srv.get_authkey(ident)
    if not akrow:
        self.error("Authentication failed.", ident=ident)
        raise BadClient()
    akhash = utils.hash(self.authrand, akrow["secret"])
    if akhash != rhash:
        self.error("Authentication failed.", ident=ident)
        raise BadClient()
    self.ak = ident
    self.uid = akrow["owner"]
    self.pubchans = akrow.get("pubchans", [])
    self.subchans = akrow.get("subchans", [])
def store_file(self, file_path):
    name, extension = os.path.splitext(file_path)
    return self.api.store_file(file_path, hash(name) + extension)
def __call__(self, value):
    if self.key:
        return (hmac.new(self.key, value, sha512).hexdigest(), None)
    else:
        return (hash(value, self.digest_alg), None)
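# Keyed vs. unkeyed digests, as in __call__ above (standalone sketch): with
# a secret key the value is authenticated via HMAC-SHA512; without one it
# degrades to a plain digest that anyone can recompute.
import hmac
from hashlib import sha512

keyed = hmac.new(b'secret-key', b'value', sha512).hexdigest()
unkeyed = sha512(b'value').hexdigest()
assert keyed != unkeyed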
def __init__(self, name, email, password):
    self.name = name
    self.email = email
    self.password = utils.hash(password)