def push_users_fp_to_machine(sn, items):
    """Queue user-creation and fingerprint-upload commands for one machine.

    :param sn: machine serial number
    :param items: iterable of [pin, name, fid, tpl] entries
    """
    sf = SessionFactory.new()
    # NOTE(review): the machine link id is read from ValidWorkCommands.id;
    # ValidWorkMachine.id looks like the intended source — verify against schema.
    id_ = sf.query(ValidWorkCommands.id).filter(ValidWorkCommands.sn == sn).limit(1).scalar()
    queued = []
    for pin, name, fid, tpl in items:
        create_cmd = r"DATA DEL_USER PIN=%i\r\nDATA USER PIN=%i\tName=%s\r\n" % (pin, pin, name)
        update_cmd = r"DATA FP PIN=%i\tFID=%i\tTMP=%s\r\n" % (pin, fid, tpl)
        for cmd_text in (create_cmd, update_cmd):
            vwc = ValidWorkCommands()
            vwc.sn = sn
            vwc.validworkmachine_id = id_
            vwc.cmd = cmd_text
            vwc.create_date = Utils.current_datetime()
            queued.append(vwc)
    sf.add_all(queued)
    sf.commit()
def list(self, id_):
    """DataView listing endpoint: emit jQuery-DataTables JSON via the view's provider.

    :param id_: view key mapped in DataViewModule.__view_mapping__
    """
    name = DataViewModule.__view_mapping__.get(id_)
    if not name:
        self.set_status(403, "Error!")
        return  # BUG FIX: previously fell through and kept processing after the 403
    self.set_header("Content-Type", "text/json;charset=utf-8")
    display_start = Utils.parse_int(self.get_argument("iDisplayStart"))
    display_length = Utils.parse_int(self.get_argument("iDisplayLength"))
    total = 0
    ds = list()
    custom_data_provider = ObjectPool.dataview_provider.get(name)
    if custom_data_provider:
        default_search_value = self.get_argument("sSearch")
        obj = custom_data_provider()
        # Providers opt into counting / listing by defining these hooks.
        if hasattr(obj, "count"):
            total = obj.count(default_search_value, self)
        if hasattr(obj, "list"):
            ds = obj.list(default_search_value, display_start, display_length, self)
    results = dict()
    results["sEcho"] = self.get_argument("sEcho")
    results["iTotalRecords"] = total
    results["iTotalDisplayRecords"] = total
    results["aaData"] = ds
    self.write(json.dumps(results, cls=JsonEncoder))
def post(self, *args, **kwargs):
    """Machine data-push endpoint: record operation/attendance checkpoints.

    OPERLOG posts advance the machine's operation stamp; ATTLOG posts advance
    the attendance stamp and feed each pushed record into check-on detection.
    """
    params_ = self.wrap_params_to_dict()
    sn = params_.get("SN")
    sf = SessionFactory.new()
    machine = sf.query(ValidWorkMachine).filter(ValidWorkMachine.sn == sn).limit(1).scalar()
    table = params_.get("table")
    if table == "OPERLOG":
        op_stamp = params_.get("OpStamp")
        if op_stamp and machine:
            machine.opstamp = Utils.parse_int(op_stamp)
            sf.commit()
        self.write("OK")
    elif table == "ATTLOG":
        stamp = params_.get("Stamp")
        if stamp and machine:
            stamp = Utils.parse_int(stamp)
            machine.stamp = stamp
            sf.commit()
            if stamp and stamp > 0:
                body = self.request.body
                if body:
                    text = body.decode("utf-8")
                    # Each pushed line is "<archives_id>\t<timestamp>[\t...]".
                    pairs = [
                        (fields[0], fields[1])
                        for fields in (line.split("\t") for line in text.split("\n"))
                        if len(fields) >= 2
                    ]
                    for archives_id, touched_at in pairs:
                        touch_time = datetime.strptime(touched_at, "%Y-%m-%d %H:%M:%S")
                        self.detect_chkon_status(archives_id, touch_time)
        self.write("OK")
def finger_tpl_save(self):
    """Insert or update a fingerprint template, then push it to every machine.

    :return: ["success"] when persisted, ["failure"] otherwise
    """
    index = Utils.parse_int(self.param("index"))
    tpl = self.param("tpl")
    archives_id = Utils.parse_int(self.param("archives_id"))
    sf = SessionFactory.new()
    obj = sf.query(ValidWorkFingerTemplate) \
        .filter(ValidWorkFingerTemplate.archives_id == archives_id) \
        .filter(ValidWorkFingerTemplate.finger_index == index).limit(1).scalar()
    saved = False
    if obj:
        obj.tpl = tpl
        sf.commit()
        saved = True
    elif archives_id:
        # FIX: reuse the open session instead of opening a second one.
        obj = ValidWorkFingerTemplate()
        obj.card_no = ""
        obj.finger_index = index
        obj.tpl = tpl
        obj.archives_id = archives_id
        sf.add(obj)
        sf.commit()
        saved = True
    if not saved:
        # BUG FIX: previously commands were queued for every machine even when
        # nothing could be saved (missing archives_id).
        return ["failure"]
    machines_sn = [row[0] for row in sf.query(ValidWorkMachine.sn).all()]
    name = sf.query(Archives.name).filter(Archives.id == archives_id).limit(1).scalar()
    for sn in machines_sn:
        ValidWorkHelper.push_user_fp_to_machine(sn, archives_id, name, index, tpl)
    return ["success"]
def post(self, *args, **kwargs):
    """Do the login action.

    Authenticates by email when login_id looks like one, otherwise by login
    name; on success stores account id/name in secure cookies and records the
    login time, then redirects to the dashboard either way.
    """
    login_id = self.get_argument("login_id")
    login_pwd = self.get_argument("login_pwd")
    if not login_id or not login_pwd:
        self.redirect(self.get_login_url())
        return  # BUG FIX: redirect() does not stop execution; handler kept running
    cnn = SessionFactory.new()
    # FIX: the email and login-name branches were identical except for one
    # filter — build the shared query once.
    query = cnn.query(Account.id, Archives.name) \
        .outerjoin(Archives, Account.archives_id == Archives.id) \
        .filter(Account.login_pwd == Utils.md5(login_pwd)) \
        .filter(Account.enabled == 1)
    if Utils.is_email(Utils.trim(login_id)):
        query = query.filter(Archives.email == login_id)
    else:
        query = query.filter(Account.login_name == login_id)
    rows = query.limit(1).all()
    if len(rows) > 0:
        id_ = rows[0][0]
        name = rows[0][1]
        self.set_secure_cookie(IRequest.__key_account_id__, "%i" % id_)
        self.set_secure_cookie(IRequest.__key_account_name__, name)
        Login.update_last_login_datetime(id_)
    self.redirect("/workbench/dashboard")
def delete(self, id_):
    """Delete one record of the entity mapped to id_, honoring provider hooks.

    Writes a JSON message {flag, success, msg} describing the outcome.
    """
    self.set_header("Content-Type", "text/json;charset=utf-8")
    meta = DataTableModule.__entity_mapping__.get(id_)
    if not meta:
        self.set_status(403, "Error!")
        return  # BUG FIX: previously fell through with meta == None and crashed below
    entity = import_object(meta["name"])
    custom_filter = ObjectPool.datatable_provider.get(meta["name"])
    custom_filter_obj = None
    if custom_filter:
        custom_filter_obj = custom_filter()
    valid_msg = ""
    message = dict()
    message["flag"] = "delete"
    rec_id = self.get_argument("id")
    sf = SessionFactory.new()
    cur_row = sf.query(entity).get(rec_id)
    # Provider may veto the delete by returning a non-empty message.
    if hasattr(custom_filter_obj, "before_delete"):
        valid_msg = custom_filter_obj.before_delete(cur_row, sf, self)
    if not valid_msg:
        sf.delete(cur_row)
        sf.commit()
        if hasattr(custom_filter_obj, "after_delete"):
            custom_filter_obj.after_delete(cur_row, sf, self)
        message["success"] = True
        message["msg"] = "Deleted"
        self.write(Utils.encode(message))
    else:
        message["success"] = False
        message["msg"] = valid_msg
        self.write(Utils.encode(message))
def list(self, kw, start, limit, http_req):
    """Page through ask-for-leave records with applicant and creator names.

    :param kw: optional applicant-name keyword filter
    :param start: result offset
    :param limit: page size
    :param http_req: current request (unused here, provider signature)
    :return: list of dicts ready for JSON serialization
    """
    sf = SessionFactory.new()
    creators = sf.query(Archives.id, Archives.name).subquery()
    q = sf.query(ValidWorkAskForLeave, Archives.name, creators.c.name.label("creator")) \
        .join(Archives, ValidWorkAskForLeave.archives_id == Archives.id) \
        .outerjoin(creators, ValidWorkAskForLeave.creator == creators.c.id)
    if kw:
        q = q.filter(Archives.name.contains(kw))
    rows = q.order_by(ValidWorkAskForLeave.id.desc()).offset(start).limit(limit).all()
    results = []
    for leave, applicant, creator_name in rows:
        results.append({
            "id": leave.id,
            "kind": leave.kind,
            "ask_date": Utils.format_date(leave.ask_date),
            "start_datetime": Utils.format_datetime_short(leave.start_datetime),
            "end_datetime": Utils.format_datetime_short(leave.end_datetime),
            "create_datetime": Utils.format_datetime_short(leave.create_datetime),
            "name": applicant,
            "creator": creator_name,
        })
    return results
def get(self, *args, **kwargs):
    """Machine heartbeat endpoint: register/refresh the machine, then hand it one queued command."""
    self.set_header("Content-Type", "text/plain;charset=utf-8")
    params_ = self.wrap_params_to_dict()
    sn = params_.get("SN")
    ip = self.request.remote_ip
    # Record / refresh the connecting machine's info.
    sf = SessionFactory.new()
    obj = sf.query(ValidWorkMachine).filter(ValidWorkMachine.sn == sn).limit(1).scalar()
    if not obj:
        m = ValidWorkMachine()
        m.sn = sn
        m.ip = ip
        m.last_connect_time = Utils.current_datetime()
        sf.add(m)
        sf.commit()
    else:
        obj.ip = ip
        obj.last_connect_time = Utils.current_datetime()
        sf.commit()
    # Hand the oldest pending command to the machine.
    # BUG FIX: .scalar() returns only the first column (the id), so cmd[1]
    # raised TypeError; .first() returns the whole (id, cmd) row.
    cmd = sf.query(ValidWorkCommands.id, ValidWorkCommands.cmd) \
        .filter(ValidWorkCommands.sn == sn) \
        .order_by(asc(ValidWorkCommands.id)).limit(1).first()
    if cmd:
        self.write("C:%s:%s" % (cmd[0], cmd[1]))
def list(self, id_):
    """DataTable listing endpoint: emit jQuery-DataTables JSON for the entity mapped to id_."""
    meta = DataTableModule.__entity_mapping__.get(id_)
    if not meta:
        self.set_status(403, "Error!")
        return  # BUG FIX: previously kept executing with meta == None
    entity = import_object(meta["name"])
    self.datatable_display_cols = meta["cols"]
    self.set_header("Content-Type", "text/json;charset=utf-8")
    display_start = Utils.parse_int(self.get_argument("iDisplayStart"))
    display_length = Utils.parse_int(self.get_argument("iDisplayLength"))
    # Global search: OR together LIKE filters over the configured fields.
    default_search_value = self.get_argument("sSearch")
    default_search_fields = DataTableModule.__default_search_fields__.get(id_)
    default_search_sqlwhere = ""
    default_search_sqlwhere_params = dict()
    if default_search_value and default_search_fields:
        temp_sql = list()
        for field_name in default_search_fields:
            temp_sql.append("%s like :%s" % (field_name, field_name))
            default_search_sqlwhere_params[field_name] = "%" + default_search_value + "%"
        default_search_sqlwhere = " OR ".join(temp_sql)
    # Sorting: only the first sort column is honored.
    # NOTE(review): ORDER BY is injected through .filter() as a "1=1 ORDER BY ..."
    # string — works with textual SQL on old SQLAlchemy but is fragile; verify on upgrade.
    sort_params = self.parse_sort_params()
    order_sqlwhere = ""
    for k, v in sort_params.items():
        order_sqlwhere += "1=1 ORDER BY %s %s" % (k, v)
        break
    cnn = SessionFactory.new()
    total_query = cnn.query(func.count(1)).select_from(entity)
    ds_query = cnn.query(entity)
    # Providers may reshape both queries before pagination.
    custom_filter = ObjectPool.datatable_provider.get(meta["name"])
    if custom_filter:
        custom_filter_obj = custom_filter()
        if hasattr(custom_filter_obj, "total"):
            total_query = custom_filter_obj.total(total_query, self)
        if hasattr(custom_filter_obj, "dataset"):
            ds_query = custom_filter_obj.dataset(ds_query, self)
    if default_search_value:
        total_query = total_query.filter(default_search_sqlwhere).params(**default_search_sqlwhere_params)
        ds_query = ds_query.filter(default_search_sqlwhere).params(**default_search_sqlwhere_params)
    if order_sqlwhere:
        ds_query = ds_query.filter(order_sqlwhere)
    total = total_query.scalar()
    ds = ds_query.offset(display_start).limit(display_length)
    results = dict()
    results["sEcho"] = self.get_argument("sEcho")
    results["iTotalRecords"] = total
    results["iTotalDisplayRecords"] = total
    results["aaData"] = [item.dict() for item in ds]
    self.write(json.dumps(results, cls=JsonEncoder))
def list_timeblocks():
    """Return every time block as an (id, name, start, end) tuple with formatted times."""
    sf = SessionFactory.new()
    rows = sf.query(ValidWorkTimeBlock.id, ValidWorkTimeBlock.name,
                    ValidWorkTimeBlock.start_time, ValidWorkTimeBlock.end_time).all()
    return [
        (block_id, block_name, Utils.format_time(start), Utils.format_time(end))
        for block_id, block_name, start, end in rows
    ]
def before_add(self, entity_obj, sf, req):
    """Validate a new time block: unique name and unique start/end time pair.

    :return: an error message string, or "" when the entity may be saved
    """
    dup_name = sf.query(func.count(ValidWorkTimeBlock.id)) \
        .filter(ValidWorkTimeBlock.name == entity_obj.name).scalar()
    if dup_name > 0:
        return "名称已经存在!"
    dup_span = sf.query(func.count(ValidWorkTimeBlock.id)) \
        .filter(cast(ValidWorkTimeBlock.start_time, Time) == Utils.format_time(entity_obj.start_time)) \
        .filter(cast(ValidWorkTimeBlock.end_time, Time) == Utils.format_time(entity_obj.end_time)).scalar()
    if dup_span > 0:
        return "班次/时间段已经存在!"
    return ""
def get(self, id_):
    """Resolve the display text for an autocomplete value via its registered provider."""
    provider = ObjectPool.autocomplete_keys.get(id_)
    self.set_header("Content-Type", "text/json;charset=utf-8")
    if not provider:
        return
    obj = provider()
    if hasattr(obj, "text"):
        text = obj.text(self.get_argument("id"), self)
        self.write(Utils.encode([text]))
    else:
        self.write(Utils.encode([""]))
def post(self, id_):
    """Run an autocomplete search through the provider registered for id_."""
    provider = ObjectPool.autocomplete_keys.get(id_)
    self.set_header("Content-Type", "text/json;charset=utf-8")
    if not provider:
        return
    obj = provider()
    if hasattr(obj, "data"):
        search_word = self.get_argument("search_word")
        self.write(Utils.encode(obj.data(search_word, self)))
    else:
        self.write(Utils.encode(list()))
def task_assign(self):
    """Assign a schedule task to an archive, replacing any existing assignment.

    :return: ["success"] when assigned, ["failure"] otherwise
    """
    archives_id = Utils.parse_int(self.param("id"))
    task_id = Utils.parse_int(self.param("st_id"))
    if not (archives_id and task_id):
        return ["failure"]
    sf = SessionFactory.new()
    usr = sf.query(Archives).get(archives_id)
    if not usr:
        return ["failure"]
    # Replace whatever task set the archive had with the single new task.
    usr.validworkscheduletasks = []
    task = sf.query(ValidWorkScheduleTask).get(task_id)
    usr.validworkscheduletasks.append(task)
    sf.commit()
    return ["success"]
def count(self, search_text, http_req):
    """Count today's check-on records, optionally filtered by archive name.

    :param search_text: optional substring match on Archives.name
    :param http_req: current request (unused here, provider signature)
    """
    # FIX: removed dead code — current_date was set to None and immediately
    # overwritten by the always-true "if not current_date" branch.
    current_date = Utils.format_date(Utils.current_datetime())
    sf = SessionFactory.new()
    subq = sf.query(Term.name.label("term_name"), TermTaxonomy.id) \
        .filter(TermTaxonomy.term_id == Term.id).subquery()
    q = sf.query(func.count(ValidWorkCheckOn.id)) \
        .join(Archives, ValidWorkCheckOn.archives_id == Archives.id) \
        .outerjoin(subq, subq.c.id == Archives.org_id)
    if search_text:
        q = q.filter(Archives.name.contains(search_text))
    q = q.filter(cast(ValidWorkCheckOn.valid_start_time, Date) == current_date)
    return q.scalar()
def pack_client(self):
    """Build (or rebuild) the per-user fingerprint-helper client zip and return its path.

    The zip bundles the server address, a freshly generated key, the helper
    executable, and its DLL dependencies.
    """
    user_id = self.get_current_user()
    download_path = self.get_webroot_path() + "/download/validwork/"
    Utils.mkdirs(download_path)
    client_zip_name = download_path + Utils.md5("%i_client" % user_id) + ".zip"
    key_file_name = download_path + Utils.md5("%i_keyfile" % user_id)
    ip_file_name = download_path + "ip.txt"
    exe_file_name = download_path + "FingerTemplateHelper.exe"
    libcurl = download_path + "libcurl.dll"
    zlib1 = download_path + "zlib1.dll"
    # The key file is regenerated on every call.
    if os.path.exists(key_file_name):
        os.remove(key_file_name)
    Utils.text_write(key_file_name, [self.kengen()], "")
    # The server-address file is only written once.
    if not os.path.exists(ip_file_name):
        Utils.text_write(ip_file_name, [self.request.host], "")
    if os.path.exists(client_zip_name):
        os.remove(client_zip_name)
    bundle = ZipFile(client_zip_name, "w")
    self.compress(bundle, ip_file_name, "ip.txt")
    self.compress(bundle, key_file_name, "temp.key")
    self.compress(bundle, exe_file_name, "指纹采集助手.exe")
    self.compress(bundle, libcurl, "libcurl.dll")
    self.compress(bundle, zlib1, "zlib1.dll")
    bundle.close()
    return client_zip_name
def update(self, id_):
    """DataTable add/update endpoint.

    With no "id" argument the posted form is inserted as a new entity row;
    otherwise the existing row is loaded and updated. Provider hooks may veto
    or react to either operation. Writes a JSON message {flag, success, msg}.
    """
    message = dict()
    self.set_header("Content-Type", "text/json;charset=utf-8")
    meta = DataTableModule.__entity_mapping__.get(id_)
    if not meta:
        self.set_status(403, "Error!")
        return  # BUG FIX: previously fell through with meta == None and crashed below
    entity = import_object(meta["name"])
    custom_filter = ObjectPool.datatable_provider.get(meta["name"])
    custom_filter_obj = None
    if custom_filter:
        custom_filter_obj = custom_filter()
    rec_id = self.get_argument("id")
    valid_msg = ""
    if not rec_id:
        message["flag"] = "add"
        sf = SessionFactory.new()
        obj = self.wrap_entity(entity())
        if hasattr(custom_filter_obj, "before_add"):
            valid_msg = custom_filter_obj.before_add(obj, sf, self)
        # Save only when validation raised no message.
        if not valid_msg:
            sf.add(obj)
            sf.commit()
            if hasattr(custom_filter_obj, "after_add"):
                custom_filter_obj.after_add(obj, sf, self)
            message["success"] = True
            message["msg"] = obj.id
            self.write(Utils.encode(message))
        else:
            message["success"] = False
            message["msg"] = valid_msg
            self.write(Utils.encode(message))
    else:
        message["flag"] = "update"
        sf = SessionFactory.new()
        cur_row = sf.query(entity).get(rec_id)
        self.wrap_entity(cur_row)
        if hasattr(custom_filter_obj, "before_modify"):
            valid_msg = custom_filter_obj.before_modify(cur_row, sf, self)
        if not valid_msg:
            sf.commit()
            if hasattr(custom_filter_obj, "after_modify"):
                custom_filter_obj.after_modify(cur_row, sf, self)
            message["success"] = True
            message["msg"] = "Updated"
            self.write(Utils.encode(message))
        else:
            message["success"] = False
            message["msg"] = valid_msg
            self.write(Utils.encode(message))
def post(self, key):
    """Dispatch an ajax request to the handler class registered under key.

    The handler instance is decorated with the current request, the parsed
    parameters, and a param() accessor before its method is invoked.
    """
    data_type = self.get_argument("__data_type__")
    content_types = {
        "json": "text/json;charset=utf-8",
        "script": "text/javascript;charset=utf-8",
        "html": "text/html;charset=utf-8",
    }
    if data_type in content_types:
        self.set_header("Content-Type", content_types[data_type])
    if not key:
        self.write("alert('Ajax key require.')")
        return
    cls = ObjectPool.ajax.get(key)
    if not cls:
        self.write("Class not found.")
        return
    method_name = self.get_argument("__method_name__")
    obj = cls()
    if not hasattr(obj, method_name):
        return
    setattr(obj, "request", self)
    setattr(obj, "__params__", self.wrap_params_to_dict())
    setattr(obj, "param", lambda key_: obj.__params__.get(key_))
    handler = getattr(obj, method_name)
    result = handler()
    if data_type == "json":
        self.write(Utils.encode(result))
    else:
        self.write(result)
def list_fingers(self):
    """Return the finger indexes already enrolled for the archive in the request params."""
    sf = SessionFactory.new()
    archives_id = Utils.parse_int(self.param("archives_id"))
    rows = sf.query(ValidWorkFingerTemplate.finger_index) \
        .filter(ValidWorkFingerTemplate.archives_id == archives_id).all()
    return [finger_index for (finger_index,) in rows]
def list(self, keywords, start, limit, http_req):
    """Page through archives (excluding the super admin) joined with their schedule task.

    :param keywords: optional name substring filter
    :param start: result offset
    :param limit: page size
    :param http_req: current request, used for the org_id argument
    """
    org_id = Utils.parse_int(http_req.get_argument("org_id"))
    sf = SessionFactory.new()
    q = sf.query(Archives.id, Archives.name, Archives.sex, Archives.org_id,
                 ValidWorkScheduleTask.name, ValidWorkScheduleTask.id) \
        .outerjoin(ValidWorkScheduleTask, Archives.validworkscheduletasks) \
        .filter(Archives.name != "超级管理员")
    if keywords:
        q = q.filter(Archives.name.like("%" + keywords + "%"))
    if org_id:
        q = q.filter(Archives.org_id == org_id)
    rows = q.order_by(Archives.name).offset(start).limit(limit).all()
    items = []
    for rec_id, name, sex, rec_org_id, st_name, st_id in rows:
        items.append({
            "id": rec_id,
            "name": name,
            "sex": sex,
            "org_id": rec_org_id,
            "st_name": st_name if st_name else "",
            "st_id": st_id if st_id else 0,
        })
    return items
def save(self):
    """Attach (state == 0) or detach a role on an account.

    :return: ["success"] when the change was committed, ["failure"] otherwise
    """
    account_id = Utils.parse_int(self.param("id"))
    role_id = Utils.parse_int(self.param("role_id"))
    state = Utils.parse_int(self.param("state"))
    cnn = SessionFactory.new()
    account = cnn.query(Account).get(account_id)
    if not account:
        return ["failure"]
    role = cnn.query(Role).get(role_id)
    if not role:
        return ["failure"]
    if state == 0:
        account.roles.append(role)
    else:
        # NOTE(review): .remove raises ValueError if the role is not currently
        # attached — verify callers always send a valid state.
        account.roles.remove(role)
    cnn.commit()
    return ["success"]
def update(self, id_):
    """DataView add/update endpoint: delegate to the provider's add/modify hooks.

    Writes a JSON message {flag, success, msg}; defaults to a failure message
    when no provider (or hook) is registered for the view.
    """
    message = dict()
    message["success"] = False
    message["msg"] = "Miss DataProvider!"
    self.set_header("Content-Type", "text/json;charset=utf-8")
    name = DataViewModule.__view_mapping__.get(id_)
    if not name:
        self.set_status(403, "Error!")
        return  # BUG FIX: previously continued processing after the 403
    custom_data_provider = ObjectPool.dataview_provider.get(name)
    obj = None
    if custom_data_provider:
        obj = custom_data_provider()
    rec_id = self.get_argument("id")
    if not rec_id:
        message["flag"] = "add"
        if hasattr(obj, "add"):
            msg = obj.add(self)
            # An int result is the new record id; anything else is an error message.
            if type(msg) == int:
                message["success"] = True
                message["msg"] = msg
            else:
                message["success"] = False
                message["msg"] = msg
    else:
        message["flag"] = "update"
        if hasattr(obj, "modify"):
            msg = obj.modify(rec_id, self)
            if msg:
                message["success"] = False
                message["msg"] = msg
            else:
                message["success"] = True
                message["msg"] = "Updated"
    self.write(Utils.encode(message))
def get(self, *args, **kwargs):
    """Render the dev-widgets page with entity/column metadata reflected from the database."""
    insp = reflection.Inspector.from_engine(SessionFactory.__engine__)
    table_cols = dict()
    table_cols_text = dict()
    for table_name in SessionFactory.entitys:
        col_names = [col_def["name"] for col_def in insp.get_columns(table_name)]
        table_cols[table_name] = col_names
        table_cols_text[table_name] = Utils.encode(col_names)
    context_data = {
        "entity_map": Utils.encode(SessionFactory.entitys),
        "table_cols": table_cols,
        "table_cols_text": table_cols_text,
    }
    return self.render("workbench/dev.widgets.html", context=context_data)
def render(self, id_, target, type_="table"):
    """Render the save / save-and-continue / reset button bar for a form.

    :param id_: dom id
    :param target: DataTable's entity property or DataView's view property value
    :param type_: 'table' or 'view'
    :return: HTML string (always closes the surrounding form)
    """
    key = Utils.md5(target)
    if type_ == "table":
        point = DataTableModule.__security_points__.get(key)
    elif type_ == "view":
        point = DataViewModule.__security_points__.get(key)
    else:
        point = None
    parts = list()
    # Buttons appear only when the user holds any of the add/update/delete points.
    if AccountHelper.auth(self.current_user, {point.add, point.update, point.delete}):
        parts.append('<div class="form-group"><div class="col-lg-9 col-lg-offset-3">')
        parts.append(
            '<input type="button" class="btn btn-primary btn-sm" id="%s_form_save" onclick="%s_.form.save(this,%s);" value="保存"></button>'
            % (id_, id_, "''")
        )
        parts.append(
            ' <input type="button" class="btn btn-white btn-sm" id="%s_form_save_continue" onclick="%s_.form.save(this,%s);" value="保存并继续"></button>'
            % (id_, id_, "'clear'")
        )
        parts.append(
            ' <input type="button" class="btn btn-white btn-sm" id="%s_form_reset" onclick="%s_.form.reset(this);" value="重填"></button>'
            % (id_, id_)
        )
        parts.append("</div></div>")
    parts.append("</form>")
    return "".join(parts)
def req(self, key, method_name):
    """Dispatch an API request (/api/<key>/<method>) to the registered handler class.

    The handler instance is decorated with the current request, body, files,
    parsed parameters, and a param() accessor before its method is invoked;
    the result is JSON-encoded into the response.
    """
    self.set_header("Content-Type", "text/json;charset=utf-8")
    if not key:
        self.write("Key require.")
        return
    cls = ObjectPool.api.get(key)
    if not cls:
        self.write("Object not found.")
        return
    obj = cls()
    if not hasattr(obj, method_name):
        return
    setattr(obj, "request", self)
    setattr(obj, "body", self.request.body)
    setattr(obj, "files", self.request.files)
    setattr(obj, "__params__", self.wrap_params_to_dict())
    setattr(obj, "param", lambda key: obj.__params__.get(key))
    handler = getattr(obj, method_name)
    self.write(Utils.encode(handler()))
def dict(self, dict_=None):
    """Two-way converter between this mapped entity and a plain dict.

    Without an argument, returns {column_key: value} for every mapped column.
    With a dict, assigns each key that exists on the entity, parsing string
    values according to the column type reported by cols_meta().

    :param dict_: optional dict of values to load onto the entity
    :return: a dict in read mode, None in assignment mode
    """
    if not dict_:
        columns = [c.key for c in class_mapper(self.__class__).columns]
        return dict((c, getattr(self, c)) for c in columns)
    # Column-type name -> parser for string inputs; unknown types assign as-is.
    parsers = {
        "int": Utils.parse_int,
        "numeric": Utils.parse_float,
        "datetime": Utils.parse_datetime,
        "date": Utils.parse_date,
        "time": Utils.parse_time,
    }
    metas = self.cols_meta()
    # FIX: removed leftover debug print(k, v).
    for k, v in dict_.items():
        if not hasattr(self, k):
            continue
        for m in metas:
            if m["name"] != k:
                continue
            parser = parsers.get(m["type"])
            if parser and type(v) == str:
                setattr(self, k, parser(v))
            else:
                setattr(self, k, v)
def push_user_fp_to_machine(sn, pin, name, fid, tpl):
    """Queue the delete/create-user and fingerprint-upload commands for one machine.

    :param sn: machine serial number
    :param pin: user pin
    :param name: user display name
    :param fid: finger index
    :param tpl: fingerprint template data
    """
    create_cmd = r"DATA DEL_USER PIN=%i\r\nDATA USER PIN=%i\tName=%s\r\n" % (pin, pin, name)
    update_cmd = r"DATA FP PIN=%i\tFID=%i\tTMP=%s\r\n" % (pin, fid, tpl)
    sf = SessionFactory.new()
    # NOTE(review): the machine link id is read from ValidWorkCommands.id;
    # ValidWorkMachine.id looks like the intended source — verify against schema.
    id_ = sf.query(ValidWorkCommands.id).filter(ValidWorkCommands.sn == sn).limit(1).scalar()
    queued = []
    for cmd_text in (create_cmd, update_cmd):
        vwc = ValidWorkCommands()
        vwc.sn = sn
        vwc.validworkmachine_id = id_
        vwc.cmd = cmd_text
        vwc.create_date = Utils.current_datetime()
        queued.append(vwc)
    sf.add_all(queued)
    sf.commit()
def list(self):
    """Return the role ids attached to the account given by the 'id' request param."""
    account_id = Utils.parse_int(self.param("id"))
    cnn = SessionFactory.new()
    rows = cnn.query(Role.id).join(Account, Role.accounts).filter(Account.id == account_id).all()
    return [role_id for (role_id,) in rows]
def post(self, *args, **kwargs):
    """File-upload endpoint: save each posted file under /upload/<year-month>/<day>/
    with a unique name, and reply "OK" as JSON.
    """
    self.set_header("Content-Type", "text/json;charset=utf-8")
    # Build the date-partitioned upload directory.
    upload_path = "/upload/%s/%s" % (Utils.format_year_month(Utils.current_datetime()),
                                     Utils.current_datetime().date().day)
    path = self.get_webroot_path() + upload_path
    Utils.mkdirs(path)
    files = self.request.files
    items = list()
    for key in files:
        item = dict()
        # NOTE(review): Tornado's request.files maps field name -> LIST of file
        # dicts; confirm this framework flattens it, otherwise meta["filename"]
        # fails on a list.
        meta = files[key]
        file_name = meta["filename"]
        # BUG FIX: os.path.splitext returns a (root, ext) tuple — the original
        # stored the whole tuple, so the emptiness check was always truthy and
        # the tuple repr was embedded in the saved file name.
        ext_name = os.path.splitext(file_name)[1]
        name_ = Utils.uniq_index()
        if not ext_name:
            new_file_name = name_
        else:
            # ext_name already includes the leading dot.
            new_file_name = "%s%s" % (name_, ext_name)
        save_path = "%s/%s" % (path, new_file_name)
        with open(save_path, "wb") as uploader:
            uploader.write(meta["body"])
        item["local_name"] = file_name
        item["ext_name"] = ext_name
        item["upload_path"] = "%s/%s" % (upload_path, new_file_name)
        item["archives_id"] = self.get_current_user()
        items.append(item)
    self.write(Utils.encode(["OK"]))
def exists_other(self, id, name_):
    """Return True when a different TermTaxonomy row already uses name_ in this taxonomy.

    :param id: id of the row being edited (excluded from the check)
    :param name_: candidate term name (trimmed before comparison)
    """
    cnn = SessionFactory.new()
    num = cnn.query(func.count(TermTaxonomy.id)) \
        .filter(TermTaxonomy.id != id) \
        .filter(TermTaxonomy.term.has(name=Utils.trim(name_))) \
        .filter(TermTaxonomy.taxonomy == self.taxonomy) \
        .limit(1).scalar()
    return num > 0