def finger_tpl_save(self):
    """Insert or update a fingerprint template and push it to all machines.

    Reads ``index`` (finger slot), ``tpl`` (template payload) and
    ``archives_id`` from the request params. Returns ["success"] when the
    template was saved, ["failure"] otherwise.
    """
    index = Utils.parse_int(self.param("index"))
    tpl = self.param("tpl")
    archives_id = Utils.parse_int(self.param("archives_id"))
    sf = SessionFactory.new()
    obj = sf.query(ValidWorkFingerTemplate) \
        .filter(ValidWorkFingerTemplate.archives_id == archives_id) \
        .filter(ValidWorkFingerTemplate.finger_index == index).limit(1).scalar()
    machines_sn = [sn[0] for sn in sf.query(ValidWorkMachine.sn).all()]
    name = sf.query(Archives.name).filter(Archives.id == archives_id).limit(1).scalar()
    # Broadcast the template to every known machine (original order kept:
    # push happens before the local save).
    for sn in machines_sn:
        ValidWorkHelper.push_user_fp_to_machine(sn, archives_id, name, index, tpl)
    if obj:
        # Existing record for this archive/finger: refresh the payload.
        obj.tpl = tpl
        sf.commit()
        return ["success"]
    if archives_id:
        # New record. BUGFIX: reuse the already-open session instead of
        # opening a second one that orphaned the first.
        obj = ValidWorkFingerTemplate()
        obj.card_no = ""
        obj.finger_index = index
        obj.tpl = tpl
        obj.archives_id = archives_id
        sf.add(obj)
        sf.commit()
        return ["success"]
    return ["failure"]
def update(self, id_):
    """Insert or update a row of the datatable entity registered under id_.

    Dispatches to "add" when no record id is posted, otherwise "update".
    Provider hooks (before_add/after_add/before_modify/after_modify) may
    veto the operation by returning a validation message.
    """
    message = dict()
    self.set_header("Content-Type", "text/json;charset=utf-8")
    meta = DataTableModule.__entity_mapping__.get(id_)
    if not meta:
        # BUGFIX: abort once the 403 status is set; the original fell
        # through and crashed dereferencing meta["name"].
        self.set_status(403, "Error!")
        return
    entity = import_object(meta["name"])
    custom_filter = ObjectPool.datatable_provider.get(meta["name"])
    custom_filter_obj = None
    if custom_filter:
        custom_filter_obj = custom_filter()
    rec_id = self.get_argument("id")
    valid_msg = ""
    if not rec_id:
        message["flag"] = "add"
        sf = SessionFactory.new()
        obj = self.wrap_entity(entity())
        if hasattr(custom_filter_obj, "before_add"):
            valid_msg = custom_filter_obj.before_add(obj, sf, self)
        # Only persist when validation raised no complaint.
        if not valid_msg:
            sf.add(obj)
            sf.commit()
            if hasattr(custom_filter_obj, "after_add"):
                custom_filter_obj.after_add(obj, sf, self)
            message["success"] = True
            message["msg"] = obj.id
        else:
            message["success"] = False
            message["msg"] = valid_msg
        self.write(Utils.encode(message))
    else:
        message["flag"] = "update"
        sf = SessionFactory.new()
        cur_row = sf.query(entity).get(rec_id)
        self.wrap_entity(cur_row)
        if hasattr(custom_filter_obj, "before_modify"):
            valid_msg = custom_filter_obj.before_modify(cur_row, sf, self)
        if not valid_msg:
            sf.commit()
            if hasattr(custom_filter_obj, "after_modify"):
                custom_filter_obj.after_modify(cur_row, sf, self)
            message["success"] = True
            message["msg"] = "Updated"
        else:
            message["success"] = False
            message["msg"] = valid_msg
        self.write(Utils.encode(message))
def count(self, kw, http_req):
    """Count leave requests, optionally filtered by archive-name keyword."""
    session = SessionFactory.new()
    query = session.query(func.count(ValidWorkAskForLeave.id)) \
        .join(Archives, ValidWorkAskForLeave.archives_id == Archives.id)
    if kw:
        query = query.filter(Archives.name.contains(kw))
    return query.limit(1).scalar()
def list(self, kw, start, limit, http_req):
    """Page through leave requests joined with requester and creator names."""
    session = SessionFactory.new()
    # Self-join via a subquery so the creator's name can be resolved too.
    creators = session.query(Archives.id, Archives.name).subquery()
    query = session.query(ValidWorkAskForLeave, Archives.name, creators.c.name.label("creator")) \
        .join(Archives, ValidWorkAskForLeave.archives_id == Archives.id) \
        .outerjoin(creators, ValidWorkAskForLeave.creator == creators.c.id)
    if kw:
        query = query.filter(Archives.name.contains(kw))
    rows = query.order_by(ValidWorkAskForLeave.id.desc()).offset(start).limit(limit).all()
    results = []
    for leave, owner_name, creator_name in rows:
        results.append({
            "id": leave.id,
            "kind": leave.kind,
            "ask_date": Utils.format_date(leave.ask_date),
            "start_datetime": Utils.format_datetime_short(leave.start_datetime),
            "end_datetime": Utils.format_datetime_short(leave.end_datetime),
            "create_datetime": Utils.format_datetime_short(leave.create_datetime),
            "name": owner_name,
            "creator": creator_name,
        })
    return results
def post(self, *args, **kwargs):
    """Receive pushed device data (OPERLOG / ATTLOG) and update sync stamps.

    For ATTLOG pushes the request body holds tab-separated attendance
    records; each valid (pin, timestamp) pair is fed to detect_chkon_status.
    """
    params_ = self.wrap_params_to_dict()
    sn = params_.get("SN")
    sf = SessionFactory.new()
    machine = sf.query(ValidWorkMachine).filter(ValidWorkMachine.sn == sn).limit(1).scalar()
    table = params_.get("table")
    if table == "OPERLOG":
        op_stamp = params_.get("OpStamp")
        if op_stamp and machine:
            machine.opstamp = Utils.parse_int(op_stamp)
            sf.commit()
        self.write("OK")
    elif table == "ATTLOG":
        stamp = params_.get("Stamp")
        if stamp and machine:
            stamp = Utils.parse_int(stamp)
            machine.stamp = stamp
            sf.commit()
            if stamp and stamp > 0:
                records_text = self.request.body
                if records_text:
                    records_text = records_text.decode("utf-8")
                    # Each line: "<archives_id>\t<yyyy-mm-dd HH:MM:SS>\t..."
                    for line in records_text.split("\n"):
                        fields = line.split("\t")
                        if len(fields) >= 2:
                            archives_id = fields[0]
                            touch_time = datetime.strptime(fields[1], "%Y-%m-%d %H:%M:%S")
                            self.detect_chkon_status(archives_id, touch_time)
        self.write("OK")
def push_users_fp_to_machine(sn, items):
    """
    Queue per-user delete/create and fingerprint-upload commands for a machine.

    @param sn: machine serial number the commands are queued for
    @param items: [[pin, name, fid, tpl]..]
    """
    sf = SessionFactory.new()
    # NOTE(review): this looks up a ValidWorkCommands.id by serial number and
    # stores it as validworkmachine_id on the new rows — it looks like it
    # should come from ValidWorkMachine instead; confirm against the schema.
    id_ = sf.query(ValidWorkCommands.id).filter(ValidWorkCommands.sn == sn).limit(1).scalar()
    cmds = list()
    for item in items:
        # Raw strings: the literal characters "\r\n" / "\t" end up in the
        # command text (not real CR/LF/TAB) — presumably what the device
        # protocol expects; confirm against the device documentation.
        create = r"DATA DEL_USER PIN=%i\r\nDATA USER PIN=%i\tName=%s\r\n" % (item[0], item[0], item[1])
        update = r"DATA FP PIN=%i\tFID=%i\tTMP=%s\r\n" % (item[0], item[2], item[3])
        # Two queued commands per user: recreate the user, then upload the
        # fingerprint template.
        vwc = ValidWorkCommands()
        vwc.sn = sn
        vwc.validworkmachine_id = id_
        vwc.cmd = create
        vwc.create_date = Utils.current_datetime()
        vwc1 = ValidWorkCommands()
        vwc1.sn = sn
        vwc1.validworkmachine_id = id_
        vwc1.cmd = update
        vwc1.create_date = Utils.current_datetime()
        cmds += [vwc, vwc1]
    sf.add_all(cmds)
    sf.commit()
def get(self, *args, **kwargs):
    """Answer a device's "options=all" request with transfer settings and stamps."""
    sn = self.get_argument("SN")
    opts = self.get_argument("options")
    self.set_header("Content-Type", "text/plain;charset=utf-8")
    if opts != "all":
        return
    for text in ("GET OPTION FROM:" + sn + "\n",
                 "ErrorDelay=60\n",
                 "Delay=15\n",
                 "TransInterval=1\n",
                 "TransFlag=1111000000\n",
                 "Realtime=1\n",
                 "Encrypt=0\n",
                 "TransTimes=00:00;14:05\n"):
        self.write(text)
    session = SessionFactory.new()
    machine = session.query(ValidWorkMachine).filter(ValidWorkMachine.sn == sn).limit(1).scalar()
    if machine:
        # Echo only the stamps this machine has already negotiated.
        if machine.stamp:
            self.write("Stamp=" + str(machine.stamp) + "\n")
        if machine.opstamp:
            self.write("OpStamp=" + str(machine.opstamp) + "\n")
        if machine.photo_stamp:
            self.write("PhotoStamp=" + str(machine.photo_stamp) + "\n")
    else:
        # Unknown machine: seed an initial exchange stamp.
        a = "100000000"
        self.write("Stamp=" + a + "\n")
        self.write("OpStamp=" + a + "\n")
        self.write("PhotoStamp=" + a + "\n")
def render(self, **prop):
    """Render a <select> element populated from an entity table or query class.

    Options come either from querying ``entity`` (using the first two names
    in ``cols``) or from a ``query_class`` object exposing a
    data() -> [(k, v), ...] method. Returns the HTML string.
    """
    self.dom_id = prop.get("id")
    self.cols = prop.get("cols")
    self.sort_sql = prop.get("sort_sql")
    self.entity_full_name = prop.get("entity")
    self.query_class = prop.get("query_class")  # obj prop `data` func return [(k,v),(k,v)...]
    self.allow_blank = prop.get("allow_blank")
    html = list()
    # NOTE(review): values are interpolated unescaped — safe only while the
    # option data is trusted; escape if user-supplied data can appear here.
    html.append("<select id='%s' name='%s' class='form-control'>" % (self.dom_id, self.dom_id))
    if self.allow_blank:
        html.append("<option value=''> </option>")
    if not self.query_class:
        if not self.entity_full_name:
            return "<small>Require entity full name.</small>"
        cls = import_object(self.entity_full_name)
        cnn = SessionFactory.new()
        q = cnn.query(cls)
        if self.sort_sql:
            q = q.order_by(self.sort_sql)
        # BUGFIX: local renamed from `all`, which shadowed the builtin.
        rows = [[getattr(item, col) for col in self.cols.split(",")]
                for item in q.all()]
        for opt in rows:
            html.append("<option value='%s'>%s</option>" % (opt[0], opt[1]))
    else:
        obj = import_object(self.query_class)()
        if hasattr(obj, "data"):
            for item in getattr(obj, "data")():
                html.append("<option value='%s'>%s</option>" % (item[0], item[1]))
    html.append("</select>")
    return "".join(html)
def count(self, keywords, http_req):
    """Count archives (excluding the super admin), optionally keyword-filtered."""
    session = SessionFactory.new()
    query = session.query(func.count(Archives.id)) \
        .outerjoin(ValidWorkScheduleTask, Archives.validworkscheduletasks) \
        .filter(Archives.name != "超级管理员")
    if keywords:
        query = query.filter(Archives.name.like("%" + keywords + "%"))
    return query.scalar()
def delete(self, id_, http_req):
    """Delete a leave request by id; "" on success, "failure" when absent."""
    session = SessionFactory.new()
    removed = session.query(ValidWorkAskForLeave) \
        .filter(ValidWorkAskForLeave.id == id_) \
        .delete(synchronize_session='fetch')
    session.commit()
    return "" if removed > 0 else "failure"
def is_holiday(current_date_str):
    """True when the given date string falls inside any configured holiday span."""
    session = SessionFactory.new()
    matches = session.query(func.count(Holiday.id)) \
        .filter(Holiday.start_date <= current_date_str) \
        .filter(Holiday.end_date >= current_date_str) \
        .scalar()
    return matches > 0
def delete(self, id_):
    """Delete a row of the datatable entity registered under id_.

    A provider's before_delete hook may veto the deletion by returning a
    validation message; after_delete runs once the row is removed.
    """
    self.set_header("Content-Type", "text/json;charset=utf-8")
    meta = DataTableModule.__entity_mapping__.get(id_)
    if not meta:
        # BUGFIX: abort once the 403 status is set; the original fell
        # through and crashed dereferencing meta["name"].
        self.set_status(403, "Error!")
        return
    entity = import_object(meta["name"])
    custom_filter = ObjectPool.datatable_provider.get(meta["name"])
    custom_filter_obj = None
    if custom_filter:
        custom_filter_obj = custom_filter()
    valid_msg = ""
    message = dict()
    message["flag"] = "delete"
    rec_id = self.get_argument("id")
    sf = SessionFactory.new()
    cur_row = sf.query(entity).get(rec_id)
    if hasattr(custom_filter_obj, "before_delete"):
        valid_msg = custom_filter_obj.before_delete(cur_row, sf, self)
    if not valid_msg:
        sf.delete(cur_row)
        sf.commit()
        if hasattr(custom_filter_obj, "after_delete"):
            custom_filter_obj.after_delete(cur_row, sf, self)
        message["success"] = True
        message["msg"] = "Deleted"
    else:
        message["success"] = False
        message["msg"] = valid_msg
    self.write(Utils.encode(message))
def value(self):
    """Return the current account's fingerprint template key as a one-item list."""
    session = SessionFactory.new()
    tpl = session.query(ValidWorkFingerTemplateKey.tpl) \
        .filter(ValidWorkFingerTemplateKey.account_id == self.request.current_user) \
        .limit(1).scalar()
    return [tpl] if tpl else [""]
def post(self, *args, **kwargs):
    """Handle the login form: verify credentials and start a session.

    Accepts either an email address or a login name. On success the account
    cookies are set and the user is sent to the dashboard; otherwise the
    user is returned to the login page.
    """
    login_id = self.get_argument("login_id")
    login_pwd = self.get_argument("login_pwd")
    if not login_id or not login_pwd:
        # BUGFIX: return after redirect; the original fell through and
        # issued a second redirect, which fails in Tornado.
        self.redirect(self.get_login_url())
        return
    cnn = SessionFactory.new()
    # The two original branches differed only in the identity column; merge
    # them and pick the filter based on whether login_id looks like an email.
    q = cnn.query(Account.id, Archives.name) \
        .outerjoin(Archives, Account.archives_id == Archives.id)
    if Utils.is_email(Utils.trim(login_id)):
        q = q.filter(Archives.email == login_id)
    else:
        q = q.filter(Account.login_name == login_id)
    rows = q.filter(Account.login_pwd == Utils.md5(login_pwd)) \
        .filter(Account.enabled == 1).limit(1).all()
    if rows:
        id_, name = rows[0]
        self.set_secure_cookie(IRequest.__key_account_id__, "%i" % id_)
        self.set_secure_cookie(IRequest.__key_account_name__, name)
        Login.update_last_login_datetime(id_)
        self.redirect("/workbench/dashboard")
    else:
        # BUGFIX: failed logins used to be redirected to the dashboard too;
        # send them back to the login page instead.
        self.redirect(self.get_login_url())
def list_fingers(self):
    """List the finger indexes already enrolled for the requested archive."""
    session = SessionFactory.new()
    archives_id = Utils.parse_int(self.param("archives_id"))
    rows = session.query(ValidWorkFingerTemplate.finger_index) \
        .filter(ValidWorkFingerTemplate.archives_id == archives_id).all()
    return [finger_index for (finger_index,) in rows]
def list(self, taxonomy):
    """Return (taxonomy_id, term_name) pairs for the given taxonomy."""
    session = SessionFactory.new()
    query = session.query(TermTaxonomy.id, Term.name) \
        .outerjoin((Term, Term.id == TermTaxonomy.term_id)) \
        .filter(TermTaxonomy.taxonomy == taxonomy)
    return query.all()
def list(self, keywords, start, limit, http_req):
    """Page archives (excluding the super admin) with their schedule task, if any."""
    org_id = Utils.parse_int(http_req.get_argument("org_id"))
    session = SessionFactory.new()
    query = session.query(Archives.id, Archives.name, Archives.sex, Archives.org_id,
                          ValidWorkScheduleTask.name, ValidWorkScheduleTask.id) \
        .outerjoin(ValidWorkScheduleTask, Archives.validworkscheduletasks) \
        .filter(Archives.name != "超级管理员")
    if keywords:
        query = query.filter(Archives.name.like("%" + keywords + "%"))
    if org_id:
        query = query.filter(Archives.org_id == org_id)
    rows = query.order_by(Archives.name).offset(start).limit(limit).all()
    results = []
    for rec_id, name, sex, rec_org_id, st_name, st_id in rows:
        results.append({
            "id": rec_id,
            "name": name,
            "sex": sex,
            "org_id": rec_org_id,
            # Fall back to safe defaults when no schedule task is assigned.
            "st_name": st_name if st_name else "",
            "st_id": st_id if st_id else 0,
        })
    return results
def list(self):
    """Return the security-point ids granted to the role given by the `id` param."""
    role_id = self.param("id")
    if not role_id:
        return []
    session = SessionFactory.new()
    rows = session.query(SecurityPoint.id) \
        .join((Role, SecurityPoint.roles)) \
        .filter(Role.id == role_id).all()
    return [point_id for (point_id,) in rows]
def get(self, *args, **kwargs):
    """Machine heartbeat: register/refresh the device, then hand out one command.

    Records the device's IP and last-connect time, then sends the oldest
    pending command for this serial number as "C:<id>:<cmd>".
    """
    self.set_header("Content-Type", "text/plain;charset=utf-8")
    params_ = self.wrap_params_to_dict()
    sn = params_.get("SN")
    ip = self.request.remote_ip
    # Record/refresh the machine that is calling in.
    sf = SessionFactory.new()
    obj = sf.query(ValidWorkMachine).filter(ValidWorkMachine.sn == sn).limit(1).scalar()
    if not obj:
        m = ValidWorkMachine()
        m.sn = sn
        m.ip = ip
        m.last_connect_time = Utils.current_datetime()
        sf.add(m)
        sf.commit()
    else:
        obj.ip = ip
        obj.last_connect_time = Utils.current_datetime()
        sf.commit()
    # Fetch the oldest queued command and hand it to the machine.
    # BUGFIX: .scalar() returns only the first column (the id), so the
    # original crashed on cmd[1]; .first() yields the full (id, cmd) row.
    cmd = sf.query(ValidWorkCommands.id, ValidWorkCommands.cmd) \
        .filter(ValidWorkCommands.sn == sn) \
        .order_by(asc(ValidWorkCommands.id)).limit(1).first()
    if cmd:
        self.write("C:%s:%s" % (cmd[0], cmd[1]))
def list(self, id_):
    """Serve a DataTables AJAX request for the entity registered under id_.

    Applies the global sSearch filter across the configured search fields,
    the client-requested sort order, provider hooks, and paging, then
    writes the DataTables JSON envelope.
    """
    meta = DataTableModule.__entity_mapping__.get(id_)
    if not meta:
        # BUGFIX: stop once the 403 status is set; the original fell
        # through and crashed dereferencing meta["name"].
        self.set_status(403, "Error!")
        return
    entity = import_object(meta["name"])
    self.datatable_display_cols = meta["cols"]
    self.set_header("Content-Type", "text/json;charset=utf-8")
    display_start = Utils.parse_int(self.get_argument("iDisplayStart"))
    display_length = Utils.parse_int(self.get_argument("iDisplayLength"))
    # cols_num = self.get_argument("iColumns")
    # Global search: build "col like :col OR ..." over the configured fields.
    default_search_value = self.get_argument("sSearch")
    default_search_fields = DataTableModule.__default_search_fields__.get(id_)
    default_search_sqlwhere = ""
    default_search_sqlwhere_params = dict()
    if default_search_value and default_search_fields:
        temp_sql = list()
        for field_name in default_search_fields:
            temp_sql.append("%s like :%s" % (field_name, field_name))
            default_search_sqlwhere_params[field_name] = "%" + default_search_value + "%"
        default_search_sqlwhere = " OR ".join(temp_sql)
    # Sorting: only the first requested column is honoured.
    sort_params = self.parse_sort_params()
    order_sqlwhere = ""
    for k, v in sort_params.items():
        # NOTE(review): raw string interpolation into SQL — k/v originate
        # from the client; verify parse_sort_params restricts them to
        # known column names and ASC/DESC.
        order_sqlwhere += "1=1 ORDER BY %s %s" % (k, v)
        break
    # DataGrid queries; provider hooks may replace either query wholesale.
    cnn = SessionFactory.new()
    total_query = cnn.query(func.count(1)).select_from(entity)
    ds_query = cnn.query(entity)
    custom_filter = ObjectPool.datatable_provider.get(meta["name"])
    if custom_filter:
        custom_filter_obj = custom_filter()
        if hasattr(custom_filter_obj, "total"):
            total_query = custom_filter_obj.total(total_query, self)
        if hasattr(custom_filter_obj, "dataset"):
            ds_query = custom_filter_obj.dataset(ds_query, self)
    if default_search_value:
        total_query = total_query.filter(default_search_sqlwhere).params(**default_search_sqlwhere_params)
        ds_query = ds_query.filter(default_search_sqlwhere).params(**default_search_sqlwhere_params)
    if order_sqlwhere:
        ds_query = ds_query.filter(order_sqlwhere)
    total = total_query.scalar()
    ds = ds_query.offset(display_start).limit(display_length)
    results = dict()
    results["sEcho"] = self.get_argument("sEcho")
    results["iTotalRecords"] = total
    results["iTotalDisplayRecords"] = total
    results["aaData"] = [item.dict() for item in ds]
    self.write(json.dumps(results, cls=JsonEncoder))
def update(self, key, desc):
    """Update a security point's description; True when the key exists."""
    session = SessionFactory.new()
    point = session.query(SecurityPoint).filter(SecurityPoint.key_ == key).limit(1).scalar()
    if point is None:
        return False
    point.description = desc
    session.commit()
    return True
def list(self):
    """Return the ids of roles assigned to the account given by the `id` param."""
    account_id = Utils.parse_int(self.param("id"))
    session = SessionFactory.new()
    rows = session.query(Role.id).join(Account, Role.accounts) \
        .filter(Account.id == account_id).all()
    return [role_id for (role_id,) in rows]
def set_object_count(self, id, chang_num=0):
    """Shift a term-taxonomy's object_count by chang_num; True if the row exists."""
    session = SessionFactory.new()
    taxonomy = session.query(TermTaxonomy).filter(TermTaxonomy.id == id).limit(1).scalar()
    if not taxonomy:
        return False
    taxonomy.object_count += chang_num
    session.commit()
    return True
def delete(id_):
    """Delete an account by primary key.

    The built-in ``root`` account is protected. Returns a status string.
    """
    cnn = SessionFactory.new()
    account = cnn.query(Account).get(id_)
    if account is None:
        # BUGFIX: previously crashed with AttributeError for unknown ids.
        return "AccountNotFound"
    if account.login_name == "root":
        # "Supper" (sic) kept verbatim — callers may match this string.
        return "RootIsSupperAccount"
    cnn.delete(account)
    cnn.commit()
    return "Success"
def exists(self, name_):
    """True when a term with the given (trimmed) name exists in this taxonomy."""
    session = SessionFactory.new()
    matches = session.query(func.count(TermTaxonomy.id)) \
        .filter(TermTaxonomy.term.has(name=Utils.trim(name_))) \
        .filter(TermTaxonomy.taxonomy == self.taxonomy) \
        .limit(1).scalar()
    return matches > 0
def list_timeblocks():
    """Return all time blocks as (id, name, formatted_start, formatted_end) tuples."""
    session = SessionFactory.new()
    rows = session.query(ValidWorkTimeBlock.id, ValidWorkTimeBlock.name,
                         ValidWorkTimeBlock.start_time, ValidWorkTimeBlock.end_time).all()
    return [(block_id, name, Utils.format_time(start), Utils.format_time(end))
            for block_id, name, start, end in rows]
def clear(self):
    """Blank the current account's fingerprint template key."""
    session = SessionFactory.new()
    record = session.query(ValidWorkFingerTemplateKey) \
        .filter(ValidWorkFingerTemplateKey.account_id == self.request.current_user) \
        .limit(1).scalar()
    if record is None:
        return ["failure"]
    record.tpl = ""
    session.commit()
    return ["success"]
def add(self, http_req):
    """Create a leave request from the request payload; returns the new row id."""
    leave = ValidWorkAskForLeave()
    http_req.wrap_entity(leave)
    leave.creator = http_req.get_current_user()
    leave.create_datetime = Utils.current_datetime()
    session = SessionFactory.new()
    session.add(leave)
    session.commit()
    return leave.id
def post(self, *args, **kwargs):
    """Device acknowledges a command: remove it from the queue and reply OK."""
    self.set_header("Content-Type", "text/plain;charset=utf-8")
    sn = self.get_argument("SN")
    id_ = self.get_argument("ID")
    rt = self.get_argument("Return")  # device result code; currently unused
    sf = SessionFactory.new()
    obj = sf.query(ValidWorkCommands).get(Utils.parse_int(id_))
    # BUGFIX: guard against an unknown or already-deleted id; sf.delete(None)
    # raised and the device never received its acknowledgement.
    if obj is not None:
        sf.delete(obj)
        sf.commit()
    self.write("OK")
def count(self, default_search_val, http_req):
    """Count non-root accounts whose login/name/email matches the search value."""
    session = SessionFactory.new()
    query = session.query(Account, Archives.name, Archives.email) \
        .outerjoin(Archives, Account.archives_id == Archives.id) \
        .filter(Account.login_name != "root")
    if default_search_val:
        pattern = '%' + default_search_val + '%'
        query = query.filter(or_(Account.login_name.like(pattern),
                                 Archives.name.like(pattern),
                                 Archives.email.like(pattern)))
    return query.count()