def finger_tpl_save(self):
    """Persist a fingerprint template for an archive and push it to every machine.

    Reads ``index`` (finger slot), ``tpl`` (template data) and ``archives_id``
    from the request. Updates the existing template row when one exists,
    otherwise inserts a new one.

    :return: ["success"] on save, ["failure"] when archives_id is missing.
    """
    index = Utils.parse_int(self.param("index"))
    tpl = self.param("tpl")
    archives_id = Utils.parse_int(self.param("archives_id"))
    # Bug fix: the original pushed the template to every machine even when
    # archives_id was missing, and only then reported failure. Validate first.
    if not archives_id:
        return ["failure"]
    sf = SessionFactory.new()
    obj = sf.query(ValidWorkFingerTemplate) \
        .filter(ValidWorkFingerTemplate.archives_id == archives_id) \
        .filter(ValidWorkFingerTemplate.finger_index == index).limit(1).scalar()
    machines_sn = [sn[0] for sn in sf.query(ValidWorkMachine.sn).all()]
    name = sf.query(Archives.name).filter(Archives.id == archives_id).limit(1).scalar()
    for sn in machines_sn:
        ValidWorkHelper.push_user_fp_to_machine(sn, archives_id, name, index, tpl)
    if obj:
        obj.tpl = tpl
    else:
        # No row yet for this (archive, finger) pair -- insert one.
        # (Bug fix: the original opened a second, redundant session here.)
        obj = ValidWorkFingerTemplate()
        obj.card_no = ""
        obj.finger_index = index
        obj.tpl = tpl
        obj.archives_id = archives_id
        sf.add(obj)
    sf.commit()
    return ["success"]
def post(self, *args, **kwargs):
    """Record machine upload data (OPERLOG / ATTLOG tables).

    OPERLOG uploads update the machine's operation stamp; ATTLOG uploads
    update the attendance stamp and feed each tab-separated record line
    into the check-on status detector.
    """
    params = self.wrap_params_to_dict()
    sf = SessionFactory.new()
    machine = sf.query(ValidWorkMachine) \
        .filter(ValidWorkMachine.sn == params.get("SN")).limit(1).scalar()
    table = params.get("table")
    if table == "OPERLOG":
        op_stamp = params.get("OpStamp")
        if op_stamp and machine:
            machine.opstamp = Utils.parse_int(op_stamp)
            sf.commit()
        self.write("OK")
    elif table == "ATTLOG":
        raw_stamp = params.get("Stamp")
        if raw_stamp and machine:
            stamp = Utils.parse_int(raw_stamp)
            machine.stamp = stamp
            sf.commit()
            if stamp and stamp > 0:
                body = self.request.body
                if body:
                    text = body.decode("utf-8")
                    # Each record line is "archives_id \t timestamp \t ...".
                    pairs = []
                    for line in text.split("\n"):
                        fields = line.split("\t")
                        if len(fields) >= 2:
                            pairs.append((fields[0], fields[1]))
                    for archives_id, time_text in pairs:
                        touch_time = datetime.strptime(time_text, "%Y-%m-%d %H:%M:%S")
                        self.detect_chkon_status(archives_id, touch_time)
        self.write("OK")
def list(self, id_):
    """Serve a DataTables JSON page from the data-view provider mapped to id_.

    The provider (looked up in ObjectPool.dataview_provider) may implement
    ``count`` and ``list`` to supply the total and the page of rows.
    """
    name = DataViewModule.__view_mapping__.get(id_)
    if not name:
        # Bug fix: the original fell through after set_status and still
        # emitted a (bogus) result payload; abort the request instead.
        self.set_status(403, "Error!")
        return
    self.set_header("Content-Type", "text/json;charset=utf-8")
    display_start = Utils.parse_int(self.get_argument("iDisplayStart"))
    display_length = Utils.parse_int(self.get_argument("iDisplayLength"))
    total = 0
    ds = list()
    custom_data_provider = ObjectPool.dataview_provider.get(name)
    if custom_data_provider:
        default_search_value = self.get_argument("sSearch")
        obj = custom_data_provider()
        if hasattr(obj, "count"):
            total = obj.count(default_search_value, self)
        if hasattr(obj, "list"):
            ds = obj.list(default_search_value, display_start, display_length, self)
    results = dict()
    results["sEcho"] = self.get_argument("sEcho")
    results["iTotalRecords"] = total
    results["iTotalDisplayRecords"] = total
    results["aaData"] = ds
    self.write(json.dumps(results, cls=JsonEncoder))
def list(self, id_):
    """Serve a DataTables JSON page for the entity mapped to id_.

    Handles the global search (sSearch), single-column ordering and paging
    parameters of the classic DataTables server-side protocol.
    """
    meta = DataTableModule.__entity_mapping__.get(id_)
    if not meta:
        # Bug fix: the original fell through and crashed on meta["name"]
        # (None is not subscriptable); abort after flagging the error.
        self.set_status(403, "Error!")
        return
    entity = import_object(meta["name"])
    self.datatable_display_cols = meta["cols"]
    self.set_header("Content-Type", "text/json;charset=utf-8")
    display_start = Utils.parse_int(self.get_argument("iDisplayStart"))
    display_length = Utils.parse_int(self.get_argument("iDisplayLength"))
    # Global search: build "<col> like :<col> OR ..." over the configured
    # searchable fields, with bound parameters for each column.
    default_search_value = self.get_argument("sSearch")
    default_search_fields = DataTableModule.__default_search_fields__.get(id_)
    default_search_sqlwhere = ""
    default_search_sqlwhere_params = dict()
    if default_search_value and default_search_fields:
        temp_sql = list()
        for field_name in default_search_fields:
            temp_sql.append("%s like :%s" % (field_name, field_name))
            default_search_sqlwhere_params[field_name] = "%" + default_search_value + "%"
        default_search_sqlwhere = " OR ".join(temp_sql)
    # Ordering: only the primary sort column is honoured.
    sort_params = self.parse_sort_params()
    order_sqlwhere = ""
    for k, v in sort_params.items():
        order_sqlwhere += "1=1 ORDER BY %s %s" % (k, v)
        break
    cnn = SessionFactory.new()
    total_query = cnn.query(func.count(1)).select_from(entity)
    ds_query = cnn.query(entity)
    # Optional per-entity provider may narrow both the count and data queries.
    custom_filter = ObjectPool.datatable_provider.get(meta["name"])
    if custom_filter:
        custom_filter_obj = custom_filter()
        if hasattr(custom_filter_obj, "total"):
            total_query = custom_filter_obj.total(total_query, self)
        if hasattr(custom_filter_obj, "dataset"):
            ds_query = custom_filter_obj.dataset(ds_query, self)
    # Bug fix: gate on the built where-clause, not the raw search value --
    # the original applied an empty filter string when a search value was
    # present but no searchable fields were configured for this entity.
    if default_search_sqlwhere:
        total_query = total_query.filter(default_search_sqlwhere).params(**default_search_sqlwhere_params)
        ds_query = ds_query.filter(default_search_sqlwhere).params(**default_search_sqlwhere_params)
    if order_sqlwhere:
        ds_query = ds_query.filter(order_sqlwhere)
    total = total_query.scalar()
    ds = ds_query.offset(display_start).limit(display_length)
    results = dict()
    results["sEcho"] = self.get_argument("sEcho")
    results["iTotalRecords"] = total
    results["iTotalDisplayRecords"] = total
    results["aaData"] = [item.dict() for item in ds]
    self.write(json.dumps(results, cls=JsonEncoder))
def task_assign(self):
    """Assign a schedule task to an archive, replacing any existing tasks."""
    archives_id = Utils.parse_int(self.param("id"))
    task_id = Utils.parse_int(self.param("st_id"))
    if not (archives_id and task_id):
        return ["failure"]
    sf = SessionFactory.new()
    person = sf.query(Archives).get(archives_id)
    if person is None:
        return ["failure"]
    task = sf.query(ValidWorkScheduleTask).get(task_id)
    # Replace the whole collection: one task per person.
    person.validworkscheduletasks = [task]
    sf.commit()
    return ["success"]
def save(self):
    """Attach (state == 0) or detach a role for an account."""
    account_id = Utils.parse_int(self.param("id"))
    role_id = Utils.parse_int(self.param("role_id"))
    state = Utils.parse_int(self.param("state"))
    cnn = SessionFactory.new()
    account = cnn.query(Account).get(account_id)
    if not account:
        return ["failure"]
    role = cnn.query(Role).get(role_id)
    if role:
        if state == 0:
            account.roles.append(role)
        else:
            account.roles.remove(role)
    cnn.commit()
    return ["success"]
def list_fingers(self):
    """Return the registered finger indexes for the requested archive."""
    archives_id = Utils.parse_int(self.param("archives_id"))
    rows = SessionFactory.new() \
        .query(ValidWorkFingerTemplate.finger_index) \
        .filter(ValidWorkFingerTemplate.archives_id == archives_id).all()
    # Each row is a one-element tuple; unwrap it.
    return [finger_index for (finger_index,) in rows]
def list(self, keywords, start, limit, http_req):
    """Page through archives joined with their (optional) schedule task.

    Filters by name keyword and organisation id, always hiding the
    built-in super-admin record.
    """
    org_id = Utils.parse_int(http_req.get_argument("org_id"))
    sf = SessionFactory.new()
    query = sf.query(Archives.id, Archives.name, Archives.sex, Archives.org_id,
                     ValidWorkScheduleTask.name, ValidWorkScheduleTask.id) \
        .outerjoin(ValidWorkScheduleTask, Archives.validworkscheduletasks) \
        .filter(Archives.name != "超级管理员")
    if keywords:
        query = query.filter(Archives.name.like("%" + keywords + "%"))
    if org_id:
        query = query.filter(Archives.org_id == org_id)
    rows = query.order_by(Archives.name).offset(start).limit(limit).all()
    return [
        {
            "id": row[0],
            "name": row[1],
            "sex": row[2],
            "org_id": row[3],
            # Outer join: task columns are empty when no task is assigned.
            "st_name": row[4] or "",
            "st_id": row[5] or 0,
        }
        for row in rows
    ]
def list(self):
    """Return the ids of the roles attached to the requested account."""
    account_id = Utils.parse_int(self.param("id"))
    cnn = SessionFactory.new()
    rows = cnn.query(Role.id).join(Account, Role.accounts) \
        .filter(Account.id == account_id).all()
    return [role_id for (role_id,) in rows]
def after_add(self, entity_obj, sf, req):
    """Stamp a zero-padded personnel code (prefix + id) on a new archive."""
    # Digit count of the current archive total, used to detect overflow of
    # the configured code width.
    digits = len(str(sf.query(func.count(Archives.id)).scalar()))
    obj = sf.query(Archives).get(entity_obj.id)
    width = AppSettingHelper.get("s_usr_code_fmt_length", "5")
    prefix = AppSettingHelper.get("s_usr_code_prefix", "P")
    if digits > Utils.parse_int(width):
        # Widen to one digit beyond the current count's length.
        width = "%s" % (digits + 1)
    obj.code = (prefix + "%0" + width + "d") % entity_obj.id
    sf.commit()
def post(self, *args, **kwargs):
    """Machine acknowledges a pushed command; delete it from the queue."""
    self.set_header("Content-Type", "text/plain;charset=utf-8")
    sn = self.get_argument("SN")
    id_ = self.get_argument("ID")
    rt = self.get_argument("Return")
    sf = SessionFactory.new()
    obj = sf.query(ValidWorkCommands).get(Utils.parse_int(id_))
    # Bug fix: guard against an unknown or already-deleted command id --
    # Session.delete(None) raises and the device would never get its "OK".
    if obj is not None:
        sf.delete(obj)
        sf.commit()
    self.write("OK")
def save(self):
    """Attach (s == "1") or detach a time block for a schedule task.

    :return: ["success"] when the association was changed, else ["failure"].
    """
    id_ = Utils.parse_int(self.param("id"))
    tb_id = Utils.parse_int(self.param("tb_id"))
    state = self.param("s")
    if id_ > 0 and tb_id > 0:
        # Both branches of the original duplicated the session/query setup;
        # hoist it and branch only on the append/remove action.
        sf = SessionFactory.new()
        task = sf.query(ValidWorkScheduleTask).get(id_)
        timeblock = sf.query(ValidWorkTimeBlock).get(tb_id)
        # Bug fix: the original dereferenced missing rows and crashed with
        # AttributeError when either id did not exist.
        if task is None or timeblock is None:
            return ["failure"]
        if state == "1":
            task.validworktimeblocks.append(timeblock)
        else:
            task.validworktimeblocks.remove(timeblock)
        sf.commit()
        return ["success"]
    return ["failure"]
def push_finger_tpl_to_machine(self):
    """Push every stored fingerprint template to the selected machine."""
    sf = SessionFactory.new()
    machine_id = Utils.parse_int(self.param("machine_id"))
    sn = sf.query(ValidWorkMachine.sn) \
        .filter(ValidWorkMachine.id == machine_id).limit(1).scalar()
    rows = sf.query(ValidWorkFingerTemplate.archives_id, Archives.name,
                    ValidWorkFingerTemplate.finger_index, ValidWorkFingerTemplate.tpl) \
        .join((Archives, ValidWorkFingerTemplate.archives)).all()
    # Each row is (archives_id, name, finger_index, tpl).
    ValidWorkHelper.push_users_fp_to_machine(sn, list(rows))
    return ["success"]
def add(self, http_req):
    """Validate the form and create an account.

    :return: the new account id, or an error-code string on validation failure.
    """
    login_name = http_req.get_argument("login_name")
    if not login_name:
        return "UserLoginIdNotBlank"
    password = http_req.get_argument("password")
    if not password:
        return "PasswordNotBlank"
    if password != http_req.get_argument("repassword"):
        return "PasswordNotSame"
    bind_target_user = http_req.get_argument("archives_id")
    enabled = Utils.parse_int(http_req.get_argument("enabled"))
    return AccountHelper.create(login_name, password, bind_target_user, enabled)
def modify(self, id_, http_req):
    """Update an account; the password only changes when one is supplied.

    :return: "" on success, otherwise an error-code string.
    """
    login_name = http_req.get_argument("login_name")
    if not login_name:
        return "UserLoginIdNotAllowedBlank"
    password = http_req.get_argument("password")
    # Bug fix: removed a stray debug print(password) that leaked the
    # plaintext password to stdout.
    if password:
        repassword = http_req.get_argument("repassword")
        if password != repassword:
            return "PasswordIsNotSame"
    bind_target_user = http_req.get_argument("archives_id")
    enabled = Utils.parse_int(http_req.get_argument("enabled"))
    msg = AccountHelper.update(id_, login_name, password, bind_target_user, enabled)
    if msg == "Updated":
        return ""
    return msg
def dict(self, dict_=None):
    """Convert the entity to a plain dict, or populate it from one.

    Without ``dict_``: return {column: value} for every mapped column.
    With ``dict_``: assign each attribute the entity has, coercing string
    values according to the column type reported by ``cols_meta()``.

    :param dict_: optional mapping of column name -> value
    :return: dict of column values, or None when populating
    """
    if not dict_:
        columns = [c.key for c in class_mapper(self.__class__).columns]
        return dict((c, getattr(self, c)) for c in columns)
    # Column type -> parser applied to string input; any other type (or a
    # non-string value) is assigned as-is. Replaces five copy-pasted
    # if/elif branches from the original.
    parsers = {
        "int": Utils.parse_int,
        "numeric": Utils.parse_float,
        "datetime": Utils.parse_datetime,
        "date": Utils.parse_date,
        "time": Utils.parse_time,
    }
    metas = self.cols_meta()
    for k, v in dict_.items():
        # Bug fix: removed a stray debug print(k, v).
        if not hasattr(self, k):
            continue
        for m in metas:
            if m["name"] != k:
                continue
            parse = parsers.get(m["type"])
            # isinstance instead of type(v) == str: also accepts str subclasses.
            if parse is not None and isinstance(v, str):
                setattr(self, k, parse(v))
            else:
                setattr(self, k, v)
def create(login_id, pwd, archives_id=None, enabled=0):
    """Create an Account row (MD5-hashed password) and return its id.

    Attaches the configured default role when one is set.
    """
    cnn = SessionFactory.new()
    account = Account()
    account.login_name = login_id
    account.login_pwd = Utils.md5(pwd)
    account.create_time = Utils.current_datetime()
    account.enabled = enabled
    account.archives_id = archives_id
    cnn.add(account)
    cnn.flush()  # populate account.id before the commit below
    default_role_id = Utils.parse_int(
        AppSettingHelper.get("s_usr_register_default_role_name", "0"))
    if default_role_id > 0:
        default_role = cnn.query(Role).get(default_role_id)
        if default_role:
            account.roles.append(default_role)
    cnn.commit()
    return account.id
def dataset(self, query, req):
    """Restrict learning-experience rows to the requested archive."""
    archives_id = Utils.parse_int(req.get_argument("archives_id"))
    return query.filter(LearningExperience.archives_id == archives_id)
def dataset(self, session, req):
    """Restrict training-experience rows to the requested archive.

    NOTE(review): the first parameter looks like a SQLAlchemy query despite
    being named ``session`` (sibling providers call it ``query``) -- name
    kept to avoid changing the interface; confirm against the caller.
    """
    archives_id = Utils.parse_int(req.get_argument("archives_id"))
    return session.filter(TrainingExperience.archives_id == archives_id)
def total(self, query, req):
    """Restrict the work-experience count query to the requested archive."""
    archives_id = Utils.parse_int(req.get_argument("archives_id"))
    return query.filter(WorkExperience.archives_id == archives_id)
def list(self):
    """Return the time-block ids attached to the requested schedule task."""
    task_id = Utils.parse_int(self.param("id"))
    rows = SessionFactory.new().query(ValidWorkTimeBlock.id) \
        .join(ValidWorkScheduleTask, ValidWorkTimeBlock.validworkscheduletasks) \
        .filter(ValidWorkScheduleTask.id == task_id).all()
    return [tb_id for (tb_id,) in rows]
def dataset(self, query, req):
    """Filter archives by organisation, always hiding the super admin."""
    filtered = query.filter(Archives.name != "超级管理员")
    org_id = Utils.parse_int(req.get_argument("org_id"))
    if org_id:
        filtered = filtered.filter(Archives.org_id == org_id)
    return filtered
def create(self):
    """Self-service registration: create an Archives row plus its Account.

    Validates the form, rejects duplicate account names / e-mails, generates
    a zero-padded personnel code, signs the new user in via secure cookies.

    :return: "Success" or an error-code string.
    """
    account_name = self.param("account_name")
    if not account_name:
        return "AccountNameRequired"
    email = self.param("email")
    if not email:
        return "EmailRequired"
    pwd = self.param("pwd")
    if not pwd:
        return "PwdRequired"
    agree = self.param("agree")
    # Bug fix: removed a stray debug print(agree).
    if not agree:
        return "AgreeRequired"
    re_pwd = self.param("re_pwd")
    if pwd != re_pwd:
        return "PwdNotSame"
    sf = SessionFactory.new()
    num = sf.query(func.count(Account.id)).filter(Account.login_name == account_name).scalar()
    if num > 0:
        return "AccountExists"
    num = sf.query(func.count(Archives.id)).filter(Archives.email == email).scalar()
    if num > 0:
        return "EmailExists"
    # Create the person record with a zero-padded code (prefix + id); widen
    # the width when the archive count outgrows the configured length.
    length = len(str(sf.query(func.count(Archives.id)).scalar()))
    max_length = AppSettingHelper.get("s_usr_code_fmt_length", "5")
    prefix = AppSettingHelper.get("s_usr_code_prefix", "P")
    if length > Utils.parse_int(max_length):
        max_length = "%s" % (length + 1)
    fmt = prefix + "%0" + max_length + "d"
    p = Archives()
    p.email = email
    p.name = Utils.email_account_name(email)
    p.join_date = Utils.current_datetime()
    sf.add(p)
    sf.flush()  # populate p.id for the code format
    p.code = fmt % p.id
    u = Account()
    u.login_name = account_name
    u.login_pwd = Utils.md5(pwd)
    u.create_time = Utils.current_datetime()
    u.last_logon_time = Utils.current_datetime()
    u.enabled = 1
    u.archives_id = p.id
    sf.add(u)
    sf.flush()
    # Consistency fix: pass the fallback as the string "0" like
    # AccountHelper.create does, so parse_int always receives a string.
    default_role_id = Utils.parse_int(AppSettingHelper.get("s_usr_register_default_role_name", "0"))
    if default_role_id > 0:
        default_role = sf.query(Role).get(default_role_id)
        if default_role:
            u.roles.append(default_role)
    sf.commit()
    self.request.set_secure_cookie(IRequest.__key_account_id__, "%i" % u.id)
    self.request.set_secure_cookie(IRequest.__key_account_name__, email)
    return "Success"
def parse_sort_params(self):
    """Map the DataTables primary sort column/direction to {column: dir}."""
    col_index = Utils.parse_int(self.get_argument("iSortCol_0"))
    direction = self.get_argument("sSortDir_0")
    return {self.datatable_display_cols[col_index]: direction}