def get(self, *args, **kwargs):
    """Machine heartbeat endpoint.

    Records (or refreshes) the calling machine's IP and last-connect time,
    then sends back the oldest queued command for that serial number, if any,
    in the form ``C:<id>:<cmd>``.
    """
    self.set_header("Content-Type", "text/plain;charset=utf-8")
    params_ = self.wrap_params_to_dict()
    sn = params_.get("SN")
    ip = self.request.remote_ip
    # Record the current machine's info (insert on first contact, else update).
    sf = SessionFactory.new()
    obj = sf.query(ValidWorkMachine).filter(ValidWorkMachine.sn == sn).limit(1).scalar()
    if not obj:
        m = ValidWorkMachine()
        m.sn = sn
        m.ip = ip
        m.last_connect_time = Utils.current_datetime()
        sf.add(m)
        sf.commit()
    else:
        obj.ip = ip
        obj.last_connect_time = Utils.current_datetime()
        sf.commit()
    # Take one pending command and push it down to the machine.
    # BUG FIX: the original used .scalar(), which returns only the FIRST
    # column of the first row (the id as a plain int), so cmd[0]/cmd[1]
    # raised TypeError whenever a command existed. .first() returns the
    # full (id, cmd) row.
    cmd = sf.query(ValidWorkCommands.id, ValidWorkCommands.cmd) \
        .filter(ValidWorkCommands.sn == sn) \
        .order_by(asc(ValidWorkCommands.id)).limit(1).first()
    if cmd:
        self.write("C:%s:%s" % (cmd[0], cmd[1]))
def post(self, *args, **kwargs):
    """Receive uploaded files and store them under a date-based directory.

    Files are saved to ``<webroot>/upload/<year-month>/<day>/`` with a
    generated unique name (original extension preserved). Responds with an
    encoded ``["OK"]``.
    """
    self.set_header("Content-Type", "text/json;charset=utf-8")
    # Build the storage path: /upload/<year-month>/<day-of-month>.
    upload_path = "/upload/%s/%s" % (Utils.format_year_month(Utils.current_datetime()),
                                     Utils.current_datetime().date().day)
    path = self.get_webroot_path() + upload_path
    Utils.mkdirs(path)
    files = self.request.files
    items = list()
    for key in files:
        item = dict()
        # NOTE(review): meta is indexed like a single file dict; if this is a
        # stock Tornado handler, files[key] is a LIST of file dicts — confirm
        # against the framework wrapper in use.
        meta = files[key]
        file_name = meta["filename"]
        # BUG FIX: os.path.splitext returns a (root, ext) tuple, which is
        # always truthy, so the no-extension branch was dead and the tuple
        # repr leaked into the saved filename. Take the extension component
        # (which already includes the leading dot).
        ext_name = os.path.splitext(file_name)[1]
        name_ = Utils.uniq_index()
        if not ext_name:
            new_file_name = name_
        else:
            new_file_name = "%s%s" % (name_, ext_name)
        save_path = "%s/%s" % (path, new_file_name)
        with open(save_path, "wb") as uploader:
            uploader.write(meta["body"])
        item["local_name"] = file_name
        item["ext_name"] = ext_name
        item["upload_path"] = "%s/%s" % (upload_path, new_file_name)
        item["archives_id"] = self.get_current_user()
        items.append(item)
    self.write(Utils.encode(["OK"]))
def push_users_fp_to_machine(sn, items):
    """Queue user-create and fingerprint-template commands for a machine.

    @param sn: machine serial number
    @param items: [[pin, name, fid, tpl]..]
    """
    session = SessionFactory.new()
    # NOTE(review): machine id is looked up from an existing ValidWorkCommands
    # row for this sn — confirm this should not query ValidWorkMachine instead.
    machine_ref = session.query(ValidWorkCommands.id).filter(ValidWorkCommands.sn == sn).limit(1).scalar()
    pending = []
    for pin, name, fid, tpl in items:
        user_cmd = r"DATA DEL_USER PIN=%i\r\nDATA USER PIN=%i\tName=%s\r\n" % (pin, pin, name)
        fp_cmd = r"DATA FP PIN=%i\tFID=%i\tTMP=%s\r\n" % (pin, fid, tpl)
        for text in (user_cmd, fp_cmd):
            record = ValidWorkCommands()
            record.sn = sn
            record.validworkmachine_id = machine_ref
            record.cmd = text
            record.create_date = Utils.current_datetime()
            pending.append(record)
    session.add_all(pending)
    session.commit()
def push_user_fp_to_machine(sn, pin, name, fid, tpl):
    """Queue one user-create command and one fingerprint-template command
    for the machine identified by *sn*."""
    user_cmd = r"DATA DEL_USER PIN=%i\r\nDATA USER PIN=%i\tName=%s\r\n" % (pin, pin, name)
    fp_cmd = r"DATA FP PIN=%i\tFID=%i\tTMP=%s\r\n" % (pin, fid, tpl)
    session = SessionFactory.new()
    # NOTE(review): machine id is looked up from an existing ValidWorkCommands
    # row for this sn — confirm this should not query ValidWorkMachine instead.
    machine_ref = session.query(ValidWorkCommands.id).filter(ValidWorkCommands.sn == sn).limit(1).scalar()
    records = []
    for text in (user_cmd, fp_cmd):
        record = ValidWorkCommands()
        record.sn = sn
        record.validworkmachine_id = machine_ref
        record.cmd = text
        record.create_date = Utils.current_datetime()
        records.append(record)
    session.add_all(records)
    session.commit()
def add(self, http_req):
    """Create a leave request from the request payload and return its new id."""
    leave = ValidWorkAskForLeave()
    http_req.wrap_entity(leave)
    leave.creator = http_req.get_current_user()
    leave.create_datetime = Utils.current_datetime()
    session = SessionFactory.new()
    session.add(leave)
    session.commit()
    return leave.id
def push_command_to_machine(sn, cmd):
    """Queue a single raw command for the machine identified by *sn*."""
    session = SessionFactory.new()
    # NOTE(review): machine id is looked up from an existing ValidWorkCommands
    # row for this sn — confirm this should not query ValidWorkMachine instead.
    machine_ref = session.query(ValidWorkCommands.id).filter(ValidWorkCommands.sn == sn).limit(1).scalar()
    record = ValidWorkCommands()
    record.sn = sn
    record.validworkmachine_id = machine_ref
    record.cmd = cmd
    record.create_date = Utils.current_datetime()
    session.add(record)
    session.commit()
def modify(self, id_, http_req):
    """Update an existing leave record from the request payload.

    Returns "" on success, "failure" when no record with *id_* exists.
    """
    session = SessionFactory.new()
    record = session.query(ValidWorkAskForLeave).get(id_)
    if not record:
        return "failure"
    http_req.wrap_entity(record)
    record.creator = http_req.get_current_user()
    record.create_datetime = Utils.current_datetime()
    session.commit()
    return ""
def count(self, search_text, http_req):
    """Count today's check-on rows, optionally filtered by employee name."""
    today = Utils.format_date(Utils.current_datetime())
    session = SessionFactory.new()
    # Organization names keyed by taxonomy id, for the outer join below.
    org = session.query(Term.name.label("term_name"), TermTaxonomy.id) \
        .filter(TermTaxonomy.term_id == Term.id).subquery()
    query = session.query(func.count(ValidWorkCheckOn.id)) \
        .join(Archives, ValidWorkCheckOn.archives_id == Archives.id) \
        .outerjoin(org, org.c.id == Archives.org_id)
    if search_text:
        query = query.filter(Archives.name.contains(search_text))
    query = query.filter(cast(ValidWorkCheckOn.valid_start_time, Date) == today)
    return query.scalar()
def list(self, search_text, start, limit, http_req):
    """Page through today's check-on records joined with employee and
    organization info; returns a list of plain dicts for serialization."""
    today = Utils.format_date(Utils.current_datetime())
    session = SessionFactory.new()
    # Organization names keyed by taxonomy id, for the outer join below.
    org = session.query(Term.name.label("term_name"), TermTaxonomy.id) \
        .filter(TermTaxonomy.term_id == Term.id).subquery()
    query = session.query(
        ValidWorkCheckOn.id, Archives.code, Archives.name,
        ValidWorkTimeBlock.name, ValidWorkTimeBlock.start_time, ValidWorkTimeBlock.end_time,
        ValidWorkCheckOn.status_in, ValidWorkCheckOn.status_out, ValidWorkCheckOn.status_no_sign,
        ValidWorkCheckOn.check_in_time, ValidWorkCheckOn.check_out_time,
        org.c.term_name,
        MinuteDiff(ValidWorkCheckOn.valid_start_time, ValidWorkCheckOn.valid_end_time).label("diff")
    ).select_from(ValidWorkCheckOn) \
        .join(ValidWorkTimeBlock, ValidWorkCheckOn.time_block_id == ValidWorkTimeBlock.id) \
        .join(Archives, ValidWorkCheckOn.archives_id == Archives.id) \
        .outerjoin(org, org.c.id == Archives.org_id)
    if search_text:
        query = query.filter(Archives.name.contains(search_text))
    query = query.order_by(Archives.name).filter(cast(ValidWorkCheckOn.valid_start_time, Date) == today)
    rows = query.offset(start).limit(limit).all()
    return [
        {
            "id": row[0],
            "code": row[1],
            "name": row[2],
            "tb_name": row[3],
            "start_time": Utils.format_time(row[4]),
            "end_time": Utils.format_time(row[5]),
            "status_in": row[6],
            "status_out": row[7],
            "status_no_sign": row[8],
            "check_in_time": Utils.format_time(row[9]),
            "check_out_time": Utils.format_time(row[10]),
            "org_name": row[11],
            "no_work_timediff": row[12],
        }
        for row in rows
    ]
def count(self, search_text, http_req):
    """Count the distinct employees that have check-on records in the
    current month (pairs with the grouped monthly report listing)."""
    current_date = Utils.format_date(Utils.current_datetime())
    cur_datetime = Utils.parse_datetime(current_date + " 00:00")
    year = cur_datetime.year
    month = cur_datetime.month
    sf = SessionFactory.new()
    # BUG FIX: the original grouped by archives_id and called .scalar(),
    # which returns the row count of the FIRST group only — not the number
    # of employees. Count distinct employees instead so pagination totals
    # match the grouped monthly listing.
    q = sf.query(func.count(func.distinct(ValidWorkCheckOn.archives_id))) \
        .join(Archives, ValidWorkCheckOn.archives_id == Archives.id) \
        .filter(func.YEAR(ValidWorkCheckOn.valid_start_time) == year) \
        .filter(func.MONTH(ValidWorkCheckOn.valid_start_time) == month)
    if search_text:
        q = q.filter(Archives.name.contains(search_text))
    return q.scalar()
def create(login_id, pwd, archives_id=None, enabled=0):
    """Create an account with an md5-hashed password, attach the configured
    default role when one is set, and return the new account id."""
    session = SessionFactory.new()
    account = Account()
    account.login_name = login_id
    account.login_pwd = Utils.md5(pwd)
    account.create_time = Utils.current_datetime()
    account.enabled = enabled
    account.archives_id = archives_id
    session.add(account)
    session.flush()
    # Attach the system-configured default role, if any.
    default_role_id = Utils.parse_int(AppSettingHelper.get("s_usr_register_default_role_name", "0"))
    if default_role_id > 0:
        role = session.query(Role).get(default_role_id)
        if role:
            account.roles.append(role)
    session.commit()
    return account.id
def detect_chkon_status(self, archives_id, touch_time):
    """Classify a fingerprint touch against the employee's check-on window.

    Finds the ValidWorkCheckOn row whose valid window contains *touch_time*
    and stamps it as normal check-in (status_in=0), late (status_in=1),
    normal check-out (status_out=0) or early leave (status_out=1), together
    with the touch timestamp. Touches outside every sub-window leave the
    row unchanged.
    """
    sf = SessionFactory.new()
    obj = sf.query(ValidWorkCheckOn) \
        .filter(ValidWorkCheckOn.archives_id == archives_id) \
        .filter(ValidWorkCheckOn.valid_start_time <= touch_time) \
        .filter(ValidWorkCheckOn.valid_end_time >= touch_time).limit(1).scalar()
    if obj:
        timeblock = sf.query(ValidWorkTimeBlock).get(obj.time_block_id)
        if timeblock:
            # Detect the user's status.
            # NOTE(review): the day is taken from "now" rather than from
            # touch_time — assumes touches are processed the same day they
            # occur; confirm for delayed/offline uploads.
            current_date = Utils.current_datetime().date()
            # Scheduled work start time.
            work_start_time = datetime.combine(current_date, timeblock.start_time)
            # Scheduled work end time.
            normal_out_time = datetime.combine(current_date, timeblock.end_time)
            # Earliest time a touch counts as a normal check-in
            # (work start minus the normal check-in window).
            touchin_starttime = work_start_time - timedelta(minutes=timeblock.normal_in_space)
            if touchin_starttime <= touch_time < work_start_time:
                obj.check_in_time = touch_time
                obj.status_in = 0
            else:
                # Latest time a touch counts as "late"
                # (work start plus the lateness window).
                late_endtime = work_start_time + timedelta(minutes=timeblock.late_space)
                if work_start_time <= touch_time <= late_endtime:
                    obj.status_in = 1
                    obj.check_in_time = touch_time
                else:
                    # Latest time a touch counts as a normal check-out
                    # (work end plus the normal check-out window).
                    touchout_endtime = normal_out_time + timedelta(minutes=timeblock.normal_out_space)
                    if normal_out_time < touch_time <= touchout_endtime:
                        obj.status_out = 0
                        obj.check_out_time = touch_time
                    else:
                        # Earliest time a touch counts as "early leave"
                        # (work end minus the early-leave window).
                        early_leave_starttime = normal_out_time - timedelta(minutes=timeblock.leave_early_space)
                        if early_leave_starttime <= touch_time <= normal_out_time:
                            obj.status_out = 1
                            obj.check_out_time = touch_time
            pass
    sf.commit()
def get(self, *args, **kwargs):
    """Render the overtime page with today's date (at midnight) preset."""
    context_opts = {
        "current_date": Utils.format_date(Utils.current_datetime()) + " 00:00",
    }
    return self.render("validwork/overtime.html", context=context_opts)
def list(self, search_text, start, limit, http_req):
    """Monthly attendance report: one row per employee with check-ons this
    month, with grouped totals for lateness, early leave, absence (days),
    personal/sick/other leave (days) and overtime (hours), plus the
    employee's organization name.

    Returns a paged list of plain dicts for serialization.
    """
    current_date = None
    if not current_date:
        current_date = Utils.format_date(Utils.current_datetime())
    cur_datetime = Utils.parse_datetime(current_date + " 00:00")
    year = cur_datetime.year
    month = cur_datetime.month
    sf = SessionFactory.new()
    # Group: all people with check-on records in the month.
    checkon_subq = sf.query(ValidWorkCheckOn.archives_id) \
        .join(Archives, ValidWorkCheckOn.archives_id == Archives.id) \
        .filter(func.YEAR(ValidWorkCheckOn.valid_start_time) == year) \
        .filter(func.MONTH(ValidWorkCheckOn.valid_start_time) == month) \
        .group_by(ValidWorkCheckOn.archives_id).subquery()
    # Grouped totals: late, early leave, absence, leave, overtime.
    late_subq = sf.query(ValidWorkCheckOn.archives_id, (func.count(1)).label("total")) \
        .filter(ValidWorkCheckOn.status_in == 1) \
        .filter(func.YEAR(ValidWorkCheckOn.valid_start_time) == year) \
        .filter(func.MONTH(ValidWorkCheckOn.valid_start_time) == month) \
        .group_by(ValidWorkCheckOn.archives_id).subquery()
    early_leave_subq = sf.query(ValidWorkCheckOn.archives_id, (func.count(1)).label("total")) \
        .filter(ValidWorkCheckOn.status_out == 1) \
        .filter(func.YEAR(ValidWorkCheckOn.valid_start_time) == year) \
        .filter(func.MONTH(ValidWorkCheckOn.valid_start_time) == month) \
        .group_by(ValidWorkCheckOn.archives_id).subquery()
    # Absence, converted from minutes to days (60 * 24).
    no_work_subq = sf.query(ValidWorkCheckOn.archives_id,
                            (func.sum(MinuteDiff(ValidWorkCheckOn.valid_start_time,
                                                 ValidWorkCheckOn.valid_end_time)) / (60 * 24)).label("total")) \
        .filter(ValidWorkCheckOn.status_no_sign == 1) \
        .filter(func.YEAR(ValidWorkCheckOn.valid_start_time) == year) \
        .filter(func.MONTH(ValidWorkCheckOn.valid_start_time) == month) \
        .group_by(ValidWorkCheckOn.archives_id).subquery()
    # Personal leave (kind == 0), in days.
    askforleave_subq1 = sf.query(ValidWorkAskForLeave.archives_id,
                                 (func.sum(MinuteDiff(ValidWorkAskForLeave.start_datetime,
                                                      ValidWorkAskForLeave.end_datetime)) / (60 * 24)).label("total")) \
        .filter(func.YEAR(ValidWorkAskForLeave.start_datetime) == year) \
        .filter(func.MONTH(ValidWorkAskForLeave.start_datetime) == month) \
        .filter(ValidWorkAskForLeave.kind == 0) \
        .group_by(ValidWorkAskForLeave.archives_id).subquery()
    # Sick leave (kind == 1), in days.
    askforleave_subq2 = sf.query(ValidWorkAskForLeave.archives_id,
                                 (func.sum(MinuteDiff(ValidWorkAskForLeave.start_datetime,
                                                      ValidWorkAskForLeave.end_datetime)) / (60 * 24)).label("total")) \
        .filter(func.YEAR(ValidWorkAskForLeave.start_datetime) == year) \
        .filter(func.MONTH(ValidWorkAskForLeave.start_datetime) == month) \
        .filter(ValidWorkAskForLeave.kind == 1) \
        .group_by(ValidWorkAskForLeave.archives_id).subquery()
    # Other leave (kind == 2), in days.
    askforleave_subq3 = sf.query(ValidWorkAskForLeave.archives_id,
                                 (func.sum(MinuteDiff(ValidWorkAskForLeave.start_datetime,
                                                      ValidWorkAskForLeave.end_datetime)) / (60 * 24)).label("total")) \
        .filter(func.YEAR(ValidWorkAskForLeave.start_datetime) == year) \
        .filter(func.MONTH(ValidWorkAskForLeave.start_datetime) == month) \
        .filter(ValidWorkAskForLeave.kind == 2) \
        .group_by(ValidWorkAskForLeave.archives_id).subquery()
    # Overtime, in hours.
    overtime_subq = sf.query(ValidWorkOvertime.archives_id,
                             (func.sum(MinuteDiff(ValidWorkOvertime.start_datetime,
                                                  ValidWorkOvertime.end_datetime)) / 60).label("total")) \
        .filter(func.YEAR(ValidWorkOvertime.start_datetime) == year) \
        .filter(func.MONTH(ValidWorkOvertime.start_datetime) == month) \
        .group_by(ValidWorkOvertime.archives_id).subquery()
    # Organization names keyed by taxonomy id.
    term_subq = sf.query(Term.name.label("term_name"), TermTaxonomy.id) \
        .filter(TermTaxonomy.term_id == Term.id).subquery()
    subq = sf.query(Archives.id, Archives.code, Archives.name, term_subq.c.term_name) \
        .select_from(Archives).outerjoin(term_subq, Archives.org_id == term_subq.c.id).subquery()
    q = sf.query(checkon_subq.c.archives_id, subq.c.code, subq.c.name,
                 late_subq.c.total, early_leave_subq.c.total, no_work_subq.c.total,
                 askforleave_subq1.c.total, askforleave_subq2.c.total, askforleave_subq3.c.total,
                 overtime_subq.c.total, subq.c.term_name).select_from(checkon_subq) \
        .outerjoin(late_subq, checkon_subq.c.archives_id == late_subq.c.archives_id) \
        .outerjoin(early_leave_subq, checkon_subq.c.archives_id == early_leave_subq.c.archives_id) \
        .outerjoin(no_work_subq, checkon_subq.c.archives_id == no_work_subq.c.archives_id) \
        .outerjoin(askforleave_subq1, checkon_subq.c.archives_id == askforleave_subq1.c.archives_id) \
        .outerjoin(askforleave_subq2, checkon_subq.c.archives_id == askforleave_subq2.c.archives_id) \
        .outerjoin(askforleave_subq3, checkon_subq.c.archives_id == askforleave_subq3.c.archives_id) \
        .outerjoin(overtime_subq, checkon_subq.c.archives_id == overtime_subq.c.archives_id) \
        .join(subq, checkon_subq.c.archives_id == subq.c.id)
    if search_text:
        # BUG FIX: the original filtered on checkon_subq.c.name, but that
        # subquery selects only archives_id, so any non-empty search raised
        # an AttributeError. The employee name lives on the Archives-based
        # subquery.
        q = q.filter(subq.c.name.contains(search_text))
    ds = q.order_by(checkon_subq.c.archives_id).offset(start).limit(limit).all()
    items = list()
    for row in ds:
        obj = EmptyClass()
        obj.id = row[0]
        obj.code = row[1]
        obj.name = row[2]
        obj.late_total = row[3]
        obj.early_leave_total = row[4]
        obj.no_work_total = row[5]
        obj.askforleave_total1 = row[6]
        obj.askforleave_total2 = row[7]
        obj.askforleave_total3 = row[8]
        obj.overtime_total = row[9]
        obj.org_name = row[10]
        items.append(obj.__dict__)
    return items
def update_last_login_datetime(id_):
    """Stamp the given account's last logon time with the current datetime."""
    session = SessionFactory.new()
    account = session.query(Account).get(id_)
    account.last_logon_time = Utils.current_datetime()
    session.commit()
def create(self):
    """Handle self-registration.

    Validates the form, creates an Archives (person) record with a generated
    employee code plus an Account, attaches the configured default role, and
    logs the new user in via secure cookies.

    Returns a short status token consumed by the front end: "Success" or a
    specific validation error code such as "EmailRequired" / "AccountExists".
    """
    account_name = self.param("account_name")
    if not account_name:
        return "AccountNameRequired"
    email = self.param("email")
    if not email:
        return "EmailRequired"
    pwd = self.param("pwd")
    if not pwd:
        return "PwdRequired"
    agree = self.param("agree")
    # BUG FIX: removed leftover debug print(agree) that leaked form data
    # to stdout on every registration attempt.
    if not agree:
        return "AgreeRequired"
    re_pwd = self.param("re_pwd")
    if pwd != re_pwd:
        return "PwdNotSame"
    sf = SessionFactory.new()
    num = sf.query(func.count(Account.id)).filter(Account.login_name == account_name).scalar()
    if num > 0:
        return "AccountExists"
    num = sf.query(func.count(Archives.id)).filter(Archives.email == email).scalar()
    if num > 0:
        return "EmailExists"
    # Create the person record. The code is a configurable prefix plus a
    # zero-padded id; the pad width is widened when the current row count
    # no longer fits the configured width.
    length = len(str(sf.query(func.count(Archives.id)).scalar()))
    max_length = AppSettingHelper.get("s_usr_code_fmt_length", "5")
    prefix = AppSettingHelper.get("s_usr_code_prefix", "P")
    if length > Utils.parse_int(max_length):
        max_length = "%s" % (length + 1)
    fmt = prefix + "%0" + max_length + "d"
    p = Archives()
    p.email = email
    p.name = Utils.email_account_name(email)
    p.join_date = Utils.current_datetime()
    sf.add(p)
    sf.flush()  # flush to obtain p.id for the code format
    p.code = fmt % p.id
    u = Account()
    u.login_name = account_name
    u.login_pwd = Utils.md5(pwd)
    u.create_time = Utils.current_datetime()
    u.last_logon_time = Utils.current_datetime()
    u.enabled = 1
    u.archives_id = p.id
    sf.add(u)
    sf.flush()
    # Attach the system-configured default role, if any.
    default_role_id = Utils.parse_int(AppSettingHelper.get("s_usr_register_default_role_name", 0))
    if default_role_id > 0:
        default_role = sf.query(Role).get(default_role_id)
        if default_role:
            u.roles.append(default_role)
    sf.commit()
    # Log the new user in immediately.
    self.request.set_secure_cookie(IRequest.__key_account_id__, "%i" % u.id)
    self.request.set_secure_cookie(IRequest.__key_account_name__, email)
    return "Success"
def organization_of_work():
    """Daily attendance scheduler pass.

    Skips statutory holidays; marks expired, never-signed check-on rows as
    absences; then, for every schedule task not yet arranged today, bulk
    inserts a ValidWorkCheckOn row per (employee, time block) with the valid
    sign-in window widened by the block's normal in/out margins.
    """
    current_datetime = Utils.current_datetime()
    # Is today a statutory holiday? If so, nothing to schedule.
    isholiday = ValidWorkSchedulerThread.is_holiday(Utils.format_date(current_datetime))
    if isholiday:
        return
    sf = SessionFactory.new()
    # Status -1 means no fingerprint touch occurred; once the CheckOn valid
    # window has expired, such rows are flagged as absence (status_no_sign=1).
    updates = sf.query(ValidWorkCheckOn) \
        .filter(or_(ValidWorkCheckOn.status_in == -1, ValidWorkCheckOn.status_out == -1)) \
        .filter(ValidWorkCheckOn.valid_end_time < Utils.format_datetime(current_datetime)).all()
    for row in updates:
        row.status_no_sign = 1
    sf.commit()
    # List all attendance schedule tasks.
    tasks = sf.query(ValidWorkScheduleTask.id).all()
    tasks = [task[0] for task in tasks]
    for task_id in tasks:
        # (Disabled) earlier logic that only arranged the next shift within
        # a 30-minute lead window, based on the task's earliest block start:
        #min_time = sf.query(ValidWorkTimeBlock.start_time) \
        #    .join(ValidWorkScheduleTask, ValidWorkTimeBlock.validworkscheduletasks) \
        #    .order_by(asc(ValidWorkTimeBlock.start_time)) \
        #    .filter(ValidWorkScheduleTask.id == task_id).limit(1).scalar()
        #if min_time:
        #    min_datetime = datetime.combine(current_datetime.date(), min_time)
        #    start_datetime = min_datetime - timedelta(minutes=30)
        #    if start_datetime <= current_datetime <= min_datetime:
        # Has today's work for this task already been arranged?
        e = sf.query(func.count(ValidWorkCheckOn.id)) \
            .filter(ValidWorkCheckOn.task_id == task_id) \
            .filter(cast(ValidWorkCheckOn.valid_start_time, Date) == Utils.format_date(current_datetime)).scalar()
        if e == 0:
            # Arrange new work:
            # 1. Get the IDs of everyone assigned this schedule task.
            usrs = sf.query(Archives.id).join(ValidWorkScheduleTask, Archives.validworkscheduletasks) \
                .filter(ValidWorkScheduleTask.id == task_id).all()
            usrs = [u[0] for u in usrs]
            # 2. Get all time blocks of this schedule task.
            time_blocks = sf.query(ValidWorkTimeBlock) \
                .join(ValidWorkScheduleTask, ValidWorkTimeBlock.validworkscheduletasks) \
                .filter(ValidWorkScheduleTask.id == task_id).all()
            # 3. Bulk insert the CheckOn rows.
            workers_tb = list()
            for usr_id in usrs:
                for tb in time_blocks:
                    vwco = ValidWorkCheckOn()
                    vwco.archives_id = usr_id
                    vwco.task_id = task_id
                    vwco.time_block_id = tb.id
                    start = datetime.combine(current_datetime.date(), tb.start_time)
                    end = datetime.combine(current_datetime.date(), tb.end_time)
                    # Widen the valid window by the normal in/out margins.
                    start = start - timedelta(minutes=tb.normal_in_space)
                    end = end + timedelta(minutes=tb.normal_out_space)
                    vwco.valid_start_time = start
                    vwco.valid_end_time = end
                    workers_tb.append(vwco)
                    pass
                pass
            pass
            # NOTE(review): nesting reconstructed from collapsed source —
            # add_all/commit assumed to run once per newly-arranged task;
            # confirm against the original layout.
            sf.add_all(workers_tb)
            sf.commit()
            pass
        pass
    pass