def eager_load_tree(cls, session, visitor_fn=None):
    all_data = (
        session.query(Category, func.count(Item.ID))
        .outerjoin(Item)
        .group_by(Category.ID)
        .order_by(desc(func.isnull(Category.ParentCategoryID)), asc(Category.Name))
        .all()
    )

    # Create a mapping of parent => children
    parent_to_child = {}
    top_level = []
    for tup in all_data:
        node = tup[0]
        if node.ParentCategoryID is None:
            top_level.append(tup)
        else:
            parent_to_child.setdefault(node.ParentCategoryID, []).append(tup)

    # Actually build the nested tree structure
    def recurse(nodes):
        ret = []
        for node, item_count in nodes:
            children = recurse(parent_to_child.setdefault(node.ID, []))
            if visitor_fn:
                ret.append(visitor_fn(node, item_count, children))
            else:
                entry = {
                    "category": node,
                    "children": children,
                    "item_count": item_count
                }
                ret.append(entry)
        return ret

    return recurse(top_level), parent_to_child
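# A minimal usage sketch for eager_load_tree above, assuming it is exposed as a
# classmethod on the Category model and that Category has ID and Name columns;
# the as_dict visitor and the session object here are illustrative, not from the source.
def as_dict(node, item_count, children):
    # visitor_fn receives the ORM node, its item count, and the already-built children
    return {
        "id": node.ID,
        "name": node.Name,
        "item_count": item_count,
        "children": children,
    }

tree, parent_to_child = Category.eager_load_tree(session, visitor_fn=as_dict)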
def get_all_channels_ordered(db: Connection, user_id: int) -> List[Channel]:
    """
    Get all channels sorted by specified user's order.
    """
    query = (
        select(CHANNELS.c)
        .select_from(CHANNELS.outerjoin(CHANNELS_ORDER,
                                        CHANNELS_ORDER.c.channel_id == CHANNELS.c.id))
        .where(or_(CHANNELS_ORDER.c.user_id == user_id,
                   CHANNELS_ORDER.c.user_id.is_(None)))
        .order_by(func.isnull(CHANNELS_ORDER.c.order))
        .order_by(CHANNELS_ORDER.c.order)
    )
    result = db.execute(query)
    return [map_object(Channel, row) for row in result]
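# The double order_by above relies on the single-argument ISNULL(expr), which on
# MySQL-family backends returns 1 for NULL and 0 otherwise, so channels without an
# explicit order sort last. A self-contained sketch of the same trick on an
# illustrative table (table and column names are assumptions, not from the source):
from sqlalchemy import Column, Integer, MetaData, String, Table, func, select

metadata = MetaData()
channels = Table(
    "channels", metadata,
    Column("id", Integer, primary_key=True),
    Column("name", String(64)),
    Column("sort_order", Integer, nullable=True),  # NULL means "no explicit order"
)

# ISNULL(sort_order) is 0 for explicitly ordered rows and 1 for NULLs, so NULLs sort last.
query = select(channels.c.id, channels.c.name).order_by(
    func.isnull(channels.c.sort_order), channels.c.sort_order)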
def create_po_req(curr_user):
    if not curr_user.is_admin() and not curr_user.is_allow_pullout():
        return ResponseMessage(False, message="Unauthorized user!").resp(), 401

    whse = Warehouses.query.filter_by(whsecode=curr_user.whse).first()
    # if whse.is_cutoff():
    #     return ResponseMessage(False, message="Cutoff is enable, please disable it!").resp(), 401
    if not whse.is_cutoff():
        return ResponseMessage(False, message="Cutoff is disable").resp(), 401

    date = request.args.get('date')

    if request.method == 'GET':
        try:
            if not curr_user.is_manager():
                whse_inv_case = case([(WhseInv.quantity != 0, 1)], else_=0)
                whse_inv = db.session.query(
                    WhseInv.item_code, WhseInv.item_code,
                    WhseInv.quantity, Items.uom).filter(
                    WhseInv.warehouse == curr_user.whse).outerjoin(
                    Items, Items.item_code == WhseInv.item_code).order_by(
                    whse_inv_case.desc(), WhseInv.item_code).all()

                whseinv_schema = WhseInvSchema(many=True)
                result = whseinv_schema.dump(whse_inv)
                return ResponseMessage(True, data=result).resp()

            elif curr_user.is_manager():
                po_req_header = PullOutHeaderRequest
                po_req_row = PullOutRowRequest

                sales_case = case([(po_req_header.user_type == 'sales', po_req_row.quantity)])
                auditor_case = case([(po_req_header.user_type == 'auditor', po_req_row.quantity)])

                pull_out = db.session.query(
                    po_req_row.item_code,
                    func.sum(func.isnull(sales_case, 0)).label('sales_count'),
                    func.sum(func.isnull(auditor_case, 0)).label('auditor_count'),
                    func.sum(
                        func.isnull(sales_case, 0) -
                        func.isnull(auditor_case, 0)).label('variance'),
                    po_req_row.uom
                ).filter(
                    and_(
                        cast(po_req_header.transdate, DATE) == date,
                        po_req_row.whsecode == curr_user.whse,
                        po_req_header.id == po_req_row.pulloutreq_id,
                        po_req_header.confirm == False)
                ).group_by(
                    po_req_row.item_code, po_req_row.uom
                ).having(
                    func.sum(
                        func.isnull(sales_case, 0) -
                        func.isnull(auditor_case, 0)) != 0
                ).all()

                po_req_schema = PullOutHeaderRequestSchema(many=True)
                result = po_req_schema.dump(pull_out)
                return ResponseMessage(True, data=result).resp()

        except (pyodbc.IntegrityError, exc.IntegrityError) as err:
            return ResponseMessage(False, message=f"{err}").resp(), 500
        except Exception as err:
            return ResponseMessage(False, message=f"{err}").resp(), 500
        finally:
            db.session.close()

    elif request.method == 'POST':
        try:
            # query the whse and check if the cutoff is true
            data = request.get_json()
            header = data['header']
            rows = data['rows']

            # add to headers
            header['created_by'] = curr_user.id
            header['updated_by'] = curr_user.id

            if curr_user.is_manager():
                header['user_type'] = 'manager'
            elif curr_user.is_auditor():
                header['user_type'] = 'auditor'
            elif curr_user.is_sales() and not curr_user.is_manager():
                header['user_type'] = 'sales'

            pending_req_po = PullOutHeaderRequest.query.filter(
                and_(
                    PullOutHeaderRequest.user_type == header['user_type'],
                    func.cast(PullOutHeaderRequest.transdate, DATE) == header['transdate'],
                    PullOutHeaderRequest.docstatus == 'O',
                    PullOutHeaderRequest.confirm == False)).first()

            if pending_req_po:
                return ResponseMessage(
                    False,
                    message="You have an entry that still pending!"
                ).resp(), 401

            # query the object type
            obj = ObjectType.query.filter_by(code='PORQ').first()
            # Check if has objtype
            if not obj:
                return ResponseMessage(
                    False, message="Object type not found!").resp(), 401

            # query the series
            series = Series.query.filter_by(whsecode=curr_user.whse,
                                            objtype=obj.objtype).first()
            # check if has series
            if not series:
                return ResponseMessage(False, message="Series not found!").resp(), 401
            # check that the next number does not exceed the end number
            if series.next_num + 1 > series.end_num:
                return ResponseMessage(
                    False,
                    message="Series number is greater than next num!"
                ).resp(), 401

            # construct reference
            reference = f"{series.code}-{obj.code}-{series.next_num}"

            # add to header
            header['series'] = series.id
            header['objtype'] = obj.objtype
            header['seriescode'] = series.code
            header['transnumber'] = series.next_num
            header['reference'] = reference

            # add 1 to next series
            series.next_num += 1

            po_req_header = PullOutHeaderRequest(**header)
            db.session.add_all([series, po_req_header])
            db.session.flush()

            for row in rows:
                # query the stock inventory
                whse_inv = WhseInv.query.filter_by(
                    warehouse=curr_user.whse,
                    item_code=row['item_code']).first()

                # check if the whse inv is less than the quantity to pull out;
                # if true then raise an error.
                if whse_inv.quantity < row['quantity']:
                    raise Exception(f"{row['item_code'].title()} below stock level!")

                # add to row
                row['whsecode'] = curr_user.whse
                row['objtype'] = po_req_header.objtype
                row['created_by'] = po_req_header.created_by
                row['updated_by'] = po_req_header.updated_by

                check = Check(**row)
                # check if valid
                if not check.itemcode_exist():
                    raise Exception("Invalid item code!")
                if not check.uom_exist():
                    raise Exception("Invalid uom!")

                po_req_row = PullOutRowRequest(pulloutreq_id=po_req_header.id, **row)
                db.session.add(po_req_row)

            db.session.commit()
            return ResponseMessage(True, message="Successfully added!").resp()

        except (pyodbc.IntegrityError, exc.IntegrityError) as err:
            db.session.rollback()
            return ResponseMessage(False, message=f"{err}").resp(), 500
        except Exception as err:
            db.session.rollback()
            return ResponseMessage(False, message=f"{err}").resp(), 500
        finally:
            db.session.close()
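# A minimal sketch of the conditional-aggregation (pivot) pattern from the manager
# branch above: a CASE with no ELSE yields NULL for non-matching rows, the two-argument
# ISNULL (SQL Server style, consistent with the pyodbc error handling) turns NULL into 0,
# and SUM then totals sales and auditor quantities separately. Model names are the ones
# assumed from the example; case() is the 1.x list style used throughout these snippets.
from sqlalchemy import case, func

sales_case = case([(PullOutHeaderRequest.user_type == 'sales', PullOutRowRequest.quantity)])
auditor_case = case([(PullOutHeaderRequest.user_type == 'auditor', PullOutRowRequest.quantity)])

sales_count = func.sum(func.isnull(sales_case, 0)).label('sales_count')
auditor_count = func.sum(func.isnull(auditor_case, 0)).label('auditor_count')
variance = func.sum(func.isnull(sales_case, 0) - func.isnull(auditor_case, 0)).label('variance')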
sql = """
    SELECT s.emp_no,
           s.from_date,
           s.to_date,
           s.salary,
           IF(ISNULL(s1.salary), 0, s1.salary) AS last_salary,
           (s.salary - IF(ISNULL(s1.salary), 0, s1.salary)) AS difference
    FROM salaries s
    LEFT JOIN salaries s1
           ON s.emp_no = s1.emp_no
          AND YEAR(s1.from_date) = YEAR(s.from_date) - 1
    WHERE s.emp_no = 10001
"""
sql_data = [(d.emp_no, d.from_date, d.to_date, d.salary, d.last_salary, d.difference)
            for d in session.execute(sql)]

'''Query using the SQLAlchemy approach'''
s1 = aliased(Salary)
s2 = aliased(Salary)
alchemy_data = (
    session.query(
        s1.emp_no, s1.from_date, s1.to_date, s1.salary,
        func.IF(func.isnull(s2.salary), 0, s2.salary).label("last_salary"),
        (s1.salary - func.IF(func.isnull(s2.salary), 0, s2.salary)).label("difference"))
    .outerjoin(s2, and_(s2.emp_no == s1.emp_no,
                        func.year(s2.from_date) == func.year(s1.from_date) - 1))
    .filter(s1.emp_no == 10001)
    .all()
)

'''Compare the two results; the printed value should be True'''
for d in zip(sql_data, alchemy_data):
    print(d)
print('Result of example 4: {}'.format(operator.eq(sql_data, alchemy_data)))
'''-------------------------------------------------------------------------------------------------'''

session.commit()
session.close()
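# On MySQL, IF(ISNULL(x), 0, x) as used twice above is a longhand IFNULL(x, 0); the
# dialect-neutral SQLAlchemy spelling is func.coalesce(x, 0). A small sketch of the
# same labeled columns, assuming the Salary model from the example:
from sqlalchemy import func
from sqlalchemy.orm import aliased

s1 = aliased(Salary)
s2 = aliased(Salary)
last_salary = func.coalesce(s2.salary, 0).label("last_salary")
difference = (s1.salary - func.coalesce(s2.salary, 0)).label("difference")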
def get(self, status):
    """Get the portfolio list data"""
    args = paginate_parser.parse_args()
    page_no = args['page_no']
    count = args['count']
    user_id = session.get('user_id')

    # Latest trade date for each portfolio
    date_latest_query = db.session.query(
        PortfolioValueDaily.pl_id,
        func.max(PortfolioValueDaily.trade_date).label('trade_date_max')
    ).group_by(PortfolioValueDaily.pl_id).subquery('date_latest')

    # Latest NAV for each portfolio
    nav_latest_query = db.session.query(
        PortfolioValueDaily.pl_id,
        PortfolioValueDaily.trade_date,
        PortfolioValueDaily.rr,
        PortfolioValueDaily.nav
    ).filter(
        PortfolioValueDaily.pl_id == date_latest_query.c.pl_id,
        PortfolioValueDaily.trade_date == date_latest_query.c.trade_date_max
    ).subquery('nav_latest')

    # Paginated query for portfolio info together with the latest NAV
    if status == 'my':
        filter_c = PortfolioInfo.create_user_id == user_id
    elif status == 'all':
        filter_c = or_(PortfolioInfo.access_type == 'public',
                       PortfolioInfo.create_user_id == user_id)
    elif status == 'star':
        # TODO: starred portfolios
        filter_c = not_(func.isnull(FavoriteCompare.update_time))
    else:
        filter_c = None

    if filter_c is None:
        raise KeyError('invalid status argument: status = %s' % status)
    else:
        pagination = PortfolioInfo.query.outerjoin(
            nav_latest_query,
            PortfolioInfo.pl_id == nav_latest_query.c.pl_id
        ).add_columns(
            nav_latest_query.c.trade_date,
            nav_latest_query.c.rr,
            nav_latest_query.c.nav,
        ).outerjoin(User).add_columns(User.username).outerjoin(
            FavoritePortfolio,
            and_(PortfolioInfo.pl_id == FavoritePortfolio.pl_id,
                 FavoritePortfolio.user_id == user_id)
        ).add_columns(
            func.if_(func.isnull(FavoritePortfolio.update_time), 0, 1).label('favorite')
        ).filter(filter_c).paginate(page_no, count)

    logger.debug('page %d / %d, %d / %d records',
                 pagination.page, pagination.pages,
                 len(pagination.items), pagination.total)

    ret_dic_list = [{
        'pl_id': data.PortfolioInfo.pl_id,
        'name': data.PortfolioInfo.name,
        'date_from': date_2_str(data.PortfolioInfo.date_from),
        'date_to': date_2_str(data.PortfolioInfo.date_to),
        'status': data.PortfolioInfo.status,
        'desc': data.PortfolioInfo.desc,
        'create_user_id': data.PortfolioInfo.create_user_id,
        'username': data.username,
        'favorite': data.favorite,
        'trade_date': date_2_str(data.trade_date),
        'rr': try_2_float(data.rr),
        'nav': try_2_float(data.nav),
        'access_type': data.PortfolioInfo.access_type,
    } for data in pagination.items]

    ret_dic = {
        'page': pagination.page,
        'pages': pagination.pages,
        'count': len(pagination.items),
        'total': pagination.total,
        'has_prev': pagination.has_prev,
        'has_next': pagination.has_next,
        'data': ret_dic_list,
    }
    return ret_dic
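# The 'favorite' column above presumably renders MySQL's IF(ISNULL(update_time), 0, 1),
# flagging whether the outer-joined FavoritePortfolio row exists for this user. A hedged,
# dialect-neutral sketch of the same flag using the 1.x-style case() seen elsewhere in
# these snippets (model names are assumed from the example):
from sqlalchemy import and_, case

favorite_flag = case(
    [(FavoritePortfolio.update_time.isnot(None), 1)],
    else_=0,
).label('favorite')

query = (
    PortfolioInfo.query
    .outerjoin(FavoritePortfolio,
               and_(PortfolioInfo.pl_id == FavoritePortfolio.pl_id,
                    FavoritePortfolio.user_id == user_id))
    .add_columns(favorite_flag)
)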
def get(self, status):
    """Get the comparison list data (paginated)"""
    args = paginate_parser.parse_args()
    page_no = args['page_no']
    count = args['count']
    user_id = session.get('user_id')
    logger.debug('get_cmp_list user_id:%s', user_id)

    if status == 'my':
        filter_c = PortfolioCompareInfo.create_user_id == user_id
        having_c = None
    elif status == 'all':
        filter_c = or_(PortfolioCompareInfo.create_user_id == user_id,
                       PortfolioCompareInfo.access_type == 'public')
        having_c = None
    elif status == 'star':
        filter_c = and_(
            or_(PortfolioCompareInfo.create_user_id == user_id,
                PortfolioCompareInfo.access_type == 'public'),
            not_(func.isnull(FavoriteCompare.update_time)))
        having_c = None
    elif status == 'verified':
        filter_c = or_(PortfolioCompareInfo.create_user_id == user_id,
                       PortfolioCompareInfo.access_type == 'public')
        having_c = column('complete_rate') >= 1
    elif status == 'unverified':
        filter_c = or_(PortfolioCompareInfo.create_user_id == user_id,
                       PortfolioCompareInfo.access_type == 'public')
        having_c = or_(column('complete_rate').is_(None),
                       column('complete_rate') < 1)
    else:
        raise KeyError('invalid status argument: status = %s' % status)

    # Organize the data
    # logger.debug("data_list_df len:%d", data_list_df.shape[0])
    # data_list_df = data_list_df.where(data_list_df.notna(), None)
    # data_list = data_list_df.to_dict('record')
    # data_table_dic = {'data': data_list}
    # logger.debug(data_table_dic)
    query = PortfolioCompareInfo.query.outerjoin(
        PortfolioCompareResult
    ).group_by(PortfolioCompareResult.cmp_id).add_columns(
        func.count().label('tot_count'),
        func.min(PortfolioCompareResult.trade_date).label('trade_date_min'),
        func.max(PortfolioCompareResult.trade_date).label('trade_date_max'),
        func.sum(PortfolioCompareResult.result).label('fit_count'),
        (func.sum(PortfolioCompareResult.result) / func.count()).label('fit_rate'),
        ((func.max(PortfolioCompareResult.trade_date) - PortfolioCompareInfo.date_from) /
         (PortfolioCompareInfo.date_to - PortfolioCompareInfo.date_from)
         ).label('complete_rate')
    ).outerjoin(User).add_columns(User.username).outerjoin(
        FavoriteCompare,
        and_(PortfolioCompareInfo.cmp_id == FavoriteCompare.cmp_id,
             FavoriteCompare.user_id == user_id)
    ).add_columns(
        func.if_(func.isnull(FavoriteCompare.update_time), 0, 1).label('favorite')
    ).filter(and_(filter_c, PortfolioCompareInfo.is_del == 0))

    if having_c is None:
        pagination = query.paginate(page_no, count)
    else:
        pagination = query.having(having_c).paginate(page_no, count)

    logger.debug('page %d / %d, %d / %d records',
                 pagination.page, pagination.pages,
                 len(pagination.items), pagination.total)

    ret_dic_list = [{
        'cmp_id': data.PortfolioCompareInfo.cmp_id,
        'name': data.PortfolioCompareInfo.name,
        'status': data.PortfolioCompareInfo.status,
        'params': data.PortfolioCompareInfo.params,
        'desc': data.PortfolioCompareInfo.desc,
        'date_from': date_2_str(data.PortfolioCompareInfo.date_from),
        'date_to': date_2_str(data.PortfolioCompareInfo.date_to),
        'trade_date_min': date_2_str(data.trade_date_min),
        'trade_date_max': date_2_str(data.trade_date_max),
        'create_user_id': data.PortfolioCompareInfo.create_user_id,
        'username': data.username,
        'favorite': data.favorite,
        'complete_rate': try_2_float(data.complete_rate),
    } for data in pagination.items]

    ret_dic = {
        'page': pagination.page,
        'pages': pagination.pages,
        'count': len(pagination.items),
        'total': pagination.total,
        'has_prev': pagination.has_prev,
        'has_next': pagination.has_next,
        'data': ret_dic_list,
    }
    return ret_dic
def create_inv_count(curr_user):
    if not curr_user.is_admin() and not curr_user.is_allow_ending():
        return ResponseMessage(False, message="Unauthorized user!").resp(), 401

    whse = Warehouses.query.filter_by(whsecode=curr_user.whse).first()
    if not whse.is_cutoff():
        return ResponseMessage(False, message="Cutoff is disable").resp(), 401

    date = request.args.get('date')

    if request.method == 'GET':
        try:
            if not curr_user.is_manager():
                whse_inv_case = case([(WhseInv.quantity != 0, 1)], else_=0)
                inv = db.session.query(
                    WhseInv.item_code, WhseInv.item_code,
                    WhseInv.quantity, Items.uom).filter(
                    WhseInv.warehouse == curr_user.whse).outerjoin(
                    Items, Items.item_code == WhseInv.item_code).order_by(
                    whse_inv_case.desc(), WhseInv.item_code).all()

                inv_schema = WhseInvSchema(many=True)
                result = inv_schema.dump(inv)
                return ResponseMessage(True, data=result).resp()

            elif curr_user.is_manager():
                count_header = CountingInventoryHeader
                count_row = CountingInventoryRow

                sales_case = case([(count_header.user_type == 'sales', count_row.actual_count)])
                auditor_case = case([(count_header.user_type == 'auditor', count_row.actual_count)])

                inv = db.session.query(
                    count_row.item_code,
                    WhseInv.quantity.label('quantity'),
                    func.sum(func.isnull(sales_case, 0)).label('sales_count'),
                    func.sum(func.isnull(auditor_case, 0)).label('auditor_count'),
                    func.sum(
                        func.isnull(sales_case, 0) -
                        func.isnull(auditor_case, 0)).label('variance'),
                    count_row.uom
                ).outerjoin(
                    WhseInv,
                    and_(count_row.whsecode == WhseInv.warehouse,
                         WhseInv.item_code == count_row.item_code)
                ).filter(
                    and_(
                        cast(count_header.transdate, DATE) == date,
                        count_row.whsecode == curr_user.whse,
                        count_header.id == count_row.counting_id,
                        count_header.confirm == False)
                ).group_by(
                    count_row.item_code, WhseInv.quantity, count_row.uom
                ).having(
                    func.sum(
                        func.isnull(sales_case, 0) -
                        func.isnull(auditor_case, 0)) != 0
                ).all()

                inv_schema = CountingInventoryRowSchema(many=True)
                result = inv_schema.dump(inv)
                return ResponseMessage(True, data=result).resp()

        except (pyodbc.IntegrityError, exc.IntegrityError) as err:
            return ResponseMessage(False, message=f"{err}").resp(), 500
        except Exception as err:
            return ResponseMessage(False, message=f"{err}").resp(), 500

    elif request.method == 'POST':
        try:
            # query the whse and check if the cutoff is true
            data = request.get_json()
            header = data['header']
            rows = data['rows']

            # add to headers
            header['created_by'] = curr_user.id
            header['updated_by'] = curr_user.id

            if curr_user.is_manager():
                header['user_type'] = 'manager'
            elif curr_user.is_auditor():
                header['user_type'] = 'auditor'
            elif curr_user.is_sales() and not curr_user.is_manager():
                header['user_type'] = 'sales'

            if CountingInventoryHeader.query.filter(
                    and_(
                        CountingInventoryHeader.user_type == header['user_type'],
                        func.cast(CountingInventoryHeader.transdate, DATE) == header['transdate'],
                        CountingInventoryHeader.docstatus == 'C',
                        CountingInventoryHeader.confirm == False)).first():
                return ResponseMessage(
                    False,
                    message="You're already added ending inventory this day"
                ).resp(), 401

            obj = ObjectType.query.filter_by(code='ICNT').first()
            # Check if has objtype
            if not obj:
                return ResponseMessage(
                    False, message="Object type not found!").resp(), 401

            # query the series
            series = Series.query.filter_by(whsecode=curr_user.whse,
                                            objtype=obj.objtype).first()
            # check if has series
            if not series:
                return ResponseMessage(False, message="Series not found!").resp(), 401
            # check that the next number does not exceed the end number
            if series.next_num + 1 > series.end_num:
                return ResponseMessage(
                    False,
                    message="Series number is greater than next num!"
                ).resp(), 401

            # construct reference
            reference = f"{series.code}-{obj.code}-{series.next_num}"

            # add to header
            header['series'] = series.id
            header['objtype'] = obj.objtype
            header['seriescode'] = series.code
            header['transnumber'] = series.next_num
            header['reference'] = reference

            # add 1 to next series
            series.next_num += 1

            inv_count_header = CountingInventoryHeader(**header)
            db.session.add_all([series, inv_count_header])
            db.session.flush()

            for row in rows:
                row['whsecode'] = curr_user.whse

                check = Check(**row)
                # check if valid
                if not check.itemcode_exist():
                    raise Exception("Invalid item code!")

                inv_count_row = CountingInventoryRow(counting_id=inv_count_header.id, **row)
                inv_count_row.objtype = inv_count_header.objtype
                inv_count_row.created_by = inv_count_header.created_by
                inv_count_row.updated_by = inv_count_header.updated_by
                db.session.add(inv_count_row)

            db.session.commit()
            return ResponseMessage(True, message="Successfully added!").resp()

        except (pyodbc.IntegrityError, exc.IntegrityError) as err:
            db.session.rollback()
            return ResponseMessage(False, message=f"{err}").resp(), 500
        except Exception as err:
            db.session.rollback()
            return ResponseMessage(False, message=f"{err}").resp(), 500
        finally:
            db.session.close()
def get_lpuid(self, c_ogrn=''):
    # host code: SELECT ISNULL(MIN(LPUID), 0) FROM oms_LPU WHERE C_OGRN = '@C_OGRN' AND StLPU = '1';
    select = session_mis.query(
        func.isnull(func.min(OmsLpuTable.lpuid), 0).label('lpuid'))
    return select.filter(OmsLpuTable.c_ogrn == c_ogrn).filter(
        OmsLpuTable.stlpu == '1').all()[0].lpuid
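# Here the two-argument func.isnull(expr, 0) renders SQL Server's ISNULL(expr, replacement),
# matching the commented host query. A sketch of the same default-to-zero lookup written
# with the dialect-neutral coalesce and scalar(); session_mis and OmsLpuTable are the
# names assumed from the example:
from sqlalchemy import func

def get_lpuid_portable(session_mis, c_ogrn=''):
    # COALESCE(MIN(LPUID), 0) is the portable form of the ISNULL above.
    return (
        session_mis.query(func.coalesce(func.min(OmsLpuTable.lpuid), 0))
        .filter(OmsLpuTable.c_ogrn == c_ogrn, OmsLpuTable.stlpu == '1')
        .scalar()
    )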