def get_max_value(cls, store, attr):
    """Get the maximum value for a given attr

    On text columns, trying to find the max value for them using MAX()
    on postgres would result in some problems, like '9' being considered
    greater than '10' (because the comparison is done from left to
    right). This will 0-"pad" the values for the comparison, making it
    compare the way we want. Note that because of that, in the example
    above, it would return '09' instead of '9'

    :param store: a store
    :param attr: the attribute to find the max value for
    :returns: the maximum value for the attr
    """
    cls.validate_attr(attr, expected_type=UnicodeCol)

    # Subselect computing the length of the longest value currently
    # stored for attr; aliased so it can be joined into the query below.
    max_length = Alias(
        Select(columns=[Alias(Max(CharLength(attr)), 'max_length')],
               tables=[cls]),
        '_max_length')
    # Using LPad with max_length will workaround most of the cases where
    # the string comparison fails. For example, the common case would
    # consider '9' to be greater than '10'. We could test just strings
    # with length equal to max_length, but then '010' would be greater
    # than '001' would be greater than '10' (that would be excluded from
    # the comparison). By doing lpad, '09' is lesser than '10' and '001'
    # is lesser than '010', working around those cases
    max_batch = store.using(cls, max_length).find(cls).max(
        LPad(attr, Field('_max_length', 'max_length'), u'0'))

    # Make the api consistent and return an empty string instead of None
    # if there's no batch registered on the database
    return max_batch or u''
def test_quoting(self):
    # FIXME "with'quote" should be in the list below, but it doesn't
    # work because it breaks the parameter mark translation.
    names = ["with space", 'with`"escape', "SELECT"]
    for name in names:
        token = SQLToken(name)
        # Use the reserved name both as an output column and as the
        # alias of a constant in a subselect, forcing it to be quoted
        # in both positions.
        inner = Select(Alias(1, token))
        expr = Select(token, tables=Alias(inner))
        result = self.connection.execute(expr)
        self.assertEquals(result.get_one(), (1, ))
def compile_set_expr_oracle(compile, expr, state):
    """Compile a set expression for Oracle.

    For MINUS expressions with an ORDER BY, the ordering cannot refer to
    arbitrary expressions, so the set statement is wrapped in an outer
    SELECT * FROM (...) that carries the ORDER BY/LIMIT/OFFSET, with the
    inner columns aliased so the ORDER BY can reference them by name.
    Everything else falls through to the generic compile_set_expr.
    """
    if isinstance(expr, Minus):
        # Build new set expression without arguments (order_by, etc).
        new_expr = expr.__class__()
        new_expr.exprs = expr.exprs
        new_expr.all = expr.all
        if expr.order_by is not Undef:
            # Make sure that state.aliases isn't None, since we want them to
            # compile our order_by statement below.
            no_aliases = state.aliases is None
            if no_aliases:
                state.push("aliases", {})
            aliases = {}
            for subexpr in expr.exprs:
                if isinstance(subexpr, Select):
                    columns = subexpr.columns
                    if not isinstance(columns, (tuple, list)):
                        columns = [columns]
                    else:
                        columns = list(columns)
                    # Alias plain columns so ORDER BY can name them;
                    # reuse any aliases the caller already supplied.
                    for i, column in enumerate(columns):
                        if column not in aliases:
                            if isinstance(column, Column):
                                aliases[column] = columns[i] = Alias(column)
                            elif isinstance(column, Alias):
                                aliases[column.expr] = column
                    subexpr.columns = columns
            aliases.update(state.aliases)
            state.aliases = aliases
            aliases = None
        set_stmt = SQLRaw('(%s)' % compile(expr.exprs, state, join=expr.oper))
        if expr.order_by is not Undef:
            # Build order_by statement, using aliases.
            state.push("context", COLUMN_NAME)
            order_by_stmt = SQLRaw(compile(expr.order_by, state))
            state.pop()
        else:
            order_by_stmt = Undef
        # Build wrapping select statement.
        select = Select(SQLRaw("*"),
                        tables=Alias(set_stmt),
                        limit=expr.limit,
                        offset=expr.offset,
                        order_by=order_by_stmt)
        return compile_select(compile, select, state)
    return compile_set_expr(compile, expr, state)
def _create_info_query(self, event_query_ta, meta_ta):
    """Build the info query: events joined to metadata, one row per owner."""
    join = LeftJoin(
        event_query_ta, meta_ta,
        on=(meta_ta.entity_id == event_query_ta.entity_id))
    columns = [
        Alias(None, 'entity_id'),
        Alias(None, 'TIMESTAMP'),
        Count().as_('active_entity_count'),
        meta_ta.owner_id.as_('owner_id'),
    ]
    return Select(
        columns=columns,
        tables=join,
        group_by=[meta_ta.owner_id])
class AccountView(Viewable):
    """Viewable combining an Account with the summed value of its source
    (outgoing) and dest (incoming) transactions, taken from the aliased
    _SourceSum/_DestSum subselects."""

    account = Account

    id = Account.id
    parent_id = Account.parent_id
    account_type = Account.account_type
    # NOTE(review): dest_account_id maps to Account.parent_id, same as
    # parent_id above -- looks suspicious, confirm this is intentional.
    dest_account_id = Account.parent_id
    description = Account.description
    code = Account.code

    # Values coming from the subselects joined in `tables` below.
    source_value = Field('source_sum', 'value')
    dest_value = Field('dest_sum', 'value')

    tables = [
        Account,
        LeftJoin(Alias(_SourceSum, 'source_sum'),
                 Field('source_sum', 'source_account_id') == Account.id),
        LeftJoin(Alias(_DestSum, 'dest_sum'),
                 Field('dest_sum', 'account_id') == Account.id),
    ]

    @property
    def parent_account(self):
        """Get the parent account for this view"""
        return self.store.get(Account, self.parent_id)

    def matches(self, account_id):
        """Returns true if the account_id matches this account or its parent"""
        if self.id == account_id:
            return True
        if self.parent_id and self.parent_id == account_id:
            return True
        return False

    def get_combined_value(self):
        """Returns the combined value of incoming and outgoing transactions"""
        if not self.dest_value and not self.source_value:
            return 0
        elif not self.dest_value:
            return -self.source_value
        elif not self.source_value:
            return self.dest_value
        else:
            return self.dest_value - self.source_value

    def __repr__(self):
        return '<AccountView %s>' % (self.description, )
def get_top_participants(self, list_name, start, end, limit=None):
    """
    Return the most active participants between two given dates.

    :param list_name: The name of the mailing list in which this email
        should be searched.
    :param start: A datetime object representing the starting date of
        the interval to query.
    :param end: A datetime object representing the ending date of the
        interval to query.
    :param limit: Limit the number of participants to return. If None or
        not supplied, return them all.
    :returns: The list of (sender_name, sender_email, count) tuples,
        ordered by descending message count.
    """
    number = Alias(Count(Email.sender_email), "number")
    part = self.db.find(
        (Email.sender_name, Email.sender_email, number),
        And(
            Email.list_name == unicode(list_name),
            Email.date >= start,
            Email.date < end,
        )).group_by(Email.sender_email,
                    Email.sender_name).order_by(Desc(number))
    if limit is not None:
        part = part.config(limit=limit)
    return list(part)
def test_expressions_in_union_order_by(self):
    # The following statement breaks in postgres:
    #     SELECT 1 AS id UNION SELECT 1 ORDER BY id+1;
    # With the error:
    #     ORDER BY on a UNION/INTERSECT/EXCEPT result must
    #     be on one of the result columns
    column = SQLRaw("1")
    # Reset the global alias counter so the auto-generated wrapper
    # alias in the compiled SQL is predictably "_1".
    Alias.auto_counter = 0
    alias = Alias(column, "id")
    expr = Union(Select(alias),
                 Select(column),
                 order_by=alias + 1,
                 limit=1,
                 offset=1,
                 all=True)

    state = State()
    statement = compile(expr, state)
    self.assertEquals(
        statement, 'SELECT * FROM '
        '((SELECT 1 AS id) UNION ALL (SELECT 1)) AS "_1" '
        'ORDER BY id+? LIMIT 1 OFFSET 1')
    self.assertVariablesEqual(state.parameters, [Variable(1)])

    result = self.connection.execute(expr)
    self.assertEquals(result.get_one(), (1, ))
def _create_event_query(self, where, having):
    """Build the query over daily entity events, filtered and grouped
    per entity."""
    events = Table('entity_events_daily').as_('e')
    conditions = (
        (events.site_id == P.site_id)
        & (events.category_id == P.category_id)
        & (events.timestamp >= P.min_timestamp)
        & (events.timestamp < P.max_timestamp)
        & In(events.event_id, where))
    return Select(
        columns=[
            Alias(None, 'TIMESTAMP'),
            Alias(None, 'user_id'),
            events.entity_id.as_('entity_id'),
        ],
        tables=events,
        where=conditions,
        group_by=[events.entity_id],
        having=((C.created_entity_count >= P.min_entity_creation_count)
                & (having > 0)))
def collect_link_statistics(store):
    """Collect last-week sales statistics and return them JSON-encoded:
    totals, profit margin, best sellers, a daily sales chart and a
    timeline."""
    one_week = datetime.date.today() - datetime.timedelta(days=7)
    query = Date(Sale.confirm_date) >= one_week

    # Profit Margin
    # Per-sale cost computed in a subselect, then joined against Sale.
    item_cost = Alias(
        Select(columns=[
            SaleItem.sale_id,
            Alias(Sum(SaleItem.quantity * SaleItem.average_cost), 'cost')
        ],
               tables=SaleItem,
               group_by=[SaleItem.sale_id]), 'item_cost')
    # Margin as a percentage: (revenue / cost - 1) * 100.
    column = ((Sum(Sale.total_amount) / Sum(Field('item_cost', 'cost')) - 1) *
              100)
    tables = [Sale, Join(item_cost, Field('item_cost', 'sale_id') == Sale.id)]
    profit_margin = store.using(*tables).find(column, query).one()

    # Sale chart: total amount per day.
    columns = (DateTrunc(u'day', Sale.confirm_date), Sum(Sale.total_amount))
    sale_data = store.find(columns, query)
    sale_data.group_by(columns[0])

    # Best selling: quantity and revenue per sellable, highest revenue first.
    tables = [
        Sellable,
        Join(SaleItem, SaleItem.sellable_id == Sellable.id),
        Join(Sale, SaleItem.sale_id == Sale.id)
    ]
    columns = (Sellable.description, Sum(SaleItem.quantity),
               Sum(SaleItem.quantity * SaleItem.price))
    product_data = store.using(*tables).find(columns,
                                             query).order_by(-columns[2])
    product_data.group_by(Sellable.description)

    data = dict(sales_total=store.find(Sale, query).sum(Sale.total_amount),
                sales_count=store.find(Sale, query).count(),
                clients_served=store.find(Sale, query).count(Sale.client_id,
                                                             distinct=True),
                profit_margin=format(float(profit_margin), '.2f'),
                best_selling=list(product_data),
                sales_chart=list(sale_data),
                timeline=_collect_timeline(store),
                timestamp=datetime.datetime.now())
    return json.dumps(data, default=default)
def numOwnersOfDevice(
        self, bus=None, vendor_id=None, product_id=None, driver_name=None,
        package_name=None, distro_target=None):
    """See `IHWSubmissionSet`."""
    store = IStore(HWSubmission)
    with_device_select, all_select = self._submissionsSubmitterSelects(
        HWSubmission.raw_emailaddress, bus, vendor_id, product_id,
        driver_name, package_name, distro_target)

    def count_rows(subselect):
        # Count the rows of the given subselect, aliased as 'addresses'.
        result = store.execute(
            Select(
                columns=[Count()],
                tables=[Alias(subselect, 'addresses')]))
        return result.get_one()[0]

    return (count_rows(with_device_select), count_rows(all_select))
def test_expressions_in_union_in_union_order_by(self):
    # A union nested inside another union, both ordered by an
    # expression over an aliased column, must still execute.
    one = SQLRaw("1")
    id_alias = Alias(one, "id")
    inner = Union(Select(id_alias), Select(one),
                  order_by=id_alias + 1, limit=1, offset=1, all=True)
    outer = Union(inner, inner, order_by=id_alias + 1, all=True)
    rows = self.connection.execute(outer)
    self.assertEquals(rows.get_all(), [(1, ), (1, )])
def extractTrendingHashtags(store, limit=10, duration=None):
    """Extract information about trending hashtags and store it in FluidDB.

    @param store: The storm store to query and to save our result to.
    @param limit: Optionally, the number of objects to retrieve.
    @param duration: Optionally, the recent time period to look at when
        determining which hashtags are trending.  Default is 28 days.

    The storm query below results in SQL like:

      SELECT COUNT(DISTINCT comments.object_id) AS count,
             about_tag_values.value,
             array_agg(ROW(comments.username, comments.creation_time))
      FROM about_tag_values, comment_object_link, comments
      WHERE about_tag_values.value LIKE '#%' AND
            about_tag_values.object_id = comment_object_link.object_id AND
            comments.object_id = comment_object_link.comment_id AND
            comments.creation_time >= '2012-11-09 07:42:40'::TIMESTAMP AND
            CHAR_LENGTH(about_tag_values.value) >= 2
      GROUP BY about_tag_values.value
      ORDER BY count DESC
      LIMIT 10
    """
    duration = timedelta(days=28) if duration is None else duration
    startTime = datetime.utcnow() - duration
    count = Alias(Count(Comment.objectID, distinct=True))
    result = store.find(
        (count, AboutTagValue.value,
         Func('array_agg', Row(Comment.username, Comment.creationTime))),
        Like(AboutTagValue.value, u'#%'),
        AboutTagValue.objectID == CommentObjectLink.objectID,
        Comment.objectID == CommentObjectLink.commentID,
        Comment.creationTime >= startTime,
        # A hashtag needs at least one character after the '#'.
        Func('CHAR_LENGTH', AboutTagValue.value) >= 2)
    result.group_by(AboutTagValue.value)
    result.order_by(Desc(count))
    result.config(limit=limit)
    data = [{
        'count': count,
        'usernames': _sortUsernames(usernames),
        'value': hashtag
    } for count, hashtag, usernames in result]
    # Persist the result as a JSON tag value on the fluidinfo.com object.
    user = getUser(u'fluidinfo.com')
    tagValues = TagValueAPI(user)
    objectID = ObjectAPI(user).create(u'fluidinfo.com')
    tagValues.set(
        {objectID: {
            u'fluidinfo.com/trending-hashtags': json.dumps(data)
        }})
    store.commit()
def reportErrorSummary(store):
    """Get a count of errors grouped by exception class and message.

    @param store: The C{Store} to fetch data from.
    @return: A list of C{(count, exception-class, message)} 3-tuples.
        The count is automatically converted to a string.
    """
    occurrences = Alias(Count())
    rows = store.find(
        (occurrences, ErrorLine.exceptionClass, ErrorLine.message))
    rows = rows.group_by(ErrorLine.exceptionClass, ErrorLine.message)
    rows = rows.order_by(Desc(occurrences), ErrorLine.exceptionClass)
    return [(str(total), klass, message) for total, klass, message in rows]
def reportErrorTracebacks(store):
    """Count errors, with tracebacks, grouped by exception class and
    message.

    @param store: The C{Store} to fetch data from.
    @return: A result set of C{(count, exception-class, message,
        traceback)} 4-tuples, ordered by descending count and then
        exception class.
    """
    occurrences = Alias(Count())
    rows = store.find(
        (occurrences, ErrorLine.exceptionClass, ErrorLine.message,
         ErrorLine.traceback))
    grouped = rows.group_by(ErrorLine.exceptionClass, ErrorLine.message)
    return grouped.order_by(Desc(occurrences), ErrorLine.exceptionClass)
class ProductFullStockItemView(ProductFullStockView):
    # ProductFullStockView already joins with a 1 to Many table (Sellable
    # with ProductStockItem).
    #
    # This is why we must join PurchaseItem (another 1 to many table) in a
    # subquery

    # Storable stock limits for this product.
    minimum_quantity = Storable.minimum_quantity
    maximum_quantity = Storable.maximum_quantity
    # 'to_receive' from the _PurchaseItemTotal subselect; Coalesce turns
    # the NULL of a missing row (LeftJoin) into 0.
    to_receive_quantity = Coalesce(Field('_purchase_total', 'to_receive'), 0)
    # How far the current stock is above (negative: below) the minimum.
    difference = ProductFullStockView.stock - Storable.minimum_quantity

    # Copy the parent tables and add the purchase-total subselect join.
    tables = ProductFullStockView.tables[:]
    tables.append(
        LeftJoin(Alias(_PurchaseItemTotal, '_purchase_total'),
                 Field('_purchase_total', 'sellable_id') == Sellable.id))
def getProductsWithInfo(num_products=None):
    """See `IBranchCloud`."""
    # Wrapping the column in a raw "distinct" call -- presumably to get
    # COUNT(DISTINCT revision_author_id); confirm against the Storm
    # version in use.
    distinct_revision_author = Func("distinct",
                                    RevisionCache.revision_author_id)
    commits = Alias(Count(RevisionCache.revision_id))
    # Only consider revisions from the last 30 days.
    epoch = datetime.now(pytz.UTC) - timedelta(days=30)
    # It doesn't matter if this query is even a whole day out of date, so
    # use the slave store.
    result = ISlaveStore(RevisionCache).find(
        (Product.name, commits, Count(distinct_revision_author),
         Max(RevisionCache.revision_date)),
        RevisionCache.product == Product.id, Not(RevisionCache.private),
        RevisionCache.revision_date >= epoch)
    result = result.group_by(Product.name)
    result = result.order_by(Desc(commits))
    if num_products:
        result.config(limit=num_products)
    return result
def compile_set_expr_postgres(compile, expr, state):
    """Compile a set expression (UNION/INTERSECT/EXCEPT) for postgres,
    wrapping it in an outer SELECT when it carries an ORDER BY."""
    if expr.order_by is not Undef:
        # The following statement breaks in postgres:
        #     SELECT 1 AS id UNION SELECT 1 ORDER BY id+1
        # With the error:
        #     ORDER BY on a UNION/INTERSECT/EXCEPT result must
        #     be on one of the result columns
        # So we transform it into something close to:
        #     SELECT * FROM (SELECT 1 AS id UNION SELECT 1) AS a ORDER BY id+1

        # Build new set expression without arguments (order_by, etc).
        new_expr = expr.__class__()
        new_expr.exprs = expr.exprs
        new_expr.all = expr.all

        # Make sure that state.aliases isn't None, since we want them to
        # compile our order_by statement below.
        no_aliases = state.aliases is None
        if no_aliases:
            state.push("aliases", {})

        # Build set expression, collecting aliases.
        set_stmt = SQLRaw("(%s)" % compile_set_expr(compile, new_expr, state))

        # Build order_by statement, using aliases.
        state.push("context", COLUMN_NAME)
        order_by_stmt = SQLRaw(compile(expr.order_by, state))
        state.pop()

        # Discard aliases, if they were not being collected previously.
        if no_aliases:
            state.pop()

        # Build wrapping select statement.
        select = Select(SQLRaw("*"),
                        tables=Alias(set_stmt),
                        limit=expr.limit,
                        offset=expr.offset,
                        order_by=order_by_stmt)
        return compile_select(compile, select, state)
    else:
        return compile_set_expr(compile, expr, state)
                                        SellableTaxConstant)
from stoqlib.domain.stockdecrease import (StockDecrease, StockDecreaseItem)
from stoqlib.lib.decorators import cached_property
from stoqlib.lib.dateutils import localnow
from stoqlib.lib.validators import is_date_in_interval

# Use a subselect to count the number of items, because that takes a lot less
# time (since it doesn't have a huge GROUP BY clause).
# Note that there are two subselects possible. The first should be used when the
# viewable is queried without the branch and the second when it is queried with
# the branch.
_StockSummary = Alias(
    Select(columns=[
        ProductStockItem.storable_id,
        Alias(Sum(ProductStockItem.quantity), 'stock'),
        Alias(Sum(ProductStockItem.quantity * ProductStockItem.stock_cost),
              'total_stock_cost')
    ],
           tables=[ProductStockItem],
           group_by=[ProductStockItem.storable_id]),
    '_stock_summary')

# Same summary additionally grouped by branch; reuses the inner select's
# columns and tables from _StockSummary.
_StockBranchSummary = Alias(
    Select(columns=_StockSummary.expr.columns + [ProductStockItem.branch_id],
           tables=_StockSummary.expr.tables[:],
           group_by=_StockSummary.expr.group_by + [ProductStockItem.branch_id]),
    '_stock_summary')


class ProductFullStockView(Viewable):
    """Stores information about products.

    This view is used to query stock information on a certain branch.
def as_(self, name=Undef):
    """Return an Alias of this expression under *name*.

    When *name* is omitted (left as Undef) the alias gets an
    auto-generated name at compile time.
    """
    return Alias(self, name)
def compile_select_oracle(compile, select, state):
    """Compile a Select for Oracle.

    Oracle has no LIMIT/OFFSET; pagination is emulated by wrapping the
    statement in outer SELECTs filtered on the ROWNUM pseudocolumn.
    An ORDER BY additionally forces a wrapping select (with aliased
    columns) so ROWNUM is applied after the ordering.
    """
    limit = select.limit
    offset = select.offset
    # make sure limit is Undef'ed
    select.offset = select.limit = Undef

    if select.default_tables is Undef:
        # Oracle requires a FROM clause; fall back to the dummy table.
        select.default_tables = ['DUAL']

    if select.order_by is not Undef:
        # copied from expr.py's compile_set_expr
        aliases = {}
        columns = select.columns
        if not isinstance(columns, (tuple, list)):
            columns = [columns]
        else:
            columns = list(columns)
        for i, column in enumerate(columns):
            if column not in aliases:
                if isinstance(column, Column):
                    aliases[column] = columns[i] = Alias(column)
                elif isinstance(column, Alias):
                    aliases[column.expr] = column
        select.columns = columns
        # /copied from expr.py's compile_set_expr
        stmt = SQLRaw("(%s)" % compile_select(compile, select, state))
        stmt_alias = Alias(stmt)
        # In order to force the alias to generate its auto-name.
        stmt_alias.get_name(state)
        select = Select(SQLRaw('*'), tables=stmt_alias)

    if (limit is not Undef) and (offset is not Undef):
        # Both limit and offset: filter ROWNUM <= limit+offset in an
        # inner select, then ROWNUM > offset in an outer one.
        rownum_alias = Alias(SQLRaw('ROWNUM'))
        # if we have an SQLRaw here that is because we are dealing
        # with a subquery
        if isinstance(select.columns, SQLRaw):
            select.columns = [
                SQLRaw('"' + select.tables.name + '".*'), rownum_alias
            ]
        else:
            select.columns.append(rownum_alias)
        where_expr = Le(SQLRaw('ROWNUM'), limit + offset)
        if select.where is Undef:
            select.where = where_expr
        else:
            select.where = And(select.where, where_expr)
        stmt = SQLRaw("(%s)" % compile_select(compile, select, state))
        stmt_alias = Alias(stmt)
        # In order to force the alias to generate its auto-name.
        stmt_alias.get_name(state)
        select = Select(SQLRaw('*'),
                        tables=stmt_alias,
                        where=Gt(rownum_alias, offset))
    elif limit is not Undef:
        # Limit only: a plain ROWNUM <= limit condition suffices.
        expr = Le(SQLRaw('ROWNUM'), limit)
        if select.where is Undef:
            select.where = expr
        else:
            select.where = And(select.where, expr)
    elif offset is not Undef:
        # Offset only: expose ROWNUM, then skip rows in an outer select.
        rownum_alias = Alias(SQLRaw('ROWNUM'))
        # if we have an SQLRaw here that is because we are dealing
        # with a subquery
        if isinstance(select.columns, SQLRaw):
            select.columns = [
                SQLRaw('"' + select.tables.name + '".*'), rownum_alias
            ]
        else:
            select.columns.append(rownum_alias)
        stmt = SQLRaw("(%s)" % compile_select(compile, select, state))
        stmt_alias = Alias(stmt)
        # In order to force the alias to generate its auto-name.
        stmt_alias.get_name(state)
        select = Select(SQLRaw('*'),
                        tables=stmt_alias,
                        where=Gt(rownum_alias, offset))

    return compile_select(compile, select, state)
from stoqlib.domain.payment.payment import Payment, PaymentChangeHistory
from stoqlib.domain.payment.renegotiation import PaymentRenegotiation
from stoqlib.domain.person import Person, Branch, Company
from stoqlib.domain.purchase import PurchaseOrder
from stoqlib.domain.receiving import (PurchaseReceivingMap, ReceivingInvoice,
                                      ReceivingOrder)
from stoqlib.domain.sale import Sale
from stoqlib.lib.dateutils import localtoday
from stoqlib.lib.parameters import sysparam
from stoqlib.lib.translation import stoqlib_gettext

_ = stoqlib_gettext

# Subselect counting the comments of each payment, so the main viewable
# query doesn't need a huge GROUP BY.
# Bug fix: removed the stray trailing comma that used to follow this
# Select(...) call -- it turned _CommentsSummary into a 1-tuple, so the
# Alias below wrapped a tuple instead of the Select expression.
_CommentsSummary = Select(columns=[
    PaymentComment.payment_id,
    Alias(Count(PaymentComment.id), 'comments_number')
],
                          tables=[PaymentComment],
                          group_by=[PaymentComment.payment_id])
CommentsSummary = Alias(_CommentsSummary, '_comments')


class BasePaymentView(Viewable):
    PaymentGroup_Sale = ClassAlias(PaymentGroup, 'payment_group_sale')
    PaymentGroup_Purchase = ClassAlias(PaymentGroup, 'payment_group_purchase')

    payment = Payment
    group = PaymentGroup
    purchase = PurchaseOrder
    sale = Sale
    method = PaymentMethod
        a, b = self.get_range_value()
        return a <= value <= b

    def can_remove(self):
        """A quality test can only be removed while no production item
        quality result references it."""
        # Local import, presumably to avoid a circular dependency --
        # confirm before moving it to module level.
        from stoqlib.domain.production import ProductionItemQualityResult
        if self.store.find(ProductionItemQualityResult,
                           quality_test=self).count():
            return False
        return True


# Stock total per (batch, branch) pair, computed in a subselect.
_StockSummary = Alias(Select(
    columns=[
        ProductStockItem.batch_id,
        ProductStockItem.branch_id,
        Alias(Sum(ProductStockItem.quantity), 'stock')],
    tables=[
        ProductStockItem],
    group_by=[
        ProductStockItem.batch_id, ProductStockItem.branch_id]),
    '_stock_summary')


class StorableBatchView(Viewable):
    """A view for |batches|

    This is used to get the most information of a |batch|
    without doing lots of database queries.

    It's bestly used with :meth:`.find_by_storable`
    """
def find_by_purchase(cls, store, purchase): return store.find(cls, purchase_id=purchase.id) # # Views # # Summary for Purchased items # Its faster to do the SUM() bellow in a subselect, since aggregate # functions require group by for every other column, and grouping all the # columns in PurchaseOrderView is extremelly slow, as it requires sorting all # those columns _ItemSummary = Select(columns=[ PurchaseItem.order_id, Alias(Sum(PurchaseItem.quantity), 'ordered_quantity'), Alias(Sum(PurchaseItem.quantity_received), 'received_quantity'), Alias(Sum(PurchaseItem.quantity * PurchaseItem.cost), 'subtotal') ], tables=[PurchaseItem], group_by=[PurchaseItem.order_id]) PurchaseItemSummary = Alias(_ItemSummary, '_purchase_item') class PurchaseOrderView(Viewable): """General information about purchase orders :cvar id: the id of purchase_order table :cvar status: the purchase order status :cvar open_date: the date when the order was started :cvar quote_deadline: the date when the quotation expires
# pylint: enable=E1101 """Views related to Daily Movement Reports""" from storm.expr import Join, LeftJoin, Sum, Alias, Select, Coalesce from storm.info import ClassAlias from stoqlib.database.expr import Field, NullIf, Round from stoqlib.domain.payment.views import InPaymentView, OutPaymentView from stoqlib.domain.person import Branch, Client, Company, Person, SalesPerson from stoqlib.domain.sale import Sale, SaleItem, InvoiceItemIpi from stoqlib.lib.defaults import DECIMAL_PRECISION _SaleItemSummary = Select(columns=[ SaleItem.sale_id, Alias( Sum( Round(SaleItem.quantity * SaleItem.price + InvoiceItemIpi.v_ipi, DECIMAL_PRECISION)), 'subtotal') ], tables=[ SaleItem, LeftJoin( InvoiceItemIpi, SaleItem.ipi_info_id == InvoiceItemIpi.id) ], group_by=[SaleItem.sale_id]) SaleItemSummary = Alias(_SaleItemSummary, '_sale_items') class DailyInPaymentView(InPaymentView):
## # pylint: enable=E1101 """Views related to Daily Movement Reports""" from storm.expr import Join, LeftJoin, Sum, Alias, Select, Coalesce from storm.info import ClassAlias from stoqlib.database.expr import Field, NullIf from stoqlib.domain.payment.views import InPaymentView, OutPaymentView from stoqlib.domain.person import Branch, Client, Company, Person, SalesPerson from stoqlib.domain.sale import Sale, SaleItem, InvoiceItemIpi _SaleItemSummary = Select(columns=[SaleItem.sale_id, Alias(Sum(SaleItem.quantity * SaleItem.price + InvoiceItemIpi.v_ipi), 'subtotal')], tables=[SaleItem, LeftJoin(InvoiceItemIpi, SaleItem.ipi_info_id == InvoiceItemIpi.id)], group_by=[SaleItem.sale_id]) SaleItemSummary = Alias(_SaleItemSummary, '_sale_items') class DailyInPaymentView(InPaymentView): SalesPersonPerson = ClassAlias(Person, 'salesperson_person') ClientPerson = ClassAlias(Person, 'client_person') PersonBranch = ClassAlias(Person, 'person_branch') salesperson_name = SalesPersonPerson.name
        return store.find(cls, purchase_id=purchase.id)


#
# Views
#

# Summary for Purchased items
#
# It's faster to do the SUM() below in a subselect, since aggregate
# functions require group by for every other column, and grouping all the
# columns in PurchaseOrderView is extremely slow, as it requires sorting all
# those columns

from stoqlib.domain.receiving import ReceivingOrder, ReceivingInvoice, PurchaseReceivingMap

_ItemSummary = Select(
    columns=[PurchaseItem.order_id,
             Alias(Sum(PurchaseItem.quantity), 'ordered_quantity'),
             Alias(Sum(PurchaseItem.quantity_received), 'received_quantity'),
             Alias(Sum(PurchaseItem.ipi_value), 'ipi_value'),
             Alias(Sum((PurchaseItem.quantity * PurchaseItem.cost) +
                       PurchaseItem.ipi_value), 'subtotal')],
    tables=[PurchaseItem],
    group_by=[PurchaseItem.order_id])
PurchaseItemSummary = Alias(_ItemSummary, '_purchase_item')

# Receiving information per purchase: first receival date and the
# aggregated invoice numbers.
_ReceivingOrder = Select(
    columns=[Alias(Min(ReceivingOrder.receival_date), 'receival_date'),
             Alias(ArrayToString(ArrayAgg(ReceivingInvoice.invoice_number),
                                 ', '),
                   'invoice_numbers'),
             Alias(PurchaseReceivingMap.purchase_id, 'purchase_id')],
    tables=[
        PurchaseReceivingMap,
    @classmethod
    def find_by_purchase(cls, store, purchase):
        """Find all views referencing the given purchase order."""
        return store.find(cls, purchase_id=purchase.id)


#
# Views
#

# Summary for Purchased items
#
# It's faster to do the SUM() below in a subselect, since aggregate
# functions require group by for every other column, and grouping all the
# columns in PurchaseOrderView is extremely slow, as it requires sorting all
# those columns
_ItemSummary = Select(columns=[PurchaseItem.order_id,
                               Alias(Sum(PurchaseItem.quantity),
                                     'ordered_quantity'),
                               Alias(Sum(PurchaseItem.quantity_received),
                                     'received_quantity'),
                               Alias(Sum(PurchaseItem.quantity *
                                         PurchaseItem.cost), 'subtotal')],
                      tables=[PurchaseItem],
                      group_by=[PurchaseItem.order_id])
PurchaseItemSummary = Alias(_ItemSummary, '_purchase_item')

# Imported here, presumably to avoid a circular import -- confirm before
# moving to the top of the file.
from stoqlib.domain.receiving import ReceivingOrder, PurchaseReceivingMap

# First receival date per purchase, via the purchase<->receiving map.
_ReceivingOrder = Select(
    columns=[Alias(Min(ReceivingOrder.receival_date), 'receival_date'),
             Alias(PurchaseReceivingMap.purchase_id, 'purchase_id')],
    tables=[PurchaseReceivingMap,
            LeftJoin(ReceivingOrder,
                     ReceivingOrder.id == PurchaseReceivingMap.receiving_id)],
    group_by=[PurchaseReceivingMap.purchase_id])
from stoqlib.domain.payment.operation import get_payment_operation
from stoqlib.domain.payment.payment import Payment, PaymentChangeHistory
from stoqlib.domain.payment.renegotiation import PaymentRenegotiation
from stoqlib.domain.person import Person
from stoqlib.domain.purchase import PurchaseOrder
from stoqlib.domain.sale import Sale
from stoqlib.lib.dateutils import localtoday
from stoqlib.lib.parameters import sysparam
from stoqlib.lib.translation import stoqlib_gettext

_ = stoqlib_gettext

# Subselect counting the comments of each payment.
# Bug fix: removed the stray trailing comma that used to follow this
# Select(...) call -- it turned _CommentsSummary into a 1-tuple, so the
# Alias below wrapped a tuple instead of the Select expression.
_CommentsSummary = Select(columns=[PaymentComment.payment_id,
                                   Alias(Count(PaymentComment.id),
                                         'comments_number')],
                          tables=[PaymentComment],
                          group_by=[PaymentComment.payment_id])
CommentsSummary = Alias(_CommentsSummary, '_comments')


class BasePaymentView(Viewable):
    PaymentGroup_Sale = ClassAlias(PaymentGroup, 'payment_group_sale')
    PaymentGroup_Purchase = ClassAlias(PaymentGroup, 'payment_group_purchase')

    payment = Payment
    group = PaymentGroup
    purchase = PurchaseOrder
    sale = Sale
    method = PaymentMethod
def calculate_bugsummary_rows(target):
    """Calculate BugSummary row fragments for the given `IBugTarget`.

    The data is re-aggregated from BugTaskFlat, BugTag and BugSubscription.
    """
    # Use a CTE to prepare a subset of BugTaskFlat, filtered to the
    # relevant target and to exclude duplicates, and with has_patch
    # calculated.
    relevant_tasks = With(
        'relevant_task',
        Select((BugTaskFlat.bug_id, BugTaskFlat.information_type,
                BugTaskFlat.status, BugTaskFlat.milestone_id,
                BugTaskFlat.importance,
                Alias(BugTaskFlat.latest_patch_uploaded != None, 'has_patch'),
                BugTaskFlat.access_grants, BugTaskFlat.access_policies),
               tables=[BugTaskFlat],
               where=And(BugTaskFlat.duplicateof_id == None,
                         *get_bugtaskflat_constraint(target))))

    # Storm class to reference the CTE.
    class RelevantTask(BugTaskFlat):
        __storm_table__ = 'relevant_task'

        has_patch = Bool()

    # Storm class to reference the union.
    class BugSummaryPrototype(RawBugSummary):
        __storm_table__ = 'bugsummary_prototype'

    # Prepare a union for all combination of privacy and taggedness.
    # It'll return a full set of
    # (status, milestone, importance, has_patch, tag, viewed_by,
    #  access_policy) rows.
    common_cols = (RelevantTask.status, RelevantTask.milestone_id,
                   RelevantTask.importance, RelevantTask.has_patch)
    # NULL placeholders, cast so every branch of the union has matching
    # column types.
    null_tag = Alias(Cast(None, 'text'), 'tag')
    null_viewed_by = Alias(Cast(None, 'integer'), 'viewed_by')
    null_policy = Alias(Cast(None, 'integer'), 'access_policy')
    tag_join = Join(BugTag, BugTag.bugID == RelevantTask.bug_id)

    public_constraint = RelevantTask.information_type.is_in(
        PUBLIC_INFORMATION_TYPES)
    private_constraint = RelevantTask.information_type.is_in(
        PRIVATE_INFORMATION_TYPES)

    unions = Union(
        # Public, tagless
        Select(common_cols + (null_tag, null_viewed_by, null_policy),
               tables=[RelevantTask], where=public_constraint),
        # Public, tagged
        Select(common_cols + (BugTag.tag, null_viewed_by, null_policy),
               tables=[RelevantTask, tag_join], where=public_constraint),
        # Private, access grant, tagless
        Select(common_cols + (null_tag, Unnest(RelevantTask.access_grants),
                              null_policy),
               tables=[RelevantTask], where=private_constraint),
        # Private, access grant, tagged
        Select(common_cols + (BugTag.tag, Unnest(RelevantTask.access_grants),
                              null_policy),
               tables=[RelevantTask, tag_join], where=private_constraint),
        # Private, access policy, tagless
        Select(
            common_cols + (null_tag, null_viewed_by,
                           Unnest(RelevantTask.access_policies)),
            tables=[RelevantTask], where=private_constraint),
        # Private, access policy, tagged
        Select(
            common_cols + (BugTag.tag, null_viewed_by,
                           Unnest(RelevantTask.access_policies)),
            tables=[RelevantTask, tag_join], where=private_constraint),
        all=True)

    # Select the relevant bits of the prototype rows and aggregate them.
    proto_key_cols = (BugSummaryPrototype.status,
                      BugSummaryPrototype.milestone_id,
                      BugSummaryPrototype.importance,
                      BugSummaryPrototype.has_patch, BugSummaryPrototype.tag,
                      BugSummaryPrototype.viewed_by_id,
                      BugSummaryPrototype.access_policy_id)
    origin = IStore(BugTaskFlat).with_(relevant_tasks).using(
        Alias(unions, 'bugsummary_prototype'))
    results = origin.find(proto_key_cols + (Count(), ))
    results = results.group_by(*proto_key_cols).order_by(*proto_key_cols)
    return results
    @classmethod
    def find_by_sale(cls, store, sale):
        """Returns all |workorders| associated with the given |sale|.

        :param sale: The |sale| used to filter the existing |workorders|
        :return: An iterable with all work orders
        :rtype: resultset
        """
        return store.find(cls, sale=sale)


# Per-order item totals (summed quantity and quantity * price), computed
# in a subselect to avoid a huge GROUP BY in the view query.
_WorkOrderItemsSummary = Alias(
    Select(columns=[
        WorkOrderItem.order_id,
        Alias(Sum(WorkOrderItem.quantity), 'quantity'),
        Alias(Sum(WorkOrderItem.quantity * WorkOrderItem.price), 'total')
    ],
           tables=[WorkOrderItem],
           group_by=[WorkOrderItem.order_id]),
    '_work_order_items')


class WorkOrderView(Viewable):
    """A view for |workorders|

    This is used to get the most information of a |workorder|
    without doing lots of database queries.
    """

    # TODO: Maybe we should have a cache for branches, to avoid all this
    # joins just to get the company name.
    _BranchOriginalBranch = ClassAlias(Branch, "branch_original_branch")