def __register__(cls, module_name):
    """Create/migrate the product cost price table.

    Handles two migrations:
    * fresh install: add a temporary ``template`` column so the old
      ir.property values can be migrated in;
    * migration from 4.4: the record key changed from template to
      product, so rows keyed by template are duplicated per product
      and the ``template`` column is dropped.
    """
    pool = Pool()
    Product = pool.get('product.product')
    TableHandler = backend.get('TableHandler')
    sql_table = cls.__table__()
    product = Product.__table__()
    cursor = Transaction().connection.cursor()

    # Must be checked before super() creates the table.
    exist = TableHandler.table_exist(cls._table)

    super(ProductCostPrice, cls).__register__(module_name)

    table = cls.__table_handler__(module_name)
    if not exist:
        # Create template column for property migration
        table.add_column('template', 'INTEGER')
        cls._migrate_property([], [], [])

    # Migration from 4.4: replace template by product
    if table.column_exist('template'):
        columns = ['create_uid', 'create_date',
            'write_uid', 'write_date', 'cost_price']
        # Copy each template-keyed row once per product of that
        # template, filling the new product column from the join.
        cursor.execute(*sql_table.insert(
                columns=[Column(sql_table, c) for c in columns]
                + [sql_table.product],
                values=sql_table.join(product,
                    condition=sql_table.template == product.template
                    ).select(
                    *[Column(sql_table, c) for c in columns]
                    + [product.id],
                    where=(sql_table.template != Null)
                    & (sql_table.product == Null))))
        # Remove the now-duplicated template-keyed rows.
        cursor.execute(*sql_table.delete(
                where=(sql_table.template != Null)
                & (sql_table.product == Null)))
        table.drop_column('template')
def convert_domain_tree(self, domain, tables):
    """Convert a (child_of / parent_of)-style domain into SQL.

    Builds a recursive CTE over the target table: for ``child_of`` it
    walks down from the given ids following the parent column *name*;
    otherwise (parent_of) it walks up by selecting the parent ids.
    Returns the (possibly negated, for ``not ...`` operators) IN
    expression on the main table id.
    """
    Target = self.get_target()
    target = Target.__table__()
    table, _ = tables[None]
    name, operator, ids = domain
    # None ids cannot match any row; drop them before reduce_ids.
    red_sql = reduce_ids(target.id, (i for i in ids if i is not None))
    if operator.endswith('child_of'):
        # Descend: seed with the given ids, then repeatedly add rows
        # whose parent column points into the tree.
        tree = With('id', recursive=True)
        tree.query = target.select(target.id, where=red_sql)
        tree.query |= (target
            .join(tree, condition=Column(target, name) == tree.id)
            .select(target.id))
    else:
        # Ascend: carry the parent column along and follow it upward.
        tree = With('id', name, recursive=True)
        tree.query = target.select(
            target.id, Column(target, name), where=red_sql)
        tree.query |= (target
            .join(tree, condition=target.id == Column(tree, name))
            .select(target.id, Column(target, name)))
    expression = table.id.in_(tree.select(tree.id, with_=[tree]))
    if operator.startswith('not'):
        return ~expression
    return expression
def table_query(cls):
    """Return the query over the opportunity history table.

    Groups the raw history rows by opportunity, modification date and
    user so each group represents one change of an opportunity.

    Fix: iterate ``cls._fields`` with ``items()`` instead of the
    Python-2-only ``iteritems()`` (consistent with the other
    ``table_query`` implementations in this code base; ``items()``
    also works on Python 2).
    """
    Opportunity = Pool().get('sale.opportunity')
    opportunity_history = Opportunity.__table_history__()
    columns = [
        Min(Column(opportunity_history, '__id')).as_('id'),
        opportunity_history.id.as_('opportunity'),
        Min(Coalesce(opportunity_history.write_date,
                opportunity_history.create_date)).as_('date'),
        Coalesce(opportunity_history.write_uid,
            opportunity_history.create_uid).as_('user'),
        ]
    group_by = [
        opportunity_history.id,
        Coalesce(opportunity_history.write_uid,
            opportunity_history.create_uid),
        ]
    for name, field in cls._fields.items():
        # id/opportunity/date/user are built explicitly above.
        if name in ('id', 'opportunity', 'date', 'user'):
            continue
        # Function fields with a setter have no SQL column.
        if hasattr(field, 'set'):
            continue
        column = Column(opportunity_history, name)
        columns.append(column.as_(name))
        group_by.append(column)
    return opportunity_history.select(*columns, group_by=group_by)
def __register__(cls, module_name):
    """Create/migrate the payment term line relativedelta table.

    Handles two migrations:
    * from 4.0: the table name was shortened, so an existing table
      under the old long name is renamed;
    * from 3.4: the delta columns used to live directly on the payment
      term line; copy them into this table then drop them.
    """
    TableHandler = backend.get('TableHandler')
    cursor = Transaction().connection.cursor()
    pool = Pool()
    Line = pool.get('account.invoice.payment_term.line')
    sql_table = cls.__table__()
    line = Line.__table__()

    # Migration from 4.0: rename long table
    old_model_name = 'account.invoice.payment_term.line.relativedelta'
    old_table = config.get('table', old_model_name,
        default=old_model_name.replace('.', '_'))
    if TableHandler.table_exist(old_table):
        TableHandler.table_rename(old_table, cls._table)

    super(PaymentTermLineRelativeDelta, cls).__register__(module_name)

    line_table = Line.__table_handler__(module_name)
    # Migration from 3.4
    fields = ['day', 'month', 'weekday', 'months', 'weeks', 'days']
    if any(line_table.column_exist(f) for f in fields):
        # One relativedelta row per line, carrying the old columns.
        columns = ([line.id.as_('line')]
            + [Column(line, f) for f in fields])
        cursor.execute(*sql_table.insert(
                columns=[sql_table.line]
                + [Column(sql_table, f) for f in fields],
                values=line.select(*columns)))
        for field in fields:
            line_table.drop_column(field)
def _get_target_tables(self, tables):
    """Return (and cache in *tables*) the join tables for the target.

    When the target model keeps history and the context carries a
    ``_datetime``, the join is made against the history table
    restricted to the latest ``__id`` per record at that datetime;
    otherwise the plain table is used. The result is cached under
    ``tables[self.name]`` so repeated calls reuse the same alias.
    """
    Target = self.get_target()
    table, _ = tables[None]
    target_tables = tables.get(self.name)
    context = Transaction().context
    if target_tables is None:
        if Target._history and context.get('_datetime'):
            # Two distinct instances of the history table: one joined,
            # one used in the subquery picking the latest revision.
            target = Target.__table_history__()
            target_history = Target.__table_history__()
            history_condition = Column(target, '__id').in_(
                target_history.select(
                    Max(Column(target_history, '__id')),
                    where=Coalesce(
                        target_history.write_date,
                        target_history.create_date)
                    <= context['_datetime'],
                    group_by=target_history.id))
        else:
            target = Target.__table__()
            history_condition = None
        condition = target.id == self.sql_column(table)
        if history_condition:
            condition &= history_condition
        target_tables = {
            None: (target, condition),
            }
        tables[self.name] = target_tables
    return target_tables
def table_query(cls):
    """Return the query over the opportunity history table.

    Groups the raw history rows by opportunity, modification date and
    user so each group represents one change of an opportunity.

    Fix: iterate ``cls._fields`` with ``items()`` instead of the
    Python-2-only ``iteritems()`` (consistent with the other
    ``table_query`` implementations in this code base; ``items()``
    also works on Python 2).
    """
    Opportunity = Pool().get('sale.opportunity')
    opportunity_history = Opportunity.__table_history__()
    columns = [
        Min(Column(opportunity_history, '__id')).as_('id'),
        opportunity_history.id.as_('opportunity'),
        Min(
            Coalesce(opportunity_history.write_date,
                opportunity_history.create_date)).as_('date'),
        Coalesce(opportunity_history.write_uid,
            opportunity_history.create_uid).as_('user'),
        ]
    group_by = [
        opportunity_history.id,
        Coalesce(opportunity_history.write_uid,
            opportunity_history.create_uid),
        ]
    for name, field in cls._fields.items():
        # id/opportunity/date/user are built explicitly above.
        if name in ('id', 'opportunity', 'date', 'user'):
            continue
        # Function fields with a setter have no SQL column.
        if hasattr(field, 'set'):
            continue
        column = Column(opportunity_history, name)
        columns.append(column.as_(name))
        group_by.append(column)
    return opportunity_history.select(*columns, group_by=group_by)
def load_documents(court=None):
    """Yield document objects, optionally restricted to one *court*.

    Joins the generic ``document`` table with the advocate case view
    and LEFT-joins every court-specific document table, aliasing its
    columns as ``<court-name>__<column>`` so all rows share one flat
    schema. Each fetched row is turned into a document via the owning
    court's ``create_document``.
    """
    with connection().cursor() as cursor:
        documents = Table('document')
        cases = Table('vw_case_for_advocates')
        pre_select = documents.join(
            cases,
            type_='INNER',
            condition=(cases.id_case == documents.case_id)
        )
        specific_court_columns = []
        for specific_court in Courts.ALL:
            court_documents = Table(specific_court.document_db_table)
            # Prefix per-court columns to avoid name clashes in the
            # combined result set.
            for col in specific_court.document_db_table_columns:
                specific_court_columns.append(
                    Column(court_documents, col).as_(
                        '{}__{}'.format(specific_court.name, col)))
            pre_select = pre_select.join(
                court_documents,
                type_='LEFT',
                condition=(documents.id_document
                    == court_documents.document_id)
            )
        select = pre_select.select(
            Column(documents, '*'), *specific_court_columns)
        if court is not None:
            select.where = documents.court_id == court.id
        query, args = tuple(select)
        cursor.execute(query, args)
        cols = [col[0] for col in cursor.description]
        for fetched in cursor:
            row = dict(zip(cols, fetched))
            # Without an explicit court, resolve it from the row.
            current_court = (court if court is not None
                else Courts.BY_ID[row['court_id']])
            row['court_id'] = current_court.name
            yield current_court.create_document(row)
def __register__(cls, module_name):
    """Create/migrate the product cost price method table.

    Migration from 4.4: ``cost_price_method`` moved from the
    ProductCostPrice model to this one. On a fresh install the old
    ir.property values are migrated; when the old table exists and
    still has a ``template`` column, its rows are copied over.
    """
    pool = Pool()
    ProductCostPrice = pool.get('product.cost_price')
    TableHandler = backend.get('TableHandler')
    sql_table = cls.__table__()
    cost_price = ProductCostPrice.__table__()
    cursor = Transaction().connection.cursor()

    # Both existence checks must run before super() creates tables.
    exist = TableHandler.table_exist(cls._table)
    cost_price_exist = TableHandler.table_exist(ProductCostPrice._table)

    super(ProductCostPriceMethod, cls).__register__(module_name)

    # Migrate from 4.4: move cost_price_method from ProductCostPrice
    if not exist and not cost_price_exist:
        cls._migrate_property([], [], [])
    elif not exist and cost_price_exist:
        cost_price_table = TableHandler(ProductCostPrice, module_name)
        if cost_price_table.column_exist('template'):
            columns = ['create_uid', 'create_date',
                'write_uid', 'write_date', 'template',
                'cost_price_method']
            cursor.execute(*sql_table.insert(
                    columns=[Column(sql_table, c) for c in columns],
                    values=cost_price.select(
                        *[Column(cost_price, c) for c in columns])))
def union_column(cls, name, field, table, Model):
    """Return the SQL expression for *name* in the union query.

    'button' and 'field' columns only exist on some of the unioned
    models; when the model name does not contain the matching word the
    column defaults to NULL. All other names are delegated to super.

    NOTE(review): the substring test on ``Model.__name__`` and the
    placement of the early returns were reconstructed from a collapsed
    source line — confirm against the original formatting.
    """
    value = Literal(None)
    if name == 'button':
        if 'button' in Model.__name__:
            value = Column(table, 'button')
        return value
    if name == 'field':
        if 'field' in Model.__name__:
            value = Column(table, 'field')
        return value
    return super(ViewConfiguratorLine, cls).union_column(
        name, field, table, Model)
def table_query(cls):
    """Build the SELECT over stock.lot backing this model."""
    pool = Pool()
    Lot = pool.get('stock.lot')
    lot = Lot.__table__()

    def _source(fname, field):
        # Many2One fields pointing back at the lot reuse its id column.
        if (isinstance(field, fields.Many2One)
                and field.get_target() == Lot):
            return Column(lot, 'id')
        return Column(lot, fname)

    # Function fields with a setter have no SQL backing; skip them.
    selected = [
        _source(fname, field).as_(fname)
        for fname, field in cls._fields.items()
        if not hasattr(field, 'set')]
    return lot.select(*selected)
def table_query(cls):
    """Build the SELECT over product.product backing this model."""
    pool = Pool()
    Product = pool.get('product.product')
    product = Product.__table__()
    selected = []
    for fname, field in cls._fields.items():
        if hasattr(field, 'set'):
            # Function fields with a setter have no SQL backing.
            continue
        points_to_product = (isinstance(field, fields.Many2One)
            and field.get_target() == Product)
        # A Many2One back to the product maps onto the id column.
        source = Column(product, 'id' if points_to_product else fname)
        selected.append(source.as_(fname))
    return product.select(*selected)
def _get_column(cls, tables, reverse_tables, name):
    """Return the (forward, reverse) column pair for *name*.

    The union interleaves forward rows (even ids) and reversed rows
    (odd ids); from_/to are swapped on the reverse side and the type
    column maps through its declared reverse type.
    """
    forward, _ = tables[None]
    backward, _ = reverse_tables[None]
    if name == 'id':
        # Double + offset keeps forward and reverse ids disjoint.
        return (As(forward.id * 2, name),
            As(backward.id * 2 + 1, name))
    if name == 'from_':
        return forward.from_, backward.to.as_(name)
    if name == 'to':
        return forward.to, backward.from_.as_(name)
    if name == 'type':
        reverse_type, _ = reverse_tables[name][None]
        return forward.type, reverse_type.reverse.as_(name)
    return Column(forward, name), Column(backward, name)
def table_query(cls):
    """Build the SELECT over analysis sheets joined to compilations.

    Context keys 'laboratory', 'from_date' and 'to_date' restrict the
    rows; the virtual 'date' field is derived from the compilation's
    date_time, every other field comes from the sheet itself.
    """
    pool = Pool()
    Sheet = pool.get('lims.analysis_sheet')
    Compilation = pool.get('lims.interface.compilation')
    sheet = Sheet.__table__()
    compilation = Compilation.__table__()
    ctx = Transaction().context

    condition = Literal(True)
    laboratory = ctx.get('laboratory')
    if laboratory:
        condition &= sheet.laboratory == laboratory
    from_date = ctx.get('from_date')
    if from_date:
        condition &= compilation.date_time >= datetime.combine(
            from_date, time(0, 0))
    to_date = ctx.get('to_date')
    if to_date:
        condition &= compilation.date_time <= datetime.combine(
            to_date, time(23, 59))

    selected = []
    for fname, field in cls._fields.items():
        if hasattr(field, 'set'):
            # Function fields with a setter have no SQL backing.
            continue
        if fname == 'date':
            selected.append(
                Cast(compilation.date_time, 'date').as_(fname))
        else:
            selected.append(Column(sheet, fname).as_(fname))
    joined = sheet.join(
        compilation, condition=sheet.compilation == compilation.id)
    return joined.select(*selected, where=condition)
def test_column(self):
    """A column renders bare by default, alias-qualified inside an
    AliasManager."""
    table = Table('t')
    column = Column(table, 'c')
    self.assertEqual(column.name, 'c')
    self.assertEqual(str(column), '"c"')
    with AliasManager():
        self.assertEqual(str(column), '"a"."c"')
def load_cases(court=None):
    """Yield ``Case`` objects, optionally restricted to one *court*.

    LEFT-joins the case view with the latest processed advocate and
    result taggings and with computed annulment links, so a case row
    carries its tagging/annulment data when present.
    """
    with connection().cursor() as cursor:
        cases = Table('vw_case_for_advocates')
        annulment = Table('vw_computed_case_annulment')
        latest_advocates = Table('vw_latest_tagging_advocate')
        latest_results = Table('vw_latest_tagging_case_result')
        select = cases.join(
            latest_advocates,
            type_='LEFT',
            condition=(latest_advocates.case_id == cases.id_case)
            & (latest_advocates.status == 'processed')
        ).join(
            latest_results,
            type_='LEFT',
            condition=(latest_results.case_id == cases.id_case)
            & (latest_results.status == 'processed')
        ).join(
            annulment,
            type_='LEFT',
            condition=(annulment.annuled_case == cases.id_case)
        ).select(
            Column(cases, '*'),
            latest_advocates.advocate_id,
            latest_results.case_result,
            annulment.annuled_case,
            annulment.annuling_case
        )
        if court is not None:
            select.where = cases.court_id == court.id
        query, args = tuple(select)
        cursor.execute(query, args)
        cols = [col[0] for col in cursor.description]
        for fetched in cursor:
            row = dict(zip(cols, fetched))
            # Without an explicit court, resolve it from the row.
            current_court = (court if court is not None
                else Courts.BY_ID[row['court_id']])
            row['court_id'] = current_court.name
            yield Case(row)
def order_function(cls, tables):
    """Order expression pushing NULL values first or last.

    NOTE(review): ``null_first`` and ``field_name`` are free variables
    — this function is presumably defined inside a factory/closure that
    binds them; confirm against the enclosing scope.
    """
    table, _ = tables[None]
    # NULLs sort as 0 (before non-NULLs) when null_first, else as 1.
    operator = Equal
    if not null_first:
        operator = NotEqual
    field = Column(table, field_name)
    return [Case((operator(field, Null), 0), else_=1), field]
def document_db_table_columns(self):
    """Return the column names of this court's document table."""
    table = Table(self.document_db_table)
    # LIMIT 1: only the cursor description is needed, not the data.
    probe = table.select(Column(table, '*'), limit=1)
    sql, params = tuple(probe)
    with connection().cursor() as cursor:
        cursor.execute(sql, params)
        description = cursor.description
    return [entry[0] for entry in description]
def table_query(cls):
    """Build the query over the ir.property history of cost prices.

    Selects historized 'cost_price' properties attached to product
    templates, extracting the template id from the reference string
    ``product.template,<id>`` and the numeric value from the property
    value ``,<number>``, grouped per property revision.
    """
    pool = Pool()
    Property = pool.get('ir.property')
    Field = pool.get('ir.model.field')
    property_history = Property.__table_history__()
    field = Field.__table__()
    return property_history.join(field,
        condition=field.id == property_history.field).select(
        Max(Column(property_history, '__id')).as_('id'),
        Max(property_history.create_uid).as_('create_uid'),
        Max(property_history.create_date).as_('create_date'),
        Max(property_history.write_uid).as_('write_uid'),
        Max(property_history.write_date).as_('write_date'),
        Coalesce(property_history.write_date,
            property_history.create_date).as_('date'),
        # 'product.template,<id>' -> keep from the comma, strip it,
        # cast to the template field's SQL type.
        Trim(Substring(property_history.res, ',.*'), 'LEADING', ','
            ).cast(cls.template.sql_type().base).as_('template'),
        # Property values are stored as ',<number>'.
        Trim(property_history.value, 'LEADING', ',').cast(
            cls.cost_price.sql_type().base).as_('cost_price'),
        where=(field.name == 'cost_price')
        & property_history.res.like('product.template,%'),
        group_by=(property_history.id,
            Coalesce(property_history.write_date,
                property_history.create_date),
            property_history.res, property_history.value))
def union_column(cls, name, field, table, Model):
    """Return the SQL expression for *name* in the union query.

    For a subdivision row, 'parent' points at its country record,
    sharded into the union id space; everything else is delegated.
    """
    column = super(Region, cls).union_column(name, field, table, Model)
    subdivision_model = 'sale.reporting.country.subdivision'
    if name == 'parent' and Model.__name__ == subdivision_model:
        # A subdivision's parent is its country in the region union.
        column = cls.union_shard(
            Column(table, 'country'), 'sale.reporting.country')
    return column
def table_query(cls):
    """Build the SELECT over professionals joined to their party.

    The 'name' field is not stored on the professional, so it is read
    from the linked party; every other field comes from the
    professional table itself.
    """
    pool = Pool()
    Professional = pool.get('lims.laboratory.professional')
    Party = pool.get('party.party')
    professional = Professional.__table__()
    party = Party.__table__()

    def _column(fname):
        source = party if fname == 'name' else professional
        return Column(source, fname).as_(fname)

    # Function fields with a setter have no SQL backing; skip them.
    selected = [_column(fname)
        for fname, field in cls._fields.items()
        if not hasattr(field, 'set')]
    join = professional.join(
        party, condition=professional.party == party.id)
    return join.select(*selected)
def _test_column_with_quote(self, quote):
    """Check column rendering with the given identifier quote char."""
    column = Column(Table('t'), 'c')

    def quoted(identifier):
        return quote + identifier + quote

    self.assertEqual(column.name, 'c')
    self.assertEqual(str(column), quoted('c'))
    with AliasManager():
        # Inside an alias manager the table alias prefixes the column.
        self.assertEqual(str(column), quoted('a') + '.' + quoted('c'))
class TestOrder(unittest.TestCase):
    """Rendering of ORDER BY direction wrappers."""
    column = Column(Table('t'), 'c')

    def test_asc(self):
        expression = Asc(self.column)
        self.assertEqual(str(expression), '"c" ASC')

    def test_desc(self):
        expression = Desc(self.column)
        self.assertEqual(str(expression), '"c" DESC')
def test_order_query(self):
    """Ordering by a whole sub-query wraps it in parentheses."""
    table = Table('t')
    query = table.select(Column(table, 'c'))
    rendered = '(SELECT "a"."c" FROM "t" AS "a")'
    self.assertEqual(str(Asc(query)), rendered + ' ASC')
    self.assertEqual(str(Desc(query)), rendered + ' DESC')
def _get_column(cls, tables, reverse_tables, name):
    """Return the (forward, reverse) column pair for *name*.

    The union interleaves forward rows (even ids) and reversed rows
    (odd ids); from_/to are swapped on the reverse side and the type
    column maps through its declared reverse type.
    """
    table, _ = tables[None]
    reverse_table, _ = reverse_tables[None]
    if name == 'id':
        # Double + offset keeps forward and reverse ids disjoint.
        return As(table.id * 2, name), As(reverse_table.id * 2 + 1, name)
    elif name == 'from_':
        # FLA: Fix #11470: Use column in order to be able to use
        # _get_history_table
        return Column(table, 'from_'), reverse_table.to.as_(name)
    elif name == 'to':
        # FLA: Fix #11470: Use column in order to be able to use
        # _get_history_table
        return table.to, Column(reverse_table, 'from_').as_(name)
    elif name == 'type':
        reverse_type, _ = reverse_tables[name][None]
        return table.type, reverse_type.reverse.as_(name)
    else:
        return Column(table, name), Column(reverse_table, name)
def transition_replace(self):
    """Replace the source party by the destination everywhere.

    Deactivates the source party's addresses, contact mechanisms and
    identifiers, marks the source as replaced, then rewrites every
    foreign key / reference returned by ``fields_to_replace`` with a
    direct SQL UPDATE, recording history for the touched rows.
    """
    pool = Pool()
    Address = pool.get('party.address')
    ContactMechanism = pool.get('party.contact_mechanism')
    Identifier = pool.get('party.identifier')
    transaction = Transaction()

    self.check_similarity()
    source = self.ask.source
    destination = self.ask.destination

    # Deactivate the source's sub-records so they stop showing up.
    Address.write(list(source.addresses), {
            'active': False,
            })
    ContactMechanism.write(list(source.contact_mechanisms), {
            'active': False,
            })
    Identifier.write(list(source.identifiers), {
            'active': False,
            })

    source.replaced_by = destination
    source.active = False
    source.save()

    cursor = transaction.connection.cursor()
    for model_name, field_name in self.fields_to_replace():
        Model = pool.get(model_name)
        field = getattr(Model, field_name)
        table = Model.__table__()
        column = Column(table, field_name)

        # Reference fields store 'model,id' strings, not plain ids.
        if field._type == 'reference':
            source_value = str(source)
            destination_value = str(destination)
        else:
            source_value = source.id
            destination_value = destination.id

        where = column == source_value
        if transaction.database.has_returning():
            returning = [table.id]
        else:
            # No RETURNING support: collect the ids beforehand.
            cursor.execute(*table.select(table.id, where=where))
            ids = [x[0] for x in cursor]
            returning = None
        cursor.execute(*table.update(
                [column], [destination_value],
                where=where, returning=returning))
        if transaction.database.has_returning():
            ids = [x[0] for x in cursor]
        # Direct SQL bypasses the ORM, so history must be written
        # explicitly.
        Model._insert_history(ids)
    return 'end'
class TestCast(unittest.TestCase):
    """Rendering of the SQL CAST expression."""
    column = Column(Table('t'), 'c')

    def test_cast(self):
        expression = Cast(self.column, 'int')
        self.assertEqual(str(expression), 'CAST("c" AS int)')

    def test_cast_no_expression(self):
        # A bare Python value becomes a placeholder plus a parameter.
        expression = Cast(1.1, 'int')
        self.assertEqual(str(expression), 'CAST(%s AS int)')
        self.assertEqual(expression.params, (1.1, ))
def set(self, Model, name, ids, value, *args):
    """Store *value* for records *ids*, with optional extra pairs.

    Extra positional args alternate (ids, value, ids, value, ...).
    When ``file_id`` is set, the payload goes to the file store and
    only its identifier is written to the file_id column (the data
    column is set to NULL); otherwise the value is written inline.
    """
    transaction = Transaction()
    table = Model.__table__()
    cursor = transaction.connection.cursor()

    prefix = self.store_prefix
    if prefix is None:
        prefix = transaction.database.name

    # zip(it, it) pairs consecutive elements: (ids, value) tuples.
    args = iter((ids, value) + args)
    for ids, value in zip(args, args):
        if self.file_id:
            columns = [Column(table, self.file_id), Column(table, name)]
            values = [
                filestore.set(value, prefix) if value else None, None]
        else:
            columns = [Column(table, name)]
            values = [self.sql_format(value)]
        cursor.execute(*table.update(columns, values,
                where=reduce_ids(table.id, ids)))
class TestOrder(unittest.TestCase):
    """Rendering of column aliasing with As."""
    table = Table('t')
    column = Column(table, 'c')

    def test_as(self):
        aliased = As(self.column, 'foo')
        self.assertEqual(str(aliased), '"foo"')

    def test_as_select(self):
        query = self.table.select(self.column.as_('foo'))
        self.assertEqual(query.params, ())
        self.assertEqual(str(query),
            'SELECT "a"."c" AS "foo" FROM "t" AS "a"')
def order(cls, tables):
    """Order expression reading *name* from the product's template.

    Joins product.template into *tables* on first use and reuses the
    existing join afterwards.

    Fix: ``tables['template']`` holds ``{None: (table, condition)}``
    (the shape this function itself stores), so the cached branch must
    unpack the table from it — previously the whole dict was passed to
    ``Column()``.

    NOTE(review): ``name`` is a free variable, presumably bound by an
    enclosing factory — confirm against the enclosing scope.
    """
    pool = Pool()
    Template = pool.get('product.template')
    product, _ = tables[None]
    if 'template' not in tables:
        template = Template.__table__()
        tables['template'] = {
            None: (template, product.template == template.id),
            }
    else:
        template, _ = tables['template'][None]
    return [Column(template, name)]
def union_column(cls, name, field, table, Model):
    """Return the SQL expression for *name* in the union query.

    Defaults to NULL when *Model* lacks the field. For Many2One fields
    targeting this union model, the raw id is sharded into the union
    id space — unless the target model is not part of the union, in
    which case the reference degrades to NULL.
    """
    column = Literal(None)
    union_field = Model._fields.get(name)
    if union_field:
        column = Column(table, union_field.name)
        if (isinstance(field, fields.Many2One)
                and field.model_name == cls.__name__):
            target_model = union_field.model_name
            if target_model in cls.union_models():
                column = cls.union_shard(column, target_model)
            else:
                column = Literal(None)
    return column
class TestOrder(unittest.TestCase):
    """Rendering of ORDER BY wrappers, including NULLS FIRST/LAST and
    the CASE-based emulation used when the backend flavor has no
    native null ordering."""
    column = Column(Table('t'), 'c')

    def test_asc(self):
        self.assertEqual(str(Asc(self.column)), '"c" ASC')

    def test_desc(self):
        self.assertEqual(str(Desc(self.column)), '"c" DESC')

    def test_nulls_first(self):
        self.assertEqual(str(NullsFirst(self.column)), '"c" NULLS FIRST')
        self.assertEqual(str(NullsFirst(Asc(self.column))),
            '"c" ASC NULLS FIRST')

    def test_nulls_last(self):
        self.assertEqual(str(NullsLast(self.column)), '"c" NULLS LAST')
        self.assertEqual(str(NullsLast(Asc(self.column))),
            '"c" ASC NULLS LAST')

    def test_no_null_ordering(self):
        # Without native null ordering, a CASE expression sorting
        # NULL-ness (0/1) is prepended before the column itself.
        try:
            Flavor.set(Flavor(null_ordering=False))
            exp = NullsFirst(self.column)
            self.assertEqual(
                str(exp),
                'CASE WHEN ("c" IS NULL) THEN %s ELSE %s END ASC, "c"')
            self.assertEqual(exp.params, (0, 1))

            exp = NullsFirst(Desc(self.column))
            self.assertEqual(
                str(exp),
                'CASE WHEN ("c" IS NULL) THEN %s ELSE %s END ASC, '
                '"c" DESC')
            self.assertEqual(exp.params, (0, 1))

            exp = NullsLast(Literal(2))
            self.assertEqual(
                str(exp),
                'CASE WHEN (%s IS NULL) THEN %s ELSE %s END ASC, %s')
            self.assertEqual(exp.params, (2, 1, 0, 2))
        finally:
            # Restore the default flavor for other tests.
            Flavor.set(Flavor())

    def test_order_query(self):
        # Ordering by a whole sub-query wraps it in parentheses.
        table = Table('t')
        column = Column(table, 'c')
        query = table.select(column)
        self.assertEqual(str(Asc(query)),
            '(SELECT "a"."c" FROM "t" AS "a") ASC')
        self.assertEqual(str(Desc(query)),
            '(SELECT "a"."c" FROM "t" AS "a") DESC')
def compute_quantities_query(cls, location_ids, with_childs=False,
        grouping=('product',), grouping_filter=None):
    """
    Prepare a query object to compute for each location and product the
    stock quantity in the default uom of the product.

    The context with keys:
        stock_date_end: if set the date of the stock computation.
        stock_date_start: if set return the delta of the stock between
            the two dates, (ignored if stock_date_end is missing).
        stock_assign: if set compute also the assigned moves as done.
        forecast: if set compute the forecast quantity.
        stock_destinations: A list of location ids. If set, restrict
            the computation to moves from and to those locations.
        stock_skip_warehouse: if set, quantities on a warehouse are no
            more quantities of all child locations but quantities of
            the storage zone.
    If with_childs, it computes also for child locations.
    grouping is a tuple of Move field names and defines how stock moves
        are grouped.
    grouping_filter is a tuple of values, for the Move's field at the
        same position in grouping tuple, used to filter which moves are
        used to compute quantities. It must be None or have the same
        number of elements than grouping.
    If no grouping_filter is provided it returns quantities for all
        products.

    The query return the location as first column, after the fields in
        grouping, and the last column is the quantity.
    """
    pool = Pool()
    Rule = pool.get('ir.rule')
    Location = pool.get('stock.location')
    Date = pool.get('ir.date')
    Period = pool.get('stock.period')
    Move = pool.get('stock.move')
    Product = pool.get('product.product')
    Template = pool.get('product.template')
    move = Move.__table__()
    product = Product.__table__()
    template = Template.__table__()
    today = Date.today()

    if not location_ids:
        return None
    # Copy: the context is mutated below (stock_date_end default).
    context = Transaction().context.copy()

    for field in grouping:
        if field not in Move._fields:
            raise ValueError('"%s" has no field "%s"' % (Move, field))
    assert (grouping_filter is None
        or len(grouping_filter) == len(grouping))
    move_rule_query = Rule.domain_get('stock.move')

    PeriodCache = Period.get_cache(grouping)
    period = None
    if PeriodCache:
        period_cache = PeriodCache.__table__()

    if not context.get('stock_date_end'):
        context['stock_date_end'] = datetime.date.max

    # date end in the past or today: filter on state done
    if (context['stock_date_end'] < today
            or (context['stock_date_end'] == today
                and not context.get('forecast'))):
        # 'assigned' counts as done only when stock_assign is set;
        # otherwise the list degrades to ['done', 'done'].
        state_date_clause = (
            move.state.in_(['done',
                    context.get('stock_assign') and 'assigned'
                    or 'done'])
            & (
                (
                    # == None is intentional: SQL IS NULL expression.
                    (move.effective_date == None)
                    & (move.planned_date <= context['stock_date_end'])
                    )
                | (move.effective_date <= context['stock_date_end'])
                )
            )
    # future date end: filter move on state done and date
    # before today, or on all state and date between today and
    # date_end.
    else:
        state_date_clause = (
            (move.state.in_(['done',
                        context.get('stock_assign') and 'assigned'
                        or 'done'])
                & (
                    (
                        (move.effective_date == None)
                        & (move.planned_date <= today)
                        )
                    | (move.effective_date <= today)
                    )
                )
            | (move.state.in_(['done', 'assigned', 'draft'])
                & (
                    (
                        (move.effective_date == None)
                        & (Coalesce(move.planned_date,
                                datetime.date.max)
                            <= context['stock_date_end'])
                        & (Coalesce(move.planned_date,
                                datetime.date.max)
                            >= today)
                        )
                    | (
                        (move.effective_date
                            <= context['stock_date_end'])
                        & (move.effective_date >= today)
                        )
                    )
                )
            )

    if context.get('stock_date_start'):
        # Delta mode: also bound the moves from below.
        if context['stock_date_start'] > today:
            state_date_clause &= (
                move.state.in_(['done', 'assigned', 'draft'])
                & (
                    (
                        (move.effective_date == None)
                        & (
                            (move.planned_date
                                >= context['stock_date_start'])
                            | (move.planned_date == None)
                            )
                        )
                    | (move.effective_date
                        >= context['stock_date_start'])
                    )
                )
        else:
            state_date_clause &= (
                (
                    move.state.in_(['done', 'assigned', 'draft'])
                    & (
                        (
                            (move.effective_date == None)
                            & (
                                (move.planned_date >= today)
                                | (move.planned_date == None)
                                )
                            )
                        | (move.effective_date >= today)
                        )
                    )
                | (
                    move.state.in_(['done',
                            context.get('stock_assign') and 'assigned'
                            or 'done'])
                    & (
                        (
                            (move.effective_date == None)
                            & (
                                (
                                    (move.planned_date
                                        >= context['stock_date_start'])
                                    & (move.planned_date < today)
                                    )
                                | (move.planned_date == None)
                                )
                            )
                        | (
                            (move.effective_date
                                >= context['stock_date_start'])
                            & (move.effective_date < today)
                            )
                        )
                    )
                )
    elif PeriodCache:
        # No start date: start from the latest closed period's cache
        # and only sum moves after its date.
        periods = Period.search([
                ('date', '<', context['stock_date_end']),
                ('state', '=', 'closed'),
                ], order=[('date', 'DESC')], limit=1)
        if periods:
            period, = periods
            state_date_clause &= (
                Coalesce(move.effective_date, move.planned_date,
                    datetime.date.max) > period.date)

    if with_childs:
        # Child locations resolved as a sub-query, not materialized.
        location_query = Location.search([
                ('parent', 'child_of', location_ids),
                ], query=True, order=[])
    else:
        location_query = location_ids[:]

    from_ = move
    if PeriodCache:
        from_period = period_cache
    if grouping_filter and any(grouping_filter):
        where = where_period = Literal(True)
        for fieldname, grouping_ids in zip(grouping, grouping_filter):
            if not grouping_ids:
                continue
            column = Column(move, fieldname)
            if PeriodCache:
                cache_column = Column(period_cache, fieldname)
            # Numeric ids use reduce_ids (range-optimized clauses);
            # anything else falls back to a plain IN.
            # NOTE: `long` implies this code targets Python 2.
            if isinstance(grouping_ids[0],
                    (int, long, float, Decimal)):
                where &= reduce_ids(column, grouping_ids)
                if PeriodCache:
                    where_period &= reduce_ids(
                        cache_column, grouping_ids)
            else:
                where &= column.in_(grouping_ids)
                if PeriodCache:
                    where_period &= cache_column.in_(grouping_ids)
    else:
        # No filter: restrict to active templates instead, which
        # requires joining product and template in.
        where = where_period = template.active == True
        from_ = from_.join(product,
            condition=move.product == product.id)
        from_ = from_.join(template,
            condition=product.template == template.id)
        if PeriodCache:
            from_period = from_period.join(product,
                condition=period_cache.product == product.id)
            from_period = from_period.join(template,
                condition=product.template == template.id)

    if context.get('stock_destinations'):
        destinations = context['stock_destinations']
        dest_clause_from = move.from_location.in_(destinations)
        dest_clause_to = move.to_location.in_(destinations)
        if PeriodCache:
            dest_clause_period = period_cache.location.in_(
                destinations)
    else:
        dest_clause_from = dest_clause_to = dest_clause_period = \
            Literal(True)

    # The main select clause is a union between three similar
    # subqueries. One that sums incoming moves towards locations, one
    # that sums outgoing moves and one for the period cache. UNION ALL
    # is used because we already know that there will be no
    # duplicates.
    move_keys = [Column(move, key).as_(key) for key in grouping]
    query = from_.select(move.to_location.as_('location'),
        Sum(move.internal_quantity).as_('quantity'),
        *move_keys,
        where=state_date_clause
        & where
        & move.to_location.in_(location_query)
        & (move.id.in_(move_rule_query) if move_rule_query
            else Literal(True))
        & dest_clause_from,
        group_by=[move.to_location] + move_keys)
    query = Union(query, from_.select(
            move.from_location.as_('location'),
            (-Sum(move.internal_quantity)).as_('quantity'),
            *move_keys,
            where=state_date_clause
            & where
            & move.from_location.in_(location_query)
            & (move.id.in_(move_rule_query) if move_rule_query
                else Literal(True))
            & dest_clause_to,
            group_by=[move.from_location] + move_keys),
        all_=True)
    if PeriodCache:
        period_keys = [Column(period_cache, key).as_(key)
            for key in grouping]
        query = Union(query, from_period.select(
                period_cache.location.as_('location'),
                period_cache.internal_quantity.as_('quantity'),
                *period_keys,
                where=(period_cache.period
                    == (period.id if period else None))
                & where_period
                & period_cache.location.in_(location_query)
                & dest_clause_period),
            all_=True)
    # Final aggregation over the union: net quantity per location and
    # grouping key.
    query_keys = [Column(query, key).as_(key) for key in grouping]
    columns = ([query.location.as_('location')]
        + query_keys
        + [Sum(query.quantity).as_('quantity')])
    query = query.select(*columns,
        group_by=[query.location] + query_keys)
    return query