def init(cursor):
    # Initialize a fresh (SQLite) database: execute the raw schema and
    # seed the base modules into ir_module / ir_module_dependency.
    from trytond.modules import get_module_info
    sql_file = os.path.join(os.path.dirname(__file__), 'init.sql')
    with open(sql_file) as fp:
        # SQLite's execute() only accepts one statement per call, so run
        # the schema statement by statement.
        for line in fp.read().split(';'):
            if (len(line) > 0) and (not line.isspace()):
                cursor.execute(line)
    ir_module = Table('ir_module')
    ir_module_dependency = Table('ir_module_dependency')
    for module in ('ir', 'res', 'webdav'):
        state = 'uninstalled'
        if module in ('ir', 'res'):
            # The kernel modules are always installed
            state = 'to install'
        info = get_module_info(module)
        insert = ir_module.insert(
            [ir_module.create_uid, ir_module.create_date,
                ir_module.name, ir_module.state],
            [[0, CurrentTimestamp(), module, state]])
        cursor.execute(*insert)
        # SQLite-specific way to fetch the id of the inserted row
        cursor.execute('SELECT last_insert_rowid()')
        module_id, = cursor.fetchone()
        for dependency in info.get('depends', []):
            insert = ir_module_dependency.insert(
                [ir_module_dependency.create_uid,
                    ir_module_dependency.create_date,
                    ir_module_dependency.module,
                    ir_module_dependency.name],
                [[0, CurrentTimestamp(), module_id, dependency]])
            cursor.execute(*insert)
def init(self):
    # Initialize a fresh SQLite database: execute the raw schema and
    # seed the base modules into ir_module / ir_module_dependency.
    from trytond.modules import get_module_info
    # Make python-sql render queries with this backend's paramstyle
    Flavor.set(self.flavor)
    with self.get_connection() as conn:
        cursor = conn.cursor()
        sql_file = os.path.join(os.path.dirname(__file__), 'init.sql')
        with open(sql_file) as fp:
            # SQLite's execute() only accepts one statement per call
            for line in fp.read().split(';'):
                if (len(line) > 0) and (not line.isspace()):
                    cursor.execute(line)
        ir_module = Table('ir_module')
        ir_module_dependency = Table('ir_module_dependency')
        for module in ('ir', 'res'):
            state = 'not activated'
            if module in ('ir', 'res'):
                # The kernel modules are always activated
                state = 'to activate'
            info = get_module_info(module)
            insert = ir_module.insert(
                [ir_module.create_uid, ir_module.create_date,
                    ir_module.name, ir_module.state],
                [[0, CurrentTimestamp(), module, state]])
            cursor.execute(*insert)
            # SQLite-specific way to fetch the id of the inserted row
            cursor.execute('SELECT last_insert_rowid()')
            module_id, = cursor.fetchone()
            for dependency in info.get('depends', []):
                insert = ir_module_dependency.insert(
                    [ir_module_dependency.create_uid,
                        ir_module_dependency.create_date,
                        ir_module_dependency.module,
                        ir_module_dependency.name,
                        ],
                    [[0, CurrentTimestamp(), module_id, dependency]])
                cursor.execute(*insert)
        conn.commit()
def pull(cls, database, connection, name=None):
    """Dequeue the next ready task from the queue.

    Return a ``(task_id, seconds)`` pair: ``task_id`` is the dequeued
    task id (or None when nothing is ready) and ``seconds`` the delay
    until the next scheduled task becomes ready (or None).  When no task
    is available and the backend supports notifications, start listening
    on the class channel so the caller can wait for a wake-up.
    """
    cursor = connection.cursor()
    queue = cls.__table__()
    # All pending (not yet dequeued) tasks, optionally filtered by name
    candidates = With('id', 'scheduled_at', 'expected_at',
        query=queue.select(
            queue.id,
            queue.scheduled_at,
            queue.expected_at,
            where=((queue.name == name) if name else Literal(True))
            & (queue.dequeued_at == Null),
            order_by=[
                queue.scheduled_at.nulls_first,
                queue.expected_at.nulls_first]))
    # The single best candidate that is ready now and can be locked
    selected = With('id', query=candidates.select(
            candidates.id,
            where=((candidates.scheduled_at <= CurrentTimestamp())
                | (candidates.scheduled_at == Null))
            & database.lock_id(candidates.id),
            order_by=[
                candidates.scheduled_at.nulls_first,
                candidates.expected_at.nulls_first],
            limit=1))
    # Seconds until the next future task becomes ready
    next_timeout = With('seconds', query=candidates.select(
            Min(Extract('second',
                    candidates.scheduled_at - CurrentTimestamp())),
            where=candidates.scheduled_at >= CurrentTimestamp()))

    task_id, seconds = None, None
    if database.has_returning():
        # Dequeue and fetch id + timeout in a single round-trip
        query = queue.update([queue.dequeued_at], [CurrentTimestamp()],
            where=queue.id == selected.select(selected.id),
            with_=[candidates, selected, next_timeout],
            returning=[
                queue.id, next_timeout.select(next_timeout.seconds)])
        cursor.execute(*query)
        row = cursor.fetchone()
        if row:
            task_id, seconds = row
    else:
        query = queue.select(queue.id,
            where=queue.id == selected.select(selected.id),
            with_=[candidates, selected])
        cursor.execute(*query)
        row = cursor.fetchone()
        if row:
            task_id, = row
            query = queue.update([queue.dequeued_at], [CurrentTimestamp()],
                where=queue.id == task_id)
            cursor.execute(*query)
        query = next_timeout.select(next_timeout.seconds)
        cursor.execute(*query)
        row = cursor.fetchone()
        if row:
            seconds, = row

    if not task_id and database.has_channel():
        # LISTEN takes an SQL identifier which can not be passed as a
        # query parameter (the driver would quote it as a string
        # literal, yielding invalid SQL), so the channel name is
        # interpolated directly — same pattern as the NOTIFY emitters.
        cursor.execute('LISTEN "%s"' % cls.__name__)
    return task_id, seconds
def table_query(cls):
    # Build the query listing, per date/product/company, the moves that
    # cross the warehouse boundary (either direction), plus a single
    # synthetic "today" row so the series always has an anchor date.
    pool = Pool()
    Move = pool.get('stock.move')
    Location = pool.get('stock.location')
    Product = pool.get('product.product')
    Date = pool.get('ir.date')
    move = from_ = Move.__table__()
    context = Transaction().context
    today = Date.today()
    if context.get('product_template') is not None:
        product = Product.__table__()
        from_ = move.join(product, condition=move.product == product.id)
        product_clause = (product.template == context['product_template'])
    else:
        # -1 is an impossible id: no row matches without proper context
        product_clause = move.product == context.get('product', -1)
    if 'warehouse' in context:
        warehouse = Location(context.get('warehouse'))
        if context.get('stock_skip_warehouse'):
            # Only consider the storage zone of the warehouse
            location_id = warehouse.storage_location.id
        else:
            location_id = warehouse.id
    else:
        location_id = -1
    # CTE enumerating every location inside the warehouse
    warehouse = With('id', query=Location.search([
                ('parent', 'child_of', [location_id]),
                ], query=True, order=[]))
    date_column = Coalesce(move.effective_date, move.planned_date)
    return (from_.select(
            Max(move.id).as_('id'),
            Literal(0).as_('create_uid'),
            CurrentTimestamp().as_('create_date'),
            Literal(None).as_('write_uid'),
            Literal(None).as_('write_date'),
            date_column.as_('date'),
            move.company.as_('company'),
            # Keep moves entering XOR leaving the warehouse; past moves
            # only when done, future moves regardless of state.
            where=product_clause
            & ((move.from_location.in_(warehouse.select(warehouse.id))
                    & ~move.to_location.in_(warehouse.select(warehouse.id)))
                | (~move.from_location.in_(warehouse.select(warehouse.id))
                    & move.to_location.in_(warehouse.select(warehouse.id))))
            & ((date_column < today) & (move.state == 'done')
                | (date_column >= today)),
            group_by=(date_column, move.product, move.company),
            with_=warehouse)
        # Union with the anchor row for today
        | Select([
                Literal(0).as_('id'),
                Literal(0).as_('create_uid'),
                CurrentTimestamp().as_('create_date'),
                Literal(None).as_('write_uid'),
                Literal(None).as_('write_date'),
                Literal(today).as_('date'),
                Literal(context.get('company', -1)).as_('company'),
                ]))
def commit(cls, transaction):
    """Flush the cache resets registered on transaction.

    Either broadcast them through the backend channel (NOTIFY) or
    persist them into the cache table so other processes detect the
    invalidation through the stored timestamp.
    """
    table = Table(cls._table)
    reset = cls._reset.pop(transaction, None)
    if not reset:
        return
    database = transaction.database
    dbname = database.name
    if not _clear_timeout and transaction.database.has_channel():
        with transaction.connection.cursor() as cursor:
            # The count computed as
            # 8000 (max notify size) / 64 (max name data len)
            for sub_reset in grouped_slice(reset, 125):
                cursor.execute(
                    'NOTIFY "%s", %%s' % cls._channel,
                    (json.dumps(list(sub_reset), separators=(',', ':')),))
    else:
        # Use a dedicated autocommit connection so the invalidation is
        # visible to other processes immediately.
        connection = database.get_connection(
            readonly=False, autocommit=True)
        try:
            with connection.cursor() as cursor:
                for name in reset:
                    cursor.execute(*table.select(
                            table.name, table.id, table.timestamp,
                            where=table.name == name,
                            limit=1))
                    if cursor.fetchone():
                        # It would be better to insert only
                        cursor.execute(*table.update(
                                [table.timestamp], [CurrentTimestamp()],
                                where=table.name == name))
                    else:
                        cursor.execute(*table.insert(
                                [table.timestamp, table.name],
                                [[CurrentTimestamp(), name]]))
                    # Fetch back the stored (cast) timestamp to clear the
                    # local instance.  The previous duplicate un-cast
                    # SELECT, whose result was immediately overwritten,
                    # has been removed.
                    cursor.execute(*table.select(
                            _cast(Max(table.timestamp)),
                            where=table.name == name))
                    timestamp, = cursor.fetchone()
                    inst = cls._instances[name]
                    inst._clear(dbname, timestamp)
            connection.commit()
        finally:
            database.put_connection(connection)
        cls._clean_last = datetime.now()
    reset.clear()
def commit(cls, transaction):
    """Flush the cache resets registered on transaction.

    Variant chunking the NOTIFY payload in groups of 10 names
    (see https://bugs.tryton.org/issue8781); falls back to persisting
    the resets into the cache table when no channel is available.
    """
    table = Table(cls._table)
    reset = cls._reset.setdefault(transaction, set())
    if not reset:
        return
    database = transaction.database
    dbname = database.name
    if not _clear_timeout and transaction.database.has_channel():
        with transaction.connection.cursor() as cursor:
            # JCA: Fix for https://bugs.tryton.org/issue8781
            resets = list(reset)
            for i in range(0, len(resets), 10):
                cursor.execute('NOTIFY "%s", %%s' % cls._channel,
                    (json.dumps(resets[i:i + 10],
                            separators=(',', ':')), ))
    else:
        connection = database.get_connection(readonly=False,
            autocommit=True)
        try:
            with connection.cursor() as cursor:
                for name in reset:
                    cursor.execute(*table.select(
                            table.name, table.id, table.timestamp,
                            where=table.name == name,
                            limit=1))
                    if cursor.fetchone():
                        # It would be better to insert only
                        cursor.execute(*table.update(
                                [table.timestamp], [CurrentTimestamp()],
                                where=table.name == name))
                    else:
                        cursor.execute(
                            *table.insert([table.timestamp, table.name],
                                [[CurrentTimestamp(), name]]))
                    # Fetch back the stored (cast) timestamp to clear the
                    # local instance.  The previous duplicate un-cast
                    # SELECT, whose result was immediately overwritten,
                    # has been removed.
                    cursor.execute(
                        *table.select(_cast(Max(table.timestamp)),
                            where=table.name == name))
                    timestamp, = cursor.fetchone()
                    inst = cls._instances[name]
                    inst._clear(dbname, timestamp)
            connection.commit()
        finally:
            database.put_connection(connection)
    reset.clear()
def resets_cls(cls, dbname, cursor, table):
    """Persist the pending cache resets of dbname into the cache table.

    Each pending name gets its timestamp refreshed (or a row created),
    then the pending set is emptied, all under the resets lock.
    """
    with cls._resets_lock:
        pending = cls._resets.setdefault(dbname, set())
        for name in pending:
            cursor.execute(
                *table.select(table.name, where=table.name == name))
            row = cursor.fetchone()
            if row:
                # It would be better to insert only
                cursor.execute(*table.update(
                        [table.timestamp], [CurrentTimestamp()],
                        where=table.name == name))
            else:
                cursor.execute(*table.insert(
                        [table.timestamp, table.name],
                        [[CurrentTimestamp(), name]]))
        pending.clear()
def table_query():
    # Build the query aggregating, per date, the stock moves of the
    # context's product that enter or leave the context's warehouse.
    pool = Pool()
    Move = pool.get('stock.move')
    Location = pool.get('stock.location')
    move = Move.__table__()
    product_id = Transaction().context.get('product')
    warehouse_id = Transaction().context.get('warehouse', -1)
    # Sub-query enumerating every location inside the warehouse
    warehouse_query = Location.search([
            ('parent', 'child_of', [warehouse_id]),
            ], query=True, order=[])
    date_column = Coalesce(move.effective_date, move.planned_date
        ).as_('date')
    return move.select(
        Max(move.id).as_('id'),
        Literal(0).as_('create_uid'),
        CurrentTimestamp().as_('create_date'),
        Literal(None).as_('write_uid'),
        Literal(None).as_('write_date'),
        date_column,
        where=(move.product == product_id)
        & (move.from_location.in_(warehouse_query)
            | move.to_location.in_(warehouse_query))
        # Ignore moves that have neither effective nor planned date
        & (Coalesce(move.effective_date, move.planned_date) != Null),
        group_by=(date_column, move.product))
def __register__(cls, module_name):
    # Register the model's table and run the data migrations.
    Identifier = Pool().get('party.identifier')
    cursor = Transaction().connection.cursor()
    TableHandler = backend.get('TableHandler')
    sql_table = cls.__table__()
    identifier = Identifier.__table__()
    super(Party, cls).__register__(module_name)
    table = TableHandler(cls, module_name)
    # Migration from 4.0: Move sepa_creditor_identifier to identifier
    if table.column_exist('sepa_creditor_identifier'):
        # INSERT ... SELECT copying every non-empty identifier over to
        # the party.identifier table with type 'sepa'.
        select = sql_table.select(
            Literal(0),
            CurrentTimestamp(),
            sql_table.id,
            Literal('sepa'),
            sql_table.sepa_creditor_identifier,
            where=((sql_table.sepa_creditor_identifier != Null)
                & (sql_table.sepa_creditor_identifier != "")))
        cursor.execute(*identifier.insert(
                columns=[
                    identifier.create_uid, identifier.create_date,
                    identifier.party, identifier.type, identifier.code],
                values=select))
        # The old column is no longer needed once copied
        table.drop_column('sepa_creditor_identifier')
def set_number(cls, invoices):
    """Set the invoice numbers and record when each was assigned."""
    # Remember which invoices had no number or no assignment time yet,
    # before the parent implementation fills the numbers in.
    pending = [
        invoice for invoice in invoices
        if not invoice.number or not invoice.numbered_at]
    super(Invoice, cls).set_number(invoices)
    if pending:
        cls.write(pending, {'numbered_at': CurrentTimestamp()})
def _columns(cls, tables, withs):
    # Column expressions of the margin report: cost, revenue, profit
    # and relative margin, rounded to the company currency digits.
    move = tables['move']
    from_location = tables['move.from_location']
    to_location = tables['move.to_location']
    currency = tables['move.company.currency']
    # Returned moves (customer-side back to supplier-side locations)
    # count negatively.
    sign = Case(
        (from_location.type.in_(cls._to_location_types())
            & to_location.type.in_(cls._from_location_types()), -1),
        else_=1)
    cost = cls._column_cost(tables, withs, sign)
    revenue = cls._column_revenue(tables, withs, sign)
    profit = revenue - cost
    # Guard against division by zero when there is no revenue
    margin = Case((revenue != 0, profit / revenue), else_=Null)
    return [
        cls._column_id(tables, withs).as_('id'),
        Literal(0).as_('create_uid'),
        CurrentTimestamp().as_('create_date'),
        cls.write_uid.sql_cast(Literal(Null)).as_('write_uid'),
        cls.write_date.sql_cast(Literal(Null)).as_('write_date'),
        move.company.as_('company'),
        cls.cost.sql_cast(Round(cost, currency.digits)).as_('cost'),
        cls.revenue.sql_cast(
            Round(revenue, currency.digits)).as_('revenue'),
        cls.profit.sql_cast(Round(profit, currency.digits)).as_('profit'),
        cls.margin.sql_cast(
            Round(margin, cls.margin.digits[1])).as_('margin'),
        currency.id.as_('currency'),
        ]
def table_query(cls):
    # Combine attendance rows with aggregated timesheet durations via a
    # full outer join so days present in only one source still show up.
    pool = Pool()
    Timesheet = pool.get('timesheet.line')
    line = Timesheet.__table__()
    # Odd ids for timesheet-only rows, avoiding clashes with the even
    # attendance ids.
    timesheet = line.select(
        Min(line.id * 2 + 1).as_('id'),
        line.company.as_('company'),
        line.employee.as_('employee'),
        Sum(line.duration).as_('duration'),
        line.date.as_('date'),
        group_by=[line.company, line.employee, line.date])
    attendance = super().table_query()
    return (attendance.join(
            timesheet,
            # SQLite has no FULL JOIN; degrade to LEFT there
            'FULL' if backend.name != 'sqlite' else 'LEFT',
            condition=(attendance.company == timesheet.company)
            & (attendance.employee == timesheet.employee)
            & (attendance.date == timesheet.date)).select(
            Coalesce(attendance.id, timesheet.id).as_('id'),
            Literal(0).as_('create_uid'),
            CurrentTimestamp().as_('create_date'),
            cls.write_uid.sql_cast(Literal(Null)).as_('write_uid'),
            cls.write_date.sql_cast(Literal(Null)).as_('write_date'),
            Coalesce(attendance.company, timesheet.company
                ).as_('company'),
            Coalesce(attendance.employee, timesheet.employee
                ).as_('employee'),
            attendance.duration.as_('duration'),
            timesheet.duration.as_('timesheet_duration'),
            Coalesce(attendance.date, timesheet.date).as_('date'),
            ))
def table_query():
    # Build the query aggregating, per date, the stock moves of the
    # context's product (or product template) that enter or leave the
    # context's warehouse.
    pool = Pool()
    Move = pool.get('stock.move')
    Location = pool.get('stock.location')
    Product = pool.get('product.product')
    move = from_ = Move.__table__()
    context = Transaction().context
    if context.get('product_template') is not None:
        product = Product.__table__()
        from_ = move.join(product, condition=move.product == product.id)
        product_clause = (product.template == context['product_template'])
    else:
        # -1 is an impossible id: no row matches without proper context
        product_clause = move.product == context.get('product', -1)
    warehouse_id = context.get('warehouse', -1)
    # Sub-query enumerating every location inside the warehouse
    warehouse_query = Location.search([
            ('parent', 'child_of', [warehouse_id]),
            ], query=True, order=[])
    date_column = Coalesce(move.effective_date, move.planned_date
        ).as_('date')
    return from_.select(
        Max(move.id).as_('id'),
        Literal(0).as_('create_uid'),
        CurrentTimestamp().as_('create_date'),
        Literal(None).as_('write_uid'),
        Literal(None).as_('write_date'),
        date_column,
        where=product_clause
        & (move.from_location.in_(warehouse_query)
            | move.to_location.in_(warehouse_query))
        # Ignore moves that have neither effective nor planned date
        & (Coalesce(move.effective_date, move.planned_date) != Null),
        group_by=(date_column, move.product))
def test_create_with_sql_value(self):
    "Test create time with SQL value"
    Time = Pool().get('test.time_precision')
    # Cast CURRENT_TIMESTAMP to the SQL type of the Time field
    time, = Time.create([{'time': Time.time.sql_cast(CurrentTimestamp())}])
    # assert_ is a deprecated alias removed in Python 3.12;
    # assertTrue is the supported spelling.
    self.assertTrue(time.time)
def test_set_sql_value(self):
    "Test cannot set SQL value"
    DateTime = Pool().get('test.datetime')
    record = DateTime()
    # Assigning a raw SQL expression to a field must be rejected
    with self.assertRaises(ValueError):
        record.datetime = CurrentTimestamp()
def test_set_sql_value(self):
    "Test cannot set SQL value"
    Time = Pool().get('test.time')
    record = Time()
    # Even a properly cast SQL expression must be rejected on assignment
    with self.assertRaises(ValueError):
        record.time = Time.time.sql_cast(CurrentTimestamp())
def resets(dbname):
    # Flush the pending cache resets of dbname into the ir_cache table,
    # using a dedicated cache-less transaction and holding the resets
    # lock for the whole flush.
    table = Table('ir_cache')
    with Transaction().new_transaction(_nocache=True) as transaction,\
            transaction.connection.cursor() as cursor,\
            Cache._resets_lock:
        Cache._resets.setdefault(dbname, set())
        for name in Cache._resets[dbname]:
            cursor.execute(*table.select(
                    table.name, where=table.name == name, limit=1))
            if cursor.fetchone():
                # It would be better to insert only
                cursor.execute(
                    *table.update([table.timestamp], [CurrentTimestamp()],
                        where=table.name == name))
            else:
                cursor.execute(*table.insert(
                        [table.timestamp, table.name],
                        [[CurrentTimestamp(), name]]))
        Cache._resets[dbname].clear()
def test_create_with_sql_value(self):
    "Test create datetime with SQL value"
    DateTime = Pool().get('test.datetime')
    # A properly cast SQL expression is accepted at creation time
    value = DateTime.datetime.sql_cast(CurrentTimestamp())
    record, = DateTime.create([{'datetime': value}])
    self.assertTrue(record.datetime)
def add(cls, user):
    """Create a session for user and return its key.

    Expired sessions are garbage-collected first.
    """
    cursor = Transaction().connection.cursor()
    table = cls.__table__()
    # Delete sessions whose age exceeds the timeout.  The age must be
    # computed as "now - last write": the original operand order
    # (write - now) is negative for stale sessions, so the comparison
    # never matched and expired sessions were never removed.
    cursor.execute(*table.delete(
            where=(CurrentTimestamp()
                - Coalesce(table.write_date, table.create_date))
            > cls.timeout()))
    session = cls(user=user)
    session.save()
    return session.key
def test_current_timestamp_static_transaction(self):
    "Test CURRENT_TIMESTAMP is static during transaction"
    statement = Select([CurrentTimestamp()])
    cursor = Transaction().connection.cursor()
    # Fetch the timestamp twice within the same transaction
    values = []
    for _ in range(2):
        cursor.execute(*statement)
        value, = cursor.fetchone()
        values.append(value)
    self.assertEqual(values[0], values[1])
def test_current_timestamp_reset_after_commit(self):
    "Test CURRENT_TIMESTAMP is reset after commit"
    statement = Select([CurrentTimestamp()])
    transaction = Transaction()
    cursor = transaction.connection.cursor()
    cursor.execute(*statement)
    before_commit, = cursor.fetchone()
    # Committing starts a new transaction, so the timestamp moves on
    transaction.commit()
    cursor.execute(*statement)
    after_commit, = cursor.fetchone()
    self.assertNotEqual(before_commit, after_commit)
def test_current_timestamp_different_transaction(self):
    "Test CURRENT_TIMESTAMP is different per transaction"
    statement = Select([CurrentTimestamp()])
    outer_cursor = Transaction().connection.cursor()
    outer_cursor.execute(*statement)
    outer_value, = outer_cursor.fetchone()
    # A nested, independent transaction must see its own timestamp
    with Transaction().new_transaction() as inner:
        inner_cursor = inner.connection.cursor()
        inner_cursor.execute(*statement)
        inner_value, = inner_cursor.fetchone()
        self.assertNotEqual(outer_value, inner_value)
def _get_login(cls, login):
    # Return (id, password_hash, password_reset) for login, caching the
    # result to avoid hitting the database on every authentication.
    result = cls._get_login_cache.get(login)
    if result:
        return result
    cursor = Transaction().connection.cursor()
    table = cls.__table__()
    cursor.execute(*table.select(table.id, table.password_hash,
            # Expose the reset password only while it has not expired
            Case(
                (table.password_reset_expire > CurrentTimestamp(),
                    table.password_reset),
                else_=None),
            where=(table.login == login) & (table.active == True)))
    # Unknown or inactive logins are cached as an all-None triple
    result = cursor.fetchone() or (None, None, None)
    cls._get_login_cache.set(login, result)
    return result
def table_query(cls):
    """Aggregate sheet lines into one row per company/employee/date."""
    pool = Pool()
    Line = pool.get('attendance.sheet.line')
    line = Line.__table__()
    # Even ids keep the sheet rows distinct from the (odd) line ids
    columns = [
        (Min(line.id * 2)).as_('id'),
        Literal(0).as_('create_uid'),
        CurrentTimestamp().as_('create_date'),
        cls.write_uid.sql_cast(Literal(Null)).as_('write_uid'),
        cls.write_date.sql_cast(Literal(Null)).as_('write_date'),
        line.company.as_('company'),
        line.employee.as_('employee'),
        Sum(line.duration).as_('duration'),
        line.date.as_('date'),
        ]
    return line.select(
        *columns,
        group_by=[line.company, line.employee, line.date])
def _columns(cls, tables, withs):
    # Column expressions of the sale reporting: revenue converted to
    # the company currency, plus the count of distinct orders.
    line = tables['line']
    currency_company = withs['currency_company']
    currency_sale = withs['currency_sale']
    # Convert from the sale currency into the company currency using
    # the rates provided by the CTEs.
    revenue = cls.revenue.sql_cast(
        Sum(line.quantity * line.unit_price
            * currency_company.rate / currency_sale.rate))
    return [
        cls._column_id(tables, withs).as_('id'),
        Literal(0).as_('create_uid'),
        CurrentTimestamp().as_('create_date'),
        cls.write_uid.sql_cast(Literal(Null)).as_('write_uid'),
        cls.write_date.sql_cast(Literal(Null)).as_('write_date'),
        line.company.as_('company'),
        revenue.as_('revenue'),
        Count(line.order, distinct=True).as_('number'),
        ]
def table_query(cls):
    """One row per product, carrying its template's default UoM."""
    pool = Pool()
    Product = pool.get('product.product')
    Template = pool.get('product.template')
    product = Product.__table__()
    template = Template.__table__()
    joined = product.join(
        template, condition=product.template == template.id)
    return joined.select(
        product.id.as_('id'),
        Literal(0).as_('create_uid'),
        CurrentTimestamp().as_('create_date'),
        cls.write_uid.sql_cast(Literal(Null)).as_('write_uid'),
        cls.write_date.sql_cast(Literal(Null)).as_('write_date'),
        product.id.as_('product'),
        template.default_uom.as_('unit'),
        )
def load_module_graph(graph, pool, update=None, lang=None):
    # Load/upgrade the modules of graph into pool: register model
    # classes, import XML data and translations, and update the state
    # of each module in ir_module.
    if lang is None:
        lang = [config.get('database', 'language')]
    if update is None:
        update = []
    modules_todo = []
    models_to_update_history = set()
    with Transaction().connection.cursor() as cursor:
        modules = [x.name for x in graph]
        cursor.execute(*ir_module.select(ir_module.name, ir_module.state,
                where=ir_module.name.in_(modules)))
        module2state = dict(cursor.fetchall())
        for package in graph:
            module = package.name
            if module not in MODULES:
                continue
            logger.info(module)
            classes = pool.fill(module)
            if update:
                pool.setup(classes)
            package_state = module2state.get(module, 'uninstalled')
            if (is_module_to_install(module, update)
                    or (update
                        and package_state in ('to install', 'to upgrade'))):
                if package_state not in ('to install', 'to upgrade'):
                    if package_state == 'installed':
                        package_state = 'to upgrade'
                    elif package_state != 'to remove':
                        package_state = 'to install'
                # Propagate the pending state down to dependent modules
                for child in package.childs:
                    module2state[child.name] = package_state
                for type in classes.keys():
                    for cls in classes[type]:
                        logger.info('%s:register %s', module, cls.__name__)
                        cls.__register__(module)
                # Remember models with history tables to refresh later
                for model in classes['model']:
                    if hasattr(model, '_history'):
                        models_to_update_history.add(model.__name__)
                # Instanciate a new parser for the package:
                tryton_parser = convert.TrytondXmlHandler(
                    pool=pool, module=module, module_state=package_state)
                for filename in package.info.get('xml', []):
                    filename = filename.replace('/', os.sep)
                    logger.info('%s:loading %s', module, filename)
                    # Feed the parser with xml content:
                    with tools.file_open(OPJ(module, filename), 'rb') as fp:
                        tryton_parser.parse_xmlstream(fp)
                modules_todo.append(
                    (module, list(tryton_parser.to_delete)))
                # Import the module's translation catalogs
                localedir = '%s/%s' % (package.info['directory'], 'locale')
                for filename in itertools.chain(
                        iglob('%s/*.po' % localedir),
                        iglob('%s/override/*.po' % localedir)):
                    filename = filename.replace('/', os.sep)
                    lang2 = os.path.splitext(
                        os.path.basename(filename))[0]
                    if lang2 not in lang:
                        continue
                    logger.info('%s:loading %s', module,
                        filename[len(package.info['directory']) + 1:])
                    Translation = pool.get('ir.translation')
                    Translation.translation_import(lang2, module, filename)
                if package_state == 'to remove':
                    continue
                # Mark the module installed, creating its row if missing
                cursor.execute(*ir_module.select(ir_module.id,
                        where=(ir_module.name == package.name)))
                try:
                    module_id, = cursor.fetchone()
                    cursor.execute(
                        *ir_module.update([ir_module.state], ['installed'],
                            where=(ir_module.id == module_id)))
                except TypeError:
                    # fetchone() returned None: no row yet, insert one
                    cursor.execute(*ir_module.insert(
                            [ir_module.create_uid, ir_module.create_date,
                                ir_module.name, ir_module.state],
                            [[0, CurrentTimestamp(), package.name,
                                    'installed']]))
                module2state[package.name] = 'installed'
            # Commit after each module so progress survives a failure
            Transaction().connection.commit()
    if not update:
        pool.setup()
    for model_name in models_to_update_history:
        model = pool.get(model_name)
        if model._history:
            logger.info('history:update %s', model.__name__)
            model._update_history_table()
    # Vacuum :
    while modules_todo:
        (module, to_delete) = modules_todo.pop()
        convert.post_import(pool, module, to_delete)
    logger.info('all modules loaded')
def trigger_action(self, ids):
    """
    Trigger the action define on trigger for the records
    """
    pool = Pool()
    TriggerLog = pool.get('ir.trigger.log')
    Model = pool.get(self.model.model)
    # The action is stored as "<model>|<method>"
    model, method = self.action.split('|')
    ActionModel = pool.get(model)
    cursor = Transaction().connection.cursor()
    trigger_log = TriggerLog.__table__()
    # Keep only the records matching the trigger condition
    ids = [r.id for r in Model.browse(ids) if self.eval(r)]

    # Filter on limit_number
    if self.limit_number:
        new_ids = []
        for sub_ids in grouped_slice(ids):
            sub_ids = list(sub_ids)
            red_sql = reduce_ids(trigger_log.record_id, sub_ids)
            # Count how many times each record already triggered
            cursor.execute(*trigger_log.select(
                    trigger_log.record_id, Count(Literal(1)),
                    where=red_sql & (trigger_log.trigger == self.id),
                    group_by=trigger_log.record_id))
            number = dict(cursor)
            for record_id in sub_ids:
                if record_id not in number:
                    new_ids.append(record_id)
                    continue
                if number[record_id] < self.limit_number:
                    new_ids.append(record_id)
        ids = new_ids

    def cast_datetime(value):
        # Parse the "YYYY-MM-DD HH:MM:SS[.ffffff]" string that SQLite
        # returns into a datetime instance.
        datepart, timepart = value.split(" ")
        year, month, day = map(int, datepart.split("-"))
        timepart_full = timepart.split(".")
        hours, minutes, seconds = map(
            int, timepart_full[0].split(":"))
        if len(timepart_full) == 2:
            microseconds = int(timepart_full[1])
        else:
            microseconds = 0
        return datetime.datetime(
            year, month, day, hours, minutes, seconds, microseconds)

    # Filter on minimum_time_delay
    if self.minimum_time_delay:
        new_ids = []
        # Use now from the transaction to compare with create_date
        timestamp_cast = self.__class__.create_date.sql_cast
        cursor.execute(*Select([timestamp_cast(CurrentTimestamp())]))
        now, = cursor.fetchone()
        if isinstance(now, str):
            # SQLite returns the timestamp as a string
            now = cast_datetime(now)
        for sub_ids in grouped_slice(ids):
            sub_ids = list(sub_ids)
            red_sql = reduce_ids(trigger_log.record_id, sub_ids)
            # Last time each record triggered
            cursor.execute(*trigger_log.select(
                    trigger_log.record_id, Max(trigger_log.create_date),
                    where=(red_sql & (trigger_log.trigger == self.id)),
                    group_by=trigger_log.record_id))
            delay = dict(cursor)
            for record_id in sub_ids:
                if record_id not in delay:
                    new_ids.append(record_id)
                    continue
                # SQLite return string for MAX
                if isinstance(delay[record_id], str):
                    delay[record_id] = cast_datetime(delay[record_id])
                if now - delay[record_id] >= self.minimum_time_delay:
                    new_ids.append(record_id)
        ids = new_ids

    records = Model.browse(ids)
    if records:
        getattr(ActionModel, method)(records, self)

    # Log the run so limit_number / minimum_time_delay can be enforced
    # on subsequent evaluations
    if self.limit_number or self.minimum_time_delay:
        to_create = []
        for record in records:
            to_create.append({
                    'trigger': self.id,
                    'record_id': record.id,
                    })
        if to_create:
            TriggerLog.create(to_create)
def table_query(cls):
    # Pair each 'in' attendance with the employee's next attendance to
    # produce one row per presence interval, with its duration and the
    # sheet (per employee/day) it belongs to.
    pool = Pool()
    Attendance = pool.get('attendance.line')
    transaction = Transaction()
    database = transaction.database
    attendance = Attendance.__table__()
    if database.has_window_functions():
        # Window over the current and the next row (ROWS 0..1) so both
        # timestamps of the interval come from one pass.
        window = Window(
            [attendance.employee],
            order_by=[attendance.at.asc],
            frame='ROWS', start=0, end=1)
        type = NthValue(attendance.type, 1, window=window)
        from_ = NthValue(attendance.at, 1, window=window)
        to = NthValue(attendance.at, 2, window=window)
        date = NthValue(attendance.date, 1, window=window)
        query = attendance.select(
            attendance.id.as_('id'),
            attendance.company.as_('company'),
            attendance.employee.as_('employee'),
            type.as_('type'),
            from_.as_('from_'),
            to.as_('to'),
            date.as_('date'))
        sheet = (Min(query.id * 2,
                window=Window([query.employee, query.date])))
    else:
        # Fallback: correlated sub-query fetching the next attendance
        next_attendance = Attendance.__table__()
        to = next_attendance.select(
            next_attendance.at,
            where=(next_attendance.employee == attendance.employee)
            & (next_attendance.at > attendance.at),
            order_by=[next_attendance.at.asc],
            limit=1)
        query = attendance.select(
            attendance.id.as_('id'),
            attendance.company.as_('company'),
            attendance.employee.as_('employee'),
            attendance.type.as_('type'),
            attendance.at.as_('from_'),
            to.as_('to'),
            attendance.date.as_('date'))
        query2 = copy.copy(query)
        sheet = query2.select(
            Min(query2.id * 2),
            where=(query2.employee == query.employee)
            & (query2.date == query.date))
    from_ = Column(query, 'from_')
    if backend.name == 'sqlite':
        # As SQLite does not support operation on datetime
        # we convert datetime into seconds
        duration = (SQLiteStrftime('%s', query.to)
            - SQLiteStrftime('%s', from_))
    else:
        duration = query.to - from_
    return query.select(
        query.id.as_('id'),
        Literal(0).as_('create_uid'),
        CurrentTimestamp().as_('create_date'),
        cls.write_uid.sql_cast(Literal(Null)).as_('write_uid'),
        cls.write_date.sql_cast(Literal(Null)).as_('write_date'),
        query.company.as_('company'),
        query.employee.as_('employee'),
        from_.as_('from_'),
        query.to.as_('to'),
        query.date.as_('date'),
        duration.as_('duration'),
        sheet.as_('sheet'),
        # Only 'in' rows start a presence interval
        where=query.type == 'in')
def load_module_graph(graph, pool, update=None, lang=None):
    # Load/upgrade the modules of graph into pool: register model
    # classes, import XML data and translations, and update the state
    # of each module in ir_module.
    # Prevent to import backend when importing module
    from trytond.cache import Cache
    from trytond.ir.lang import get_parent_language

    if lang is None:
        lang = [config.get('database', 'language')]
    if update is None:
        update = []
    modules_todo = []
    models_to_update_history = set()

    # Load also parent languages
    lang = set(lang)
    for code in list(lang):
        while code:
            lang.add(code)
            code = get_parent_language(code)

    transaction = Transaction()
    with transaction.connection.cursor() as cursor:
        modules = [x.name for x in graph]
        module2state = dict()
        # Fetch the states in chunks to keep the IN clause bounded
        for sub_modules in tools.grouped_slice(modules):
            cursor.execute(*ir_module.select(
                    ir_module.name, ir_module.state,
                    where=ir_module.name.in_(list(sub_modules))))
            module2state.update(cursor)
        modules = set(modules)

        for node in graph:
            module = node.name
            if module not in MODULES:
                continue
            logger.info(module)
            classes = pool.fill(module, modules)
            if update:
                pool.setup(classes)
            package_state = module2state.get(module, 'not activated')
            if (is_module_to_install(module, update)
                    or (update
                        and package_state in (
                            'to activate', 'to upgrade'))):
                if package_state not in ('to activate', 'to upgrade'):
                    if package_state == 'activated':
                        package_state = 'to upgrade'
                    elif package_state != 'to remove':
                        package_state = 'to activate'
                # Propagate the pending state down to dependent modules
                for child in node:
                    module2state[child.name] = package_state
                for type in list(classes.keys()):
                    for cls in classes[type]:
                        logger.info(
                            '%s:register %s', module, cls.__name__)
                        cls.__register__(module)
                # Remember models with history tables to refresh later
                for model in classes['model']:
                    if hasattr(model, '_history'):
                        models_to_update_history.add(model.__name__)
                # Instanciate a new parser for the module
                tryton_parser = convert.TrytondXmlHandler(
                    pool, module, package_state, modules, lang)
                for filename in node.info.get('xml', []):
                    filename = filename.replace('/', os.sep)
                    logger.info('%s:loading %s', module, filename)
                    # Feed the parser with xml content:
                    with tools.file_open(
                            OPJ(module, filename), 'rb') as fp:
                        tryton_parser.parse_xmlstream(fp)
                modules_todo.append(
                    (module, list(tryton_parser.to_delete)))
                load_translations(pool, node, lang)
                if package_state == 'to remove':
                    continue
                # Mark the module activated, creating its row if missing
                cursor.execute(*ir_module.select(
                        ir_module.id,
                        where=(ir_module.name == module)))
                try:
                    module_id, = cursor.fetchone()
                    cursor.execute(*ir_module.update(
                            [ir_module.state], ['activated'],
                            where=(ir_module.id == module_id)))
                except TypeError:
                    # fetchone() returned None: no row yet, insert one
                    cursor.execute(*ir_module.insert(
                            [ir_module.create_uid, ir_module.create_date,
                                ir_module.name, ir_module.state],
                            [[0, CurrentTimestamp(), module,
                                    'activated']]))
                module2state[module] = 'activated'
            # Avoid clearing cache to prevent dead lock on ir.cache table
            Cache.rollback(transaction)
            transaction.commit()
            # Clear transaction cache to update default_factory
            transaction.cache.clear()

        if not update:
            pool.setup()
        else:
            # Remove unknown models and fields
            Model = pool.get('ir.model')
            Model.clean()
            ModelField = pool.get('ir.model.field')
            ModelField.clean()
            transaction.commit()
        pool.setup_mixin(modules)

    for model_name in models_to_update_history:
        model = pool.get(model_name)
        if model._history:
            logger.info('history:update %s', model.__name__)
            model._update_history_table()

    # Vacuum :
    while modules_todo:
        (module, to_delete) = modules_todo.pop()
        convert.post_import(pool, module, to_delete)

    # Ensure cache is clear for other instances
    Cache.clear_all()
    Cache.refresh_pool(transaction)
    logger.info('all modules loaded')