def button_process_continue(self, ids, start=False):
    """(Re)build the party/category relation for the given categories.

    For each category read: when ``start`` is set and the category is
    exclusive, the existing relations are wiped first; every party is
    then linked to the category, minus the parties rejected by the
    segmentation lines when 'sales_purchase_active' is set.  Finally the
    category is reset to the 'not running' state.  Always returns True.
    """
    cr = Transaction().cursor
    categs = self.read(ids, ['category', 'exclusif', 'party',
        'sales_purchase_active', 'profiling_active'])
    SegmentationLine = self.pool.get('ekd.crm.segmentation.line')
    for categ in categs:
        # BUGFIX: the read field is 'category'; the previous code
        # indexed categ['categ'], which is not among the read fields
        # and raised KeyError.
        category_id = categ['category'][0]
        if start and categ['exclusif']:
            cr.execute('delete from party_category_rel where category=%s',
                (category_id,))
        categ_id = categ['id']  # renamed: don't shadow builtin id()
        cr.execute('select id from res_party order by id ')
        partys = [x[0] for x in cr.fetchall()]
        if categ['sales_purchase_active']:
            cr.execute('select id from ekd_crm_segmentation_line '
                'where segmentation=%s', (categ_id,))
            line_ids = [x[0] for x in cr.fetchall()]
            # Keep only the parties accepted by the segmentation lines
            # (rebuild the list instead of the quadratic remove() loop).
            # NOTE(review): `uid` is not defined in this scope in the
            # visible code; presumably provided by the enclosing module.
            partys = [pid for pid in partys
                if SegmentationLine.test(cr, uid, line_ids, pid)]
        for party in partys:
            cr.execute('insert into party_category_rel (category,party) '
                'values (%s,%s)', (category_id, party))
            cr.commit()
        self.write([categ_id], {'state': 'not running', 'party': 0})
        cr.commit()
    return True
def get_total(cls, categories, names):
    """Return {name: {category id: value}} aggregated over the sub-tree
    of *categories*, reading each value from the matching
    sale.reporting.category record and summing it up the tree.
    """
    pool = Pool()
    ReportingCategory = pool.get('sale.reporting.category')
    table = cls.__table__()
    reporting_category = ReportingCategory.__table__()
    cursor = Transaction().connection.cursor()

    # Expand to the whole sub-tree of the requested categories.
    categories = cls.search([
            ('parent', 'child_of', [c.id for c in categories]),
            ])
    ids = [c.id for c in categories]

    parents = {}
    reporting_ids = []
    for sub_ids in grouped_slice(ids):
        sub_ids = list(sub_ids)
        cursor.execute(*table.select(table.id, table.parent,
                where=reduce_ids(table.id, sub_ids)))
        parents.update(cursor.fetchall())
        cursor.execute(*reporting_category.select(reporting_category.id,
                where=reduce_ids(reporting_category.id, sub_ids)))
        reporting_ids.extend(r for r, in cursor.fetchall())

    result = {}
    reporting_records = ReportingCategory.browse(reporting_ids)
    for name in names:
        values = dict.fromkeys(ids, 0)
        values.update(
            (r.id, getattr(r, name)) for r in reporting_records)
        result[name] = cls._sum_tree(categories, values, parents)
    return result
def validate_active(self):
    # Deactivate mandate as unit mandate on canceled state
    if (self.id > 0) and self.state == 'canceled':
        condoparties = Pool().get('condo.party').__table__()
        condopayments = Pool().get('condo.payment').__table__()
        cursor = Transaction().connection.cursor()

        # A mandate still referenced by draft/approved payments must
        # not be canceled.
        cursor.execute(*condopayments.select(condopayments.id,
                where=(condopayments.sepa_mandate == self.id)
                & ((condopayments.state == 'draft')
                    | (condopayments.state == 'approved'))))
        pending = [i for (i,) in cursor.fetchall()]
        if pending:
            self.raise_user_error(
                'Can\'t cancel mandate "%s".\n'
                'There are %s payments in draft or approved state '
                'with this mandate!',
                (self.identification, len(pending)))

        # Warn the user, then detach the mandate from every condo
        # party that still uses it as mean of payment.
        cursor.execute(*condoparties.select(condoparties.id,
                where=(condoparties.sepa_mandate == self.id)))
        party_ids = [i for (i,) in cursor.fetchall()]
        if party_ids:
            self.raise_user_warning('warn_canceled_mandate',
                'Mandate "%s" will be canceled as mean of payment '
                'in %d unit(s)/apartment(s)!',
                (self.identification, len(party_ids)))
        for sub_ids in grouped_slice(party_ids):
            # Use SQL to prevent double validate loop
            cursor.execute(*condoparties.update(
                    columns=[condoparties.sepa_mandate],
                    values=[None],
                    where=reduce_ids(condoparties.id, sub_ids)))
def _load_modules():
    """Load/upgrade the registered modules inside the current transaction.

    Reads the module list from ``ir_module`` (filtered by state, and
    extended with the requested ``update`` list), builds the dependency
    graph and loads it; on update it also purges modules flagged
    'to remove' together with their ir.model.data records, and sets the
    closed-over ``res`` flag to False when modules were removed.
    NOTE(review): this is a nested helper — ``update``, ``pool``,
    ``lang``, ``ir_module``, ``ir_model_data``, ``database_name`` and
    ``res`` come from the enclosing scope, which is not visible here.
    """
    global res
    TableHandler = backend.get('TableHandler')
    # Legacy (pre-4.0) transaction API: the cursor also exposes
    # commit()/rollback().
    cursor = Transaction().cursor

    # Migration from 3.6: remove double module
    old_table = 'ir_module_module'
    new_table = 'ir_module'
    if TableHandler.table_exist(cursor, old_table):
        TableHandler.table_rename(cursor, old_table, new_table)

    # 'to install' modules only take part in the graph when an explicit
    # update is requested.
    if update:
        cursor.execute(
            *ir_module.select(ir_module.name,
                where=ir_module.state.in_(('installed', 'to install',
                        'to upgrade', 'to remove'))))
    else:
        cursor.execute(
            *ir_module.select(ir_module.name,
                where=ir_module.state.in_(('installed', 'to upgrade',
                        'to remove'))))
    module_list = [name for (name, ) in cursor.fetchall()]
    if update:
        module_list += update
    graph = create_graph(module_list)[0]

    try:
        load_module_graph(graph, pool, update, lang)
    except Exception:
        # Abort the whole transaction on any load failure.
        cursor.rollback()
        raise

    if update:
        cursor.execute(*ir_module.select(
                ir_module.name,
                where=(ir_module.state == 'to remove')))
        fetchall = cursor.fetchall()
        if fetchall:
            for (mod_name, ) in fetchall:
                # TODO check if ressource not updated by the user
                # Delete the module's records through the ORM, newest
                # ir.model.data first.
                cursor.execute(*ir_model_data.select(
                        ir_model_data.model,
                        ir_model_data.db_id,
                        where=(ir_model_data.module == mod_name),
                        order_by=ir_model_data.id.desc))
                for rmod, rid in cursor.fetchall():
                    Model = pool.get(rmod)
                    Model.delete([Model(rid)])
                cursor.commit()
            cursor.execute(
                *ir_module.update([ir_module.state], ['uninstalled'],
                    where=(ir_module.state == 'to remove')))
            cursor.commit()
            # Signal the caller that a restart/reload is needed.
            res = False

        Module = pool.get('ir.module')
        Module.update_list()
    cursor.commit()
    Cache.resets(database_name)
def _get_analysis_domain(self, sample):
    """Return the ids of the analyses selectable for *sample*:
    valid typified analyses for its product type/matrix, the
    calculated-typification sets/groups, plus every active
    'additional' analysis.  Empty when the sample is incomplete or
    nothing is typified.
    """
    cursor = Transaction().connection.cursor()
    pool = Pool()
    Typification = pool.get('lims.typification')
    CalculatedTypification = pool.get('lims.typification.calculated')
    Analysis = pool.get('lims.analysis')

    if not sample.product_type or not sample.matrix:
        return []
    key = (sample.product_type.id, sample.matrix.id)

    # Analyses with a valid typification for this product type/matrix.
    cursor.execute('SELECT DISTINCT(analysis) '
        'FROM "' + Typification._table + '" '
        'WHERE product_type = %s '
        'AND matrix = %s '
        'AND valid', key)
    typified = [row[0] for row in cursor.fetchall()]
    if not typified:
        return []

    # Sets/groups typified through the calculated typifications.
    cursor.execute('SELECT DISTINCT(analysis) '
        'FROM "' + CalculatedTypification._table + '" '
        'WHERE product_type = %s '
        'AND matrix = %s', key)
    sets_groups = [row[0] for row in cursor.fetchall()]

    # Active additional analyses are always selectable.
    cursor.execute('SELECT id '
        'FROM "' + Analysis._table + '" '
        'WHERE behavior = \'additional\' '
        'AND state = \'active\'')
    additional = [row[0] for row in cursor.fetchall()]

    return typified + sets_groups + additional
def _update_definitions(self, columns=None, indexes=None):
    """Reload the cached column and/or index metadata of the SQLite
    table; with no arguments both caches are refreshed.
    """
    if columns is None and indexes is None:
        columns = indexes = True
    cursor = Transaction().connection.cursor()
    if columns:
        # PRAGMA table_info rows: (cid, name, type, notnull, dflt, pk).
        cursor.execute('PRAGMA table_info("' + self.table_name + '")')
        definitions = {}
        for _, name, type_, notnull, hasdef, _ in cursor.fetchall():
            name = re.sub(r'^\"|\"$', '', name)
            match = re.match(r'(\w+)(\((.*?)\))?', type_)
            if match:
                # Split "TYPE(SIZE)" declarations.
                typname = match.group(1).upper()
                size = int(match.group(3)) if match.group(3) else 0
            else:
                typname = type_.upper()
                size = -1
            definitions[name] = {
                'notnull': notnull,
                'hasdef': hasdef,
                'size': size,
                'typname': typname,
                }
        self._columns = definitions
    if indexes:
        try:
            cursor.execute('PRAGMA index_list("' + self.table_name + '")')
        except IndexError:
            # The PRAGMA sporadically raises IndexError; retry once.
            cursor.execute('PRAGMA index_list("' + self.table_name + '")')
        self._indexes = [row[1] for row in cursor.fetchall()]
def _update_definitions(self, columns=None, indexes=None):
    """Refresh the column and/or index caches from SQLite metadata
    (both when called without arguments).
    """
    if columns is None and indexes is None:
        columns = indexes = True
    cursor = Transaction().connection.cursor()

    def parse_type(declared):
        # Split a "TYPE(SIZE)" declaration into (TYPE, SIZE).
        found = re.match(r"(\w+)(\((.*?)\))?", declared)
        if not found:
            return declared.upper(), -1
        length = found.group(3)
        return found.group(1).upper(), (int(length) if length else 0)

    if columns:
        self._columns = {}
        cursor.execute('PRAGMA table_info("' + self.table_name + '")')
        for _, name, declared, notnull, hasdef, _ in cursor.fetchall():
            name = re.sub(r"^\"|\"$", "", name)
            typname, size = parse_type(declared)
            self._columns[name] = {
                "notnull": notnull,
                "hasdef": hasdef,
                "size": size,
                "typname": typname,
                }
    if indexes:
        try:
            cursor.execute('PRAGMA index_list("' + self.table_name + '")')
        except IndexError:
            # Sporadic IndexError from the driver; retrying works.
            cursor.execute('PRAGMA index_list("' + self.table_name + '")')
        self._indexes = [entry[1] for entry in cursor.fetchall()]
def _purchase_cost(cls, works):
    """Compute direct purchase cost.

    Return {work id: Decimal amount} summing the invoice lines that
    originate from purchase lines attached to each work, converted to
    the work company's currency when needed.  Draft and canceled
    invoices are ignored.
    """
    pool = Pool()
    Currency = pool.get('currency.currency')
    PurchaseLine = pool.get('purchase.line')
    InvoiceLine = pool.get('account.invoice.line')
    Invoice = pool.get('account.invoice')
    Company = pool.get('company.company')
    # Use the current transaction API (Transaction().cursor is the
    # legacy pre-4.0 accessor), consistent with the sibling variant.
    cursor = Transaction().connection.cursor()
    table = cls.__table__()
    purchase_line = PurchaseLine.__table__()
    invoice_line = InvoiceLine.__table__()
    invoice = Invoice.__table__()
    company = Company.__table__()
    amounts = defaultdict(Decimal)
    work_ids = [w.id for w in works]
    work2currency = {}
    iline2work = {}
    for sub_ids in grouped_slice(work_ids):
        where = reduce_ids(table.id, sub_ids)
        cursor.execute(*table.join(purchase_line,
                condition=purchase_line.work == table.id
                ).join(invoice_line,
                condition=invoice_line.origin == Concat(
                    'purchase.line,', purchase_line.id)
                ).join(invoice,
                condition=invoice_line.invoice == invoice.id
                ).select(invoice_line.id, table.id,
                where=where & ~invoice.state.in_(['draft', 'cancel'])))
        iline2work.update(cursor.fetchall())

        cursor.execute(*table.join(company,
                condition=table.company == company.id
                ).select(table.id, company.currency, where=where))
        work2currency.update(cursor.fetchall())

    # dict.itervalues() is Python 2 only; values()/list(keys()) work on
    # both Python 2 and 3.
    currencies = Currency.browse(set(work2currency.values()))
    id2currency = {c.id: c for c in currencies}

    invoice_lines = InvoiceLine.browse(list(iline2work.keys()))
    for invoice_line in invoice_lines:
        invoice = invoice_line.invoice
        work_id = iline2work[invoice_line.id]
        currency_id = work2currency[work_id]
        currency = id2currency[currency_id]
        if currency != invoice.currency:
            # Convert at the invoice's currency date.
            with Transaction().set_context(date=invoice.currency_date):
                amount = Currency.compute(invoice.currency,
                    invoice_line.amount, currency)
        else:
            amount = invoice_line.amount
        amounts[work_id] += amount
    return amounts
def _purchase_cost(cls, works):
    """Return {work id: Decimal} with the direct purchase cost of each
    work: invoice-line amounts linked through purchase lines, converted
    into the work company's currency; draft/canceled invoices skipped.
    """
    pool = Pool()
    Currency = pool.get('currency.currency')
    PurchaseLine = pool.get('purchase.line')
    InvoiceLine = pool.get('account.invoice.line')
    Invoice = pool.get('account.invoice')
    Company = pool.get('company.company')
    cursor = Transaction().connection.cursor()

    work_t = cls.__table__()
    pline_t = PurchaseLine.__table__()
    iline_t = InvoiceLine.__table__()
    invoice_t = Invoice.__table__()
    company_t = Company.__table__()

    costs = defaultdict(Decimal)
    currency_by_work = {}
    work_by_iline = {}

    for chunk in grouped_slice([w.id for w in works]):
        in_chunk = reduce_ids(work_t.id, chunk)
        # Invoice lines originating from purchase lines of these works,
        # skipping draft/canceled invoices.
        cursor.execute(*work_t.join(pline_t,
                condition=pline_t.work == work_t.id
                ).join(iline_t,
                condition=iline_t.origin == Concat(
                    'purchase.line,', pline_t.id)
                ).join(invoice_t,
                condition=iline_t.invoice == invoice_t.id
                ).select(iline_t.id, work_t.id,
                where=in_chunk & ~invoice_t.state.in_(['draft', 'cancel'])))
        work_by_iline.update(cursor.fetchall())
        # Currency of each work's company.
        cursor.execute(*work_t.join(company_t,
                condition=work_t.company == company_t.id
                ).select(work_t.id, company_t.currency, where=in_chunk))
        currency_by_work.update(cursor.fetchall())

    id2currency = {c.id: c
        for c in Currency.browse(set(currency_by_work.values()))}

    for iline in InvoiceLine.browse(list(work_by_iline.keys())):
        inv = iline.invoice
        work_id = work_by_iline[iline.id]
        target = id2currency[currency_by_work[work_id]]
        if target != inv.currency:
            # Convert at the invoice's currency date.
            with Transaction().set_context(date=inv.currency_date):
                value = Currency.compute(inv.currency, iline.amount,
                    target)
        else:
            value = iline.amount
        costs[work_id] += value
    return costs
def _update_definitions(self, columns=None, constraints=None, indexes=None):
    """Refresh the cached columns, constraints, foreign-key delete
    rules and indexes from the PostgreSQL information schema and
    catalogs; all of them when called without arguments.
    """
    if columns is None and constraints is None and indexes is None:
        columns = constraints = indexes = True
    cursor = Transaction().connection.cursor()
    key = (self.table_name, self.table_schema)
    if columns:
        # Column definitions.
        cursor.execute(
            'SELECT '
            'column_name, udt_name, is_nullable, '
            'character_maximum_length, '
            'column_default '
            'FROM information_schema.columns '
            'WHERE table_name = %s AND table_schema = %s',
            key)
        self._columns = {
            name: {
                'typname': typname,
                'notnull': nullable == 'NO',
                'size': size,
                'default': default,
                }
            for name, typname, nullable, size, default
            in cursor.fetchall()}
    if constraints:
        # Constraint names declared on the table.
        cursor.execute(
            'SELECT constraint_name '
            'FROM information_schema.table_constraints '
            'WHERE table_name = %s AND table_schema = %s',
            key)
        self._constraints = [name for name, in cursor.fetchall()]
        # ON DELETE rule of each foreign-key column.
        cursor.execute(
            'SELECT k.column_name, r.delete_rule '
            'FROM information_schema.key_column_usage AS k '
            'JOIN information_schema.referential_constraints AS r '
            'ON r.constraint_schema = k.constraint_schema '
            'AND r.constraint_name = k.constraint_name '
            'WHERE k.table_name = %s AND k.table_schema = %s',
            key)
        self._fk_deltypes = dict(cursor.fetchall())
    if indexes:
        # Index names from the pg_catalog.
        cursor.execute(
            "SELECT cl2.relname "
            "FROM pg_index ind "
            "JOIN pg_class cl on (cl.oid = ind.indrelid) "
            "JOIN pg_namespace n ON (cl.relnamespace = n.oid) "
            "JOIN pg_class cl2 on (cl2.oid = ind.indexrelid) "
            "WHERE cl.relname = %s AND n.nspname = %s",
            key)
        self._indexes = [row[0] for row in cursor.fetchall()]
def _load_modules():
    """Load/upgrade the registered modules inside the current transaction.

    Builds the module dependency graph from ``ir_module`` (state filter
    depends on whether an ``update`` was requested) and loads it; on
    update, modules flagged 'to remove' are purged together with their
    ir.model.data records, and the closed-over ``res`` flag is set to
    False when modules were removed.
    NOTE(review): this is a nested helper — ``update``, ``pool``,
    ``lang``, ``ir_module``, ``ir_model_data``, ``database_name`` and
    ``res`` come from the enclosing scope, which is not visible here.
    """
    global res
    TableHandler = backend.get('TableHandler')
    # Legacy (pre-4.0) transaction API: the cursor also exposes
    # commit()/rollback().
    cursor = Transaction().cursor

    # Migration from 3.6: remove double module
    old_table = 'ir_module_module'
    new_table = 'ir_module'
    if TableHandler.table_exist(cursor, old_table):
        TableHandler.table_rename(cursor, old_table, new_table)

    # 'to install' modules only take part in the graph when an explicit
    # update is requested.
    if update:
        cursor.execute(*ir_module.select(ir_module.name,
                where=ir_module.state.in_(('installed', 'to install',
                        'to upgrade', 'to remove'))))
    else:
        cursor.execute(*ir_module.select(ir_module.name,
                where=ir_module.state.in_(('installed', 'to upgrade',
                        'to remove'))))
    module_list = [name for (name,) in cursor.fetchall()]
    if update:
        module_list += update
    graph = create_graph(module_list)[0]

    try:
        load_module_graph(graph, pool, update, lang)
    except Exception:
        # Abort the whole transaction on any load failure.
        cursor.rollback()
        raise

    if update:
        cursor.execute(*ir_module.select(ir_module.name,
                where=(ir_module.state == 'to remove')))
        fetchall = cursor.fetchall()
        if fetchall:
            for (mod_name,) in fetchall:
                # TODO check if ressource not updated by the user
                # Delete the module's records through the ORM, newest
                # ir.model.data first.
                cursor.execute(*ir_model_data.select(ir_model_data.model,
                        ir_model_data.db_id,
                        where=(ir_model_data.module == mod_name),
                        order_by=ir_model_data.id.desc))
                for rmod, rid in cursor.fetchall():
                    Model = pool.get(rmod)
                    Model.delete([Model(rid)])
                cursor.commit()
            cursor.execute(*ir_module.update([ir_module.state],
                    ['uninstalled'],
                    where=(ir_module.state == 'to remove')))
            cursor.commit()
            # Signal the caller that a restart/reload is needed.
            res = False

        Module = pool.get('ir.module')
        Module.update_list()
    cursor.commit()
    Cache.resets(database_name)
def _get_invoiced_amount_timesheet(cls, works):
    """Return {work id: Decimal} invoiced through timesheet lines.

    Sums duration * unit_price of the invoice lines attached to the
    timesheet lines of each work (linked via the timesheet work
    origin), converts SQLite's float-second sums into hours and rounds
    in the work company's currency.
    """
    pool = Pool()
    TimesheetWork = pool.get('timesheet.work')
    TimesheetLine = pool.get('timesheet.line')
    InvoiceLine = pool.get('account.invoice.line')
    Company = pool.get('company.company')
    Currency = pool.get('currency.currency')
    cursor = Transaction().connection.cursor()
    table = cls.__table__()
    timesheet_work = TimesheetWork.__table__()
    timesheet_line = TimesheetLine.__table__()
    invoice_line = InvoiceLine.__table__()
    company = Company.__table__()
    amounts = {}
    work2currency = {}
    work_ids = [w.id for w in works]
    for sub_ids in grouped_slice(work_ids):
        where = reduce_ids(table.id, sub_ids)
        cursor.execute(*table.join(timesheet_work,
                condition=(
                    Concat(cls.__name__ + ',', table.id)
                    == timesheet_work.origin)
                ).join(timesheet_line,
                condition=timesheet_line.work == timesheet_work.id
                ).join(invoice_line,
                condition=timesheet_line.invoice_line == invoice_line.id
                ).select(table.id,
                Sum(timesheet_line.duration * invoice_line.unit_price),
                where=where,
                group_by=table.id))
        amounts.update(cursor.fetchall())

        cursor.execute(*table.join(company,
                condition=table.company == company.id
                ).select(table.id, company.currency, where=where))
        work2currency.update(cursor.fetchall())

    # BUGFIX: dict.itervalues() is Python 2 only; this block already
    # uses the Python 3 era Transaction().connection API.
    currencies = Currency.browse(set(work2currency.values()))
    id2currency = {c.id: c for c in currencies}

    for work in works:
        currency = id2currency[work2currency[work.id]]
        amount = amounts.get(work.id, 0)
        if isinstance(amount, datetime.timedelta):
            amount = amount.total_seconds()
        # Duration sums are in seconds; price is per hour.
        amount = amount / 60 / 60
        amounts[work.id] = currency.round(Decimal(str(amount)))
    return amounts
def _exec_add_service(self, line, typification):
    """Create an urgent service for the target analysis on the line's
    fraction, using the analysis' first laboratory, the typification's
    method and the default device; then create its unplanned notebook
    lines.  Silently returns when no laboratory is configured.
    """
    cursor = Transaction().connection.cursor()
    pool = Pool()
    AnalysisLaboratory = pool.get('lims.analysis-laboratory')
    AnalysisDevice = pool.get('lims.analysis.device')
    Service = pool.get('lims.service')
    EntryDetailAnalysis = pool.get('lims.entry.detail.analysis')

    # Laboratories configured for the analysis; nothing to do without one.
    cursor.execute('SELECT DISTINCT(laboratory) '
        'FROM "' + AnalysisLaboratory._table + '" '
        'WHERE analysis = %s', (self.target_analysis.id,))
    labs = [row[0] for row in cursor.fetchall()]
    if not labs:
        return
    laboratory_id = labs[0]

    method_id = typification.method.id if typification.method else None

    # Default device for this analysis/laboratory pair, if any.
    cursor.execute('SELECT DISTINCT(device) '
        'FROM "' + AnalysisDevice._table + '" '
        'WHERE active IS TRUE '
        'AND analysis = %s '
        'AND laboratory = %s '
        'AND by_default IS TRUE',
        (self.target_analysis.id, laboratory_id))
    devices = [row[0] for row in cursor.fetchall()]
    device_id = devices[0] if devices else None

    values = {
        'fraction': line.fraction.id,
        'analysis': self.target_analysis.id,
        'urgent': True,
        'laboratory': laboratory_id,
        'method': method_id,
        'device': device_id,
        }
    with Transaction().set_context(manage_service=True):
        new_service, = Service.create([values])
    Service.copy_analysis_comments([new_service])
    Service.set_confirmation_date([new_service])

    details = EntryDetailAnalysis.search([
        ('service', '=', new_service.id)])
    if details:
        EntryDetailAnalysis.create_notebook_lines(details, line.fraction)
        EntryDetailAnalysis.write(details, {
            'state': 'unplanned',
            })
def _update_definitions(self, columns=None, constraints=None, indexes=None):
    """Reload the cached table metadata (columns, constraints,
    foreign-key delete rules, indexes) from the PostgreSQL system
    views; everything is refreshed when no flag is given.
    """
    if columns is None and constraints is None and indexes is None:
        columns = constraints = indexes = True
    cursor = Transaction().connection.cursor()
    params = (self.table_name, self.table_schema)
    if columns:
        # Column definitions from information_schema.
        self._columns = {}
        cursor.execute('SELECT '
            'column_name, udt_name, is_nullable, '
            'character_maximum_length, '
            'column_default '
            'FROM information_schema.columns '
            'WHERE table_name = %s AND table_schema = %s', params)
        for name, typname, nullable, size, default in cursor.fetchall():
            self._columns[name] = {
                'typname': typname,
                'notnull': nullable == 'NO',
                'size': size,
                'default': default,
                }
    if constraints:
        # Constraint names defined on the table.
        cursor.execute('SELECT constraint_name '
            'FROM information_schema.table_constraints '
            'WHERE table_name = %s AND table_schema = %s', params)
        self._constraints = [name for name, in cursor.fetchall()]
        # Delete rule of each foreign-key column.
        cursor.execute('SELECT k.column_name, r.delete_rule '
            'FROM information_schema.key_column_usage AS k '
            'JOIN information_schema.referential_constraints AS r '
            'ON r.constraint_schema = k.constraint_schema '
            'AND r.constraint_name = k.constraint_name '
            'WHERE k.table_name = %s AND k.table_schema = %s', params)
        self._fk_deltypes = dict(cursor.fetchall())
    if indexes:
        # Index names from the pg_catalog.
        cursor.execute("SELECT cl2.relname "
            "FROM pg_index ind "
            "JOIN pg_class cl on (cl.oid = ind.indrelid) "
            "JOIN pg_namespace n ON (cl.relnamespace = n.oid) "
            "JOIN pg_class cl2 on (cl2.oid = ind.indexrelid) "
            "WHERE cl.relname = %s AND n.nspname = %s", params)
        self._indexes = [row[0] for row in cursor.fetchall()]
def _get_invoiced_amount_progress(cls, works):
    """Return {work id: Decimal} invoiced through invoiced-progress
    lines, converting duration sums (seconds on some backends) into
    hours and rounding in the work company's currency.
    """
    pool = Pool()
    Progress = pool.get('project.work.invoiced_progress')
    InvoiceLine = pool.get('account.invoice.line')
    Company = pool.get('company.company')
    Currency = pool.get('currency.currency')
    # Use the current transaction API (Transaction().cursor is the
    # legacy pre-4.0 accessor), consistent with the sibling variant.
    cursor = Transaction().connection.cursor()
    table = cls.__table__()
    progress = Progress.__table__()
    invoice_line = InvoiceLine.__table__()
    company = Company.__table__()
    amounts = defaultdict(Decimal)
    work2currency = {}
    work_ids = [w.id for w in works]
    for sub_ids in grouped_slice(work_ids):
        where = reduce_ids(table.id, sub_ids)
        cursor.execute(*table.join(progress,
                condition=progress.work == table.id
                ).join(invoice_line,
                condition=progress.invoice_line == invoice_line.id
                ).select(table.id,
                Sum(progress.effort_duration * invoice_line.unit_price),
                where=where,
                group_by=table.id))
        for work_id, amount in cursor.fetchall():
            if isinstance(amount, datetime.timedelta):
                amount = amount.total_seconds()
            # Amount computed in seconds instead of hours
            if amount is not None:
                amount /= 60 * 60
            else:
                amount = 0
            amounts[work_id] = amount

        cursor.execute(*table.join(company,
                condition=table.company == company.id
                ).select(table.id, company.currency, where=where))
        work2currency.update(cursor.fetchall())

    # BUGFIX: dict.itervalues() is Python 2 only; values() works on
    # both Python 2 and 3.
    currencies = Currency.browse(set(work2currency.values()))
    id2currency = {c.id: c for c in currencies}

    for work in works:
        currency = id2currency[work2currency[work.id]]
        amounts[work.id] = currency.round(Decimal(amounts[work.id]))
    return amounts
def _get_invoiced_amount_progress(cls, works):
    """Return {work id: Decimal} invoiced through invoiced-progress
    lines, converting duration sums (seconds on some backends) into
    hours and rounding in the work company's currency.
    """
    pool = Pool()
    Progress = pool.get('project.work.invoiced_progress')
    InvoiceLine = pool.get('account.invoice.line')
    Company = pool.get('company.company')
    Currency = pool.get('currency.currency')
    cursor = Transaction().connection.cursor()
    table = cls.__table__()
    progress = Progress.__table__()
    invoice_line = InvoiceLine.__table__()
    company = Company.__table__()
    amounts = defaultdict(Decimal)
    work2currency = {}
    work_ids = [w.id for w in works]
    for sub_ids in grouped_slice(work_ids):
        where = reduce_ids(table.id, sub_ids)
        cursor.execute(*table.join(progress,
                condition=progress.work == table.id
                ).join(invoice_line,
                condition=progress.invoice_line == invoice_line.id
                ).select(table.id,
                Sum(progress.effort_duration * invoice_line.unit_price),
                where=where,
                group_by=table.id))
        for work_id, amount in cursor.fetchall():
            if isinstance(amount, datetime.timedelta):
                amount = amount.total_seconds()
            # Amount computed in seconds instead of hours
            if amount is not None:
                amount /= 60 * 60
            else:
                amount = 0
            amounts[work_id] = amount

        cursor.execute(*table.join(company,
                condition=table.company == company.id
                ).select(table.id, company.currency, where=where))
        work2currency.update(cursor.fetchall())

    # BUGFIX: dict.itervalues() does not exist on Python 3; this block
    # already uses the Python 3 era Transaction().connection API.
    currencies = Currency.browse(set(work2currency.values()))
    id2currency = {c.id: c for c in currencies}

    for work in works:
        currency = id2currency[work2currency[work.id]]
        amounts[work.id] = currency.round(Decimal(amounts[work.id]))
    return amounts
def __register__(cls, module_name):
    """Create/upgrade the product-supplier table and run data migrations.

    Handles: 2.2 (new ``currency`` column back-filled from the record's
    company currency), 2.4/2.6 (drop NOT NULL on ``sequence`` and
    ``delivery_time``) and 3.8 (``delivery_time`` integer days converted
    into the ``lead_time`` timedelta column, then dropped).
    """
    TableHandler = backend.get('TableHandler')
    # Legacy (pre-4.0) API: TableHandler takes the cursor.
    cursor = Transaction().cursor
    table = TableHandler(cursor, cls, module_name)
    sql_table = cls.__table__()

    # Migration from 2.2 new field currency
    # (must be checked before super() creates the column).
    created_currency = table.column_exist('currency')

    super(ProductSupplier, cls).__register__(module_name)

    # Migration from 2.2 fill currency
    # Page through the table with cursor.IN_MAX to bound memory.
    if not created_currency:
        Company = Pool().get('company.company')
        company = Company.__table__()
        limit = cursor.IN_MAX
        cursor.execute(*sql_table.select(Count(sql_table.id)))
        product_supplier_count, = cursor.fetchone()
        for offset in range(0, product_supplier_count, limit):
            cursor.execute(*sql_table.join(company,
                    condition=sql_table.company == company.id
                    ).select(sql_table.id, company.currency,
                    order_by=sql_table.id,
                    limit=limit, offset=offset))
            for product_supplier_id, currency_id in cursor.fetchall():
                cursor.execute(*sql_table.update(
                        columns=[sql_table.currency],
                        values=[currency_id],
                        where=sql_table.id == product_supplier_id))

    # Migration from 2.4: drop required on sequence
    table.not_null_action('sequence', action='remove')

    # Migration from 2.6: drop required on delivery_time
    table.not_null_action('delivery_time', action='remove')

    # Migration from 3.8: change delivery_time (integer days) into the
    # timedelta lead_time column
    if table.column_exist('delivery_time'):
        cursor.execute(*sql_table.select(
                sql_table.id, sql_table.delivery_time))
        for id_, delivery_time in cursor.fetchall():
            if delivery_time is None:
                continue
            lead_time = datetime.timedelta(days=delivery_time)
            cursor.execute(*sql_table.update(
                    [sql_table.lead_time], [lead_time],
                    where=sql_table.id == id_))
        table.drop_column('delivery_time')
def _get_invoiced_amount_timesheet(cls, works):
    """Return {work id: Decimal} invoiced through timesheet lines.

    Sums duration * unit_price of the invoice lines attached to each
    work's timesheet lines (linked via ``table.work``), converts
    float-second sums into hours and rounds in the work company's
    currency.
    """
    pool = Pool()
    TimesheetWork = pool.get('timesheet.work')
    TimesheetLine = pool.get('timesheet.line')
    InvoiceLine = pool.get('account.invoice.line')
    Company = pool.get('company.company')
    Currency = pool.get('currency.currency')
    # Use the current transaction API (Transaction().cursor is the
    # legacy pre-4.0 accessor), consistent with the sibling variant.
    cursor = Transaction().connection.cursor()
    table = cls.__table__()
    timesheet_work = TimesheetWork.__table__()
    timesheet_line = TimesheetLine.__table__()
    invoice_line = InvoiceLine.__table__()
    company = Company.__table__()
    amounts = {}
    work2currency = {}
    work_ids = [w.id for w in works]
    for sub_ids in grouped_slice(work_ids):
        where = reduce_ids(table.id, sub_ids)
        cursor.execute(*table.join(timesheet_work,
                condition=table.work == timesheet_work.id
                ).join(timesheet_line,
                condition=timesheet_line.work == timesheet_work.id
                ).join(invoice_line,
                condition=timesheet_line.invoice_line == invoice_line.id
                ).select(table.id,
                Sum(timesheet_line.duration * invoice_line.unit_price),
                where=where,
                group_by=table.id))
        amounts.update(cursor.fetchall())

        cursor.execute(*table.join(company,
                condition=table.company == company.id
                ).select(table.id, company.currency, where=where))
        work2currency.update(cursor.fetchall())

    # BUGFIX: dict.itervalues() is Python 2 only; values() works on
    # both Python 2 and 3.
    currencies = Currency.browse(set(work2currency.values()))
    id2currency = {c.id: c for c in currencies}

    for work in works:
        currency = id2currency[work2currency[work.id]]
        amount = amounts.get(work.id, 0)
        if isinstance(amount, datetime.timedelta):
            amount = amount.total_seconds()
        # Duration sums are in seconds; price is per hour.
        amount = amount / 60 / 60
        amounts[work.id] = currency.round(Decimal(str(amount)))
    return amounts
def get_creationdate(cls, uri, cache=None):
    """Return the creation time (Unix epoch) of the vCard behind *uri*.

    Raises DAV_NotFound when the uri does not resolve to a vCard;
    falls back to the parent collection implementation when the party
    id is falsy or no date was found.  ``cache`` is a per-request memo
    dict shared across WebDAV property lookups; the dates of every
    fetched party are stored under ``cache['_contact']``.
    """
    Party = Pool().get('party.party')
    party_id = cls.vcard(uri)
    # Legacy (pre-4.0) transaction API; the cursor exposes IN_MAX.
    cursor = Transaction().cursor
    if party_id is None:
        raise DAV_NotFound
    if party_id:
        if cache is not None:
            cache.setdefault('_contact', {})
            # NOTE(review): appending to keys() assumes Python 2, where
            # dict.keys() returns a list — confirm before porting.
            ids = cache['_contact'].keys()
            if party_id not in ids:
                ids.append(party_id)
            elif 'creationdate' in cache['_contact'][party_id]:
                # Cache hit: reuse the previously fetched date.
                return cache['_contact'][party_id]['creationdate']
        else:
            ids = [party_id]
        res = None
        # Chunk the id list to respect the backend's IN-clause limit;
        # old-style reduce_ids returns (sql_fragment, params).
        for i in range(0, len(ids), cursor.IN_MAX):
            sub_ids = ids[i:i + cursor.IN_MAX]
            red_sql, red_ids = reduce_ids('id', sub_ids)
            cursor.execute('SELECT id, '
                'EXTRACT(epoch FROM create_date) '
                'FROM "' + Party._table + '" '
                'WHERE ' + red_sql, red_ids)
            for party_id2, date in cursor.fetchall():
                if party_id2 == party_id:
                    res = date
                if cache is not None:
                    # Warm the cache for every party in the batch.
                    cache['_contact'].setdefault(party_id2, {})
                    cache['_contact'][party_id2]['creationdate'] = date
        if res is not None:
            return res
    return super(Collection, cls).get_creationdate(uri, cache=cache)
def get_recent_sales(cls):
    """
    Return sales of current shop, which were made within last 5 days
    and are in draft state. Sort by write_date or create_date of Sale
    and sale lines.
    """
    context = Transaction().context
    date = (
        datetime.now() - timedelta(days=5)
    ).strftime('%Y-%m-%d %H:%M:%S')
    current_shop = context['shop']
    cursor = Transaction().cursor
    # SECURITY FIX: use DB-API parameter binding instead of Python %
    # string interpolation (SQL injection / quoting bugs).
    cursor.execute(
        "SELECT sale_sale.id "
        "FROM sale_sale INNER JOIN sale_line "
        "ON (sale_sale.id = sale_line.sale) "
        "WHERE shop = %s AND state = 'draft' AND "
        "(sale_sale.write_date >= %s OR sale_sale.create_date >= %s) "
        "ORDER BY sale_line.write_date DESC, sale_line.create_date DESC, "
        "sale_sale.write_date DESC, sale_sale.create_date DESC",
        (current_shop, date, date)
    )
    ids = [x[0] for x in cursor.fetchall()]
    return [cls(id).serialize('recent_sales') for id in ids]
def __register__(cls, module_name):
    """Register the model and migrate statement-line move origins.

    Migration from 3.4: moves used to point at account.statement.line;
    rewrite their origin to the line's account.statement.
    """
    pool = Pool()
    StatementLine = pool.get('account.statement.line')
    cursor = Transaction().connection.cursor()
    move = cls.__table__()

    super(Move, cls).__register__(module_name)

    line = StatementLine.__table__()
    cursor.execute(*move.join(line,
            condition=(
                Concat(StatementLine.__name__ + ',', line.id)
                == move.origin)
            ).select(move.id, line.statement,
            order_by=(move.id, line.statement)))
    for statement_id, rows in groupby(cursor.fetchall(), itemgetter(1)):
        move_ids = [move_id for move_id, _ in rows]
        for sub_ids in grouped_slice(move_ids):
            cursor.execute(*move.update(
                    columns=[move.origin],
                    values=['account.statement,%s' % statement_id],
                    where=reduce_ids(move.id, sub_ids)))
def get_recent_sales(cls):
    """
    Return sales of current channel, which were made within last 5
    days and are in draft state. Sort by write_date or create_date of
    Sale and sale lines.
    """
    SaleLine = Pool().get('sale.line')
    context = Transaction().context
    cutoff = (datetime.now() - timedelta(days=5)).strftime('%Y-%m-%d %H:%M:%S')
    channel_id = context['current_channel']
    sale = cls.__table__()
    sale_line = SaleLine.__table__()
    cursor = Transaction().cursor

    in_channel = sale.channel == Literal(channel_id)
    open_states = sale.state.in_(
        ['draft', 'quotation', 'confirmed', 'processing'])
    touched_recently = ((sale.write_date >= Literal(cutoff))
        | (sale.create_date >= Literal(cutoff)))

    query = sale.join(
        sale_line,
        condition=(sale.id == sale_line.sale)).select(
            sale.id,
            where=in_channel & open_states & touched_recently,
            order_by=(sale_line.write_date.desc,
                sale_line.create_date.desc,
                sale.write_date.desc,
                sale.create_date.desc))
    cursor.execute(*query)
    return [cls(row[0]).serialize('recent_sales')
        for row in cursor.fetchall()]
def __register__(cls, module_name):
    """Register the model; back-fill cost_price for pre-3.6 lines.

    Migration from 3.6: the cost_price column is new; compute the
    historical cost for every line that has an employee and a date.
    """
    pool = Pool()
    Employee = pool.get('company.employee')
    TableHandler = backend.get('TableHandler')
    cursor = Transaction().connection.cursor()
    line = cls.__table__()
    handler = TableHandler(cls, module_name)
    # Must be checked before super() creates the column.
    needs_cost_price = not handler.column_exist('cost_price')

    super(TimesheetLine, cls).__register__(module_name)

    if needs_cost_price:
        cursor.execute(*line.select(line.id, line.employee, line.date,
                where=(line.cost_price == 0)
                & (line.employee != Null)
                & (line.date != Null)))
        for line_id, employee_id, date in cursor.fetchall():
            cost = Employee(employee_id).compute_cost_price(date=date)
            cursor.execute(*line.update([line.cost_price], [cost],
                    where=line.id == line_id))
def get_lastmodified(cls, uri, cache=None):
    """Return the last-modified time (Unix epoch) for *uri*.

    Only ir.attachment objects get a stored timestamp (write_date,
    falling back to create_date); everything else answers the current
    time.  ``cache`` is a per-request memo dict keyed by model name;
    dates of every fetched attachment are stored in it.
    """
    pool = Pool()
    object_name, object_id = cls._uri2object(uri, cache=cache)
    if object_name == 'ir.attachment':
        Model = pool.get(object_name)
        if object_id:
            if cache is not None:
                cache.setdefault(Model.__name__, {})
                # NOTE(review): appending to keys() assumes Python 2,
                # where dict.keys() returns a list — confirm before
                # porting.
                ids = cache[Model.__name__].keys()
                if object_id not in ids:
                    ids.append(object_id)
                elif 'lastmodified' in cache[Model.__name__][object_id]:
                    # Cache hit from an earlier property lookup.
                    return cache[Model.__name__][object_id][
                        'lastmodified']
            else:
                ids = [object_id]
            res = None
            # Legacy (pre-4.0) transaction API.
            cursor = Transaction().cursor
            table = Model.__table__()
            for sub_ids in grouped_slice(ids):
                red_sql = reduce_ids(table.id, sub_ids)
                cursor.execute(*table.select(table.id,
                        Extract('EPOCH',
                            Coalesce(table.write_date, table.create_date)),
                        where=red_sql))
                for object_id2, date in cursor.fetchall():
                    if object_id2 == object_id:
                        res = date
                    if cache is not None:
                        # Warm the cache for every row in the batch.
                        cache[Model.__name__].setdefault(object_id2, {})
                        cache[Model.__name__][object_id2][
                            'lastmodified'] = date
            if res is not None:
                return res
    return time.time()
def __register__(cls, module_name):
    """Register the model and run InventoryLine migrations."""
    TableHandler = backend.get('TableHandler')
    cursor = Transaction().connection.cursor()
    pool = Pool()
    Move = pool.get('stock.move')
    line = cls.__table__()
    move = Move.__table__()

    super(InventoryLine, cls).__register__(module_name)

    handler = TableHandler(cls, module_name)
    # Migration from 2.8: Remove constraint inventory_product_uniq
    handler.drop_constraint('inventory_product_uniq')
    # Migration from 3.0: the line->move link becomes a Move origin.
    if handler.column_exist('move'):
        cursor.execute(*line.select(line.id, line.move,
                where=line.move != Null))
        for line_id, move_id in cursor.fetchall():
            cursor.execute(*move.update(
                    columns=[move.origin],
                    values=['%s,%s' % (cls.__name__, line_id)],
                    where=move.id == move_id))
        handler.drop_column('move')
    # Migration from 4.6: drop required on quantity
    handler.not_null_action('quantity', action='remove')
def ejecutar_nc(self):
    """Collect the posted invoices to credit and store a summary message.

    Depending on the wizard options the invoices are selected by id
    range, by zero net consumption, or by service/price list/period.
    The resulting invoice numbers are written to ``self.exito.resumen``
    and the wizard transitions to the 'exito' state.
    """
    # Fix: the previous version interpolated wizard values straight
    # into the SQL text (injection-prone and fragile); use DB-API
    # parameter binding instead.
    if self.start.tipoemi == 'porid':
        query = (
            "SELECT ac.id, ac.number "
            "FROM account_invoice ac "
            "LEFT JOIN sigcoop_suministro_suministro ss "
            "ON ss.id = ac.suministro "
            "WHERE ac.state IN ('posted') "
            "AND ac.type = 'out_invoice' "
            "AND ac.id BETWEEN %s AND %s "
            "AND ss.servicio = %s "
            "ORDER BY ac.id")
        params = (self.start.desdeid, self.start.hastaid,
            self.start.servicio.id)
    elif self.start.consumo_cero:
        # Credit notes for invoices with zero net consumption
        query = (
            "SELECT ac.id, ac.number "
            "FROM account_invoice ac "
            "LEFT JOIN sigcoop_suministro_suministro ss "
            "ON ss.id = ac.suministro "
            "JOIN sigcoop_consumos_consumo c ON c.invoice = ac.id "
            "WHERE ac.state IN ('posted') "
            "AND ac.type = 'out_invoice' "
            "AND c.consumo_neto = 0 "
            "AND ss.servicio = %s "
            "AND ac.periodo = %s "
            "ORDER BY ac.id")
        params = (self.start.servicio.id, self.start.periodo.id)
    else:
        query = (
            "SELECT ac.id, ac.number "
            "FROM account_invoice ac "
            "LEFT JOIN sigcoop_suministro_suministro ss "
            "ON ss.id = ac.suministro "
            "WHERE ac.state IN ('posted') "
            "AND ac.type = 'out_invoice' "
            "AND ss.servicio = %s "
            "AND ss.lista_precios = %s "
            "AND ac.periodo = %s "
            "ORDER BY ac.id")
        params = (self.start.servicio.id, self.start.lista_precios.id,
            self.start.periodo.id)
    cursor = Transaction().cursor
    cursor.execute(query, params)
    facturas = cursor.fetchall()
    self.exito.resumen = (
        "Se van a generar notas de creditos para las siguientes "
        "facturas: %s" % map(lambda x: str(x[1]), facturas))
    return 'exito'
def search_netting_moves(cls, name, clause):
    """Domain for move lines whose party has both unreconciled debit
    and credit amounts on reconcilable accounts."""
    query = """
        SELECT id FROM account_move_line l
        WHERE party IN (
            SELECT aml.party
            FROM account_account aa, account_move_line aml
            WHERE aa.reconcile
                AND aa.id = aml.account
                AND aml.reconciliation IS NULL
            GROUP BY aml.party
            HAVING bool_or(aml.debit <> 0) AND bool_or(aml.credit <> 0)
        )
        """
    cursor = Transaction().connection.cursor()
    cursor.execute(query)
    matching = [row[0] for row in cursor.fetchall()]
    # A truthy clause value keeps the matches; a falsy one inverts them.
    if clause[2]:
        return [('id', 'in', matching)]
    return [('id', 'not in', matching)]
def _get_duration_timesheet(works, invoiced):
    """Sum timesheet durations per project work.

    ``invoiced`` selects lines that are (True) / are not (False) linked
    to an invoice line.
    """
    pool = Pool()
    TimesheetLine = pool.get('timesheet.line')
    cursor = Transaction().cursor
    line = TimesheetLine.__table__()
    durations = {}
    twork2work = dict((w.work.id, w.id) for w in works if w.work)
    # The invoice filter is loop-invariant; compute it once.
    if invoiced:
        invoice_filter = line.invoice_line != Null
    else:
        invoice_filter = line.invoice_line == Null
    for sub_ids in grouped_slice(twork2work.keys()):
        cursor.execute(*line.select(line.work, Sum(line.duration),
                where=reduce_ids(line.work, sub_ids) & invoice_filter,
                group_by=line.work))
        for twork_id, duration in cursor.fetchall():
            if not duration:
                continue
            # SQLite uses float for SUM
            if not isinstance(duration, datetime.timedelta):
                duration = datetime.timedelta(seconds=duration)
            durations[twork2work[twork_id]] = duration
    return durations
def transition_check(self): Plots = Pool().get('forest.plot') Preco = Pool().get('forest_work.preconisation') plots_succeed = [] plots_failed = [] Lignes = Preco.browse(Transaction().context.get('active_ids')) for ligne in Lignes: cursor = Transaction().cursor cursor.execute( 'SELECT p.id ' 'FROM forest_plot p ' 'WHERE p.id=%s ' 'GROUP BY p.id' % (ligne.plot.id)) for plotid in cursor.fetchall(): plots = Plots.browse(plotid) for plot in plots: try: if plot.travaux: print "plots_failed ok" self.create_travaux(plot) plots_failed.append(plot.id) else: print "plots_succeed ok" self.create_travaux(plot) plots_succeed.append(plot.id) except Exception, e: raise self.result.plots_succeed = plots_succeed self.result.plots_failed = plots_failed
def __register__(cls, module_name):
    """Migrate party vat_number/vat_country columns into identifiers."""
    pool = Pool()
    Party = pool.get('party.party')
    TableHandler = backend.get('TableHandler')
    cursor = Transaction().cursor
    party = Party.__table__()

    super(PartyIdentifier, cls).__register__(module_name)

    party_h = TableHandler(cursor, Party, module_name)
    if not (party_h.column_exist('vat_number')
            and party_h.column_exist('vat_country')):
        return
    identifiers = []
    cursor.execute(*party.select(
            party.id, party.vat_number, party.vat_country,
            where=(party.vat_number != Null)
            | (party.vat_country != Null)))
    for party_id, number, country in cursor.fetchall():
        code = (country or '') + (number or '')
        if not code:
            continue
        # Only valid EU VAT codes get a type; others stay untyped.
        identifier_type = 'eu_vat' if vat.is_valid(code) else None
        identifiers.append(
            cls(party=party_id, code=code, type=identifier_type))
    cls.save(identifiers)
    party_h.drop_column('vat_number')
    party_h.drop_column('vat_country')
def __register__(cls, module_name):
    """Table registration plus the 3.6 cost_price backfill (old API)."""
    pool = Pool()
    Employee = pool.get('company.employee')
    TableHandler = backend.get('TableHandler')
    cursor = Transaction().cursor
    line = cls.__table__()
    handler = TableHandler(cursor, cls, module_name)
    # Column absence before super() means a backfill is needed after.
    must_migrate = not handler.column_exist('cost_price')

    super(TimesheetLine, cls).__register__(module_name)

    # Migration from 3.6: add cost_price
    if must_migrate:
        condition = ((line.cost_price == 0)
            & (line.employee != Null)
            & (line.date != Null))
        cursor.execute(*line.select(line.id, line.employee, line.date,
                where=condition))
        for line_id, employee_id, date in cursor.fetchall():
            cost = Employee(employee_id).compute_cost_price(date=date)
            cursor.execute(*line.update([line.cost_price], [cost],
                    where=line.id == line_id))
def get_day_busy_hours(cls, activities, name):
    """Per-activity total duration booked on its (employee, date) pair."""
    cursor = Transaction().connection.cursor()
    table = cls.__table__()
    employees = [a.employee.id for a in activities]
    dates = [a.date for a in activities]
    cursor.execute(*table.select(
            table.employee, table.date, Sum(table.duration),
            where=(table.employee.in_(employees)
                & (table.date >= min(dates))
                & (table.date <= max(dates))),
            group_by=(table.employee, table.date)))
    totals = {}
    for employee_id, date, duration in cursor.fetchall():
        totals[(employee_id, date)] = duration
    # Activities without any booked duration default to zero.
    return {
        activity.id: totals.get(
            (activity.employee.id, activity.date), datetime.timedelta())
        for activity in activities}
def __register__(cls, module_name):
    """Project-work migrations: copy sequence (2.0), nullable sequence (2.4)."""
    TimesheetWork = Pool().get('timesheet.work')
    cursor = Transaction().cursor
    project_h = TableHandler(cursor, cls, module_name)
    timesheet_h = TableHandler(cursor, TimesheetWork, module_name)
    copy_sequence = (not project_h.column_exist('sequence')
        and timesheet_h.column_exist('sequence'))

    super(Work, cls).__register__(module_name)

    # Migration from 2.0: copy sequence from timesheet to project
    if copy_sequence:
        cursor.execute(
            'SELECT t.sequence, t.id '
            'FROM "%s" AS t '
            'JOIN "%s" AS p ON (p.work = t.id)' % (
                TimesheetWork._table, cls._table))
        # The UPDATE statement is invariant; build it once.
        update = ('UPDATE "%s" '
            'SET sequence = %%s '
            'WHERE work = %%s' % cls._table)
        for sequence, work_id in cursor.fetchall():
            cursor.execute(update, (sequence, work_id))

    # Migration from 2.4: drop required on sequence
    project_h.not_null_action('sequence', action='remove')
def copy_selection_options(): """ Copy the selection field options and create options for the selection field. """ Option = Pool().get('product.attribute.selection_option') cursor = Transaction().cursor cursor.execute( """ SELECT id, selection FROM product_attribute WHERE type_='selection' """ ) # Key value map attribute_kv_map = defaultdict(dict) for row in cursor.fetchall(): id, selection = row for k, v in get_selection_json(selection): option = Option( name=v, attribute=id ) option.save() attribute_kv_map[id][k] = option.id print "Created selection values for %d attributes" % len(attribute_kv_map) return attribute_kv_map
def get_listings_updated_after(self, updated_after=None):
    """
    Return channel listings that need an inventory update.

    Without ``updated_after`` every active listing of the channel is
    returned; otherwise only listings whose product stock moves, or the
    listing itself, changed after that timestamp.

    :return: List of AR of `product.product.channel_listing`
    """
    ChannelListing = Pool().get('product.product.channel_listing')
    if not updated_after:
        # Return all active listings
        return ChannelListing.search([
            ('channel', '=', self),
            ('state', '=', 'active'),
        ])
    # Find listings whose product inventory or own record changed
    # after the given timestamp.
    cursor = Transaction().connection.cursor()
    cursor.execute(
        """
        SELECT listing.id
        FROM product_product_channel_listing AS listing
        INNER JOIN stock_move ON stock_move.product = listing.product
        WHERE listing.channel = %s AND listing.state = 'active'
        AND (
            COALESCE(stock_move.write_date, stock_move.create_date) > %s
            OR
            COALESCE(listing.write_date, listing.create_date) > %s
        )
        GROUP BY listing.id
        """, (self.id, updated_after, updated_after))
    return ChannelListing.browse([row[0] for row in cursor.fetchall()])
def __register__(cls, module_name):
    """Timesheet line migrations from 3.4: company column and duration."""
    TableHandler = backend.get('TableHandler')
    cursor = Transaction().cursor
    pool = Pool()
    Work = pool.get('timesheet.work')
    work = Work.__table__()
    sql_table = cls.__table__()
    handler = TableHandler(cursor, cls, module_name)
    created_company = not handler.column_exist('company')

    super(Line, cls).__register__(module_name)

    # Re-create the handler: super() may have altered the table.
    handler = TableHandler(cursor, cls, module_name)
    # Migration from 3.4: new company field
    if created_company:
        # Don't use FROM because SQLite nor MySQL support it.
        cursor.execute(*sql_table.update(
                [sql_table.company],
                [work.select(work.company,
                        where=work.id == sql_table.work)]))
    # Migration from 3.4: change hours into timedelta duration
    if handler.column_exist('hours'):
        handler.drop_constraint('check_move_hours_pos')
        cursor.execute(*sql_table.select(sql_table.id, sql_table.hours))
        for line_id, hours in cursor.fetchall():
            cursor.execute(*sql_table.update(
                    [sql_table.duration],
                    [datetime.timedelta(hours=hours)],
                    where=sql_table.id == line_id))
        handler.drop_column('hours')
def __register__(cls, module_name):
    """Move legacy vat_number/vat_country columns into identifiers."""
    pool = Pool()
    Party = pool.get('party.party')
    TableHandler = backend.get('TableHandler')
    cursor = Transaction().connection.cursor()
    party = Party.__table__()

    super(PartyIdentifier, cls).__register__(module_name)

    party_h = TableHandler(Party, module_name)
    has_legacy_columns = (party_h.column_exist('vat_number')
        and party_h.column_exist('vat_country'))
    if has_legacy_columns:
        cursor.execute(*party.select(
                party.id, party.vat_number, party.vat_country,
                where=(party.vat_number != Null)
                | (party.vat_country != Null)))
        to_save = []
        for party_id, number, country in cursor.fetchall():
            code = (country or '') + (number or '')
            if not code:
                continue
            # Only valid EU VAT codes are typed; the rest stay None.
            kind = 'eu_vat' if vat.is_valid(code) else None
            to_save.append(cls(party=party_id, code=code, type=kind))
        cls.save(to_save)
        party_h.drop_column('vat_number')
        party_h.drop_column('vat_country')
def get_creationdate(cls, uri, cache=None):
    """Return the creation timestamp (epoch seconds) for *uri*.

    Only 'ir.attachment' records are looked up; anything else falls
    through to the current time.  When *cache* is given, creation dates
    of every prefetched id are stored under cache[model][id].
    """
    pool = Pool()
    object_name, object_id = cls._uri2object(uri, cache=cache)
    if object_name == 'ir.attachment':
        Model = pool.get(object_name)
        if object_id:
            if cache is not None:
                cache.setdefault(Model.__name__, {})
                # NOTE(review): assumes keys() returns a mutable list
                # (Python 2) — .append below would fail on Python 3.
                ids = cache[Model.__name__].keys()
                if object_id not in ids:
                    ids.append(object_id)
                elif 'creationdate' in cache[Model.__name__][object_id]:
                    # Cache hit: reuse the previously fetched date.
                    return cache[Model.__name__][object_id][
                        'creationdate']
            else:
                ids = [object_id]
            res = None
            cursor = Transaction().cursor
            # Chunk the ids to stay under the backend's IN() size limit.
            for i in range(0, len(ids), cursor.IN_MAX):
                sub_ids = ids[i:i + cursor.IN_MAX]
                # NOTE(review): this legacy reduce_ids returns a
                # (sql_fragment, params) pair, unlike the python-sql
                # variant used elsewhere — confirm against the module's
                # tools version.
                red_sql, red_ids = reduce_ids('id', sub_ids)
                cursor.execute('SELECT id, '
                    'EXTRACT(epoch FROM create_date) '
                    'FROM "' + Model._table + '" '
                    'WHERE ' + red_sql, red_ids)
                for object_id2, date in cursor.fetchall():
                    if object_id2 == object_id:
                        res = date
                    if cache is not None:
                        cache[Model.__name__].setdefault(object_id2, {})
                        cache[Model.__name__][object_id2][
                            'creationdate'] = date
            if res is not None:
                return res
    return time.time()
def _get_duration_timesheet(works, invoiced):
    """Aggregate timesheet durations per project work.

    ``invoiced`` selects lines that are (True) / are not (False) linked
    to an invoice line.
    """
    pool = Pool()
    TimesheetLine = pool.get('timesheet.line')
    cursor = Transaction().connection.cursor()
    line = TimesheetLine.__table__()
    durations = defaultdict(datetime.timedelta)
    twork2work = {tw.id: w.id for w in works for tw in w.timesheet_works}
    # The invoice filter is loop-invariant; compute it once.
    invoice_clause = (line.invoice_line != Null if invoiced
        else line.invoice_line == Null)
    for sub_ids in grouped_slice(twork2work.keys()):
        cursor.execute(*line.select(line.work, Sum(line.duration),
                where=reduce_ids(line.work, sub_ids) & invoice_clause,
                group_by=line.work))
        for twork_id, duration in cursor.fetchall():
            if not duration:
                continue
            # SQLite uses float for SUM
            if not isinstance(duration, datetime.timedelta):
                duration = datetime.timedelta(seconds=duration)
            durations[twork2work[twork_id]] += duration
    return durations
def table_exist(table_name):
    """Check that a SQLite table exists, upgrading it to AUTOINCREMENT.

    Tables created before 1.6 lack AUTOINCREMENT on their primary key;
    such a table is rebuilt through a temporary copy.  Returns False
    when the table is missing.
    """
    cursor = Transaction().connection.cursor()
    cursor.execute("SELECT sql FROM sqlite_master "
        "WHERE type = 'table' AND name = ?", (table_name,))
    row = cursor.fetchone()
    if not row:
        return False
    create_sql, = row
    # Migration from 1.6 add autoincrement
    if "AUTOINCREMENT" not in create_sql.upper():
        cursor.execute(create_sql.replace(table_name,
                "_temp_%s" % table_name))
        cursor.execute('PRAGMA table_info("' + table_name + '")')
        columns = ['"%s"' % info[1] for info in cursor.fetchall()]
        column_list = ",".join(columns)
        cursor.execute(
            ('INSERT INTO "_temp_%s" '
                "(" + column_list + ") "
                "SELECT " + column_list + ' FROM "%s"')
            % (table_name, table_name))
        cursor.execute('DROP TABLE "%s"' % table_name)
        cursor.execute(create_sql.replace(
                "PRIMARY KEY", "PRIMARY KEY AUTOINCREMENT"))
        cursor.execute(
            ('INSERT INTO "%s" '
                "(" + column_list + ") "
                "SELECT " + column_list + ' FROM "_temp_%s"')
            % (table_name, table_name))
        cursor.execute('DROP TABLE "_temp_%s"' % table_name)
    return True
def get_lastmodified(cls, uri, cache=None):
    """Return the last-modified epoch timestamp for *uri*.

    Only 'ir.attachment' URIs are resolved; any other target falls back
    to the current time.  When *cache* is provided, timestamps for all
    prefetched ids are memoized under cache[model][id]['lastmodified'].
    """
    pool = Pool()
    object_name, object_id = cls._uri2object(uri, cache=cache)
    if object_name == 'ir.attachment':
        Model = pool.get(object_name)
        if object_id:
            if cache is not None:
                cache.setdefault(Model.__name__, {})
                # NOTE(review): assumes keys() returns a mutable list
                # (Python 2) — .append below would fail on Python 3.
                ids = cache[Model.__name__].keys()
                if object_id not in ids:
                    ids.append(object_id)
                elif 'lastmodified' in cache[Model.__name__][object_id]:
                    # Cache hit: reuse the previously fetched date.
                    return cache[Model.__name__][object_id]['lastmodified']
            else:
                ids = [object_id]
            res = None
            cursor = Transaction().cursor
            table = Model.__table__()
            for sub_ids in grouped_slice(ids):
                red_sql = reduce_ids(table.id, sub_ids)
                # write_date is NULL until the first write; fall back
                # to create_date.
                cursor.execute(*table.select(
                        table.id,
                        Extract('EPOCH',
                            Coalesce(table.write_date, table.create_date)),
                        where=red_sql))
                for object_id2, date in cursor.fetchall():
                    if object_id2 == object_id:
                        res = date
                    if cache is not None:
                        cache[Model.__name__].setdefault(object_id2, {})
                        cache[Model.__name__][object_id2]['lastmodified'] = date
            if res is not None:
                return res
    return time.time()
def get_lines_to_pay(cls, invoices, name):
    """Extend lines to pay with move lines attached to the invoice move.

    Adds, per invoice, every receivable (out) / payable (in) move line
    carrying a maturity date whose move originates from the invoice,
    keeping the lines ordered by maturity date.
    """
    pool = Pool()
    Move = pool.get('account.move')
    Line = pool.get('account.move.line')
    Account = pool.get('account.account')
    line = Line.__table__()
    account = Account.__table__()
    move = Move.__table__()
    invoice = cls.__table__()
    cursor = Transaction().connection.cursor()
    # origin is a reference column; Cast the id to its SQL type so the
    # Concat comparison matches 'account.invoice,<id>'.
    _, origin_type = Move.origin.sql_type()

    lines = super(Invoice, cls).get_lines_to_pay(invoices, name)
    for sub_ids in grouped_slice(invoices):
        red_sql = reduce_ids(invoice.id, sub_ids)
        query = invoice.join(move,
            condition=((move.origin == Concat('account.invoice,',
                        Cast(invoice.id, origin_type))))
            ).join(line, condition=(line.move == move.id)
            ).join(account, condition=(
                (line.account == account.id)
                # Receivable accounts for customer invoices, payable
                # for supplier ones.
                & Case((invoice.type == 'out',
                        account.kind == 'receivable'),
                    else_=account.kind == 'payable'))).select(
            invoice.id, line.id,
            # NOTE(review): python-sql presumably renders '!= None' as
            # IS NOT NULL (Null is used elsewhere in this file) —
            # confirm.
            where=(line.maturity_date != None) & red_sql,
            order_by=(invoice.id, line.maturity_date))
        cursor.execute(*query)
        for invoice_id, line_id in cursor.fetchall():
            # Avoid duplicating lines already returned by super().
            if line_id not in lines[invoice_id]:
                lines[invoice_id].append(line_id)
    return lines
def get_pending_amount(cls, agents, name):
    """Total not-yet-invoiced commission amount per agent."""
    pool = Pool()
    Commission = pool.get('commission')
    commission = Commission.__table__()
    cursor = Transaction().connection.cursor()

    agent_ids = [agent.id for agent in agents]
    amounts = dict.fromkeys(agent_ids, None)
    for sub_ids in grouped_slice(agent_ids):
        clause = (reduce_ids(commission.agent, sub_ids)
            & (commission.invoice_line == Null))
        cursor.execute(*commission.select(
                commission.agent, Sum(commission.amount),
                where=clause, group_by=commission.agent))
        amounts.update(dict(cursor.fetchall()))

    digits = cls.pending_amount.digits
    exp = Decimal(str(10.0 ** -digits[1]))
    for agent_id, amount in amounts.iteritems():
        if amount:
            # SQLite uses float for SUM
            if not isinstance(amount, Decimal):
                amount = Decimal(str(amount))
            amounts[agent_id] = amount.quantize(exp)
    return amounts
def get_patient_status(cls, patients, name):
    """Return {patient_id: bool} — True when currently hospitalized.

    A patient is hospitalized when at least one inpatient registration
    in state 'hospitalized' references them.
    """
    cursor = Transaction().connection.cursor()
    pool = Pool()
    Registration = pool.get('gnuhealth.inpatient.registration')
    registration = Registration.__table__()

    # Will store statuses {patient: True/False, ...}
    ids = list(map(int, patients))
    result = dict.fromkeys(ids, False)

    for sub_ids in grouped_slice(ids):
        # Fix: filter on registration.patient, not registration.id —
        # sub_ids are patient ids, and the result keys are patients.
        clause_ids = reduce_ids(registration.patient, sub_ids)
        # Hospitalized patient ids
        query = registration.select(
            registration.patient, Literal(True),
            where=(registration.state == 'hospitalized') & clause_ids,
            group_by=registration.patient)
        # Update dictionary of patient ids with True statuses
        cursor.execute(*query)
        result.update(cursor.fetchall())
    return result
def __register__(cls, module_name):
    """Analytic entry migration from 3.4 (origin key, root backfill)."""
    pool = Pool()
    Account = pool.get('analytic_account.account')
    TableHandler = backend.get('TableHandler')
    cursor = Transaction().cursor

    # Migration from 3.4: use origin as the key for One2Many
    old_table = 'analytic_account_account_selection_rel'
    migration_3_4 = TableHandler.table_exist(cursor, old_table)
    if migration_3_4:
        TableHandler.table_rename(cursor, old_table, cls._table)

    # Don't create table before renaming
    table = TableHandler(cursor, cls, module_name)

    super(AnalyticAccountEntry, cls).__register__(module_name)

    # Migration from 3.4: set root value and remove required
    if migration_3_4:
        account = Account.__table__()
        entry = cls.__table__()
        cursor.execute(*account.select(account.id, account.root,
                where=account.type != 'root'))
        for account_id, root_id in cursor.fetchall():
            cursor.execute(*entry.update(
                    columns=[entry.root], values=[root_id],
                    where=entry.account == account_id))
        table.not_null_action('selection', action='remove')
        table.not_null_action('account', action='remove')
def __register__(cls, module_name):
    """Convert legacy party vat columns into typed identifiers."""
    pool = Pool()
    Party = pool.get('party.party')
    cursor = Transaction().connection.cursor()
    party = Party.__table__()

    super().__register__(module_name)

    party_h = Party.__table_handler__(module_name)
    if not (party_h.column_exist('vat_number')
            and party_h.column_exist('vat_country')):
        return
    identifiers = []
    cursor.execute(*party.select(
            party.id, party.vat_number, party.vat_country,
            where=(party.vat_number != Null)
            | (party.vat_country != Null)))
    for party_id, number, country in cursor.fetchall():
        code = (country or '') + (number or '')
        if not code:
            continue
        # Keep the first tax identifier type whose country checker
        # accepts the code; otherwise leave it untyped.
        for type in Party.tax_identifier_types():
            module = get_cc_module(*type.split('_', 1))
            if module.is_valid(code):
                break
        else:
            type = None
        identifiers.append(cls(party=party_id, code=code, type=type))
    cls.save(identifiers)
    party_h.drop_column('vat_number')
    party_h.drop_column('vat_country')
def get_creationdate(cls, uri, cache=None):
    """Return the creation epoch timestamp of the vCard behind *uri*.

    Falls back to the parent collection implementation when the party
    has no recorded date, and raises DAV_NotFound for unresolvable
    URIs.  When *cache* is given, dates are memoized under
    cache['_contact'][party_id].
    """
    Party = Pool().get('party.party')
    party = Party.__table__()
    party_id = cls.vcard(uri)
    cursor = Transaction().cursor
    if party_id is None:
        raise DAV_NotFound
    if party_id:
        if cache is not None:
            cache.setdefault('_contact', {})
            # NOTE(review): assumes keys() returns a mutable list
            # (Python 2) — .append below would fail on Python 3.
            ids = cache['_contact'].keys()
            if party_id not in ids:
                ids.append(party_id)
            elif 'creationdate' in cache['_contact'][party_id]:
                # Cache hit: reuse the previously fetched date.
                return cache['_contact'][party_id]['creationdate']
        else:
            ids = [party_id]
        res = None
        for sub_ids in grouped_slice(ids):
            red_sql = reduce_ids(party.id, sub_ids)
            cursor.execute(*party.select(party.id,
                    Extract('EPOCH', party.create_date),
                    where=red_sql))
            for party_id2, date in cursor.fetchall():
                if party_id2 == party_id:
                    res = date
                if cache is not None:
                    cache['_contact'].setdefault(party_id2, {})
                    cache['_contact'][party_id2]['creationdate'] = date
        if res is not None:
            return res
    return super(Collection, cls).get_creationdate(uri, cache=cache)
def __register__(cls, module_name):
    """Apply the 3.4 timesheet line migrations (company, duration)."""
    TableHandler = backend.get('TableHandler')
    cursor = Transaction().connection.cursor()
    handler = TableHandler(cls, module_name)
    sql_table = cls.__table__()
    work = Pool().get('timesheet.work').__table__()
    company_missing = not handler.column_exist('company')

    super(Line, cls).__register__(module_name)

    # Migration from 3.4: new company field
    if company_missing:
        # Don't use FROM because SQLite nor MySQL support it.
        company_query = work.select(work.company,
            where=work.id == sql_table.work)
        cursor.execute(*sql_table.update(
                [sql_table.company], [company_query]))
    # Migration from 3.4: change hours into timedelta duration
    if handler.column_exist('hours'):
        handler.drop_constraint('check_move_hours_pos')
        cursor.execute(*sql_table.select(sql_table.id, sql_table.hours))
        for line_id, hours in cursor.fetchall():
            cursor.execute(*sql_table.update(
                    [sql_table.duration],
                    [datetime.timedelta(hours=hours)],
                    where=sql_table.id == line_id))
        handler.drop_column('hours')
def __register__(cls, module_name):
    """Migration from 3.4: statement-line move origins now point at the
    statement instead of the statement line."""
    pool = Pool()
    StatementLine = pool.get('account.statement.line')
    cursor = Transaction().cursor
    sql_table = cls.__table__()

    super(Move, cls).__register__(module_name)

    # Migration from 3.4:
    # account.statement.line origin changed to account.statement
    statement_line = StatementLine.__table__()
    # Fix: itertools.groupby only merges CONSECUTIVE rows, so the sort
    # key must match the group key (the statement) — the previous
    # order_by put the move id first, fragmenting the groups.
    cursor.execute(*sql_table.join(statement_line,
            condition=(
                Concat(StatementLine.__name__ + ',', statement_line.id)
                == sql_table.origin)
            ).select(sql_table.id, statement_line.statement,
            order_by=(statement_line.statement, sql_table.id)))
    for statement_id, values in groupby(cursor.fetchall(), itemgetter(1)):
        ids = [x[0] for x in values]
        for sub_ids in grouped_slice(ids):
            red_sql = reduce_ids(sql_table.id, sub_ids)
            cursor.execute(*sql_table.update(
                    columns=[sql_table.origin],
                    values=['account.statement,%s' % statement_id],
                    where=red_sql))
def table_exist(table_name):
    """Return True when *table_name* exists in the SQLite schema.

    As a side effect, pre-1.6 tables whose primary key lacks
    AUTOINCREMENT are rebuilt through a temporary table to add it.
    """
    cursor = Transaction().connection.cursor()
    cursor.execute("SELECT sql FROM sqlite_master "
        "WHERE type = 'table' AND name = ?", (table_name,))
    found = cursor.fetchone()
    if not found:
        return False
    create_stmt, = found
    # Migration from 1.6 add autoincrement
    if 'AUTOINCREMENT' in create_stmt.upper():
        return True
    cursor.execute(create_stmt.replace(table_name,
            '_temp_%s' % table_name))
    cursor.execute('PRAGMA table_info("' + table_name + '")')
    columns = ['"%s"' % row[1] for row in cursor.fetchall()]
    cols = ','.join(columns)
    cursor.execute(('INSERT INTO "_temp_%s" '
            '(' + cols + ') '
            'SELECT ' + cols + ' FROM "%s"') % (table_name, table_name))
    cursor.execute('DROP TABLE "%s"' % table_name)
    cursor.execute(create_stmt.replace(
            'PRIMARY KEY', 'PRIMARY KEY AUTOINCREMENT'))
    cursor.execute(('INSERT INTO "%s" '
            '(' + cols + ') '
            'SELECT ' + cols + ' FROM "_temp_%s"') % (table_name, table_name))
    cursor.execute('DROP TABLE "_temp_%s"' % table_name)
    return True
def get_pending_amount(cls, agents, name):
    """Sum of commissions without an invoice line, per agent."""
    Commission = Pool().get('commission')
    commission = Commission.__table__()
    cursor = Transaction().connection.cursor()
    amounts = {agent.id: None for agent in agents}
    for sub_ids in grouped_slice(list(amounts)):
        cursor.execute(*commission.select(
                commission.agent, Sum(commission.amount),
                where=(reduce_ids(commission.agent, sub_ids)
                    & (commission.invoice_line == Null)),
                group_by=commission.agent))
        amounts.update(dict(cursor.fetchall()))
    # Quantize to the field's decimal precision.
    exp = Decimal(str(10.0 ** -cls.pending_amount.digits[1]))
    for agent_id, amount in amounts.items():
        if amount:
            # SQLite uses float for SUM
            if not isinstance(amount, Decimal):
                amount = Decimal(str(amount))
            amounts[agent_id] = amount.quantize(exp)
    return amounts
def __register__(cls, module_name):
    """Handle the 3.4 rename/root migration for analytic entries."""
    pool = Pool()
    Account = pool.get('analytic_account.account')
    TableHandler = backend.get('TableHandler')
    cursor = Transaction().connection.cursor()

    # Migration from 3.4: use origin as the key for One2Many
    old_table = 'analytic_account_account_selection_rel'
    migration_3_4 = TableHandler.table_exist(old_table)
    if migration_3_4:
        TableHandler.table_rename(old_table, cls._table)

    # Don't create table before renaming
    table = cls.__table_handler__(module_name)

    super(AnalyticAccountEntry, cls).__register__(module_name)

    # Migration from 3.4: set root value and remove required
    if migration_3_4:
        account = Account.__table__()
        entry = cls.__table__()
        cursor.execute(*account.select(account.id, account.root,
                where=account.type != 'root'))
        for account_id, root_id in cursor.fetchall():
            cursor.execute(*entry.update(
                    columns=[entry.root], values=[root_id],
                    where=entry.account == account_id))
        table.not_null_action('selection', action='remove')
        table.not_null_action('account', action='remove')
def __register__(cls, module_name):
    """Sale opportunity migrations (2.8 reference, 3.4 sale→origin)."""
    pool = Pool()
    Sale = pool.get('sale.sale')
    cursor = Transaction().cursor
    TableHandler = backend.get('TableHandler')
    sql_table = cls.__table__()
    sale = Sale.__table__()

    # A missing table means a fresh install: reference "exists".
    reference_exists = True
    if TableHandler.table_exist(cursor, cls._table):
        reference_exists = TableHandler(
            cursor, cls, module_name).column_exist('reference')

    super(SaleOpportunity, cls).__register__(module_name)

    table = TableHandler(cursor, cls, module_name)
    # Migration from 2.8: make party not required and add reference as
    # required
    table.not_null_action('party', action='remove')
    if not reference_exists:
        cursor.execute(*sql_table.update(
                columns=[sql_table.reference],
                values=[sql_table.id],
                where=sql_table.reference == Null))
        table.not_null_action('reference', action='add')

    # Migration from 3.4: replace sale by origin
    if table.column_exist('sale'):
        cursor.execute(*sql_table.select(sql_table.id, sql_table.sale,
                where=sql_table.sale != Null))
        for opportunity_id, sale_id in cursor.fetchall():
            cursor.execute(*sale.update(
                    columns=[sale.origin],
                    values=['%s,%s' % (cls.__name__, opportunity_id)],
                    where=sale.id == sale_id))
        table.drop_column('sale', exception=True)
def read(cls, ids, fields_names=None):
    """Read calendar events while enforcing access rules.

    Raises 'access_error' when a requested id is hidden from the
    current user.  Confidential events the user cannot write are
    stripped down via ``_clean_confidential``.
    """
    Rule = Pool().get('ir.rule')
    cursor = Transaction().cursor
    table = cls.__table__()
    # A count mismatch means at least one id is filtered out by rules.
    if len(set(ids)) != cls.search([('id', 'in', ids)], count=True):
        cls.raise_user_error('access_error', cls.__doc__)
    writable_ids = []
    domain = Rule.query_get(cls.__name__, mode='write')
    if domain:
        for sub_ids in grouped_slice(ids):
            red_sql = reduce_ids(table.id, sub_ids)
            cursor.execute(*table.select(table.id,
                    where=red_sql & table.id.in_(domain)))
            writable_ids.extend(x[0] for x in cursor.fetchall())
    else:
        # No write rule defined: every requested event is writable.
        writable_ids = ids
    writable_ids = set(writable_ids)
    if fields_names is None:
        fields_names = []
    # Copy before mutating: callers may reuse their list.
    fields_names = fields_names[:]
    to_remove = set()
    # These fields drive the confidentiality handling below; fetch
    # them transparently and strip them again afterwards.
    for field in ('classification', 'calendar', 'transp'):
        if field not in fields_names:
            fields_names.append(field)
            to_remove.add(field)
    res = super(Event, cls).read(ids, fields_names=fields_names)
    for record in res:
        if record['classification'] == 'confidential' \
                and record['id'] not in writable_ids:
            cls._clean_confidential(record, record['transp'])
        for field in to_remove:
            del record[field]
    return res
def validate(cls, paymentgroups):
    """Validate SEPA payment groups.

    Readonly groups may not change their date; non-draft payments must
    not predate the group's date.  Draft payments that do predate it
    have their date pulled forward via raw SQL.
    """
    super(CondoPaymentGroup, cls).validate(paymentgroups)
    table = cls.__table__()
    payments = Pool().get('condo.payment').__table__()
    for paymentgroup in paymentgroups:
        if paymentgroup.readonly:
            # Compare against the stored row in a separate readonly
            # transaction to detect a date change on a readonly group.
            with Transaction().new_transaction(readonly=True) as transaction,\
                    transaction.connection.cursor() as cursor:
                cursor.execute(*table.select(table.date,
                        where=(table.id == paymentgroup.id)
                        & (table.date != paymentgroup.date)))
                if cursor.fetchone():
                    cls.raise_user_error('readonly_paymentgroup',
                        (paymentgroup.reference)
                        )
            # NOTE(review): this returns from validate() entirely,
            # skipping the remaining paymentgroups — confirm intended.
            return
        cursor = Transaction().connection.cursor()
        cursor.execute(*payments.select(payments.id,
                where=(payments.group == paymentgroup.id)
                & (payments.date < paymentgroup.date)
                & (payments.state != 'draft')))
        if cursor.fetchall():
            cls.raise_user_error('payments_approved',
                (paymentgroup.reference)
                )
        paymentgroup.check_today()
        paymentgroup.check_businessdate()
        paymentgroup.company_has_sepa_creditor_identifier()
        #if has drafted payments with due date before new date
        #update date field of payments
        cursor.execute(*payments.select(payments.id,
                where=(payments.group == paymentgroup.id)
                & (payments.date < paymentgroup.date)
                & (payments.state == 'draft')))
        ids_draft = [ids for (ids,) in cursor.fetchall()]
        if len(ids_draft):
            for sub_ids in grouped_slice(ids_draft):
                red_sql = reduce_ids(payments.id, sub_ids)
                # Use SQL to prevent double validate loop
                cursor.execute(*payments.update(
                        columns=[payments.date],
                        values=[paymentgroup.date],
                        where=red_sql))
def _turnover(self, values):
    '''
    Expected requests:
        - Turnover for
            - one account or several accounts
            - for a period, several periods or an arbitrary date range
            - debit, credit, or netted

    values: dict with structure {
        'accounts': [],
        'period': {
            'StartDate': date,
            'EndDate': date,
            'Periods': [] - IDs ekd.period,
            },
        'type_turnover': ['debit', 'credit'],
        }

    Return Decimal(Amount)
    '''
    # line_obj and period_obj are fetched but unused in this method.
    line_obj = self.pool.get('ekd.account.move.line')
    balance_obj = self.pool.get('ekd.balances.account')
    period_obj = self.pool.get('ekd.period')
    res = {}
    res['debit'] = Decimal('0.0')
    res['credit'] = Decimal('0.0')
    if values.get('period').get('Periods'):
        # Aggregate from the precomputed per-period balances.
        for line in balance_obj.search_read([
                ('period', 'in', values.get('period').get('Periods')),
                ('account', 'in', values.get('accounts'))]):
            # NOTE(review): here type_turnover is iterated as a list of
            # keys ('debit'/'credit'), yet below it is compared to
            # 0/1/-1 — the two conventions look inconsistent; confirm
            # against the callers.
            for type in values.get('type_turnover'):
                res[type] += line[type]
        #raise Exception(str(values))
    elif values.get('period').get('StartDate'):
        cursor = Transaction().cursor
        # NOTE(review): account ids are joined into the SQL text;
        # presumably ints from internal callers, not user input.
        cursor.execute('SELECT dt_account, ct_account, SUM(COALESCE(amount)) FROM account_ru_move_line '\
            'WHERE state=\'posted\' AND date_operation >= %s AND date_operation <= %s AND '\
            '( dt_account in ('+','.join(map(str, values.get('accounts')))+') OR'\
            ' ct_account in ('+','.join(map(str, values.get('accounts'))) +'))'\
            'GROUP BY dt_account, ct_account',
            (values.get('period').get('StartDate'),
                values.get('period').get('EndDate')))
        for dt_account, ct_account, amount in cursor.fetchall():
            if dt_account in values.get('accounts'):
                res['debit'] += amount
            if ct_account in values.get('accounts'):
                res['credit'] += amount
    else:
        raise Exception('Error in _turnover', 'Not found period')
    # Netted (0), debit-only (1) or credit-only (-1) result selection.
    if values.get('type_turnover') == 0:
        return res['debit']-res['credit']
    elif values.get('type_turnover') == 1:
        return res['debit']
    elif values.get('type_turnover') == -1:
        return res['credit']
    else:
        raise Exception('Error in _turnover', 'Not found Type turnover')
def search_receivable_payable(cls, name, clause):
    """Domain searcher for party receivable/payable (today) amounts.

    Aggregates unreconciled move lines on active receivable/payable
    accounts of the resolved company and compares the per-party balance
    against the clause operand.
    """
    pool = Pool()
    MoveLine = pool.get('account.move.line')
    Company = pool.get('company.company')
    User = pool.get('res.user')
    Date = pool.get('ir.date')
    cursor = Transaction().cursor
    if name not in ('receivable', 'payable',
            'receivable_today', 'payable_today'):
        raise Exception('Bad argument')
    company_id = None
    user_id = Transaction().user
    # Root (id 0) may impersonate another user through the context.
    if user_id == 0 and 'user' in Transaction().context:
        user_id = Transaction().context['user']
    user = User(user_id)
    # Only accept the context company if it belongs to the user's
    # main-company tree.
    if Transaction().context.get('company'):
        child_companies = Company.search([
            ('parent', 'child_of', [user.main_company.id]),
            ])
        if Transaction().context['company'] in child_companies:
            company_id = Transaction().context['company']
    if not company_id:
        if user.company:
            company_id = user.company.id
        elif user.main_company:
            company_id = user.main_company.id
    if not company_id:
        return []
    code = name
    today_query = ''
    today_value = []
    # The *_today variants additionally restrict on maturity date.
    if name in ('receivable_today', 'payable_today'):
        code = name[:-6]
        today_query = 'AND (l.maturity_date <= %s ' \
            'OR l.maturity_date IS NULL) '
        today_value = [Date.today()]
    line_query, _ = MoveLine.query_get()
    # NOTE(review): clause[1] (the operator) is interpolated into the
    # SQL — presumably safe because domain operators are validated
    # upstream; confirm.
    cursor.execute('SELECT l.party '
        'FROM account_move_line AS l, account_account AS a '
        'WHERE a.id = l.account '
        'AND a.active '
        'AND a.kind = %s '
        'AND l.party IS NOT NULL '
        'AND l.reconciliation IS NULL '
        'AND ' + line_query + ' '
        + today_query +
        'AND a.company = %s '
        'GROUP BY l.party '
        'HAVING (SUM((COALESCE(l.debit, 0) - COALESCE(l.credit, 0))) '
        + clause[1] + ' %s)',
        [code] + today_value + [company_id] + [Decimal(clause[2] or 0)])
    return [('id', 'in', [x[0] for x in cursor.fetchall()])]
def loaders(self):
    '''
    Lazy load the loaders

    Builds (once) the list of Jinja FileSystemLoaders: the configured
    searchpath plus the 'templates' folder of every installed Tryton
    module, in reverse dependency order.
    '''
    if self._loaders is None:
        self._loaders = []
        # Reuse the current transaction when one is open; otherwise
        # start a fresh one as the root user (id 0).
        if not Transaction().cursor:
            contextmanager = Transaction().start(self.database_name, 0)
        else:
            # NOTE(review): contextlib.nested exists on Python 2 only.
            contextmanager = contextlib.nested(
                Transaction().set_user(0),
                Transaction().reset_context()
            )
        with contextmanager:
            cursor = Transaction().cursor
            cursor.execute(
                "SELECT name FROM ir_module_module "
                "WHERE state = 'installed'"
            )
            installed_module_list = [name for (name,) in cursor.fetchall()]
        if self.searchpath is not None:
            self._loaders.append(FileSystemLoader(self.searchpath))
        # Look into the module graph and check if they have template
        # folders and if they do add them too
        from trytond.modules import create_graph, get_module_list, \
            MODULES_PATH, EGG_MODULES
        packages = list(create_graph(get_module_list())[0])[::-1]
        for package in packages:
            if package.name not in installed_module_list:
                # If the module is not installed in the current database
                # then don't load the templates either to be consistent
                # with Tryton's modularity
                continue
            if package.name in EGG_MODULES:
                # trytond.tools has a good helper which allows resources to
                # be loaded from the installed site packages. Just use it
                # to load the tryton.cfg file which is guaranteed to exist
                # and from it lookup the directory. From here, its just
                # another searchpath for the loader.
                f = tools.file_open(
                    os.path.join(package.name, 'tryton.cfg')
                )
                template_dir = os.path.join(
                    os.path.dirname(f.name), 'templates'
                )
            else:
                template_dir = os.path.join(
                    MODULES_PATH, package.name, 'templates'
                )
            if os.path.isdir(template_dir):
                # Add to FS Loader only if the folder exists
                self._loaders.append(FileSystemLoader(template_dir))
    return self._loaders