def _update_overtime(self, employee_attendance_dates=None):
    """Recompute `hr.attendance.overtime` records for the given employees/dates.

    :param employee_attendance_dates: dict mapping an employee record to a list of
        ``(day_start_datetime, date)`` tuples to (re)check for overtime; when None,
        it is computed from ``self._get_attendances_dates()``.
    Creates missing overtime records, updates existing non-adjustment ones and
    unlinks those that no longer correspond to any overtime.
    """
    if employee_attendance_dates is None:
        employee_attendance_dates = self._get_attendances_dates()

    overtime_to_unlink = self.env['hr.attendance.overtime']
    overtime_vals_list = []
    for emp, attendance_dates in employee_attendance_dates.items():
        # _get_attendances_dates returns the date translated from the local timezone without tzinfo,
        # and contains all the dates which we need to check for overtime
        attendance_domain = []
        for attendance_date in attendance_dates:
            attendance_domain = OR([attendance_domain, [
                ('check_in', '>=', attendance_date[0]),
                ('check_in', '<', attendance_date[0] + timedelta(hours=24)),
            ]])
        attendance_domain = AND([[('employee_id', '=', emp.id)], attendance_domain])

        # Attendances per LOCAL day
        attendances_per_day = defaultdict(lambda: self.env['hr.attendance'])
        all_attendances = self.env['hr.attendance'].search(attendance_domain)
        for attendance in all_attendances:
            check_in_day_start = attendance._get_day_start_and_day(attendance.employee_id, attendance.check_in)
            attendances_per_day[check_in_day_start[1]] += attendance

        # As _attendance_intervals_batch and _leave_intervals_batch both take localized dates we need to localize those dates
        start = pytz.utc.localize(min(attendance_dates, key=itemgetter(0))[0])
        stop = pytz.utc.localize(max(attendance_dates, key=itemgetter(0))[0] + timedelta(hours=24))

        # Retrieve expected attendance intervals
        expected_attendances = emp.resource_calendar_id._attendance_intervals_batch(
            start, stop, emp.resource_id)[emp.resource_id.id]
        # Subtract Global Leaves
        expected_attendances -= emp.resource_calendar_id._leave_intervals_batch(
            start, stop, None)[False]

        # working_times = {date: [(start, stop)]}
        working_times = defaultdict(lambda: [])
        for expected_attendance in expected_attendances:
            # Exclude resource.calendar.attendance
            working_times[expected_attendance[0].date()].append(expected_attendance[:2])

        overtimes = self.env['hr.attendance.overtime'].sudo().search([
            ('employee_id', '=', emp.id),
            ('date', 'in', [day_data[1] for day_data in attendance_dates]),
            ('adjustment', '=', False),
        ])

        # Thresholds are stored in minutes on the company; convert to hours.
        company_threshold = emp.company_id.overtime_company_threshold / 60.0
        employee_threshold = emp.company_id.overtime_employee_threshold / 60.0

        for day_data in attendance_dates:
            attendance_date = day_data[1]
            attendances = attendances_per_day.get(attendance_date, self.browse())
            unfinished_shifts = attendances.filtered(lambda a: not a.check_out)
            overtime_duration = 0
            overtime_duration_real = 0
            # Overtime is not counted if any shift is not closed or if there are no attendances for that day,
            # this could happen when deleting attendances.
            if not unfinished_shifts and attendances:
                # The employee usually doesn't work on that day
                if not working_times[attendance_date]:
                    # User does not have any resource_calendar_attendance for that day (week-end for example)
                    overtime_duration = sum(attendances.mapped('worked_hours'))
                    overtime_duration_real = overtime_duration
                # The employee usually works on that day
                else:
                    # Compute start and end time for that day
                    planned_start_dt, planned_end_dt = False, False
                    planned_work_duration = 0
                    for calendar_attendance in working_times[attendance_date]:
                        planned_start_dt = min(planned_start_dt, calendar_attendance[0]) if planned_start_dt else calendar_attendance[0]
                        planned_end_dt = max(planned_end_dt, calendar_attendance[1]) if planned_end_dt else calendar_attendance[1]
                        planned_work_duration += (calendar_attendance[1] - calendar_attendance[0]).total_seconds() / 3600.0
                    # Count time before, during and after 'working hours'
                    pre_work_time, work_duration, post_work_time = 0, 0, 0

                    for attendance in attendances:
                        # consider check_in as planned_start_dt if within threshold
                        # if delta_in < 0: Checked in after supposed start of the day
                        # if delta_in > 0: Checked in before supposed start of the day
                        local_check_in = pytz.utc.localize(attendance.check_in)
                        delta_in = (planned_start_dt - local_check_in).total_seconds() / 3600.0

                        # Started before or after planned date within the threshold interval
                        if (delta_in > 0 and delta_in <= company_threshold) or\
                            (delta_in < 0 and abs(delta_in) <= employee_threshold):
                            local_check_in = planned_start_dt
                        local_check_out = pytz.utc.localize(attendance.check_out)

                        # same for check_out as planned_end_dt
                        delta_out = (local_check_out - planned_end_dt).total_seconds() / 3600.0
                        # if delta_out < 0: Checked out before supposed end of the day
                        # if delta_out > 0: Checked out after supposed end of the day

                        # Finished before or after planned date within the threshold interval
                        if (delta_out > 0 and delta_out <= company_threshold) or\
                            (delta_out < 0 and abs(delta_out) <= employee_threshold):
                            local_check_out = planned_end_dt

                        # There is an overtime at the start of the day
                        if local_check_in < planned_start_dt:
                            pre_work_time += (min(planned_start_dt, local_check_out) - local_check_in).total_seconds() / 3600.0
                        # Interval inside the working hours -> Considered as working time
                        if local_check_in <= planned_end_dt and local_check_out >= planned_start_dt:
                            work_duration += (min(planned_end_dt, local_check_out) - max(planned_start_dt, local_check_in)).total_seconds() / 3600.0
                        # There is an overtime at the end of the day
                        if local_check_out > planned_end_dt:
                            post_work_time += (local_check_out - max(planned_end_dt, local_check_in)).total_seconds() / 3600.0

                    # Overtime within the planned work hours + overtime before/after work hours is > company threshold
                    overtime_duration = work_duration - planned_work_duration
                    if pre_work_time > company_threshold:
                        overtime_duration += pre_work_time
                    if post_work_time > company_threshold:
                        overtime_duration += post_work_time
                    # Global overtime including the thresholds
                    overtime_duration_real = sum(attendances.mapped('worked_hours')) - planned_work_duration

            overtime = overtimes.filtered(lambda o: o.date == attendance_date)
            if not float_is_zero(overtime_duration, 2) or unfinished_shifts:
                # Do not create if any attendance doesn't have a check_out, update if exists
                if unfinished_shifts:
                    overtime_duration = 0
                if not overtime and overtime_duration:
                    overtime_vals_list.append({
                        'employee_id': emp.id,
                        'date': attendance_date,
                        'duration': overtime_duration,
                        'duration_real': overtime_duration_real,
                    })
                elif overtime:
                    # BUGFIX: duration_real must be the thresholds-included value
                    # (overtime_duration_real), as in the create vals above — not
                    # the thresholded overtime_duration.
                    overtime.sudo().write({
                        'duration': overtime_duration,
                        'duration_real': overtime_duration_real,
                    })
            elif overtime:
                overtime_to_unlink |= overtime
    self.env['hr.attendance.overtime'].sudo().create(overtime_vals_list)
    overtime_to_unlink.sudo().unlink()
def _import_fattura_pa(self, tree, invoice):
    """ Decodes a fattura_pa (Italian electronic invoice) XML tree into invoices.

    One invoice is created/updated per ``FatturaElettronicaBody`` node, so a
    batch file yields several records. Numbers in angle brackets in the
    comments below (e.g. <2.1.1.2>) refer to the FatturaPA specification
    element paths.

    :param tree: the fattura_pa lxml tree to decode.
    :param invoice: the invoice to update or an empty recordset.
    :returns: the invoice(s) where the fattura_pa data was imported.
    """
    invoices = self.env['account.move']
    first_run = True

    # possible to have multiple invoices in the case of an invoice batch, the batch itself is repeated for every invoice of the batch
    for body_tree in tree.xpath('//FatturaElettronicaBody'):
        if not first_run or not invoice:
            # make sure all the iterations create a new invoice record (except the first which could have already created one)
            invoice = self.env['account.move']
        first_run = False

        # Type must be present in the context to get the right behavior of the _default_journal method (account.move).
        # journal_id must be present in the context to get the right behavior of the _default_account method (account.move.line).

        # Company lookup: match the buyer (CessionarioCommittente) by VAT first,
        # then by Codice Fiscale; skip the body if neither matches a company.
        elements = tree.xpath('//CessionarioCommittente//IdCodice')
        company = elements and self.env['res.company'].search([('vat', 'ilike', elements[0].text)], limit=1)
        if not company:
            elements = tree.xpath('//CessionarioCommittente//CodiceFiscale')
            company = elements and self.env['res.company'].search([('l10n_it_codice_fiscale', 'ilike', elements[0].text)], limit=1)
            if not company:
                # Only invoices with a correct VAT or Codice Fiscale can be imported
                _logger.warning('No company found with VAT or Codice Fiscale like %r.', elements[0].text)
                continue

        # Refund type.
        # TD01 == invoice
        # TD02 == advance/down payment on invoice
        # TD03 == advance/down payment on fee
        # TD04 == credit note
        # TD05 == debit note
        # TD06 == fee
        # For unsupported document types, just assume in_invoice, and log that the type is unsupported
        elements = tree.xpath('//DatiGeneraliDocumento/TipoDocumento')
        move_type = 'in_invoice'
        if elements and elements[0].text and elements[0].text == 'TD04':
            move_type = 'in_refund'
        elif elements and elements[0].text and elements[0].text != 'TD01':
            _logger.info('Document type not managed: %s. Invoice type is set by default.', elements[0].text)

        # Setup the context for the Invoice Form
        invoice_ctx = invoice.with_company(company) \
            .with_context(default_move_type=move_type)

        # move could be a single record (editing) or be empty (new).
        with Form(invoice_ctx) as invoice_form:
            message_to_log = []

            # Partner (first step to avoid warning 'Warning! You must first select a partner.'). <1.2>
            # Lookup order: seller VAT, then Codice Fiscale (with/without 'IT'
            # prefix variants), then transmission e-mail / PEC address.
            elements = tree.xpath('//CedentePrestatore//IdCodice')
            partner = elements and self.env['res.partner'].search(['&', ('vat', 'ilike', elements[0].text), '|', ('company_id', '=', company.id), ('company_id', '=', False)], limit=1)
            if not partner:
                elements = tree.xpath('//CedentePrestatore//CodiceFiscale')
                if elements:
                    codice = elements[0].text
                    domains = [[('l10n_it_codice_fiscale', '=', codice)]]
                    if re.match(r'^[0-9]{11}$', codice):
                        domains.append([('l10n_it_codice_fiscale', '=', 'IT' + codice)])
                    elif re.match(r'^IT[0-9]{11}$', codice):
                        domains.append([('l10n_it_codice_fiscale', '=', self.env['res.partner']._l10n_it_normalize_codice_fiscale(codice))])
                    partner = elements and self.env['res.partner'].search(
                        AND([OR(domains), OR([[('company_id', '=', company.id)], [('company_id', '=', False)]])]), limit=1)
            if not partner:
                elements = tree.xpath('//DatiTrasmissione//Email')
                partner = elements and self.env['res.partner'].search(
                    ['&', '|', ('email', '=', elements[0].text), ('l10n_it_pec_email', '=', elements[0].text), '|', ('company_id', '=', company.id), ('company_id', '=', False)], limit=1)
            if partner:
                invoice_form.partner_id = partner
            else:
                message_to_log.append("%s<br/>%s" % (
                    _("Vendor not found, useful informations from XML file:"),
                    invoice._compose_info_message(tree, './/CedentePrestatore')))

            # Numbering attributed by the transmitter. <1.1.2>
            elements = tree.xpath('//ProgressivoInvio')
            if elements:
                invoice_form.payment_reference = elements[0].text

            # Document number. <2.1.1.4>
            elements = body_tree.xpath('.//DatiGeneraliDocumento//Numero')
            if elements:
                invoice_form.ref = elements[0].text

            # Currency. <2.1.1.2>
            elements = body_tree.xpath('.//DatiGeneraliDocumento/Divisa')
            if elements:
                currency_str = elements[0].text
                currency = self.env.ref('base.%s' % currency_str.upper(), raise_if_not_found=False)
                if currency != self.env.company.currency_id and currency.active:
                    invoice_form.currency_id = currency

            # Date. <2.1.1.3>
            elements = body_tree.xpath('.//DatiGeneraliDocumento/Data')
            if elements:
                date_str = elements[0].text
                date_obj = datetime.strptime(date_str, DEFAULT_FACTUR_ITALIAN_DATE_FORMAT)
                invoice_form.invoice_date = date_obj

            # Dati Bollo (stamp duty). <2.1.1.6>
            elements = body_tree.xpath('.//DatiGeneraliDocumento/DatiBollo/ImportoBollo')
            if elements:
                invoice_form.l10n_it_stamp_duty = float(elements[0].text)

            # Comment. <2.1.1.11>
            elements = body_tree.xpath('.//DatiGeneraliDocumento//Causale')
            for element in elements:
                invoice_form.narration = '%s%s<br/>' % (invoice_form.narration or '', element.text)

            # Information relative to the purchase order, the contract, the agreement,
            # the reception phase or invoices previously transmitted
            # <2.1.2> - <2.1.6>
            for document_type in ['DatiOrdineAcquisto', 'DatiContratto', 'DatiConvenzione', 'DatiRicezione', 'DatiFattureCollegate']:
                elements = body_tree.xpath('.//DatiGenerali/' + document_type)
                if elements:
                    for element in elements:
                        message_to_log.append("%s %s<br/>%s" % (document_type, _("from XML file:"),
                            invoice._compose_info_message(element, '.')))

            # Dati DDT (transport documents). <2.1.8>
            elements = body_tree.xpath('.//DatiGenerali/DatiDDT')
            if elements:
                message_to_log.append("%s<br/>%s" % (
                    _("Transport informations from XML file:"),
                    invoice._compose_info_message(body_tree, './/DatiGenerali/DatiDDT')))

            # Due date. <2.4.2.5>
            elements = body_tree.xpath('.//DatiPagamento/DettaglioPagamento/DataScadenzaPagamento')
            if elements:
                date_str = elements[0].text
                date_obj = datetime.strptime(date_str, DEFAULT_FACTUR_ITALIAN_DATE_FORMAT)
                invoice_form.invoice_date_due = fields.Date.to_string(date_obj)

            # Total amount. <2.4.2.6> (logged only, for manual verification)
            elements = body_tree.xpath('.//ImportoPagamento')
            amount_total_import = 0
            for element in elements:
                amount_total_import += float(element.text)
            if amount_total_import:
                message_to_log.append(_("Total amount from the XML File: %s") % (amount_total_import))

            # Bank account. <2.4.2.13>
            if invoice_form.move_type not in ('out_invoice', 'in_refund'):
                elements = body_tree.xpath('.//DatiPagamento/DettaglioPagamento/IBAN')
                if elements:
                    if invoice_form.partner_id and invoice_form.partner_id.commercial_partner_id:
                        bank = self.env['res.partner.bank'].search([
                            ('acc_number', '=', elements[0].text),
                            ('partner_id.id', '=', invoice_form.partner_id.commercial_partner_id.id)])
                    else:
                        bank = self.env['res.partner.bank'].search([
                            ('acc_number', '=', elements[0].text), ('company_id', '=', invoice_form.company_id.id),
                        ])
                    # Ambiguous match: do not guess between several bank accounts.
                    if len(bank) > 1:
                        bank = None
                    if bank:
                        invoice_form.partner_bank_id = bank
                    else:
                        message_to_log.append("%s<br/>%s" % (
                            _("Bank account not found, useful informations from XML file:"),
                            invoice._compose_multi_info_message(
                                body_tree, ['.//DatiPagamento//Beneficiario',
                                    './/DatiPagamento//IstitutoFinanziario', './/DatiPagamento//IBAN',
                                    './/DatiPagamento//ABI', './/DatiPagamento//CAB', './/DatiPagamento//BIC',
                                    './/DatiPagamento//ModalitaPagamento'])))
                else:
                    elements = body_tree.xpath('.//DatiPagamento/DettaglioPagamento')
                    if elements:
                        message_to_log.append("%s<br/>%s" % (
                            _("Bank account not found, useful informations from XML file:"),
                            invoice._compose_info_message(body_tree, './/DatiPagamento')))

            # Invoice lines. <2.2.1>
            elements = body_tree.xpath('.//DettaglioLinee')
            if elements:
                for element in elements:
                    with invoice_form.invoice_line_ids.new() as invoice_line_form:
                        # Sequence.
                        line_elements = element.xpath('.//NumeroLinea')
                        if line_elements:
                            invoice_line_form.sequence = int(line_elements[0].text)

                        # Product: try EAN barcode, then supplier product code,
                        # then internal reference (default_code).
                        elements_code = element.xpath('.//CodiceArticolo')
                        if elements_code:
                            for element_code in elements_code:
                                type_code = element_code.xpath('.//CodiceTipo')[0]
                                code = element_code.xpath('.//CodiceValore')[0]
                                if type_code.text == 'EAN':
                                    product = self.env['product.product'].search([('barcode', '=', code.text)])
                                    if product:
                                        invoice_line_form.product_id = product
                                        break
                                if partner:
                                    # limit=2 so we can detect (and skip) ambiguous matches
                                    product_supplier = self.env['product.supplierinfo'].search([('partner_id', '=', partner.id), ('product_code', '=', code.text)], limit=2)
                                    if product_supplier and len(product_supplier) == 1 and product_supplier.product_id:
                                        invoice_line_form.product_id = product_supplier.product_id
                                        break
                            if not invoice_line_form.product_id:
                                for element_code in elements_code:
                                    code = element_code.xpath('.//CodiceValore')[0]
                                    product = self.env['product.product'].search([('default_code', '=', code.text)], limit=2)
                                    if product and len(product) == 1:
                                        invoice_line_form.product_id = product
                                        break

                        # Label.
                        line_elements = element.xpath('.//Descrizione')
                        if line_elements:
                            invoice_line_form.name = " ".join(line_elements[0].text.split())

                        # Price Unit.
                        line_elements = element.xpath('.//PrezzoUnitario')
                        if line_elements:
                            invoice_line_form.price_unit = float(line_elements[0].text)

                        # Quantity.
                        line_elements = element.xpath('.//Quantita')
                        if line_elements:
                            invoice_line_form.quantity = float(line_elements[0].text)
                        else:
                            invoice_line_form.quantity = 1

                        # Taxes: match a purchase percent tax on rate and, when a
                        # Natura code is present, on the exoneration kind too.
                        tax_element = element.xpath('.//AliquotaIVA')
                        natura_element = element.xpath('.//Natura')
                        invoice_line_form.tax_ids.clear()
                        if tax_element and tax_element[0].text:
                            percentage = float(tax_element[0].text)
                            if natura_element and natura_element[0].text:
                                l10n_it_kind_exoneration = natura_element[0].text
                                tax = self.env['account.tax'].search([
                                    ('company_id', '=', invoice_form.company_id.id),
                                    ('amount_type', '=', 'percent'),
                                    ('type_tax_use', '=', 'purchase'),
                                    ('amount', '=', percentage),
                                    ('l10n_it_kind_exoneration', '=', l10n_it_kind_exoneration),
                                ], limit=1)
                            else:
                                tax = self.env['account.tax'].search([
                                    ('company_id', '=', invoice_form.company_id.id),
                                    ('amount_type', '=', 'percent'),
                                    ('type_tax_use', '=', 'purchase'),
                                    ('amount', '=', percentage),
                                ], limit=1)
                                l10n_it_kind_exoneration = ''

                            if tax:
                                invoice_line_form.tax_ids.add(tax)
                            else:
                                if l10n_it_kind_exoneration:
                                    message_to_log.append(_("Tax not found with percentage: %s and exoneration %s for the article: %s") % (
                                        percentage,
                                        l10n_it_kind_exoneration,
                                        invoice_line_form.name))
                                else:
                                    message_to_log.append(_("Tax not found with percentage: %s for the article: %s") % (
                                        percentage,
                                        invoice_line_form.name))

                        # Discounts
                        discount_elements = element.xpath('.//ScontoMaggiorazione')
                        if discount_elements:
                            discount_element = discount_elements[0]
                            discount_percentage = discount_element.xpath('.//Percentuale')
                            # Special case of only 1 percentage discount
                            if discount_percentage and len(discount_elements) == 1:
                                discount_type = discount_element.xpath('.//Tipo')
                                discount_sign = 1
                                # 'MG' (maggiorazione) is a surcharge -> negative discount
                                if discount_type and discount_type[0].text == 'MG':
                                    discount_sign = -1
                                invoice_line_form.discount = discount_sign * float(discount_percentage[0].text)
                            # Discounts in cascade summarized in 1 percentage
                            else:
                                total = float(element.xpath('.//PrezzoTotale')[0].text)
                                discount = 100 - (100 * total) / (invoice_line_form.quantity * invoice_line_form.price_unit)
                                invoice_line_form.discount = discount

            # Global discount summarized in 1 amount, added as an extra untaxed line
            discount_elements = body_tree.xpath('.//DatiGeneraliDocumento/ScontoMaggiorazione')
            if discount_elements:
                taxable_amount = float(json.loads(invoice_form.tax_totals_json)['amount_untaxed'])
                discounted_amount = taxable_amount
                for discount_element in discount_elements:
                    discount_type = discount_element.xpath('.//Tipo')
                    discount_sign = 1
                    # 'MG' (maggiorazione) is a surcharge -> inverted sign
                    if discount_type and discount_type[0].text == 'MG':
                        discount_sign = -1
                    discount_amount = discount_element.xpath('.//Importo')
                    if discount_amount:
                        discounted_amount -= discount_sign * float(discount_amount[0].text)
                        continue
                    discount_percentage = discount_element.xpath('.//Percentuale')
                    if discount_percentage:
                        discounted_amount *= 1 - discount_sign * float(discount_percentage[0].text) / 100
                general_discount = discounted_amount - taxable_amount
                sequence = len(elements) + 1

                with invoice_form.invoice_line_ids.new() as invoice_line_global_discount:
                    invoice_line_global_discount.tax_ids.clear()
                    invoice_line_global_discount.sequence = sequence
                    invoice_line_global_discount.name = 'SCONTO' if general_discount < 0 else 'MAGGIORAZIONE'
                    invoice_line_global_discount.price_unit = general_discount

        new_invoice = invoice_form.save()
        new_invoice.l10n_it_send_state = "other"

        # Embedded attachments. <2.5>
        elements = body_tree.xpath('.//Allegati')
        if elements:
            for element in elements:
                name_attachment = element.xpath('.//NomeAttachment')[0].text
                attachment_64 = str.encode(element.xpath('.//Attachment')[0].text)
                attachment_64 = self.env['ir.attachment'].create({
                    'name': name_attachment,
                    'datas': attachment_64,
                    'type': 'binary',
                })

                # default_res_id is added to the context to avoid facturx importing its content
                # no_new_invoice to prevent from looping on the message_post that would create a new invoice without it
                new_invoice.with_context(no_new_invoice=True, default_res_id=new_invoice.id).message_post(
                    body=(_("Attachment from XML")),
                    attachment_ids=[attachment_64.id])

        for message in message_to_log:
            new_invoice.message_post(body=message)

        invoices += new_invoice

    return invoices
def search_panel_select_multi_range(self, field_name, **kwargs):
    """
    Return possible values of the field field_name (case select="multi"),
    possibly with counters and groups.

    :param field_name: the name of a filter field;
        possible types are many2one, many2many, selection.
    :param search_domain: base domain of search
    :param category_domain: domain generated by categories
    :param filter_domain: domain generated by filters
    :param comodel_domain: domain of field values (if relational)
    :param group_by: extra field to read on comodel, to group comodel records
    :param disable_counters: whether to count records by value
    :return: a list of possible values, each being a dict with keys
        'id' (value),
        'name' (value label),
        'count' (how many records with that value),
        'group_id' (value of group),
        'group_name' (label of group).
    """
    field = self._fields[field_name]
    supported_types = ['many2one', 'many2many', 'selection']
    if field.type not in supported_types:
        raise UserError(_('Only types %(supported_types)s are supported for filter (found type %(field_type)s)') % ({
            'supported_types': supported_types, 'field_type': field.type}))

    Comodel = self.env.get(field.comodel_name)

    # Records counted for counters must satisfy every active panel domain
    # and actually have a value for the filter field.
    model_domain = AND([
        kwargs.get('search_domain', []),
        kwargs.get('category_domain', []),
        kwargs.get('filter_domain', []),
        [(field_name, '!=', False)],
    ])
    comodel_domain = kwargs.get('comodel_domain', [])

    disable_counters = kwargs.get('disable_counters', False)

    group_by = kwargs.get('group_by', False)
    if group_by:
        # determine the labeling of values returned by the group_by field
        group_by_field = Comodel._fields[group_by]

        if group_by_field.type == 'many2one':
            def group_id_name(value):
                # many2one read() value is already an (id, name) pair
                return value or (False, _("Not Set"))

        elif group_by_field.type == 'selection':
            desc = Comodel.fields_get([group_by])[group_by]
            group_by_selection = dict(desc['selection'])
            group_by_selection[False] = _("Not Set")

            def group_id_name(value):
                return value, group_by_selection[value]

        else:
            def group_id_name(value):
                return (value, value) if value else (False, _("Not Set"))

    # get filter_values
    filter_values = []
    if field.type == 'many2one':
        counters = {}
        if not disable_counters:
            groups = self.read_group(model_domain, [field_name], [field_name])
            counters = {
                group[field_name][0]: group[field_name + '_count']
                for group in groups
            }
        # retrieve all possible values, and return them with their label and counter
        field_names = ['display_name', group_by] if group_by else ['display_name']
        records = Comodel.search_read(comodel_domain, field_names)
        for record in records:
            record_id = record['id']
            values = {
                'id': record_id,
                'name': record['display_name'],
                'count': counters.get(record_id, 0),
            }
            if group_by:
                values['group_id'], values['group_name'] = group_id_name(record[group_by])
            filter_values.append(values)

    elif field.type == 'many2many':
        # retrieve all possible values, and return them with their label and counter
        field_names = ['display_name', group_by] if group_by else ['display_name']
        records = Comodel.search_read(comodel_domain, field_names)
        for record in records:
            record_id = record['id']
            values = {
                'id': record_id,
                'name': record['display_name'],
                'count': 0,
            }
            if not disable_counters:
                # many2many cannot be grouped by read_group: count per value
                count_domain = AND([model_domain, [(field_name, 'in', record_id)]])
                values['count'] = self.search_count(count_domain)
            if group_by:
                values['group_id'], values['group_name'] = group_id_name(record[group_by])
            filter_values.append(values)

    elif field.type == 'selection':
        counters = {}
        if not disable_counters:
            groups = self.read_group(model_domain, [field_name], [field_name])
            counters = {
                group[field_name]: group[field_name + '_count']
                for group in groups
            }
        # retrieve all possible values, and return them with their label and counter
        # BUGFIX: fields_get returns a full field description dict; the list of
        # (value, label) pairs lives under its 'selection' key. Without it the
        # unpacking below raised for every selection field.
        selection = self.fields_get([field_name])[field_name]['selection']
        for value, label in selection:
            filter_values.append({
                'id': value,
                'name': label,
                'count': counters.get(value, 0),
            })

    return filter_values
def portal_my_tasks(self, page=1, date_begin=None, date_end=None, sortby=None, filterby=None, search=None, search_in='content', groupby=None, **kw):
    """Render the portal 'My Tasks' page with search/sort/filter/group controls.

    :param page: pager page number (1-based).
    :param date_begin, date_end: optional creation-date range filter.
    :param sortby: key into the searchbar sortings (defaults to 'date').
    :param filterby: key into the searchbar filters (defaults to 'all').
    :param search, search_in: free-text search and the field set to search in.
    :param groupby: grouping criterion (defaults to 'project').
    :return: the rendered "project.portal_my_tasks" template.
    """
    values = self._prepare_portal_layout_values()
    # BUGFIX: the sortings were fetched twice (a dead first assignment followed
    # by the sorted rebuild); call the hook only once.
    searchbar_sortings = dict(sorted(self._task_get_searchbar_sortings().items(),
                                     key=lambda item: item[1]["sequence"]))
    searchbar_filters = {
        'all': {'label': _('All'), 'domain': []},
    }
    searchbar_inputs = self._task_get_searchbar_inputs()
    searchbar_groupby = self._task_get_searchbar_groupby()

    # extends filterby criteria with project the customer has access to
    projects = request.env['project.project'].search([])
    for project in projects:
        searchbar_filters.update({
            str(project.id): {'label': project.name, 'domain': [('project_id', '=', project.id)]}
        })

    # extends filterby criteria with project (criteria name is the project id)
    # Note: portal users can't view projects they don't follow
    project_groups = request.env['project.task'].read_group(
        [('project_id', 'not in', projects.ids)], ['project_id'], ['project_id'])
    for group in project_groups:
        proj_id = group['project_id'][0] if group['project_id'] else False
        proj_name = group['project_id'][1] if group['project_id'] else _('Others')
        searchbar_filters.update({
            str(proj_id): {'label': proj_name, 'domain': [('project_id', '=', proj_id)]}
        })

    # default sort by value
    if not sortby:
        sortby = 'date'
    order = searchbar_sortings[sortby]['order']

    # default filter by value
    if not filterby:
        filterby = 'all'
    domain = searchbar_filters.get(filterby, searchbar_filters.get('all'))['domain']

    # default group by value
    if not groupby:
        groupby = 'project'

    if date_begin and date_end:
        domain += [('create_date', '>', date_begin), ('create_date', '<=', date_end)]

    # search
    if search and search_in:
        domain += self._task_get_search_domain(search_in, search)

    TaskSudo = request.env['project.task'].sudo()
    # Search with sudo, but keep the current user's record rules applied.
    domain = AND([domain, request.env['ir.rule']._compute_domain(TaskSudo._name, 'read')])

    # task count
    task_count = TaskSudo.search_count(domain)
    # pager
    pager = portal_pager(
        url="/my/tasks",
        url_args={'date_begin': date_begin, 'date_end': date_end, 'sortby': sortby,
                  'filterby': filterby, 'groupby': groupby, 'search_in': search_in, 'search': search},
        total=task_count,
        page=page,
        step=self._items_per_page
    )
    # content according to pager and archive selected
    order = self._task_get_order(order, groupby)

    tasks = TaskSudo.search(domain, order=order, limit=self._items_per_page, offset=pager['offset'])
    request.session['my_tasks_history'] = tasks.ids[:100]

    groupby_mapping = self._task_get_groupby_mapping()
    group = groupby_mapping.get(groupby)
    if group:
        grouped_tasks = [request.env['project.task'].concat(*g) for k, g in groupbyelem(tasks, itemgetter(group))]
    else:
        grouped_tasks = [tasks]

    task_states = dict(request.env['project.task']._fields['kanban_state']._description_selection(request.env))
    if sortby == 'status':
        # kanban_state is a selection field: sort on its translated label
        if groupby == 'none' and grouped_tasks:
            grouped_tasks[0] = grouped_tasks[0].sorted(lambda tasks: task_states.get(tasks.kanban_state))
        else:
            grouped_tasks.sort(key=lambda tasks: task_states.get(tasks[0].kanban_state))

    values.update({
        'date': date_begin,
        'date_end': date_end,
        'grouped_tasks': grouped_tasks,
        'page_name': 'task',
        'default_url': '/my/tasks',
        'task_url': 'task',
        'pager': pager,
        'searchbar_sortings': searchbar_sortings,
        'searchbar_groupby': searchbar_groupby,
        'searchbar_inputs': searchbar_inputs,
        'search_in': search_in,
        'search': search,
        'sortby': sortby,
        'groupby': groupby,
        'searchbar_filters': OrderedDict(sorted(searchbar_filters.items())),
        'filterby': filterby,
    })
    return request.render("project.portal_my_tasks", values)
def search_panel_select_range(self, field_name, **kwargs):
    """
    Return possible values of the field field_name (case select="one"),
    possibly with counters, and the parent field (if any and required)
    used to hierarchize them.

    :param field_name: the name of a field;
        of type many2one or selection.
    :param category_domain: domain generated by categories. Default is [].
    :param comodel_domain: domain of field values (if relational). Default is [].
    :param enable_counters: whether to count records by value. Default is False.
    :param expand: whether to return the full range of field values in comodel_domain
        or only the field image values (possibly filtered and/or completed
        with parents if hierarchize is set). Default is False.
    :param filter_domain: domain generated by filters. Default is [].
    :param hierarchize: determines if the categories must be displayed hierarchically
        (if possible). If set to true and _parent_name is set on the comodel field,
        the information necessary for the hierarchization will be returned. Default is True.
    :param limit: integer, maximal number of values to fetch. Default is None.
    :param search_domain: base domain of search. Default is [].
        with parents if hierarchize is set)
    :return: {
        'parent_field': parent field on the comodel of field, or False
        'values': array of dictionaries containing some info on the records
            available on the comodel of the field 'field_name'.
            The display name, the __count (how many records with that value)
            and possibly parent_field are fetched.
    }
    or an object with an error message when limit is defined and is reached.
    """
    field = self._fields[field_name]
    supported_types = ['many2one', 'selection']
    if field.type not in supported_types:
        raise UserError(_('Only types %(supported_types)s are supported for category (found type %(field_type)s)') % ({
            'supported_types': supported_types, 'field_type': field.type}))

    model_domain = kwargs.get('search_domain', [])
    extra_domain = AND([
        kwargs.get('category_domain', []),
        kwargs.get('filter_domain', []),
    ])

    # Selection fields have a fixed range: delegate and return immediately.
    if field.type == 'selection':
        return {
            'parent_field': False,
            'values': self._search_panel_selection_range(field_name, model_domain=model_domain,
                extra_domain=extra_domain, **kwargs
            ),
        }

    Comodel = self.env[field.comodel_name].with_context(hierarchical_naming=False)
    field_names = ['display_name']
    hierarchize = kwargs.get('hierarchize', True)
    parent_name = False
    # Hierarchization is only possible when the comodel declares a parent field.
    if hierarchize and Comodel._parent_name in Comodel._fields:
        parent_name = Comodel._parent_name
        field_names.append(parent_name)

        def get_parent_id(record):
            # record[parent_name] from search_read is (id, name) or False
            value = record[parent_name]
            return value and value[0]
    else:
        hierarchize = False

    comodel_domain = kwargs.get('comodel_domain', [])
    enable_counters = kwargs.get('enable_counters')
    expand = kwargs.get('expand')
    limit = kwargs.get('limit')

    if enable_counters or not expand:
        # domain_image maps comodel record ids to {'id', 'display_name', '__count'}
        # for the values actually present in the searched records.
        domain_image = self._search_panel_field_image(field_name,
            model_domain=model_domain, extra_domain=extra_domain,
            only_counters=expand,
            # the limit can only be applied here when no later widening
            # (expand/hierarchize/comodel_domain) can add values
            set_limit=limit and not (expand or hierarchize or comodel_domain), **kwargs
        )

    if not (expand or hierarchize or comodel_domain):
        # Fast path: the field image alone is the full answer.
        values = list(domain_image.values())
        if limit and len(values) == limit:
            return {'error_msg': _(SEARCH_PANEL_ERROR_MESSAGE)}
        return {
            'parent_field': parent_name,
            'values': values,
        }

    if not expand:
        # Restrict the comodel search to the values present in the image
        # (including their ancestors when hierarchizing).
        image_element_ids = list(domain_image.keys())
        if hierarchize:
            condition = [('id', 'parent_of', image_element_ids)]
        else:
            condition = [('id', 'in', image_element_ids)]
        comodel_domain = AND([comodel_domain, condition])
    comodel_records = Comodel.search_read(comodel_domain, field_names, limit=limit)

    if hierarchize:
        ids = [rec['id'] for rec in comodel_records] if expand else image_element_ids
        comodel_records = self._search_panel_sanitized_parent_hierarchy(comodel_records, parent_name, ids)

    if limit and len(comodel_records) == limit:
        return {'error_msg': _(SEARCH_PANEL_ERROR_MESSAGE)}

    field_range = {}
    for record in comodel_records:
        record_id = record['id']
        values = {
            'id': record_id,
            'display_name': record['display_name'],
        }
        if hierarchize:
            values[parent_name] = get_parent_id(record)
        if enable_counters:
            image_element = domain_image.get(record_id)
            values['__count'] = image_element['__count'] if image_element else 0
        field_range[record_id] = values

    if hierarchize and enable_counters:
        # Fold children counts into their ancestors.
        self._search_panel_global_counters(field_range, parent_name)

    return {
        'parent_field': parent_name,
        'values': list(field_range.values()),
    }
def _search(self, args, *args_, **kwargs):
    """Hide templates from searches by default."""
    if not should_apply_default_template_filter(args, self._context):
        # Filter does not apply: forward the caller's domain untouched.
        return super()._search(args, *args_, **kwargs)
    # Restrict the search to non-template records.
    filtered_domain = AND((args or [], [('is_template', '=', False)]))
    return super()._search(filtered_domain, *args_, **kwargs)
def portal_my_timesheets(self, page=1, sortby=None, filterby=None, search=None, search_in='all', groupby='project', **kw):
    """Portal page listing the current user's timesheets (/my/timesheets).

    Supports sorting (date/name), date-range quick filters, a free-text
    search on the description, and optional grouping by project.
    Renders the ``hr_timesheet.portal_my_timesheets`` template.
    """
    Timesheet_sudo = request.env['account.analytic.line'].sudo()
    values = self._prepare_portal_layout_values()
    # Base visibility domain for portal users, defined on the model.
    domain = request.env['account.analytic.line']._timesheet_get_portal_domain()
    searchbar_sortings = {
        'date': {'label': _('Newest'), 'order': 'date desc'},
        'name': {'label': _('Name'), 'order': 'name'},
    }
    # Only one search target here: the timesheet description ('name').
    searchbar_inputs = {
        'all': {'input': 'all', 'label': _('Search in All')},
    }
    searchbar_groupby = {
        'none': {'input': 'none', 'label': _('None')},
        'project': {'input': 'project', 'label': _('Project')},
    }
    today = fields.Date.today()
    quarter_start, quarter_end = date_utils.get_quarter(today)
    last_week = today + relativedelta(weeks=-1)
    last_month = today + relativedelta(months=-1)
    last_year = today + relativedelta(years=-1)
    # Quick date-range filters; each entry is an ORM domain on 'date'.
    searchbar_filters = {
        'all': {'label': _('All'), 'domain': []},
        'today': {'label': _('Today'), 'domain': [("date", "=", today)]},
        'week': {'label': _('This week'), 'domain': [('date', '>=', date_utils.start_of(today, "week")), ('date', '<=', date_utils.end_of(today, 'week'))]},
        'month': {'label': _('This month'), 'domain': [('date', '>=', date_utils.start_of(today, 'month')), ('date', '<=', date_utils.end_of(today, 'month'))]},
        'year': {'label': _('This year'), 'domain': [('date', '>=', date_utils.start_of(today, 'year')), ('date', '<=', date_utils.end_of(today, 'year'))]},
        'quarter': {'label': _('This Quarter'), 'domain': [('date', '>=', quarter_start), ('date', '<=', quarter_end)]},
        'last_week': {'label': _('Last week'), 'domain': [('date', '>=', date_utils.start_of(last_week, "week")), ('date', '<=', date_utils.end_of(last_week, 'week'))]},
        'last_month': {'label': _('Last month'), 'domain': [('date', '>=', date_utils.start_of(last_month, 'month')), ('date', '<=', date_utils.end_of(last_month, 'month'))]},
        'last_year': {'label': _('Last year'), 'domain': [('date', '>=', date_utils.start_of(last_year, 'year')), ('date', '<=', date_utils.end_of(last_year, 'year'))]},
    }
    # default sort by value
    if not sortby:
        sortby = 'date'
    order = searchbar_sortings[sortby]['order']
    # default filter by value
    if not filterby:
        filterby = 'all'
    domain = AND([domain, searchbar_filters[filterby]['domain']])
    if search and search_in:
        # Regardless of search_in, the search only matches the description.
        domain = AND([domain, [('name', 'ilike', search)]])
    timesheet_count = Timesheet_sudo.search_count(domain)
    # pager
    pager = portal_pager(
        url="/my/timesheets",
        url_args={'sortby': sortby, 'search_in': search_in, 'search': search, 'filterby': filterby},
        total=timesheet_count,
        page=page,
        step=self._items_per_page
    )
    if groupby == 'project':
        # Prepend project_id so records of the same project are contiguous
        # for the groupbyelem() pass below.
        order = "project_id, %s" % order
    timesheets = Timesheet_sudo.search(domain, order=order, limit=self._items_per_page, offset=pager['offset'])
    if groupby == 'project':
        grouped_timesheets = [Timesheet_sudo.concat(*g) for k, g in groupbyelem(timesheets, itemgetter('project_id'))]
    else:
        grouped_timesheets = [timesheets]
    values.update({
        'timesheets': timesheets,
        'grouped_timesheets': grouped_timesheets,
        'page_name': 'timesheet',
        'default_url': '/my/timesheets',
        'pager': pager,
        'searchbar_sortings': searchbar_sortings,
        'search_in': search_in,
        'sortby': sortby,
        'groupby': groupby,
        'searchbar_inputs': searchbar_inputs,
        'searchbar_groupby': searchbar_groupby,
        'searchbar_filters': OrderedDict(sorted(searchbar_filters.items())),
        'filterby': filterby,
    })
    return request.render("hr_timesheet.portal_my_timesheets", values)
def portal_my_project_subtasks(self, project_id, task_id, page=1, date_begin=None, date_end=None, sortby=None, filterby=None, search=None, search_in='content', groupby=None, **kw):
    """Portal page listing the subtasks of a given project task.

    Checks document access on the project, then reuses
    :meth:`_prepare_tasks_values` restricted to children of ``task_id``
    (excluding the task itself). Returns 404 on access/missing errors.
    """
    try:
        project_sudo = self._document_check_access('project.project', project_id)
        # sudo() only after the search, so the search itself runs with
        # the current user's access rules.
        task_sudo = request.env['project.task'].search([
            ('project_id', '=', project_id),
            ('id', '=', task_id)
        ]).sudo()
        # All descendants of the task, minus the task itself.
        task_domain = [('id', 'child_of', task_id), ('id', '!=', task_id)]
        searchbar_filters = self._get_my_tasks_searchbar_filters(
            [('id', '=', task_sudo.project_id.id)], task_domain)
        if not filterby:
            filterby = 'all'
        # Fall back to the 'all' filter when an unknown key is passed in the URL.
        domain = searchbar_filters.get(
            filterby, searchbar_filters.get('all'))['domain']
        values = self._prepare_tasks_values(
            page, date_begin, date_end, sortby, search, search_in, groupby,
            url=f'/my/projects/{project_id}/task/{task_id}/subtasks',
            domain=AND([task_domain, domain]))
        values['page_name'] = 'project_subtasks'
        # pager
        pager_vals = values['pager']
        pager_vals['url_args'].update(filterby=filterby)
        pager = portal_pager(**pager_vals)
        values.update({
            'project': project_sudo,
            'task': task_sudo,
            # 'grouped_tasks' comes back as a callable; resolve it with the
            # pager offset now that the pager is built.
            'grouped_tasks': values['grouped_tasks'](pager['offset']),
            'pager': pager,
            'searchbar_filters': OrderedDict(sorted(searchbar_filters.items())),
            'filterby': filterby,
        })
        return request.render("project.portal_my_tasks", values)
    except (AccessError, MissingError):
        return request.not_found()
def _prepare_tasks_values(self, page, date_begin, date_end, sortby, search, search_in, groupby, url="/my/tasks", domain=None, su=False):
    """Build the rendering values shared by the portal task list pages.

    Returns a dict of template values; ``grouped_tasks`` is deliberately a
    *callable* taking a pager offset, so the caller can build the pager
    first and only then fetch the page of records.
    """
    values = self._prepare_portal_layout_values()
    # Sort options ordered by their declared 'sequence'.
    searchbar_sortings = dict(
        sorted(self._task_get_searchbar_sortings().items(),
               key=lambda item: item[1]["sequence"]))
    searchbar_inputs = self._task_get_searchbar_inputs()
    searchbar_groupby = self._task_get_searchbar_groupby()
    Task = request.env['project.task']
    if not domain:
        domain = []
    if not su and Task.check_access_rights('read'):
        # Apply record rules manually before switching to sudo below.
        domain = AND([
            domain,
            request.env['ir.rule']._compute_domain(Task._name, 'read')
        ])
    Task = Task.sudo()
    # default sort by value
    if not sortby:
        sortby = 'date'
    order = searchbar_sortings[sortby]['order']
    # default group by value
    if not groupby:
        groupby = 'project'
    if date_begin and date_end:
        domain += [('create_date', '>', date_begin),
                   ('create_date', '<=', date_end)]
    # search
    if search and search_in:
        domain += self._task_get_search_domain(search_in, search)
    # content according to pager and archive selected
    order = self._task_get_order(order, groupby)

    def get_grouped_tasks(pager_offset):
        # Fetch one page of tasks and group them for display.
        tasks = Task.search(domain, order=order, limit=self._items_per_page,
                            offset=pager_offset)
        # Remember the ids shown so record navigation (prev/next) works.
        request.session['my_project_tasks_history' if url.startswith(
            '/my/projects') else 'my_tasks_history'] = tasks.ids[:100]
        groupby_mapping = self._task_get_groupby_mapping()
        group = groupby_mapping.get(groupby)
        if group:
            # groupbyelem expects records already sorted by the group key
            # (guaranteed by _task_get_order above).
            grouped_tasks = [
                Task.concat(*g)
                for k, g in groupbyelem(tasks, itemgetter(group))
            ]
        else:
            grouped_tasks = [tasks]
        task_states = dict(
            Task._fields['kanban_state']._description_selection(
                request.env))
        if sortby == 'status':
            # Kanban state has no DB-level ordering by label, so sort the
            # groups (or the flat list) in Python.
            if groupby == 'none' and grouped_tasks:
                grouped_tasks[0] = grouped_tasks[0].sorted(
                    lambda tasks: task_states.get(tasks.kanban_state))
            else:
                grouped_tasks.sort(key=lambda tasks: task_states.get(tasks[
                    0].kanban_state))
        return grouped_tasks

    values.update({
        'date': date_begin,
        'date_end': date_end,
        'grouped_tasks': get_grouped_tasks,
        'page_name': 'task',
        'default_url': url,
        'task_url': 'tasks',
        'pager': {
            "url": url,
            "url_args": {
                'date_begin': date_begin,
                'date_end': date_end,
                'sortby': sortby,
                'groupby': groupby,
                'search_in': search_in,
                'search': search
            },
            "total": Task.search_count(domain),
            "page": page,
            "step": self._items_per_page
        },
        'searchbar_sortings': searchbar_sortings,
        'searchbar_groupby': searchbar_groupby,
        'searchbar_inputs': searchbar_inputs,
        'search_in': search_in,
        'search': search,
        'sortby': sortby,
        'groupby': groupby,
    })
    return values
def _add_product_lot_domain_if_required(domain, env, context):
    """Restrict *domain* to the product of the lot selected in the context, if any."""
    lot_id = context.get('stock_inventory_product_lot_filter')
    if not lot_id:
        return domain
    product = env['stock.production.lot'].browse(lot_id).product_id
    return AND((domain or [], [('id', '=', product.id)]))
def read_group(self, domain, fields, groupby, offset=0, limit=None, orderby=False, lazy=True):
    """ This is a hack to allow us to correctly calculate the average of PO specific date values since
        the normal report query result will duplicate PO values across its PO lines during joins and
        lead to incorrect aggregation values.

        Only the AVG operator is supported for avg_days_to_purchase.
    """
    # Pull the avg_days_to_purchase aggregate out of the requested fields;
    # it is computed separately via raw SQL below.
    avg_days_to_purchase = next(
        (field for field in fields
         if re.search(r'\bavg_days_to_purchase\b', field)), False)
    if avg_days_to_purchase:
        fields.remove(avg_days_to_purchase)
        # Reject any aggregate operator other than 'avg' on this field.
        if any(
                field.split(':')[1].split('(')[0] != 'avg'
                for field in [avg_days_to_purchase] if field):
            raise UserError(
                "Value: 'avg_days_to_purchase' should only be used to show an average. If you are seeing this message then it is being accessed incorrectly."
            )
    res = []
    if fields:
        res = super(PurchaseReport, self).read_group(domain, fields, groupby, offset=offset, limit=limit, orderby=orderby, lazy=lazy)
    if not res and avg_days_to_purchase:
        # Ensure there is a row to attach the average to.
        res = [{}]
    if avg_days_to_purchase:
        self.check_access_rights('read')
        # Average per *order* (not per line) of the approve-create delta, in days.
        query = """ SELECT AVG(days_to_purchase.po_days_to_purchase)::decimal(16,2) AS avg_days_to_purchase
                      FROM (
                          SELECT extract(epoch from age(po.date_approve,po.create_date))/(24*60*60) AS po_days_to_purchase
                            FROM purchase_order po
                           WHERE po.id IN (
                               SELECT "purchase_report"."order_id" FROM %s WHERE %s)
                           ) AS days_to_purchase
                """
        subdomain = AND([
            domain,
            [('company_id', '=', self.env.company.id), ('date_approve', '!=', False)]
        ])
        # FROM/WHERE fragments come from the ORM expression builder, with
        # user values passed separately as query parameters.
        subtables, subwhere, subparams = expression(subdomain, self).query.get_sql()
        self.env.cr.execute(query % (subtables, subwhere), subparams)
        res[0].update({
            '__count': 1,
            avg_days_to_purchase.split(':')[0]: self.env.cr.fetchall()[0][0],
        })
    return res
def _add_product_category_domain_if_required(domain, context):
    """Restrict *domain* to the product category (and children) from the context, if set."""
    category_id = context.get('stock_inventory_product_category_filter')
    if not category_id:
        return domain
    category_leaf = [('categ_id', 'child_of', category_id)]
    return AND((domain or [], category_leaf))
def _add_product_domain_if_required(domain, context):
    """Restrict *domain* to the single product selected in the context, if any."""
    product_id = context.get('stock_inventory_product_filter')
    if not product_id:
        return domain
    return AND((domain or [], [('id', '=', product_id)]))
def portal_my_timesheets(self, page=1, sortby=None, filterby=None, search=None, search_in='all', groupby='none', **kw):
    """Portal page listing the current user's timesheets (/my/timesheets).

    Full-featured variant: search across project/description/employee/task,
    date-range quick filters, and grouping by project, task, employee or
    day; each group is paired with its summed ``unit_amount``.
    """
    Timesheet_sudo = request.env['account.analytic.line'].sudo()
    values = self._prepare_portal_layout_values()
    # Base visibility domain for portal users, defined on the model.
    domain = request.env[
        'account.analytic.line']._timesheet_get_portal_domain()
    searchbar_sortings = {
        'date': {
            'label': _('Newest'),
            'order': 'date desc'
        },
        'name': {
            'label': _('Description'),
            'order': 'name'
        },
    }
    searchbar_inputs = {
        'all': {
            'input': 'all',
            'label': _('Search in All')
        },
        'project': {
            'input': 'project',
            'label': _('Search in Project')
        },
        'name': {
            'input': 'name',
            'label': _('Search in Description')
        },
        'employee': {
            'input': 'employee',
            'label': _('Search in Employee')
        },
        'task': {
            'input': 'task',
            'label': _('Search in Task')
        },
    }
    searchbar_groupby = {
        'none': {
            'input': 'none',
            'label': _('None')
        },
        'project': {
            'input': 'project',
            'label': _('Project')
        },
        'task': {
            'input': 'task',
            'label': _('Task')
        },
        'date': {
            'input': 'date',
            'label': _('Date')
        },
        'employee': {
            'input': 'employee',
            'label': _('Employee')
        },
    }
    today = fields.Date.today()
    quarter_start, quarter_end = date_utils.get_quarter(today)
    last_week = today + relativedelta(weeks=-1)
    last_month = today + relativedelta(months=-1)
    last_year = today + relativedelta(years=-1)
    # Quick date-range filters; each entry is an ORM domain on 'date'.
    searchbar_filters = {
        'all': {
            'label': _('All'),
            'domain': []
        },
        'today': {
            'label': _('Today'),
            'domain': [("date", "=", today)]
        },
        'week': {
            'label': _('This week'),
            'domain': [('date', '>=', date_utils.start_of(today, "week")),
                       ('date', '<=', date_utils.end_of(today, 'week'))]
        },
        'month': {
            'label': _('This month'),
            'domain': [('date', '>=', date_utils.start_of(today, 'month')),
                       ('date', '<=', date_utils.end_of(today, 'month'))]
        },
        'year': {
            'label': _('This year'),
            'domain': [('date', '>=', date_utils.start_of(today, 'year')),
                       ('date', '<=', date_utils.end_of(today, 'year'))]
        },
        'quarter': {
            'label': _('This Quarter'),
            'domain': [('date', '>=', quarter_start),
                       ('date', '<=', quarter_end)]
        },
        'last_week': {
            'label': _('Last week'),
            'domain': [('date', '>=', date_utils.start_of(last_week, "week")),
                       ('date', '<=', date_utils.end_of(last_week, 'week'))]
        },
        'last_month': {
            'label': _('Last month'),
            'domain': [('date', '>=', date_utils.start_of(last_month, 'month')),
                       ('date', '<=', date_utils.end_of(last_month, 'month'))]
        },
        'last_year': {
            'label': _('Last year'),
            'domain': [('date', '>=', date_utils.start_of(last_year, 'year')),
                       ('date', '<=', date_utils.end_of(last_year, 'year'))]
        },
    }
    # default sort by value
    if not sortby:
        sortby = 'date'
    order = searchbar_sortings[sortby]['order']
    # default filter by value
    if not filterby:
        filterby = 'all'
    domain = AND([domain, searchbar_filters[filterby]['domain']])
    if search and search_in:
        # Build an OR of the selected search targets ('all' hits every one).
        search_domain = []
        if search_in in ('project', 'all'):
            search_domain = OR(
                [search_domain, [('project_id', 'ilike', search)]])
        if search_in in ('name', 'all'):
            search_domain = OR(
                [search_domain, [('name', 'ilike', search)]])
        if search_in in ('employee', 'all'):
            search_domain = OR(
                [search_domain, [('employee_id', 'ilike', search)]])
        if search_in in ('task', 'all'):
            search_domain = OR(
                [search_domain, [('task_id', 'ilike', search)]])
        # List concatenation is an implicit AND in Odoo domain syntax.
        domain += search_domain
    timesheet_count = Timesheet_sudo.search_count(domain)
    # pager
    pager = portal_pager(url="/my/timesheets",
                         url_args={
                             'sortby': sortby,
                             'search_in': search_in,
                             'search': search,
                             'filterby': filterby,
                             'groupby': groupby
                         },
                         total=timesheet_count,
                         page=page,
                         step=self._items_per_page)

    def get_timesheets():
        # Fetch the current page and pair each display group with its
        # total unit_amount (computed with read_group over the full domain).
        groupby_mapping = {
            'project': 'project_id',
            'task': 'task_id',
            'employee': 'employee_id',
            'date': 'date',
        }
        field = groupby_mapping.get(groupby, None)
        # Prepend the group field so records of one group are contiguous.
        orderby = '%s, %s' % (field, order) if field else order
        timesheets = Timesheet_sudo.search(domain,
                                           order=orderby,
                                           limit=self._items_per_page,
                                           offset=pager['offset'])
        if field:
            if groupby == 'date':
                time_data = Timesheet_sudo.read_group(
                    domain, ['date', 'unit_amount:sum'], ['date:day'])
                # read_group returns 'date:day' as a localized label
                # ('%d %b %Y'); parse it back to a date for keying.
                mapped_time = dict([
                    (datetime.strptime(m['date:day'], '%d %b %Y').date(),
                     m['unit_amount']) for m in time_data
                ])
                grouped_timesheets = [
                    (Timesheet_sudo.concat(*g), mapped_time[k])
                    for k, g in groupbyelem(timesheets, itemgetter('date'))
                ]
            else:
                # NOTE(review): duplicated assignment ('time_data = time_data =')
                # kept as-is; it is harmless but looks like a paste slip.
                time_data = time_data = Timesheet_sudo.read_group(
                    domain, [field, 'unit_amount:sum'], [field])
                # many2one groups come back as (id, display_name) pairs.
                mapped_time = dict([(m[field][0], m['unit_amount'])
                                    for m in time_data])
                grouped_timesheets = [
                    (Timesheet_sudo.concat(*g), mapped_time[k.id])
                    for k, g in groupbyelem(timesheets, itemgetter(field))
                ]
            return timesheets, grouped_timesheets
        # No grouping: one pseudo-group with the grand total.
        grouped_timesheets = [
            (timesheets,
             sum(Timesheet_sudo.search(domain).mapped('unit_amount')))
        ] if timesheets else []
        return timesheets, grouped_timesheets

    timesheets, grouped_timesheets = get_timesheets()
    values.update({
        'timesheets': timesheets,
        'grouped_timesheets': grouped_timesheets,
        'page_name': 'timesheet',
        'default_url': '/my/timesheets',
        'pager': pager,
        'searchbar_sortings': searchbar_sortings,
        'search_in': search_in,
        'search': search,
        'sortby': sortby,
        'groupby': groupby,
        'searchbar_inputs': searchbar_inputs,
        'searchbar_groupby': searchbar_groupby,
        'searchbar_filters': OrderedDict(sorted(searchbar_filters.items())),
        'filterby': filterby,
        'is_uom_day': request.env['account.analytic.line']._is_timesheet_encode_uom_day(
        ),
    })
    return request.render("hr_timesheet.portal_my_timesheets", values)
def publicOrderController(self, table_id, config_id=False, **k):
    """Public (guest) entry point for the restaurant self-order POS screen.

    Example URL: /public/pos/web?table_id=3&config_id=3

    Authenticates with the service account configured on the POS config,
    finds (or creates) an open POS session for that config, and renders
    the POS client with a ``session_info`` payload tailored for guest
    ordering at the given table.

    Fixes applied:
    - the final return used the corrupted token ``requPoint Of Saleest``
      (a paste error inside ``request``), which was a SyntaxError;
    - the bare ``except:`` around authentication is narrowed to
      ``except Exception:`` so SystemExit/KeyboardInterrupt propagate.
    """
    # Record first-use date for licensing on the very first hit.
    license_started_date = request.env['ir.config_parameter'].sudo(
    ).get_param('license_started_date')
    if not license_started_date:
        request.env['ir.config_parameter'].sudo().set_param(
            'license_started_date', fields.Date.today())
    if not table_id or not config_id:
        return werkzeug.utils.redirect('/web/login')
    start = timeit.default_timer()
    config = request.env['pos.config'].sudo().browse(int(config_id))
    # Guest ordering must be enabled and have service credentials set.
    if not config.restaurant_order or not config.restaurant_order_login or not config.restaurant_order_password:
        return werkzeug.utils.redirect('/web/login')
    request.uid = odoo.SUPERUSER_ID
    try:
        uid = request.session.authenticate(
            request.session.db, config.restaurant_order_login,
            config.restaurant_order_password)
    except Exception:
        values = {
            'error': _("Wrong login/password of account user %s",
                       config.restaurant_order_login)
        }
        response = request.render('web.login', values)
        return response
    request.uid = uid
    request.session.temp_session = True
    request.params['login_success'] = True
    domain = [('state', 'in', ['opening_control', 'opened']),
              ('user_id', '=', request.session.uid), ('rescue', '=', False)]
    if config_id:
        domain = AND([domain, [('config_id', '=', int(config_id))]])
    pos_session = request.env['pos.session'].sudo().search(domain, limit=1)
    if not pos_session and config_id:
        pos_session = request.env['pos.session'].create({
            'user_id': request.env.user.id,
            'config_id': int(config_id),
        })
    if not pos_session or not config_id:
        return werkzeug.utils.redirect(
            '/web#action=point_of_sale.action_client_pos_menu')
    # The POS only work in one company, so we enforce the one of the session in the context
    if request.env.user.id not in [
            1, 2
    ] and not pos_session.config_id.restaurant_order:
        request.env['pos.remote.session'].sudo(
        ).closing_another_sessions_opened(
            pos_session.config_id.id,
            _('%s opening your POS Session. We closing your session now',
              request.env.user.name), start)
    session_info = request.env['ir.http'].session_info()
    # Id ranges/counts let the client plan incremental data loading.
    session_info['model_ids'] = {
        'product.product': {
            'min_id': 0,
            'max_id': 0,
        },
        'res.partner': {
            'min_id': 0,
            'max_id': 0
        },
    }
    request.env.cr.execute("select max(id) from product_product")
    product_max_ids = request.env.cr.fetchall()
    request.env.cr.execute("select count(id) from product_product")
    count_products = request.env.cr.fetchall()
    session_info['model_ids']['product.product'][
        'max_id'] = product_max_ids[0][0] if len(
            product_max_ids) == 1 else 1
    session_info['model_ids']['product.product']['count'] = count_products[
        0][0] if len(count_products) == 1 else None
    request.env.cr.execute("select max(id) from res_partner")
    partner_max_ids = request.env.cr.fetchall()
    session_info['model_ids']['res.partner']['max_id'] = partner_max_ids[
        0][0] if len(partner_max_ids) == 1 else 10
    request.env.cr.execute("select count(id) from res_partner")
    count_partners = request.env.cr.fetchall()
    session_info['model_ids']['res.partner']['count'] = count_partners[0][
        0] if len(count_partners) == 1 else None
    session_info['user_context'][
        'allowed_company_ids'] = pos_session.company_id.ids
    session_info[
        'company_currency_id'] = request.env.user.company_id.currency_id.id
    session_info['license'] = request.env['ir.config_parameter'].sudo(
    ).get_param('license')
    if session_info['license']:
        # License is checked against the db name, then a generic SaaS key.
        license = session_info['license'].split(' ')[0]
        session_info['license'] = crypt_context.verify_and_update(
            request.env.cr.dbname, license)[0]
        if not session_info['license']:
            session_info['license'] = crypt_context.verify_and_update(
                'saas_license', license)[0]
    session_info['config_id'] = config_id
    session_info['products_name'] = None
    session_info['partners_name'] = None
    session_info['start_time'] = start
    modules_installed = request.env['ir.module.module'].sudo().search(
        [('name', '=', 'pos_retail')], limit=1)
    if not modules_installed:
        session_info['pos_retail'] = False
    else:
        session_info['pos_retail'] = True
    session_info['table_id'] = table_id
    session_info['restaurant_order'] = True
    if pos_session.config_id.translate_products_name and pos_session.config_id.set_product_name_from_field:
        # TODO: supported multi language products
        session_info['products_name'] = {}
        values = request.env['product.product'].sudo().search_read([
            ('available_in_pos', '=', True),
            ('%s' % pos_session.config_id.set_product_name_from_field, '!=',
             None),
        ], [pos_session.config_id.set_product_name_from_field])
        for val in values:
            session_info['products_name'][val['id']] = val[
                pos_session.config_id.set_product_name_from_field]
    if pos_session.config_id.replace_partners_name and pos_session.config_id.set_partner_name_from_field != 'name':
        session_info['partners_name'] = {}
        values = request.env['res.partner'].sudo().search_read([
            ('%s' % pos_session.config_id.set_partner_name_from_field, '!=',
             None),
        ], [pos_session.config_id.set_partner_name_from_field])
        for val in values:
            session_info['partners_name'][val['id']] = val[
                pos_session.config_id.set_partner_name_from_field]
    context = {
        'session_info': session_info,
        'login_number': pos_session.login(),
    }
    _logger.info('========== *** Guest LOGIN TO POS %s *** =========' %
                 (timeit.default_timer() - start))
    return request.render('point_of_sale.index', qcontext=context)
def _where_calc(self, domain, active_test=True):
    """Hide subtasks (records with a parent) when the context flag is set."""
    hide_subtasks = self._context.get(NO_DISPLAY_SUBTASKS)
    if hide_subtasks:
        top_level_only = [("parent_id", "=", False)]
        domain = AND([domain or [], top_level_only])
    return super()._where_calc(domain, active_test)
def pos_web(self, config_id=False, **k):
    """Entry point for the POS web client (logged-in cashier variant).

    Finds (or creates, when the user's configured POS matches
    ``config_id``) an open session, then renders the POS client with a
    ``session_info`` payload (UI config, data-loading id ranges, license
    check, translated product/partner names).
    """
    start = timeit.default_timer()
    # Record first-use date for licensing on the very first hit.
    license_started_date = request.env['ir.config_parameter'].sudo(
    ).get_param('license_started_date')
    if not license_started_date:
        request.env['ir.config_parameter'].sudo().set_param(
            'license_started_date', fields.Date.today())
    domain = [('state', 'in', ['opening_control', 'opened']),
              ('user_id', '=', request.session.uid), ('rescue', '=', False)]
    if config_id:
        domain = AND([domain, [('config_id', '=', int(config_id))]])
    pos_session = request.env['pos.session'].sudo().search(domain, limit=1)
    # Auto-create a session only when the requested config is the one
    # assigned to the current user.
    if not pos_session and config_id and request.env.user.pos_config_id and request.env.user.pos_config_id.id == int(
            config_id):
        pos_session = request.env['pos.session'].create({
            'user_id': request.env.user.id,
            'config_id': int(config_id),
        })
    if not pos_session or not config_id:
        return werkzeug.utils.redirect(
            '/web#action=point_of_sale.action_client_pos_menu')
    # The POS only work in one company, so we enforce the one of the session in the context
    if request.env.user.id not in [
            1, 2
    ] and not pos_session.config_id.restaurant_order:
        request.env['pos.remote.session'].sudo(
        ).closing_another_sessions_opened(
            pos_session.config_id.id,
            _('%s opening your POS Session. We closing your session now',
              request.env.user.name), start)
    session_info = request.env['ir.http'].session_info()
    # UI/theme settings of this POS config, shipped to the client.
    configs = request.env['pos.config'].search_read(
        [
            ('id', '=', pos_session.config_id.id),
        ], [
            'id',
            'background',
            'primary_color',
            'secondary_color',
            'three_color',
            'cart_box_style',
            'product_width',
            'product_height',
            'cart_width',
            'cart_background',
            'font_family',
            'display_product_image',
            'payment_screen_background',
            'limited_products_display',
            # 'big_datas_turbo',
            'multi_session',
            'receipt_template',
            'product_screen_background',
            'header_color',
        ])
    config_data = configs[0]
    config_data['pos_session_id'] = pos_session.id
    session_info['config'] = config_data
    modules_installed = request.env['ir.module.module'].sudo().search(
        [('name', '=', 'pos_retail')], limit=1)
    if not modules_installed:
        session_info['pos_retail'] = False
    else:
        session_info['pos_retail'] = True
    # Id ranges/counts let the client plan incremental data loading.
    session_info['model_ids'] = {
        'product.product': {
            'min_id': 0,
            'max_id': 0,
        },
        'res.partner': {
            'min_id': 0,
            'max_id': 0
        },
    }
    request.env.cr.execute("select max(id) from product_product")
    product_max_ids = request.env.cr.fetchall()
    request.env.cr.execute("select count(id) from product_product")
    count_products = request.env.cr.fetchall()
    session_info['model_ids']['product.product'][
        'max_id'] = product_max_ids[0][0] if len(
            product_max_ids) == 1 else 1
    session_info['model_ids']['product.product']['count'] = count_products[
        0][0] if len(count_products) == 1 else None
    request.env.cr.execute("select max(id) from res_partner")
    partner_max_ids = request.env.cr.fetchall()
    session_info['model_ids']['res.partner']['max_id'] = partner_max_ids[
        0][0] if len(partner_max_ids) == 1 else 10
    request.env.cr.execute("select count(id) from res_partner")
    count_partners = request.env.cr.fetchall()
    session_info['model_ids']['res.partner']['count'] = count_partners[0][
        0] if len(count_partners) == 1 else None
    session_info['user_context'][
        'allowed_company_ids'] = pos_session.company_id.ids
    session_info[
        'company_currency_id'] = request.env.user.company_id.currency_id.id
    session_info['license'] = request.env['ir.config_parameter'].sudo(
    ).get_param('license')
    if session_info['license']:
        # License is checked against the db name, then a generic SaaS key.
        license = session_info['license'].split(' ')[0]
        session_info['license'] = crypt_context.verify_and_update(
            request.env.cr.dbname, license)[0]
        if not session_info['license']:
            session_info['license'] = crypt_context.verify_and_update(
                'saas_license', license)[0]
    session_info['config_id'] = config_id
    session_info['products_name'] = None
    session_info['partners_name'] = None
    session_info['start_time'] = start
    # NOTE(review): this pos_retail lookup duplicates the one above; kept as-is.
    modules_installed = request.env['ir.module.module'].sudo().search(
        [('name', '=', 'pos_retail')], limit=1)
    if not modules_installed:
        session_info['pos_retail'] = False
    else:
        session_info['pos_retail'] = True
    if pos_session.config_id.translate_products_name and pos_session.config_id.set_product_name_from_field:
        # TODO: supported multi language products
        session_info['products_name'] = {}
        values = request.env['product.product'].sudo().search_read([
            ('available_in_pos', '=', True),
            ('%s' % pos_session.config_id.set_product_name_from_field, '!=',
             None),
        ], [pos_session.config_id.set_product_name_from_field])
        for val in values:
            session_info['products_name'][val['id']] = val[
                pos_session.config_id.set_product_name_from_field]
    if pos_session.config_id.replace_partners_name and pos_session.config_id.set_partner_name_from_field != 'name':
        session_info['partners_name'] = {}
        values = request.env['res.partner'].sudo().search_read([
            ('%s' % pos_session.config_id.set_partner_name_from_field, '!=',
             None),
        ], [pos_session.config_id.set_partner_name_from_field])
        for val in values:
            session_info['partners_name'][val['id']] = val[
                pos_session.config_id.set_partner_name_from_field]
    login_number = pos_session.login()
    session_info['opened_at'] = fields.Datetime.to_string(
        pos_session.opened_at)
    session_info['opened_uid'] = pos_session.opened_uid.id
    context = {
        'session_info': session_info,
        'login_number': login_number,
        'pos_session_id': pos_session.id,
    }
    _logger.info(
        '=========> [POS Started Session] with total seconds: %s <============='
        % (timeit.default_timer() - start))
    return request.render('point_of_sale.index', qcontext=context)
def pos_web(self, config_id=False, **k):
    """Entry point for the POS web client (auto-open session variant).

    Looks for an already-opened session of the current user; if none and
    the user has a configured POS, creates and opens one, then renders the
    POS client with the ``session_info`` payload.
    """
    start = timeit.default_timer()
    domain = [('state', '=', 'opened'),
              ('user_id', '=', request.session.uid), ('rescue', '=', False)]
    if config_id:
        domain = AND([domain, [('config_id', '=', int(config_id))]])
    pos_session = request.env['pos.session'].sudo().search(domain, limit=1)
    if not pos_session and config_id and request.env.user.pos_config_id:
        # NOTE(review): the new session uses the user's pos_config_id, which
        # may differ from the requested config_id — confirm this is intended.
        pos_session = request.env['pos.session'].create({
            'user_id': request.env.user.id,
            'config_id': request.env.user.pos_config_id.id,
        })
        pos_session.action_pos_session_open()
    if not pos_session or not config_id:
        return werkzeug.utils.redirect(
            '/web#action=point_of_sale.action_client_pos_menu')
    # The POS only work in one company, so we enforce the one of the session in the context
    session_info = request.env['ir.http'].session_info()
    # Id ranges/counts let the client plan incremental data loading.
    session_info['model_ids'] = {
        'product.product': {
            'min_id': 0,
            'max_id': 0,
        },
        'res.partner': {
            'min_id': 0,
            'max_id': 0
        },
    }
    request.env.cr.execute("select max(id) from product_product")
    product_max_ids = request.env.cr.fetchall()
    request.env.cr.execute("select count(id) from product_product")
    count_products = request.env.cr.fetchall()
    session_info['model_ids']['product.product'][
        'max_id'] = product_max_ids[0][0] if len(
            product_max_ids) == 1 else 1
    session_info['model_ids']['product.product']['count'] = count_products[
        0][0] if len(count_products) == 1 else None
    request.env.cr.execute("select max(id) from res_partner")
    partner_max_ids = request.env.cr.fetchall()
    session_info['model_ids']['res.partner']['max_id'] = partner_max_ids[
        0][0] if len(partner_max_ids) == 1 else 10
    request.env.cr.execute("select count(id) from res_partner")
    count_partners = request.env.cr.fetchall()
    session_info['model_ids']['res.partner']['count'] = count_partners[0][
        0] if len(count_partners) == 1 else None
    session_info['user_context'][
        'allowed_company_ids'] = pos_session.company_id.ids
    session_info[
        'company_currency_id'] = request.env.user.company_id.currency_id.id
    session_info['big_datas_turbo'] = pos_session.config_id.big_datas_turbo
    session_info['license'] = request.env['ir.config_parameter'].sudo(
    ).get_param('license')
    if session_info['license']:
        # License is checked against the db name, then a generic SaaS key.
        license = session_info['license'].split(' ')[0]
        session_info['license'] = crypt_context.verify_and_update(
            request.env.cr.dbname, license)[0]
        if not session_info['license']:
            session_info['license'] = crypt_context.verify_and_update(
                'saas_license', license)[0]
    session_info['config_id'] = config_id
    session_info['products_name'] = None
    session_info['partners_name'] = None
    if pos_session.config_id.translate_products_name and pos_session.config_id.set_product_name_from_field:
        # TODO: supported multi language products
        session_info['products_name'] = {}
        values = request.env['product.product'].sudo().search_read([
            ('available_in_pos', '=', True),
            ('%s' % pos_session.config_id.set_product_name_from_field, '!=',
             None),
        ], [pos_session.config_id.set_product_name_from_field])
        for val in values:
            session_info['products_name'][val['id']] = val[
                pos_session.config_id.set_product_name_from_field]
    if pos_session.config_id.replace_partners_name and pos_session.config_id.set_partner_name_from_field != 'name':
        session_info['partners_name'] = {}
        values = request.env['res.partner'].sudo().search_read([
            ('%s' % pos_session.config_id.set_partner_name_from_field, '!=',
             None),
        ], [pos_session.config_id.set_partner_name_from_field])
        for val in values:
            session_info['partners_name'][val['id']] = val[
                pos_session.config_id.set_partner_name_from_field]
    context = {
        'session_info': session_info,
        'login_number': pos_session.login(),
    }
    _logger.info(
        '========== *** POS starting with loaded times: %s *** ========='
        % (timeit.default_timer() - start))
    return request.render('point_of_sale.index', qcontext=context)
def read_group(self, domain, *args, **kwargs):
    """Exclude template records from grouped searches unless explicitly requested."""
    needs_template_filter = should_apply_default_template_filter(domain, self._context)
    if needs_template_filter:
        non_template_leaf = [('is_template', '=', False)]
        domain = AND((domain or [], non_template_leaf))
    return super().read_group(domain, *args, **kwargs)
def _find_move_line_for_package(self, package):
    """Return the first pending move line for *package* (empty recordset if none)."""
    package_leaf = [("package_id", "in", package.ids)]
    search_domain = AND([self._domain_move_lines_to_do_all(), package_leaf])
    MoveLine = self.env["stock.move.line"]
    return MoveLine.search(search_domain, limit=1)
def _project_get_page_view_values(self, project, access_token, page=1, date_begin=None, date_end=None, sortby=None, search=None, search_in='content', groupby=None, **kwargs):
    """Build the rendering values for a single project's portal task page.

    :param project: ``project.project`` record whose tasks are listed
    :param access_token: portal token; when set the task model is sudo'ed
        (token validity is assumed to be checked by the caller — TODO confirm)
    :param page: pager page number
    :param date_begin: optional lower bound on task creation date
    :param date_end: optional upper bound on task creation date
    :param sortby: key into the search bar sortings, defaults to 'date'
    :param search: free-text search string
    :param search_in: which field group to search in, defaults to 'content'
    :param groupby: grouping key, defaults to 'project'
    :return: values dict produced by ``_get_page_view_values``
    """
    # TODO: refactor this because most of this code is duplicated from portal_my_tasks method
    values = self._prepare_portal_layout_values()
    searchbar_sortings = self._task_get_searchbar_sortings()
    searchbar_inputs = self._task_get_searchbar_inputs()
    searchbar_groupby = self._task_get_searchbar_groupby()
    # default sort by value
    if not sortby:
        sortby = 'date'
    order = searchbar_sortings[sortby]['order']
    # default filter by value: only tasks of the requested project
    domain = [('project_id', '=', project.id)]
    # default group by value
    if not groupby:
        groupby = 'project'
    if date_begin and date_end:
        domain += [('create_date', '>', date_begin), ('create_date', '<=', date_end)]
    # free-text search
    if search and search_in:
        domain += self._task_get_search_domain(search_in, search)
    Task = request.env['project.task']
    if access_token:
        # token-based access: rely on the token, bypass record rules
        Task = Task.sudo()
    elif not request.env.user._is_public():
        # logged-in user: bake the user's read record rules into the domain,
        # then sudo so the search runs without re-applying other rules
        domain = AND([
            domain,
            request.env['ir.rule']._compute_domain(Task._name, 'read')
        ])
        Task = Task.sudo()
    # task count
    task_count = Task.search_count(domain)
    # pager
    url = "/my/project/%s" % project.id
    pager = portal_pager(url=url, url_args={
        'date_begin': date_begin,
        'date_end': date_end,
        'sortby': sortby,
        'groupby': groupby,
        'search_in': search_in,
        'search': search
    }, total=task_count, page=page, step=self._items_per_page)
    # content according to pager and archive selected
    order = self._task_get_order(order, groupby)
    tasks = Task.search(domain, order=order, limit=self._items_per_page, offset=pager['offset'])
    # remember the listed tasks (capped at 100 ids) for prev/next navigation
    request.session['my_project_tasks_history'] = tasks.ids[:100]
    groupby_mapping = self._task_get_groupby_mapping()
    group = groupby_mapping.get(groupby)
    if group:
        # tasks are already sorted by `order`, so consecutive grouping is safe
        grouped_tasks = [
            Task.concat(*g)
            for k, g in groupbyelem(tasks, itemgetter(group))
        ]
    else:
        # no grouping field: single group containing every task of the page
        grouped_tasks = [tasks]
    values.update(
        date=date_begin,
        date_end=date_end,
        grouped_tasks=grouped_tasks,
        page_name='project',
        default_url=url,
        pager=pager,
        searchbar_sortings=searchbar_sortings,
        searchbar_groupby=searchbar_groupby,
        searchbar_inputs=searchbar_inputs,
        search_in=search_in,
        search=search,
        sortby=sortby,
        groupby=groupby,
        project=project,
    )
    return self._get_page_view_values(project, access_token, values, 'my_projects_history', False, **kwargs)
def _compute_amounts(self, frequency, company):
    """ Method used to compute all the business data of the new object.
        It will search for previous closings of the same frequency to
        infer the move from which account move lines should be fetched.
        @param {string} frequency: a valid value of the selection field on the object
            (daily, monthly, annually) frequencies are literal
            (daily means 24 hours and so on)
        @param {recordset} company: the company for which the closing is done
        @return {dict} containing {field: value} for each business field of the object
    """
    interval_dates = self._interval_dates(frequency, company)
    # latest closing of the same frequency/company, if any
    previous_closing = self.search([
        ('frequency', '=', frequency),
        ('company_id', '=', company.id)
    ], limit=1, order='sequence_number desc')
    first_order = self.env['pos.order']
    date_start = interval_dates['interval_from']
    cumulative_total = 0
    if previous_closing:
        # chain onto the previous closing: resume after its last order
        # and carry over its running total
        first_order = previous_closing.last_order_id
        date_start = previous_closing.create_date
        cumulative_total += previous_closing.cumulative_total
    domain = [('company_id', '=', company.id), ('state', 'in', ('paid', 'done', 'invoiced'))]
    # explicit False/None checks: a sequence number of 0 would still be a
    # valid anchor, so plain truthiness cannot be used here
    if first_order.l10n_fr_secure_sequence_number is not False and first_order.l10n_fr_secure_sequence_number is not None:
        # a previously hashed order exists: fetch strictly newer orders by
        # their inalterability sequence number
        domain = AND([
            domain,
            [('l10n_fr_secure_sequence_number', '>', first_order.l10n_fr_secure_sequence_number)]
        ])
    elif date_start:
        #the first time we compute the closing, we consider only from the installation of the module
        domain = AND([domain, [('date_order', '>=', date_start)]])
    orders = self.env['pos.order'].search(domain, order='date_order desc')
    total_interval = sum(orders.mapped('amount_total'))
    cumulative_total += total_interval
    # We keep the reference to avoid gaps (like daily object during the weekend)
    last_order = first_order
    if orders:
        # orders are sorted date_order desc, so index 0 is the most recent
        last_order = orders[0]
    return {
        'total_interval': total_interval,
        'cumulative_total': cumulative_total,
        'last_order_id': last_order.id,
        'last_order_hash': last_order.l10n_fr_secure_sequence_number,
        'date_closing_stop': interval_dates['date_stop'],
        'date_closing_start': date_start,
        'name': interval_dates['name_interval'] + ' - ' + interval_dates['date_stop'][:10]
    }
def force_storage_to_db_for_special_fields(self, new_cr=False):
    """Migrate special attachments from Object Storage back to database

    The access to a file stored on the objects storage is slower than
    a local disk or database access. For attachments like image_small
    that are accessed in batch for kanban views, this is too slow. We
    store this type of attachment in the database.

    This method can be used when migrating a filestore where all the
    files, including the special files (assets, image_small, ...) have
    been pushed to the Object Storage and we want to write them back
    in the database.

    It is not called anywhere, but can be called by RPC or scripts.

    :param new_cr: when True, the work is done in a fresh cursor/env
        (see ``do_in_new_env``) so commits do not affect the caller's
        transaction
    """
    storage = self._storage()
    # only relevant when the active storage is one of the object stores
    if storage not in self._get_stores():
        return
    domain = AND((
        normalize_domain(
            [('store_fname', '=like', '{}://%'.format(storage)),
             # for res_field, see comment in
             # _force_storage_to_object_storage
             '|',
             ('res_field', '=', False),
             ('res_field', '!=', False),
             ]
        ),
        normalize_domain(self._store_in_db_instead_of_object_storage_domain())
    ))
    with self.do_in_new_env(new_cr=new_cr) as new_env:
        # prefetch disabled: attachments are handled one by one on purpose
        model_env = new_env['ir.attachment'].with_context(
            prefetch_fields=False
        )
        attachment_ids = model_env.search(domain).ids
        if not attachment_ids:
            return
        total = len(attachment_ids)
        start_time = time.time()
        _logger.info('Moving %d attachments from %s to'
                     ' DB for fast access', total, storage)
        current = 0
        for attachment_id in attachment_ids:
            current += 1
            # if we browse attachments outside of the loop, the first
            # access to 'datas' will compute all the 'datas' fields at
            # once, which means reading hundreds or thousands of files at
            # once, exhausting memory
            attachment = model_env.browse(attachment_id)
            # this write will read the datas from the Object Storage and
            # write them back in the DB (the logic for location to write is
            # in the 'datas' inverse computed field)
            attachment.write({'datas': attachment.datas})
            # as the file will potentially be deleted from the bucket,
            # we should commit the changes here
            new_env.cr.commit()
            # progress log every 100 records and on the final one
            if current % 100 == 0 or total - current == 0:
                _logger.info(
                    'attachment %s/%s after %.2fs',
                    current, total, time.time() - start_time
                )
def _full_text_search(self, name, domain=None, limit=None):
    """Search records whose full-text index matches *name*.

    :param name: fragment matched (``like``) against the ``full_text`` field
    :param domain: optional extra domain ANDed with the full-text leaf
    :param limit: optional maximum number of records to return
    :return: matching recordset
    """
    full_text_domain = self._expand_full_text_domain([("full_text", "like", name)])
    # `domain` defaults to None, but AND() normalizes each operand and
    # rejects non-list values — substitute an empty domain so calling
    # without an extra domain does not crash (same `domain or []` guard
    # used elsewhere in this codebase).
    return self.search(AND([domain or [], full_text_domain]), limit=limit)
def search_panel_select_multi_range(self, field_name, **kwargs):
    """
    Return possible values of the field field_name (case select="multi"),
    possibly with counters and groups.

    :param field_name: the name of a filter field;
        possible types are many2one, many2many, selection.
    :param category_domain: domain generated by categories. Default is [].
    :param comodel_domain: domain of field values (if relational)
        (this parameter is used in _search_panel_range). Default is [].
    :param enable_counters: whether to count records by value. Default is False.
    :param expand: whether to return the full range of field values in comodel_domain
        or only the field image values. Default is False.
    :param filter_domain: domain generated by filters. Default is [].
    :param group_by: extra field to read on comodel, to group comodel records
    :param group_domain: dict, one domain for each activated group
        for the group_by (if any). Those domains are used to fech accurate
        counters for values in each group. Default is [] (many2one case) or None.
    :param limit: integer, maximal number of values to fetch. Default is None.
    :param search_domain: base domain of search. Default is [].

    :return: {
        'values': a list of possible values, each being a dict with keys
            'id' (value),
            'name' (value label),
            '__count' (how many records with that value),
            'group_id' (value of group), set if a group_by has been provided,
            'group_name' (label of group), set if a group_by has been provided
    }
    or an object with an error message when limit is defined and reached.
    """
    field = self._fields[field_name]
    supported_types = ['many2one', 'many2many', 'selection']
    if field.type not in supported_types:
        raise UserError(_('Only types %(supported_types)s are supported for filter (found type %(field_type)s)') % ({
            'supported_types': supported_types, 'field_type': field.type}))

    model_domain = kwargs.get('search_domain', [])
    # category + filter domains are applied on top of the base search domain
    extra_domain = AND([
        kwargs.get('category_domain', []),
        kwargs.get('filter_domain', []),
    ])

    if field.type == 'selection':
        # selection fields have a fixed range: delegate entirely
        return {
            'values': self._search_panel_selection_range(field_name, model_domain=model_domain,
                extra_domain=extra_domain, **kwargs
            )
        }

    # hierarchical_naming off: plain display names, no "Parent / Child"
    Comodel = self.env.get(field.comodel_name).with_context(hierarchical_naming=False)
    field_names = ['display_name']
    group_by = kwargs.get('group_by')
    limit = kwargs.get('limit')
    if group_by:
        group_by_field = Comodel._fields[group_by]
        field_names.append(group_by)
        # group_id_name maps a raw group value to an (id, label) pair,
        # depending on the group_by field's type
        if group_by_field.type == 'many2one':
            def group_id_name(value):
                # many2one read() already yields (id, name); fall back for unset
                return value or (False, _("Not Set"))
        elif group_by_field.type == 'selection':
            desc = Comodel.fields_get([group_by])[group_by]
            group_by_selection = dict(desc['selection'])
            group_by_selection[False] = _("Not Set")

            def group_id_name(value):
                return value, group_by_selection[value]
        else:
            def group_id_name(value):
                return (value, value) if value else (False, _("Not Set"))

    comodel_domain = kwargs.get('comodel_domain', [])
    enable_counters = kwargs.get('enable_counters')
    expand = kwargs.get('expand')

    if field.type == 'many2many':
        comodel_records = Comodel.search_read(comodel_domain, field_names, limit=limit)
        if expand and limit and len(comodel_records) == limit:
            # hitting the limit means the range is truncated: bail out
            return {'error_msg': _(SEARCH_PANEL_ERROR_MESSAGE)}
        group_domain = kwargs.get('group_domain')
        field_range = []
        for record in comodel_records:
            record_id = record['id']
            values= {
                'id': record_id,
                'display_name': record['display_name'],
            }
            if group_by:
                group_id, group_name = group_id_name(record[group_by])
                values['group_id'] = group_id
                values['group_name'] = group_name
            # domains are only needed when counting or when filtering down
            # to values actually present in the records (not expand)
            if enable_counters or not expand:
                search_domain = AND([
                    model_domain,
                    [(field_name, 'in', record_id)],
                ])
                local_extra_domain = extra_domain
                if group_by and group_domain:
                    # group domains are keyed by the JSON dump of the group id
                    local_extra_domain = AND([
                        local_extra_domain,
                        group_domain.get(json.dumps(group_id), []),
                    ])
                search_count_domain = AND([
                    search_domain,
                    local_extra_domain
                ])
            if enable_counters:
                count = self.search_count(search_count_domain)
            if not expand:
                # decide whether the value appears in the filtered records;
                # reuse the counter when the extra domain is trivially true
                if enable_counters and is_true_domain(local_extra_domain):
                    inImage = count
                else:
                    inImage = self.search(search_domain, limit=1)
            if expand or inImage:
                if enable_counters:
                    values['__count'] = count
                field_range.append(values)
        if not expand and limit and len(field_range) == limit:
            return {'error_msg': _(SEARCH_PANEL_ERROR_MESSAGE)}
        return {
            'values': field_range,
        }

    if field.type == 'many2one':
        if enable_counters or not expand:
            extra_domain = AND([
                extra_domain,
                kwargs.get('group_domain', []),
            ])
            # domain_image: {value_id: {'id':..., 'display_name':..., '__count':...}}
            domain_image = self._search_panel_field_image(field_name,
                model_domain=model_domain, extra_domain=extra_domain,
                only_counters=expand,
                set_limit=limit and not (expand or group_by or comodel_domain), **kwargs
            )
        if not (expand or group_by or comodel_domain):
            # fast path: the field image alone is the full answer
            values = list(domain_image.values())
            if limit and len(values) == limit:
                return {'error_msg': _(SEARCH_PANEL_ERROR_MESSAGE)}
            return {'values': values, }
        if not expand:
            # restrict the comodel search to values present in the image
            image_element_ids = list(domain_image.keys())
            comodel_domain = AND([
                comodel_domain,
                [('id', 'in', image_element_ids)],
            ])
        comodel_records = Comodel.search_read(comodel_domain, field_names, limit=limit)
        if limit and len(comodel_records) == limit:
            return {'error_msg': _(SEARCH_PANEL_ERROR_MESSAGE)}
        field_range = []
        for record in comodel_records:
            record_id = record['id']
            values= {
                'id': record_id,
                'display_name': record['display_name'],
            }
            if group_by:
                group_id, group_name = group_id_name(record[group_by])
                values['group_id'] = group_id
                values['group_name'] = group_name
            if enable_counters:
                # values absent from the image have a zero count
                image_element = domain_image.get(record_id)
                values['__count'] = image_element['__count'] if image_element else 0
            field_range.append(values)
        return {
            'values': field_range,
        }
if date_stop: date_stop = fields.Datetime.from_string(date_stop) # avoid a date_stop smaller than date_start if (date_stop < date_start): date_stop = date_start + timedelta(days=1, seconds=-1) else: # stop by default today 23:59:59 date_stop = date_start + timedelta(days=1, seconds=-1) domain = AND([domain, [('date_order', '>=', fields.Datetime.to_string(date_start)), ('date_order', '<=', fields.Datetime.to_string(date_stop))] ]) if config_ids: domain = AND([domain, [('config_id', 'in', config_ids)]]) orders = self.env['pos.order'].search(domain) user_currency = self.env.company.currency_id total = 0.0 products_sold = {} taxes = {} for order in orders: if user_currency != order.pricelist_id.currency_id: total += order.pricelist_id.currency_id._convert( order.amount_total, user_currency, order.company_id, order.date_order or fields.Date.today()) else: total += order.amount_total currency = order.session_id.currency_id
def get_extended_security_domain(self):
    """Narrow the inherited security domain to customer partners only."""
    base_domain = super().get_extended_security_domain()
    customer_leaf = [('customer', '=', True)]
    return AND((base_domain, customer_leaf))
def _add_supplier_domain_if_required(domain, env, context): partner_id = context.get('stock_inventory_partner_filter') if partner_id: products = get_products_from_supplier_id(env, partner_id) domain = AND((domain or [], [('id', '=', products.ids)])) return domain
def get_sale_details(self, date_start=False, date_stop=False, config_ids=False, session_ids=False):
    """ Serialise the orders of the requested time period, configs and sessions.
    :param date_start: The dateTime to start, default today 00:00:00.
    :type date_start: str.
    :param date_stop: The dateTime to stop, default date_start + 23:59:59.
    :type date_stop: str.
    :param config_ids: Pos Config id's to include.
    :type config_ids: list of numbers.
    :param session_ids: Pos Config id's to include.
    :type session_ids: list of numbers.
    :returns: dict -- Serialised sales.
    """
    domain = [('state', 'in', ['paid','invoiced','done'])]
    if (session_ids):
        # explicit sessions: the date/config filters are not applied
        domain = AND([domain, [('session_id', 'in', session_ids)]])
    else:
        if date_start:
            date_start = fields.Datetime.from_string(date_start)
        else:
            # start by default today 00:00:00 (user timezone, converted to UTC)
            user_tz = pytz.timezone(self.env.context.get('tz') or self.env.user.tz or 'UTC')
            today = user_tz.localize(fields.Datetime.from_string(fields.Date.context_today(self)))
            date_start = today.astimezone(pytz.timezone('UTC'))
        if date_stop:
            date_stop = fields.Datetime.from_string(date_stop)
            # avoid a date_stop smaller than date_start
            if (date_stop < date_start):
                date_stop = date_start + timedelta(days=1, seconds=-1)
        else:
            # stop by default today 23:59:59
            date_stop = date_start + timedelta(days=1, seconds=-1)
        domain = AND([domain,
            [('date_order', '>=', fields.Datetime.to_string(date_start)),
             ('date_order', '<=', fields.Datetime.to_string(date_stop))]
        ])
        if config_ids:
            domain = AND([domain, [('config_id', 'in', config_ids)]])
    orders = self.env['pos.order'].search(domain)
    user_currency = self.env.company.currency_id
    total = 0.0
    # {(product, price_unit, discount): summed quantity}
    products_sold = {}
    # {tax id (0 = no tax): {'name', 'tax_amount', 'base_amount'}}
    taxes = {}
    for order in orders:
        if user_currency != order.pricelist_id.currency_id:
            # convert order total into the user's currency at the order date
            total += order.pricelist_id.currency_id._convert(
                order.amount_total, user_currency, order.company_id, order.date_order or fields.Date.today())
        else:
            total += order.amount_total
        currency = order.session_id.currency_id
        for line in order.lines:
            key = (line.product_id, line.price_unit, line.discount)
            products_sold.setdefault(key, 0.0)
            products_sold[key] += line.qty
            if line.tax_ids_after_fiscal_position:
                # recompute taxes on the discounted unit price
                line_taxes = line.tax_ids_after_fiscal_position.compute_all(line.price_unit * (1-(line.discount or 0.0)/100.0), currency, line.qty, product=line.product_id, partner=line.order_id.partner_id or False)
                for tax in line_taxes['taxes']:
                    taxes.setdefault(tax['id'], {'name': tax['name'], 'tax_amount':0.0, 'base_amount':0.0})
                    taxes[tax['id']]['tax_amount'] += tax['amount']
                    taxes[tax['id']]['base_amount'] += tax['base']
            else:
                # tax-free lines are aggregated under the pseudo tax id 0
                taxes.setdefault(0, {'name': _('No Taxes'), 'tax_amount':0.0, 'base_amount':0.0})
                taxes[0]['base_amount'] += line.price_subtotal_incl

    payment_ids = self.env["pos.payment"].search([('pos_order_id', 'in', orders.ids)]).ids
    if payment_ids:
        # aggregate paid amounts per payment method in SQL
        self.env.cr.execute("""
            SELECT method.name, sum(amount) total
            FROM pos_payment AS payment, pos_payment_method AS method
            WHERE payment.payment_method_id = method.id
                AND payment.id IN %s
            GROUP BY method.name
        """, (tuple(payment_ids),))
        payments = self.env.cr.dictfetchall()
    else:
        payments = []

    return {
        'currency_precision': user_currency.decimal_places,
        'total_paid': user_currency.round(total),
        'payments': payments,
        'company_name': self.env.company.name,
        'taxes': list(taxes.values()),
        'products': sorted([{
            'product_id': product.id,
            'product_name': product.name,
            'code': product.default_code,
            'quantity': qty,
            'price_unit': price_unit,
            'discount': discount,
            'uom': product.uom_id.name
        } for (product, price_unit, discount), qty in products_sold.items()], key=lambda l: l['product_name'])
    }
def get_sale_details(self, date_start=False, date_stop=False, config_ids=False, session_ids=False):
    """Serialise the orders of the day information

    params: date_start, date_stop string representing the datetime of order

    :param date_start: start of the period (string datetime); defaults to
        today 00:00:00 in the user's timezone.
    :param date_stop: end of the period (string datetime); defaults to
        ``date_start`` + 23:59:59.
    :param config_ids: optional pos.config ids to filter on.
    :param session_ids: optional pos.session ids; when given, the date and
        config filters are skipped.
    :returns: dict of serialised sales, including ``absolute_discount``
        per product line.

    Fix: quantities of lines sharing the same (product, price_unit,
    discount) key are now accumulated. The previous ``setdefault`` call
    only stored the first matching line and silently dropped the quantity
    of every subsequent one (the sibling implementation of this method
    accumulates with ``+= line.qty``).
    """
    domain = [("state", "in", ["paid", "invoiced", "done"])]
    if session_ids:
        # explicit sessions requested: ignore the date/config filters
        domain = AND([domain, [("session_id", "in", session_ids)]])
    else:
        if date_start:
            date_start = fields.Datetime.from_string(date_start)
        else:
            # start by default today 00:00:00 (user timezone, converted to UTC)
            user_tz = pytz.timezone(
                self.env.context.get("tz") or self.env.user.tz or "UTC")
            today = user_tz.localize(
                fields.Datetime.from_string(
                    fields.Date.context_today(self)))
            date_start = today.astimezone(pytz.timezone("UTC"))
        if date_stop:
            date_stop = fields.Datetime.from_string(date_stop)
            # avoid a date_stop smaller than date_start
            if date_stop < date_start:
                date_stop = date_start + timedelta(days=1, seconds=-1)
        else:
            # stop by default today 23:59:59
            date_stop = date_start + timedelta(days=1, seconds=-1)
        domain = AND([
            domain,
            [
                ("date_order", ">=", fields.Datetime.to_string(date_start)),
                ("date_order", "<=", fields.Datetime.to_string(date_stop)),
            ],
        ])
        if config_ids:
            domain = AND([domain, [("config_id", "in", config_ids)]])
    orders = self.env["pos.order"].search(domain)
    user_currency = self.env.company.currency_id
    total = 0.0
    # {(product, price_unit, discount): (summed qty, absolute_discount)}
    products_sold = {}
    # {tax id (0 = no tax): {'name', 'tax_amount', 'base_amount'}}
    taxes = {}
    for order in orders:
        if user_currency != order.pricelist_id.currency_id:
            # convert order total into the user's currency at the order date
            total += order.pricelist_id.currency_id._convert(
                order.amount_total,
                user_currency,
                order.company_id,
                order.date_order or fields.Date.today(),
            )
        else:
            total += order.amount_total
        currency = order.session_id.currency_id
        for line in order.lines:
            key = (line.product_id, line.price_unit, line.discount)
            if key in products_sold:
                # accumulate quantity; keep the absolute_discount of the
                # first line seen for this key (first-wins, as before)
                qty_so_far, abs_discount = products_sold[key]
                products_sold[key] = (qty_so_far + line.qty, abs_discount)
            else:
                products_sold[key] = (line.qty or 0, line.absolute_discount or 0)
            if line.tax_ids_after_fiscal_position:
                if line.absolute_discount:
                    # absolute discount applies as a flat factor on the
                    # unit price, not as a percentage
                    line_taxes = line.tax_ids_after_fiscal_position.compute_all(
                        line.price_unit * (1 - line.absolute_discount),
                        currency,
                        line.qty,
                        product=line.product_id,
                        partner=line.order_id.partner_id or False,
                    )
                else:
                    line_taxes = line.tax_ids_after_fiscal_position.compute_all(
                        line.price_unit * (1 - (line.discount or 0.0) / 100.0),
                        currency,
                        line.qty,
                        product=line.product_id,
                        partner=line.order_id.partner_id or False,
                    )
                for tax in line_taxes["taxes"]:
                    taxes.setdefault(
                        tax["id"],
                        {
                            "name": tax["name"],
                            "tax_amount": 0.0,
                            "base_amount": 0.0,
                        },
                    )
                    taxes[tax["id"]]["tax_amount"] += tax["amount"]
                    taxes[tax["id"]]["base_amount"] += tax["base"]
            else:
                # tax-free lines are aggregated under the pseudo tax id 0
                taxes.setdefault(
                    0,
                    {
                        "name": _("No Taxes"),
                        "tax_amount": 0.0,
                        "base_amount": 0.0
                    },
                )
                taxes[0]["base_amount"] += line.price_subtotal_incl
    payment_ids = (self.env["pos.payment"].search([("pos_order_id", "in", orders.ids)]).ids)
    if payment_ids:
        # aggregate paid amounts per payment method in SQL
        self.env.cr.execute(
            """
            SELECT method.name, sum(amount) total
            FROM pos_payment AS payment, pos_payment_method AS method
            WHERE payment.payment_method_id = method.id
                AND payment.id IN %s
            GROUP BY method.name
            """,
            (tuple(payment_ids), ),
        )
        payments = self.env.cr.dictfetchall()
    else:
        payments = []
    return {
        "currency_precision": user_currency.decimal_places,
        "total_paid": user_currency.round(total),
        "payments": payments,
        "company_name": self.env.company.name,
        "taxes": list(taxes.values()),
        "products": sorted(
            [{
                "product_id": product.id,
                "product_name": product.name,
                "code": product.default_code,
                "quantity": qty,
                "price_unit": price_unit,
                "discount": discount,
                "absolute_discount": absolute_discount,
                "uom": product.uom_id.name,
            } for (product, price_unit, discount), (
                qty,
                absolute_discount,
            ) in products_sold.items()],
            key=lambda l: l["product_name"],
        ),
    }