def _get_next_date(self, date, qty=1):
    next_date = super(ProductTemplate, self)._get_next_date(date)
    if self.membership_interval_unit == 'days':
        raise exceptions.Warning(
            _("It's not possible to prorate daily periods."))
    qty = math.ceil(qty) * self.membership_interval_qty
    if self.membership_interval_unit == 'weeks':
        next_date = date_utils.start_of(date, 'week')
        next_date = date_utils.add(next_date, weeks=qty)
    elif self.membership_interval_unit == 'months':
        next_date = date_utils.start_of(date, 'month')
        next_date = date_utils.add(next_date, months=qty)
    elif self.membership_interval_unit == 'years':
        next_date = date_utils.start_of(date, 'year')
        next_date = date_utils.add(next_date, years=qty)
    return next_date
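# A minimal worked example of the period arithmetic above, assuming a monthly
# membership with membership_interval_qty = 1 (the concrete dates are
# illustrative, not taken from the snippet).
from datetime import date
from dateutil.relativedelta import relativedelta

requested = date(2021, 5, 14)
start = requested.replace(day=1)              # date_utils.start_of(date, 'month')
next_date = start + relativedelta(months=1)   # date_utils.add(start, months=1)
assert next_date == date(2021, 6, 1)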
def _compute_two_weeks_explanation(self):
    today = fields.Date.today()
    week_type = _("odd") if int(
        math.floor((today.toordinal() - 1) / 7) % 2) else _("even")
    first_day = date_utils.start_of(today, 'week')
    last_day = date_utils.end_of(today, 'week')
    self.two_weeks_explanation = "This week (from %s to %s) is an %s week." % (
        first_day, last_day, week_type)
def _compute_two_weeks_explanation(self):
    today = fields.Date.today()
    week_type = self.env['resource.calendar.attendance'].get_week_type(today)
    week_type_str = _("second") if week_type else _("first")
    first_day = date_utils.start_of(today, 'week')
    last_day = date_utils.end_of(today, 'week')
    self.two_weeks_explanation = _("The current week (from %s to %s) correspond to the %s one.",
                                   first_day, last_day, week_type_str)
def get_timespan_start_end_dt(self):
    """Method to get the start and end date based on the timespan."""
    curr_dt = datetime.now().date()
    s_date = date_utils.start_of(curr_dt, 'month')
    e_date = date_utils.end_of(curr_dt, 'month')
    if self.time_span:
        if self.time_span == 'monthly':
            s_date = date_utils.start_of(curr_dt, 'month')
            e_date = date_utils.end_of(curr_dt, 'month')
        elif self.time_span == 'quarterly':
            # Pass granularity like year, quarter, month, week, day, hour
            s_date = date_utils.start_of(curr_dt, 'quarter')
            e_date = date_utils.end_of(curr_dt, 'quarter')
        elif self.time_span == 'yearly':
            s_date = date_utils.start_of(curr_dt, 'year')
            e_date = date_utils.end_of(curr_dt, 'year')
    return s_date, e_date
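# A standalone sketch (plain datetime/dateutil, not the Odoo implementation)
# approximating the (start, end) tuple the method above returns for each
# time_span; the sample date is illustrative.
from datetime import date
from dateutil.relativedelta import relativedelta

sample = date(2019, 3, 29)
month_range = (sample.replace(day=1),
               sample.replace(day=1) + relativedelta(months=1, days=-1))           # (2019-03-01, 2019-03-31)
quarter_start = date(sample.year, 3 * ((sample.month - 1) // 3) + 1, 1)
quarter_range = (quarter_start, quarter_start + relativedelta(months=3, days=-1))  # (2019-01-01, 2019-03-31)
year_range = (date(sample.year, 1, 1), date(sample.year, 12, 31))                  # (2019-01-01, 2019-12-31)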
def _compute_vat_period_date(self):
    self.ensure_one()
    date_to = self.date
    # Take the periodicity of the tax report (1 or 3 months) from the company. Since we use the move
    # date as the ending date of the period, we remove 0 or 2 months from that date and take the 1st
    # of that month to compute the starting date of the period.
    delay = self.company_id.account_tax_next_activity_type.delay_count - 1
    date_from = date_utils.start_of(date_to + relativedelta(months=-delay), 'month')
    options = {'date': {'date_from': date_from, 'date_to': date_to, 'filter': 'custom'}}
    report = self.env['account.generic.tax.report']
    return report._get_options(options)
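# Worked example of the period computation above, assuming a quarterly report
# (delay_count == 3); the dates are illustrative.
from datetime import date
from dateutil.relativedelta import relativedelta

date_to = date(2021, 3, 31)
delay = 3 - 1                                               # delay_count - 1
date_from = (date_to + relativedelta(months=-delay)).replace(day=1)
assert date_from == date(2021, 1, 1)                        # start_of(date_to - 2 months, 'month')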
def _read_progress_bar(self, domain, group_by, progress_bar):
    """ Implementation of read_progress_bar() that returns results in the
        format of read_group().
    """
    try:
        fname = progress_bar['field']
        return self.read_group(domain, [fname], [group_by, fname], lazy=False)
    except UserError:
        # possibly failed because of grouping on or aggregating non-stored
        # field; fallback on alternative implementation
        pass

    # Workaround to match read_group's infrastructure
    # TO DO in master: harmonize this function and readgroup to allow factorization
    group_by_name = group_by.partition(':')[0]
    group_by_modifier = group_by.partition(':')[2] or 'month'

    records_values = self.search_read(
        domain or [], [progress_bar['field'], group_by_name])
    field_type = self._fields[group_by_name].type

    for record_values in records_values:
        group_by_value = record_values.pop(group_by_name)

        # Again, imitating what _read_group_format_result and _read_group_prepare_data do
        if group_by_value and field_type in ['date', 'datetime']:
            locale = get_lang(self.env).code
            group_by_value = date_utils.start_of(
                fields.Datetime.to_datetime(group_by_value),
                group_by_modifier)
            group_by_value = pytz.timezone('UTC').localize(group_by_value)
            tz_info = None
            if field_type == 'datetime' and self._context.get('tz') in pytz.all_timezones:
                tz_info = self._context.get('tz')
                group_by_value = babel.dates.format_datetime(
                    group_by_value,
                    format=DISPLAY_DATE_FORMATS[group_by_modifier],
                    tzinfo=tz_info, locale=locale)
            else:
                group_by_value = babel.dates.format_date(
                    group_by_value,
                    format=DISPLAY_DATE_FORMATS[group_by_modifier],
                    locale=locale)

        if field_type == 'many2many' and isinstance(group_by_value, list):
            group_by_value = str(tuple(group_by_value)) or False

        record_values[group_by] = group_by_value
        record_values['__count'] = 1

    return records_values
def _get_membership_interval(self, product, date):
    """Get the interval to evaluate as the theoretical membership period.

    :param product: Product that defines the membership
    :param date: date object for the requested date to determine the
        variable period
    :return: A tuple with 2 date objects with the beginning and the end of
        the period
    """
    if product.membership_type == 'fixed':
        return super(AccountInvoiceLine, self)._get_membership_interval(product, date)
    if product.membership_interval_unit == 'days':
        raise exceptions.Warning(
            _("It's not possible to prorate daily periods."))
    if product.membership_interval_unit == 'weeks':
        date_from = date_utils.start_of(date, 'week')
    elif product.membership_interval_unit == 'months':
        date_from = date_utils.start_of(date, 'month')
    elif product.membership_interval_unit == 'years':
        date_from = date_utils.start_of(date, 'year')
    date_to = date_utils.subtract(product._get_next_date(date), days=1)
    return date_from, date_to
def _get_date_range(self):
    """ Return the date range for a production schedule depending on the
    manufacturing period and the number of columns to display specified by
    the user. It returns a list of tuples that contain the timestamps for
    each column.
    """
    self.ensure_one()
    date_range = []
    first_day = start_of(fields.Date.today(), self.manufacturing_period)
    for columns in range(self.manufacturing_period_to_display):
        last_day = end_of(first_day, self.manufacturing_period)
        date_range.append((first_day, last_day))
        first_day = add(last_day, days=1)
    return date_range
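# Worked example of the loop above for a 'month' period and 3 columns
# (dates illustrative): each column spans one full month.
from datetime import date, timedelta
from dateutil.relativedelta import relativedelta

first_day = date(2021, 5, 1)                                   # start_of(today, 'month')
date_range = []
for _column in range(3):
    last_day = first_day + relativedelta(months=1, days=-1)    # end_of(first_day, 'month')
    date_range.append((first_day, last_day))
    first_day = last_day + timedelta(days=1)                   # add(last_day, days=1)
# [(2021-05-01, 2021-05-31), (2021-06-01, 2021-06-30), (2021-07-01, 2021-07-31)]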
def _get_date_range(self):
    date_range = []
    first_day = start_of(fields.Date.today(), self.env.company.manufacturing_period)
    manufacturing_period = self.env.company.manufacturing_period_to_display
    week = (first_day.day // 7) + 1
    for i in range(week):
        first_day = first_day + relativedelta(days=-7)
        manufacturing_period = manufacturing_period + 1
    for columns in range(manufacturing_period):
        last_day = end_of(first_day, self.env.company.manufacturing_period)
        date_range.append((first_day, last_day))
        first_day = add(last_day, days=1)
    return date_range
def _compute_vat_period_date(self):
    self.ensure_one()
    date_to = self.tax_closing_end_date
    # Take the periodicity of tax report from the company and compute the starting period date.
    delay = self.company_id.account_tax_next_activity_type.delay_count - 1
    date_from = date_utils.start_of(date_to + relativedelta(months=-delay), 'month')
    options = {
        'date': {
            'date_from': date_from,
            'date_to': date_to,
            'filter': 'custom',
        }
    }
    report = self.env['account.generic.tax.report']
    return report._get_options(options)
def get_worked_hours(self, start_date, end_date):
    """ Return the list of datetimes between start_date and end_date, split at
    each hour boundary. It is used to build the dictionary of hours worked,
    with the days of the week as keys and the list of hours worked as values:
    dict = {day of the week: [hours worked]}
    """
    count = 1
    list_hours = [start_date]
    if end_date:
        while True:
            temp_date = date_utils.start_of(
                start_date, "hour") + datetime.timedelta(hours=count)
            if temp_date < end_date:
                list_hours.append(temp_date)
                count += 1
            else:
                break
        list_hours.append(end_date)
    return list_hours
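# Trace of the helper above (times illustrative): splitting 09:15 -> 11:30 at
# each hour boundary. start_of(start, 'hour') is 09:00, so the loop yields
# 10:00 and 11:00, then stops once 12:00 is no longer before the end.
import datetime

start = datetime.datetime(2021, 5, 14, 9, 15)
end = datetime.datetime(2021, 5, 14, 11, 30)
expected = [start,
            datetime.datetime(2021, 5, 14, 10, 0),
            datetime.datetime(2021, 5, 14, 11, 0),
            end]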
def compute_date_domain(r):
    date_range_type = r.date_range_type
    fn = r.date_range_field_id.name
    domain = []
    if fn and date_range_type:
        start = None
        end = None
        today = fields.Date.today()
        if date_range_type == 'late':
            domain = [(fn, '<', today.strftime('%Y-%m-%d'))]
        elif date_range_type == 'today':
            # domain = [(fn, '=', today.strftime('%Y-%m-%d'))]
            start = today
            end = today + relativedelta(days=1)
        elif date_range_type == 'yesterday':
            start = today + relativedelta(days=-1)
            end = today
        elif date_range_type == 'this_week':
            start = date_utils.start_of(today, 'week')
            end = start + relativedelta(days=7)
        elif date_range_type == 'last_week':
            start_this_week = date_utils.start_of(today, 'week')
            start = start_this_week - relativedelta(days=7)
            end = start_this_week
        elif date_range_type == 'this_month':
            start = date_utils.start_of(today, 'month')
            end = start + relativedelta(months=1)
        elif date_range_type == 'last_month':
            start_this_month = date_utils.start_of(today, 'month')
            start = start_this_month - relativedelta(months=1)
            end = start_this_month
        elif date_range_type == 'this_year':
            start = date_utils.start_of(today, 'year')
            end = start + relativedelta(years=1)
        elif date_range_type == 'last_year':
            start_this_year = date_utils.start_of(today, 'year')
            start = start_this_year - relativedelta(years=1)
            end = start_this_year
        if not domain:
            if start:
                domain = [
                    (fn, '>=', start.strftime('%Y-%m-%d')),
                ]
            if end:
                domain.append((fn, '<', end.strftime('%Y-%m-%d')))
    r.date_domain = domain
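# Example of the domain the compute above builds for 'this_month': a half-open
# [start, start + 1 month) range. The field name 'date_deadline' and the date
# are illustrative placeholders, not taken from the snippet.
from datetime import date
from dateutil.relativedelta import relativedelta

today = date(2021, 5, 14)
start = today.replace(day=1)                  # date_utils.start_of(today, 'month')
end = start + relativedelta(months=1)
domain = [('date_deadline', '>=', start.strftime('%Y-%m-%d')),
          ('date_deadline', '<', end.strftime('%Y-%m-%d'))]
# [('date_deadline', '>=', '2021-05-01'), ('date_deadline', '<', '2021-06-01')]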
def _create_edit_tax_reminder(self, values=None):
    # Create/Edit activity type if needed
    if self._context.get('no_create_move', False):
        return self.env['account.move']
    if not values:
        values = {}
    company = values.get('company_id', False) or self.company_id or self.env.company
    move_res_model_id = self.env['ir.model'].search(
        [('model', '=', 'account.move')], limit=1).id
    activity_type = company.account_tax_next_activity_type or False
    vals = {
        'category': 'tax_report',
        'delay_count': values.get('account_tax_periodicity', company.account_tax_periodicity) == 'monthly' and 1 or 3,
        'delay_unit': 'months',
        'delay_from': 'previous_activity',
        'res_model_id': move_res_model_id,
        'force_next': False,
        'summary': _('Periodic Tax Return'),
    }
    if not activity_type:
        vals['name'] = _('Tax Report for company %s') % (company.name,)
        activity_type = self.env['mail.activity.type'].create(vals)
        company.account_tax_next_activity_type = activity_type
    else:
        activity_type.write(vals)

    # search for an existing reminder for given journal and change its date
    account_tax_periodicity_journal_id = values.get(
        'account_tax_periodicity_journal_id', company.account_tax_periodicity_journal_id)
    date = values.get('account_tax_periodicity_next_deadline', False)
    if not date:
        date = date_utils.end_of(fields.Date.today(), "quarter") + relativedelta(
            days=company.account_tax_periodicity_reminder_day)
    end_date_last_month = date_utils.end_of(date + relativedelta(months=-1), 'month')
    move_id = self.env['account.move'].search(
        [('state', '=', 'draft'),
         ('is_tax_closing', '=', True),
         ('journal_id', '=', account_tax_periodicity_journal_id.id),
         ('activity_ids.activity_type_id', '=', activity_type.id),
         ('date', '<=', end_date_last_month),
         ('date', '>=', date_utils.start_of(
             end_date_last_month + relativedelta(months=-vals['delay_count']), 'month'))],
        limit=1)
    # Create empty move
    if activity_type.delay_count == 1:
        formatted_date = format_date(self.env, end_date_last_month, date_format='LLLL')
    else:
        formatted_date = format_date(self.env, end_date_last_month, date_format='qqq')
    if len(move_id):
        for act in move_id.activity_ids:
            if act.activity_type_id == activity_type:
                act.write({'date_deadline': date})
        move_id.date = end_date_last_month
        move_id.ref = _('Tax Return for %s') % (formatted_date,)
    else:
        move_id = self.env['account.move'].create({
            'journal_id': account_tax_periodicity_journal_id.id,
            'date': end_date_last_month,
            'is_tax_closing': True,
            'ref': _('Tax Return for %s') % (formatted_date,),
        })
        advisor_user = self.env['res.users'].search(
            [('company_ids', 'in', (company.id,)),
             ('groups_id', 'in', self.env.ref('account.group_account_manager').ids)],
            limit=1, order="id ASC")
        activity_vals = {
            'res_id': move_id.id,
            'res_model_id': move_res_model_id,
            'activity_type_id': activity_type.id,
            'summary': _('TAX Report'),
            'date_deadline': date,
            'automated': True,
            'user_id': advisor_user.id or self.env.user.id,
        }
        self.env['mail.activity'].with_context(
            mail_activity_quick_update=True).create(activity_vals)
    return move_id
from odoo.fields import Datetime

today = Datetime.today()
# today = datetime.strptime('2019-03-29 01:53:48', misc.DEFAULT_SERVER_DATETIME_FORMAT)
# Suppose the current time is 2019-03-29 01:53:48

date_utils.get_month(today)           # (datetime.datetime(2019, 3, 1, 0, 0), datetime.datetime(2019, 3, 31, 0, 0))
date_utils.get_quarter(today)         # (datetime.datetime(2019, 1, 1, 0, 0), datetime.datetime(2019, 3, 31, 0, 0))
date_utils.get_quarter_number(today)  # 1
date_utils.get_fiscal_year(today)     # (datetime.datetime(2019, 1, 1, 0, 0), datetime.datetime(2019, 12, 31, 0, 0))

date_utils.start_of(today, 'hour')     # 2019-03-29 01:00:00
date_utils.start_of(today, 'day')      # 2019-03-29 00:00:00
date_utils.start_of(today, 'week')     # 2019-03-25 00:00:00
date_utils.start_of(today, 'month')    # 2019-03-01 00:00:00
date_utils.start_of(today, 'quarter')  # 2019-01-01 00:00:00
date_utils.start_of(today, 'year')     # 2019-01-01 00:00:00

date_utils.end_of(today, 'hour')       # 2019-03-29 01:59:59.999999
date_utils.end_of(today, 'day')
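# A standalone approximation of start_of/end_of for a few of the granularities
# shown above, using only the standard library (a sketch, not the Odoo
# implementation); same illustrative timestamp.
from datetime import datetime, time, timedelta

dt = datetime(2019, 3, 29, 1, 53, 48)

start_of_day = datetime.combine(dt.date(), time.min)          # 2019-03-29 00:00:00
start_of_week = start_of_day - timedelta(days=dt.weekday())   # 2019-03-25 00:00:00 (Monday)
start_of_month = start_of_day.replace(day=1)                  # 2019-03-01 00:00:00
end_of_day = datetime.combine(dt.date(), time.max)            # 2019-03-29 23:59:59.999999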
def get_journal_dashboard_datas(self):
    currency = self.env.user.company_id.currency_id
    today = fields.Date.today()
    month = date_utils.start_of(today, 'month')
    company_id = self.env.user.company_id.id

    sql_quotation = '''SELECT total
                       FROM sale_order
                       WHERE state = %s
                         AND date_order >= %s
                         AND company_id = %s;
                    '''
    self.env.cr.execute(sql_quotation, ('draft', month, company_id))
    quotation_result = self.env.cr.dictfetchall()
    (quotation_count, quotation_sum) = self._sale_sum_results(quotation_result)

    sql_sale = '''SELECT total
                  FROM sale_order
                  WHERE state IN %s
                    AND date_order >= %s
                    AND company_id = %s;
               '''
    self.env.cr.execute(sql_sale, (('sale', 'done'), month, company_id))
    sale_result = self.env.cr.dictfetchall()
    (sale_count, sale_sum) = self._sale_sum_results(sale_result)

    sql_invoice = '''SELECT total
                     FROM sale_order
                     WHERE invoice_status = %s
                       AND date_order >= %s
                       AND company_id = %s;
                  '''
    self.env.cr.execute(sql_invoice, ('to invoice', month, company_id))
    invoice_result = self.env.cr.dictfetchall()
    (invoice_count, invoice_sum) = self._sale_sum_results(invoice_result)

    sql_collection = '''SELECT amount
                        FROM account_payment
                        WHERE partner_type = %s
                          AND payment_date >= %s;
                     '''
    self.env.cr.execute(sql_collection, ('customer', month))
    collection_result = self.env.cr.dictfetchall()
    (collection_count, collection_sum) = self._collection_sum_results(collection_result)

    sql_salesperson = '''SELECT rp.name,
                                round(cast(SUM(so.total) as numeric), 2) as total,
                                COUNT(so.total) as count
                         FROM sale_order as so, res_users as ru, res_partner as rp
                         WHERE so.state IN %s
                           AND so.date_order >= %s
                           AND so.company_id = %s
                           AND so.user_id = ru.id
                           AND ru.partner_id = rp.id
                         GROUP BY rp.name
                         ORDER BY total desc
                         LIMIT 6;
                      '''
    self.env.cr.execute(sql_salesperson, (('sale', 'done'), month, company_id))
    salesperson_result = self.env.cr.dictfetchall()

    sql_salesteam = '''SELECT ct.name,
                              round(cast(SUM(so.total) as numeric), 2) as total,
                              COUNT(so.total) as count
                       FROM sale_order as so, crm_team as ct
                       WHERE so.state IN %s
                         AND so.date_order >= %s
                         AND so.company_id = %s
                         AND so.team_id = ct.id
                       GROUP BY ct.name
                       ORDER BY total desc
                       LIMIT 6;
                    '''
    self.env.cr.execute(sql_salesteam, (('sale', 'done'), month, company_id))
    salesteam_result = self.env.cr.dictfetchall()

    return {
        'sum_sale_order': formatLang(self.env, currency.round(sale_sum) + 0.0, currency_obj=currency),
        'sale_title': '本月销售订单统计',  # "Sales orders this month"
        'sale_count': sale_count,
        'title_quotation': '本月报价单统计',  # "Quotations this month"
        'quotation_sum': formatLang(self.env, currency.round(quotation_sum) + 0.0, currency_obj=currency),
        'quotation_count': quotation_count,
        'invoice_title': '本月待开发票统计',  # "Orders to invoice this month"
        'invoice_count': invoice_count,
        'invoice_sum': formatLang(self.env, currency.round(invoice_sum) + 0.0, currency_obj=currency),
        'collection_title': '本月回款统计',  # "Payments collected this month"
        'collection_count': collection_count,
        'collection_sum': formatLang(self.env, currency.round(collection_sum) + 0.0, currency_obj=currency),
        'salesperson_result': salesperson_result,
        'salesteam_result': salesteam_result,
    }
def portal_my_timesheets(self, page=1, sortby=None, filterby=None, search=None,
                         search_in='all', groupby='none', **kw):
    Timesheet_sudo = request.env['account.analytic.line'].sudo()
    values = self._prepare_portal_layout_values()
    domain = request.env['account.analytic.line']._timesheet_get_portal_domain()
    _items_per_page = 100

    searchbar_sortings = {
        'date': {'label': _('Newest'), 'order': 'date desc'},
        'name': {'label': _('Description'), 'order': 'name'},
    }
    searchbar_inputs = self._get_searchbar_inputs()
    searchbar_groupby = self._get_searchbar_groupby()

    today = fields.Date.today()
    quarter_start, quarter_end = date_utils.get_quarter(today)
    last_week = today + relativedelta(weeks=-1)
    last_month = today + relativedelta(months=-1)
    last_year = today + relativedelta(years=-1)

    searchbar_filters = {
        'all': {'label': _('All'), 'domain': []},
        'today': {'label': _('Today'), 'domain': [("date", "=", today)]},
        'week': {'label': _('This week'),
                 'domain': [('date', '>=', date_utils.start_of(today, "week")),
                            ('date', '<=', date_utils.end_of(today, 'week'))]},
        'month': {'label': _('This month'),
                  'domain': [('date', '>=', date_utils.start_of(today, 'month')),
                             ('date', '<=', date_utils.end_of(today, 'month'))]},
        'year': {'label': _('This year'),
                 'domain': [('date', '>=', date_utils.start_of(today, 'year')),
                            ('date', '<=', date_utils.end_of(today, 'year'))]},
        'quarter': {'label': _('This Quarter'),
                    'domain': [('date', '>=', quarter_start),
                               ('date', '<=', quarter_end)]},
        'last_week': {'label': _('Last week'),
                      'domain': [('date', '>=', date_utils.start_of(last_week, "week")),
                                 ('date', '<=', date_utils.end_of(last_week, 'week'))]},
        'last_month': {'label': _('Last month'),
                       'domain': [('date', '>=', date_utils.start_of(last_month, 'month')),
                                  ('date', '<=', date_utils.end_of(last_month, 'month'))]},
        'last_year': {'label': _('Last year'),
                      'domain': [('date', '>=', date_utils.start_of(last_year, 'year')),
                                 ('date', '<=', date_utils.end_of(last_year, 'year'))]},
    }
    # default sort by value
    if not sortby:
        sortby = 'date'
    order = searchbar_sortings[sortby]['order']
    # default filter by value
    if not filterby:
        filterby = 'all'
    domain = AND([domain, searchbar_filters[filterby]['domain']])

    if search and search_in:
        domain += self._get_search_domain(search_in, search)

    timesheet_count = Timesheet_sudo.search_count(domain)
    # pager
    pager = portal_pager(
        url="/my/timesheets",
        url_args={'sortby': sortby, 'search_in': search_in, 'search': search,
                  'filterby': filterby, 'groupby': groupby},
        total=timesheet_count,
        page=page,
        step=_items_per_page)

    def get_timesheets():
        groupby_mapping = self._get_groupby_mapping()
        field = groupby_mapping.get(groupby, None)
        orderby = '%s, %s' % (field, order) if field else order
        timesheets = Timesheet_sudo.search(
            domain, order=orderby, limit=_items_per_page, offset=pager['offset'])
        if field:
            if groupby == 'date':
                time_data = Timesheet_sudo.read_group(
                    domain, ['date', 'unit_amount:sum'], ['date:day'])
                mapped_time = dict([
                    (datetime.strptime(m['date:day'], '%d %b %Y').date(), m['unit_amount'])
                    for m in time_data
                ])
                grouped_timesheets = [
                    (Timesheet_sudo.concat(*g), mapped_time[k])
                    for k, g in groupbyelem(timesheets, itemgetter('date'))
                ]
            else:
                time_data = Timesheet_sudo.read_group(
                    domain, [field, 'unit_amount:sum'], [field])
                mapped_time = dict([
                    (m[field][0] if m[field] else False, m['unit_amount'])
                    for m in time_data
                ])
                grouped_timesheets = [
                    (Timesheet_sudo.concat(*g), mapped_time[k.id])
                    for k, g in groupbyelem(timesheets, itemgetter(field))
                ]
            return timesheets, grouped_timesheets

        grouped_timesheets = [
            (timesheets, sum(Timesheet_sudo.search(domain).mapped('unit_amount')))
        ] if timesheets else []
        return timesheets, grouped_timesheets

    timesheets, grouped_timesheets = get_timesheets()

    values.update({
        'timesheets': timesheets,
        'grouped_timesheets': grouped_timesheets,
        'page_name': 'timesheet',
        'default_url': '/my/timesheets',
        'pager': pager,
        'searchbar_sortings': searchbar_sortings,
        'search_in': search_in,
        'search': search,
        'sortby': sortby,
        'groupby': groupby,
        'searchbar_inputs': searchbar_inputs,
        'searchbar_groupby': searchbar_groupby,
        'searchbar_filters': OrderedDict(sorted(searchbar_filters.items())),
        'filterby': filterby,
        'is_uom_day': request.env['account.analytic.line']._is_timesheet_encode_uom_day(),
    })
    return request.render("hr_timesheet.portal_my_timesheets", values)
def online_sync_bank_statement(self, transactions, journal, ending_balance):
    """ Build bank statements from a list of transactions; messages are also
    posted on the online_account of the journal.

    :param transactions: A list of transactions that will be created in the
        new bank statement. The format is:
        [{
            'id': online id, (unique ID for the transaction)
            'date': transaction date, (The date of the transaction)
            'name': transaction description, (The description)
            'amount': transaction amount, (The amount of the transaction.
                Negative for debit, positive for credit)
            'partner_id': optional field used to define the partner
            'online_partner_vendor_name': optional field used to store
                information on the statement line under the
                online_partner_vendor_name field (typically information coming
                from plaid/yodlee). This is used to find the partner for the
                next statements.
            'online_partner_bank_account': optional field used to store
                information on the statement line under the
                online_partner_bank_account field (typically information
                coming from plaid/yodlee). This is used to find the partner
                for the next statements.
        }, ...]
    :param journal: The journal (account.journal) of the new bank statement
    :param ending_balance: ending balance on the account

    Return: The number of imported transactions for the journal
    """
    # Since the synchronization succeeded, set it as the bank_statements_source of the journal
    journal.sudo().write({'bank_statements_source': 'online_sync'})
    if not len(transactions):
        return 0

    transactions_identifiers = [line['online_identifier'] for line in transactions]
    existing_transactions_ids = self.env['account.bank.statement.line'].search([
        ('online_identifier', 'in', transactions_identifiers),
        ('journal_id', '=', journal.id)
    ])
    existing_transactions = [t.online_identifier for t in existing_transactions_ids]

    sorted_transactions = sorted(transactions, key=lambda l: l['date'])
    min_date = date_utils.start_of(sorted_transactions[0]['date'], 'month')
    if journal.bank_statement_creation == 'week':
        # key is not always the first of month
        weekday = min_date.weekday()
        min_date = date_utils.subtract(min_date, days=weekday)
    max_date = sorted_transactions[-1]['date']
    total = sum([t['amount'] for t in sorted_transactions])

    statements_in_range = self.search([('date', '>=', min_date),
                                       ('journal_id', '=', journal.id)])

    # For first synchronization, an opening bank statement is created to fill the missing bank statements
    all_statement = self.search_count([('journal_id', '=', journal.id)])
    digits_rounding_precision = journal.currency_id.rounding if journal.currency_id else journal.company_id.currency_id.rounding
    if all_statement == 0 and not float_is_zero(ending_balance - total, precision_rounding=digits_rounding_precision):
        opening_transaction = [(0, 0, {
            'date': date_utils.subtract(min_date, days=1),
            'payment_ref': _("Opening statement: first synchronization"),
            'amount': ending_balance - total,
        })]
        statement = self.create({
            'name': _('Opening statement'),
            'date': date_utils.subtract(min_date, days=1),
            'line_ids': opening_transaction,
            'journal_id': journal.id,
            'balance_end_real': ending_balance - total,
        })
        statement.button_post()

    transactions_in_statements = []
    statement_to_reset_to_draft = self.env['account.bank.statement']
    transactions_to_create = {}
    number_added = 0

    for transaction in sorted_transactions:
        if transaction['online_identifier'] in existing_transactions:
            continue
        line = transaction.copy()
        number_added += 1
        if journal.bank_statement_creation == 'day':
            # key is full date
            key = transaction['date']
        elif journal.bank_statement_creation == 'week':
            # key is first day of the week
            weekday = transaction['date'].weekday()
            key = date_utils.subtract(transaction['date'], days=weekday)
        elif journal.bank_statement_creation == 'bimonthly':
            if transaction['date'].day >= 15:
                # key is the 15 of that month
                key = transaction['date'].replace(day=15)
            else:
                # key is the first of the month
                key = date_utils.start_of(transaction['date'], 'month')
            # key is year-month-0 or year-month-1
        elif journal.bank_statement_creation == 'month':
            # key is first of the month
            key = date_utils.start_of(transaction['date'], 'month')
        else:
            # key is last date of transactions fetched
            key = max_date

        # Decide if we have to update an existing statement or create a new one with this line
        stmt = statements_in_range.filtered(lambda x: x.date == key)
        if stmt and stmt[0].id:
            line['statement_id'] = stmt[0].id
            transactions_in_statements.append(line)
            statement_to_reset_to_draft += stmt[0]
        else:
            if not transactions_to_create.get(key):
                transactions_to_create[key] = []
            transactions_to_create[key].append((0, 0, line))

    # Create the lines that should be inside an existing bank statement and reset those stmt in draft
    if len(transactions_in_statements):
        for st in statement_to_reset_to_draft:
            if st.state == 'confirm':
                st.message_post(body=_('Statement has been reset to draft because some transactions from online synchronization were added to it.'))
                st.state = 'posted'
        posted_statements = statement_to_reset_to_draft.filtered(lambda st: st.state == 'posted')
        posted_statements.state = 'open'
        statement_lines = self.env['account.bank.statement.line'].create(transactions_in_statements)
        posted_statements.state = 'posted'

        # Post only the newly created statement lines if the related statement is already posted.
        statement_lines.filtered(lambda line: line.statement_id.state == 'posted')\
            .mapped('move_id')\
            .with_context(skip_account_move_synchronization=True)\
            ._post()

        # Recompute the balance_end_real of the first statement where we added line
        # because adding line don't trigger a recompute and balance_end_real is not updated.
        # We only trigger the recompute on the first element of the list as it is the one
        # the most in the past and this will trigger the recompute of all the statements
        # that are next.
        statement_to_reset_to_draft[0]._compute_ending_balance()

    # Create lines inside new bank statements
    st_vals_list = []
    for date, lines in transactions_to_create.items():
        # balance_start and balance_end_real will be computed automatically
        name = _('Online synchronization of %s') % (date,)
        if journal.bank_statement_creation in ('bimonthly', 'week', 'month'):
            name = _('Online synchronization from %s to %s')
            end_date = date
            if journal.bank_statement_creation == 'month':
                end_date = date_utils.end_of(date, 'month')
            elif journal.bank_statement_creation == 'week':
                end_date = date_utils.add(date, days=6)
            elif journal.bank_statement_creation == 'bimonthly':
                if end_date.day == 1:
                    end_date = date.replace(day=14)
                else:
                    end_date = date_utils.end_of(date, 'month')
            name = name % (date, end_date)
        st_vals_list.append({
            'name': name,
            'date': date,
            'line_ids': lines,
            'journal_id': journal.id
        })

    statements = self.env['account.bank.statement'].create(st_vals_list)
    statements.button_post()

    # write account balance on the last statement of the journal
    # That way if there are missing transactions, it will show in the last statement
    # and the day missing transactions are fetched or manually written, everything will be corrected
    last_bnk_stmt = self.search([('journal_id', '=', journal.id)], limit=1)
    if last_bnk_stmt:
        last_bnk_stmt.balance_end_real = ending_balance
        if last_bnk_stmt.state == 'posted' and last_bnk_stmt.balance_end != last_bnk_stmt.balance_end_real:
            last_bnk_stmt.button_reopen()

    # Set last sync date as the last transaction date
    journal.account_online_journal_id.sudo().write({'last_sync': max_date})
    return number_added
def read_progress_bar(self, domain, group_by, progress_bar):
    """
    Gets the data needed for all the kanban column progressbars.
    These are fetched alongside read_group operation.

    :param domain - the domain used in the kanban view to filter records
    :param group_by - the name of the field used to group records into
                      kanban columns
    :param progress_bar - the <progressbar/> declaration attributes
                          (field, colors, sum)
    :return a dictionary mapping group_by values to dictionaries mapping
            progress bar field values to the related number of records
    """
    # Workaround to match read_group's infrastructure
    # TO DO in master: harmonize this function and readgroup to allow factorization
    group_by_modifier = group_by.partition(':')[2] or 'month'
    group_by = group_by.partition(':')[0]
    display_date_formats = {
        'day': 'dd MMM yyyy',
        'week': "'W'w YYYY",
        'month': 'MMMM yyyy',
        'quarter': 'QQQ yyyy',
        'year': 'yyyy',
    }

    records_values = self.search_read(domain or [], [progress_bar['field'], group_by])

    data = {}
    field_type = self._fields[group_by].type
    if field_type == 'selection':
        selection_labels = dict(self.fields_get()[group_by]['selection'])

    for record_values in records_values:
        group_by_value = record_values[group_by]

        # Again, imitating what _read_group_format_result and _read_group_prepare_data do
        if group_by_value and field_type in ['date', 'datetime']:
            locale = get_lang(self.env).code
            group_by_value = date_utils.start_of(
                fields.Datetime.to_datetime(group_by_value),
                group_by_modifier)
            group_by_value = pytz.timezone('UTC').localize(group_by_value)
            tz_info = None
            if field_type == 'datetime' and self._context.get('tz') in pytz.all_timezones:
                tz_info = self._context.get('tz')
                group_by_value = babel.dates.format_datetime(
                    group_by_value,
                    format=display_date_formats[group_by_modifier],
                    tzinfo=tz_info, locale=locale)
            else:
                group_by_value = babel.dates.format_date(
                    group_by_value,
                    format=display_date_formats[group_by_modifier],
                    locale=locale)

        if field_type == 'selection':
            group_by_value = selection_labels[group_by_value] \
                if group_by_value in selection_labels else False

        if type(group_by_value) == tuple:
            group_by_value = group_by_value[1]  # FIXME should use technical value (0)

        if group_by_value not in data:
            data[group_by_value] = {}
            for key in progress_bar['colors']:
                data[group_by_value][key] = 0

        field_value = record_values[progress_bar['field']]
        if field_value in data[group_by_value]:
            data[group_by_value][field_value] += 1

    return data
def _online_sync_bank_statement(self, transactions, online_account):
    """ Build bank statements from a list of transactions; messages are also
    posted on the online_account of the journal.

    :param transactions: A list of transactions that will be created in the
        new bank statement. The format is:
        [{
            'id': online id, (unique ID for the transaction)
            'date': transaction date, (The date of the transaction)
            'name': transaction description, (The description)
            'amount': transaction amount, (The amount of the transaction.
                Negative for debit, positive for credit)
            'online_partner_information': optional field used to store
                information on the statement line under the
                online_partner_information field (typically information coming
                from plaid/yodlee). This is used to find the partner for the
                next statements.
        }, ...]
    :param online_account: The online account for this statement

    Return: The created bank statement lines (to reconcile)
    """
    line_to_reconcile = self.env['account.bank.statement.line']
    for journal in online_account.journal_ids:
        # Since the synchronization succeeded, set it as the bank_statements_source of the journal
        journal.sudo().write({'bank_statements_source': 'online_sync'})
        if not transactions:
            continue

        transactions_identifiers = [line['online_transaction_identifier'] for line in transactions]
        existing_transactions_ids = self.env['account.bank.statement.line'].search([
            ('online_transaction_identifier', 'in', transactions_identifiers),
            ('journal_id', '=', journal.id)
        ])
        existing_transactions = [t.online_transaction_identifier for t in existing_transactions_ids]

        transactions_partner_information = []
        for transaction in transactions:
            transaction['date'] = fields.Date.from_string(transaction['date'])
            if transaction.get('online_partner_information'):
                transactions_partner_information.append(transaction['online_partner_information'])

        if transactions_partner_information:
            self._cr.execute(
                """
                SELECT p.online_partner_information, p.id FROM res_partner p
                WHERE p.online_partner_information IN %s
                """, [tuple(transactions_partner_information)])
            partner_id_per_information = dict(self._cr.fetchall())
        else:
            partner_id_per_information = {}

        sorted_transactions = sorted(transactions, key=lambda l: l['date'])
        min_date = date_utils.start_of(sorted_transactions[0]['date'], 'month')
        if journal.bank_statement_creation_groupby == 'week':
            # key is not always the first of month
            weekday = min_date.weekday()
            min_date = date_utils.subtract(min_date, days=weekday)
        max_date = sorted_transactions[-1]['date']
        total = sum([t['amount'] for t in sorted_transactions])

        statements_in_range = self.search([('date', '>=', min_date),
                                           ('journal_id', '=', journal.id)])

        # For first synchronization, an opening bank statement is created to fill the missing bank statements
        all_statement = self.search_count([('journal_id', '=', journal.id)])
        digits_rounding_precision = journal.currency_id.rounding if journal.currency_id else journal.company_id.currency_id.rounding
        # If there are neither statement and the ending balance != 0, we create an opening bank statement
        if all_statement == 0 and not float_is_zero(online_account.balance - total, precision_rounding=digits_rounding_precision):
            opening_transaction = [(0, 0, {
                'date': date_utils.subtract(min_date, days=1),
                'payment_ref': _("Opening statement: first synchronization"),
                'amount': online_account.balance - total,
            })]
            op_stmt = self.create({
                'date': date_utils.subtract(min_date, days=1),
                'line_ids': opening_transaction,
                'journal_id': journal.id,
                'balance_end_real': online_account.balance - total,
            })
            op_stmt.button_post()
            line_to_reconcile += op_stmt.mapped('line_ids')

        transactions_in_statements = []
        statement_to_reset_to_draft = self.env['account.bank.statement']
        transactions_to_create = {}

        for transaction in sorted_transactions:
            if transaction['online_transaction_identifier'] in existing_transactions:
                continue  # Do nothing if the transaction already exists
            line = transaction.copy()
            line['online_account_id'] = online_account.id
            if journal.bank_statement_creation_groupby == 'day':
                # key is full date
                key = transaction['date']
            elif journal.bank_statement_creation_groupby == 'week':
                # key is first day of the week
                weekday = transaction['date'].weekday()
                key = date_utils.subtract(transaction['date'], days=weekday)
            elif journal.bank_statement_creation_groupby == 'bimonthly':
                if transaction['date'].day >= 15:
                    # key is the 15 of that month
                    key = transaction['date'].replace(day=15)
                else:
                    # key is the first of the month
                    key = date_utils.start_of(transaction['date'], 'month')
                # key is year-month-0 or year-month-1
            elif journal.bank_statement_creation_groupby == 'month':
                # key is first of the month
                key = date_utils.start_of(transaction['date'], 'month')
            else:
                # key is last date of transactions fetched
                key = max_date

            # Find partner id if exists
            if line.get('online_partner_information'):
                partner_info = line['online_partner_information']
                if partner_id_per_information.get(partner_info):
                    line['partner_id'] = partner_id_per_information[partner_info]

            # Decide if we have to update an existing statement or create a new one with this line
            stmt = statements_in_range.filtered(lambda x: x.date == key)
            if stmt:
                line['statement_id'] = stmt[0].id
                transactions_in_statements.append(line)
                statement_to_reset_to_draft += stmt[0]
            else:
                if not transactions_to_create.get(key):
                    transactions_to_create[key] = []
                transactions_to_create[key].append((0, 0, line))

        # Create the lines that should be inside an existing bank statement and reset those stmt in draft
        if transactions_in_statements:
            for st in statement_to_reset_to_draft:
                if st.state != 'open':
                    st.message_post(body=_('Statement has been reset to draft because some transactions from online synchronization were added to it.'))
            statement_to_reset_to_draft.write({'state': 'open'})
            line_to_reconcile += self.env['account.bank.statement.line'].create(transactions_in_statements)

            # Recompute the balance_end_real of the first statement where we added line
            # because adding line don't trigger a recompute and balance_end_real is not updated.
            # We only trigger the recompute on the first element of the list as it is the one
            # the most in the past and this will trigger the recompute of all the statements
            # that are next.
            statement_to_reset_to_draft[0]._compute_ending_balance()

        # Create lines inside new bank statements
        created_stmts = self.env['account.bank.statement']
        for date, lines in transactions_to_create.items():
            # balance_start and balance_end_real will be computed automatically
            if journal.bank_statement_creation_groupby in ('bimonthly', 'week', 'month'):
                end_date = date
                if journal.bank_statement_creation_groupby == 'month':
                    end_date = date_utils.end_of(date, 'month')
                elif journal.bank_statement_creation_groupby == 'week':
                    end_date = date_utils.add(date, days=6)
                elif journal.bank_statement_creation_groupby == 'bimonthly':
                    if end_date.day == 1:
                        end_date = date.replace(day=14)
                    else:
                        end_date = date_utils.end_of(date, 'month')
            created_stmts += self.env['account.bank.statement'].create({
                'date': date,
                'line_ids': lines,
                'journal_id': journal.id,
            })

        created_stmts.button_post()
        line_to_reconcile += created_stmts.mapped('line_ids')

        # write account balance on the last statement of the journal
        # That way if there are missing transactions, it will show in the last statement
        # and the day missing transactions are fetched or manually written, everything will be corrected
        last_bnk_stmt = self.search([('journal_id', '=', journal.id)], limit=1)
        if last_bnk_stmt and (created_stmts or transactions_in_statements):
            last_bnk_stmt.balance_end_real = online_account.balance

        # Set last sync date as the last transaction date
        journal.account_online_account_id.sudo().write({'last_sync': max_date})
    return line_to_reconcile
def portal_my_timesheets(self, page=1, sortby=None, filterby=None, search=None,
                         search_in='all', groupby='project', **kw):
    Timesheet_sudo = request.env['account.analytic.line'].sudo()
    values = self._prepare_portal_layout_values()
    domain = request.env['account.analytic.line']._timesheet_get_portal_domain()

    searchbar_sortings = {
        'date': {'label': _('Newest'), 'order': 'date desc'},
        'name': {'label': _('Name'), 'order': 'name'},
    }
    searchbar_inputs = {
        'all': {'input': 'all', 'label': _('Search in All')},
    }
    searchbar_groupby = {
        'none': {'input': 'none', 'label': _('None')},
        'project': {'input': 'project', 'label': _('Project')},
    }

    today = fields.Date.today()
    quarter_start, quarter_end = date_utils.get_quarter(today)
    last_week = today + relativedelta(weeks=-1)
    last_month = today + relativedelta(months=-1)
    last_year = today + relativedelta(years=-1)

    searchbar_filters = {
        'all': {'label': _('All'), 'domain': []},
        'today': {'label': _('Today'), 'domain': [("date", "=", today)]},
        'week': {'label': _('This week'),
                 'domain': [('date', '>=', date_utils.start_of(today, "week")),
                            ('date', '<=', date_utils.end_of(today, 'week'))]},
        'month': {'label': _('This month'),
                  'domain': [('date', '>=', date_utils.start_of(today, 'month')),
                             ('date', '<=', date_utils.end_of(today, 'month'))]},
        'year': {'label': _('This year'),
                 'domain': [('date', '>=', date_utils.start_of(today, 'year')),
                            ('date', '<=', date_utils.end_of(today, 'year'))]},
        'quarter': {'label': _('This Quarter'),
                    'domain': [('date', '>=', quarter_start),
                               ('date', '<=', quarter_end)]},
        'last_week': {'label': _('Last week'),
                      'domain': [('date', '>=', date_utils.start_of(last_week, "week")),
                                 ('date', '<=', date_utils.end_of(last_week, 'week'))]},
        'last_month': {'label': _('Last month'),
                       'domain': [('date', '>=', date_utils.start_of(last_month, 'month')),
                                  ('date', '<=', date_utils.end_of(last_month, 'month'))]},
        'last_year': {'label': _('Last year'),
                      'domain': [('date', '>=', date_utils.start_of(last_year, 'year')),
                                 ('date', '<=', date_utils.end_of(last_year, 'year'))]},
    }
    # default sort by value
    if not sortby:
        sortby = 'date'
    order = searchbar_sortings[sortby]['order']
    # default filter by value
    if not filterby:
        filterby = 'all'
    domain = AND([domain, searchbar_filters[filterby]['domain']])

    if search and search_in:
        domain = AND([domain, [('name', 'ilike', search)]])

    timesheet_count = Timesheet_sudo.search_count(domain)
    # pager
    pager = portal_pager(
        url="/my/timesheets",
        url_args={'sortby': sortby, 'search_in': search_in, 'search': search, 'filterby': filterby},
        total=timesheet_count,
        page=page,
        step=self._items_per_page
    )

    if groupby == 'project':
        order = "project_id, %s" % order
    timesheets = Timesheet_sudo.search(domain, order=order, limit=self._items_per_page, offset=pager['offset'])
    if groupby == 'project':
        grouped_timesheets = [Timesheet_sudo.concat(*g)
                              for k, g in groupbyelem(timesheets, itemgetter('project_id'))]
    else:
        grouped_timesheets = [timesheets]

    values.update({
        'timesheets': timesheets,
        'grouped_timesheets': grouped_timesheets,
        'page_name': 'timesheet',
        'default_url': '/my/timesheets',
        'pager': pager,
        'searchbar_sortings': searchbar_sortings,
        'search_in': search_in,
        'sortby': sortby,
        'groupby': groupby,
        'searchbar_inputs': searchbar_inputs,
        'searchbar_groupby': searchbar_groupby,
        'searchbar_filters': OrderedDict(sorted(searchbar_filters.items())),
        'filterby': filterby,
    })
    return request.render("hr_timesheet.portal_my_timesheets", values)
def _last_month():
    """ Return the last day of the previous month. """
    today = fields.Date.today()
    first = date_utils.start_of(today, 'month')
    return first - timedelta(days=1)
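# Quick check of the helper above (date illustrative): the first day of the
# current month minus one day is the last day of the previous month.
from datetime import date, timedelta

today = date(2021, 5, 14)
first = today.replace(day=1)          # date_utils.start_of(today, 'month')
assert first - timedelta(days=1) == date(2021, 4, 30)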