def _onchange_employee(self):
    """Refresh payslip data (contract, structure, name, worked days) when the
    employee or the covered period changes.

    Does nothing until employee, date_from and date_to are all set.
    """
    if (not self.employee_id) or (not self.date_from) or (not self.date_to):
        return
    employee = self.employee_id
    date_from = self.date_from
    date_to = self.date_to
    self.company_id = employee.company_id
    if not self.contract_id or self.employee_id != self.contract_id.employee_id:
        # Add a default contract if not already defined
        contracts = employee._get_contracts(date_from, date_to)
        if not contracts or not contracts[0].structure_type_id.default_struct_id:
            # No usable contract/structure: clear everything and stop.
            self.contract_id = False
            self.struct_id = False
            return
        self.contract_id = contracts[0]
        self.struct_id = contracts[0].structure_type_id.default_struct_id
    payslip_name = self.struct_id.payslip_name or _('Salary Slip')
    self.name = '%s - %s - %s' % (
        payslip_name,
        self.employee_id.name or '',
        format_date(self.env, self.date_from, date_format="MMMM y"))
    if date_to > date_utils.end_of(fields.Date.today(), 'month'):
        # BUGFIX: interpolate AFTER translation lookup; _("..." % args) would
        # look up the already-formatted string and never find a translation.
        self.warning_message = _(
            "This payslip can be erroneous! Work entries may not be generated for the period from %s to %s."
        ) % (date_utils.add(date_utils.end_of(fields.Date.today(), 'month'), days=1), date_to)
    else:
        self.warning_message = False
    self.worked_days_line_ids = self._get_new_worked_days_lines()
def _onchange_employee(self):
    """Refresh payslip data (contract, structure, name, worked days and
    other inputs) when the employee or the covered period changes.

    Does nothing until employee, date_from and date_to are all set.
    """
    if (not self.employee_id) or (not self.date_from) or (not self.date_to):
        return
    employee = self.employee_id
    date_from = self.date_from
    date_to = self.date_to
    self.company_id = employee.company_id
    if not self.contract_id or self.employee_id != self.contract_id.employee_id:
        # Add a default contract if not already defined
        contracts = employee._get_contracts(date_from, date_to)
        if not contracts or not contracts[0].structure_type_id.default_struct_id:
            # No usable contract/structure: clear everything and stop.
            self.contract_id = False
            self.struct_id = False
            return
        self.contract_id = contracts[0]
        self.struct_id = contracts[0].structure_type_id.default_struct_id
    payslip_name = self.struct_id.payslip_name or _('Salary Slip')
    self.name = '%s - %s - %s' % (
        payslip_name,
        self.employee_id.name or '',
        format_date(self.env, self.date_from, date_format="MMMM y"))
    if date_to > date_utils.end_of(fields.Date.today(), 'month'):
        # BUGFIX: interpolate AFTER translation lookup; _("..." % args) would
        # look up the already-formatted string and never find a translation.
        self.warning_message = _(
            "This payslip can be erroneous! Work entries may not be generated for the period from %s to %s."
        ) % (date_utils.add(date_utils.end_of(fields.Date.today(), 'month'), days=1), date_to)
    else:
        self.warning_message = False
    self.worked_days_line_ids = self._get_new_worked_days_lines()
    # Rebuild the "other inputs" lines from the computed input values.
    # (Removed an unused `hr.payslip.input` search whose domain compared
    # `payslip_id` against a structure id — its result was never read.)
    input_line_ids = self._get_inputs()
    input_lines = self.input_line_ids.browse([])
    for vals in input_line_ids:
        input_lines += input_lines.new(vals)
    self.input_line_ids = input_lines
    return
def _close_tax_entry(self):
    """Close the tax period for each move in self.

    For every move: validate its date against the period boundary, freeze the
    tax lock date, attach the VAT report PDF, mark the reminder activity done,
    and schedule the next recurring closing entry + activity.
    """
    for move in self:
        # Check that date corresponds to the ending date of the period.
        tax_activity_type = move.company_id.account_tax_next_activity_type or False
        # delay_count == 1 means a monthly period, otherwise quarterly.
        periodicity = 'month' if tax_activity_type.delay_count == 1 else 'quarter'
        if move.date != date_utils.end_of(move.date, periodicity):
            raise UserError(_("Can't post the move with reference %s as its ending date %s does not correspond to end date of the period.") % (move.ref, move.date))
        activity = move.activity_ids.filtered(lambda m: m.activity_type_id == tax_activity_type)
        # Freeze the lock date at the date of the closing move.
        move.company_id.tax_lock_date = move.date
        # Build the VAT report PDF to attach to the move.
        options = move._compute_vat_period_date()
        ctx = self.env['account.report']._set_context(options)
        ctx['strict_range'] = True
        attachments = self.env['account.generic.tax.report'].with_context(ctx)._get_vat_report_attachments(options)
        # End the reminder activity, if any (truthiness instead of len()).
        if activity:
            activity.action_done()
        # Post the message with the PDF.
        subject = _('Vat closing from %s to %s') % (
            format_date(self.env, options.get('date').get('date_from')),
            format_date(self.env, options.get('date').get('date_to')))
        move.with_context(no_new_invoice=True).message_post(body=move.ref, subject=subject, attachments=attachments)
        # Create the recurring entry (new draft move and new activity).
        next_date_deadline = move.date + relativedelta(
            day=move.company_id.account_tax_periodicity_reminder_day,
            months=move.company_id.account_tax_next_activity_type.delay_count + 1)
        vals = {
            'company_id': move.company_id,
            'account_tax_periodicity': move.company_id.account_tax_periodicity,
            'account_tax_periodicity_journal_id': move.company_id.account_tax_periodicity_journal_id,
            'account_tax_periodicity_next_deadline': next_date_deadline,
        }
        self.env['res.config.settings']._create_edit_tax_reminder(vals)
def _compute_dates(self):
    """Given the month and the year, compute the first and last day of the
    period (full month, or first/second fortnight when `quincena` selects one).
    """
    for rec in self:
        # Withholdings are always computed over the full month.
        if rec.doc_type == WITHHOLDING:
            rec.quincena = 0
        month = rec.month
        year = int(rec.year)
        # Default boundaries: the whole month.
        period_start = fields.Date.to_date('%s-%.2d-01' % (year, month))
        period_end = date_utils.end_of(period_start, 'month')
        # NOTE(review): `quincena` is compared as a string here but assigned
        # the int 0 above — presumably a Selection field; confirm field type.
        if rec.quincena == '1':
            # First fortnight: 1st through the 15th.
            period_start = datetime(year, month, 1)
            period_end = datetime(year, rec.month, 15)
        if rec.quincena == '2':
            # Second fortnight: 16th through the last day of the month.
            period_start = datetime(year, month, 16)
            last_day = calendar.monthrange(year, rec.month)[1]
            period_end = datetime(year, month, last_day)
        rec.date_from = period_start
        rec.date_to = period_end
def action_invoice_cancel(self):
    """All Mexican invoices are considered.

    Non-Mexican invoices follow the standard flow. Mexican invoices outside
    the current MX fiscal month are cancelled through a reversal move;
    in-period ones go through the regular cancellation.
    """
    mx_country = self.env.ref('base.mx')
    mexican = self.filtered(lambda inv: inv.company_id.country_id == mx_country)
    if not mexican:
        return super(AccountInvoice, self).action_invoice_cancel()
    # Invoices not in draft/open cannot be cancelled: just log a note.
    blocked = mexican.filtered(lambda inv: inv.state not in ['draft', 'open'])
    for inv in blocked:
        inv.message_post(body=_('Invoice must be in draft or open state in order to be cancelled.'))
    invoices = mexican - blocked
    date_mx = self.env['l10n_mx_edi.certificate'].sudo().get_mx_current_datetime()
    if self._context.get('force_cancellation_date'):
        date_mx = fields.Datetime.from_string(self._context['force_cancellation_date'])
    month_start = date_utils.start_of(date_mx, 'month').date()
    month_end = date_utils.end_of(date_mx, 'month').date()
    in_period = invoices.filtered(
        lambda inv: inv.date_invoice and month_start <= inv.date_invoice <= month_end)
    # Out-of-period invoices: reverse the move and cancel through the SAT.
    for inv in invoices - in_period:
        inv.move_id.reverse_moves(date_mx)
        inv.state = 'cancel'
        inv._l10n_mx_edi_cancel()
    non_mexican = self - mexican
    return super(AccountInvoice, in_period + non_mexican).action_invoice_cancel()
def _compute_two_weeks_explanation(self):
    """Explain whether the current week is the 'first' or 'second' one of the
    alternating two-week calendar."""
    today = fields.Date.today()
    week_type = self.env['resource.calendar.attendance'].get_week_type(today)
    # get_week_type: truthy -> second week, falsy -> first week.
    label = _("second") if week_type else _("first")
    week_start = date_utils.start_of(today, 'week')
    week_end = date_utils.end_of(today, 'week')
    self.two_weeks_explanation = _(
        "The current week (from %s to %s) correspond to the %s one.",
        week_start, week_end, label)
def get_timespan_start_end_dt(self):
    """Method to get the start and end date based on the timespan.

    :returns: (start_date, end_date) of the period containing today for
        self.time_span ('monthly', 'quarterly' or 'yearly'); any other/unset
        value falls back to the current month, as before.
    """
    # Map the selection value to a date_utils granularity; the monthly
    # default was previously computed twice (once unconditionally, once in
    # the 'monthly' branch) — collapsed into a single lookup.
    granularity_by_span = {
        'monthly': 'month',
        'quarterly': 'quarter',
        'yearly': 'year',
    }
    today = datetime.now().date()
    granularity = granularity_by_span.get(self.time_span, 'month')
    return (date_utils.start_of(today, granularity),
            date_utils.end_of(today, granularity))
def _compute_two_weeks_explanation(self):
    """Explain whether the current week is an 'odd' or 'even' week of the
    alternating two-week calendar."""
    today = fields.Date.today()
    # Week parity counted from the epoch (ordinal day 1).
    week_type = _("odd") if int(math.floor((today.toordinal() - 1) / 7) % 2) else _("even")
    first_day = date_utils.start_of(today, 'week')
    last_day = date_utils.end_of(today, 'week')
    # BUGFIX: the sentence itself was not wrapped in _() although its
    # 'odd'/'even' fragments were — the full pattern is now translatable.
    self.two_weeks_explanation = _("This week (from %s to %s) is an %s week.") % (
        first_day, last_day, week_type)
def _get_exceed_hours(self, date_from, date_to):
    """Return, per work entry type id, the hours of validated/draft work
    entries of these contracts that overlap the boundaries of the
    [date_from, date_to] interval."""
    # Never generate work entries past the end of the current month.
    generated_date_max = min(
        fields.Date.to_date(date_to),
        date_utils.end_of(fields.Date.today(), 'month'))
    self._generate_work_entries(date_from, generated_date_max)
    date_from = datetime.combine(date_from, datetime.min.time())
    date_to = datetime.combine(date_to, datetime.max.time())
    hours_by_type = defaultdict(int)
    # Entries that straddle either boundary of the interval (or span it fully).
    entries = self.env['hr.work.entry'].search([
        '&', '&',
        ('state', 'in', ['validated', 'draft']),
        ('contract_id', 'in', self.ids),
        '|', '|',
        '&', '&',
        ('date_start', '>=', date_from), ('date_start', '<', date_to), ('date_stop', '>', date_to),
        '&', '&',
        ('date_start', '<', date_from), ('date_stop', '<=', date_to), ('date_stop', '>', date_from),
        '&',
        ('date_start', '<', date_from), ('date_stop', '>', date_to),
    ])
    for entry in entries:
        start = entry.date_start
        stop = entry.date_stop
        if entry.work_entry_type_id.is_leave:
            # Leaves: count hours according to the employee's working schedule.
            contract = entry.contract_id
            employee = contract.employee_id
            days_data = employee._get_work_days_data_batch(
                start, stop,
                compute_leaves=False,
                calendar=contract.resource_calendar_id)[employee.id]
            hours_by_type[entry.work_entry_type_id.id] += days_data.get('hours', 0)
        else:
            # Regular entries: raw wall-clock duration.
            delta = stop - start
            hours_by_type[entry.work_entry_type_id.id] += delta.days * 24 + delta.seconds / 3600  # Number of hours
    return hours_by_type
def _get_tax_closing_period_boundaries(self, date):
    """ Returns the boundaries of the tax period containing the provided
    date for this company, as a tuple (start, end).
    """
    self.ensure_one()
    months_per_period = self._get_tax_periodicity_months_delay()
    # Ceiling division: index (1-based) of the period the month falls in.
    period_index = -(-date.month // months_per_period)
    end_date = date_utils.end_of(
        datetime.date(date.year, period_index * months_per_period, 1), 'month')
    start_date = end_date + relativedelta(day=1, months=-months_per_period + 1)
    return start_date, end_date
def _get_date_range(self):
    """ Return the date range for a production schedule depending on the
    manufacturing period and the number of columns to display specified by
    the user. It returns a list of tuples containing the boundaries of each
    column.
    """
    self.ensure_one()
    period = self.manufacturing_period
    ranges = []
    column_start = start_of(fields.Date.today(), period)
    for _column in range(self.manufacturing_period_to_display):
        column_end = end_of(column_start, period)
        ranges.append((column_start, column_end))
        # Next column starts the day after the current one ends.
        column_start = add(column_end, days=1)
    return ranges
def _get_date_range(self):
    """Return (first_day, last_day) tuples per column, extended backwards by
    one extra period per elapsed week of the current period."""
    ranges = []
    period = self.env.company.manufacturing_period
    first_day = start_of(fields.Date.today(), period)
    columns = self.env.company.manufacturing_period_to_display
    # Shift the start back one week per elapsed week, adding a column each time.
    elapsed_weeks = (first_day.day // 7) + 1
    for _i in range(elapsed_weeks):
        first_day = first_day + relativedelta(days=-7)
        columns = columns + 1
    for _column in range(columns):
        last_day = end_of(first_day, self.env.company.manufacturing_period)
        ranges.append((first_day, last_day))
        first_day = add(last_day, days=1)
    return ranges
class HrAttachmentSalary(models.Model):
    """Amount garnished from an employee's payslip wages."""
    _name = 'l10n_be.attachment.salary'
    _description = 'Garnished amount from payslip wages'

    name = fields.Char(string="Description")
    amount = fields.Float(required=True)
    # Legal nature of the garnishment.
    garnished_type = fields.Selection([
        ('attachment_salary', 'Attachment of Salary'),
        ('assignment_salary', 'Assignment of Salary'),
        ('child_support', 'Child Support'),
    ], default='attachment_salary', required=True)
    contract_id = fields.Many2one('hr.contract')
    # Validity period; defaults to today through the end of the current month.
    date_from = fields.Date(
        string="From", default=lambda self: fields.Date.today())
    date_to = fields.Date(
        string="To", default=lambda self: end_of(fields.Date.today(), 'month'))
def _get_work_hours(self, date_from, date_to, domain=None):
    """ Returns the amount (expressed in hours) of work for a contract
    between two dates. If called on multiple contracts, sum work amounts of
    each contract.

    :param date_from: The start date
    :param date_to: The end date
    :param domain: optional extra domain on work entries
    :returns: a dictionary {work_entry_id: hours_1, work_entry_2: hours_2}
    """
    # Never generate work entries past the end of the current month.
    generated_date_max = min(
        fields.Date.to_date(date_to),
        date_utils.end_of(fields.Date.today(), 'month'))
    self._generate_work_entries(date_from, generated_date_max)
    date_from = datetime.combine(date_from, datetime.min.time())
    date_to = datetime.combine(date_to, datetime.max.time())
    hours_by_type = defaultdict(int)
    # First: entries entirely inside the interval, aggregated in SQL.
    grouped = self.env['hr.work.entry'].read_group(
        self._get_work_hours_domain(date_from, date_to, domain=domain, inside=True),
        ['hours:sum(duration)'],
        ['work_entry_type_id'],
    )
    for data in grouped:
        type_id = data['work_entry_type_id'][0] if data['work_entry_type_id'] else False
        hours_by_type[type_id] = data['hours']
    # Second: entries exceeding the interval — clamp to it and compute each
    # duration individually.
    overlapping = self.env['hr.work.entry'].search(
        self._get_work_hours_domain(date_from, date_to, domain=domain, inside=False))
    for work_entry in overlapping:
        date_start = max(date_from, work_entry.date_start)
        date_stop = min(date_to, work_entry.date_stop)
        if work_entry.work_entry_type_id.is_leave:
            # Leaves: count hours according to the employee's working schedule.
            contract = work_entry.contract_id
            employee = contract.employee_id
            contract_data = employee._get_work_days_data_batch(
                date_start, date_stop,
                compute_leaves=False,
                calendar=contract.resource_calendar_id,
            )[employee.id]
            hours_by_type[work_entry.work_entry_type_id.id] += contract_data.get('hours', 0)
        else:
            delta = date_stop - date_start
            hours_by_type[work_entry.work_entry_type_id.id] += delta.days * 24 + delta.seconds / 3600  # Number of hours
    return hours_by_type
def l10n_mx_edi_action_reinvoice(self):
    """Allows generating a new invoice with the current date from the
    customer portal.

    Case A — current MX date still within the invoice's fiscal month: the
    invoice itself is cancelled, redated and reposted, then its payment is
    re-reconciled. Case B — otherwise: a credit note is issued against the
    old invoice and a fresh copy is posted and reconciled with it.
    """
    mx_today = self.env['l10n_mx_edi.certificate'].sudo().get_mx_current_datetime().date()
    ctx = {'disable_after_commit': True}
    for invoice in self.filtered(lambda inv: inv.state != 'draft'):
        if mx_today <= date_utils.end_of(invoice.date_invoice, 'month'):
            # Case A: keep the payment lines to re-reconcile afterwards.
            payment_move_lines = invoice.payment_move_line_ids
            invoice.button_cancel()
            invoice.refresh()
            invoice.action_invoice_draft()
            invoice.write({'date_invoice': mx_today.strftime("%Y-%m-%d")})
            invoice.refresh()
            invoice.with_context(**ctx).action_post()
            invoice.refresh()
            # Now reconcile the payment
            if payment_move_lines:
                invoice.register_payment(payment_move_lines)
            continue
        # Case B: Create a new invoice and pay with a Credit Note
        refund_invoice = invoice.refund(
            date_invoice=mx_today,
            date=mx_today,
            description=_('Re-invoiced from %s') % invoice.number,
            journal_id=invoice.journal_id.id)
        refund_invoice.action_post()
        # Get the credit move line to reconcile with a new invoice
        refund_move_line = refund_invoice.move_id.line_ids.filtered('credit')
        new_invoice = invoice.copy({'date_invoice': mx_today})
        new_invoice.action_post()
        # Reconcile the new invoice with the credit note.
        new_invoice.assign_outstanding_credit(refund_move_line.id)
def online_sync_bank_statement(self, transactions, journal, ending_balance):
    """ Build bank statements from a list of transactions; messages are also
    posted in the online_account of the journal.

    :param transactions: A list of transactions that will be created in the
        new bank statement. The format is:
        [{
            'id': online id, (unique ID for the transaction)
            'date': transaction date,
            'name': transaction description,
            'amount': transaction amount (negative for debit, positive for credit)
            'partner_id': optional field used to define the partner
            'online_partner_vendor_name': optional, stored on the statement
                line to match partners on later synchronizations
            'online_partner_bank_account': optional, stored on the statement
                line to match partners on later synchronizations
        }, ...]
    :param journal: The journal (account.journal) of the new bank statement
    :param ending_balance: ending balance on the account
    :return: The number of imported transactions for the journal
    """
    # Since the synchronization succeeded, set it as the bank_statements_source of the journal
    journal.sudo().write({'bank_statements_source': 'online_sync'})
    if not transactions:
        return 0
    # Skip transactions already imported for this journal.
    transactions_identifiers = [line['online_identifier'] for line in transactions]
    existing_transactions_ids = self.env['account.bank.statement.line'].search([
        ('online_identifier', 'in', transactions_identifiers),
        ('journal_id', '=', journal.id)])
    existing_transactions = [t.online_identifier for t in existing_transactions_ids]
    sorted_transactions = sorted(transactions, key=lambda l: l['date'])
    min_date = date_utils.start_of(sorted_transactions[0]['date'], 'month')
    if journal.bank_statement_creation == 'week':
        # key is not always the first of month
        weekday = min_date.weekday()
        min_date = date_utils.subtract(min_date, days=weekday)
    max_date = sorted_transactions[-1]['date']
    total = sum(t['amount'] for t in sorted_transactions)
    statements_in_range = self.search([('date', '>=', min_date), ('journal_id', '=', journal.id)])
    # For first synchronization, an opening bank statement is created to fill the missing bank statements
    all_statement = self.search_count([('journal_id', '=', journal.id)])
    digits_rounding_precision = journal.currency_id.rounding if journal.currency_id else journal.company_id.currency_id.rounding
    if all_statement == 0 and not float_is_zero(ending_balance - total, precision_rounding=digits_rounding_precision):
        opening_transaction = [(0, 0, {
            'date': date_utils.subtract(min_date, days=1),
            'payment_ref': _("Opening statement: first synchronization"),
            'amount': ending_balance - total,
        })]
        statement = self.create({
            'name': _('Opening statement'),
            'date': date_utils.subtract(min_date, days=1),
            'line_ids': opening_transaction,
            'journal_id': journal.id,
            'balance_end_real': ending_balance - total,
        })
        statement.button_post()
    transactions_in_statements = []
    statement_to_reset_to_draft = self.env['account.bank.statement']
    transactions_to_create = {}
    number_added = 0
    for transaction in sorted_transactions:
        if transaction['online_identifier'] in existing_transactions:
            continue
        line = transaction.copy()
        number_added += 1
        # Choose the statement grouping key according to the journal setting.
        if journal.bank_statement_creation == 'day':
            # key is full date
            key = transaction['date']
        elif journal.bank_statement_creation == 'week':
            # key is first day of the week
            weekday = transaction['date'].weekday()
            key = date_utils.subtract(transaction['date'], days=weekday)
        elif journal.bank_statement_creation == 'bimonthly':
            if transaction['date'].day >= 15:
                # key is the 15 of that month
                key = transaction['date'].replace(day=15)
            else:
                # key is the first of the month
                key = date_utils.start_of(transaction['date'], 'month')
        elif journal.bank_statement_creation == 'month':
            # key is first of the month
            key = date_utils.start_of(transaction['date'], 'month')
        else:
            # key is last date of transactions fetched
            key = max_date
        # Decide if we have to update an existing statement or create a new one with this line
        stmt = statements_in_range.filtered(lambda x: x.date == key)
        if stmt and stmt[0].id:
            line['statement_id'] = stmt[0].id
            transactions_in_statements.append(line)
            statement_to_reset_to_draft += stmt[0]
        else:
            transactions_to_create.setdefault(key, []).append((0, 0, line))
    # Create the lines that should be inside an existing bank statement and reset those stmt in draft
    if transactions_in_statements:
        for st in statement_to_reset_to_draft:
            if st.state == 'confirm':
                st.message_post(body=_('Statement has been reset to draft because some transactions from online synchronization were added to it.'))
                st.state = 'posted'
        posted_statements = statement_to_reset_to_draft.filtered(lambda st: st.state == 'posted')
        posted_statements.state = 'open'
        statement_lines = self.env['account.bank.statement.line'].create(transactions_in_statements)
        posted_statements.state = 'posted'
        # Post only the newly created statement lines if the related statement is already posted.
        statement_lines.filtered(lambda line: line.statement_id.state == 'posted')\
            .mapped('move_id')\
            .with_context(skip_account_move_synchronization=True)\
            ._post()
        # Recompute the balance_end_real of the first statement where we added lines,
        # because adding lines doesn't trigger a recompute. Only the first (oldest)
        # element is needed: its recompute cascades to all following statements.
        statement_to_reset_to_draft[0]._compute_ending_balance()
    # Create lines inside new bank statements
    st_vals_list = []
    for date, lines in transactions_to_create.items():
        # balance_start and balance_end_real will be computed automatically
        name = _('Online synchronization of %s') % (date,)
        if journal.bank_statement_creation in ('bimonthly', 'week', 'month'):
            name = _('Online synchronization from %s to %s')
            end_date = date
            if journal.bank_statement_creation == 'month':
                end_date = date_utils.end_of(date, 'month')
            elif journal.bank_statement_creation == 'week':
                end_date = date_utils.add(date, days=6)
            elif journal.bank_statement_creation == 'bimonthly':
                if end_date.day == 1:
                    end_date = date.replace(day=14)
                else:
                    end_date = date_utils.end_of(date, 'month')
            name = name % (date, end_date)
        st_vals_list.append({
            'name': name,
            'date': date,
            'line_ids': lines,
            'journal_id': journal.id,
        })
    statements = self.env['account.bank.statement'].create(st_vals_list)
    statements.button_post()
    # Write the account balance on the last statement of the journal: if there
    # are missing transactions, the gap shows there and is corrected the day
    # the missing transactions are fetched or written manually.
    last_bnk_stmt = self.search([('journal_id', '=', journal.id)], limit=1)
    if last_bnk_stmt:
        last_bnk_stmt.balance_end_real = ending_balance
        if last_bnk_stmt.state == 'posted' and last_bnk_stmt.balance_end != last_bnk_stmt.balance_end_real:
            last_bnk_stmt.button_reopen()
    # Set last sync date as the last transaction date
    journal.account_online_journal_id.sudo().write({'last_sync': max_date})
    return number_added
def portal_my_timesheets(self, page=1, sortby=None, filterby=None, search=None, search_in='all', groupby='none', **kw):
    """Render the portal 'My Timesheets' page with sorting, filtering,
    searching, grouping and pagination."""
    Timesheet_sudo = request.env['account.analytic.line'].sudo()
    values = self._prepare_portal_layout_values()
    domain = request.env['account.analytic.line']._timesheet_get_portal_domain()
    _items_per_page = 100
    searchbar_sortings = {
        'date': {'label': _('Newest'), 'order': 'date desc'},
        'name': {'label': _('Description'), 'order': 'name'},
    }
    searchbar_inputs = self._get_searchbar_inputs()
    searchbar_groupby = self._get_searchbar_groupby()
    today = fields.Date.today()
    quarter_start, quarter_end = date_utils.get_quarter(today)
    last_week = today + relativedelta(weeks=-1)
    last_month = today + relativedelta(months=-1)
    last_year = today + relativedelta(years=-1)
    searchbar_filters = {
        'all': {'label': _('All'), 'domain': []},
        'today': {'label': _('Today'), 'domain': [("date", "=", today)]},
        'week': {'label': _('This week'), 'domain': [
            ('date', '>=', date_utils.start_of(today, "week")),
            ('date', '<=', date_utils.end_of(today, 'week'))]},
        'month': {'label': _('This month'), 'domain': [
            ('date', '>=', date_utils.start_of(today, 'month')),
            ('date', '<=', date_utils.end_of(today, 'month'))]},
        'year': {'label': _('This year'), 'domain': [
            ('date', '>=', date_utils.start_of(today, 'year')),
            ('date', '<=', date_utils.end_of(today, 'year'))]},
        'quarter': {'label': _('This Quarter'), 'domain': [
            ('date', '>=', quarter_start),
            ('date', '<=', quarter_end)]},
        'last_week': {'label': _('Last week'), 'domain': [
            ('date', '>=', date_utils.start_of(last_week, "week")),
            ('date', '<=', date_utils.end_of(last_week, 'week'))]},
        'last_month': {'label': _('Last month'), 'domain': [
            ('date', '>=', date_utils.start_of(last_month, 'month')),
            ('date', '<=', date_utils.end_of(last_month, 'month'))]},
        'last_year': {'label': _('Last year'), 'domain': [
            ('date', '>=', date_utils.start_of(last_year, 'year')),
            ('date', '<=', date_utils.end_of(last_year, 'year'))]},
    }
    # default sort by value
    if not sortby:
        sortby = 'date'
    order = searchbar_sortings[sortby]['order']
    # default filter by value
    if not filterby:
        filterby = 'all'
    domain = AND([domain, searchbar_filters[filterby]['domain']])
    if search and search_in:
        domain += self._get_search_domain(search_in, search)
    timesheet_count = Timesheet_sudo.search_count(domain)
    # pager
    pager = portal_pager(
        url="/my/timesheets",
        url_args={'sortby': sortby, 'search_in': search_in, 'search': search,
                  'filterby': filterby, 'groupby': groupby},
        total=timesheet_count,
        page=page,
        step=_items_per_page)

    def get_timesheets():
        # Returns (page of timesheets, [(record group, summed unit_amount)]).
        groupby_mapping = self._get_groupby_mapping()
        field = groupby_mapping.get(groupby, None)
        orderby = '%s, %s' % (field, order) if field else order
        timesheets = Timesheet_sudo.search(
            domain, order=orderby, limit=_items_per_page, offset=pager['offset'])
        if field:
            if groupby == 'date':
                time_data = Timesheet_sudo.read_group(
                    domain, ['date', 'unit_amount:sum'], ['date:day'])
                mapped_time = dict([
                    (datetime.strptime(m['date:day'], '%d %b %Y').date(), m['unit_amount'])
                    for m in time_data])
                grouped_timesheets = [
                    (Timesheet_sudo.concat(*g), mapped_time[k])
                    for k, g in groupbyelem(timesheets, itemgetter('date'))]
            else:
                # BUGFIX: was a duplicated assignment `time_data = time_data = ...`.
                time_data = Timesheet_sudo.read_group(
                    domain, [field, 'unit_amount:sum'], [field])
                mapped_time = dict([
                    (m[field][0] if m[field] else False, m['unit_amount'])
                    for m in time_data])
                grouped_timesheets = [
                    (Timesheet_sudo.concat(*g), mapped_time[k.id])
                    for k, g in groupbyelem(timesheets, itemgetter(field))]
            return timesheets, grouped_timesheets
        grouped_timesheets = [
            (timesheets, sum(Timesheet_sudo.search(domain).mapped('unit_amount')))
        ] if timesheets else []
        return timesheets, grouped_timesheets

    timesheets, grouped_timesheets = get_timesheets()
    values.update({
        'timesheets': timesheets,
        'grouped_timesheets': grouped_timesheets,
        'page_name': 'timesheet',
        'default_url': '/my/timesheets',
        'pager': pager,
        'searchbar_sortings': searchbar_sortings,
        'search_in': search_in,
        'search': search,
        'sortby': sortby,
        'groupby': groupby,
        'searchbar_inputs': searchbar_inputs,
        'searchbar_groupby': searchbar_groupby,
        'searchbar_filters': OrderedDict(sorted(searchbar_filters.items())),
        'filterby': filterby,
        'is_uom_day': request.env['account.analytic.line']._is_timesheet_encode_uom_day(),
    })
    return request.render("hr_timesheet.portal_my_timesheets", values)
def onchange_employee(self):
    """Refresh payslip data (name, contract, structure, worked days) when the
    employee or the covered period changes, and inject an 'LO' input line for
    any approved, unpaid loan installment falling in the period.

    Does nothing until employee, date_from and date_to are all set.
    """
    if (not self.employee_id) or (not self.date_from) or (not self.date_to):
        return
    employee = self.employee_id
    date_from = self.date_from
    date_to = self.date_to
    ttyme = datetime.fromtimestamp(
        time.mktime(time.strptime(str(date_from), "%Y-%m-%d")))
    locale = self.env.context.get('lang') or 'en_US'
    self.name = _('Salary Slip of %s for %s') % (
        employee.name,
        tools.ustr(babel.dates.format_date(date=ttyme, format='MMMM-y', locale=locale)))
    self.company_id = employee.company_id
    if not self.contract_id or self.employee_id != self.contract_id.employee_id:
        # Add a default contract if not already defined
        contracts = employee._get_contracts(date_from, date_to)
        if not contracts or not contracts[0].structure_type_id.default_struct_id:
            self.contract_id = False
            self.struct_id = False
            return
        self.contract_id = contracts[0]
        self.struct_id = contracts[0].structure_type_id.default_struct_id
    if date_to > date_utils.end_of(fields.Date.today(), 'month'):
        # BUGFIX: interpolate AFTER translation lookup; _("..." % args) would
        # look up the already-formatted string and never find a translation.
        self.warning_message = _(
            "This payslip can be erroneous! Work entries may not be generated for the period from %s to %s."
        ) % (date_utils.add(date_utils.end_of(fields.Date.today(), 'month'), days=1), date_to)
    else:
        self.warning_message = False
    self.worked_days_line_ids = self._get_new_worked_days_lines()
    # Inject a loan ('LO') input line for approved, unpaid installments due in
    # the payslip period (only once, guarded by `changed_get_loan`).
    approved_loans = self.env['hr.loan'].search([
        ('employee_id', '=', employee.id),
        ('state', '=', 'approve')])
    for loan in approved_loans:
        for loan_line in loan.loan_lines:
            if date_from <= loan_line.date <= date_to and not loan_line.paid:
                payslip_other_input_type = self.env['hr.payslip.input.type'].search(
                    [('code', '=', 'LO')], limit=1)
                if not self.changed_get_loan:
                    self.input_line_ids = [(0, 0, {
                        'input_type_id': payslip_other_input_type.id,
                        'amount': loan_line.amount,
                        'loan_line_id': loan_line.id,
                    })]
                    self.changed_get_loan = True
    return
def _online_sync_bank_statement(self, transactions, online_account):
    """ build a bank statement from a list of transaction and post messages is also post in the online_account of the journal.
        :param transactions: A list of transactions that will be created in the new bank statement.
            The format is : [{
                'id': online id,                  (unique ID for the transaction)
                'date': transaction date,        (The date of the transaction)
                'name': transaction description, (The description)
                'amount': transaction amount,    (The amount of the transaction. Negative for debit, positive for credit)
                'online_partner_information': optional field used to store information on the statement line under the
                    online_partner_information field (typically information coming from plaid/yodlee). This is use to find partner for next statements
            }, ...]
        :param online_account: The online account for this statement
        Return: The number of imported transaction for the journal
    """
    # NOTE(review): despite the docstring above, this method actually returns
    # the account.bank.statement.line recordset accumulated below, not a count.
    line_to_reconcile = self.env['account.bank.statement.line']
    for journal in online_account.journal_ids:
        # Since the synchronization succeeded, set it as the bank_statements_source of the journal
        journal.sudo().write({'bank_statements_source': 'online_sync'})
        if not transactions:
            continue

        # Skip transactions whose online identifier was already imported
        # into this journal (idempotent re-sync).
        transactions_identifiers = [line['online_transaction_identifier'] for line in transactions]
        existing_transactions_ids = self.env['account.bank.statement.line'].search([
            ('online_transaction_identifier', 'in', transactions_identifiers),
            ('journal_id', '=', journal.id)])
        existing_transactions = [t.online_transaction_identifier for t in existing_transactions_ids]

        # Collect partner hints; 'date' strings are converted in place.
        transactions_partner_information = []
        for transaction in transactions:
            transaction['date'] = fields.Date.from_string(transaction['date'])
            if transaction.get('online_partner_information'):
                transactions_partner_information.append(transaction['online_partner_information'])

        # Map online partner information -> res.partner id in one SQL query.
        if transactions_partner_information:
            self._cr.execute(
                """
                SELECT p.online_partner_information, p.id FROM res_partner p
                WHERE p.online_partner_information IN %s
                """, [tuple(transactions_partner_information)])
            partner_id_per_information = dict(self._cr.fetchall())
        else:
            partner_id_per_information = {}

        sorted_transactions = sorted(transactions, key=lambda l: l['date'])
        min_date = date_utils.start_of(sorted_transactions[0]['date'], 'month')
        if journal.bank_statement_creation_groupby == 'week':
            # key is not always the first of month
            weekday = min_date.weekday()
            min_date = date_utils.subtract(min_date, days=weekday)
        max_date = sorted_transactions[-1]['date']
        total = sum([t['amount'] for t in sorted_transactions])

        statements_in_range = self.search([('date', '>=', min_date), ('journal_id', '=', journal.id)])

        # For first synchronization, an opening bank statement is created to fill the missing bank statements
        all_statement = self.search_count([('journal_id', '=', journal.id)])
        digits_rounding_precision = journal.currency_id.rounding if journal.currency_id else journal.company_id.currency_id.rounding
        # If there are neither statement and the ending balance != 0, we create an opening bank statement
        if all_statement == 0 and not float_is_zero(online_account.balance - total, precision_rounding=digits_rounding_precision):
            opening_transaction = [(0, 0, {
                'date': date_utils.subtract(min_date, days=1),
                'payment_ref': _("Opening statement: first synchronization"),
                'amount': online_account.balance - total,
            })]
            op_stmt = self.create({
                'date': date_utils.subtract(min_date, days=1),
                'line_ids': opening_transaction,
                'journal_id': journal.id,
                'balance_end_real': online_account.balance - total,
            })
            op_stmt.button_post()
            line_to_reconcile += op_stmt.mapped('line_ids')

        transactions_in_statements = []
        statement_to_reset_to_draft = self.env['account.bank.statement']
        transactions_to_create = {}
        # Dispatch each new transaction to a statement key derived from the
        # journal's grouping mode (day / week / bimonthly / month / none).
        for transaction in sorted_transactions:
            if transaction['online_transaction_identifier'] in existing_transactions:
                continue  # Do nothing if the transaction already exists
            line = transaction.copy()
            line['online_account_id'] = online_account.id
            if journal.bank_statement_creation_groupby == 'day':
                # key is full date
                key = transaction['date']
            elif journal.bank_statement_creation_groupby == 'week':
                # key is first day of the week
                weekday = transaction['date'].weekday()
                key = date_utils.subtract(transaction['date'], days=weekday)
            elif journal.bank_statement_creation_groupby == 'bimonthly':
                if transaction['date'].day >= 15:
                    # key is the 15 of that month
                    key = transaction['date'].replace(day=15)
                else:
                    # key if the first of the month
                    key = date_utils.start_of(transaction['date'], 'month')
                # key is year-month-0 or year-month-1
            elif journal.bank_statement_creation_groupby == 'month':
                # key is first of the month
                key = date_utils.start_of(transaction['date'], 'month')
            else:
                # key is last date of transactions fetched
                key = max_date
            # Find partner id if exists
            if line.get('online_partner_information'):
                partner_info = line['online_partner_information']
                if partner_id_per_information.get(partner_info):
                    line['partner_id'] = partner_id_per_information[partner_info]
            # Decide if we have to update an existing statement or create a new one with this line
            stmt = statements_in_range.filtered(lambda x: x.date == key)
            if stmt:
                line['statement_id'] = stmt[0].id
                transactions_in_statements.append(line)
                statement_to_reset_to_draft += stmt[0]
            else:
                if not transactions_to_create.get(key):
                    transactions_to_create[key] = []
                transactions_to_create[key].append((0, 0, line))

        # Create the lines that should be inside an existing bank statement and reset those stmt in draft
        if transactions_in_statements:
            for st in statement_to_reset_to_draft:
                if st.state != 'open':
                    st.message_post(body=_(
                        'Statement has been reset to draft because some transactions from online synchronization were added to it.'
                    ))
            statement_to_reset_to_draft.write({'state': 'open'})
            line_to_reconcile += self.env['account.bank.statement.line'].create(transactions_in_statements)
            # Recompute the balance_end_real of the first statement where we added line
            # because adding line don't trigger a recompute and balance_end_real is not updated.
            # We only trigger the recompute on the first element of the list as it is the one
            # the most in the past and this will trigger the recompute of all the statements
            # that are next.
            statement_to_reset_to_draft[0]._compute_ending_balance()

        # Create lines inside new bank statements
        created_stmts = self.env['account.bank.statement']
        for date, lines in transactions_to_create.items():
            # balance_start and balance_end_real will be computed automatically
            if journal.bank_statement_creation_groupby in ('bimonthly', 'week', 'month'):
                end_date = date
                if journal.bank_statement_creation_groupby == 'month':
                    end_date = date_utils.end_of(date, 'month')
                elif journal.bank_statement_creation_groupby == 'week':
                    end_date = date_utils.add(date, days=6)
                elif journal.bank_statement_creation_groupby == 'bimonthly':
                    if end_date.day == 1:
                        end_date = date.replace(day=14)
                    else:
                        end_date = date_utils.end_of(date, 'month')
            # NOTE(review): end_date computed above is never used in the
            # create() vals below — looks like a leftover; confirm against
            # the original upstream implementation.
            created_stmts += self.env['account.bank.statement'].create({
                'date': date,
                'line_ids': lines,
                'journal_id': journal.id,
            })
        created_stmts.button_post()
        line_to_reconcile += created_stmts.mapped('line_ids')
        # write account balance on the last statement of the journal
        # That way if there are missing transactions, it will show in the last statement
        # and the day missing transactions are fetched or manually written, everything will be corrected
        last_bnk_stmt = self.search([('journal_id', '=', journal.id)], limit=1)
        if last_bnk_stmt and (created_stmts or transactions_in_statements):
            last_bnk_stmt.balance_end_real = online_account.balance
        # Set last sync date as the last transaction date
        journal.account_online_account_id.sudo().write({'last_sync': max_date})
    return line_to_reconcile
def portal_my_timesheets(self, page=1, sortby=None, filterby=None, search=None, search_in='all', groupby='project', **kw):
    """Portal controller: render the current user's timesheet list.

    Supports sorting (date/name), date-range filters, free-text search on
    the line name, pagination, and optional grouping by project.
    """
    # sudo: portal users lack direct access to account.analytic.line;
    # visibility is restricted by the portal domain below instead.
    Timesheet_sudo = request.env['account.analytic.line'].sudo()
    values = self._prepare_portal_layout_values()
    domain = request.env['account.analytic.line']._timesheet_get_portal_domain()
    searchbar_sortings = {
        'date': {'label': _('Newest'), 'order': 'date desc'},
        'name': {'label': _('Name'), 'order': 'name'},
    }
    searchbar_inputs = {
        'all': {'input': 'all', 'label': _('Search in All')},
    }
    searchbar_groupby = {
        'none': {'input': 'none', 'label': _('None')},
        'project': {'input': 'project', 'label': _('Project')},
    }
    # Pre-compute the date anchors used by the period filters.
    today = fields.Date.today()
    quarter_start, quarter_end = date_utils.get_quarter(today)
    last_week = today + relativedelta(weeks=-1)
    last_month = today + relativedelta(months=-1)
    last_year = today + relativedelta(years=-1)
    searchbar_filters = {
        'all': {'label': _('All'), 'domain': []},
        'today': {'label': _('Today'), 'domain': [("date", "=", today)]},
        'week': {'label': _('This week'), 'domain': [('date', '>=', date_utils.start_of(today, "week")), ('date', '<=', date_utils.end_of(today, 'week'))]},
        'month': {'label': _('This month'), 'domain': [('date', '>=', date_utils.start_of(today, 'month')), ('date', '<=', date_utils.end_of(today, 'month'))]},
        'year': {'label': _('This year'), 'domain': [('date', '>=', date_utils.start_of(today, 'year')), ('date', '<=', date_utils.end_of(today, 'year'))]},
        'quarter': {'label': _('This Quarter'), 'domain': [('date', '>=', quarter_start), ('date', '<=', quarter_end)]},
        'last_week': {'label': _('Last week'), 'domain': [('date', '>=', date_utils.start_of(last_week, "week")), ('date', '<=', date_utils.end_of(last_week, 'week'))]},
        'last_month': {'label': _('Last month'), 'domain': [('date', '>=', date_utils.start_of(last_month, 'month')), ('date', '<=', date_utils.end_of(last_month, 'month'))]},
        'last_year': {'label': _('Last year'), 'domain': [('date', '>=', date_utils.start_of(last_year, 'year')), ('date', '<=', date_utils.end_of(last_year, 'year'))]},
    }
    # default sort by value
    if not sortby:
        sortby = 'date'
    order = searchbar_sortings[sortby]['order']
    # default filter by value
    if not filterby:
        filterby = 'all'
    domain = AND([domain, searchbar_filters[filterby]['domain']])

    if search and search_in:
        domain = AND([domain, [('name', 'ilike', search)]])

    timesheet_count = Timesheet_sudo.search_count(domain)
    # pager
    pager = portal_pager(
        url="/my/timesheets",
        url_args={'sortby': sortby, 'search_in': search_in, 'search': search, 'filterby': filterby},
        total=timesheet_count,
        page=page,
        step=self._items_per_page
    )

    # Secondary sort on project so groupbyelem below sees contiguous runs
    # per project (groupby requires pre-sorted input).
    if groupby == 'project':
        order = "project_id, %s" % order
    timesheets = Timesheet_sudo.search(domain, order=order, limit=self._items_per_page, offset=pager['offset'])
    if groupby == 'project':
        grouped_timesheets = [Timesheet_sudo.concat(*g) for k, g in groupbyelem(timesheets, itemgetter('project_id'))]
    else:
        grouped_timesheets = [timesheets]

    values.update({
        'timesheets': timesheets,
        'grouped_timesheets': grouped_timesheets,
        'page_name': 'timesheet',
        'default_url': '/my/timesheets',
        'pager': pager,
        'searchbar_sortings': searchbar_sortings,
        'search_in': search_in,
        'sortby': sortby,
        'groupby': groupby,
        'searchbar_inputs': searchbar_inputs,
        'searchbar_groupby': searchbar_groupby,
        'searchbar_filters': OrderedDict(sorted(searchbar_filters.items())),
        'filterby': filterby,
    })
    return request.render("hr_timesheet.portal_my_timesheets", values)
def _compute_next_scheduled_payout(self): today = fields.date.today() for adyen_payout_id in self: adyen_payout_id.next_scheduled_payout = date_utils.end_of( today, adyen_payout_id.payout_schedule)
def _get_work_hours(self, date_from, date_to):
    """ Returns the amount (expressed in hours) of work for a contract between two dates.
        If called on multiple contracts, sum work amounts of each contract.
        :param date_from: The start date
        :param date_to: The end date
        :returns: a dictionary {work_entry_id: hours_1, work_entry_2: hours_2}
    """
    # Work entries can only be generated up to the end of the current month.
    generated_date_max = min(
        fields.Date.to_date(date_to),
        date_utils.end_of(fields.Date.today(), 'month'))
    self._generate_work_entries(date_from, generated_date_max)
    date_from = datetime.combine(date_from, datetime.min.time())
    date_to = datetime.combine(date_to, datetime.max.time())
    work_data = defaultdict(int)

    # First, find work entries fully inside the interval (excluding the
    # double-entry type 'DB100', handled symmetrically below).
    work_entries = self.env['hr.work.entry'].read_group(
        [('state', 'in', ['validated', 'button_validate_approval']),
         ('date_start', '>=', date_from),
         ('date_stop', '<=', date_to),
         ('contract_id', 'in', self.ids),
         ('work_entry_type_id.code', '!=', 'DB100')],
        ['hours:sum(duration)'],
        ['work_entry_type_id'])
    work_data.update({
        data['work_entry_type_id'][0] if data['work_entry_type_id'] else False:
        data['hours']
        for data in work_entries
    })

    # Second, find work entries that exceed the interval; their durations
    # are clipped to the interval in the loop at the end of this method.
    # The three OR branches cover: starts inside/ends after, starts
    # before/ends inside, and spans the whole interval.
    work_entries = self.env['hr.work.entry'].search([
        '&', '&', '&',
        ('work_entry_type_id.code', '!=', 'DB100'),
        ('state', 'in', ['validated', 'button_validate_approval']),
        ('contract_id', 'in', self.ids),
        '|', '|',
        '&', '&',
        ('date_start', '>=', date_from),
        ('date_start', '<', date_to),
        ('date_stop', '>', date_to),
        '&', '&',
        ('date_start', '<', date_from),
        ('date_stop', '<=', date_to),
        ('date_stop', '>', date_from),
        '&',
        ('date_start', '<', date_from),
        ('date_stop', '>', date_to),
    ])

    # Same two passes for the double-entry type 'DB100'. Its durations are
    # always added in full (never clipped), see the loop below.
    # BUGFIX: both DB100 domains previously used 'date_start' in every
    # leaf (a mass search-and-replace of the non-DB100 domains), which
    # made all three OR branches of the overlap search self-contradictory
    # (e.g. date_start < date_to AND date_start > date_to), so overlapping
    # DB100 entries were silently never counted. The domains now mirror
    # the non-DB100 queries above.
    work_entries_double_entry = self.env['hr.work.entry'].read_group(
        [('state', 'in', ['validated', 'button_validate_approval']),
         ('date_start', '>=', date_from),
         ('date_stop', '<=', date_to),
         ('contract_id', 'in', self.ids),
         ('work_entry_type_id.code', '=', 'DB100')],
        ['hours:sum(duration)'],
        ['work_entry_type_id'])
    work_data.update({
        data['work_entry_type_id'][0] if data['work_entry_type_id'] else False:
        data['hours']
        for data in work_entries_double_entry
    })

    work_entries_double_entry = self.env['hr.work.entry'].search([
        '&', '&', '&',
        ('work_entry_type_id.code', '=', 'DB100'),
        ('state', 'in', ['validated', 'button_validate_approval']),
        ('contract_id', 'in', self.ids),
        '|', '|',
        '&', '&',
        ('date_start', '>=', date_from),
        ('date_start', '<', date_to),
        ('date_stop', '>', date_to),
        '&', '&',
        ('date_start', '<', date_from),
        ('date_stop', '<=', date_to),
        ('date_stop', '>', date_from),
        '&',
        ('date_start', '<', date_from),
        ('date_stop', '>', date_to),
    ])
    for double_entry in work_entries_double_entry:
        # Double-entry durations count in full, even outside the interval.
        work_data[double_entry.work_entry_type_id.id] += double_entry.duration

    # Clip each overlapping (non-DB100) entry to the interval and convert
    # its span to hours; leave-type entries go through the work calendar.
    for work_entry in work_entries:
        date_start = max(date_from, work_entry.date_start)
        date_stop = min(date_to, work_entry.date_stop)
        if work_entry.work_entry_type_id.is_leave:
            contract = work_entry.contract_id
            calendar = contract.resource_calendar_id
            contract_data = contract.employee_id._get_work_days_data(
                date_start, date_stop, compute_leaves=False, calendar=calendar)
            work_data[work_entry.work_entry_type_id.id] += contract_data.get('hours', 0)
        else:
            dt = date_stop - date_start
            # Number of hours
            work_data[work_entry.work_entry_type_id.id] += dt.days * 24 + dt.seconds / 3600
    return work_data
# (datetime.datetime(2019, 1, 1, 0, 0), datetime.datetime(2019, 12, 31, 0, 0)) date_utils.start_of(today, 'hour') # 2019-03-29 01:00:00 date_utils.start_of(today, 'day') # 2019-03-29 00:00:00 date_utils.start_of(today, 'week') # 2019-03-25 00:00:00 date_utils.start_of(today, 'month') # 2019-03-01 00:00:00 date_utils.start_of(today, 'quarter') # 2019-01-01 00:00:00 date_utils.start_of(today, 'year') # 2019-01-01 00:00:00 date_utils.end_of(today, 'hour') # 2019-03-29 01:59:59.999999 date_utils.end_of(today, 'day') # 2019-03-29 23:59:59.999999 date_utils.end_of(today, 'week') # 2019-03-31 23:59:59.999999 date_utils.end_of(today, 'month') # 2019-03-31 23:59:59.999999 date_utils.end_of(today, 'quarter') # 2019-03-31 23:59:59.999999 date_utils.end_of(today, 'year') # 2019-12-31 23:59:59.999999 for date in date_utils.date_range(start=today, end=date_utils.add(today, days=15), step=relativedelta(days=1)):
def _create_edit_tax_reminder(self, values=None):
    """Create or update the periodic tax-return reminder for the company.

    Ensures a 'tax_report' activity type exists (creating or rewriting it
    with the periodicity-derived delay), then finds or creates the draft
    tax-closing account.move for the period and (re)schedules its reminder
    activity.

    :param values: optional dict of overrides (company_id,
        account_tax_periodicity, account_tax_periodicity_journal_id,
        account_tax_periodicity_next_deadline).
    :returns: the tax-closing account.move record (empty recordset when
        the 'no_create_move' context key is set).
    """
    # Create/Edit activity type if needed
    if self._context.get('no_create_move', False):
        return self.env['account.move']
    if not values:
        values = {}
    company = values.get('company_id', False) or self.company_id or self.env.company
    move_res_model_id = self.env['ir.model'].search(
        [('model', '=', 'account.move')], limit=1).id
    activity_type = company.account_tax_next_activity_type or False
    # delay_count: 1 month between reminders for monthly periodicity,
    # otherwise 3 (quarterly).
    vals = {
        'category': 'tax_report',
        'delay_count': values.get('account_tax_periodicity', company.account_tax_periodicity) == 'monthly' and 1 or 3,
        'delay_unit': 'months',
        'delay_from': 'previous_activity',
        'res_model_id': move_res_model_id,
        'force_next': False,
        'summary': _('Periodic Tax Return')
    }
    if not activity_type:
        vals['name'] = _('Tax Report for company %s') % (company.name, )
        activity_type = self.env['mail.activity.type'].create(vals)
        company.account_tax_next_activity_type = activity_type
    else:
        # Keep the existing activity type in sync with current settings.
        activity_type.write(vals)

    # search for an existing reminder for given journal and change it's date
    account_tax_periodicity_journal_id = values.get(
        'account_tax_periodicity_journal_id', company.account_tax_periodicity_journal_id)
    date = values.get('account_tax_periodicity_next_deadline', False)
    if not date:
        # Default deadline: end of the current quarter plus the company's
        # reminder offset (in days).
        date = date_utils.end_of(
            fields.Date.today(), "quarter") + relativedelta(
                days=company.account_tax_periodicity_reminder_day)
    end_date_last_month = date_utils.end_of(
        date + relativedelta(months=-1), 'month')
    # Look for an existing draft tax-closing move within the current
    # periodicity window that already carries this reminder activity.
    move_id = self.env['account.move'].search(
        [('state', '=', 'draft'), ('is_tax_closing', '=', True),
         ('journal_id', '=', account_tax_periodicity_journal_id.id),
         ('activity_ids.activity_type_id', '=', activity_type.id),
         ('date', '<=', end_date_last_month),
         ('date', '>=', date_utils.start_of(
             end_date_last_month + relativedelta(months=-vals['delay_count']),
             'month'))],
        limit=1)
    # Create empty move
    # Period label: full month name ('LLLL') for monthly periodicity,
    # quarter label ('qqq') otherwise.
    if activity_type.delay_count == 1:
        formatted_date = format_date(self.env, end_date_last_month,
                                     date_format='LLLL')
    else:
        formatted_date = format_date(self.env, end_date_last_month,
                                     date_format='qqq')
    if len(move_id):
        # Reuse the found move: push its reminder deadline and refresh ref.
        for act in move_id.activity_ids:
            if act.activity_type_id == activity_type:
                act.write({'date_deadline': date})
        move_id.date = end_date_last_month
        move_id.ref = _('Tax Return for %s') % (formatted_date, )
    else:
        move_id = self.env['account.move'].create({
            'journal_id': account_tax_periodicity_journal_id.id,
            'date': end_date_last_month,
            'is_tax_closing': True,
            'ref': _('Tax Return for %s') % (formatted_date, )
        })
        # Assign the reminder to the first accounting manager of the
        # company, falling back to the current user.
        advisor_user = self.env['res.users'].search(
            [('company_ids', 'in', (company.id, )),
             ('groups_id', 'in',
              self.env.ref('account.group_account_manager').ids)],
            limit=1, order="id ASC")
        activity_vals = {
            'res_id': move_id.id,
            'res_model_id': move_res_model_id,
            'activity_type_id': activity_type.id,
            'summary': _('TAX Report'),
            'date_deadline': date,
            'automated': True,
            'user_id': advisor_user.id or self.env.user.id
        }
        self.env['mail.activity'].with_context(
            mail_activity_quick_update=True).create(activity_vals)
    return move_id