class sale_order_line(osv.osv):
    _inherit = 'sale.order.line'

    _columns = {
        'event_id': fields.many2one(
            'event.event', 'Event',
            help="Choose an event and it will automatically create a registration for this event."),
        'event_ticket_id': fields.many2one(
            'event.event.ticket', 'Event Ticket',
            help="Choose an event ticket and it will automatically create a registration for this event ticket."),
        # those 2 fields are used for dynamic domains and filled by onchange
        # TDE: really necessary ? ...
        'event_type_id': fields.related('product_id', 'event_type_id', type='many2one', relation="event.type", string="Event Type"),
        'event_ok': fields.related('product_id', 'event_ok', string='event_ok', type='boolean'),
    }

    def _prepare_order_line_invoice_line(self, cr, uid, line, account_id=False, context=None):
        # Append the event name to the invoice line description when the sale
        # order line is linked to an event.
        vals = super(sale_order_line, self)._prepare_order_line_invoice_line(cr, uid, line, account_id=account_id, context=context)
        if line.event_id:
            event_data = self.pool['event.event'].read(cr, uid, line.event_id.id, ['name'], context=context)
            vals['name'] = '%s: %s' % (vals.get('name', ''), event_data['name'])
        return vals

    @api.onchange('product_id')
    def product_id_change_event(self):
        # Mirror the product's event configuration on the line (these values
        # feed the dynamic domains declared in the view).
        product = self.product_id
        if product.event_ok:
            self.update({'event_type_id': product.event_type_id.id, 'event_ok': product.event_ok})
        else:
            self.update({'event_type_id': False, 'event_ok': False})

    @api.multi
    def _update_registrations(self, confirm=True, registration_data=None):
        """Create or update the event registrations attached to these lines.

        ``product_uom_qty`` drives the number of registrations per line:
        existing registrations are confirmed (or pulled back to draft when
        ``confirm`` is False), and missing ones are created, optionally
        seeded with entries popped from ``registration_data``.
        """
        Registration = self.env['event.registration']
        registrations = Registration.search([('sale_order_line_id', 'in', self.ids)])
        for so_line in self.filtered(lambda line: line.event_id):
            line_registrations = registrations.filtered(lambda reg: reg.sale_order_line_id.id == so_line.id)
            if confirm:
                line_registrations.filtered(lambda reg: reg.state != 'open').confirm_registration()
            else:
                line_registrations.filtered(lambda reg: reg.state == 'cancel').do_draft()
            missing = int(so_line.product_uom_qty) - len(line_registrations)
            for _unused in range(missing):
                registration = registration_data.pop() if registration_data else {}
                # TDE CHECK: auto confirmation
                registration['sale_order_line_id'] = so_line
                Registration.with_context(registration_force_draft=True).create(
                    Registration._prepare_attendee_values(registration))
        return True

    def onchange_event_ticket_id(self, cr, uid, ids, event_ticket_id=False, context=None):
        # Default the unit price from the selected ticket (False when unset
        # or when the ticket price is zero, as in the original and/or form).
        if event_ticket_id:
            ticket = self.pool["event.event.ticket"].browse(cr, uid, event_ticket_id, context=context)
            price = ticket.price or False
        else:
            price = False
        return {'value': {'price_unit': price}}
class Bravo(osv.Model):
    # Test model exercising old-API related-field declarations against the
    # new-API ORM (single-step, multi-step, and chained related fields).
    _name = 'test_new_api.bravo'

    _columns = {
        'alpha_id': fields.many2one('test_new_api.alpha'),
        # a related field with a non-trivial path
        'alpha_name': fields.related('alpha_id', 'name', type='char'),
        # a related field with a single field
        'related_alpha_id': fields.related('alpha_id', type='many2one', obj='test_new_api.alpha'),
        # a related field with a single field that is also a related field!
        'related_related_alpha_id': fields.related('related_alpha_id', type='many2one', obj='test_new_api.alpha'),
    }
class payroll_advice_line(osv.osv):
    '''
    Bank Advice Lines
    '''
    _name = 'hr.payroll.advice.line'
    _description = 'Bank Advice Lines'

    def onchange_employee_id(self, cr, uid, ids, employee_id=False, context=None):
        # Fill in the bank account number and IFSC code from the employee.
        if not employee_id:
            return {'value': {}}
        employee = self.pool.get('hr.employee').browse(cr, uid, [employee_id], context=context)[0]
        return {'value': {
            'name': employee.bank_account_id.acc_number,
            'ifsc_code': employee.bank_account_id.bank_bic or '',
        }}

    _columns = {
        'advice_id': fields.many2one('hr.payroll.advice', 'Bank Advice'),
        'name': fields.char('Bank Account No.', size=25, required=True),
        'ifsc_code': fields.char('IFSC Code', size=16),
        'employee_id': fields.many2one('hr.employee', 'Employee', required=True),
        'bysal': fields.float('By Salary', digits_compute=dp.get_precision('Payroll')),
        'debit_credit': fields.char('C/D', size=3, required=False),
        'company_id': fields.related('advice_id', 'company_id', type='many2one', required=False, relation='res.company', string='Company', store=True),
        'ifsc': fields.related('advice_id', 'neft', type='boolean', string='IFSC'),
    }
    _defaults = {
        'debit_credit': 'C',
    }
class wkf_workitem(osv.osv):
    # A work item records the presence of a workflow instance in a given
    # activity node of its workflow graph.
    _table = "wkf_workitem"
    _name = "workflow.workitem"
    _log_access = False  # engine table: skip create/write audit columns
    _rec_name = 'state'

    _columns = {
        'act_id': fields.many2one('workflow.activity', 'Activity', required=True, ondelete="cascade", select=True),
        # the workflow is derived from the activity via a related field
        'wkf_id': fields.related('act_id', 'wkf_id', type='many2one', relation='workflow', string='Workflow'),
        'subflow_id': fields.many2one('workflow.instance', 'Subflow', ondelete="set null", select=True),
        'inst_id': fields.many2one('workflow.instance', 'Instance', required=True, ondelete="cascade", select=True),
        'state': fields.char('Status', select=True),
    }
class purchase_requisition_line(osv.osv):
    _name = "purchase.requisition.line"
    _description = "Purchase Requisition Line"
    _rec_name = 'product_id'

    _columns = {
        'product_id': fields.many2one('product.product', 'Product', domain=[('purchase_ok', '=', True)]),
        'product_uom_id': fields.many2one('product.uom', 'Product Unit of Measure'),
        'product_qty': fields.float('Quantity', digits_compute=dp.get_precision('Product Unit of Measure')),
        'requisition_id': fields.many2one('purchase.requisition', 'Call for Tenders', ondelete='cascade'),
        'company_id': fields.related('requisition_id', 'company_id', type='many2one', relation='res.company', string='Company', store=True, readonly=True),
        'account_analytic_id': fields.many2one('account.analytic.account', 'Analytic Account',),
        'schedule_date': fields.date('Scheduled Date'),
    }

    def onchange_product_id(self, cr, uid, ids, product_id, product_uom_id, parent_analytic_account, analytic_account, parent_date, date, context=None):
        """Refresh UoM/quantity when the product changes; default the
        analytic account and the scheduled date from the parent requisition
        when they are not set on the line.

        :param product_id: id of the newly selected product (falsy to clear)
        :return: onchange dict of changed values
        """
        if not product_id:
            return {'value': {'product_uom_id': ''}}
        product = self.pool.get('product.product').browse(cr, uid, product_id, context=context)
        changes = {'product_uom_id': product.uom_id.id, 'product_qty': 1.0}
        if not analytic_account:
            changes['account_analytic_id'] = parent_analytic_account
        if not date:
            changes['schedule_date'] = parent_date
        return {'value': changes}

    _defaults = {
        'company_id': lambda self, cr, uid, c: self.pool.get('res.company')._company_default_get(cr, uid, 'purchase.requisition.line', context=c),
    }
class wkf_transition(osv.osv):
    # An edge of the workflow graph: condition/signal/group gating the move
    # from a source activity to a destination activity.
    _table = "wkf_transition"
    _name = "workflow.transition"
    _rec_name = 'signal'

    _columns = {
        'trigger_model': fields.char('Trigger Object'),
        'trigger_expr_id': fields.char('Trigger Expression'),
        'sequence': fields.integer('Sequence'),
        'signal': fields.char('Signal (Button Name)',
            help="When the operation of transition comes from a button pressed in the client form, "
                 "signal tests the name of the pressed button. If signal is NULL, no button is necessary to validate this transition."),
        'group_id': fields.many2one('res.groups', 'Group Required',
            help="The group that a user must have to be authorized to validate this transition."),
        'condition': fields.char('Condition', required=True,
            help="Expression to be satisfied if we want the transition done."),
        'act_from': fields.many2one('workflow.activity', 'Source Activity', required=True, select=True, ondelete='cascade',
            help="Source activity. When this activity is over, the condition is tested to determine if we can start the ACT_TO activity."),
        'act_to': fields.many2one('workflow.activity', 'Destination Activity', required=True, select=True, ondelete='cascade',
            help="The destination activity."),
        'wkf_id': fields.related('act_from', 'wkf_id', type='many2one', relation='workflow', string='Workflow', select=True),
    }
    _defaults = {
        'condition': lambda *a: 'True',
        'sequence': 10,
    }
    _order = 'sequence,id'

    def name_get(self, cr, uid, ids, context=None):
        """Display a transition as its signal, or "<from>+<to>" when it has none."""
        result = []
        for line in self.browse(cr, uid, ids, context=context):
            # An unset old-API char field reads back as False (never '');
            # `is False` replaces the PEP8-discouraged `== False` comparison.
            if line.signal is False:
                result.append((line.id, line.act_from.name + '+' + line.act_to.name))
            else:
                result.append((line.id, line.signal))
        return result

    def name_search(self, cr, user, name, args=None, operator='ilike', context=None, limit=100):
        """Match transitions by source or destination activity name."""
        if args is None:
            args = []
        if name:
            # Fix: pass `context` to search() — it was dropped here while
            # every other ORM call in this model forwards it.
            ids = self.search(cr, user, ['|', ('act_from', operator, name), ('act_to', operator, name)] + args,
                              limit=limit, context=context)
            return self.name_get(cr, user, ids, context=context)
        return super(wkf_transition, self).name_search(cr, user, name, args=args,
                                                       operator=operator, context=context, limit=limit)
class ir_attachment(osv.osv):
    _inherit = "ir.attachment"

    _columns = {
        # related for backward compatibility with saas-6: website_url simply
        # mirrors local_url and is flagged deprecated for new code
        'website_url': fields.related("local_url", string="Attachment URL", type='char', deprecated=True),
    }
class stock_picking(osv.osv):
    _inherit = 'stock.picking'

    _columns = {
        # Purchase order reached through the picking's moves.
        # Fix: the original declared relation="many2one" — 'many2one' is a
        # field *type*, not a co-model; `relation` must name the target model
        # at the end of the path (move_lines -> purchase_line_id -> order_id,
        # i.e. purchase.order), and `type` declares the field type itself.
        'purchase_id': fields.related(
            'move_lines', 'purchase_line_id', 'order_id',
            type='many2one', relation='purchase.order',
            string="Purchase Orders", readonly=True),
    }
class delivery_carrier(orm.Model):
    _name = 'delivery.carrier'
    # Extend delivery.carrier with website-publishing support via the mixin.
    _inherit = ['delivery.carrier', 'website.published.mixin']

    _columns = {
        # mirrors the carrier product's sale description for online quotations
        'website_description': fields.related('product_id', 'description_sale', type="text", string='Description for Online Quotations'),
    }

    # carriers start unpublished on the website
    _defaults = {'website_published': False}
class stock_quant(osv.osv):
    _inherit = 'stock.quant'

    def _get_quants(self, cr, uid, ids, context=None):
        # Store trigger helper: given changed record ids (used below for
        # stock.production.lot changes), return the ids of the quants whose
        # stored removal_date must be recomputed.
        # NOTE(review): in old-API store triggers, `self` is bound to the
        # model named in the store dict key, not stock.quant — hence the
        # explicit pool lookup here; confirm against the ORM store machinery.
        return self.pool.get('stock.quant').search(cr, uid, [('lot_id', 'in', ids)], context=context)

    _columns = {
        # Related to the lot's removal date; stored so it can be used in the
        # ORDER BY clause of the FEFO strategy below. Recomputed when the
        # quant's lot changes or when the lot's removal_date changes.
        'removal_date': fields.related('lot_id', 'removal_date', type='datetime', string='Removal Date', store={
            'stock.quant': (lambda self, cr, uid, ids, ctx: ids, ['lot_id'], 20),
            'stock.production.lot': (_get_quants, ['removal_date'], 20),
        }),
    }

    def apply_removal_strategy(self, cr, uid, qty, move, ops=False, domain=None, removal_strategy='fifo', context=None):
        # FEFO (First Expired, First Out): pick quants ordered by removal
        # date, then arrival date, then id; other strategies are delegated
        # to the parent implementation.
        if removal_strategy == 'fefo':
            order = 'removal_date, in_date, id'
            return self._quants_get_order(cr, uid, qty, move, ops=ops, domain=domain, orderby=order, context=context)
        return super(stock_quant, self).apply_removal_strategy(
            cr, uid, qty, move, ops=ops, domain=domain, removal_strategy=removal_strategy, context=context)
class resource_calendar_leaves(osv.osv):
    # A leave period (holiday/absence) attached to a working-time calendar,
    # optionally restricted to a single resource.
    _name = "resource.calendar.leaves"
    _description = "Leave Detail"

    _columns = {
        'name': fields.char("Name"),
        'company_id': fields.related('calendar_id', 'company_id', type='many2one', relation='res.company', string="Company", store=True, readonly=True),
        'calendar_id': fields.many2one("resource.calendar", "Working Time"),
        'date_from': fields.datetime('Start Date', required=True),
        'date_to': fields.datetime('End Date', required=True),
        'resource_id': fields.many2one(
            "resource.resource", "Resource",
            help="If empty, this is a generic holiday for the company. If a resource is set, the holiday/leave is only for this resource"),
    }

    def check_dates(self, cr, uid, ids, context=None):
        """Constraint helper: a leave must not end before it starts."""
        for leave in self.browse(cr, uid, ids, context=context):
            if leave.date_from and leave.date_to and leave.date_from > leave.date_to:
                return False
        return True

    _constraints = [
        # Fix: user-facing message typo "lower then" -> "lower than".
        (check_dates, 'Error! leave start-date must be lower than leave end-date.', ['date_from', 'date_to'])
    ]

    def onchange_resource(self, cr, uid, ids, resource, context=None):
        """Default the working-time calendar from the selected resource;
        clear it when the resource is removed."""
        if resource:
            resource_record = self.pool.get('resource.resource').browse(cr, uid, resource, context=context)
            return {'value': {'calendar_id': resource_record.calendar_id.id}}
        return {'value': {'calendar_id': []}}
class project_task(osv.osv):
    _name = "project.task"
    _inherit = "project.task"

    _columns = {
        'procurement_id': fields.many2one('procurement.order', 'Procurement', ondelete='set null'),
        'sale_line_id': fields.related('procurement_id', 'sale_line_id', type='many2one', relation='sale.order.line', store=True, string='Sales Order Line'),
    }

    def _validate_subflows(self, cr, uid, ids, context=None):
        # Re-run the procurement check (as superuser) for every task that
        # originates from a procurement order.
        procurement_obj = self.pool.get("procurement.order")
        for record in self.browse(cr, uid, ids, context=context):
            if not record.procurement_id:
                continue
            procurement_obj.check(cr, SUPERUSER_ID, [record.procurement_id.id], context=context)

    def write(self, cr, uid, ids, values, context=None):
        """ When closing tasks, validate subflows. """
        result = super(project_task, self).write(cr, uid, ids, values, context=context)
        stage_id = values.get('stage_id')
        if stage_id:
            stage = self.pool.get('project.task.type').browse(cr, uid, stage_id, context=context)
            if stage.closed:
                self._validate_subflows(cr, uid, ids, context=context)
        return result

    def unlink(self, cr, uid, ids, context=None):
        # Tasks tied to a sale order line must be archived, never deleted.
        context = context or {}
        for record in self.browse(cr, uid, ids, context=context):
            if record.sale_line_id:
                raise UserError(
                    _('You cannot delete a task related to a Sale Order. You can only archive this task.'))
        return super(project_task, self).unlink(cr, uid, ids, context)
class hr_grant_badge_wizard(osv.TransientModel):
    _name = 'gamification.badge.user.wizard'
    _inherit = ['gamification.badge.user.wizard']

    _columns = {
        'employee_id': fields.many2one("hr.employee", string='Employee', required=True),
        'user_id': fields.related("employee_id", "user_id", type="many2one", relation="res.users", store=True, string='User'),
    }

    def action_grant_badge(self, cr, uid, ids, context=None):
        """Wizard action for sending a badge to a chosen employee"""
        context = context or {}
        badge_user_obj = self.pool.get('gamification.badge.user')
        for wiz in self.browse(cr, uid, ids, context=context):
            # The grant is only valid for employees that have a linked user,
            # and a user may not award a badge to himself.
            if not wiz.user_id:
                raise UserError(
                    _('You can send badges only to employees linked to a user.'))
            if wiz.user_id.id == uid:
                raise UserError(_('You can not send a badge to yourself'))
            badge_user = badge_user_obj.create(cr, uid, {
                'user_id': wiz.user_id.id,
                'sender_id': uid,
                'badge_id': wiz.badge_id.id,
                'employee_id': wiz.employee_id.id,
                'comment': wiz.comment,
            }, context=context)
            result = badge_user_obj._send_badge(cr, uid, [badge_user], context=context)
        return result
class SaleOrderLine(osv.Model):
    _inherit = 'sale.order.line'

    _columns = {
        'sale_layout_cat_id': fields.many2one('sale_layout.category', string='Section'),
        # Store is intentionally set in order to keep the "historic" order.
        'categ_sequence': fields.related('sale_layout_cat_id', 'sequence', type='integer', string='Layout Sequence', store=True),
    }
    _order = 'order_id, categ_sequence, sale_layout_cat_id, sequence, id'

    def _prepare_order_line_invoice_line(self, cr, uid, line, account_id=False, context=None):
        """Save the layout when converting to an invoice line."""
        vals = super(SaleOrderLine, self)._prepare_order_line_invoice_line(
            cr, uid, line, account_id=account_id, context=context)
        if line.sale_layout_cat_id:
            vals['sale_layout_cat_id'] = line.sale_layout_cat_id.id
        if line.categ_sequence:
            vals['categ_sequence'] = line.categ_sequence
        return vals

    @api.multi
    def _prepare_invoice_line(self, qty):
        """Propagate the layout category onto the generated invoice line.

        :param qty: float quantity to invoice
        """
        vals = super(SaleOrderLine, self)._prepare_invoice_line(qty)
        if self.sale_layout_cat_id:
            vals['sale_layout_cat_id'] = self.sale_layout_cat_id.id
        return vals
class gamification_badge_user(osv.Model):
    """User having received a badge"""
    _name = 'gamification.badge.user'
    _description = 'Gamification user badge'
    _order = "create_date desc"
    _rec_name = "badge_name"

    _columns = {
        'user_id': fields.many2one('res.users', string="User", required=True, ondelete="cascade"),
        'sender_id': fields.many2one('res.users', string="Sender", help="The user who has send the badge"),
        'badge_id': fields.many2one('gamification.badge', string='Badge', required=True, ondelete="cascade"),
        'challenge_id': fields.many2one('gamification.challenge', string='Challenge originating', help="If this badge was rewarded through a challenge"),
        'comment': fields.text('Comment'),
        # display name of the link record (see _rec_name above)
        'badge_name': fields.related('badge_id', 'name', type="char", string="Badge Name"),
        'create_date': fields.datetime('Created', readonly=True),
        'create_uid': fields.many2one('res.users', string='Creator', readonly=True),
    }

    def _send_badge(self, cr, uid, ids, context=None):
        """Send a notification to a user for receiving a badge

        Does not verify constrains on badge granting.
        The users are added to the owner_ids (create badge_user if needed)
        The stats counters are incremented
        :param ids: list(int) of badge users that will receive the badge
        """
        res = True
        temp_obj = self.pool.get('mail.template')
        user_obj = self.pool.get('res.users')
        # resolve the XML id of the notification mail template once
        template_id = self.pool['ir.model.data'].get_object_reference(cr, uid, 'gamification', 'email_template_badge_received')[1]
        for badge_user in self.browse(cr, uid, ids, context=context):
            # render the template against this badge.user record and post it
            # on the recipient's wall
            template = temp_obj.get_email_template(cr, uid, template_id, badge_user.id, context=context)
            body_html = temp_obj.render_template(cr, uid, template.body_html, 'gamification.badge.user', badge_user.id, context=template._context)
            # NOTE(review): res is overwritten each iteration, so only the
            # last recipient's message_post result is returned — confirm
            # callers only rely on truthiness.
            res = user_obj.message_post(
                cr, uid, badge_user.user_id.id,
                body=body_html,
                subtype='gamification.mt_badge_granted',
                partner_ids=[badge_user.user_id.partner_id.id],
                context=context)
        return res

    def create(self, cr, uid, vals, context=None):
        # Enforce the granting rules before creating the badge/user link.
        self.pool.get('gamification.badge').check_granting(cr, uid, badge_id=vals.get('badge_id'), context=context)
        return super(gamification_badge_user, self).create(cr, uid, vals, context=context)
class stock_reportcustomer_invoices_line(osv.osv):
    """Invoice entries ("Facturas") shown on the customer stock report."""
    _name = 'stock.reportcustomer.invoices.line'
    _description = 'Facturas'
    _rec_name = 'invoice_id'

    _columns = {
        # Cleanup: dropped the redundant required=False (it is the default).
        'invoice_id': fields.many2one('account.invoice', 'Factura'),
        # mirror of the invoice total, for display on the report line
        'amount_total': fields.related('invoice_id', 'amount_total', string="Total", type="float", digits=(14, 2)),
        'reportcustomer_id': fields.many2one('stock.reportcustomer.model', 'ID Ref'),
    }
    # Cleanup: removed the empty `_defaults = {}` — it added nothing over the
    # osv base class default.
class product_pricelist_item(osv.osv):
    # One rule of a pricelist: matching criteria (product / variant /
    # category, minimum quantity, date range) plus the price computation
    # (fixed, percentage, or formula) to apply when the rule matches.
    _name = "product.pricelist.item"
    _description = "Pricelist item"
    _order = "applied_on, min_quantity desc"

    def _check_recursion(self, cr, uid, ids, context=None):
        # Constraint helper: a 'pricelist'-based rule must not use its own
        # pricelist as the base, which would recurse during price computation.
        for obj_list in self.browse(cr, uid, ids, context=context):
            if obj_list.base == 'pricelist':
                main_pricelist = obj_list.pricelist_id.id
                other_pricelist = obj_list.base_pricelist_id.id
                if main_pricelist == other_pricelist:
                    return False
        return True

    def _check_margin(self, cr, uid, ids, context=None):
        # Constraint helper: when both margins are set, min must not exceed max.
        for item in self.browse(cr, uid, ids, context=context):
            if item.price_max_margin and item.price_min_margin and (item.price_min_margin > item.price_max_margin):
                return False
        return True

    _columns = {
        'product_tmpl_id': fields.many2one('product.template', 'Product Template', ondelete='cascade',
            help="Specify a template if this rule only applies to one product template. Keep empty otherwise."),
        'product_id': fields.many2one('product.product', 'Product', ondelete='cascade',
            help="Specify a product if this rule only applies to one product. Keep empty otherwise."),
        'categ_id': fields.many2one('product.category', 'Product Category', ondelete='cascade',
            help="Specify a product category if this rule only applies to products belonging to this category or its children categories. Keep empty otherwise."),
        'min_quantity': fields.integer('Min. Quantity',
            help="For the rule to apply, bought/sold quantity must be greater "
                 "than or equal to the minimum quantity specified in this field.\n"
                 "Expressed in the default unit of measure of the product."),
        # note: the numeric prefixes ('3_', '2_', ...) make the selection sort
        # from most specific to least specific under _order above
        'applied_on': fields.selection([('3_global', 'Global'), ('2_product_category', ' Product Category'), ('1_product', 'Product'), ('0_product_variant', 'Product Variant')],
            string="Apply On", required=True,
            help='Pricelist Item applicable on selected option'),
        'sequence': fields.integer('Sequence', required=True,
            help="Gives the order in which the pricelist items will be checked. "
                 "The evaluation gives highest priority to lowest sequence and stops as soon as a matching item is found."),
        'base': fields.selection([('list_price', 'Public Price'), ('standard_price', 'Cost'), ('pricelist', 'Other Pricelist')],
            string="Based on", required=True,
            help='Base price for computation. \n Public Price: The base price will be the Sale/public Price. \n Cost Price : The base price will be the cost price. \n Other Pricelist : Computation of the base price based on another Pricelist.'),
        'base_pricelist_id': fields.many2one('product.pricelist', 'Other Pricelist'),
        'pricelist_id': fields.many2one('product.pricelist', 'Pricelist'),
        'price_surcharge': fields.float('Price Surcharge', digits_compute=dp.get_precision('Product Price'),
            help='Specify the fixed amount to add or substract(if negative) to the amount calculated with the discount.'),
        'price_discount': fields.float('Price Discount', digits=(16, 2)),
        'price_round': fields.float('Price Rounding', digits_compute=dp.get_precision('Product Price'),
            help="Sets the price so that it is a multiple of this value.\n"
                 "Rounding is applied after the discount and before the surcharge.\n"
                 "To have prices that end in 9.99, set rounding 10, surcharge -0.01"),
        'price_min_margin': fields.float('Min. Price Margin', digits_compute=dp.get_precision('Product Price'),
            help='Specify the minimum amount of margin over the base price.'),
        'price_max_margin': fields.float('Max. Price Margin', digits_compute=dp.get_precision('Product Price'),
            help='Specify the maximum amount of margin over the base price.'),
        'company_id': fields.related('pricelist_id', 'company_id', type='many2one', readonly=True, relation='res.company', string='Company', store=True),
        'currency_id': fields.related('pricelist_id', 'currency_id', type='many2one', readonly=True, relation='res.currency', string='Currency', store=True),
        'date_start': fields.date('Start Date', help="Starting date for the pricelist item validation"),
        'date_end': fields.date('End Date', help="Ending valid for the pricelist item validation"),
        'compute_price': fields.selection([('fixed', 'Fix Price'), ('percentage', 'Percentage (discount)'), ('formula', 'Formula')], select=True, default='fixed'),
        'fixed_price': fields.float('Fixed Price'),
        'percent_price': fields.float('Percentage Price'),
    }

    _defaults = {
        'base': 'list_price',
        'min_quantity': 1,
        'sequence': 5,
        'price_discount': 0,
        'applied_on': '3_global',
    }
    _constraints = [
        (_check_recursion, 'Error! You cannot assign the Main Pricelist as Other Pricelist in PriceList Item!', ['base_pricelist_id']),
        (_check_margin, 'Error! The minimum margin should be lower than the maximum margin.', ['price_min_margin', 'price_max_margin'])
    ]
class task(osv.osv):
    _inherit = "project.task"

    # Compute: effective_hours, total_hours, progress
    def _hours_get(self, cr, uid, ids, field_names, args, context=None):
        # Aggregate timesheet lines (account.analytic.line) per task and
        # derive effective / remaining / total / delay hours and progress (%).
        # NOTE(review): read_group only yields groups for tasks that have at
        # least one analytic line, so tasks without timesheets get no entry
        # in `res` — confirm the function-field machinery tolerates that.
        res = {}
        tasks_data = self.pool['account.analytic.line'].read_group(cr, uid, [('task_id', 'in', ids)], ['task_id', 'unit_amount'], ['task_id'], context=context)
        for data in tasks_data:
            task = self.browse(cr, uid, data['task_id'][0], context=context)
            res[data['task_id'][0]] = {'effective_hours': data.get('unit_amount', 0.0), 'remaining_hours': task.planned_hours - data.get('unit_amount', 0.0)}
            res[data['task_id'][0]]['total_hours'] = res[data['task_id'][0]]['remaining_hours'] + data.get('unit_amount', 0.0)
            res[data['task_id'][0]]['delay_hours'] = res[data['task_id'][0]]['total_hours'] - task.planned_hours
            res[data['task_id'][0]]['progress'] = 0.0
            # progress is capped at 99.99 so an "almost done" task is visible
            if (task.planned_hours > 0.0 and data.get('unit_amount', 0.0)):
                res[data['task_id'][0]]['progress'] = round(min(100.0 * data.get('unit_amount', 0.0) / task.planned_hours, 99.99), 2)
            # TDE CHECK: if task.state in ('done','cancelled'):
            if task.stage_id and task.stage_id.fold:
                res[data['task_id'][0]]['progress'] = 100.0
        return res

    def _get_task(self, cr, uid, id, context=None):
        # Store trigger for account.analytic.line: map changed analytic lines
        # to the tasks whose hour totals must be recomputed.
        res = []
        for line in self.pool.get('account.analytic.line').search_read(cr, uid, [('task_id', '!=', False), ('id', 'in', id)], context=context):
            res.append(line['task_id'][0])
        return res

    def _get_total_hours(self):
        # New-API hook: add the timesheeted hours on top of the base total.
        return super(task, self)._get_total_hours() + self.effective_hours

    _columns = {
        'remaining_hours': fields.function(_hours_get, string='Remaining Hours', multi='line_id',
            help="Total remaining time, can be re-estimated periodically by the assignee of the task.",
            store={
                'project.task': (lambda self, cr, uid, ids, c={}: ids, ['timesheet_ids', 'remaining_hours', 'planned_hours'], 10),
                'account.analytic.line': (_get_task, ['task_id', 'unit_amount'], 10),
            }),
        'effective_hours': fields.function(_hours_get, string='Hours Spent', multi='line_id',
            help="Computed using the sum of the task work done.",
            store={
                'project.task': (lambda self, cr, uid, ids, c={}: ids, ['timesheet_ids', 'remaining_hours', 'planned_hours'], 10),
                'account.analytic.line': (_get_task, ['task_id', 'unit_amount'], 10),
            }),
        'total_hours': fields.function(_hours_get, string='Total', multi='line_id',
            help="Computed as: Time Spent + Remaining Time.",
            store={
                'project.task': (lambda self, cr, uid, ids, c={}: ids, ['timesheet_ids', 'remaining_hours', 'planned_hours'], 10),
                'account.analytic.line': (_get_task, ['task_id', 'unit_amount'], 10),
            }),
        'progress': fields.function(_hours_get, string='Working Time Progress (%)', multi='line_id', group_operator="avg",
            help="If the task has a progress of 99.99% you should close the task if it's finished or reevaluate the time",
            store={
                'project.task': (lambda self, cr, uid, ids, c={}: ids, ['timesheet_ids', 'remaining_hours', 'planned_hours', 'state', 'stage_id'], 10),
                'account.analytic.line': (_get_task, ['task_id', 'unit_amount'], 10),
            }),
        'delay_hours': fields.function(_hours_get, string='Delay Hours', multi='line_id',
            help="Computed as difference between planned hours by the project manager and the total hours of the task.",
            store={
                'project.task': (lambda self, cr, uid, ids, c={}: ids, ['timesheet_ids', 'remaining_hours', 'planned_hours'], 10),
                'account.analytic.line': (_get_task, ['task_id', 'unit_amount'], 10),
            }),
        'timesheet_ids': fields.one2many('account.analytic.line', 'task_id', 'Timesheets'),
        'analytic_account_id': fields.related('project_id', 'analytic_account_id', type='many2one', relation='account.analytic.account', string='Analytic Account', store=True),
    }

    _defaults = {
        'progress': 0,
    }

    def _prepare_delegate_values(self, cr, uid, ids, delegate_data, context=None):
        # When delegating, keep the hours already spent in the planned total.
        vals = super(task, self)._prepare_delegate_values(cr, uid, ids, delegate_data, context)
        for task in self.browse(cr, uid, ids, context=context):
            vals[task.id]['planned_hours'] += task.effective_hours
        return vals

    def onchange_project(self, cr, uid, ids, project_id, context=None):
        result = super(task, self).onchange_project(cr, uid, ids, project_id, context=context)
        if not project_id:
            return result
        if 'value' not in result:
            result['value'] = {}
        # NOTE(review): `project` is browsed but never used — looks like dead
        # code left over from a removed onchange value; confirm before removing.
        project = self.pool['project.project'].browse(cr, uid, project_id, context=context)
        return result
class make_procurement(osv.osv_memory):
    # Wizard: create and confirm a procurement order for a chosen
    # product / quantity / warehouse, then open it in a form view.
    _name = 'make.procurement'
    _description = 'Make Procurements'

    def onchange_product_id(self, cr, uid, ids, prod_id, context=None):
        # Sync UoM, template and variant count with the chosen product.
        product = self.pool.get('product.product').browse(cr, uid, prod_id, context=context)
        return {'value': {
            'uom_id': product.uom_id.id,
            'product_tmpl_id': product.product_tmpl_id.id,
            'product_variant_count': product.product_tmpl_id.product_variant_count
        }}

    _columns = {
        'qty': fields.float('Quantity', digits=(16, 2), required=True),
        'res_model': fields.char('Res Model'),
        'product_id': fields.many2one('product.product', 'Product', required=True),
        'product_tmpl_id': fields.many2one('product.template', 'Template', required=True),
        'product_variant_count': fields.related('product_tmpl_id', 'product_variant_count', type='integer', string='Variant Number'),
        'uom_id': fields.many2one('product.uom', 'Unit of Measure', required=True),
        'warehouse_id': fields.many2one('stock.warehouse', 'Warehouse', required=True),
        'date_planned': fields.date('Planned Date', required=True),
        'route_ids': fields.many2many('stock.location.route', string='Preferred Routes'),
    }

    _defaults = {
        'date_planned': fields.date.context_today,
        'qty': lambda *args: 1.0,
    }

    def make_procurement(self, cr, uid, ids, context=None):
        """ Creates procurement order for selected product. """
        user = self.pool.get('res.users').browse(cr, uid, uid, context=context).login
        wh_obj = self.pool.get('stock.warehouse')
        procurement_obj = self.pool.get('procurement.order')
        data_obj = self.pool.get('ir.model.data')
        # NOTE(review): `procure_id` from the last iteration is what the
        # returned action opens; the wizard is normally run on one record.
        for proc in self.browse(cr, uid, ids, context=context):
            wh = wh_obj.browse(cr, uid, proc.warehouse_id.id, context=context)
            procure_id = procurement_obj.create(cr, uid, {
                'name': 'INT: ' + str(user),
                'date_planned': proc.date_planned,
                'product_id': proc.product_id.id,
                'product_qty': proc.qty,
                'product_uom': proc.uom_id.id,
                'warehouse_id': proc.warehouse_id.id,
                'location_id': wh.lot_stock_id.id,
                'company_id': wh.company_id.id,
                'route_ids': [(6, 0, proc.route_ids.ids)],
            })
            # confirm the procurement through its workflow
            procurement_obj.signal_workflow(cr, uid, [procure_id], 'button_confirm')
        # resolve tree/form views to display the created procurement
        id2 = data_obj._get_id(cr, uid, 'procurement', 'procurement_tree_view')
        id3 = data_obj._get_id(cr, uid, 'procurement', 'procurement_form_view')
        if id2:
            id2 = data_obj.browse(cr, uid, id2, context=context).res_id
        if id3:
            id3 = data_obj.browse(cr, uid, id3, context=context).res_id
        return {
            'view_type': 'form',
            'view_mode': 'tree,form',
            'res_model': 'procurement.order',
            'res_id': procure_id,
            'views': [(id3, 'form'), (id2, 'tree')],
            'type': 'ir.actions.act_window',
        }

    def default_get(self, cr, uid, fields, context=None):
        # Default the product (and its UoM) from the active record; when the
        # wizard is launched from a template, use its first variant. Also
        # default the warehouse to the first one found.
        if context is None:
            context = {}
        record_id = context.get('active_id')
        if context.get('active_model') == 'product.template':
            product_ids = self.pool.get('product.product').search(cr, uid, [('product_tmpl_id', '=', context.get('active_id'))], context=context)
            if product_ids:
                record_id = product_ids[0]
        res = super(make_procurement, self).default_get(cr, uid, fields, context=context)
        if record_id and 'product_id' in fields:
            proxy = self.pool.get('product.product')
            product_ids = proxy.search(cr, uid, [('id', '=', record_id)], context=context, limit=1)
            if product_ids:
                product_id = product_ids[0]
                product = self.pool.get('product.product').browse(cr, uid, product_id, context=context)
                res['product_id'] = product.id
                res['uom_id'] = product.uom_id.id
        if 'warehouse_id' in fields:
            warehouse_id = self.pool.get('stock.warehouse').search(cr, uid, [], context=context)
            res['warehouse_id'] = warehouse_id[0] if warehouse_id else False
        return res

    def create(self, cr, uid, values, context=None):
        # Apply the product onchange server-side so records created through
        # RPC get consistent uom/template/variant-count values.
        if values.get('product_id'):
            values.update(self.onchange_product_id(cr, uid, None, values['product_id'], context=context)['value'])
        return super(make_procurement, self).create(cr, uid, values, context=context)
class product_bom(osv.osv):
    _inherit = 'mrp.bom'

    _columns = {
        # Mirror of the product template's cost price on the BoM (not stored).
        # Fix: dropped the spurious relation="product.product" argument —
        # `relation` only applies to relational field types, never to a
        # type="float" related field, and the value did not even match the
        # path target model (product_tmpl_id is a product.template).
        'standard_price': fields.related('product_tmpl_id', 'standard_price', type="float", string="Standard Price", store=False),
    }
class base_action_rule(osv.osv):
    """ Base Action Rules

    A rule watches one model for events (creation, update, deletion, form
    onchange, or a timed condition checked by a cron) and, when its filters
    match, performs actions: set the responsible, add followers, run server
    actions. Rules are enforced by monkey-patching the target models' CRUD
    methods in :meth:`_register_hook`.
    """
    _name = 'base.action.rule'
    _description = 'Action Rules'
    _order = 'sequence'

    _columns = {
        'name': fields.char('Rule Name', required=True),
        'model_id': fields.many2one(
            'ir.model', 'Related Document Model', required=True,
            domain=[('transient', '=', False)]),
        'model': fields.related('model_id', 'model', type="char", string='Model'),
        'create_date': fields.datetime('Create Date', readonly=1),
        'active': fields.boolean(
            'Active',
            help="When unchecked, the rule is hidden and will not be executed."),
        'sequence': fields.integer(
            'Sequence',
            help="Gives the sequence order when displaying a list of rules."),
        'kind': fields.selection(
            [('on_create', 'On Creation'),
             ('on_write', 'On Update'),
             ('on_create_or_write', 'On Creation & Update'),
             ('on_unlink', 'On Deletion'),
             ('on_change', 'Based on Form Modification'),
             ('on_time', 'Based on Timed Condition')],
            string='When to Run'),
        'trg_date_id': fields.many2one(
            'ir.model.fields', string='Trigger Date',
            help="When should the condition be triggered. If present, will be checked by the scheduler. If empty, will be checked at creation and update.",
            domain="[('model_id', '=', model_id), ('ttype', 'in', ('date', 'datetime'))]"),
        'trg_date_range': fields.integer(
            'Delay after trigger date',
            help="Delay after the trigger date."
                 "You can put a negative number if you need a delay before the"
                 "trigger date, like sending a reminder 15 minutes before a meeting."),
        'trg_date_range_type': fields.selection(
            [('minutes', 'Minutes'), ('hour', 'Hours'),
             ('day', 'Days'), ('month', 'Months')], 'Delay type'),
        'trg_date_calendar_id': fields.many2one(
            'resource.calendar', 'Use Calendar',
            help='When calculating a day-based timed condition, it is possible to use a calendar to compute the date based on working days.',
            ondelete='set null',
        ),
        'act_user_id': fields.many2one('res.users', 'Set Responsible'),
        'act_followers': fields.many2many("res.partner", string="Add Followers"),
        'server_action_ids': fields.many2many(
            'ir.actions.server', string='Server Actions',
            domain="[('model_id', '=', model_id)]",
            help="Examples: email reminders, call object service, etc."),
        'filter_pre_id': fields.many2one(
            'ir.filters', string='Before Update Filter',
            ondelete='restrict',
            domain="[('model_id', '=', model_id.model)]",
            help="If present, this condition must be satisfied before the update of the record."),
        'filter_pre_domain': fields.char(
            string='Before Update Domain',
            help="If present, this condition must be satisfied before the update of the record."),
        'filter_id': fields.many2one(
            'ir.filters', string='Filter',
            ondelete='restrict',
            domain="[('model_id', '=', model_id.model)]",
            help="If present, this condition must be satisfied before executing the action rule."),
        'filter_domain': fields.char(
            string='Domain',
            help="If present, this condition must be satisfied before executing the action rule."),
        'last_run': fields.datetime('Last Run', readonly=1, copy=False),
        'on_change_fields': fields.char(
            string="On Change Fields Trigger",
            help="Comma-separated list of field names that triggers the onchange."),
    }

    # which fields have an impact on the registry: changing any of them
    # requires the patched models to be re-patched (see _update_registry)
    CRITICAL_FIELDS = ['model_id', 'active', 'kind', 'on_change_fields']

    _defaults = {
        'active': True,
        'trg_date_range_type': 'day',
    }

    def onchange_kind(self, cr, uid, ids, kind, context=None):
        """ Clear the fields that are meaningless for the chosen trigger kind. """
        clear_fields = []
        if kind in ['on_create', 'on_create_or_write', 'on_unlink']:
            clear_fields = ['filter_pre_id', 'filter_pre_domain', 'trg_date_id', 'trg_date_range', 'trg_date_range_type']
        elif kind in ['on_write', 'on_create_or_write']:
            # NOTE: 'on_create_or_write' is already consumed by the branch
            # above, so only 'on_write' can actually reach this branch.
            clear_fields = ['trg_date_id', 'trg_date_range', 'trg_date_range_type']
        elif kind == 'on_time':
            clear_fields = ['filter_pre_id', 'filter_pre_domain']
        return {'value': dict.fromkeys(clear_fields, False)}

    def onchange_filter_pre_id(self, cr, uid, ids, filter_pre_id, context=None):
        """ Mirror the selected pre-filter's domain into filter_pre_domain. """
        ir_filter = self.pool['ir.filters'].browse(cr, uid, filter_pre_id, context=context)
        return {'value': {'filter_pre_domain': ir_filter.domain}}

    def onchange_filter_id(self, cr, uid, ids, filter_id, context=None):
        """ Mirror the selected filter's domain into filter_domain. """
        ir_filter = self.pool['ir.filters'].browse(cr, uid, filter_id, context=context)
        return {'value': {'filter_domain': ir_filter.domain}}

    @ecore.api.model
    def _get_actions(self, records, kinds):
        """ Return the actions of the given kinds for records' model. The
            returned actions' context contain an object to manage processing.
        """
        if '__action_done' not in self._context:
            self = self.with_context(__action_done={})
        domain = [('model', '=', records._name), ('kind', 'in', kinds)]
        actions = self.with_context(active_test=True).search(domain)
        return actions.with_env(self.env)

    @ecore.api.model
    def _get_eval_context(self):
        """ Prepare the context used when evaluating python code

            :returns: dict -- evaluation context given to (safe_)eval
        """
        return {
            'datetime': DT,
            'dateutil': dateutil,
            'time': time,
            'uid': self.env.uid,
            'user': self.env.user,
        }

    @ecore.api.model
    def _filter_pre(self, records):
        """ Filter the records that satisfy the precondition of action ``self``. """
        if self.filter_pre_id and records:
            # stored ir.filters take precedence over the inline domain
            eval_context = self._get_eval_context()
            domain = [('id', 'in', records.ids)] + eval(self.filter_pre_id.domain, eval_context)
            ctx = eval(self.filter_pre_id.context)
            return records.with_context(**ctx).search(domain).with_env(records.env)
        elif self.filter_pre_domain and records:
            eval_context = self._get_eval_context()
            domain = [('id', 'in', records.ids)] + eval(self.filter_pre_domain, eval_context)
            return records.search(domain)
        else:
            return records

    @ecore.api.model
    def _filter_post(self, records):
        """ Filter the records that satisfy the postcondition of action ``self``. """
        if self.filter_id and records:
            # stored ir.filters take precedence over the inline domain
            eval_context = self._get_eval_context()
            domain = [('id', 'in', records.ids)] + eval(self.filter_id.domain, eval_context)
            ctx = eval(self.filter_id.context)
            return records.with_context(**ctx).search(domain).with_env(records.env)
        elif self.filter_domain and records:
            eval_context = self._get_eval_context()
            domain = [('id', 'in', records.ids)] + eval(self.filter_domain, eval_context)
            return records.search(domain)
        else:
            return records

    @ecore.api.multi
    def _process(self, records):
        """ Process action ``self`` on the ``records`` that have not been done yet. """
        # filter out the records on which self has already been done, then mark
        # remaining records as done (to avoid recursive processing)
        action_done = self._context['__action_done']
        records -= action_done.setdefault(self, records.browse())
        if not records:
            return
        action_done[self] |= records

        # modify records
        values = {}
        if 'date_action_last' in records._fields:
            values['date_action_last'] = ecore.fields.Datetime.now()
        if self.act_user_id and 'user_id' in records._fields:
            values['user_id'] = self.act_user_id.id
        if values:
            records.write(values)

        # subscribe followers
        if self.act_followers and hasattr(records, 'message_subscribe'):
            records.message_subscribe(self.act_followers.ids)

        # execute server actions, one record at a time
        if self.server_action_ids:
            for record in records:
                ctx = {'active_model': record._name, 'active_ids': record.ids, 'active_id': record.id}
                self.server_action_ids.with_context(**ctx).run()

    def _register_hook(self, cr):
        """ Patch models that should trigger action rules based on creation,
            modification, deletion of records and form onchanges.
        """
        #
        # Note: the patched methods must be defined inside another function,
        # otherwise their closure may be wrong. For instance, the function
        # create refers to the outer variable 'create', which you expect to be
        # bound to create itself. But that expectation is wrong if create is
        # defined inside a loop; in that case, the variable 'create' is bound to
        # the last function defined by the loop.
        #

        def make_create():
            """ Instanciate a create method that processes action rules. """
            @ecore.api.model
            def create(self, vals):
                # retrieve the action rules to possibly execute
                actions = self.env['base.action.rule']._get_actions(self, ['on_create', 'on_create_or_write'])
                # call original method
                record = create.origin(self.with_env(actions.env), vals)
                # check postconditions, and execute actions on the records that satisfy them
                for action in actions.with_context(old_values=None):
                    action._process(action._filter_post(record))
                return record.with_env(self.env)
            return create

        def make_write():
            """ Instanciate a _write method that processes action rules. """
            #
            # Note: we patch method _write() instead of write() in order to
            # catch updates made by field recomputations.
            #
            @ecore.api.multi
            def _write(self, vals):
                # retrieve the action rules to possibly execute
                actions = self.env['base.action.rule']._get_actions(self, ['on_write', 'on_create_or_write'])
                records = self.with_env(actions.env)
                # check preconditions on records
                pre = {action: action._filter_pre(records) for action in actions}
                # read old values before the update
                old_values = {
                    old_vals.pop('id'): old_vals
                    for old_vals in records.read(list(vals))
                }
                # call original method
                _write.origin(records, vals)
                # check postconditions, and execute actions on the records that satisfy them
                for action in actions.with_context(old_values=old_values):
                    action._process(action._filter_post(pre[action]))
                return True
            return _write

        def make_unlink():
            """ Instanciate an unlink method that processes action rules. """
            @ecore.api.multi
            def unlink(self, **kwargs):
                # retrieve the action rules to possibly execute
                actions = self.env['base.action.rule']._get_actions(self, ['on_unlink'])
                records = self.with_env(actions.env)
                # check conditions, and execute actions on the records that satisfy them
                for action in actions:
                    # BUGFIX: the original referenced the undefined name
                    # `pre[action]` (copy-pasted from make_write), raising
                    # NameError on every patched unlink. Deletions have no
                    # precondition: filter the records themselves.
                    action._process(action._filter_post(records))
                # call original method
                return unlink.origin(self, **kwargs)
            return unlink

        def make_onchange(action_rule_id):
            """ Instanciate an onchange method for the given action rule. """
            def base_action_rule_onchange(self):
                action_rule = self.env['base.action.rule'].browse(action_rule_id)
                server_actions = action_rule.server_action_ids.with_context(active_model=self._name, onchange_self=self)
                result = {}
                for server_action in server_actions:
                    res = server_action.run()
                    if res and 'value' in res:
                        res['value'].pop('id', None)
                        self.update(self._convert_to_cache(res['value'], validate=False))
                    if res and 'domain' in res:
                        result.setdefault('domain', {}).update(res['domain'])
                    if res and 'warning' in res:
                        result['warning'] = res['warning']
                return result
            return base_action_rule_onchange

        patched_models = defaultdict(set)
        def patch(model, name, method):
            """ Patch method `name` on `model`, unless it has been patched already. """
            if model not in patched_models[name]:
                patched_models[name].add(model)
                model._patch_method(name, method)

        # retrieve all actions, and patch their corresponding model;
        # 'on_time' rules are not patched here: they are handled by the cron
        # through _check()
        ids = self.search(cr, SUPERUSER_ID, [])
        for action_rule in self.browse(cr, SUPERUSER_ID, ids):
            model = action_rule.model_id.model
            model_obj = self.pool.get(model)
            if not model_obj:
                continue
            if action_rule.kind == 'on_create':
                patch(model_obj, 'create', make_create())
            elif action_rule.kind == 'on_create_or_write':
                patch(model_obj, 'create', make_create())
                patch(model_obj, '_write', make_write())
            elif action_rule.kind == 'on_write':
                patch(model_obj, '_write', make_write())
            elif action_rule.kind == 'on_unlink':
                patch(model_obj, 'unlink', make_unlink())
            elif action_rule.kind == 'on_change':
                # register an onchange method for the action_rule
                method = make_onchange(action_rule.id)
                for field_name in action_rule.on_change_fields.split(","):
                    field_name = field_name.strip()
                    model_obj._onchange_methods[field_name].append(method)

    def _update_cron(self, cr, uid, context=None):
        """ Activate the cron job depending on whether there exists action rules
            based on time conditions.
        """
        try:
            cron = self.pool['ir.model.data'].get_object(
                cr, uid, 'base_action_rule', 'ir_cron_crm_action', context=context)
        except ValueError:
            return False
        return cron.toggle(model=self._name, domain=[('kind', '=', 'on_time')])

    def _update_registry(self, cr, uid, context=None):
        """ Update the registry after a modification on action rules. """
        if self.pool.ready:
            # for the sake of simplicity, simply force the registry to reload
            cr.commit()
            ecore.api.Environment.reset()
            RegistryManager.new(cr.dbname)
            RegistryManager.signal_registry_change(cr.dbname)

    def create(self, cr, uid, vals, context=None):
        """ Create a rule, then refresh the cron and the patched registry. """
        res_id = super(base_action_rule, self).create(cr, uid, vals, context=context)
        self._update_cron(cr, uid, context=context)
        self._update_registry(cr, uid, context=context)
        return res_id

    def write(self, cr, uid, ids, vals, context=None):
        """ Write on rules; refresh cron/registry only on critical changes. """
        super(base_action_rule, self).write(cr, uid, ids, vals, context=context)
        if set(vals) & set(self.CRITICAL_FIELDS):
            self._update_cron(cr, uid, context=context)
            self._update_registry(cr, uid, context=context)
        return True

    def unlink(self, cr, uid, ids, context=None):
        """ Delete rules, then refresh the cron and the patched registry. """
        res = super(base_action_rule, self).unlink(cr, uid, ids, context=context)
        self._update_cron(cr, uid, context=context)
        self._update_registry(cr, uid, context=context)
        return res

    def onchange_model_id(self, cr, uid, ids, model_id, context=None):
        """ Reset the filters when the target model changes. """
        data = {'model': False, 'filter_pre_id': False, 'filter_id': False}
        if model_id:
            model = self.pool.get('ir.model').browse(cr, uid, model_id, context=context)
            data.update({'model': model.model})
        return {'value': data}

    def _check_delay(self, cr, uid, action, record, record_dt, context=None):
        """ Return the datetime at which ``action`` should fire for ``record``. """
        if action.trg_date_calendar_id and action.trg_date_range_type == 'day':
            # day-based delays may be computed on a working-time calendar
            start_dt = get_datetime(record_dt)
            action_dt = self.pool['resource.calendar'].schedule_days_get_date(
                cr, uid, action.trg_date_calendar_id.id, action.trg_date_range,
                day_date=start_dt, compute_leaves=True, context=context
            )
        else:
            delay = DATE_RANGE_FUNCTION[action.trg_date_range_type](action.trg_date_range)
            action_dt = get_datetime(record_dt) + delay
        return action_dt

    def _check(self, cr, uid, automatic=False, use_new_cursor=False, context=None):
        """ This Function is called by scheduler. """
        context = context or {}
        # retrieve all the action rules to run based on a timed condition
        action_dom = [('kind', '=', 'on_time')]
        action_ids = self.search(cr, uid, action_dom, context=dict(context, active_test=True))
        eval_context = self._get_eval_context(cr, uid, context=context)
        for action in self.browse(cr, uid, action_ids, context=context):
            now = datetime.now()
            if action.last_run:
                last_run = get_datetime(action.last_run)
            else:
                last_run = datetime.utcfromtimestamp(0)
            # retrieve all the records that satisfy the action's condition
            model = self.pool[action.model_id.model]
            domain = []
            ctx = dict(context)
            if action.filter_domain is not False:
                domain = eval(action.filter_domain, eval_context)
            elif action.filter_id:
                domain = eval(action.filter_id.domain, eval_context)
                ctx.update(eval(action.filter_id.context))
                if 'lang' not in ctx:
                    # Filters might be language-sensitive, attempt to reuse creator lang
                    # as we are usually running this as super-user in background
                    [filter_meta] = action.filter_id.get_metadata()
                    user_id = filter_meta['write_uid'] and filter_meta['write_uid'][0] or \
                        filter_meta['create_uid'][0]
                    ctx['lang'] = self.pool['res.users'].browse(cr, uid, user_id).lang
            record_ids = model.search(cr, uid, domain, context=ctx)

            # determine when action should occur for the records
            date_field = action.trg_date_id.name
            if date_field == 'date_action_last' and 'create_date' in model._fields:
                get_record_dt = lambda record: record[date_field] or record.create_date
            else:
                get_record_dt = lambda record: record[date_field]

            # process action on the records that should be executed
            for record in model.browse(cr, uid, record_ids, context=context):
                record_dt = get_record_dt(record)
                if not record_dt:
                    continue
                action_dt = self._check_delay(cr, uid, action, record, record_dt, context=context)
                if last_run <= action_dt < now:
                    try:
                        context = dict(context or {}, action=True)
                        # NOTE(review): _process is a new-API @multi method on
                        # this model, but it is invoked here with the old-API
                        # (cr, uid, ...) convention — verify the API bridge
                        # maps these arguments as intended.
                        self._process(cr, uid, action, [record.id], context=context)
                    except Exception:
                        import traceback
                        _logger.error(traceback.format_exc())

            action.write({'last_run': now.strftime(DEFAULT_SERVER_DATETIME_FORMAT)})

            if automatic:
                # auto-commit for batch processing
                cr.commit()
class hr_employee(osv.osv):
    """ Employee directory entry, delegating resource data (name, company,
        active flag, linked user ...) to resource.resource via _inherits.
    """
    _name = "hr.employee"
    _description = "Employee"
    _order = 'name_related'
    _inherits = {'resource.resource': "resource_id"}
    _inherit = ['mail.thread']

    # followers only need read access on the employee to post messages
    _mail_post_access = 'read'

    _columns = {
        # we need a related field in order to be able to sort the employee by name
        'name_related': fields.related('resource_id', 'name', type='char', string='Name', readonly=True, store=True),
        'country_id': fields.many2one('res.country', 'Nationality (Country)'),
        'birthday': fields.date("Date of Birth"),
        'ssnid': fields.char('SSN No', help='Social Security Number'),
        'sinid': fields.char('SIN No', help="Social Insurance Number"),
        'identification_id': fields.char('Identification No'),
        'gender': fields.selection([('male', 'Male'), ('female', 'Female'), ('other', 'Other')], 'Gender'),
        'marital': fields.selection([('single', 'Single'), ('married', 'Married'), ('widower', 'Widower'), ('divorced', 'Divorced')], 'Marital Status'),
        'department_id': fields.many2one('hr.department', 'Department'),
        'address_id': fields.many2one('res.partner', 'Working Address'),
        'address_home_id': fields.many2one('res.partner', 'Home Address'),
        'bank_account_id': fields.many2one(
            'res.partner.bank', 'Bank Account Number',
            domain="[('partner_id','=',address_home_id)]",
            help="Employee bank salary account"),
        'work_phone': fields.char('Work Phone', readonly=False),
        'mobile_phone': fields.char('Work Mobile', readonly=False),
        'work_email': fields.char('Work Email', size=240),
        'work_location': fields.char('Work Location'),
        'notes': fields.text('Notes'),
        'parent_id': fields.many2one('hr.employee', 'Manager'),
        'category_ids': fields.many2many('hr.employee.category', 'employee_category_rel', 'emp_id', 'category_id', 'Tags'),
        'child_ids': fields.one2many('hr.employee', 'parent_id', 'Subordinates'),
        'resource_id': fields.many2one('resource.resource', 'Resource', ondelete='cascade', required=True, auto_join=True),
        'coach_id': fields.many2one('hr.employee', 'Coach'),
        'job_id': fields.many2one('hr.job', 'Job Title'),
        'passport_id': fields.char('Passport No'),
        'color': fields.integer('Color Index'),
        'city': fields.related('address_id', 'city', type='char', string='City'),
        'login': fields.related('user_id', 'login', type='char', string='Login', readonly=1),
        'last_login': fields.related('user_id', 'date', type='datetime', string='Latest Connection', readonly=1),
    }

    # image: all image fields are base64 encoded and PIL-supported
    image = ecore.fields.Binary(
        "Photo", attachment=True,
        help="This field holds the image used as photo for the employee, limited to 1024x1024px.")
    image_medium = ecore.fields.Binary(
        "Medium-sized photo", compute='_compute_images',
        inverse='_inverse_image_medium', store=True, attachment=True,
        help="Medium-sized photo of the employee. It is automatically "
             "resized as a 128x128px image, with aspect ratio preserved. "
             "Use this field in form views or some kanban views.")
    image_small = ecore.fields.Binary(
        "Small-sized photo", compute='_compute_images',
        inverse='_inverse_image_small', store=True, attachment=True,
        help="Small-sized photo of the employee. It is automatically "
             "resized as a 64x64px image, with aspect ratio preserved. "
             "Use this field anywhere a small image is required.")

    @api.depends('image')
    def _compute_images(self):
        # derive the stored resized variants from the master image
        for rec in self:
            rec.image_medium = tools.image_resize_image_medium(rec.image)
            rec.image_small = tools.image_resize_image_small(rec.image)

    def _inverse_image_medium(self):
        # writing the medium photo regenerates the master image
        for rec in self:
            rec.image = tools.image_resize_image_big(rec.image_medium)

    def _inverse_image_small(self):
        # writing the small photo regenerates the master image
        for rec in self:
            rec.image = tools.image_resize_image_big(rec.image_small)

    def _get_default_image(self, cr, uid, context=None):
        """ Return the default employee photo, base64-encoded. """
        image_path = get_module_resource('hr', 'static/src/img', 'default_image.png')
        return tools.image_resize_image_big(
            open(image_path, 'rb').read().encode('base64'))

    # BUGFIX: this dict was assigned to a plain `defaults` attribute, which
    # the ORM never reads — active/image/color defaults were silently dropped.
    # It must be `_defaults`.
    _defaults = {
        'active': 1,
        'image': _get_default_image,
        'color': 0,
    }

    def unlink(self, cr, uid, ids, context=None):
        """ Delete employees together with their delegated resources. """
        resource_ids = []
        for employee in self.browse(cr, uid, ids, context=context):
            resource_ids.append(employee.resource_id.id)
        super(hr_employee, self).unlink(cr, uid, ids, context=context)
        return self.pool.get('resource.resource').unlink(cr, uid, resource_ids, context=context)

    def onchange_address_id(self, cr, uid, ids, address, context=None):
        """ Propagate phone numbers from the chosen working address. """
        if address:
            address = self.pool.get('res.partner').browse(cr, uid, address, context=context)
            return {'value': {'work_phone': address.phone, 'mobile_phone': address.mobile}}
        return {'value': {}}

    def onchange_company(self, cr, uid, ids, company, context=None):
        """ Default the working address from the company's contact address. """
        address_id = False
        if company:
            company_id = self.pool.get('res.company').browse(cr, uid, company, context=context)
            address = self.pool.get('res.partner').address_get(
                cr, uid, [company_id.partner_id.id], ['contact'])
            address_id = address and address['contact'] or False
        return {'value': {'address_id': address_id}}

    def onchange_department_id(self, cr, uid, ids, department_id, context=None):
        """ Default the manager from the chosen department. """
        value = {'parent_id': False}
        if department_id:
            department = self.pool.get('hr.department').browse(cr, uid, department_id)
            value['parent_id'] = department.manager_id.id
        return {'value': value}

    def onchange_user(self, cr, uid, ids, name, image, user_id, context=None):
        """ Propagate name, work email and image from the linked user. """
        # BUGFIX: `values` was only bound inside the `if user_id:` branch but
        # returned unconditionally, raising NameError when the user is cleared;
        # return an empty value dict in that case.
        values = {}
        if user_id:
            user = self.pool['res.users'].browse(cr, uid, user_id, context=context)
            values = {
                'name': name or user.name,
                'work_email': user.email,
                'image': image or user.image,
            }
        return {'value': values}

    def action_follow(self, cr, uid, ids, context=None):
        """ Wrapper because message_subscribe_users take a user_ids=None
            that receive the context without the wrapper.
        """
        return self.message_subscribe_users(cr, uid, ids, context=context)

    def action_unfollow(self, cr, uid, ids, context=None):
        """ Wrapper because message_unsubscribe_users take a user_ids=None
            that receive the context without the wrapper.
        """
        return self.message_unsubscribe_users(cr, uid, ids, context=context)

    def _message_get_auto_subscribe_fields(self, cr, uid, updated_fields, auto_follow_fields=None, context=None):
        """ Overwrite of the original method to always follow user_id field,
            even when not track_visibility so that a user will follow it's
            employee
        """
        if auto_follow_fields is None:
            auto_follow_fields = ['user_id']
        user_field_lst = []
        for name, field in self._fields.items():
            if name in auto_follow_fields and name in updated_fields and field.comodel_name == 'res.users':
                user_field_lst.append(name)
        return user_field_lst

    _constraints = [
        (osv.osv._check_recursion, _('Error! You cannot create recursive hierarchy of Employee(s).'), ['parent_id']),
    ]
class product_template(osv.osv):
    """ Stock-related extensions of product templates: aggregated variant
        quantities, reordering-rule counters and stock actions.
    """
    _name = 'product.template'
    _inherit = 'product.template'

    def _product_available(self, cr, uid, ids, name, arg, context=None):
        """ Function field: sum the stock quantities (on hand, forecasted,
            incoming, outgoing) of all variants of each template.
        """
        prod_available = {}
        product_ids = self.browse(cr, uid, ids, context=context)
        # collect every variant id, then compute their availability in one call
        var_ids = []
        for product in product_ids:
            var_ids += [p.id for p in product.product_variant_ids]
        variant_available = self.pool['product.product']._product_available(
            cr, uid, var_ids, context=context)

        for product in product_ids:
            qty_available = 0
            virtual_available = 0
            incoming_qty = 0
            outgoing_qty = 0
            for p in product.product_variant_ids:
                qty_available += variant_available[p.id]["qty_available"]
                virtual_available += variant_available[p.id]["virtual_available"]
                incoming_qty += variant_available[p.id]["incoming_qty"]
                outgoing_qty += variant_available[p.id]["outgoing_qty"]
            prod_available[product.id] = {
                "qty_available": qty_available,
                "virtual_available": virtual_available,
                "incoming_qty": incoming_qty,
                "outgoing_qty": outgoing_qty,
            }
        return prod_available

    def _search_product_quantity(self, cr, uid, obj, name, domain, context):
        """ fnct_search: delegate quantity searches to the variants. """
        prod = self.pool.get("product.product")
        product_variant_ids = prod.search(cr, uid, domain, context=context)
        return [('product_variant_ids', 'in', product_variant_ids)]

    def _product_available_text(self, cr, uid, ids, field_names=None, arg=False, context=None):
        """ Human-readable on-hand quantity, e.g. '12.0 On Hand'. """
        res = {}
        for product in self.browse(cr, uid, ids, context=context):
            res[product.id] = str(product.qty_available) + _(" On Hand")
        return res

    def _compute_nbr_reordering_rules(self, cr, uid, ids, field_names=None, arg=None, context=None):
        """ Function field: count the reordering rules of each template and
            expose the min/max quantities of its rules.
        """
        # BUGFIX: the original used dict.fromkeys(ids, {...}), which makes
        # every template share ONE result dict — the last processed
        # template's counters leaked into all the others. Build one dict
        # per template instead.
        res = dict((tmpl_id, {'nbr_reordering_rules': 0,
                              'reordering_min_qty': 0,
                              'reordering_max_qty': 0}) for tmpl_id in ids)
        product_data = self.pool['stock.warehouse.orderpoint'].read_group(
            cr, uid, [('product_id.product_tmpl_id', 'in', ids)],
            ['product_id', 'product_min_qty', 'product_max_qty'],
            ['product_id'], context=context)
        for data in product_data:
            # the grouped __domain carries the template id of the group
            product_tmpl_id = data['__domain'][1][2][0]
            res[product_tmpl_id]['nbr_reordering_rules'] = \
                res[product_tmpl_id].get('nbr_reordering_rules', 0) + int(data['product_id_count'])
            res[product_tmpl_id]['reordering_min_qty'] = data['product_min_qty']
            res[product_tmpl_id]['reordering_max_qty'] = data['product_max_qty']
        return res

    def _get_product_template_type(self, cr, uid, context=None):
        """ Extend the product type selection with the stockable type. """
        res = super(product_template, self)._get_product_template_type(cr, uid, context=context)
        if 'product' not in [item[0] for item in res]:
            # NOTE(review): hard-coded Spanish label, not wrapped in _();
            # inconsistent with the rest of the file — confirm intent.
            res.append(('product', 'Producto Almacenable'))
        return res

    _columns = {
        'property_stock_procurement': fields.property(
            type='many2one',
            relation='stock.location',
            string="Procurement Location",
            domain=[('usage', 'like', 'procurement')],
            help="This stock location will be used, instead of the default one, as the source location for stock moves generated by procurements."),
        'property_stock_production': fields.property(
            type='many2one',
            relation='stock.location',
            string="Production Location",
            domain=[('usage', 'like', 'production')],
            help="This stock location will be used, instead of the default one, as the source location for stock moves generated by manufacturing orders."),
        'property_stock_inventory': fields.property(
            type='many2one',
            relation='stock.location',
            string="Inventory Location",
            domain=[('usage', 'like', 'inventory')],
            help="This stock location will be used, instead of the default one, as the source location for stock moves generated when you do an inventory."),
        'sale_delay': fields.float(
            'Customer Lead Time',
            help="The average delay in days between the confirmation of the customer order and the delivery of the finished products. It's the time you promise to your customers."),
        'tracking': fields.selection(
            selection=[('serial', 'By Unique Serial Number'), ('lot', 'By Lots'), ('none', 'No Tracking')],
            string="Tracking", required=True),
        'description_picking': fields.text('Description on Picking', translate=True),
        # sum of product variant qty
        # 'reception_count': fields.function(_product_available, multi='qty_available',
        #     fnct_search=_search_product_quantity, type='float', string='Quantity On Hand'),
        # 'delivery_count': fields.function(_product_available, multi='qty_available',
        #     fnct_search=_search_product_quantity, type='float', string='Quantity On Hand'),
        'qty_available': fields.function(
            _product_available, multi='qty_available',
            digits_compute=dp.get_precision('Product Unit of Measure'),
            fnct_search=_search_product_quantity, type='float', string='Quantity On Hand'),
        'virtual_available': fields.function(
            _product_available, multi='qty_available',
            digits_compute=dp.get_precision('Product Unit of Measure'),
            fnct_search=_search_product_quantity, type='float', string='Forecasted Quantity'),
        'incoming_qty': fields.function(
            _product_available, multi='qty_available',
            digits_compute=dp.get_precision('Product Unit of Measure'),
            fnct_search=_search_product_quantity, type='float', string='Incoming'),
        'outgoing_qty': fields.function(
            _product_available, multi='qty_available',
            digits_compute=dp.get_precision('Product Unit of Measure'),
            fnct_search=_search_product_quantity, type='float', string='Outgoing'),
        # dummy fields used only as search criteria in context-aware views
        'location_id': fields.dummy(string='Location', relation='stock.location', type='many2one'),
        'warehouse_id': fields.dummy(string='Warehouse', relation='stock.warehouse', type='many2one'),
        'route_ids': fields.many2many(
            'stock.location.route', 'stock_route_product', 'product_id', 'route_id',
            'Routes', domain="[('product_selectable', '=', True)]",
            help="Depending on the modules installed, this will allow you to define the route of the product: whether it will be bought, manufactured, MTO/MTS,..."),
        'nbr_reordering_rules': fields.function(
            _compute_nbr_reordering_rules, string='Reordering Rules', type='integer', multi=True),
        'reordering_min_qty': fields.function(
            _compute_nbr_reordering_rules, type='float', multi=True),
        'reordering_max_qty': fields.function(
            _compute_nbr_reordering_rules, type='float', multi=True),
        'route_from_categ_ids': fields.related(
            'categ_id', 'total_route_ids', type="many2many",
            relation="stock.location.route", string="Category Routes"),
    }

    _defaults = {
        'sale_delay': 7,
        'tracking': 'none',
    }

    def action_view_routes(self, cr, uid, ids, context=None):
        """ Open the routes applicable to these templates (own routes,
            category routes and warehouse-selectable routes).
        """
        route_obj = self.pool.get("stock.location.route")
        act_obj = self.pool.get('ir.actions.act_window')
        mod_obj = self.pool.get('ir.model.data')
        product_route_ids = set()
        for product in self.browse(cr, uid, ids, context=context):
            product_route_ids |= set([r.id for r in product.route_ids])
            product_route_ids |= set([r.id for r in product.categ_id.total_route_ids])
        route_ids = route_obj.search(cr, uid, [
            '|', ('id', 'in', list(product_route_ids)),
            ('warehouse_selectable', '=', True)
        ], context=context)
        result = mod_obj.xmlid_to_res_id(cr, uid, 'stock.action_routes_form', raise_if_not_found=True)
        result = act_obj.read(cr, uid, [result], context=context)[0]
        result['domain'] = "[('id','in',[" + ','.join(map(str, route_ids)) + "])]"
        return result

    def onchange_tracking(self, cr, uid, ids, tracking, context=None):
        """ Delegate the tracking onchange (lot warnings...) to the variants. """
        if not tracking:
            return {}
        product_product = self.pool['product.product']
        variant_ids = product_product.search(cr, uid, [('product_tmpl_id', 'in', ids)], context=context)
        return product_product.onchange_tracking(cr, uid, variant_ids, tracking, context=context)

    def _get_products(self, cr, uid, ids, context=None):
        """ Return the variant ids of the given templates. """
        products = []
        for prodtmpl in self.browse(cr, uid, ids, context=None):
            products += [x.id for x in prodtmpl.product_variant_ids]
        return products

    def _get_act_window_dict(self, cr, uid, name, context=None):
        """ Read the act_window identified by the given xml id. """
        mod_obj = self.pool.get('ir.model.data')
        act_obj = self.pool.get('ir.actions.act_window')
        result = mod_obj.xmlid_to_res_id(cr, uid, name, raise_if_not_found=True)
        result = act_obj.read(cr, uid, [result], context=context)[0]
        return result

    def action_open_quants(self, cr, uid, ids, context=None):
        """ Open the stock quants of all variants of these templates. """
        products = self._get_products(cr, uid, ids, context=context)
        result = self._get_act_window_dict(cr, uid, 'stock.product_open_quants', context=context)
        result['domain'] = "[('product_id','in',[" + ','.join(map(str, products)) + "])]"
        result['context'] = "{'search_default_locationgroup': 1, 'search_default_internal_loc': 1}"
        return result

    def action_view_orderpoints(self, cr, uid, ids, context=None):
        """ Open the reordering rules of these templates' variants. """
        products = self._get_products(cr, uid, ids, context=context)
        result = self._get_act_window_dict(cr, uid, 'stock.product_open_orderpoint', context=context)
        if len(ids) == 1 and len(products) == 1:
            # single variant: preset it as default and search filter
            result['context'] = "{'default_product_id': " + str(products[0]) + \
                                ", 'search_default_product_id': " + str(products[0]) + "}"
        else:
            result['domain'] = "[('product_id','in',[" + ','.join(map(str, products)) + "])]"
            result['context'] = "{}"
        return result

    def action_view_stock_moves(self, cr, uid, ids, context=None):
        """ Open the stock moves of these templates. """
        products = self._get_products(cr, uid, ids, context=context)
        result = self._get_act_window_dict(cr, uid, 'stock.act_product_stock_move_open', context=context)
        if products:
            result['context'] = "{'default_product_id': %d}" % products[0]
        result['domain'] = "[('product_id.product_tmpl_id','in',[" + ','.join(map(str, ids)) + "])]"
        return result

    def write(self, cr, uid, ids, vals, context=None):
        """ Forbid changing the unit of measure once stock moves are done. """
        if 'uom_id' in vals:
            new_uom = self.pool.get('product.uom').browse(cr, uid, vals['uom_id'], context=context)
            for product in self.browse(cr, uid, ids, context=context):
                old_uom = product.uom_id
                if old_uom != new_uom:
                    if self.pool.get('stock.move').search(
                            cr, uid,
                            [('product_id', 'in', [x.id for x in product.product_variant_ids]),
                             ('state', '=', 'done')],
                            limit=1, context=context):
                        raise UserError(_(
                            "You can not change the unit of measure of a product that has already been used in a done stock move. If you need to change the unit of measure, you may deactivate this product."))
        return super(product_template, self).write(cr, uid, ids, vals, context=context)
class MailMailStats(osv.Model):
    """ MailMailStats models the statistics collected about emails. Those statistics
    are stored in a separated model and table to avoid bloating the mail_mail table
    with statistics values. This also allows to delete emails send with mass mailing
    without loosing the statistics about them. """

    _name = 'mail.mail.statistics'
    _description = 'Email Statistics'
    _rec_name = 'message_id'
    _order = 'message_id'

    def _compute_state(self, cr, uid, ids, field_names, arg, context=None):
        """Function-field getter for 'state' and 'state_update'.

        Default state is 'outgoing'. The checks are deliberately NOT elif:
        each truthy lifecycle timestamp overrides the previous one, so the
        effective priority is exception < sent < opened < replied < bounced.
        'state_update' is always stamped with the computation time.
        """
        res = dict((i, {
            'state': 'outgoing',
            'state_update': fields.datetime.now()
        }) for i in ids)

        for stat in self.browse(cr, uid, ids, context=context):
            if stat.exception:
                res[stat.id]['state'] = 'exception'
            if stat.sent:
                res[stat.id]['state'] = 'sent'
            if stat.opened:
                res[stat.id]['state'] = 'opened'
            if stat.replied:
                res[stat.id]['state'] = 'replied'
            if stat.bounced:
                res[stat.id]['state'] = 'bounced'

        return res

    def _compute_recipient(self, cr, uid, ids, field_names, arg, context=None):
        """Function-field getter for 'recipient': display name of the target
        document, followed by its email when one of the known email fields
        ('email' preferred over 'email_from') is set on the record."""
        res = dict.fromkeys(ids, '')
        for stat in self.browse(cr, uid, ids, context=context):
            # Skip statistics whose source model is not (or no longer) installed.
            if not self.pool.get(stat.model):
                continue
            target = self.pool[stat.model].browse(cr, uid, stat.res_id, context=context)
            email = ''
            for email_field in ('email', 'email_from'):
                if email_field in target and target[email_field]:
                    email = ' <%s>' % target[email_field]
                    break
            res[stat.id] = '%s%s' % (target.display_name, email)
        return res

    # Store trigger shared by the 'state' and 'state_update' function fields:
    # recompute the records themselves (lambda returns the touched ids)
    # whenever one of the lifecycle timestamps changes, with priority 10.
    # Defined before _columns because it is referenced there.
    __store = {
        _name: ((lambda s, c, u, i, t: i),
                ['exception', 'sent', 'opened', 'replied', 'bounced'], 10)
    }

    _columns = {
        'mail_mail_id': fields.many2one('mail.mail', 'Mail', ondelete='set null', select=True),
        # Plain integer mirror of mail_mail_id (set in create()) so the
        # statistic keeps a usable reference after the mail.mail is deleted.
        'mail_mail_id_int': fields.integer(
            'Mail ID (tech)',
            help=
            'ID of the related mail_mail. This field is an integer field because'
            'the related mail_mail can be deleted separately from its statistics.'
            'However the ID is needed for several action and controllers.'),
        'message_id': fields.char('Message-ID'),
        'model': fields.char('Document model'),
        'res_id': fields.integer('Document ID'),
        # campaign / wave data
        'mass_mailing_id': fields.many2one(
            'mail.mass_mailing',
            'Mass Mailing',
            ondelete='set null',
        ),
        'mass_mailing_campaign_id': fields.related(
            'mass_mailing_id',
            'mass_mailing_campaign_id',
            type='many2one',
            ondelete='set null',
            relation='mail.mass_mailing.campaign',
            string='Mass Mailing Campaign',
            store=True,
            readonly=True,
        ),
        # Bounce and tracking timestamps (drive the computed 'state')
        'scheduled': fields.datetime('Scheduled', help='Date when the email has been created'),
        'sent': fields.datetime('Sent', help='Date when the email has been sent'),
        'exception': fields.datetime(
            'Exception',
            help='Date of technical error leading to the email not being sent'),
        'opened': fields.datetime(
            'Opened', help='Date when the email has been opened the first time'),
        'replied': fields.datetime(
            'Replied',
            help='Date when this email has been replied for the first time.'),
        'bounced': fields.datetime('Bounced', help='Date when this email has bounced.'),
        'links_click_ids': fields.one2many('link.tracker.click', 'mail_stat_id', 'Links click'),
        'state': fields.function(_compute_state,
                                 string='State',
                                 type="selection",
                                 multi="state",
                                 selection=[('outgoing', 'Outgoing'),
                                            ('exception', 'Exception'),
                                            ('sent', 'Sent'),
                                            ('opened', 'Opened'),
                                            ('replied', 'Replied'),
                                            ('bounced', 'Bounced')],
                                 store=__store),
        'state_update': fields.function(_compute_state,
                                        string='State Update',
                                        type='datetime',
                                        multi='state',
                                        help='Last state update of the mail',
                                        store=__store),
        'recipient': fields.function(_compute_recipient,
                                     string='Recipient',
                                     type='char'),
    }

    _defaults = {
        'scheduled': fields.datetime.now,
    }

    def create(self, cr, uid, values, context=None):
        """Mirror the mail_mail_id many2one into mail_mail_id_int at creation
        (see the field help: the m2o may later be nulled by deletion)."""
        if 'mail_mail_id' in values:
            values['mail_mail_id_int'] = values['mail_mail_id']
        res = super(MailMailStats, self).create(cr, uid, values, context=context)
        return res

    def _get_ids(self, cr,
                 uid, ids=None, mail_mail_ids=None, mail_message_ids=None, domain=None, context=None):
        """Resolve statistics ids from one of three selectors, in priority
        order: explicit ids, related mail.mail ids (via the integer mirror),
        or Message-IDs. An extra 'domain' is AND-ed with the base criterion.

        :return: list of mail.mail.statistics ids matching the search
        """
        if not ids and mail_mail_ids:
            base_domain = [('mail_mail_id_int', 'in', mail_mail_ids)]
        elif not ids and mail_message_ids:
            base_domain = [('message_id', 'in', mail_message_ids)]
        else:
            base_domain = [('id', 'in', ids or [])]
        if domain:
            base_domain = ['&'] + domain + base_domain
        return self.search(cr, uid, base_domain, context=context)

    def set_opened(self, cr, uid, ids=None, mail_mail_ids=None, mail_message_ids=None, context=None):
        """Stamp 'opened' on matching statistics; the extra domain restricts
        the update to stats not already opened (first-open wins)."""
        stat_ids = self._get_ids(cr, uid, ids, mail_mail_ids, mail_message_ids,
                                 [('opened', '=', False)], context)
        self.write(cr, uid, stat_ids, {'opened': fields.datetime.now()}, context=context)
        return stat_ids

    def set_replied(self, cr, uid, ids=None, mail_mail_ids=None, mail_message_ids=None, context=None):
        """Stamp 'replied' on matching statistics not already replied."""
        stat_ids = self._get_ids(cr, uid, ids, mail_mail_ids, mail_message_ids,
                                 [('replied', '=', False)], context)
        self.write(cr, uid, stat_ids, {'replied': fields.datetime.now()}, context=context)
        return stat_ids

    def set_bounced(self, cr, uid, ids=None, mail_mail_ids=None, mail_message_ids=None, context=None):
        """Stamp 'bounced' on matching statistics not already bounced."""
        stat_ids = self._get_ids(cr, uid, ids, mail_mail_ids, mail_message_ids,
                                 [('bounced', '=', False)], context)
        self.write(cr, uid, stat_ids, {'bounced': fields.datetime.now()}, context=context)
        return stat_ids
class hr_timesheet_sheet(osv.osv):
    """Timesheet sheet: groups an employee's attendances and analytic
    (timesheet) lines over a period, with a validation flow driven by a
    workflow (states: new -> draft/open -> confirm -> done).

    Fixes vs previous revision: replaced Python-2-only idioms that are
    deprecated even on Python 2 — ``dict.has_key`` -> ``in`` operator and
    the ``<>`` comparison -> ``!=`` (matching the rest of the file).
    """
    _name = "hr_timesheet_sheet.sheet"
    _inherit = ['mail.thread', 'ir.needaction_mixin']
    _table = 'hr_timesheet_sheet_sheet'
    _order = "id desc"
    _description = "Timesheet"

    def _total(self, cr, uid, ids, name, args, context=None):
        """ Compute the attendances, analytic lines timesheets and differences
            between them for all the days of a timesheet and the current day
        """
        # NOTE: dict.fromkeys shares one default dict between all ids; this is
        # safe only because entries found in SQL replace the whole value below
        # and the default is never mutated in place.
        res = dict.fromkeys(
            ids, {
                'total_attendance': 0.0,
                'total_timesheet': 0.0,
                'total_difference': 0.0,
            })
        # Aggregate per sheet from the hr_timesheet_sheet_sheet_day view/table.
        cr.execute("""
            SELECT sheet_id as id,
                   sum(total_attendance) as total_attendance,
                   sum(total_timesheet) as total_timesheet,
                   sum(total_difference) as total_difference
            FROM hr_timesheet_sheet_sheet_day
            WHERE sheet_id IN %s
            GROUP BY sheet_id
        """, (tuple(ids), ))
        res.update(dict((x.pop('id'), x) for x in cr.dictfetchall()))
        return res

    def check_employee_attendance_state(self, cr, uid, sheet_id, context=None):
        """Ensure the sheet holds as many sign-ins as sign-outs.

        :raises UserError: when the counts differ (dangling sign in/out)
        """
        ids_signin = self.pool.get('hr.attendance').search(
            cr, uid, [('sheet_id', '=', sheet_id), ('action', '=', 'sign_in')])
        ids_signout = self.pool.get('hr.attendance').search(
            cr, uid, [('sheet_id', '=', sheet_id), ('action', '=', 'sign_out')])
        if len(ids_signin) != len(ids_signout):
            raise UserError(
                _('The timesheet cannot be validated as it does not contain an equal number of sign ins and sign outs.'
                  ))
        return True

    def copy(self, cr, uid, ids, *args, **argv):
        """Duplicating a timesheet is forbidden by design."""
        raise UserError(_('You cannot duplicate a timesheet.'))

    def create(self, cr, uid, vals, context=None):
        """Create a sheet; the employee must be linked to a user, and any
        inlined attendance commands are sorted by date first so the
        sign-in/sign-out alternation constraint can be satisfied."""
        if 'employee_id' in vals:
            if not self.pool.get('hr.employee').browse(
                    cr, uid, vals['employee_id'], context=context).user_id:
                raise UserError(
                    _('In order to create a timesheet for this employee, you must link him/her to a user.'
                      ))
        if vals.get('attendances_ids'):
            # If attendances, we sort them by date asc before writing them, to
            # satisfy the alternance constraint
            vals['attendances_ids'] = self.sort_attendances(
                cr, uid, vals['attendances_ids'], context=context)
        return super(hr_timesheet_sheet, self).create(cr, uid, vals, context=context)

    def write(self, cr, uid, ids, vals, context=None):
        """Write with extra validations: the (possibly new) employee must have
        a linked user and product, sheets may not overlap for the same user,
        and attendance commands are re-sorted then re-checked for sign-in /
        sign-out alternation after the write."""
        if 'employee_id' in vals:
            new_user_id = self.pool.get('hr.employee').browse(
                cr, uid, vals['employee_id'], context=context).user_id.id or False
            if not new_user_id:
                raise UserError(
                    _('In order to create a timesheet for this employee, you must link him/her to a user.'
                      ))
            if not self._sheet_date(
                    cr, uid, ids, forced_user_id=new_user_id, context=context):
                raise UserError(
                    _('You cannot have 2 timesheets that overlap!\nYou should use the menu \'My Timesheet\' to avoid this problem.'
                      ))
            if not self.pool.get('hr.employee').browse(
                    cr, uid, vals['employee_id'], context=context).product_id:
                raise UserError(
                    _('In order to create a timesheet for this employee, you must link the employee to a product.'
                      ))
        if vals.get('attendances_ids'):
            # If attendances, we sort them by date asc before writing them, to
            # satisfy the alternance constraint. In addition to the date order,
            # deleting attendances are done before inserting attendances
            vals['attendances_ids'] = self.sort_attendances(
                cr, uid, vals['attendances_ids'], context=context)
        res = super(hr_timesheet_sheet, self).write(cr, uid, ids, vals, context=context)
        if vals.get('attendances_ids'):
            for timesheet in self.browse(cr, uid, ids):
                if not self.pool['hr.attendance']._altern_si_so(
                        cr, uid, [att.id for att in timesheet.attendances_ids]):
                    raise UserError(
                        _('Error ! Sign in (resp. Sign out) must follow Sign out (resp. Sign in)'
                          ))
        return res

    def sort_attendances(self, cr, uid, attendance_tuples, context=None):
        """Sort one2many commands so that deletions (codes 2/3 -> sort key 0)
        come before creations/updates (codes 0/1/4 -> sort key 1), and within
        each group commands are ordered by attendance date ('name').

        :param attendance_tuples: list of one2many command triples
        :return: the same commands, reordered
        """
        date_attendances = []
        for att_tuple in attendance_tuples:
            if att_tuple[0] in [0, 1, 4]:
                if att_tuple[0] in [0, 1]:
                    # FIX: 'in' instead of deprecated dict.has_key()
                    if att_tuple[2] and 'name' in att_tuple[2]:
                        name = att_tuple[2]['name']
                    else:
                        name = self.pool['hr.attendance'].browse(
                            cr, uid, att_tuple[1]).name
                else:
                    name = self.pool['hr.attendance'].browse(
                        cr, uid, att_tuple[1]).name
                date_attendances.append((1, name, att_tuple))
            elif att_tuple[0] in [2, 3]:
                date_attendances.append((0, self.pool['hr.attendance'].browse(
                    cr, uid, att_tuple[1]).name, att_tuple))
            else:
                # Unknown command code: keep it in the deletion group with no
                # date (sorts first among its group on Python 2).
                date_attendances.append((0, False, att_tuple))
        date_attendances.sort()
        return [att[2] for att in date_attendances]

    def button_confirm(self, cr, uid, ids, context=None):
        """Confirm sheets: subscribe the manager, check sign in/out balance,
        and fire the 'confirm' workflow signal unless the attendance/timesheet
        difference exceeds the company threshold."""
        for sheet in self.browse(cr, uid, ids, context=context):
            if sheet.employee_id and sheet.employee_id.parent_id and sheet.employee_id.parent_id.user_id:
                self.message_subscribe_users(
                    cr, uid, [sheet.id],
                    user_ids=[sheet.employee_id.parent_id.user_id.id],
                    context=context)
            self.check_employee_attendance_state(cr, uid, sheet.id, context=context)
            di = sheet.user_id.company_id.timesheet_max_difference
            # A zero/unset threshold means "no limit".
            if (abs(sheet.total_difference) < di) or not di:
                sheet.signal_workflow('confirm')
            else:
                raise UserError(
                    _('Please verify that the total difference of the sheet is lower than %.2f.'
                      ) % (di, ))
        return True

    def attendance_action_change(self, cr, uid, ids, context=None):
        """Toggle the attendance (sign in/out) of each distinct employee
        appearing on the given sheets."""
        hr_employee = self.pool.get('hr.employee')
        employee_ids = []
        for sheet in self.browse(cr, uid, ids, context=context):
            if sheet.employee_id.id not in employee_ids:
                employee_ids.append(sheet.employee_id.id)
        return hr_employee.attendance_action_change(cr, uid, employee_ids, context=context)

    def _count_attendances(self, cr, uid, ids, field_name, arg, context=None):
        """Function-field getter: number of attendances per sheet (read_group
        avoids loading the records)."""
        res = dict.fromkeys(ids, 0)
        attendances_groups = self.pool['hr.attendance'].read_group(
            cr, uid, [('sheet_id', 'in', ids)], ['sheet_id'], 'sheet_id', context=context)
        for attendances in attendances_groups:
            res[attendances['sheet_id'][0]] = attendances['sheet_id_count']
        return res

    _columns = {
        'name': fields.char('Note',
                            select=1,
                            states={
                                'confirm': [('readonly', True)],
                                'done': [('readonly', True)]
                            }),
        'employee_id': fields.many2one('hr.employee', 'Employee', required=True),
        # Related (stored) user of the employee; drives the overlap check.
        'user_id': fields.related('employee_id',
                                  'user_id',
                                  type="many2one",
                                  relation="res.users",
                                  store=True,
                                  string="User",
                                  required=False,
                                  readonly=True),
        'date_from': fields.date('Date from',
                                 required=True,
                                 select=1,
                                 readonly=True,
                                 states={'new': [('readonly', False)]}),
        'date_to': fields.date('Date to',
                               required=True,
                               select=1,
                               readonly=True,
                               states={'new': [('readonly', False)]}),
        'timesheet_ids': fields.one2many('account.analytic.line',
                                         'sheet_id',
                                         'Timesheet lines',
                                         readonly=True,
                                         states={
                                             'draft': [('readonly', False)],
                                             'new': [('readonly', False)]
                                         }),
        'attendances_ids': fields.one2many('hr.attendance', 'sheet_id', 'Attendances'),
        'state': fields.selection(
            [('new', 'New'), ('draft', 'Open'), ('confirm', 'Waiting Approval'),
             ('done', 'Approved')],
            'Status',
            select=True,
            required=True,
            readonly=True,
            track_visibility='onchange',
            help=' * The \'Draft\' status is used when a user is encoding a new and unconfirmed timesheet. \
\n* The \'Confirmed\' status is used for to confirm the timesheet by user. \
\n* The \'Done\' status is used when users timesheet is accepted by his/her senior.'),
        'state_attendance': fields.related('employee_id',
                                           'state',
                                           type='selection',
                                           selection=[('absent', 'Absent'),
                                                      ('present', 'Present')],
                                           string='Current Status',
                                           readonly=True),
        # The three totals share one multi-getter (_total), one SQL query.
        'total_attendance': fields.function(_total,
                                            method=True,
                                            string='Total Attendance',
                                            multi="_total"),
        'total_timesheet': fields.function(_total,
                                           method=True,
                                           string='Total Timesheet',
                                           multi="_total"),
        'total_difference': fields.function(_total,
                                            method=True,
                                            string='Difference',
                                            multi="_total"),
        'period_ids': fields.one2many('hr_timesheet_sheet.sheet.day',
                                      'sheet_id',
                                      'Period',
                                      readonly=True),
        'account_ids': fields.one2many('hr_timesheet_sheet.sheet.account',
                                       'sheet_id',
                                       'Analytic accounts',
                                       readonly=True),
        'company_id': fields.many2one('res.company', 'Company'),
        'department_id': fields.many2one('hr.department', 'Department'),
        'attendance_count': fields.function(_count_attendances,
                                            type='integer',
                                            string="Attendances"),
    }

    def _default_date_from(self, cr, uid, context=None):
        """Default period start, based on the company timesheet_range:
        first day of month/week/year, falling back to today."""
        user = self.pool.get('res.users').browse(cr, uid, uid, context=context)
        r = user.company_id and user.company_id.timesheet_range or 'month'
        if r == 'month':
            return time.strftime('%Y-%m-01')
        elif r == 'week':
            # Monday of the current ISO week.
            return (datetime.today() +
                    relativedelta(weekday=0, days=-6)).strftime('%Y-%m-%d')
        elif r == 'year':
            return time.strftime('%Y-01-01')
        return fields.date.context_today(self, cr, uid, context)

    def _default_date_to(self, cr, uid, context=None):
        """Default period end, symmetric to _default_date_from."""
        user = self.pool.get('res.users').browse(cr, uid, uid, context=context)
        r = user.company_id and user.company_id.timesheet_range or 'month'
        if r == 'month':
            # Last day of the current month.
            return (datetime.today() +
                    relativedelta(months=+1, day=1, days=-1)).strftime('%Y-%m-%d')
        elif r == 'week':
            # Sunday of the current ISO week.
            return (datetime.today() +
                    relativedelta(weekday=6)).strftime('%Y-%m-%d')
        elif r == 'year':
            return time.strftime('%Y-12-31')
        return fields.date.context_today(self, cr, uid, context)

    def _default_employee(self, cr, uid, context=None):
        """Default employee: the one linked to the current user, if any."""
        emp_ids = self.pool.get('hr.employee').search(
            cr, uid, [('user_id', '=', uid)], context=context)
        return emp_ids and emp_ids[0] or False

    _defaults = {
        'date_from': _default_date_from,
        'date_to': _default_date_to,
        'state': 'new',
        'employee_id': _default_employee,
        'company_id': lambda self, cr, uid, c: self.pool.get('res.company').
        _company_default_get(cr, uid, 'hr_timesheet_sheet.sheet', context=c)
    }

    def _sheet_date(self, cr, uid, ids, forced_user_id=False, context=None):
        """Constraint helper: return False when another sheet of the same user
        overlaps the period of any of the given sheets.

        :param forced_user_id: check against this user instead of the sheet's
            employee's user (used when write() is about to change employees)
        """
        for sheet in self.browse(cr, uid, ids, context=context):
            new_user_id = forced_user_id or sheet.employee_id.user_id and sheet.employee_id.user_id.id
            if new_user_id:
                cr.execute(
                    'SELECT id \
                    FROM hr_timesheet_sheet_sheet \
                    WHERE (date_from <= %s and %s <= date_to) \
                        AND user_id=%s \
                        AND id <> %s',
                    (sheet.date_to, sheet.date_from, new_user_id, sheet.id))
                if cr.fetchall():
                    return False
        return True

    _constraints = [
        (_sheet_date,
         'You cannot have 2 timesheets that overlap!\nPlease use the menu \'My Current Timesheet\' to avoid this problem.',
         ['date_from', 'date_to']),
    ]

    def action_set_to_draft(self, cr, uid, ids, *args):
        """Reset sheets to draft and restart their workflow instance."""
        self.write(cr, uid, ids, {'state': 'draft'})
        self.create_workflow(cr, uid, ids)
        return True

    def name_get(self, cr, uid, ids, context=None):
        """Display name: 'Week <n>' with n the ISO 8601 week of date_from."""
        if not ids:
            return []
        if isinstance(ids, (long, int)):
            ids = [ids]
        # week number according to ISO 8601 Calendar
        return [(r['id'], _('Week ') + str(datetime.strptime(r['date_from'], '%Y-%m-%d').isocalendar()[1]))
                for r in self.read(cr, uid, ids, ['date_from'], context=context, load='_classic_write')]

    def unlink(self, cr, uid, ids, context=None):
        """Delete sheets, refusing confirmed/approved sheets and sheets that
        still carry attendance hours; cascade-delete the timesheet lines."""
        sheets = self.read(cr, uid, ids, ['state', 'total_attendance'], context=context)
        for sheet in sheets:
            if sheet['state'] in ('confirm', 'done'):
                raise UserError(
                    _('You cannot delete a timesheet which is already confirmed.'
                      ))
            # FIX: '!=' instead of the deprecated '<>' operator.
            elif sheet['total_attendance'] != 0.00:
                raise UserError(
                    _('You cannot delete a timesheet which have attendance entries.'
                      ))

        toremove = []
        analytic_timesheet = self.pool.get('account.analytic.line')
        for sheet in self.browse(cr, uid, ids, context=context):
            for timesheet in sheet.timesheet_ids:
                toremove.append(timesheet.id)
        analytic_timesheet.unlink(cr, uid, toremove, context=context)

        return super(hr_timesheet_sheet, self).unlink(cr, uid, ids, context=context)

    def onchange_employee_id(self, cr, uid, ids, employee_id, context=None):
        """Onchange: propagate the employee's department and user."""
        department_id = False
        user_id = False
        if employee_id:
            empl_id = self.pool.get('hr.employee').browse(cr, uid, employee_id, context=context)
            department_id = empl_id.department_id.id
            user_id = empl_id.user_id.id
        return {
            'value': {
                'department_id': department_id,
                'user_id': user_id,
            }
        }

    # ------------------------------------------------
    # OpenChatter methods and notifications
    # ------------------------------------------------

    def _track_subtype(self, cr, uid, ids, init_values, context=None):
        """Pick the mail subtype matching the state transition being tracked."""
        record = self.browse(cr, uid, ids[0], context=context)
        if 'state' in init_values and record.state == 'confirm':
            return 'hr_timesheet_sheet.mt_timesheet_confirmed'
        elif 'state' in init_values and record.state == 'done':
            return 'hr_timesheet_sheet.mt_timesheet_approved'
        return super(hr_timesheet_sheet, self)._track_subtype(
            cr, uid, ids, init_values, context=context)

    def _needaction_domain_get(self, cr, uid, context=None):
        """Needaction badge: sheets waiting approval whose employee is managed
        by the current user; False when the user manages nobody."""
        emp_obj = self.pool.get('hr.employee')
        empids = emp_obj.search(cr, uid, [('parent_id.user_id', '=', uid)], context=context)
        if not empids:
            return False
        dom = ['&', ('state', '=', 'confirm'), ('employee_id', 'in', empids)]
        return dom
class mrp_production_workcenter_line(osv.osv):
    # Work order on a manufacturing order's work center; adds scheduling
    # dates, a state machine and production-order synchronization on top of
    # the inherited mrp.production.workcenter.line model.

    def _get_date_end(self, cr, uid, ids, field_name, arg, context=None):
        """ Finds ending date.
        @return: Dictionary of values.
        """
        ops = self.browse(cr, uid, ids, context=context)
        # Batch the calendar interval computation: one (start, hours, calendar)
        # triple per planned operation.
        date_and_hours_by_cal = [(op.date_planned, op.hour, op.workcenter_id.calendar_id.id)
                                 for op in ops if op.date_planned]

        intervals = self.pool.get('resource.calendar').interval_get_multi(
            cr, uid, date_and_hours_by_cal)

        res = {}
        for op in ops:
            res[op.id] = False
            if op.date_planned:
                i = intervals.get((op.date_planned, op.hour, op.workcenter_id.calendar_id.id))
                if i:
                    # End bound of the last working interval = planned end.
                    res[op.id] = i[-1][1].strftime('%Y-%m-%d %H:%M:%S')
                else:
                    # No working interval found (e.g. no calendar): fall back
                    # to the planned start date.
                    res[op.id] = op.date_planned
        return res

    def onchange_production_id(self, cr, uid, ids, production_id, context=None):
        """Onchange: copy product, quantity and UoM from the selected
        manufacturing order onto the work order line."""
        if not production_id:
            return {}
        production = self.pool.get('mrp.production').browse(cr, uid, production_id, context=None)
        result = {
            'product': production.product_id.id,
            'qty': production.product_qty,
            'uom': production.product_uom.id,
        }
        return {'value': result}

    _inherit = 'mrp.production.workcenter.line'
    _order = "sequence, date_planned"

    _columns = {
        'state': fields.selection([('draft','Draft'),('cancel','Cancelled'),('pause','Pending'),('startworking', 'In Progress'),('done','Finished')],'Status', readonly=True, copy=False,
                                  help="* When a work order is created it is set in 'Draft' status.\n" \
                                       "* When user sets work order in start mode that time it will be set in 'In Progress' status.\n" \
                                       "* When work order is in running mode, during that time if user wants to stop or to make changes in order then can set in 'Pending' status.\n" \
                                       "* When the user cancels the work order it will be set in 'Canceled' status.\n" \
                                       "* When order is completely processed that time it is set in 'Finished' status."),
        'date_planned': fields.datetime('Scheduled Date', select=True),
        'date_planned_end': fields.function(_get_date_end, string='End Date', type='datetime'),
        'date_start': fields.datetime('Start Date'),
        'date_finished': fields.datetime('End Date'),
        'delay': fields.float('Working Hours',help="The elapsed time between operation start and stop in this Work Center",readonly=True),
        # Mirrors of the parent production order, for list views / filtering.
        'production_state':fields.related('production_id','state', type='selection', selection=[('draft','Draft'),('confirmed','Waiting Goods'),('ready','Ready to Produce'),('in_production','In Production'),('cancel','Canceled'),('done','Done')], string='Production Status', readonly=True),
        'product':fields.related('production_id','product_id',type='many2one',relation='product.product',string='Product', readonly=True),
        'qty':fields.related('production_id','product_qty',type='float',string='Qty',readonly=True, store=True),
        'uom':fields.related('production_id','product_uom',type='many2one',relation='product.uom',string='Unit of Measure',readonly=True),
    }

    _defaults = {'state': 'draft', 'delay': 0.0, 'production_state': 'draft'}

    def modify_production_order_state(self, cr, uid, ids, action):
        """ Modifies production order state if work order state is changed.
        @param action: Action to perform ('start', or anything else which is
            treated as a completion check).
        @return: Nothing
        """
        prod_obj_pool = self.pool.get('mrp.production')
        # Only the first work order of `ids` is considered here.
        oper_obj = self.browse(cr, uid, ids)[0]
        prod_obj = oper_obj.production_id
        if action == 'start':
            if prod_obj.state == 'confirmed':
                # Force reservation before signalling production start.
                prod_obj_pool.force_production(cr, uid, [prod_obj.id])
                prod_obj_pool.signal_workflow(cr, uid, [prod_obj.id], 'button_produce')
            elif prod_obj.state == 'ready':
                prod_obj_pool.signal_workflow(cr, uid, [prod_obj.id], 'button_produce')
            elif prod_obj.state == 'in_production':
                return
            else:
                raise UserError(
                    _('Manufacturing order cannot be started in state "%s"!') %
                    (prod_obj.state, ))
        else:
            # Completion path: only act when no other work order of the same
            # production remains open.
            open_count = self.search_count(
                cr, uid, [('production_id', '=', prod_obj.id), ('state', '!=', 'done')])
            flag = not bool(open_count)
            if flag:
                button_produce_done = True
                for production in prod_obj_pool.browse(cr, uid, [prod_obj.id], context=None):
                    if production.move_lines or production.move_created_ids:
                        moves = production.move_lines + production.move_created_ids
                        # If tracking is activated, we want to make sure the
                        # user will enter the serial numbers, so do not finish
                        # the production automatically.
                        if moves.filtered(
                                lambda r: r.product_id.tracking != 'none'):
                            button_produce_done = False
                        else:
                            prod_obj_pool.action_produce(
                                cr, uid, production.id, production.product_qty,
                                'consume_produce', context=None)
                if button_produce_done:
                    prod_obj_pool.signal_workflow(
                        cr, uid, [oper_obj.production_id.id], 'button_produce_done')
        return

    def write(self, cr, uid, ids, vals, context=None, update=True):
        """Write; when the planned date moves (and `update` is not disabled),
        pull the production order's start date back to the earliest of the new
        date and its first work order's planned date."""
        result = super(mrp_production_workcenter_line, self).write(cr, uid, ids, vals, context=context)
        prod_obj = self.pool.get('mrp.production')
        if vals.get('date_planned', False) and update:
            for prod in self.browse(cr, uid, ids, context=context):
                if prod.production_id.workcenter_lines:
                    # String comparison works because dates are '%Y-%m-%d %H:%M:%S'.
                    dstart = min(
                        vals['date_planned'],
                        prod.production_id.workcenter_lines[0]['date_planned'])
                    # mini=False: presumably disables re-minimizing the date on
                    # mrp.production.write — TODO confirm against that model.
                    prod_obj.write(cr, uid, [prod.production_id.id],
                                   {'date_start': dstart},
                                   context=context,
                                   mini=False)
        return result

    def action_draft(self, cr, uid, ids, context=None):
        """ Sets state to draft.
        @return: True
        """
        return self.write(cr, uid, ids, {'state': 'draft'}, context=context)

    def action_start_working(self, cr, uid, ids, context=None):
        """ Sets state to start working and writes starting date.
        @return: True
        """
        # Sync the production order first: it may raise for invalid states.
        self.modify_production_order_state(cr, uid, ids, 'start')
        self.write(cr, uid, ids, {
            'state': 'startworking',
            'date_start': time.strftime('%Y-%m-%d %H:%M:%S')
        }, context=context)
        return True

    def action_done(self, cr, uid, ids, context=None):
        """ Sets state to done, writes finish date and calculates delay.
        @return: True
        """
        delay = 0.0
        date_now = time.strftime('%Y-%m-%d %H:%M:%S')
        # Delay is computed from the first work order only.
        obj_line = self.browse(cr, uid, ids[0])

        date_start = datetime.strptime(obj_line.date_start, '%Y-%m-%d %H:%M:%S')
        date_finished = datetime.strptime(date_now, '%Y-%m-%d %H:%M:%S')
        # Elapsed time in hours: whole days * 24 + remaining seconds / 3600.
        delay += (date_finished - date_start).days * 24
        delay += (date_finished - date_start).seconds / float(60 * 60)

        self.write(cr, uid, ids, {
            'state': 'done',
            'date_finished': date_now,
            'delay': delay
        }, context=context)
        # State must be written before the production-order completion check.
        self.modify_production_order_state(cr, uid, ids, 'done')
        return True

    def action_cancel(self, cr, uid, ids, context=None):
        """ Sets state to cancel.
        @return: True
        """
        return self.write(cr, uid, ids, {'state': 'cancel'}, context=context)

    def action_pause(self, cr, uid, ids, context=None):
        """ Sets state to pause.
        @return: True
        """
        return self.write(cr, uid, ids, {'state': 'pause'}, context=context)

    def action_resume(self, cr, uid, ids, context=None):
        """ Sets state to startworking.
        @return: True
        """
        return self.write(cr, uid, ids, {'state': 'startworking'}, context=context)
class procurement_rule(osv.osv):
    """Extend procurement rules with stock-specific data: a 'move' action,
    source/destination locations, route information and propagation options."""
    _inherit = 'procurement.rule'

    def _get_action(self, cr, uid, context=None):
        """Add the stock 'move' action to the selectable procurement actions."""
        return super(procurement_rule, self)._get_action(cr, uid, context=context) \
            + [('move', _('Move From Another Location'))]

    def _get_rules(self, cr, uid, ids, context=None):
        """Store trigger helper: ids of the pull rules of the given routes."""
        return [rule.id
                for route in self.browse(cr, uid, ids)
                for rule in route.pull_ids]

    _columns = {
        'location_id': fields.many2one('stock.location', 'Procurement Location'),
        'location_src_id': fields.many2one('stock.location',
                                           'Source Location',
                                           help="Source location is action=move"),
        'route_id': fields.many2one('stock.location.route',
                                    'Route',
                                    help="If route_id is False, the rule is global"),
        'procure_method': fields.selection(
            [('make_to_stock', 'Take From Stock'),
             ('make_to_order', 'Create Procurement')],
            'Move Supply Method',
            required=True,
            help=
            """Determines the procurement method of the stock move that will be generated: whether it will need to 'take from the available stock' in its source location or needs to ignore its stock and create a procurement over there."""
        ),
        # Stored related field; recomputed when the route's sequence changes
        # or when this rule is moved to another route.
        'route_sequence': fields.related('route_id',
                                         'sequence',
                                         string='Route Sequence',
                                         store={
                                             'stock.location.route': (_get_rules, ['sequence'], 10),
                                             'procurement.rule': (lambda self, cr, uid, ids, c={}: ids, ['route_id'], 10),
                                         }),
        'picking_type_id': fields.many2one(
            'stock.picking.type',
            'Picking Type',
            help=
            "Picking Type determines the way the picking should be shown in the view, reports, ..."
        ),
        'delay': fields.integer('Number of Days'),
        'partner_address_id': fields.many2one('res.partner', 'Partner Address'),
        'propagate': fields.boolean(
            'Propagate cancel and split',
            help=
            'If checked, when the previous move of the move (which was generated by a next procurement) is cancelled or split, the move generated by this move will too'
        ),
        'warehouse_id': fields.many2one('stock.warehouse',
                                        'Served Warehouse',
                                        help='The warehouse this rule is for'),
        'propagate_warehouse_id': fields.many2one(
            'stock.warehouse',
            'Warehouse to Propagate',
            help=
            "The warehouse to propagate on the created move/procurement, which can be different of the warehouse this rule is for (e.g for resupplying rules from another warehouse)"
        ),
    }

    _defaults = {
        'procure_method': 'make_to_stock',
        'propagate': True,
        'delay': 0,
    }
class config(osv.Model):
    """Google Drive templates configuration: maps a model (and optional
    filter) to a Google Drive template document, and handles creating /
    retrieving per-record copies of that template through the Drive API.

    Fixes vs previous revision:
      * get_access_token: the HTTPError handler referenced an undefined
        ``user_is_admin`` name, raising NameError instead of the intended
        RedirectWarning/UserError — it now performs the same admin check as
        the branch above.
      * get_google_drive_url: the bare ``except:`` around the name-pattern
        formatting no longer swallows SystemExit/KeyboardInterrupt.
    """
    _name = 'google.drive.config'
    _description = "Google Drive templates config"

    def get_google_drive_url(self, cr, uid, config_id, res_id, template_id, context=None):
        """Return the Drive URL for `res_id` under config `config_id`,
        reusing an existing ir.attachment or copying the template document.

        :raises UserError: when the name pattern references a missing key
        """
        config = self.browse(cr, SUPERUSER_ID, config_id, context=context)
        model = config.model_id
        filter_name = config.filter_id and config.filter_id.name or False
        record = self.pool.get(model.model).read(cr, uid, [res_id], context=context)[0]
        record.update({'model': model.name, 'filter': filter_name})
        name_gdocs = config.name_template
        try:
            # Old-style %-formatting against the record dict, e.g. 'Doc %(name)s'.
            name_gdocs = name_gdocs % record
        except Exception:
            raise UserError(
                _("At least one key cannot be found in your Google Drive name pattern"
                  ))

        attach_pool = self.pool.get("ir.attachment")
        attach_ids = attach_pool.search(cr, uid,
                                        [('res_model', '=', model.model),
                                         ('name', '=', name_gdocs),
                                         ('res_id', '=', res_id)])
        url = False
        if attach_ids:
            # A copy already exists for this record: reuse its URL.
            attachment = attach_pool.browse(cr, uid, attach_ids[0], context)
            url = attachment.url
        else:
            url = self.copy_doc(cr, uid, res_id, template_id, name_gdocs,
                                model.model, context).get('url')
        return url

    def get_access_token(self, cr, uid, scope=None, context=None):
        """Exchange the stored refresh token for a fresh OAuth access token.

        :param scope: OAuth scope; defaults to the full Drive scope
        :raises RedirectWarning: for admins, pointing at the settings panel
        :raises UserError: for non-admins when Drive is not configured
        """
        ir_config = self.pool['ir.config_parameter']
        google_drive_refresh_token = ir_config.get_param(
            cr, SUPERUSER_ID, 'google_drive_refresh_token')
        if not google_drive_refresh_token:
            if self.pool['res.users']._is_admin(cr, uid, [uid]):
                model, action_id = self.pool['ir.model.data'].get_object_reference(
                    cr, uid, 'base_setup', 'action_general_configuration')
                msg = _(
                    "You haven't configured 'Authorization Code' generated from google, Please generate and configure it ."
                )
                raise ecore.exceptions.RedirectWarning(
                    msg, action_id, _('Go to the configuration panel'))
            else:
                raise UserError(
                    _("Google Drive is not yet configured. Please contact your administrator."
                      ))
        google_drive_client_id = ir_config.get_param(cr, SUPERUSER_ID,
                                                     'google_drive_client_id')
        google_drive_client_secret = ir_config.get_param(
            cr, SUPERUSER_ID, 'google_drive_client_secret')
        #For Getting New Access Token With help of old Refresh Token
        data = werkzeug.url_encode(
            dict(client_id=google_drive_client_id,
                 refresh_token=google_drive_refresh_token,
                 client_secret=google_drive_client_secret,
                 grant_type="refresh_token",
                 scope=scope or 'https://www.googleapis.com/auth/drive'))
        headers = {"Content-type": "application/x-www-form-urlencoded"}
        try:
            req = urllib2.Request('https://accounts.google.com/o/oauth2/token',
                                  data, headers)
            content = urllib2.urlopen(req, timeout=TIMEOUT).read()
        except urllib2.HTTPError:
            # FIX: previously tested an undefined `user_is_admin` name
            # (NameError); perform the same admin check as above.
            if self.pool['res.users']._is_admin(cr, uid, [uid]):
                model, action_id = self.pool['ir.model.data'].get_object_reference(
                    cr, uid, 'base_setup', 'action_general_configuration')
                msg = _(
                    "Something went wrong during the token generation. Please request again an authorization code ."
                )
                raise ecore.exceptions.RedirectWarning(
                    msg, action_id, _('Go to the configuration panel'))
            else:
                raise UserError(
                    _("Google Drive is not yet configured. Please contact your administrator."
                      ))
        content = json.loads(content)
        return content.get('access_token')

    def copy_doc(self, cr, uid, res_id, template_id, name_gdocs, res_model, context=None):
        """Copy the Drive template `template_id` into a new document named
        `name_gdocs`, attach its URL to the record, and open its permissions
        (anyone-with-link + the current user's email as writer).

        :return: dict with 'id' (attachment id) and 'url' when the copy
            succeeded, empty dict otherwise
        :raises UserError: when the template document cannot be fetched
        """
        ir_config = self.pool['ir.config_parameter']
        google_web_base_url = ir_config.get_param(cr, SUPERUSER_ID, 'web.base.url')
        access_token = self.get_access_token(cr, uid, context=context)
        # Copy template in to drive with help of new access token
        request_url = "https://www.googleapis.com/drive/v2/files/%s?fields=parents/id&access_token=%s" % (
            template_id, access_token)
        headers = {"Content-type": "application/x-www-form-urlencoded"}
        try:
            req = urllib2.Request(request_url, None, headers)
            parents = urllib2.urlopen(req, timeout=TIMEOUT).read()
        except urllib2.HTTPError:
            raise UserError(
                _("The Google Template cannot be found. Maybe it has been deleted."
                  ))
        parents_dict = json.loads(parents)

        record_url = "Click on link to open Record in eCore\n %s/?db=%s#id=%s&model=%s" % (
            google_web_base_url, cr.dbname, res_id, res_model)
        data = {
            "title": name_gdocs,
            "description": record_url,
            "parents": parents_dict['parents']
        }
        request_url = "https://www.googleapis.com/drive/v2/files/%s/copy?access_token=%s" % (
            template_id, access_token)
        headers = {'Content-type': 'application/json', 'Accept': 'text/plain'}
        data_json = json.dumps(data)
        req = urllib2.Request(request_url, data_json, headers)
        content = urllib2.urlopen(req, timeout=TIMEOUT).read()
        content = json.loads(content)
        res = {}
        if content.get('alternateLink'):
            attach_pool = self.pool.get("ir.attachment")
            attach_vals = {
                'res_model': res_model,
                'name': name_gdocs,
                'res_id': res_id,
                'type': 'url',
                'url': content['alternateLink']
            }
            res['id'] = attach_pool.create(cr, uid, attach_vals)
            # Commit in order to attach the document to the current object
            # instance, even if the permissions has not been written.
            cr.commit()
            res['url'] = content['alternateLink']
            key = self._get_key_from_url(res['url'])
            request_url = "https://www.googleapis.com/drive/v2/files/%s/permissions?emailMessage=This+is+a+drive+file+created+by+eCore&sendNotificationEmails=false&access_token=%s" % (
                key, access_token)
            data = {
                'role': 'writer',
                'type': 'anyone',
                'value': '',
                'withLink': True
            }
            try:
                req = urllib2.Request(request_url, json.dumps(data), headers)
                urllib2.urlopen(req, timeout=TIMEOUT)
            except urllib2.HTTPError:
                raise self.pool.get('res.config.settings').get_config_warning(
                    cr,
                    _("The permission 'reader' for 'anyone with the link' has not been written on the document"
                      ),
                    context=context)
            user = self.pool['res.users'].browse(cr, uid, uid, context=context)
            if user.email:
                data = {'role': 'writer', 'type': 'user', 'value': user.email}
                try:
                    req = urllib2.Request(request_url, json.dumps(data), headers)
                    urllib2.urlopen(req, timeout=TIMEOUT)
                except urllib2.HTTPError:
                    # Best effort: the per-user permission is optional.
                    pass
        return res

    def get_google_drive_config(self, cr, uid, res_model, res_id, context=None):
        '''
        Function called by the js, when no google doc are yet associated with a record, with the aim to create one. It
        will first seek for a google.docs.config associated with the model `res_model` to find out what's the template
        of google doc to copy (this is usefull if you want to start with a non-empty document, a type or a name
        different than the default values). If no config is associated with the `res_model`, then a blank text document
        with a default name is created.
          :param res_model: the object for which the google doc is created
          :param res_id: the id of the object for which the google doc is created
          :return: the list of matching config dicts ({'id', 'name'})
        '''
        if not res_id:
            raise UserError(
                _("Creating google drive may only be done by one at a time."))
        # check if a model is configured with a template
        config_ids = self.search(cr, uid, [('model_id', '=', res_model)], context=context)
        configs = []
        for config in self.browse(cr, uid, config_ids, context=context):
            if config.filter_id:
                if (config.filter_id.user_id and config.filter_id.user_id.id != uid):
                    #Private filter of another user: not applicable
                    continue
                # NOTE(review): ir.filters domain/context are eval'ed; these
                # are admin-maintained records, but still eval of stored text.
                domain = [('id', 'in', [res_id])] + eval(config.filter_id.domain)
                local_context = context and context.copy() or {}
                local_context.update(eval(config.filter_id.context))
                google_doc_configs = self.pool.get(config.filter_id.model_id).search(
                    cr, uid, domain, context=local_context)
                if google_doc_configs:
                    configs.append({'id': config.id, 'name': config.name})
            else:
                configs.append({'id': config.id, 'name': config.name})
        return configs

    def _get_key_from_url(self, url):
        """Extract the Drive document key from a URL ('key=' or '/d/' form);
        return None when no key is present."""
        mo = re.search("(key=|/d/)([A-Za-z0-9-_]+)", url)
        if mo:
            return mo.group(2)
        return None

    def _resource_get(self, cr, uid, ids, name, arg, context=None):
        """Function-field getter: Drive resource key parsed from the template
        URL; raises when the URL carries no recognizable key."""
        result = {}
        for data in self.browse(cr, uid, ids, context):
            mo = self._get_key_from_url(data.google_drive_template_url)
            if mo:
                result[data.id] = mo
            else:
                raise UserError(_("Please enter a valid Google Document URL."))
        return result

    def _client_id_get(self, cr, uid, ids, name, arg, context=None):
        """Function-field getter: expose the configured OAuth client id
        (same value for every record)."""
        result = {}
        client_id = self.pool['ir.config_parameter'].get_param(
            cr, SUPERUSER_ID, 'google_drive_client_id')
        for config_id in ids:
            result[config_id] = client_id
        return result

    _columns = {
        'name': fields.char('Template Name', required=True),
        'model_id': fields.many2one('ir.model', 'Model', ondelete='set null', required=True),
        'model': fields.related('model_id', 'model', type='char', string='Model', readonly=True),
        'filter_id': fields.many2one('ir.filters', 'Filter',
                                     domain="[('model_id', '=', model)]"),
        'google_drive_template_url': fields.char('Template URL', required=True, size=1024),
        'google_drive_resource_id': fields.function(_resource_get,
                                                    type="char",
                                                    string='Resource Id'),
        'google_drive_client_id': fields.function(_client_id_get,
                                                  type="char",
                                                  string='Google Client '),
        'name_template': fields.char(
            'Google Drive Name Pattern',
            help=
            'Choose how the new google drive will be named, on google side. Eg. gdoc_%(field_name)s',
            required=True),
        'active': fields.boolean('Active'),
    }

    def onchange_model_id(self, cr, uid, ids, model_id, context=None):
        """Onchange: keep the technical 'model' in sync; reset the filter
        when the model is cleared."""
        res = {}
        if model_id:
            model = self.pool['ir.model'].browse(cr, uid, model_id, context=context)
            res['value'] = {'model': model.model}
        else:
            res['value'] = {'filter_id': False, 'model': False}
        return res

    _defaults = {
        'name_template': 'Document %(name)s',
        'active': True,
    }

    def _check_model_id(self, cr, uid, ids, context=None):
        """Constraint helper: the selected filter must target the same model
        as the template."""
        config_id = self.browse(cr, uid, ids[0], context=context)
        if config_id.filter_id and config_id.model_id.model != config_id.filter_id.model_id:
            return False
        return True

    _constraints = [
        (_check_model_id,
         'Model of selected filter is not matching with model of current template.',
         ['model_id', 'filter_id']),
    ]

    def get_google_scope(self):
        """OAuth scopes required by this module (Drive + Drive files)."""
        return 'https://www.googleapis.com/auth/drive https://www.googleapis.com/auth/drive.file'
class project_issue(osv.Model):
    """Project issue tracking: an old-API (osv) model inheriting mail.thread
    for the mail gateway and chatter integration."""
    _name = "project.issue"
    _description = "Project Issue"
    _order = "priority desc, create_date desc"
    _inherit = ['mail.thread', 'ir.needaction_mixin']
    _mail_post_access = 'read'

    def _get_default_partner(self, cr, uid, context=None):
        # Default partner: the partner of the project given in the context, if any.
        if context is None:
            context = {}
        if 'default_project_id' in context:
            project = self.pool.get('project.project').browse(
                cr, uid, context['default_project_id'], context=context)
            if project and project.partner_id:
                return project.partner_id.id
        return False

    def _get_default_stage_id(self, cr, uid, context=None):
        """ Gives default stage_id """
        if context is None:
            context = {}
        # First non-folded stage matching the context's default project.
        return self.stage_find(cr, uid, [], context.get('default_project_id'),
                               [('fold', '=', False)], context=context)

    def _read_group_stage_ids(self, cr, uid, ids, domain, read_group_order=None,
                              access_rights_uid=None, context=None):
        # Kanban grouping helper: list all stages relevant for the current
        # project (plus the already-used ones), preserving stage ordering,
        # and report each stage's fold flag.
        if context is None:
            context = {}
        access_rights_uid = access_rights_uid or uid
        stage_obj = self.pool.get('project.task.type')
        order = stage_obj._order
        # lame hack to allow reverting search, should just work in the trivial case
        if read_group_order == 'stage_id desc':
            order = "%s desc" % order
        # retrieve team_id from the context, add them to already fetched columns (ids)
        if 'default_project_id' in context:
            search_domain = [
                '|', ('project_ids', '=', context['default_project_id']),
                ('id', 'in', ids)
            ]
        else:
            search_domain = [('id', 'in', ids)]
        # perform search
        stage_ids = stage_obj._search(cr, uid, search_domain, order=order,
                                      access_rights_uid=access_rights_uid,
                                      context=context)
        result = stage_obj.name_get(cr, access_rights_uid, stage_ids,
                                    context=context)
        # restore order of the search
        result.sort(
            lambda x, y: cmp(stage_ids.index(x[0]), stage_ids.index(y[0])))
        fold = {}
        for stage in stage_obj.browse(cr, access_rights_uid, stage_ids,
                                      context=context):
            fold[stage.id] = stage.fold or False
        return result, fold

    def _compute_day(self, cr, uid, ids, fields, args, context=None):
        """Multi-purpose function-field getter computing the time-based KPIs
        of an issue (open/close delays, working hours, age, inactivity).

        @param cr: the current row, from the database cursor,
        @param uid: the current user's ID for security checks,
        @param ids: List of Openday's IDs
        @return: difference between current date and log date
        @param context: A standard dictionary for contextual values
        """
        Calendar = self.pool['resource.calendar']
        res = dict((res_id, {}) for res_id in ids)
        for issue in self.browse(cr, uid, ids, context=context):
            values = {
                'day_open': 0.0,
                'day_close': 0.0,
                'working_hours_open': 0.0,
                'working_hours_close': 0.0,
                'days_since_creation': 0.0,
                'inactivity_days': 0.0,
            }
            # if the working hours on the project are not defined, use default
            # ones (8 -> 12 and 13 -> 17 * 5), represented by None
            calendar_id = None
            if issue.project_id and issue.project_id.resource_calendar_id:
                calendar_id = issue.project_id.resource_calendar_id.id
            dt_create_date = datetime.strptime(issue.create_date,
                                               DEFAULT_SERVER_DATETIME_FORMAT)
            if issue.date_open:
                # Delay between creation and assignment, in days and working hours.
                dt_date_open = datetime.strptime(
                    issue.date_open, DEFAULT_SERVER_DATETIME_FORMAT)
                values['day_open'] = (dt_date_open - dt_create_date
                                      ).total_seconds() / (24.0 * 3600)
                values['working_hours_open'] = Calendar._interval_hours_get(
                    cr, uid, calendar_id, dt_create_date, dt_date_open,
                    timezone_from_uid=issue.user_id.id or uid,
                    exclude_leaves=False, context=context)
            if issue.date_closed:
                # Delay between creation and closing, in days and working hours.
                dt_date_closed = datetime.strptime(
                    issue.date_closed, DEFAULT_SERVER_DATETIME_FORMAT)
                values['day_close'] = (dt_date_closed - dt_create_date
                                       ).total_seconds() / (24.0 * 3600)
                values['working_hours_close'] = Calendar._interval_hours_get(
                    cr, uid, calendar_id, dt_create_date, dt_date_closed,
                    timezone_from_uid=issue.user_id.id or uid,
                    exclude_leaves=False, context=context)
            days_since_creation = datetime.today() - dt_create_date
            values['days_since_creation'] = days_since_creation.days
            # Inactivity: time since the most recent of last action /
            # last stage change / creation.
            if issue.date_action_last:
                inactive_days = datetime.today() - datetime.strptime(
                    issue.date_action_last, DEFAULT_SERVER_DATETIME_FORMAT)
            elif issue.date_last_stage_update:
                inactive_days = datetime.today() - datetime.strptime(
                    issue.date_last_stage_update,
                    DEFAULT_SERVER_DATETIME_FORMAT)
            else:
                inactive_days = datetime.today() - datetime.strptime(
                    issue.create_date, DEFAULT_SERVER_DATETIME_FORMAT)
            values['inactivity_days'] = inactive_days.days
            # filter only required values
            for field in fields:
                res[issue.id][field] = values[field]
        return res

    def on_change_project(self, cr, uid, ids, project_id, context=None):
        # Onchange: propose the project's partner as the issue contact.
        if project_id:
            project = self.pool.get('project.project').browse(cr, uid,
                                                              project_id,
                                                              context=context)
            if project and project.partner_id:
                return {'value': {'partner_id': project.partner_id.id}}
        return {'value': {'partner_id': False}}

    _columns = {
        'id': fields.integer('ID', readonly=True),
        'name': fields.char('Issue', required=True),
        'active': fields.boolean('Active', required=False),
        'create_date': fields.datetime('Creation Date', readonly=True, select=True),
        'write_date': fields.datetime('Update Date', readonly=True),
        'days_since_creation': fields.function(_compute_day, string='Days since creation date', \
                                               multi='compute_day', type="integer", help="Difference in days between creation date and current date"),
        'date_deadline': fields.date('Deadline'),
        'team_id': fields.many2one('crm.team', 'Sales Team', oldname='section_id',\
                        select=True, help='Sales team to which Case belongs to.\
                             Define Responsible user and Email account for mail gateway.'),
        'partner_id': fields.many2one('res.partner', 'Contact', select=1),
        'company_id': fields.many2one('res.company', 'Company'),
        'description': fields.text('Private Note'),
        'kanban_state': fields.selection([('normal', 'Normal'),('blocked', 'Blocked'),('done', 'Ready for next stage')], 'Kanban State',
                                         track_visibility='onchange',
                                         help="A Issue's kanban state indicates special situations affecting it:\n"
                                              " * Normal is the default situation\n"
                                              " * Blocked indicates something is preventing the progress of this issue\n"
                                              " * Ready for next stage indicates the issue is ready to be pulled to the next stage",
                                         required=True),
        'email_from': fields.char('Email', size=128, help="These people will receive email.", select=1),
        'email_cc': fields.char('Watchers Emails', size=256, help="These email addresses will be added to the CC field of all inbound and outbound emails for this record before being sent. Separate multiple email addresses with a comma"),
        'date_open': fields.datetime('Assigned', readonly=True, select=True),
        # Project Issue fields
        'date_closed': fields.datetime('Closed', readonly=True, select=True),
        'date': fields.datetime('Date'),
        'date_last_stage_update': fields.datetime('Last Stage Update', select=True),
        'channel': fields.char('Channel', help="Communication channel."),
        'tag_ids': fields.many2many('project.tags', string='Tags'),
        'priority': fields.selection([('0','Low'), ('1','Normal'), ('2','High')], 'Priority', select=True),
        'stage_id': fields.many2one ('project.task.type', 'Stage',
                                     track_visibility='onchange', select=True,
                                     domain="[('project_ids', '=', project_id)]", copy=False),
        'project_id': fields.many2one('project.project', 'Project', track_visibility='onchange', select=True),
        'duration': fields.float('Duration'),
        'task_id': fields.many2one('project.task', 'Task', domain="[('project_id','=',project_id)]",
                                   help="You can link this issue to an existing task or directly create a new one from here"),
        # The four fields below are recomputed (and stored) whenever
        # date_open / date_closed change — see the store triggers.
        'day_open': fields.function(_compute_day, string='Days to Assign',
                                    multi='compute_day', type="float",
                                    store={'project.issue': (lambda self, cr, uid, ids, c={}: ids, ['date_open'], 10)}),
        'day_close': fields.function(_compute_day, string='Days to Close',
                                     multi='compute_day', type="float",
                                     store={'project.issue': (lambda self, cr, uid, ids, c={}: ids, ['date_closed'], 10)}),
        'user_id': fields.many2one('res.users', 'Assigned to', required=False, select=1, track_visibility='onchange'),
        'working_hours_open': fields.function(_compute_day, string='Working Hours to assign the Issue',
                                              multi='compute_day', type="float",
                                              store={'project.issue': (lambda self, cr, uid, ids, c={}: ids, ['date_open'], 10)}),
        'working_hours_close': fields.function(_compute_day, string='Working Hours to close the Issue',
                                               multi='compute_day', type="float",
                                               store={'project.issue': (lambda self, cr, uid, ids, c={}: ids, ['date_closed'], 10)}),
        'inactivity_days': fields.function(_compute_day, string='Days since last action',
                                           multi='compute_day', type="integer", help="Difference in days between last action and current date"),
        'color': fields.integer('Color Index'),
        'user_email': fields.related('user_id', 'email', type='char', string='User Email', readonly=True),
        'date_action_last': fields.datetime('Last Action', readonly=1),
        'date_action_next': fields.datetime('Next Action', readonly=1),
        'legend_blocked': fields.related("stage_id", "legend_blocked", type="char", string='Kanban Blocked Explanation'),
        'legend_done': fields.related("stage_id", "legend_done", type="char", string='Kanban Valid Explanation'),
        'legend_normal': fields.related("stage_id", "legend_normal", type="char", string='Kanban Ongoing Explanation'),
    }

    _defaults = {
        'active': 1,
        'team_id': lambda s, cr, uid, c: s.pool['crm.team']._get_default_team_id(
            cr, uid, context=c),
        'stage_id': lambda s, cr, uid, c: s._get_default_stage_id(cr, uid, c),
        'company_id': lambda s, cr, uid, c: s.pool['res.users']._get_company(
            cr, uid, context=c),
        'priority': '0',
        'kanban_state': 'normal',
        'date_last_stage_update': fields.datetime.now,
        'user_id': lambda obj, cr, uid, context: uid,
    }

    # Provide all stage columns when grouping kanban views by stage.
    _group_by_full = {'stage_id': _read_group_stage_ids}

    def copy(self, cr, uid, id, default=None, context=None):
        # Duplicate with a "(copy)" suffix on the name.
        issue = self.read(cr, uid, [id], ['name'], context=context)[0]
        if not default:
            default = {}
        default = default.copy()
        default.update(name=_('%s (copy)') % (issue['name']))
        return super(project_issue, self).copy(cr, uid, id, default=default,
                                               context=context)

    def create(self, cr, uid, vals, context=None):
        # Propagate the project into the context (used by stage defaults),
        # stamp the assignment date, and apply the stage onchange side values.
        context = dict(context or {})
        if vals.get('project_id') and not context.get('default_project_id'):
            context['default_project_id'] = vals.get('project_id')
        if vals.get('user_id') and not vals.get('date_open'):
            vals['date_open'] = fields.datetime.now()
        if 'stage_id' in vals:
            vals.update(
                self.onchange_stage_id(cr, uid, None, vals.get('stage_id'),
                                       context=context)['value'])

        # context: no_log, because subtype already handle this
        create_context = dict(context, mail_create_nolog=True)
        return super(project_issue, self).create(cr, uid, vals,
                                                 context=create_context)

    def write(self, cr, uid, ids, vals, context=None):
        # stage change: update date_last_stage_update
        if 'stage_id' in vals:
            vals.update(
                self.onchange_stage_id(cr, uid, ids, vals.get('stage_id'),
                                       context=context)['value'])
            vals['date_last_stage_update'] = fields.datetime.now()
            if 'kanban_state' not in vals:
                # Reset the kanban state when moving to another stage.
                vals['kanban_state'] = 'normal'
        # user_id change: update date_open
        if vals.get('user_id') and 'date_open' not in vals:
            vals['date_open'] = fields.datetime.now()

        return super(project_issue, self).write(cr, uid, ids, vals, context)

    def onchange_task_id(self, cr, uid, ids, task_id, context=None):
        # Onchange: propose the linked task's assignee for the issue.
        if not task_id:
            return {'value': {}}
        task = self.pool.get('project.task').browse(cr, uid, task_id,
                                                    context=context)
        return {
            'value': {
                'user_id': task.user_id.id,
            }
        }

    def onchange_partner_id(self, cr, uid, ids, partner_id, context=None):
        """ This function returns value of partner email address based on partner
            :param part: Partner's id
        """
        if partner_id:
            partner = self.pool['res.partner'].browse(cr, uid, partner_id,
                                                      context)
            return {'value': {'email_from': partner.email}}
        return {'value': {'email_from': False}}

    def get_empty_list_help(self, cr, uid, help, context=None):
        # Customize the empty-list help message to point to the project.
        context = dict(context or {})
        context['empty_list_help_model'] = 'project.project'
        context['empty_list_help_id'] = context.get('default_project_id')
        context['empty_list_help_document_name'] = _("issues")
        return super(project_issue, self).get_empty_list_help(cr, uid, help,
                                                              context=context)

    # -------------------------------------------------------
    # Stage management
    # -------------------------------------------------------

    def onchange_stage_id(self, cr, uid, ids, stage_id, context=None):
        # Stamp/clear the closing date depending on whether the new stage is folded.
        if not stage_id:
            return {'value': {}}
        stage = self.pool['project.task.type'].browse(cr, uid, stage_id,
                                                      context=context)
        if stage.fold:
            return {'value': {'date_closed': fields.datetime.now()}}
        return {'value': {'date_closed': False}}

    def stage_find(self, cr, uid, cases, team_id, domain=[], order='sequence',
                   context=None):
        """ Override of the base.stage method
            Parameter of the stage search taken from the issue:
            - type: stage type must be the same or 'both'
            - team_id: if set, stages must belong to this team or
              be a default case
        """
        if isinstance(cases, (int, long)):
            cases = self.browse(cr, uid, cases, context=context)
        # collect all team_ids
        team_ids = []
        if team_id:
            team_ids.append(team_id)
        for task in cases:
            if task.project_id:
                team_ids.append(task.project_id.id)
        # OR all team_ids and OR with case_default
        search_domain = []
        if team_ids:
            search_domain += [('|')] * (len(team_ids) - 1)
            for team_id in team_ids:
                search_domain.append(('project_ids', '=', team_id))
        search_domain += list(domain)
        # perform search, return the first found
        stage_ids = self.pool.get('project.task.type').search(cr, uid,
                                                              search_domain,
                                                              order=order,
                                                              context=context)
        if stage_ids:
            return stage_ids[0]
        return False

    # -------------------------------------------------------
    # Mail gateway
    # -------------------------------------------------------

    def _track_subtype(self, cr, uid, ids, init_values, context=None):
        # Choose the chatter subtype describing the change that just happened.
        record = self.browse(cr, uid, ids[0], context=context)
        if 'kanban_state' in init_values and record.kanban_state == 'blocked':
            return 'project_issue.mt_issue_blocked'
        elif 'kanban_state' in init_values and record.kanban_state == 'done':
            return 'project_issue.mt_issue_ready'
        elif 'user_id' in init_values and record.user_id:  # assigned -> new
            return 'project_issue.mt_issue_new'
        elif 'stage_id' in init_values and record.stage_id and record.stage_id.sequence <= 1:  # start stage -> new
            return 'project_issue.mt_issue_new'
        elif 'stage_id' in init_values:
            return 'project_issue.mt_issue_stage'
        return super(project_issue, self)._track_subtype(cr, uid, ids,
                                                         init_values,
                                                         context=context)

    def _notification_group_recipients(self, cr, uid, ids, message, recipients,
                                       done_ids, group_data, context=None):
        """ Override the mail.thread method to handle project users and officers
        recipients. Indeed those will have specific action in their notification
        emails: creating tasks, assigning it. """
        group_project_user = self.pool['ir.model.data'].xmlid_to_res_id(
            cr, uid, 'project.group_project_user')
        for recipient in recipients:
            if recipient.id in done_ids:
                continue
            if recipient.user_ids and group_project_user in recipient.user_ids[
                    0].groups_id.ids:
                group_data['group_project_user'] |= recipient
                done_ids.add(recipient.id)
        return super(project_issue, self)._notification_group_recipients(
            cr, uid, ids, message, recipients, done_ids, group_data,
            context=context)

    def _notification_get_recipient_groups(self, cr, uid, ids, message,
                                           recipients, context=None):
        # Add the contextual action buttons ("I take it" / "New Issue")
        # shown to project users in their notification emails.
        res = super(project_issue, self)._notification_get_recipient_groups(
            cr, uid, ids, message, recipients, context=context)
        new_action_id = self.pool['ir.model.data'].xmlid_to_res_id(
            cr, uid, 'project_issue.project_issue_categ_act0')
        take_action = self._notification_link_helper(cr, uid, ids, 'assign',
                                                     context=context)
        new_action = self._notification_link_helper(cr, uid, ids, 'new',
                                                    context=context,
                                                    action_id=new_action_id)
        task_record = self.browse(cr, uid, ids[0], context=context)
        actions = []
        if not task_record.user_id:
            actions.append({'url': take_action, 'title': _('I take it')})
        else:
            actions.append({'url': new_action, 'title': _('New Issue')})
        res['group_project_user'] = {'actions': actions}
        return res

    @api.cr_uid_context
    def message_get_reply_to(self, cr, uid, ids, default=None, context=None):
        """ Override to get the reply_to of the parent project. """
        issues = self.browse(cr, SUPERUSER_ID, ids, context=context)
        project_ids = set(
            [issue.project_id.id for issue in issues if issue.project_id])
        aliases = self.pool['project.project'].message_get_reply_to(
            cr, uid, list(project_ids), default=default, context=context)
        return dict(
            (issue.id,
             aliases.get(issue.project_id and issue.project_id.id or 0, False))
            for issue in issues)

    def message_get_suggested_recipients(self, cr, uid, ids, context=None):
        # Suggest the issue's partner (or raw email) as a follower candidate.
        recipients = super(project_issue,
                           self).message_get_suggested_recipients(
                               cr, uid, ids, context=context)
        try:
            for issue in self.browse(cr, uid, ids, context=context):
                if issue.partner_id:
                    issue._message_add_suggested_recipient(
                        recipients, partner=issue.partner_id,
                        reason=_('Customer'))
                elif issue.email_from:
                    issue._message_add_suggested_recipient(
                        recipients, email=issue.email_from,
                        reason=_('Customer Email'))
        except AccessError:  # no read access rights -> just ignore suggested recipients because this imply modifying followers
            pass
        return recipients

    def email_split(self, cr, uid, ids, msg, context=None):
        # Extract all to/cc addresses from the message, minus project aliases.
        email_list = tools.email_split((msg.get('to') or '') + ',' +
                                       (msg.get('cc') or ''))
        # check left-part is not already an alias
        issue_ids = self.browse(cr, uid, ids, context=context)
        aliases = [
            issue.project_id.alias_name for issue in issue_ids
            if issue.project_id
        ]
        return filter(lambda x: x.split('@')[0] not in aliases, email_list)

    def message_new(self, cr, uid, msg, custom_values=None, context=None):
        """ Overrides mail_thread message_new that is called by the mailgateway
            through message_process.
            This override updates the document according to the email.
        """
        if custom_values is None:
            custom_values = {}
        context = dict(context or {}, state_to='draft')
        defaults = {
            'name': msg.get('subject') or _("No Subject"),
            'email_from': msg.get('from'),
            'email_cc': msg.get('cc'),
            'partner_id': msg.get('author_id', False),
            'user_id': False,
        }
        defaults.update(custom_values)
        res_id = super(project_issue, self).message_new(cr, uid, msg,
                                                        custom_values=defaults,
                                                        context=context)
        # Subscribe every (possibly newly created) partner found in to/cc.
        email_list = self.email_split(cr, uid, [res_id], msg, context=context)
        partner_ids = self._find_partner_from_emails(cr, uid, [res_id],
                                                     email_list,
                                                     force_create=True,
                                                     context=context)
        self.message_subscribe(cr, uid, [res_id], partner_ids, context=context)
        return res_id

    def message_update(self, cr, uid, ids, msg, update_vals=None, context=None):
        """ Override to update the issue according to the email. """
        email_list = self.email_split(cr, uid, ids, msg, context=context)
        partner_ids = self._find_partner_from_emails(cr, uid, ids, email_list,
                                                     force_create=True,
                                                     context=context)
        self.message_subscribe(cr, uid, ids, partner_ids, context=context)
        return super(project_issue, self).message_update(
            cr, uid, ids, msg, update_vals=update_vals, context=context)

    @api.cr_uid_ids_context
    @api.returns('mail.message', lambda value: value.id)
    def message_post(self, cr, uid, thread_id, subtype=None, context=None,
                     **kwargs):
        """ Overrides mail_thread message_post so that we can set the date of
            last action field when a new message is posted on the issue.
        """
        if context is None:
            context = {}
        res = super(project_issue, self).message_post(cr, uid, thread_id,
                                                      subtype=subtype,
                                                      context=context,
                                                      **kwargs)
        if thread_id and subtype:
            # Written as superuser: posting a message must not require write access.
            self.write(cr, SUPERUSER_ID, thread_id,
                       {'date_action_last': fields.datetime.now()},
                       context=context)
        return res
class mrp_product_produce(osv.osv_memory):
    """Wizard used to consume raw materials and/or produce finished goods
    for the manufacturing order given by ``context['active_id']``."""
    _name = "mrp.product.produce"
    _description = "Product Produce"

    _columns = {
        'product_id': fields.many2one('product.product', type='many2one'),
        'product_qty': fields.float(
            'Select Quantity',
            digits_compute=dp.get_precision('Product Unit of Measure'),
            required=True),
        'mode': fields.selection(
            [('consume_produce', 'Consume & Produce'),
             ('consume', 'Consume Only')],
            'Mode', required=True,
            help="'Consume only' mode will only consume the products with the quantity selected.\n"
                 "'Consume & Produce' mode will consume as well as produce the products with the quantity selected "
                 "and it will finish the production order when total ordered quantities are produced."),
        # Should only be visible when it is consume and produce mode
        'lot_id': fields.many2one('stock.production.lot', 'Lot'),
        'consume_lines': fields.one2many('mrp.product.produce.line',
                                         'produce_id', 'Products Consumed'),
        'tracking': fields.related(
            'product_id', 'tracking', type='selection',
            selection=[('serial', 'By Unique Serial Number'),
                       ('lot', 'By Lots'),
                       ('none', 'No Tracking')]),
    }

    def on_change_qty(self, cr, uid, ids, product_qty, consume_lines,
                      context=None):
        """Recompute the raw-material consume lines for the quantity entered.

        Takes the scheduled products and what was already consumed/produced
        into account; the user can still adapt the proposed lines afterwards.

        :return: onchange dict with the new one2many commands for consume_lines
        """
        production_pool = self.pool.get("mrp.production")
        uom_pool = self.pool.get("product.uom")
        mo = production_pool.browse(cr, uid, context['active_id'],
                                    context=context)
        needed = []
        if product_qty > 0.0:
            # Convert the wizard quantity (order UoM) into the product's UoM.
            qty_in_product_uom = uom_pool._compute_qty(
                cr, uid, mo.product_uom.id, product_qty,
                mo.product_id.uom_id.id)
            needed = production_pool._calculate_qty(
                cr, uid, mo, product_qty=qty_in_product_uom, context=context)
        # Wrap each value dict in a one2many "create" command: [0, False, vals].
        return {'value': {'consume_lines': [[0, False, vals] for vals in needed]}}

    def _get_product_qty(self, cr, uid, context=None):
        """Default for product_qty: quantity still to produce on the MO.

        :return: ordered quantity minus the (non-scrapped) quantity already
                 produced for the order's product
        """
        if context is None:
            context = {}
        mo = self.pool.get('mrp.production').browse(
            cr, uid, context['active_id'], context=context)
        # As uom of produced products and production order should correspond
        produced = sum(
            (move.product_uom_qty for move in mo.move_created_ids2
             if move.product_id == mo.product_id and not move.scrapped), 0.0)
        return mo.product_qty - produced

    def _get_product_id(self, cr, uid, context=None):
        """Default for product_id: the MO's product, when an active_id is set."""
        if not (context and context.get("active_id")):
            return False
        mo = self.pool.get('mrp.production').browse(
            cr, uid, context['active_id'], context=context)
        return mo and mo.product_id.id or False

    def _get_track(self, cr, uid, context=None):
        """Default for tracking: the default product's tracking policy."""
        product_id = self._get_product_id(cr, uid, context=context)
        if not product_id:
            return 'none'
        product = self.pool.get("product.product").browse(
            cr, uid, product_id, context=context)
        return product.tracking or 'none'

    _defaults = {
        'product_qty': _get_product_qty,
        'mode': lambda *x: 'consume_produce',
        'product_id': _get_product_id,
        'tracking': _get_track,
    }

    def do_produce(self, cr, uid, ids, context=None):
        """Confirm the wizard: delegate to mrp.production.action_produce with
        the selected quantity, mode and the wizard record itself (which
        carries the lot and the adapted consume lines)."""
        production_id = context.get('active_id', False)
        assert production_id, "Production Id should be specified in context as a Active ID."
        wizard = self.browse(cr, uid, ids[0], context=context)
        self.pool.get('mrp.production').action_produce(
            cr, uid, production_id, wizard.product_qty, wizard.mode, wizard,
            context=context)
        return {}