class res_company(osv.osv): _name = "res.company" _description = 'Companies' _order = 'name' def _get_address_data(self, cr, uid, ids, field_names, arg, context=None): """ Read the 'address' functional fields. """ result = {} part_obj = self.pool.get('res.partner') for company in self.browse(cr, uid, ids, context=context): result[company.id] = {}.fromkeys(field_names, False) if company.partner_id: address_data = part_obj.address_get(cr, openerp.SUPERUSER_ID, [company.partner_id.id], adr_pref=['default']) if address_data['default']: address = part_obj.read(cr, openerp.SUPERUSER_ID, address_data['default'], field_names, context=context) for field in field_names: result[company.id][field] = address[field] or False return result def _set_address_data(self, cr, uid, company_id, name, value, arg, context=None): """ Write the 'address' functional fields. """ company = self.browse(cr, uid, company_id, context=context) if company.partner_id: part_obj = self.pool.get('res.partner') address_data = part_obj.address_get(cr, uid, [company.partner_id.id], adr_pref=['default']) address = address_data['default'] if address: part_obj.write(cr, uid, [address], {name: value or False}, context=context) else: part_obj.create(cr, uid, { name: value or False, 'parent_id': company.partner_id.id }, context=context) return True def _get_logo_web(self, cr, uid, ids, _field_name, _args, context=None): result = dict.fromkeys(ids, False) for record in self.browse(cr, uid, ids, context=context): size = (180, None) result[record.id] = image_resize_image(record.partner_id.image, size) return result def _get_companies_from_partner(self, cr, uid, ids, context=None): return self.pool['res.company'].search(cr, uid, [('partner_id', 'in', ids)], context=context) _columns = { 'name': fields.related('partner_id', 'name', string='Company Name', size=128, required=True, store=True, type='char'), 'parent_id': fields.many2one('res.company', 'Parent Company', select=True), 'child_ids': fields.one2many('res.company', 'parent_id', 'Child Companies'), 'partner_id': fields.many2one('res.partner', 'Partner', required=True), 'rml_header': fields.text('RML Header', required=True), 'rml_header1': fields.char( 'Company Tagline', size=200, help= "Appears by default on the top right corner of your printed documents (report header)." ), 'rml_header2': fields.text('RML Internal Header', required=True), 'rml_header3': fields.text('RML Internal Header for Landscape Reports', required=True), 'rml_footer': fields.text( 'Report Footer', help="Footer text displayed at the bottom of all reports."), 'rml_footer_readonly': fields.related('rml_footer', type='text', string='Report Footer', readonly=True), 'custom_footer': fields.boolean( 'Custom Footer', help= "Check this to define the report footer manually. Otherwise it will be filled in automatically." 
), 'font': fields.many2one( 'res.font', string="Font", help= "Set the font into the report header, it will be used as default font in the RML reports of the user company" ), 'logo': fields.related('partner_id', 'image', string="Logo", type="binary"), 'logo_web': fields.function(_get_logo_web, string="Logo Web", type="binary", store={ 'res.company': (lambda s, c, u, i, x: i, ['partner_id'], 10), 'res.partner': (_get_companies_from_partner, ['image'], 10), }), 'currency_id': fields.many2one('res.currency', 'Currency', required=True), 'currency_ids': fields.one2many('res.currency', 'company_id', 'Currency'), 'user_ids': fields.many2many('res.users', 'res_company_users_rel', 'cid', 'user_id', 'Accepted Users'), 'account_no': fields.char('Account No.', size=64), 'street': fields.function(_get_address_data, fnct_inv=_set_address_data, size=128, type='char', string="Street", multi='address'), 'street2': fields.function(_get_address_data, fnct_inv=_set_address_data, size=128, type='char', string="Street2", multi='address'), 'zip': fields.function(_get_address_data, fnct_inv=_set_address_data, size=24, type='char', string="Zip", multi='address'), 'city': fields.function(_get_address_data, fnct_inv=_set_address_data, size=24, type='char', string="City", multi='address'), 'state_id': fields.function(_get_address_data, fnct_inv=_set_address_data, type='many2one', relation='res.country.state', string="Fed. State", multi='address'), 'bank_ids': fields.one2many('res.partner.bank', 'company_id', 'Bank Accounts', help='Bank accounts related to this company'), 'country_id': fields.function(_get_address_data, fnct_inv=_set_address_data, type='many2one', relation='res.country', string="Country", multi='address'), 'email': fields.related('partner_id', 'email', size=64, type='char', string="Email", store=True), 'phone': fields.related('partner_id', 'phone', size=64, type='char', string="Phone", store=True), 'fax': fields.function(_get_address_data, fnct_inv=_set_address_data, size=64, type='char', string="Fax", multi='address'), 'website': fields.related('partner_id', 'website', string="Website", type="char", size=64), 'vat': fields.related('partner_id', 'vat', string="Tax ID", type="char", size=32), 'company_registry': fields.char('Company Registry', size=64), 'paper_format': fields.selection([('a4', 'A4'), ('us_letter', 'US Letter')], "Paper Format", required=True), } _sql_constraints = [('name_uniq', 'unique (name)', 'The company name must be unique !')] def onchange_footer(self, cr, uid, ids, custom_footer, phone, fax, email, website, vat, company_registry, bank_ids, context=None): if custom_footer: return {} # first line (notice that missing elements are filtered out before the join) res = ' | '.join( filter(bool, [ phone and '%s: %s' % (_('Phone'), phone), fax and '%s: %s' % (_('Fax'), fax), email and '%s: %s' % (_('Email'), email), website and '%s: %s' % (_('Website'), website), vat and '%s: %s' % (_('TIN'), vat), company_registry and '%s: %s' % (_('Reg'), company_registry), ])) # second line: bank accounts res_partner_bank = self.pool.get('res.partner.bank') account_data = self.resolve_2many_commands(cr, uid, 'bank_ids', bank_ids, context=context) account_names = res_partner_bank._prepare_name_get(cr, uid, account_data, context=context) if account_names: title = _('Bank Accounts') if len(account_names) > 1 else _( 'Bank Account') res += '\n%s: %s' % (title, ', '.join( name for id, name in account_names)) return {'value': {'rml_footer': res, 'rml_footer_readonly': res}} def onchange_state(self, cr, uid, 
ids, state_id, context=None): if state_id: return { 'value': { 'country_id': self.pool.get('res.country.state').browse( cr, uid, state_id, context).country_id.id } } return {} def onchange_font_name(self, cr, uid, ids, font, rml_header, rml_header2, rml_header3, context=None): """ To change default header style of all <para> and drawstring. """ def _change_header(header, font): """ Replace default fontname use in header and setfont tag """ default_para = re.sub('fontName.?=.?".*"', 'fontName="%s"' % font, header) return re.sub('(<setFont.?name.?=.?)(".*?")(.)', '\g<1>"%s"\g<3>' % font, default_para) if not font: return True fontname = self.pool.get('res.font').browse(cr, uid, font, context=context).name return { 'value': { 'rml_header': _change_header(rml_header, fontname), 'rml_header2': _change_header(rml_header2, fontname), 'rml_header3': _change_header(rml_header3, fontname) } } def on_change_country(self, cr, uid, ids, country_id, context=None): res = {'domain': {'state_id': []}} currency_id = self._get_euro(cr, uid, context=context) if country_id: currency_id = self.pool.get('res.country').browse( cr, uid, country_id, context=context).currency_id.id res['domain'] = {'state_id': [('country_id', '=', country_id)]} res['value'] = {'currency_id': currency_id} return res def _search(self, cr, uid, args, offset=0, limit=None, order=None, context=None, count=False, access_rights_uid=None): if context is None: context = {} if context.get('user_preference'): # We browse as superuser. Otherwise, the user would be able to # select only the currently visible companies (according to rules, # which are probably to allow to see the child companies) even if # she belongs to some other companies. user = self.pool.get('res.users').browse(cr, SUPERUSER_ID, uid, context=context) cmp_ids = list( set([user.company_id.id] + [cmp.id for cmp in user.company_ids])) return cmp_ids return super(res_company, self)._search(cr, uid, args, offset=offset, limit=limit, order=order, context=context, count=count, access_rights_uid=access_rights_uid) def _company_default_get(self, cr, uid, object=False, field=False, context=None): """ Check if the object for this company have a default value """ if not context: context = {} proxy = self.pool.get('multi_company.default') args = [ ('object_id.model', '=', object), ('field_id', '=', field), ] ids = proxy.search(cr, uid, args, context=context) user = self.pool.get('res.users').browse(cr, SUPERUSER_ID, uid, context=context) for rule in proxy.browse(cr, uid, ids, context): if eval(rule.expression, {'context': context, 'user': user}): return rule.company_dest_id.id return user.company_id.id @tools.ormcache() def _get_company_children(self, cr, uid=None, company=None): if not company: return [] ids = self.search(cr, uid, [('parent_id', 'child_of', [company])]) return ids def _get_partner_hierarchy(self, cr, uid, company_id, context=None): if company_id: parent_id = self.browse(cr, uid, company_id)['parent_id'] if parent_id: return self._get_partner_hierarchy(cr, uid, parent_id.id, context) else: return self._get_partner_descendance(cr, uid, company_id, [], context) return [] def _get_partner_descendance(self, cr, uid, company_id, descendance, context=None): descendance.append(self.browse(cr, uid, company_id).partner_id.id) for child_id in self._get_company_children(cr, uid, company_id): if child_id != company_id: descendance = self._get_partner_descendance( cr, uid, child_id, descendance) return descendance # # This function restart the cache on the _get_company_children method 
# def cache_restart(self, cr): self._get_company_children.clear_cache(self) def create(self, cr, uid, vals, context=None): if not vals.get('name', False) or vals.get('partner_id', False): self.cache_restart(cr) return super(res_company, self).create(cr, uid, vals, context=context) obj_partner = self.pool.get('res.partner') partner_id = obj_partner.create(cr, uid, { 'name': vals['name'], 'is_company': True, 'image': vals.get('logo', False) }, context=context) vals.update({'partner_id': partner_id}) self.cache_restart(cr) company_id = super(res_company, self).create(cr, uid, vals, context=context) obj_partner.write(cr, uid, [partner_id], {'company_id': company_id}, context=context) return company_id def write(self, cr, uid, ids, values, context=None): self.cache_restart(cr) return super(res_company, self).write(cr, uid, ids, values, context=context) def _get_euro(self, cr, uid, context=None): rate_obj = self.pool.get('res.currency.rate') rate_id = rate_obj.search(cr, uid, [('rate', '=', 1)], context=context) return rate_id and rate_obj.browse( cr, uid, rate_id[0], context=context).currency_id.id or False def _get_logo(self, cr, uid, ids): return open( os.path.join(tools.config['root_path'], 'addons', 'base', 'res', 'res_company_logo.png'), 'rb').read().encode('base64') def _get_font(self, cr, uid, ids): font_obj = self.pool.get('res.font') res = font_obj.search(cr, uid, [('family', '=', 'Helvetica'), ('mode', '=', 'all')], limit=1) return res and res[0] or False _header = """ <header> <pageTemplate> <frame id="first" x1="28.0" y1="28.0" width="%s" height="%s"/> <stylesheet> <!-- Set here the default font to use for all <para> tags --> <paraStyle name='Normal' fontName="DejaVuSans"/> </stylesheet> <pageGraphics> <fill color="black"/> <stroke color="black"/> <setFont name="DejaVuSans" size="8"/> <drawString x="%s" y="%s"> [[ formatLang(time.strftime("%%Y-%%m-%%d"), date=True) ]] [[ time.strftime("%%H:%%M") ]]</drawString> <setFont name="DejaVuSans-Bold" size="10"/> <drawCentredString x="%s" y="%s">[[ company.partner_id.name ]]</drawCentredString> <stroke color="#000000"/> <lines>%s</lines> <!-- Set here the default font to use for all <drawString> tags --> <!-- don't forget to change the 2 other occurence of <setFont> above if needed --> <setFont name="DejaVuSans" size="8"/> </pageGraphics> </pageTemplate> </header>""" _header2 = _header % (539, 772, "1.0cm", "28.3cm", "11.1cm", "28.3cm", "1.0cm 28.1cm 20.1cm 28.1cm") _header3 = _header % (786, 525, 25, 555, 440, 555, "25 550 818 550") def _get_header(self, cr, uid, ids): try: header_file = tools.file_open( os.path.join('base', 'report', 'corporate_rml_header.rml')) try: return header_file.read() finally: header_file.close() except: return self._header_a4 _header_main = """ <header> <pageTemplate> <frame id="first" x1="1.3cm" y1="3.0cm" height="%s" width="19.0cm"/> <stylesheet> <!-- Set here the default font to use for all <para> tags --> <paraStyle name='Normal' fontName="DejaVuSans"/> <paraStyle name="main_footer" fontSize="8.0" alignment="CENTER"/> <paraStyle name="main_header" fontSize="8.0" leading="10" alignment="LEFT" spaceBefore="0.0" spaceAfter="0.0"/> </stylesheet> <pageGraphics> <!-- Set here the default font to use for all <drawString> tags --> <setFont name="DejaVuSans" size="8"/> <!-- You Logo - Change X,Y,Width and Height --> <image x="1.3cm" y="%s" height="40.0" >[[ company.logo or removeParentNode('image') ]]</image> <fill color="black"/> <stroke color="black"/> <!-- page header --> <lines>1.3cm %s 20cm %s</lines> 
<drawRightString x="20cm" y="%s">[[ company.rml_header1 ]]</drawRightString> <drawString x="1.3cm" y="%s">[[ company.partner_id.name ]]</drawString> <place x="1.3cm" y="%s" height="1.8cm" width="15.0cm"> <para style="main_header">[[ display_address(company.partner_id) or '' ]]</para> </place> <drawString x="1.3cm" y="%s">Phone:</drawString> <drawRightString x="7cm" y="%s">[[ company.partner_id.phone or '' ]]</drawRightString> <drawString x="1.3cm" y="%s">Mail:</drawString> <drawRightString x="7cm" y="%s">[[ company.partner_id.email or '' ]]</drawRightString> <lines>1.3cm %s 7cm %s</lines> <!-- left margin --> <rotate degrees="90"/> <fill color="grey"/> <drawString x="2.65cm" y="-0.4cm">generated by OpenERP.com</drawString> <fill color="black"/> <rotate degrees="-90"/> <!--page bottom--> <lines>1.2cm 2.65cm 19.9cm 2.65cm</lines> <place x="1.3cm" y="0cm" height="2.55cm" width="19.0cm"> <para style="main_footer">[[ company.rml_footer ]]</para> <para style="main_footer">Contact : [[ user.name ]] - Page: <pageNumber/></para> </place> </pageGraphics> </pageTemplate> </header>""" _header_a4 = _header_main % ( '21.7cm', '27.7cm', '27.7cm', '27.7cm', '27.8cm', '27.3cm', '25.3cm', '25.0cm', '25.0cm', '24.6cm', '24.6cm', '24.5cm', '24.5cm') _header_letter = _header_main % ( '20cm', '26.0cm', '26.0cm', '26.0cm', '26.1cm', '25.6cm', '23.6cm', '23.3cm', '23.3cm', '22.9cm', '22.9cm', '22.8cm', '22.8cm') def onchange_paper_format(self, cr, uid, ids, paper_format, context=None): if paper_format == 'us_letter': return {'value': {'rml_header': self._header_letter}} return {'value': {'rml_header': self._header_a4}} def act_discover_fonts(self, cr, uid, ids, context=None): return self.pool.get("res.font").font_scan(cr, uid, context=context) _defaults = { 'currency_id': _get_euro, 'paper_format': 'a4', 'rml_header': _get_header, 'rml_header2': _header2, 'rml_header3': _header3, 'logo': _get_logo, 'font': _get_font, } _constraints = [ (osv.osv._check_recursion, 'Error! You can not create recursive companies.', ['parent_id']) ]
class hr_rpt_attend_emp_day(osv.osv): _name = "hr.rpt.attend.emp.day" _description = "HR Attendance Employee Daily Report" _columns = { 'name': fields.char('Report Name', size=32, required=False), 'title': fields.char('Report Title', required=False), 'type': fields.char('Report Type', size=16, required=True), 'company_id': fields.many2one('res.company', 'Company', required=True), #report data lines 'rpt_lines': fields.one2many('hr.rpt.attend.emp.day.line', 'rpt_id', string='Report Line'), 'date_from': fields.datetime("Start Date", required=True), 'date_to': fields.datetime("End Date", required=True), 'emp_ids': fields.many2many('hr.employee', string='Selected Employees'), 'state': fields.selection([ ('draft', 'Draft'), ('confirmed', 'Confirmed'), ('cancel', 'Cancel'), ], 'Status', select=True, readonly=True, track_visibility='onchange'), 'note': fields.text('Description', readonly=False, states={'done': [('readonly', True)]}), 'attend_month_ids': fields.one2many('hr.rpt.attend.month', 'attend_day_id', string='Attendances Monthly', readonly=True), } _defaults = { 'type': 'attend_emp_day', 'company_id': lambda self, cr, uid, c: self.pool.get('res.company'). _company_default_get(cr, uid, 'account.rptcn', context=c), 'state': 'draft', } def copy(self, cr, uid, id, default=None, context=None): if not default: default = {} default['attend_month_ids'] = None default['rpt_lines'] = None return super(hr_rpt_attend_emp_day, self).copy(cr, uid, id, default, context) def default_get(self, cr, uid, fields, context=None): vals = super(hr_rpt_attend_emp_day, self).default_get(cr, uid, fields, context=context) if 'date_from' in fields: #For the datetime value in defaults, need convert the local time to UTC, the web framework will convert them back to local time on GUI date_from = datetime.strptime(time.strftime('%Y-%m-01 00:00:00'), '%Y-%m-%d %H:%M:%S') date_from_utc = utils.utc_timestamp(cr, uid, date_from, context) vals.update({ 'date_from': date_from_utc.strftime(DEFAULT_SERVER_DATETIME_FORMAT) }) if 'date_to' in fields: date_to = datetime.strptime(time.strftime('%Y-%m-%d 23:59:59'), '%Y-%m-%d %H:%M:%S') date_to_utc = utils.utc_timestamp(cr, uid, date_to, context) vals.update({ 'date_to': date_to_utc.strftime(DEFAULT_SERVER_DATETIME_FORMAT) }) return vals def _check_dates(self, cr, uid, ids, context=None): for wiz in self.browse(cr, uid, ids, context=context): if wiz.date_from and wiz.date_to and wiz.date_from > wiz.date_to: return False return True _constraints = [ (_check_dates, 'The date end must be after the date start.', ['date_from', 'date_to']), ] def get_report_name(self, cr, uid, id, rpt_name, context=None): return "Attendance Employee Daily Report" def name_get(self, cr, uid, ids, context=None): if not ids: return [] if isinstance(ids, (int, long)): ids = [ids] res = [] for row in self.read(cr, uid, ids, ['name'], context=context): res.append((row['id'], '[%s]%s' % (row['id'], row['name']))) return res def _convert_save_dates(self, cr, uid, vals, context): #convert to the date like '2013-01-01' to UTC datetime to store if 'date_from' in vals and len(vals['date_from']) == 10: date_from = vals['date_from'] date_from = utils.utc_timestamp( cr, uid, datetime.strptime(date_from + ' 00:00:00', DEFAULT_SERVER_DATETIME_FORMAT), context=context) date_from = date_from.strftime(DEFAULT_SERVER_DATETIME_FORMAT) vals['date_from'] = date_from if 'date_to' in vals and len(vals['date_to']) == 10: date_to = vals['date_to'] date_to = utils.utc_timestamp(cr, uid, datetime.strptime( date_to + ' 23:59:59', 
                DEFAULT_SERVER_DATETIME_FORMAT), context=context)
            date_to = date_to.strftime(DEFAULT_SERVER_DATETIME_FORMAT)
            vals['date_to'] = date_to

    def create(self, cr, uid, vals, context=None):
        if 'name' not in vals or not vals['name']:
            date_to = vals['date_to']
            if date_to and len(date_to) == 10:
                date_to = vals['date_to'] + ' 00:00:00'
            date_to = datetime.strptime(date_to, DEFAULT_SERVER_DATETIME_FORMAT)
            name = '%s-%s' % (date_to.year, date_to.month)
            vals['name'] = name
        self._convert_save_dates(cr, uid, vals, context)
        id_new = super(hr_rpt_attend_emp_day, self).create(cr, uid, vals,
                                                           context=context)
        return id_new

    def write(self, cr, uid, ids, vals, context=None):
        if isinstance(ids, (int, long)):
            ids = [ids]
        self._convert_save_dates(cr, uid, vals, context)
        old_emp_ids = []
        if 'emp_ids' in vals:
            old_emp_ids = self.read(cr, uid, ids[0], ['emp_ids'],
                                    context=context)['emp_ids']
        resu = super(hr_rpt_attend_emp_day, self).write(cr, uid, ids, vals,
                                                        context=context)
        new_emp_ids = self.read(cr, uid, ids[0], ['emp_ids'],
                                context=context)['emp_ids']
        if old_emp_ids:
            del_emp_ids = []
            if new_emp_ids:
                for emp_id in old_emp_ids:
                    if emp_id not in new_emp_ids:
                        del_emp_ids.append(emp_id)
            else:
                del_emp_ids = old_emp_ids
            # unlink report lines of the removed employees
            if del_emp_ids:
                rpt_line_obj = self.pool.get('hr.rpt.attend.emp.day.line')
                unlink_line_ids = rpt_line_obj.search(
                    cr, uid, [('rpt_id', '=', ids[0]),
                              ('emp_id', 'in', del_emp_ids)])
                rpt_line_obj.unlink(cr, uid, unlink_line_ids, context=context)
        return resu

    def unlink(self, cr, uid, ids, context=None):
        for rpt in self.read(cr, uid, ids, ['state'], context=context):
            if rpt['state'] not in ('draft', 'cancel'):
                raise osv.except_osv(
                    _('Error'),
                    _('Only reports in Draft/Cancel state can be deleted!'))
        return super(hr_rpt_attend_emp_day, self).unlink(cr, uid, ids,
                                                         context=context)

    def action_confirm(self, cr, uid, ids, context=None):
        self.write(cr, uid, ids, {'state': 'confirmed'})
        return True

    def action_cancel_draft(self, cr, uid, ids, context=None):
        self.write(cr, uid, ids, {'state': 'draft'})
        return True

    def action_cancel(self, cr, uid, ids, context=None):
        for rpt in self.browse(cr, uid, ids, context=context):
            if rpt.attend_month_ids:
                for attend_month in rpt.attend_month_ids:
                    if attend_month.state != 'cancel':
                        raise osv.except_osv(
                            _('Error!'),
                            _('There are related monthly attendance reports, '
                              'please cancel or delete them first!'))
        self.write(cr, uid, ids, {'state': 'cancel'})
        return True

    # generate a new monthly report
    def new_attend_month(self, cr, uid, ids, context=None):
        rpt_id = ids[0]
        # read daily report data, create a new monthly report based on it
rpt = self.browse(cr, uid, rpt_id, context=context) rpt_month_obj = self.pool.get('hr.rpt.attend.month') vals = { 'date_from': rpt.date_from, 'date_to': rpt.date_to, 'emp_ids': [(4, emp.id) for emp in rpt.emp_ids], 'company_id': rpt.company_id.id, 'attend_day_id': rpt.id } rpt_month_id = rpt_month_obj.create(cr, uid, vals, context=context) #generate report rpt_month_obj.run_report(cr, uid, [rpt_month_id], context=context) #go to the attendances monthly report view page form_view = self.pool.get('ir.model.data').get_object_reference( cr, uid, 'dmp_hr_attend', 'hr_rpt_attend_month_view') form_view_id = form_view and form_view[1] or False return { 'name': _('Attendances Monthly Report'), 'view_type': 'form', 'view_mode': 'form', 'view_id': [form_view_id], 'res_model': 'hr.rpt.attend.month', 'type': 'ir.actions.act_window', 'target': 'current', 'res_id': rpt_month_id, } #view monthly report def view_attend_month(self, cr, uid, ids, context=None): rpt_id = ids[0] #read daily report data, create new monthly report based on it. rpt = self.read(cr, uid, rpt_id, ['attend_month_ids'], context=context) rpt_month_ids = rpt['attend_month_ids'] if not rpt_month_ids: raise osv.except_osv(_('Error!'), _('No monthly attendance report generated!')) if len(rpt_month_ids) > 1: #got to list page act_id = self.pool.get('ir.model.data').get_object_reference( cr, uid, 'dmp_hr_attend', 'hr_rpt_attend_month_action') act_id = act_id and act_id[1] or False act_win = self.pool.get('ir.actions.act_window').read( cr, uid, act_id, [], context=context) act_win['context'] = {'search_default_attend_day_id': rpt['id']} return act_win else: #go to form page form_view = self.pool.get('ir.model.data').get_object_reference( cr, uid, 'dmp_hr_attend', 'hr_rpt_attend_month_view') form_view_id = form_view and form_view[1] or False return { 'name': _('Attendances Monthly Report'), 'view_type': 'form', 'view_mode': 'form', 'view_id': [form_view_id], 'res_model': 'hr.rpt.attend.month', 'type': 'ir.actions.act_window', 'target': 'current', 'res_id': rpt_month_ids[0], } def _attend_hours(self, hours_valid, period): if hours_valid + 0.5 >= period.hours_work_normal: hours_normal = period.hours_work_normal else: hours_normal = hours_valid hours_ot = hours_valid - hours_normal if hours_ot + 0.5 >= period.hours_work_ot: hours_ot = period.hours_work_ot #the second time group if hours_valid + 0.5 >= period.hours_work_normal2: hours_normal2 = period.hours_work_normal2 else: hours_normal2 = hours_valid hours_ot2 = hours_valid - hours_normal2 if hours_ot2 + 0.5 >= period.hours_work_ot2: hours_ot2 = period.hours_work_ot2 return hours_normal, hours_ot, hours_normal2, hours_ot2 def run_report(self, cr, uid, ids, context=None, emp_ids=None): rpt = self.browse(cr, uid, ids, context=context)[0] if not rpt.emp_ids: raise osv.except_osv( _('Warning!'), _('Please select employees to get attendance!')) rpt_method = getattr(self, 'run_%s' % (rpt.type, )) #get report data rpt_line_obj, rpt_lns = rpt_method(cr, uid, ids, context, emp_ids=emp_ids) #remove the old lines unlink_domain = [('rpt_id', '=', rpt.id)] if emp_ids: unlink_domain.append(('emp_id', 'in', emp_ids)) unlink_ids = rpt_line_obj.search(cr, uid, unlink_domain, context=context) rpt_line_obj.unlink(cr, uid, unlink_ids, context=context) #create new lines for rpt_line in rpt_lns: rpt_line['rpt_id'] = rpt.id rpt_line_obj.create(cr, uid, rpt_line, context=context) #update GUI elements self.write(cr, uid, rpt.id, { 'show_search': False, 'show_result': True, 'save_pdf': True }, context=context) #go to 
the attendances line view page form_view = self.pool.get('ir.model.data').get_object_reference( cr, uid, 'dmp_hr_attend', 'view_hr_rpt_attend_emp_day_line_tree') form_view_id = form_view and form_view[1] or False return { 'name': _('Attendances Daily Report Line'), 'view_type': 'form', 'view_mode': 'tree', 'view_id': [form_view_id], 'res_model': 'hr.rpt.attend.emp.day.line', 'type': 'ir.actions.act_window', 'target': 'current', 'domain': [('rpt_id', '=', ids[0])], # 'context': {'search_default_groupby_emp':True}, } return True def run_attend_emp_day(self, cr, uid, ids, context=None, emp_ids=None): ''' 1.Query all data with both in/out by date range, store the result in attends_normal 2.Loop on by days and employees ''' emp_obj = self.pool.get('hr.employee') attend_obj = self.pool.get('hr.attendance') if context is None: context = {} rpt = self.browse(cr, uid, ids, context=context)[0] date_from = datetime.strptime(rpt.date_from, DEFAULT_SERVER_DATETIME_FORMAT) date_to = datetime.strptime(rpt.date_to, DEFAULT_SERVER_DATETIME_FORMAT) #report data line rpt_lns = [] #context for the query c = context.copy() #get the employees if not emp_ids: emp_ids = [emp.id for emp in rpt.emp_ids] if not emp_ids: emp_ids = emp_obj.search(cr, uid, [], context=context) #sort the employee ids emp_ids.sort() ''' 1.Query all data with both in/out by date range, store the result in attends_normal ''' sql = ''' select emp.id as emp_id, period.id as period_id, sign_in.day, sign_in.action as in_action, sign_in.name as in_time, sign_out.action out_action, sign_out.name out_time from hr_employee emp left join (select name,employee_id,cale_period_id,action,day from hr_attendance where name between %s and %s and action in('sign_in','sign_in_late')) as sign_in on emp.id = sign_in.employee_id left join (select name,employee_id,cale_period_id,action,day from hr_attendance where name between %s and %s and action in('sign_out','sign_out_early')) as sign_out on emp.id = sign_out.employee_id and sign_in.day = sign_out.day and sign_in.cale_period_id = sign_out.cale_period_id join resource_calendar_attendance period on sign_in.cale_period_id = period.id and sign_out.cale_period_id = period.id where emp.id = ANY(%s) ''' cr.execute(sql, (date_from, date_to, date_from, date_to, (emp_ids, ))) attends = cr.dictfetchall() #use the emp_id-day-period_id as the key to store the normal attendance # attends_normal = dict(('%s-%s-%s'%(attend['emp_id'], attend['day'], attend['period_id']), attend) for attend in attends) attends_normal = {} for attend in attends: key = '%s-%s-%s' % (attend['emp_id'], attend['day'], attend['period_id']) in_time = fields.datetime.context_timestamp( cr, uid, datetime.strptime(attend['in_time'], DEFAULT_SERVER_DATETIME_FORMAT), context=context) out_time = fields.datetime.context_timestamp( cr, uid, datetime.strptime(attend['out_time'], DEFAULT_SERVER_DATETIME_FORMAT), context=context) attend['in_time'] = in_time attend['out_time'] = out_time attends_normal[key] = attend ''' 2.Loop on by days and employees ''' date_from_local = fields.datetime.context_timestamp( cr, uid, date_from, context) date_to_local = fields.datetime.context_timestamp( cr, uid, date_to, context) days = rrule.rrule(rrule.DAILY, dtstart=date_from_local, until=date_to_local) emps = emp_obj.browse(cr, uid, emp_ids, context) seq = 0 for emp in emps: for day_dt in days: emp_cale = emp_obj.get_wt(cr, uid, emp.id, day_dt, context=context) day = day_dt.strftime('%Y-%m-%d') #if there is no working time defined to employee then continue to next employee 
directly if not emp_cale or not emp_cale.attendance_ids: seq += 1 ''' init a new empty line by employee/day without period info ''' rpt_line = { 'seq': seq, 'emp_id': emp.id, 'day': day_dt, 'period_id': None, 'sign_in': None, 'sign_out': None, 'hours_normal': None, 'hours_ot': None, 'is_late': False, 'is_early': False, 'is_absent': False, 'hours_normal2': None, 'hours_ot2': None, } rpt_lns.append(rpt_line) continue for period in emp_cale.attendance_ids: if day_dt.isoweekday() != (int(period.dayofweek) + 1): continue ''' init a new empty line by employee/day/period ''' seq += 1 rpt_line = { 'seq': seq, 'emp_id': emp.id, 'day': day_dt, 'period_id': period.id, 'sign_in': None, 'sign_out': None, 'hours_normal': None, 'hours_ot': None, 'is_late': False, 'is_early': False, 'is_absent': False, 'hours_normal2': None, 'hours_ot2': None, } rpt_lns.append(rpt_line) #find the normal attendance by employee/day/period attend_key = '%s-%s-%s' % (emp.id, day, period.id) attend = attends_normal.get(attend_key, False) if attend: #found the normal attendance, with sign in and out record, put the data directly hour_in = attend[ 'in_time'].hour + attend['in_time'].minute / 60.0 hour_out = attend[ 'out_time'].hour + attend['out_time'].minute / 60.0 hours_valid = hour_out - hour_in - period.hours_non_work attend_hours = self._attend_hours(hours_valid, period) rpt_line.update({ 'period_id': period.id, 'sign_in': hour_in, 'sign_out': hour_out, 'hours_normal': attend_hours[0], 'hours_ot': attend_hours[1], 'is_late': attend['in_action'] == 'sign_in_late', 'is_early': attend['out_action'] == 'sign_out_early', 'hours_normal2': attend_hours[2], 'hours_ot2': attend_hours[3], }) continue #the abnormal attendance, with sign in or out record only, or without any attendance attend_ids = attend_obj.search( cr, uid, [('employee_id', '=', emp.id), ('day', '=', day), ('cale_period_id', '=', period.id), ('action', 'in', ('sign_in', 'sign_in_late', 'sign_out', 'sign_out_early'))], context=context) if attend_ids: #found sign in or sign out data, there shoule be only one record, so use the first ID to get data attend = attend_obj.browse(cr, uid, attend_ids[0], context=context) attend_time = fields.datetime.context_timestamp( cr, uid, datetime.strptime(attend.name, DEFAULT_SERVER_DATETIME_FORMAT), context) hour_in = None hour_out = None hours_valid = None hours_normal = None hours_ot = None is_late = False is_early = False is_absent = False hours_normal2 = None hours_ot2 = None #Only have sign in record if attend.action in ('sign_in', 'sign_in_late'): hour_in = attend_time.hour + attend_time.minute / 60.0 if emp_cale.no_out_option == 'early': #treat as leave early if not period.is_full_ot: is_early = True hours_valid = period.hour_to - hour_in - period.hours_non_work - emp_cale.no_out_time / 60.0 else: #treat as absent if not period.is_full_ot: is_absent = True hours_valid = 0.0 #Only have sign out record if attend.action in ('sign_out', 'sign_out_early'): hour_out = attend_time.hour + attend_time.minute / 60.0 if emp_cale.no_in_option == 'late': #treat as leave early if not period.is_full_ot: is_late = True hours_valid = hour_out - period.hour_from - period.hours_non_work - emp_cale.no_in_time / 60.0 else: #treat as absent if not period.is_full_ot: is_absent = True hours_valid = 0.0 if hours_valid: hours_normal, hours_ot, hours_normal2, hours_ot2 = self._attend_hours( hours_valid, period) rpt_line.update({ 'period_id': period.id, 'sign_in': hour_in, 'sign_out': hour_out, 'hours_normal': hours_normal, 'hours_ot': hours_ot, 
'is_late': is_late, 'is_early': is_early, 'is_absent': is_absent, 'hours_normal2': hours_normal2, 'hours_ot2': hours_ot2, }) else: if not period.is_full_ot: rpt_line.update({'is_absent': True}) '''========return data to rpt_base.run_report()=========''' return self.pool.get('hr.rpt.attend.emp.day.line'), rpt_lns def _pdf_data(self, cr, uid, ids, form_data, context=None): return {'xmlrpt_name': 'hr.rpt.attend.emp.day'} def save_pdf(self, cr, uid, ids, context=None): if context is None: context = {} form_data = self.read(cr, uid, ids[0], context=context) rptxml_name = self._pdf_data(cr, uid, ids[0], form_data, context=context)['xmlrpt_name'] datas = { 'model': self._name, 'ids': [ids[0]], 'form': form_data, } return { 'type': 'ir.actions.report.xml', 'report_name': rptxml_name, 'datas': datas, 'nodestroy': True } def print_empday_group(self, cr, uid, ids, context=None, rpt_line_ids=None): if context is None: context = {} ''' store the groups in dict: {key:{ val1,val2,..., #lines:{key:line_data} {} valn } } ''' groups = {} #store the calendar worktime types in dict:{calendar_id:type_list} cale_wt_types = {} #get the group data rptlines = [] if not rpt_line_ids: #call from self rpt = self.browse(cr, uid, ids[0], context=context) rptlines = rpt.rpt_lines else: #this parameter will be called from hr_rpt_attend_emp_day_line.print_empday_line_group() rptlines = self.pool.get('hr.rpt.attend.emp.day.line').browse( cr, uid, rpt_line_ids, context=context) rpt = rptlines[0].rpt_id ids = [rpt.id] #handle the attend month report parameter attend_month_id = context.get('attend_month_id', None) emp_attend_month_lines = {} if attend_month_id: attend_month_line_obj = self.pool.get('hr.rpt.attend.month.line') attend_month_line_ids = attend_month_line_obj.search( cr, uid, [('rpt_id', '=', attend_month_id)], context=context) emp_ids = attend_month_line_obj.read(cr, uid, attend_month_line_ids, ['emp_id']) emp_attend_month_lines = dict( (item['emp_id'][0], item['id']) for item in emp_ids) for rpt_line in rptlines: #if from attend month report, only print the employees in the attendance report if attend_month_id and not emp_attend_month_lines.get( rpt_line.emp_id.id): continue key_group = '[%s]%s' % (rpt_line.emp_id.emp_code, rpt_line.emp_id.name) if not groups.get(key_group): #Add the attendance data cale_id = rpt_line.period_id.calendar_id.id worktime_types = cale_wt_types.get(cale_id) if not worktime_types and cale_id: sql = 'select distinct b.id,b.sequence,b.name \ from resource_calendar_attendance a \ join hr_worktime_type b on a.type_id = b.id \ where a.calendar_id=%s \ order by b.sequence' cr.execute(sql, (cale_id, )) worktime_types = cr.dictfetchall() cale_wt_types[cale_id] = worktime_types #set the group values group_vals = { 'name': key_group, 'emp_id': rpt_line.emp_id.id, 'date_from': rpt.date_from, 'date_to': rpt.date_to, 'period_type_a_id': (worktime_types and len(worktime_types) >= 1) and worktime_types[0]['id'] or None, 'period_type_b_id': (worktime_types and len(worktime_types) >= 2) and worktime_types[1]['id'] or None, 'period_type_c_id': (worktime_types and len(worktime_types) >= 3) and worktime_types[2]['id'] or None, 'line_ids_dict': {} } #add the attend month line link id if attend_month_id: group_vals[ 'attend_month_line_id'] = emp_attend_month_lines.get( group_vals['emp_id']) groups[key_group] = group_vals #append this line group_vals = groups.get(key_group) #get the group line values in dict group_lines = group_vals['line_ids_dict'] key_group_line = rpt_line.day if not 
group_lines.get(key_group_line): group_lines[key_group_line] = { 'day': rpt_line.day, 'weekday': rpt_line.p_weekday, 'seq': 0 } #add current data group_line = group_lines[key_group_line] #set the different attendance work time fields by the line data if group_vals.get( 'period_type_a_id' ) and rpt_line.period_id.type_id.id == group_vals[ 'period_type_a_id']: group_line['sign_in_a'] = rpt_line.sign_in group_line['sign_out_a'] = rpt_line.sign_out group_line['hours_normal_a'] = rpt_line.hours_normal group_line['hours_ot_a'] = rpt_line.hours_ot group_line['seq'] = rpt_line.seq if group_vals.get( 'period_type_b_id' ) and rpt_line.period_id.type_id.id == group_vals[ 'period_type_b_id']: group_line['sign_in_b'] = rpt_line.sign_in group_line['sign_out_b'] = rpt_line.sign_out group_line['hours_normal_b'] = rpt_line.hours_normal group_line['hours_ot_b'] = rpt_line.hours_ot if group_vals.get( 'period_type_c_id' ) and rpt_line.period_id.type_id.id == group_vals[ 'period_type_c_id']: group_line['sign_in_c'] = rpt_line.sign_in group_line['sign_out_c'] = rpt_line.sign_out group_line['hours_normal_c'] = rpt_line.hours_normal group_line['hours_ot_c'] = rpt_line.hours_ot #sum and create groups data to DB group_ids = [] attend_empday_group_obj = self.pool.get('attend.empday.group') group_list = groups.values() group_list.sort(lambda x, y: cmp(x['name'], y['name'])) for group in group_list: group_lines_list = [] work_hours = 0 work_hours_ot = 0 for line in group['line_ids_dict'].values(): line['hours_normal_total'] = line.get( 'hours_normal_a', 0) + line.get( 'hours_normal_b', 0) + line.get('hours_normal_c', 0) line['hours_ot_total'] = line.get('hours_ot_a', 0) + line.get( 'hours_ot_b', 0) + line.get('hours_ot_c', 0) work_hours += line['hours_normal_total'] work_hours_ot += line['hours_ot_total'] group_lines_list.append((0, 0, line)) group_lines_list.sort(lambda x, y: cmp(x[2]['seq'], y[2]['seq'])) group['line_ids'] = group_lines_list group['days_attend'] = work_hours / 8.0 group['hours_ot'] = work_hours_ot group_ids.append( attend_empday_group_obj.create(cr, uid, group, context=context)) #print attendances by group if not group_ids: return {'type': 'ir.actions.act_window_close'} #return report action datas = { 'model': 'attend.empday.group', 'ids': group_ids, } context.update({ 'active_model': 'attend.empday.group', 'active_ids': group_ids }) rpt_action = { 'type': 'ir.actions.report.xml', 'report_name': 'attend.empday.group', 'datas': datas, 'nodestroy': True, 'context': context } return rpt_action
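# Illustrative sketch of the run_report() dispatch contract (the report type
# 'attend_emp_week' below is hypothetical): run_report() looks up a method
# named 'run_<type>' via getattr() and expects it to return the report-line
# osv object together with a list of line-value dictionaries; run_report()
# then removes the old lines and creates the new ones.
#
#     def run_attend_emp_week(self, cr, uid, ids, context=None, emp_ids=None):
#         rpt = self.browse(cr, uid, ids, context=context)[0]
#         rpt_lns = [{'seq': 1, 'emp_id': emp.id, 'day': rpt.date_from}
#                    for emp in rpt.emp_ids]
#         return self.pool.get('hr.rpt.attend.emp.day.line'), rpt_lns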
        for module in self.browse(cr, uid, ids, context=context):
            path = addons.get_module_resource(module.name, 'static', 'src',
                                              'img', 'icon.png')
            if path:
                image_file = tools.file_open(path, 'rb')
                try:
                    res[module.id] = image_file.read().encode('base64')
                finally:
                    image_file.close()
        return res

    _columns = {
        'name': fields.char("Technical Name", size=128, readonly=True,
                            required=True, select=True),
        'category_id': fields.many2one('ir.module.category', 'Category',
                                       readonly=True, select=True),
        'shortdesc': fields.char('Module Name', size=64, readonly=True,
                                 translate=True),
        'summary': fields.char('Summary', size=64, readonly=True, translate=True),
        'description': fields.text("Description", readonly=True, translate=True),
        'description_html': fields.function(_get_desc, string='Description HTML',
                                            type='html', method=True,
                                            readonly=True),
        'author': fields.char("Author", size=128, readonly=True),
        'maintainer': fields.char('Maintainer', size=128, readonly=True),
        'contributors': fields.text('Contributors', readonly=True),
        'website': fields.char("Website", size=256, readonly=True),

        # attention: Incorrect field names !!
        #   installed_version refers to the latest version (the one on disk)
        #   latest_version refers to the installed version (the one in database)
        #   published_version refers to the version available on the repository
        'installed_version': fields.function(_get_latest_version,
                                             string='Latest Version', type='char'),
        'latest_version': fields.char('Installed Version', size=64, readonly=True),
        'published_version': fields.char('Published Version', size=64,
                                         readonly=True),
        'url': fields.char('URL', size=128, readonly=True),
class kg_gate_pass(osv.osv):
    _name = "kg.gate.pass"
    _description = "KG Gate Pass"
    _order = "date desc"

    _columns = {
        ## Basic Info
        'name': fields.char('Gate Pass No', size=128, readonly=True),
        'date': fields.date('Gate Pass Date', readonly=True,
                            states={'draft': [('readonly', False)]},
                            required=True),
        'note': fields.text('Remarks', readonly=False,
                            states={'confirmed': [('readonly', False)],
                                    'done': [('readonly', False)]}),
        'state': fields.selection([('draft', 'Draft'), ('confirmed', 'WFA'),
                                   ('done', 'Delivered'), ('cancel', 'Cancelled'),
                                   ('reject', 'Rejected')],
                                  'Out Status', readonly=True),
        'entry_mode': fields.selection([('auto', 'Auto'), ('manual', 'Manual')],
                                       'Entry Mode', readonly=True),
        'remark': fields.text('Remarks'),

        ## Module Requirement Info
        'dep_id': fields.many2one('kg.depmaster', 'Source Location', select=True,
                                  readonly=True,
                                  states={'draft': [('readonly', False)]}),
        'return_date': fields.date('Expected Return Date', readonly=True,
                                   states={'draft': [('readonly', False)]},
                                   required=True),
        'partner_id': fields.many2one('res.partner', 'Supplier',
                                      domain="[('supplier','=',True)]"),
        'out_type': fields.many2one('kg.outwardmaster', 'OutwardType',
                                    domain="[('state','not in',('reject','cancel'))]"),
        'origin': fields.many2one('kg.service.indent', 'Origin', readonly=True),
        'in_state': fields.selection([('pending', 'Pending'), ('partial', 'Partial'),
                                      ('done', 'Received'), ('cancel', 'Cancelled')],
                                     'In Status', readonly=True),
        #~ 'in_state': fields.selection([('open', 'OPEN'), ('pending', 'Pending'), ('done', 'Received'), ('cancel', 'Cancelled')], 'In Status', readonly=True),
        'si_indent_ids': fields.many2many(
            'kg.service.indent.line', 's_indent_gp_entry', 'si_id', 'gp_id',
            'Service Indent Lines',
            domain="[('service_id.state','=','approved'), ('pending_qty','>','0')]",
            readonly=True, states={'draft': [('readonly', False)]}),
        'indent_flag': fields.boolean('Indent'),
        'transport': fields.char('Transport', size=128, readonly=True,
                                 states={'draft': [('readonly', False)]}),
        'transport_id': fields.many2one('kg.transport', 'Transport', readonly=True,
                                        states={'draft': [('readonly', False)],
                                                'confirmed': [('readonly', False)]}),
        'taken_by': fields.char('Taken By', size=128, readonly=True,
                                states={'draft': [('readonly', False)]}),
        'received_by': fields.char('Received By', size=128, readonly=True,
                                   states={'draft': [('readonly', False)]}),
        'project': fields.char('Project', size=100, readonly=True,
                               states={'draft': [('readonly', False)]}),
        'division': fields.char('Division', size=100, readonly=True,
                                states={'draft': [('readonly', False)]}),
        'confirm_flag': fields.boolean('Confirm Flag'),
        'approve_flag': fields.boolean('Expiry Flag'),
        'mode': fields.selection([('direct', 'Direct'), ('frm_indent', 'From Indent')],
                                 'Entry Mode', required=True, readonly=True,
                                 states={'draft': [('readonly', False)]}),
        'gp_type': fields.selection([('from_so', 'From SI'), ('direct', 'Direct')],
                                    'GP Type', readonly=True),
        'vehicle_details': fields.char('Vehicle Details', readonly=True,
                                       states={'draft': [('readonly', False)]}),
        'outward_type': fields.selection([('in_reject', 'Inward Rejection'),
                                          ('service', 'Send For Service'),
                                          ('in_transfer', 'Internal Transfer')],
                                         'Outward Type', readonly=True,
                                         states={'draft': [('readonly', False)],
                                                 'confirmed': [('readonly', False)]}),

        ## Child Tables Declaration
        'gate_line': fields.one2many('kg.gate.pass.line', 'gate_id', 'Gate Pass Line',
                                     readonly=True,
                                     states={'draft': [('readonly', False)]},
                                     required=True),

        ## Entry Info
        'active': fields.boolean('Active'),
        'company_id': fields.many2one('res.company', 'Company Name', readonly=True),
        'user_id': fields.many2one('res.users', 'Created By', readonly=True),
        'creation_date': fields.datetime('Created Date', readonly=True),
        'confirmed_by': fields.many2one('res.users', 'Confirmed By', readonly=True,
                                        select=True),
        'confirmed_date': fields.datetime('Confirmed Date', readonly=True),
        'reject_date': fields.datetime('Rejected Date', readonly=True),
        'rej_user_id': fields.many2one('res.users', 'Rejected By', readonly=True),
        'approved_by': fields.many2one('res.users', 'Approved By', readonly=True,
                                       select=True),
        'approved_date': fields.datetime('Approved Date', readonly=True),
        'cancel_date': fields.datetime('Cancelled Date', readonly=True),
        'cancel_user_id': fields.many2one('res.users', 'Cancelled By', readonly=True),
        'update_date': fields.datetime('Last Updated Date', readonly=True),
        'update_user_id': fields.many2one('res.users', 'Last Updated By',
                                          readonly=True),
        'cancel_remarks': fields.text('Cancel Remarks'),
    }

    _defaults = {
        'creation_date': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
        'date': lambda *a: time.strftime('%Y-%m-%d'),
        'state': 'draft',
        'name': '',
        'user_id': lambda self, cr, uid, c: self.pool.get('res.users').browse(
            cr, uid, uid, c).id,
        'company_id': lambda self, cr, uid, c: self.pool.get('res.company').
            _company_default_get(cr, uid, 'kg.gate.pass', context=c),
        'in_state': 'pending',
        'indent_flag': False,
        'gp_type': 'from_so',
        'active': True,
        'mode': 'direct',
        'entry_mode': 'manual',
    }

    def _check_lineitems(self, cr, uid, ids, context=None):
        entry = self.browse(cr, uid, ids[0])
        if (entry.entry_mode == 'manual' and entry.mode == 'direct') \
                or entry.indent_flag:
            if not entry.gate_line:
                return False
        return True

    _constraints = [
        (_check_lineitems,
         'System does not allow saving with empty line details !!', ['']),
    ]

    def onchange_return_date(self, cr, uid, ids, return_date, context=None):
        today = date.today()
        today = str(today)
        today = datetime.strptime(today, '%Y-%m-%d')
        return_date = str(return_date)
        return_date = datetime.strptime(return_date, '%Y-%m-%d')
        if return_date >= today:
            return False
        else:
            raise osv.except_osv(
                _('Warning!'),
                _('System does not allow saving with a past date !!'))

    def write(self, cr, uid, ids, vals, context=None):
        vals.update({
            'update_date': time.strftime('%Y-%m-%d %H:%M:%S'),
            'update_user_id': uid
        })
        return super(kg_gate_pass, self).write(cr, uid, ids, vals, context)

    def cancel_entry(self, cr, uid, ids, context=None):
        rec = self.browse(cr, uid, ids[0])
        if rec.state == 'done':
            if not rec.cancel_remarks:
                raise osv.except_osv(_('Warning'), _('Enter a cancel remark !!'))
        self.write(
            cr, uid, ids, {
                'state': 'cancel',
                'cancel_user_id': uid,
                'cancel_date': time.strftime("%Y-%m-%d %H:%M:%S"),
            })

    def reject_entry(self, cr, uid, ids, context=None):
        rec = self.browse(cr, uid, ids[0])
        if rec.state == 'confirmed':
            if not rec.remark:
                raise osv.except_osv(
                    _('Rejection remark is required !!'),
                    _('Enter the rejection remark in the remark field !!'))
            self.write(
                cr, uid, ids, {
                    'state': 'draft',
                    'rej_user_id': uid,
                    'reject_date': time.strftime("%Y-%m-%d %H:%M:%S"),
                })

    def confirm_entry(self, cr, uid, ids, context=None):
        entry = self.browse(cr, uid, ids[0])
        if entry.state == 'draft':
            if not entry.name:
                seq_id = self.pool.get('ir.sequence').search(
                    cr, uid, [('code', '=', 'kg.gate.pass')])
                seq_rec = self.pool.get('ir.sequence').browse(cr, uid, seq_id[0])
                cr.execute("""select generatesequenceno(%s,'%s','%s') """
                           % (seq_id[0], seq_rec.code, entry.date))
                seq_name = cr.fetchone()
                self.write(cr, uid, ids[0], {'name': seq_name[0]})
if entry.mode == 'frm_indent': for line in entry.gate_line: if line.qty > line.indent_qty: raise osv.except_osv( _('Warning!'), _('You cannot increase qty more than indent qty')) self.write( cr, uid, ids[0], { 'state': 'confirmed', 'confirmed_by': uid, 'confirm_flag': True, 'confirmed_date': time.strftime('%Y-%m-%d %H:%M:%S') }) return True def approve_entry(self, cr, uid, ids, context=None): rec = self.browse(cr, uid, ids[0]) if rec.state == 'confirmed': #if rec.confirmed_by.id == uid: # raise osv.except_osv( # _('Warning'), # _('Approve cannot be done by Confirmed user')) if rec.mode == 'frm_indent': for line in rec.gate_line: indent_pen_qty = line.indent_qty - line.qty gp_qty = line.qty old_indent_pen_qty = line.si_line_id.pending_qty new_pen_qty = old_indent_pen_qty - gp_qty line.si_line_id.write({'gate_pending_qty': new_pen_qty}) else: pass rec.write({ 'state': 'done', 'approved_by': uid, 'approve_flag': True, 'approved_date': time.strftime('%Y-%m-%d %H:%M:%S') }) return True def gate_pass_print(self, cr, uid, ids, context=None): #assert len(ids) == 1, 'This option should only be used for a single id at a time' wf_service = netsvc.LocalService("workflow") wf_service.trg_validate(uid, 'kg.gate.pass', ids[0], 'send_rfq', cr) datas = { 'model': 'kg.gate.pass', 'ids': ids, 'form': self.read(cr, uid, ids[0], context=context), } return { 'type': 'ir.actions.report.xml', 'report_name': 'gate.pass.report', 'datas': datas, 'nodestroy': True } def create_gp_line(self, cr, uid, ids, context=False): indent_line_obj = self.pool.get('kg.service.indent.line') gp_line_obj = self.pool.get('kg.gate.pass.line') prod_obj = self.pool.get('product.product') user_obj = self.pool.get('res.users') line_ids = [] res = {} res['line_ids'] = [] res['indent_flag'] = True obj = self.browse(cr, uid, ids[0]) user_rec = obj.user_id user = user_rec.id if obj.gate_line: for line in obj.gate_line: line.si_line_id.write({'line_state': 'noprocess'}) line_ids = map(lambda x: x.id, obj.gate_line) gp_line_obj.unlink(cr, uid, line_ids) if obj.si_indent_ids: indent_line_ids = map(lambda x: x.id, obj.si_indent_ids) indent_line_browse = indent_line_obj.browse( cr, uid, indent_line_ids) indent_line_browse = sorted(indent_line_browse, key=lambda k: k.product_id.id) groups = [] for key, group in groupby(indent_line_browse, (lambda x: x.product_id.id)): for key, group in groupby(group, lambda x: x.brand_id.id): groups.append(map(lambda r: r, group)) for key, group in enumerate(groups): qty = sum(map(lambda x: float(x.qty), group)) pending_qty = sum(map(lambda x: float(x.pending_qty), group)) #TODO: qty indent_line_ids = map(lambda x: x.id, group) if len(indent_line_ids) > 1: flag = True else: flag = False prod_browse = group[0].product_id brand_id = group[0].brand_id uom = group[0].uom.id or False depindent_id = group[0].id qty = pending_qty remark = group[0].note serial_no = group[0].serial_no.id ser_no = group[0].ser_no vals = { 'product_id': prod_browse.id, 'brand_id': brand_id.id, 'uom': uom, 'qty': qty, 'indent_qty': qty, 'grn_pending_qty': qty, 'so_pending_qty': qty, 'si_line_id': depindent_id, 'group_flag': flag, 'note': remark, 'ser_no': ser_no, 'serial_no': serial_no, } if pending_qty == 0: indent_line_obj.write(cr, uid, depindent_id, {'line_state': 'process'}) if ids: self.write(cr, uid, ids[0], {'gate_line': [(0, 0, vals)]}) self.write(cr, uid, ids, res) return True def update_product_group(self, cr, uid, ids, line, context=None): pi_rec = self.browse(cr, uid, ids[0]) line_obj = self.pool.get('purchase.requisition.line') 
dep_line_obj = self.pool.get('kg.depindent.line') product_obj = self.pool.get('product.product') cr.execute( """ select depindent_line_id from kg_depindent_pi_line where pi_id = %s """ % (str(ids[0]))) data = cr.dictfetchall() val = [ d['depindent_line_id'] for d in data if 'depindent_line_id' in d ] product_id = line.product_id.id product_record = product_obj.browse(cr, uid, product_id) list_line = dep_line_obj.search(cr, uid, [('id', 'in', val), ('product_id', '=', product_id)], context=context) depindent_line_id = line.depindent_line_id pi_qty = line.product_qty for i in list_line: bro_record = dep_line_obj.browse(cr, uid, i) orig_depindent_qty = bro_record.pending_qty po_uom_qty = bro_record.po_qty pi_used_qty = pi_qty uom = bro_record.uom.id po_uom = bro_record.po_uom.id if uom != po_uom: if pi_used_qty <= po_uom_qty: pending_po_depindent_qty = po_uom_qty - pi_used_qty pending_stock_depindent_qty = orig_depindent_qty - ( pi_used_qty * product_record.po_uom_coeff) sql = """ update kg_depindent_line set po_qty=%s, pending_qty=%s where id = %s""" % ( pending_po_depindent_qty, pending_stock_depindent_qty, bro_record.id) cr.execute(sql) #dep_line_obj.write(cr,uid, bro_record.id, {'line_state' : 'noprocess'}) break else: remain_qty = pi_used_qty - po_uom_qty pi_qty = remain_qty pending_po_depindent_qty = 0.0 pending_stock_depindent_qty = 0.0 sql = """ update kg_depindent_line set po_qty=%s, pending_qty=%s where id = %s""" % ( pending_po_depindent_qty, pending_stock_depindent_qty, bro_record.id) cr.execute(sql) #dep_line_obj.write(cr,uid, bro_record.id, {'line_state' : 'noprocess'}) if remain_qty < 0: break else: if pi_used_qty <= po_uom_qty: pending_po_depindent_qty = po_uom_qty - pi_used_qty pending_stock_depindent_qty = po_uom_qty - pi_used_qty sql = """ update kg_depindent_line set po_qty=%s, pending_qty=%s where id = %s""" % ( pending_po_depindent_qty, pending_stock_depindent_qty, bro_record.id) cr.execute(sql) #dep_line_obj.write(cr,uid, bro_record.id, {'line_state' : 'noprocess'}) break else: remain_qty = pi_used_qty - po_uom_qty pi_qty = remain_qty pending_po_depindent_qty = 0.0 pending_stock_depindent_qty = 0.0 sql = """ update kg_depindent_line set po_qty=%s, pending_qty=%s where id = %s""" % ( pending_po_depindent_qty, pending_stock_depindent_qty, bro_record.id) cr.execute(sql) #dep_line_obj.write(cr,uid, bro_record.id, {'line_state' : 'noprocess'}) if remain_qty < 0: break return True
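# Worked example of the pending-quantity update above (numbers are hypothetical):
# with a purchase UOM different from the stock UOM and po_uom_coeff = 12
# (e.g. dozen -> units), a kg_depindent_line row with po_qty = 10 dozen and
# pending_qty = 120 units consumed by a PI line of 4 dozen is updated to
# po_qty = 10 - 4 = 6 and pending_qty = 120 - 4 * 12 = 72. If the PI quantity
# exceeds po_qty, both pending figures are set to 0.0 and the remainder is
# carried over to the next matching kg_depindent_line row.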
def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False, submenu=False): result = super(product_search_ept, self).fields_view_get(cr, uid, view_id, view_type, context, toolbar,submenu) if context is None: context={} self._columns = {} self._columns['product_ids'] = fields.text('Product IDS') if view_type != 'form': return result _moves_arch_lst = """ <form string="Stone Search" version="7.0"> <div> """ #_moves_arch_lst += """<group colspan="4" col="10">""" _line_fields = result['fields'] info = [ {'model':None,'_column_name':'product_name','label':'Stone ID','type':'char','name':'name','product_search_type':'char'}, {'model':None,'_column_name':'product_certificate_no','label':'Certificate No.','type':'char','name':'certificate_no','product_search_type':'char'}, {'model':None,'_column_name':'product_weight','label':'Weight','no':3,'type':'float','name':'weight','product_search_type':'char','range':True}, {'model':None,'_column_name':'product_price_caret','label':'PPC','no':1,'type':'float','name':'price_caret','product_search_type':'char','range':True}, {'model':None,'_column_name':'product_discount','label':'Back%','no':1,'type':'float','name':'discount','product_search_type':'char','range':True}, {'model':'product.shape','width':'15%','_column_name':'product_shape','label':'SHP','help':'Shape','type':'many2one','name':'shape_id','product_search_type':'boolean','on_change':'product_shape_change'}, {'model':'product.color','width':'8%','_column_name':'product_color','label':'CLR','help':'Color','type':'many2one','name':'color_id','product_search_type':'boolean','on_change':'product_color_change'}, {'model':'product.clarity','width':'10%','_column_name':'product_clarity','label':'CLRTY','help':'Clarity','type':'many2one','name':'clarity_id','product_search_type':'boolean','on_change':'product_clarity_change'}, {'model':'product.cut','width':'12%','_column_name':'product_cut','label':'CUT','help':'Cut','type':'many2one','name':'cut_id','product_search_type':'boolean','on_change':'product_cut_change'}, {'model':'product.polish','width':'8%','_column_name':'product_polish','label':'POL','help':'Polish','type':'many2one','name':'polish_id','product_search_type':'boolean','on_change':'product_polish_change'}, {'model':'product.symmetry','width':'10%','_column_name':'product_symmetry','label':'SYM','help':'Symmetry','type':'many2one','name':'symmetry_id','product_search_type':'boolean','on_change':'product_symmetry_change'}, {'model':'product.fluorescence.intensity','width':'13%','_column_name':'product_fluorescence_intensity','label':'FLUR','help':'Fluorescence Intensity','type':'many2one','name':'fluorescence_intensity_id','product_search_type':'boolean','on_change':'product_fluorescence_intensity_change'}, {'model':'product.lab','width':'8%','_column_name':'product_lab','label':'LAB','help':'Lab','type':'many2one','name':'lab_id','product_search_type':'boolean','on_change':'product_lab_change'}, {'model':'product.fancy.color','width':'15%','_column_name':'product_fancy_color','label':'FNC CLR','help':'Fancy Color','type':'many2one','name':'fancy_color_id','product_search_type':'boolean','on_change':'product_fancy_color_change'}, {'model':'product.fancy.color.intensity','width':'15%','_column_name':'product1_fancy_color_intensity','label':'FNC CLR INT','help':'Fancy Color Intensity','type':'many2one','name':'fancy_color_intensity','product_search_type':'boolean','on_change':'product1_fancy_color_intensity_change'}, 
{'model':'product.fancy.color.overtone','width':'15%','_column_name':'product2_fancy_color_overtone','label':'FNC CLR OVR','help':'Fancy Color Overtone','type':'many2one','name':'fancy_color_overtone','product_search_type':'boolean','on_change':'product2_fancy_color_overtone_change'}, {'model':None,'_column_name':'product_status','width':'20%','label':'Status','type':'selection','name':'product_status','product_search_type':'boolean' ,'selection_val':[('available','Available'), ('hold','Hold'), ('sold','Sold'), ('on_approval','On Approval'), ('on_consignment','On Consignment'), ('offline','Offline'), ('repair','Repair'), ('web_sale','Web Sale')]}, {'model':'stock.location','_column_name':'stock_location','width':'15%','label':'Location','type':'many2one','name':'location_id','product_search_type':'boolean' ,'domain':[('usage','=','internal')],}, ] for model_info in info : if model_info['type'] == 'many2one' and model_info['product_search_type'] == 'boolean' : if model_info['model']: ids = self.pool.get(model_info['model']).search(cr,uid,model_info.get('domain',[])) if ids : _moves_arch_lst += """<div style="float:left;width:%s;">"""%(model_info.get('width', '100%')) ''' Header ''' if model_info.get('label', False)=='Location': _moves_arch_lst += """<u><label style="color:rgb(124,123,173);font-weight:bold;" string="%s"/></u>"""%(model_info['label']) if model_info.get('on_change', False): ''' Check box for Select All ''' _moves_arch_lst += """<div><field name="%s" class="oe_inline" nolabel="1" on_change="%s(%s)"/> """%(model_info['on_change'],model_info['on_change'],model_info['on_change']) ''' Label for Select All ''' _moves_arch_lst += """<u><label help='%s' style="color:rgb(124, 123, 173);" string="%s" for="%s" /></u></div>"""%(model_info['help'],model_info['label'],model_info['label']) _line_fields.update({ '%s'%(model_info['on_change']) : { 'string': 'All ?', 'type' : 'boolean', },}) self._columns['%s'%(model_info['on_change'])] = fields.boolean(model_info['on_change']) for obj in self.pool.get(model_info['model']).browse(cr,uid,ids,context=context): name=len(obj.name) > 7 and (obj.name[:7]+'...') or obj.name[:7] _line_fields.update({ '%s%s'%(model_info['_column_name'],obj.id) : { 'string': obj.name, 'type' : 'boolean', 'help' : '%s'%(obj.name) },}) self._columns['%s%s'%(model_info['_column_name'],obj.id)] = fields.boolean(obj.name) ''' Check box and related label ''' _moves_arch_lst += """ <div><field name="%s%s" class="oe_inline" nolabel="1"/> <label string="%s" for="%s%s" /></div> """%(model_info['_column_name'],obj.id,name,model_info['_column_name'],obj.id) _moves_arch_lst += """</div>""" ####################### if model_info['type'] == 'char' and model_info['product_search_type'] == 'char': _moves_arch_lst += """<div style="width:%s;float:left;">""" %('50%') _line_fields.update({ '%s'%(model_info['_column_name']) : { 'string': 'Name', 'type' : 'char', 'help' : '%s'%(model_info['_column_name']), },}) self._columns['%s'%(model_info['_column_name'])] = fields.char(model_info['label'],size=1024) _moves_arch_lst += """ <div> <label style="color:rgb(124, 123, 173);" string="%s" for="%s" /> <field name="%s" style="width: 70%%" nolabel="1"/> </div> </div> """%(model_info['label'],model_info['_column_name'],model_info['_column_name']) ################################ if model_info['type'] == 'selection' and model_info['product_search_type'] == 'boolean' : if model_info['selection_val']: _moves_arch_lst += """<div style="float:left;width:%s">"""%(model_info['width']) _moves_arch_lst += 
"""<u><label style="color:rgb(124, 123, 173);font-weight:bold;" string="%s" /></u><newline/>"""%(model_info['label']) for value in model_info['selection_val']: _line_fields.update({ '%s_%s'%(model_info['_column_name'],value[0]) : { 'string': value[1], 'type' : 'boolean', },}) self._columns['%s_%s'%(model_info['_column_name'],value[0])] = fields.boolean(value[1]) _moves_arch_lst += """ <div><field name="%s_%s" nolabel="1"/> <label string="%s" for="%s_%s" /></div> """%(model_info['_column_name'],value[0],value[1],model_info['_column_name'],value[0]) _moves_arch_lst +="""</div>""" ########################### if model_info.get('range') and model_info['range']: width = '50%' if model_info.get('no') > 1:width = '100%' _moves_arch_lst += """<div style="float:left;width:%s;">"""%(width) _moves_arch_lst += """<div style="float:left;width:%s;"><label style="color:rgb(124, 123, 173);font-weight:bold;" string="%s" /></div>"""%('15%',model_info['label']) if model_info.get('no'): no = model_info.get('no') wid = str(85/int(no)) + '%' while no != 0 : no = no - 1 _line_fields.update({'%s_from_%s'%(model_info['_column_name'],no) : {'string': model_info['label'],'type':'float'}}) _line_fields.update({'%s_to_%s'%(model_info['_column_name'],no) : {'string': model_info['label'],'type':'float'}}) self._columns['%s_from_%s'%(model_info['_column_name'],no)] = fields.float(model_info['label'],digits=(16,2)) self._columns['%s_to_%s'%(model_info['_column_name'],no)] = fields.float(model_info['label'],digits=(16,2)) _moves_arch_lst += """ <div style="float:left;width:%s;"> <div style="float:left;"><field name="%s_from_%s" placeholder="From" class="oe_inline" nolabel="1"/></div> <div style="float:left;"><b><label style="color:rgb(124, 123, 173);" string="--" /></b></div> <div style="float:left;"><field name="%s_to_%s" placeholder="To" class="oe_inline" nolabel="1"/></div> </div> """%(wid,model_info['_column_name'],no,model_info['_column_name'],no) _moves_arch_lst += """</div>""" _moves_arch_lst += """ </div> <footer> <button name="get_product" string="Search" type="object" colspan="2" class="oe_highlight"/> or <button string="Cancel" class="oe_link" special="cancel"/> </footer> </form> """ result['arch'] = _moves_arch_lst result['arch'] = result['arch'].replace('&','&') result['fields'] = _line_fields return result
            cur = line.order_id.pricelist_id.currency_id
            res[line.id] = cur_obj.round(cr, uid, cur, taxes["total"])
        return res

    def _get_uom_id(self, cr, uid, *args):
        try:
            proxy = self.pool.get("ir.model.data")
            result = proxy.get_object_reference(cr, uid, "product", "product_uom_unit")
            return result[1]
        except Exception:
            return False

    _name = "rent.order.line"
    _description = "Rent Order Line"
    _columns = {
        "name": fields.text("Description", required=True, readonly=True,
                            states={"draft": [("readonly", False)]}),
        "order_id": fields.many2one(
            "rent.order", "Rent Reference", required=True, ondelete="cascade",
            select=True, readonly=True, states={"draft": [("readonly", False)]},
        ),
        "sequence": fields.integer("Sequence"),
        "product_id": fields.many2one(
            "product.product", "Product",
            domain=[("is_rent", "=", True)], change_default=True,
        ),
        "invoice_lines": fields.many2many(
            "account.invoice.line",
class ir_attachment(osv.osv): """Attachments are used to link binary files or url to any openerp document. External attachment storage --------------------------- The 'data' function field (_data_get,data_set) is implemented using _file_read, _file_write and _file_delete which can be overridden to implement other storage engines, shuch methods should check for other location pseudo uri (example: hdfs://hadoppserver) The default implementation is the file:dirname location that stores files on the local filesystem using name based on their sha1 hash """ def _name_get_resname(self, cr, uid, ids, object, method, context): data = {} for attachment in self.browse(cr, uid, ids, context=context): model_object = attachment.res_model res_id = attachment.res_id if model_object and res_id: model_pool = self.pool.get(model_object) res = model_pool.name_get(cr, uid, [res_id], context) res_name = res and res[0][1] or False if res_name: field = self._columns.get('res_name', False) if field and len(res_name) > field.size: res_name = res_name[:field.size - 3] + '...' data[attachment.id] = res_name else: data[attachment.id] = False return data # 'data' field implementation def _full_path(self, cr, uid, location, path): # location = 'file:filestore' assert location.startswith( 'file:'), "Unhandled filestore location %s" % location location = location[5:] # sanitize location name and path location = re.sub('[.]', '', location) location = location.strip('/\\') path = re.sub('[.]', '', path) path = path.strip('/\\') return os.path.join(tools.config['root_path'], location, cr.dbname, path) def _file_read(self, cr, uid, location, fname, bin_size=False): full_path = self._full_path(cr, uid, location, fname) r = '' try: if bin_size: r = os.path.getsize(full_path) else: r = open(full_path, 'rb').read().encode('base64') except IOError: _logger.error("_read_file reading %s", full_path) return r def _file_write(self, cr, uid, location, value): bin_value = value.decode('base64') fname = hashlib.sha1(bin_value).hexdigest() # scatter files across 1024 dirs # we use '/' in the db (even on windows) fname = fname[:3] + '/' + fname full_path = self._full_path(cr, uid, location, fname) try: dirname = os.path.dirname(full_path) if not os.path.isdir(dirname): os.makedirs(dirname) open(full_path, 'wb').write(bin_value) except IOError: _logger.error("_file_write writing %s", full_path) return fname def _file_delete(self, cr, uid, location, fname): # using SQL to include files hidden through unlink or due to record rules cr.execute("SELECT COUNT(*) FROM ir_attachment WHERE store_fname = %s", (fname, )) count = cr.fetchone()[0] if count <= 1: full_path = self._full_path(cr, uid, location, fname) try: os.unlink(full_path) except OSError: _logger.error("_file_delete could not unlink %s", full_path) except IOError: # Harmless and needed for race conditions _logger.error("_file_delete could not unlink %s", full_path) def _data_get(self, cr, uid, ids, name, arg, context=None): if context is None: context = {} result = {} location = self.pool.get('ir.config_parameter').get_param( cr, SUPERUSER_ID, 'ir_attachment.location') bin_size = context.get('bin_size') for attach in self.browse(cr, uid, ids, context=context): if location and attach.store_fname: result[attach.id] = self._file_read(cr, uid, location, attach.store_fname, bin_size) else: result[attach.id] = attach.db_datas return result def _data_set(self, cr, uid, id, name, value, arg, context=None): # We dont handle setting data to null if not value: return True if context is None: 
context = {} location = self.pool.get('ir.config_parameter').get_param( cr, SUPERUSER_ID, 'ir_attachment.location') file_size = len(value.decode('base64')) if location: attach = self.browse(cr, uid, id, context=context) if attach.store_fname: self._file_delete(cr, uid, location, attach.store_fname) fname = self._file_write(cr, uid, location, value) # SUPERUSER_ID as probably don't have write access, trigger during create super(ir_attachment, self).write(cr, SUPERUSER_ID, [id], { 'store_fname': fname, 'file_size': file_size }, context=context) else: super(ir_attachment, self).write(cr, SUPERUSER_ID, [id], { 'db_datas': value, 'file_size': file_size }, context=context) return True _name = 'ir.attachment' _columns = { 'name': fields.char('Attachment Name', size=256, required=True), 'datas_fname': fields.char('File Name', size=256), 'description': fields.text('Description'), 'res_name': fields.function(_name_get_resname, type='char', size=128, string='Resource Name', store=True), 'res_model': fields.char( 'Resource Model', size=64, readonly=True, help="The database object this attachment will be attached to"), 'res_id': fields.integer('Resource ID', readonly=True, help="The record id this is attached to"), 'create_date': fields.datetime('Date Created', readonly=True), 'create_uid': fields.many2one('res.users', 'Owner', readonly=True), 'company_id': fields.many2one('res.company', 'Company', change_default=True), 'type': fields.selection([ ('url', 'URL'), ('binary', 'Binary'), ], 'Type', help="Binary File or URL", required=True, change_default=True), 'url': fields.char('Url', size=1024), # al: We keep shitty field names for backward compatibility with document 'datas': fields.function(_data_get, fnct_inv=_data_set, string='File Content', type="binary", nodrop=True), 'store_fname': fields.char('Stored Filename', size=256), 'db_datas': fields.binary('Database Data'), 'file_size': fields.integer('File Size'), } _defaults = { 'type': 'binary', 'file_size': 0, 'company_id': lambda s, cr, uid, c: s.pool.get('res.company')._company_default_get( cr, uid, 'ir.attachment', context=c), } def _auto_init(self, cr, context=None): super(ir_attachment, self)._auto_init(cr, context) cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = %s', ('ir_attachment_res_idx', )) if not cr.fetchone(): cr.execute( 'CREATE INDEX ir_attachment_res_idx ON ir_attachment (res_model, res_id)' ) cr.commit() def check(self, cr, uid, ids, mode, context=None, values=None): """Restricts the access to an ir.attachment, according to referred model In the 'document' module, it is overriden to relax this hard rule, since more complex ones apply there. 
""" res_ids = {} require_employee = False if ids: if isinstance(ids, (int, long)): ids = [ids] cr.execute( 'SELECT DISTINCT res_model, res_id, create_uid FROM ir_attachment WHERE id = ANY (%s)', (ids, )) for rmod, rid, create_uid in cr.fetchall(): if not (rmod and rid): if create_uid != uid: require_employee = True continue res_ids.setdefault(rmod, set()).add(rid) if values: if values.get('res_model') and values.get('res_id'): res_ids.setdefault(values['res_model'], set()).add(values['res_id']) ima = self.pool.get('ir.model.access') for model, mids in res_ids.items(): # ignore attachments that are not attached to a resource anymore when checking access rights # (resource was deleted but attachment was not) if not self.pool.get(model): require_employee = True continue existing_ids = self.pool.get(model).exists(cr, uid, mids) if len(existing_ids) != len(mids): require_employee = True ima.check(cr, uid, model, mode) self.pool.get(model).check_access_rule(cr, uid, existing_ids, mode, context=context) if require_employee: if not self.pool['res.users'].has_group(cr, uid, 'base.group_user'): raise except_orm( _('Access Denied'), _("Sorry, you are not allowed to access this document.")) def _search(self, cr, uid, args, offset=0, limit=None, order=None, context=None, count=False, access_rights_uid=None): ids = super(ir_attachment, self)._search(cr, uid, args, offset=offset, limit=limit, order=order, context=context, count=False, access_rights_uid=access_rights_uid) if not ids: if count: return 0 return [] # Work with a set, as list.remove() is prohibitive for large lists of documents # (takes 20+ seconds on a db with 100k docs during search_count()!) orig_ids = ids ids = set(ids) # For attachments, the permissions of the document they are attached to # apply, so we must remove attachments for which the user cannot access # the linked document. # Use pure SQL rather than read() as it is about 50% faster for large dbs (100k+ docs), # and the permissions are checked in super() and below anyway. cr.execute( """SELECT id, res_model, res_id FROM ir_attachment WHERE id = ANY(%s)""", (list(ids), )) targets = cr.dictfetchall() model_attachments = {} for target_dict in targets: if not (target_dict['res_id'] and target_dict['res_model']): continue # model_attachments = { 'model': { 'res_id': [id1,id2] } } model_attachments.setdefault(target_dict['res_model'], {}).setdefault( target_dict['res_id'], set()).add(target_dict['id']) # To avoid multiple queries for each attachment found, checks are # performed in batch as much as possible. 
ima = self.pool.get('ir.model.access') for model, targets in model_attachments.iteritems(): if not self.pool.get(model): continue if not ima.check(cr, uid, model, 'read', False): # remove all corresponding attachment ids for attach_id in itertools.chain(*targets.values()): ids.remove(attach_id) continue # skip ir.rule processing, these ones are out already # filter ids according to what access rules permit target_ids = targets.keys() allowed_ids = self.pool.get(model).search( cr, uid, [('id', 'in', target_ids)], context=context) disallowed_ids = set(target_ids).difference(allowed_ids) for res_id in disallowed_ids: for attach_id in targets[res_id]: ids.remove(attach_id) # sort result according to the original sort ordering result = [id for id in orig_ids if id in ids] return len(result) if count else list(result) def read(self, cr, uid, ids, fields_to_read=None, context=None, load='_classic_read'): if isinstance(ids, (int, long)): ids = [ids] self.check(cr, uid, ids, 'read', context=context) return super(ir_attachment, self).read(cr, uid, ids, fields_to_read, context, load) def write(self, cr, uid, ids, vals, context=None): if isinstance(ids, (int, long)): ids = [ids] self.check(cr, uid, ids, 'write', context=context, values=vals) if 'file_size' in vals: del vals['file_size'] return super(ir_attachment, self).write(cr, uid, ids, vals, context) def copy(self, cr, uid, id, default=None, context=None): self.check(cr, uid, [id], 'write', context=context) return super(ir_attachment, self).copy(cr, uid, id, default, context) def unlink(self, cr, uid, ids, context=None): if isinstance(ids, (int, long)): ids = [ids] self.check(cr, uid, ids, 'unlink', context=context) location = self.pool.get('ir.config_parameter').get_param( cr, SUPERUSER_ID, 'ir_attachment.location') if location: for attach in self.browse(cr, uid, ids, context=context): if attach.store_fname: self._file_delete(cr, uid, location, attach.store_fname) return super(ir_attachment, self).unlink(cr, uid, ids, context) def create(self, cr, uid, values, context=None): self.check(cr, uid, [], mode='write', context=context, values=values) if 'file_size' in values: del values['file_size'] return super(ir_attachment, self).create(cr, uid, values, context) def action_get(self, cr, uid, context=None): return self.pool.get('ir.actions.act_window').for_xml_id( cr, uid, 'base', 'action_attachment', context=context)
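# --- Illustration (not part of the module above) -------------------------------------
# Sketch of the filestore layout implied by _file_write/_full_path: the SHA1 hex digest
# of the binary content names the file, and its first three characters pick the scatter
# directory under <root_path>/<location>/<dbname>/. The 'filestore' directory name and
# the sample values below are illustrative only.
import hashlib
import os

def filestore_path(root_path, dbname, bin_value):
    digest = hashlib.sha1(bin_value).hexdigest()
    fname = digest[:3] + '/' + digest        # e.g. '2aa/2aae6c35c94f...'
    return os.path.join(root_path, 'filestore', dbname, fname)

# filestore_path('/opt/openerp/data', 'mydb', 'hello world')
#   -> '/opt/openerp/data/filestore/mydb/2aa/2aae6c35c94fcfb415dbe95f408b9ce91ee846ed'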
        'url': fields.char('File Location', size=255),
        'image': fields.binary("Image",
            help="This field holds the image used as image for the product, "
                 "limited to 1024x1024px."),
        'image_medium': fields.function(_get_image, fnct_inv=_set_image,
            string="Medium-sized image", type="binary", multi="_get_image",
            store={
                'product.images': (lambda self, cr, uid, ids, c={}: ids, ['image'], 10),
            },
            help="Medium-sized image of the product. It is automatically "
                 "resized as a 128x128px image, with aspect ratio preserved, "
                 "only when the image exceeds one of those sizes. "
                 "Use this field in form views or some kanban views."),
        'image_small': fields.function(_get_image, fnct_inv=_set_image,
            string="Small-sized image", type="binary", multi="_get_image",
            store={
                'product.images': (lambda self, cr, uid, ids, c={}: ids, ['image'], 10),
            },
            help="Small-sized image of the product. It is automatically "
                 "resized as a 64x64px image, with aspect ratio preserved. "
                 "Use this field anywhere a small image is required."),
        'comments': fields.text('Comments', translate=True),
        'product_id': fields.many2one('product.product', 'Product'),
    }

    _defaults = {
        'is_link': lambda *a: False,
    }

    _sql_constraints = [
        ('uniq_name_product_id', 'UNIQUE(product_id, name)',
         _('A product can have only one image with the same name')),
    ]
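# --- Illustration (not part of the module above) -------------------------------------
# image_medium / image_small above delegate to the framework's image helpers; the sketch
# below only illustrates the documented resize semantics ("aspect ratio preserved, only
# when the image exceeds one of those sizes") with PIL directly. PIL is assumed to be
# available; this is not the code path the module actually uses.
import base64
import cStringIO
from PIL import Image

def resize_base64_image(b64_data, max_size=(128, 128)):
    img = Image.open(cStringIO.StringIO(base64.b64decode(b64_data)))
    img.thumbnail(max_size, Image.ANTIALIAS)   # shrinks only, keeps the aspect ratio
    buf = cStringIO.StringIO()
    img.save(buf, 'PNG')
    return base64.b64encode(buf.getvalue())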
class agreement(osv.osv): _name = 'sale.recurring_orders.agreement' _inherit = ['mail.thread'] _description = "Recurring orders agreement" def __get_next_term_date(self, date, unit, interval): """ Get the date that results on incrementing given date an interval of time in time unit. @param date: Original date. @param unit: Interval time unit. @param interval: Quantity of the time unit. @rtype: date @return: The date incremented in 'interval' units of 'unit'. """ if unit == 'days': return date + timedelta(days=interval) elif unit == 'weeks': return date + timedelta(weeks=interval) elif unit == 'months': return date + relativedelta(months=interval) elif unit == 'years': return date + relativedelta(years=interval) def __get_next_expiration_date(self, cr, uid, ids, field_name, arg, context=None): """ Get next expiration date of the agreement. For unlimited agreements, get max date """ if not ids: return {} res = {} for agreement in self.browse(cr, uid, ids): if agreement.prolong == 'fixed': res[agreement.id] = agreement.end_date elif agreement.prolong == 'unlimited': now = datetime.now() date = self.__get_next_term_date(datetime.strptime(agreement.start_date, "%Y-%m-%d"), agreement.prolong_unit, agreement.prolong_interval) while (date < now): date = self.__get_next_term_date(date, agreement.prolong_unit, agreement.prolong_interval) res[agreement.id] = date else: # for renewable fixed term res[agreement.id] = self.__get_next_term_date(datetime.strptime( \ agreement.last_renovation_date if agreement.last_renovation_date else agreement.start_date, "%Y-%m-%d"), \ agreement.prolong_unit, agreement.prolong_interval) return res _columns = { 'name': fields.char('Name', size=100, select=1, required=True, help='Name that helps to identify the agreement'), 'number': fields.char('Agreement number', select=1, size=32, help="Number of agreement. Keep empty to get the number assigned by a sequence."), 'active': fields.boolean('Active', help='Unchecking this field, quotas are not generated'), 'partner_id': fields.many2one('res.partner', 'Customer', select=1, change_default=True, required=True, help="Customer you are making the agreement with"), 'company_id': fields.many2one('res.company', 'Company', required=True, help="Company that signs the agreement"), 'start_date': fields.date('Start date', select=1, help="Beginning of the agreement. Keep empty to use the current date"), 'prolong': fields.selection([('recurrent','Renewable fixed term'),('unlimited','Unlimited term'),('fixed','Fixed term')], 'Prolongation', help="Sets the term of the agreement. 
'Renewable fixed term': It sets a fixed term, but with possibility of manual renew; 'Unlimited term': Renew is made automatically; 'Fixed term': The term is fixed and there is no possibility to renew.", required=True), 'end_date': fields.date('End date', help="End date of the agreement"), 'prolong_interval': fields.integer('Interval', help="Interval in time units to prolong the agreement until new renewable (that is automatic for unlimited term, manual for renewable fixed term)."), 'prolong_unit': fields.selection([('days','days'),('weeks','weeks'),('months','months'),('years','years')], 'Interval unit', help='Time unit for the prolongation interval'), 'agreement_line': fields.one2many('sale.recurring_orders.agreement.line', 'agreement_id', 'Agreement lines'), 'order_line': fields.one2many('sale.recurring_orders.agreement.order', 'agreement_id', 'Order lines', readonly=True), 'renewal_line': fields.one2many('sale.recurring_orders.agreement.renewal', 'agreement_id', 'Renewal lines', readonly=True), 'last_renovation_date': fields.date('Last renovation date', help="Last date when agreement was renewed (same as start date if not renewed)"), 'next_expiration_date': fields.function(__get_next_expiration_date, string='Next expiration date', type='date', method=True, store=True), #TODO: Añadir posibilidad de seguir cuando se genera una factura con _track = {} 'state': fields.selection([('empty', 'Without orders'), ('first', 'First order created'), ('orders', 'With orders')], 'State', readonly=True), 'renewal_state': fields.selection([('not_renewed', 'Agreement not renewed'), ('renewed', 'Agreement renewed')], 'Renewal state', readonly=True), 'notes': fields.text('Notes'), } _defaults = { 'active': lambda *a: 1, 'company_id': lambda s, cr, uid, c: s.pool.get('res.company')._company_default_get(cr, uid, 'sale', context=c), 'prolong': lambda *a: 'unlimited', 'prolong_interval': lambda *a: 1, 'prolong_unit': lambda *a: 'years', 'state': lambda *a: 'empty', 'renewal_state': lambda *a: 'not_renewed', } _sql_constraints = [ ('number_uniq', 'unique(number)', 'Agreement number must be unique !'), ] def _check_dates(self, cr, uid, ids, context=None): """ Check correct dates. 
When prolongation is unlimited or renewal, end_date is False, so doesn't apply @rtype: boolean @return: True if dates are correct or don't apply, False otherwise """ if context == None: context = {} agreements = self.browse(cr, uid, ids, context=context) val = True for agreement in agreements: if agreement.end_date: val = val and agreement.end_date > agreement.start_date return val _constraints = [ (_check_dates, 'Agreement end date must be greater than start date', ['start_date','end_date']), ] def create(self, cr, uid, vals, context=None): # Set start date if empty if not vals.get('start_date'): vals['start_date'] = datetime.now() # Set agreement number if empty if not vals.get('number'): vals['number'] = self.pool.get('ir.sequence').get(cr, uid, 'sale.r_o.agreement.sequence') return super(agreement, self).create(cr, uid, vals, context=context) def write(self, cr, uid, ids, vals, context=None): value = super(agreement, self).write(cr, uid, ids, vals, context=context) # unlink all future orders if vals.has_key('active') or vals.has_key('number') or ( vals.has_key('agreement_line') and len(vals['agreement_line']) ) \ or vals.has_key('prolong') or vals.has_key('end_date') or vals.has_key('prolong_interval') or vals.has_key('prolong_unit'): self.unlink_orders(cr, uid, ids, datetime.date(datetime.now()), context) return value def copy(self, cr, uid, orig_id, default={}, context=None): if context is None: context = {} agreement_record = self.browse(cr, uid, orig_id) default.update({ 'state': 'empty', 'number': False, 'active': True, 'name': '%s*' % agreement_record['name'], 'start_date': False, 'order_line': [], 'renewal_line': [], }) return super(agreement, self).copy(cr, uid, orig_id, default, context) def unlink(self, cr, uid, ids, context=None): unlink_ids = [] for agreement in self.browse(cr, uid, ids, context=context): confirmedOrders = False for order_line in agreement.order_line: if order_line.confirmed: confirmedOrders = True if not confirmedOrders: unlink_ids.append(agreement.id) else: raise osv.except_osv(_('Invalid action!'), _('You cannot remove agreements with confirmed orders!')) self.unlink_orders(cr, uid, unlink_ids, datetime.date(datetime.now()), context=context) return osv.osv.unlink(self, cr, uid, unlink_ids, context=context) def onchange_start_date(self, cr, uid, ids, start_date=False): """ It changes last renovation date to the new start date. @rtype: dictionary @return: field last_renovation_date with new start date """ if not start_date: return {} result = {} result['value'] = { 'last_renovation_date': start_date } return result def revise_agreements_expirations_planned(self, cr, uid, context={}): """ Check each active agreement to see if the end is near """ ids = self.search(cr, uid, []) revise_ids = [] for agreement in self.browse(cr, uid, ids, context=context): if not agreement.active: continue next_expiration_date = datetime.date(datetime.strptime(agreement.next_expiration_date, "%Y-%m-%d")) if agreement.prolong == 'unlimited' and next_expiration_date <= datetime.date(datetime.now()): # add to a list for reviewing expiration date revise_ids.append(agreement.id) if revise_ids: # force recalculate next_expiration_date self.write(cr, uid, revise_ids, {'prolong':'unlimited'}, context=context) return True def create_order(self, cr, uid, agreement, date, agreement_lines, confirmed_flag, context={}): """ Method that creates an order from given data. @param agreement: Agreement method get data from. @param date: Date of created order. 
@param agreement_lines: Lines that will generate order lines. @confirmed_flag: Confirmed flag in agreement order line will be set to this value. """ order_obj = self.pool.get('sale.order') order_line_obj = self.pool.get('sale.order.line') # Create order object context['company_id'] = agreement.company_id.id order = { 'date_order': date.strftime('%Y-%m-%d'), 'date_confirm': date.strftime('%Y-%m-%d'), 'origin': agreement.number, 'partner_id': agreement.partner_id.id, 'state': 'draft', 'company_id': agreement.company_id.id, 'from_agreement': True, } # Get other order values from agreement partner order.update(self.pool['sale.order'].onchange_partner_id(cr, uid, [], agreement.partner_id.id,context)['value']) order['user_id'] = agreement.partner_id.user_id.id order_id = order_obj.create(cr, uid, order, context=context) # Create order lines objects agreement_lines_ids = [] for agreement_line in agreement_lines: order_line = { 'order_id': order_id, 'product_id': agreement_line.product_id.id, 'product_uom_qty': agreement_line.quantity, 'discount': agreement_line.discount, } # get other order line values from agreement line product order_line.update(self.pool['sale.order.line'].product_id_change(cr, uid, [], order['pricelist_id'], \ product=agreement_line.product_id.id, qty=agreement_line.quantity, partner_id=agreement.partner_id.id, fiscal_position=1 or order['fiscal_position'])['value']) # Put line taxes order_line['tax_id'] = [(6, 0, tuple(order_line['tax_id']))] # Put custom description if agreement_line.additional_description: order_line['name'] += " " + agreement_line.additional_description order_line_obj.create(cr, uid, order_line, context=context) agreement_lines_ids.append(agreement_line.id) # Update last order date for lines self.pool.get('sale.recurring_orders.agreement.line').write(cr, uid, agreement_lines_ids, {'last_order_date': date.strftime('%Y-%m-%d')} ,context=context) # Update agreement state if agreement.state != 'orders': self.pool.get('sale.recurring_orders.agreement').write(cr, uid, [agreement.id], {'state': 'orders'} ,context=context) # Create order agreement record agreement_order = { 'agreement_id': agreement.id, 'order_id': order_id, } self.pool.get('sale.recurring_orders.agreement.order').create(cr, uid, agreement_order, context=context) return order_id def _order_created(self, cr, uid, agreement, agreement_lines_ordered, order_id, context={}): """ It triggers actions after order is created. This method can be overriden for extending its functionality thanks to its parameters. @param agreement: Agreement object whose order has been created @param agreement_lines_ordered: List of agreement lines objects used in the creation of the order. @param order_id: ID of the created order. """ pass def _order_confirmed(self, cr, uid, agreement, order_id, context={}): """ It triggers actions after order is confirmed. This method can be overriden for extending its functionality thanks to its parameters. @param agreement: Agreement object whose order has been confirmed @param order_id: ID of the confirmed order. """ pass def _get_next_order_date(self, agreement, line, startDate, context={}): """ Get next date starting from given date when an order is generated. @param line: Agreement line @param startDate: Start date from which next order date is calculated. @rtype: datetime @return: Next order date starting from the given date. 
""" next_date = datetime.strptime(agreement.start_date, '%Y-%m-%d') while next_date <= startDate: next_date = self.__get_next_term_date(next_date, line.ordering_unit, line.ordering_interval) return next_date def generate_agreement_orders(self, cr, uid, agreement, startDate, endDate, context={}): """ Check if there is any pending order to create for given agreement. """ if not agreement.active: return lines_to_order = {} agreement_expiration_date = datetime.strptime(agreement.next_expiration_date, '%Y-%m-%d') if (agreement_expiration_date < endDate) and (agreement.prolong != 'unlimited'): endDate = agreement_expiration_date for line in agreement.agreement_line: # Check if there is any agreement line to order if line.active_chk: # Check future orders for this line until endDate next_order_date = self._get_next_order_date(agreement, line, startDate) while next_order_date < endDate: # Add to a list to order all lines together if not lines_to_order.get(next_order_date): lines_to_order[next_order_date] = [] lines_to_order[next_order_date].append(line) next_order_date = self._get_next_order_date(agreement, line, next_order_date) # Order all pending lines dates = lines_to_order.keys() dates.sort() agreement_order_obj = self.pool.get('sale.recurring_orders.agreement.order') for date in dates: # Check if an order exists for that date if not len(agreement_order_obj.search(cr, uid, [ ('date', '=', str(date)), ('agreement_id', '=', agreement['id']) ])): # create it if not exists order_id = self.create_order(cr, uid, agreement, date, lines_to_order[date], False, context=context) # Call 'event' method self._order_created(cr, uid, agreement, lines_to_order, order_id, context=context) def generate_initial_order(self, cr, uid, ids, context={}): """ Method that creates an initial order with all the agreement lines """ agreement = self.browse(cr, uid, ids, context=context)[0] agreement_lines = [] # Add only active lines for line in agreement.agreement_line: if line.active_chk: agreement_lines.append(line) order_id = self.create_order(cr, uid, agreement, datetime.strptime(agreement.start_date, '%Y-%m-%d'), agreement_lines, True, context=context) # Update agreement state self.write(cr, uid, agreement.id, { 'state': 'first' }, context=context) # Confirm order wf_service = netsvc.LocalService("workflow") wf_service.trg_validate(uid, 'sale.order', order_id, 'order_confirm', cr) # Get view to show data_obj = self.pool.get('ir.model.data') result = data_obj._get_id(cr, uid, 'sale', 'view_order_form') view_id = data_obj.browse(cr, uid, result).res_id # Return view with order created return { 'domain': "[('id','=', " + str(order_id) + ")]", 'view_type': 'form', 'view_mode': 'form', 'res_model': 'sale.order', 'context': context, 'res_id': order_id, 'view_id': [view_id], 'type': 'ir.actions.act_window', 'nodestroy': True } def generate_next_year_orders_planned(self, cr, uid, context={}): """ Check if there is any pending order to create for each agreement. """ if context is None: context = {} ids = self.search(cr, uid, [('active','=',True)]) self.generate_next_year_orders(cr, uid, ids, context) def generate_next_year_orders(self, cr, uid, ids, context={}): """ Method that generates all the orders of the given agreements for the next year, counting from current date. 
""" startDate = datetime.now() endDate = datetime(startDate.year + 1, startDate.month, startDate.day) for agreement in self.browse(cr, uid, ids, context=context): self.generate_agreement_orders(cr, uid, agreement, startDate, endDate, context) return True def confirm_current_orders_planned(self, cr, uid, context={}): if context is None: context = {} ids = self.search(cr, uid, []) now = datetime.now() wf_service = netsvc.LocalService("workflow") for agreement in self.browse(cr, uid, ids, context=context): for agreement_order in agreement.order_line: if datetime.strptime(agreement_order['date'], '%Y-%m-%d') <= now and not agreement_order.confirmed: order = agreement_order.order_id if order: wf_service.trg_validate(uid, 'sale.order', order.id, 'order_confirm', cr) self._order_confirmed(cr, uid, agreement, order.id, context) self.pool.get('sale.recurring_orders.agreement.order').write(cr, uid, agreement_order.id, { 'confirmed': 'True' }, context=context) def unlink_orders(self, cr, uid, ids, startDate, context={}): """ Remove generated orders from given date. """ agreement_order_obj = self.pool.get('sale.recurring_orders.agreement.order') ordersToRemove = [] for agreement in self.browse(cr, uid, ids, context=context): for order in agreement['order_line']: order_date = datetime.date(datetime.strptime(order['date'], '%Y-%m-%d')) if order_date >= startDate and not order.confirmed: if order.order_id.id: ordersToRemove.append(order.order_id.id) agreement_order_obj.unlink(cr, uid, order['id'], context) self.pool.get('sale.order').unlink(cr, uid, ordersToRemove, context)
class HeaderHTML(osv.osv):
    """HTML header that lets you define the HTML, CSS and page format used by Webkit reports"""
    _name = "ir.header_webkit"
    _columns = {
        'company_id': fields.many2one('res.company', 'Company'),
        'html': fields.text('Webkit Header', help="Set Webkit Report Header"),
        'footer_html': fields.text('Webkit Footer', help="Set Webkit Report Footer."),
        'css': fields.text('Header CSS'),
        'name': fields.char('Name', size=128, required=True),
        'margin_top': fields.float('Top Margin (mm)'),
        'margin_bottom': fields.float('Bottom Margin (mm)'),
        'margin_left': fields.float('Left Margin (mm)'),
        'margin_right': fields.float('Right Margin (mm)'),
        'orientation': fields.selection(
            [('Landscape', 'Landscape'), ('Portrait', 'Portrait')], 'Orientation'),
        'format': fields.selection([
            ('A0', 'A0 5 841 x 1189 mm'),
            ('A1', 'A1 6 594 x 841 mm'),
            ('A2', 'A2 7 420 x 594 mm'),
            ('A3', 'A3 8 297 x 420 mm'),
            ('A4', 'A4 0 210 x 297 mm, 8.26 x 11.69 inches'),
            ('A5', 'A5 9 148 x 210 mm'),
            ('A6', 'A6 10 105 x 148 mm'),
            ('A7', 'A7 11 74 x 105 mm'),
            ('A8', 'A8 12 52 x 74 mm'),
            ('A9', 'A9 13 37 x 52 mm'),
            ('B0', 'B0 14 1000 x 1414 mm'),
            ('B1', 'B1 15 707 x 1000 mm'),
            ('B2', 'B2 17 500 x 707 mm'),
            ('B3', 'B3 18 353 x 500 mm'),
            ('B4', 'B4 19 250 x 353 mm'),
            ('B5', 'B5 1 176 x 250 mm, 6.93 x 9.84 inches'),
            ('B6', 'B6 20 125 x 176 mm'),
            ('B7', 'B7 21 88 x 125 mm'),
            ('B8', 'B8 22 62 x 88 mm'),
            ('B9', 'B9 23 33 x 62 mm'),
            ('B10', 'B10 16 31 x 44 mm'),
            ('C5E', 'C5E 24 163 x 229 mm'),
            ('Comm10E', 'Comm10E 25 105 x 241 mm, U.S. Common 10 Envelope'),
            ('DLE', 'DLE 26 110 x 220 mm'),
            ('Executive', 'Executive 4 7.5 x 10 inches, 190.5 x 254 mm'),
            ('Folio', 'Folio 27 210 x 330 mm'),
            ('Ledger', 'Ledger 28 431.8 x 279.4 mm'),
            ('Legal', 'Legal 3 8.5 x 14 inches, 215.9 x 355.6 mm'),
            ('Letter', 'Letter 2 8.5 x 11 inches, 215.9 x 279.4 mm'),
            ('Tabloid', 'Tabloid 29 279.4 x 431.8 mm'),
        ], 'Paper size', required=True, help="Select the proper paper size"),
    }
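# --- Illustration (not part of the module above) -------------------------------------
# Hedged sketch of how an ir.header_webkit record such as the one defined above is
# typically mapped to wkhtmltopdf command-line options by a webkit report engine. The
# flags shown (--margin-*, --orientation, --page-size) are standard wkhtmltopdf options,
# but the exact mapping used by any given report module may differ; 'header' stands in
# for a browse record or plain dict with the same keys.
def webkit_page_args(header):
    args = []
    for field, flag in [('margin_top', '--margin-top'),
                        ('margin_bottom', '--margin-bottom'),
                        ('margin_left', '--margin-left'),
                        ('margin_right', '--margin-right')]:
        if header.get(field) is not None:
            args += [flag, str(header[field])]
    if header.get('orientation'):
        args += ['--orientation', header['orientation']]
    if header.get('format'):
        args += ['--page-size', header['format']]
    return args

# webkit_page_args({'margin_top': 10.0, 'orientation': 'Portrait', 'format': 'A4'})
#   -> ['--margin-top', '10.0', '--orientation', 'Portrait', '--page-size', 'A4']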
class sync_shangyi_info(osv.osv): _name = 'sync.shangyi.info' _columns = { 'code': fields.char(u'编码', required=True), 'name': fields.char(u'名称'), 'date': fields.date(u'日期'), 'text': fields.text(u'备注'), } _sql_constraints = [ ('code_uniq', 'unique(code)', 'Code must be unique!'), ] def sync_product_data(self, cr, uid, ids, context=None): ms = Lz_read_SQLCa(self) # 导入产品类别 # 获取更新记录范围,本地库的时间戳和服务端时间戳 local_sql = """ select max(stamp) AS timestamp from product_category """ remote_sql = "SELECT CONVERT(INT,max(timestamp)) AS timestamp from product_class " btw = self.query_period(local_sql, remote_sql) sql = """ select ClassId,cast(ClassName as nvarchar(100)) as name,CAST (TIMESTAMP AS INT ) AS stamps from product_class where CAST (TIMESTAMP AS INT ) between {0} and {1} """ sql = sql.format(btw['start_stamp'], btw['end_stamp']) product_class_list = ms.ExecQuery(sql.encode('utf-8')) for (ClassId, name, stamp) in product_class_list: code = ClassId p_ids = self.pool.get('product.category').search_bycode(cr, uid, code) if p_ids: category = self.pool.get('product.category').browse(cr, uid, p_ids) old_name = category.name val = {} if old_name != name: val['name'] = name val['stamp'] = stamp if val: self.pool.get('product.category').write(cr, uid, p_ids, val) else: self.pool.get('product.category').create(cr, uid, { 'code': ClassId, 'name': name, 'stamp': stamp, }) # 导入品牌 local_sql = """ select max(stamp) AS timestamp from product_brand """ remote_sql = "SELECT CONVERT(INT,max(timestamp)) AS timestamp from product_brand " btw = self.query_period(local_sql, remote_sql) sql = """ SELECT pb.BrandId,cast(pb.BrandName as nvarchar(100)) as name,CAST (TIMESTAMP AS INT ) AS stamps FROM product_brand pb where CAST (TIMESTAMP AS INT ) between {0} and {1} """ sql = sql.format(btw['start_stamp'], btw['end_stamp']) product_brand_list = ms.ExecQuery(sql.encode('utf-8')) for (BrandId, name, stamp) in product_brand_list: brand_id = self.pool.get('product.brand').search_bycode(cr, uid, BrandId) if brand_id: brand = self.pool.get('product.brand').browse(cr, uid, brand_id) old_name = brand.name val = {} if old_name != name: val['name'] = name val['stamp'] = stamp if val: self.pool.get('product.brand').write(cr, uid, brand_id, val) else: self.pool.get('product.brand').create(cr, uid, { 'code': BrandId, 'name': name, 'stamp': stamp, }) # 导入供应商 local_sql = """ select max(stamp) AS timestamp from res_partner where supplier is true """ remote_sql = "SELECT CONVERT(INT,max(timestamp)) AS timestamp from supplier " btw = self.query_period(local_sql, remote_sql) sql = """ select SupId,cast(SupName as nvarchar(100)) as name,cast(Addr as nvarchar(100)) as addr, Tel,Fax,Zip,Email,CAST (TIMESTAMP AS INT ) AS stamps from supplier where CAST (TIMESTAMP AS INT ) between {0} and {1} """ sql = sql.format(btw['start_stamp'], btw['end_stamp']) supplier_class_list = ms.ExecQuery(sql.encode('utf-8')) for (SupId, name, addr, Tel, Fax, Zip, Email, stamp) in supplier_class_list: code = SupId p_ids = self.pool.get('res.partner').search_bycode(cr, uid, code) if p_ids: partner = self.pool.get('res.partner').browse(cr, uid, p_ids) val = {} if partner.name != name: val['name'] = name if partner.street != addr: val['street'] = addr if partner.phone != Tel: val['phone'] = Tel if partner.fax != Fax: val['fax'] = Fax if partner.zip != Zip: val['zip'] = Zip if partner.email != Email: val['email'] = Email val['stamp'] = stamp if val: # partner.write(val) self.pool.get('res.partner').write(cr, uid, p_ids, val) else: self.pool.get('res.partner').create(cr, 
uid, { 'code': SupId, 'name': name, 'street': addr, 'phone': Tel, 'fax': Fax, 'zip': Zip, 'email': Email, 'is_company': True, 'supplier': True, 'customer': False, 'company_id': False, 'stamp': stamp }) # 导入产品 local_sql = """ select max(stamp) AS timestamp from product_template """ remote_sql = "SELECT CONVERT(INT,max(timestamp)) AS timestamp from product " btw = self.query_period(local_sql, remote_sql) sql = """ select ProId,Barcode,cast(ProName as nvarchar(100)) as name,cast(spec as nvarchar(100)) as spec, ClassId,SupId,isnull(NormalPrice,0),BrandId ,CAST (TIMESTAMP AS INT ) AS stamps from product where CAST (TIMESTAMP AS INT ) between {0} and {1} """ sql = sql.format(btw['start_stamp'], btw['end_stamp']) product_list = ms.ExecQuery(sql.encode('utf-8')) for (ProId, Barcode, name, spec, ClassId, SupId, NormalPrice, BrandId, stamp) in product_list: code = ProId p_id = self.pool.get('product.template').search_bycode(cr, uid, ProId) categ_id = self.pool.get('product.category').search_bycode(cr, uid, ClassId) m_categ_id = self.pool.get('product.category').search_bycode(cr, uid, ClassId[0:6]) b_categ_id = self.pool.get('product.category').search_bycode(cr, uid, ClassId[0:4]) sup_id = self.pool.get('res.partner').search_bycode(cr, uid, SupId) brand_id = self.pool.get('product.brand').search_bycode(cr, uid, BrandId) if p_id: product = self.pool.get('product.template').browse(cr, uid, p_id) val = {} if product.name != name: val['name'] = name if product.barcode != Barcode: val['barcode'] = Barcode if product.categ_id.id != categ_id: val['categ_id'] = categ_id if product.b_category.id != b_categ_id: val['b_category'] = b_categ_id if product.m_category.id != m_categ_id: val['m_category'] = m_categ_id if product.list_price != float(NormalPrice): val['list_price'] = NormalPrice if product.brand_id.id != brand_id: val['brand_id'] = brand_id if product.spec != spec: val['spec'] = spec val['stamp'] = stamp if val: self.pool.get('product.template').write(cr, uid, p_id, val) seller_ids = product.seller_ids s_ids = [] for seller_id in seller_ids: s_ids.append(seller_id.name.id) if sup_id and sup_id not in s_ids: unlink_ids = self.pool.get('product.supplierinfo').search(cr, uid, [('product_tmpl_id', '=', p_id)]) self.pool.get('product.supplierinfo').unlink(cr, uid, unlink_ids) self.pool.get('product.supplierinfo').create(cr, uid, { 'product_tmpl_id': p_id, 'name': sup_id, }) else: product_tmpl_id = self.pool.get('product.template').create(cr, uid, { 'code': ProId, 'barcode': Barcode, 'name': name, 'spec': spec, 'list_price': NormalPrice, 'sale_ok': True, 'type': 'product', 'active': True, 'categ_id': categ_id, 'm_category': m_categ_id, 'b_category': b_categ_id, 'brand_id': brand_id, 'company_id': False, 'stamp': stamp, }) if sup_id: self.pool.get('product.supplierinfo').create(cr, uid, { 'product_tmpl_id': product_tmpl_id, 'name': sup_id, }) return def synch_product_category_parent(self, cr, uid, ids, context=None): # 产品分类分级 c_ids = self.pool.get('product.category').search(cr, uid, [('code', '!=', False)]) b_list = [] m_list = [] for c_id in c_ids: category = self.pool.get('product.category').browse(cr, uid, c_id) code = category.code if len(code) == 4: b_list.append({ 'value': category.code, 'id': category.id, }) elif len(code) == 6: m_list.append({ 'value': category.code, 'id': category.id, }) for c_id in c_ids: category = self.pool.get('product.category').browse(cr, uid, c_id) code = category.code if len(code) == 4: parent_id = False categ_id = self.pool.get('product.category').search(cr, uid, [('name', 
'=', u'乐之产品分类')]) if categ_id: parent_id = categ_id[0] if category.parent_id != parent_id: self.pool.get('product.category').write(cr, uid, c_id, {'parent_id': parent_id}) elif len(code) == 6: parent_id = False for b in b_list: if category.code[0:4] == b.get('value'): parent_id = b.get('id') if category.parent_id != parent_id: self.pool.get('product.category').write(cr, uid, c_id, {'parent_id': parent_id}) elif len(code) == 8: parent_id = False for m in m_list: if category.code[0:6] == m.get('value'): parent_id = m.get('id') if category.parent_id != parent_id: self.pool.get('product.category').write(cr, uid, c_id, {'parent_id': parent_id}) return def synch_pos_order(self, cr, uid, ids, sy_product_date, context=None): if type(sy_product_date) is types.DictType: if self.browse(cr, uid, ids[0]) and self.browse(cr, uid, ids[0]).date: date = self.browse(cr, uid, ids[0]).date else: date = (datetime.datetime.now() - datetime.timedelta(days=1)).strftime("%Y-%m-%d") else: date = sy_product_date ms = Lz_read_SQLCa(self) # 导入员工 employee_list = ms.ExecQuery( "SELECT be.BraId,be.EmpId,cast(be.EmpName as nvarchar(100)) as name FROM branch_employee be".encode( 'utf-8')) for (BraId, EmpId, name) in employee_list: e_id = self.pool.get('hr.employee').search_bycode(cr, uid, EmpId) company_id = self.pool.get('res.company').search_bycode(cr, uid, BraId) if e_id: hr_employee = self.pool.get('hr.employee').browse(cr, uid, e_id) val = {} if hr_employee.company_id and hr_employee.company_id.id != company_id: val['company_id'] = company_id if hr_employee.company_id == False and company_id: val['company_id'] = company_id if hr_employee.name != name: val['name'] = name if val: self.pool.get('resource.resource').write(cr, uid, hr_employee.resource_id.id, val) else: self.pool.get('hr.employee').create(cr, uid, { 'code': EmpId, 'name': name, }) # 删除当天数据 del_ids = self.pool.get('sy.pos.order').search(cr, uid, [('sale_date', 'like', date + '%')]) for del_id in del_ids: self.pool.get('sy.pos.order').unlink(cr, uid, del_id) del_ids = self.pool.get('sy.pos.payment').search(cr, uid, [('date', 'like', date + '%')]) for del_id in del_ids: self.pool.get('sy.pos.payment').unlink(cr, uid, del_id) # 导入当天pos订单和pos支付 exec_sql=""" SELECT bs.BraId,convert(VARCHAR(10),bs.SaleDate,126),DATEADD(hour,-8,bs.SaleDate) AS SaleDate, bs.proid,bs.SaleQty,bs.NormalPrice,bs.amount,bs.SaleId, bs.SaleMan,bs.SaleType,bs.PosNo,bs.profit FROM v_bn_saledetail bs WHERE convert(VARCHAR(10),bs.SaleDate,126)= '{0}' """ exec_sql.format(date) pos_order_list = ms.ExecQuery(exec_sql.encode('utf-8')) for (BraId, SaleDate, proid, SaleQty, NormalPrice, amount, SaleId, SaleMan, SaleType, PosNo, profit) in pos_order_list: product_id = self.pool.get('product.template').search_bycode(cr, uid, proid) company_id = self.pool.get('res.company').search_bycode(cr, uid, BraId) employee_id = self.pool.get('hr.employee').search_bycode(cr, uid, SaleMan) self.pool.get('sy.pos.order').create(cr, uid, { 'code': SaleId, 'product': product_id, 'sale_date': SaleDate, 'qty': SaleQty, 'normal_price': NormalPrice, 'amount': amount, 'company_id': company_id, 'sale_man': employee_id, 'sale_type': SaleType, 'PosNo': PosNo, 'profit': profit, }) pos_order_pay_list = ms.ExecQuery( "SELECT spa.SaleId,spa.PaymodeId,DATEADD(hour,-8,spa.SaleDate) as SaleDate,spa.PayMoney,spa.BraId FROM sale_paymode_all spa WHERE convert(VARCHAR(10),spa.SaleDate,126) = '%s'" % ( date).encode('utf-8')) for (SaleId, PaymodeId, SaleDate, PayMoney, BraId) in pos_order_pay_list: company_id = 
self.pool.get('res.company').search_bycode(cr, uid, BraId) self.pool.get('sy.pos.payment').create(cr, uid, { 'code': SaleId, 'date': SaleDate, 'paymodel': PaymodeId, 'paymoney': PayMoney, 'company_id': company_id, }) return def auto_update(self, cr, uid, ids, context=None): # 导入前一天销售数据 date = (datetime.datetime.now() - datetime.timedelta(days=1)).strftime("%Y-%m-%d") self.synch_product_data(cr, uid, ids, context=None) self.synch_product_category_parent(cr, uid, ids, context=None) self.synch_pos_order(cr, uid, ids, date, context=None) # 检查前5天的销售数据是否正确 day = 5 start_date = (datetime.datetime.now() - datetime.timedelta(days=1 + day)).strftime("%Y-%m-%d") end_date = (datetime.datetime.now() - datetime.timedelta(days=1)).strftime("%Y-%m-%d") check_sql = """ SELECT braid,date,SUM(amount),SUM(profit) FROM v_bn_saledetail WHERE saledate BETWEEN '%s' AND '%s' GROUP BY braid,date """ % (start_date, end_date) ms = Lz_read_SQLCa(self) record = ms.ExecQuery(check_sql.encode('utf-8')) for (braid, date, amount, profit) in record: company_id = self.pool.get('res.company').search_bycode(cr, uid, braid) s_date = date + " 00:00:00" e_date = date + " 23:59:59" pos_order_ids = self.pool.get('sy.pos.order').search(cr, uid, [('company_id', '=', company_id), ('sale_date', '>=', s_date), ('sale_date', '<=', e_date)]) sum_amount = 0 sum_profit = 0 for pos_id in pos_order_ids: pos_order = self.pool.get('sy.pos.order').browse(cr, uid, pos_id) sum_amount = sum_amount + pos_order.amount sum_profit = sum_profit + pos_order.profit if int(amount) != int(sum_amount) or int(profit) != int(sum_profit): self.synch_pos_order(cr, uid, ids, date, context=None) return True # 检查历史pos单是否一致 def check_pos_order(self, cr, uid, ids, context=None): check_sql = """ SELECT braid,date,SUM(amount),SUM(profit) FROM v_bn_saledetail WHERE date IS NOT NULL GROUP BY braid,date ORDER BY date """ ms = Lz_read_SQLCa(self) record = ms.ExecQuery(check_sql.encode('utf-8')) for (braid, date, amount, profit) in record: company_id = self.pool.get('res.company').search_bycode(cr, uid, braid) s_date = date + " 00:00:00" e_date = date + " 23:59:59" pos_order_ids = self.pool.get('sy.pos.order').search(cr, uid, [('company_id', '=', company_id), ('sale_date', '>=', s_date), ('sale_date', '<=', e_date)]) sum_amount = 0 sum_profit = 0 for pos_id in pos_order_ids: pos_order = self.pool.get('sy.pos.order').browse(cr, uid, pos_id) sum_amount = sum_amount + pos_order.amount sum_profit = sum_profit + pos_order.profit if int(amount) != int(sum_amount) or int(profit) != int(sum_profit): self.synch_pos_order(cr, uid, ids, date, context=None) return def query_period(self, local, remote): start_stamp = 0 end_stamp = 0 query_local = local query_remote = remote cr = self._cr cr.execute(query_local) for local_max_num in cr.fetchall(): start_stamp = local_max_num[0] if local_max_num[0] is None: start_stamp = 0 return_start = start_stamp ms = Lz_read_SQLCa(self) remote_stamp = ms.ExecQuery(query_remote.encode('utf-8')) for end_stamp in remote_stamp: if remote_stamp[0] is None: end_stamp = 0 return_end = end_stamp[0] res = { 'start_stamp': return_start, 'end_stamp': return_end, } return res
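# --- Illustration (not part of the module above) -------------------------------------
# Sketch of the incremental-sync window computed by query_period above: the highest
# timestamp already stored locally and the highest timestamp on the remote MS SQL side
# bound the rows to pull. fetch_local_max / fetch_remote_max are hypothetical callables
# standing in for the local cursor query and the Lz_read_SQLCa helper.
def sync_window(fetch_local_max, fetch_remote_max):
    start = fetch_local_max() or 0    # nothing synced yet -> start from 0
    end = fetch_remote_max() or 0
    return {'start_stamp': start, 'end_stamp': end}

# The remote selection then becomes:
#   WHERE CAST(TIMESTAMP AS INT) BETWEEN {start_stamp} AND {end_stamp}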
class hr_training_course(osv.osv):
    _name = "hr.training.course"
    _description = "Register Training Course"
    _columns = {
        'name': fields.char('Course name', required=True),
        'category': fields.char('Category', size=100),
        'from_date': fields.date('From', required=True),
        'to_date': fields.date('To', required=True),
        'attachments': fields.one2many('ir.attachment', 'res_id', 'Attachments'),
        'notes': fields.text('Notes'),
    }
    _defaults = {
        'category': 'miscellaneous',
    }
    _sql_constraints = [
        ('course_unique_name_date', 'unique(name,from_date,to_date)',
         'The same course with the same start and end dates exists'),
    ]

    def _check_unique_insenstive(self, cr, uid, ids, context=None):
        """ Check case-insensitive uniqueness of the course name combined with its
            start date. """
        all_ids = self.search(cr, uid, [], context=context)
        record = self.browse(cr, uid, ids, context=context)
        all_ids.remove(record[0].id)
        for obj in self.browse(cr, uid, all_ids, context=context):
            if obj.name.lower() == record[0].name.lower() and obj.from_date == record[0].from_date:
                return False
        return True

    def _check_date(self, cr, uid, ids, context=None):
        """ Check that the course does not start in the past and that its end date
            is later than its start date. """
        record = self.browse(cr, uid, ids, context=context)
        from_date = datetime.strptime(record[0].from_date, "%Y-%m-%d")
        to_date = datetime.strptime(record[0].to_date, "%Y-%m-%d")
        return from_date >= datetime.today() and to_date > from_date

    def name_get(self, cr, uid, ids, context=None):
        if context is None:
            context = {}
        if not ids:
            return []
        res = []
        for obj in self.browse(cr, uid, ids, context=context):
            # fall back to the id (as a string) when the course has no name
            aux = obj.name if obj.name else str(obj.id)
            aux += ' ('
            if obj.from_date:
                aux += obj.from_date
            aux += " - "
            if obj.to_date:
                aux += obj.to_date
            aux += ')'
            res.append((obj.id, aux))
        return res

    _constraints = [
        (_check_unique_insenstive, 'Course name and dates must be unique',
         ['name', 'from_date', 'to_date']),
        (_check_date, 'Incorrect Date Values', ['from_date', 'to_date']),
    ]

    _order = 'from_date asc'
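# --- Illustration (not part of the module above) -------------------------------------
# Expected display name produced by hr_training_course.name_get above, rebuilt outside
# the ORM with sample values only.
def course_display_name(name, from_date, to_date):
    return '%s (%s - %s)' % (name, from_date, to_date)

# course_display_name('Safety Training', '2014-03-01', '2014-03-05')
#   -> 'Safety Training (2014-03-01 - 2014-03-05)'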
class project_issue(osv.Model): _name = "project.issue" _description = "Project Issue" _order = "priority desc, create_date desc" _inherit = ['mail.thread', 'ir.needaction_mixin'] _mail_post_access = 'read' _track = { 'stage_id': { # this is only an heuristics; depending on your particular stage configuration it may not match all 'new' stages 'project_issue.mt_issue_new': lambda self, cr, uid, obj, ctx=None: obj.stage_id and obj.stage_id. sequence <= 1, 'project_issue.mt_issue_stage': lambda self, cr, uid, obj, ctx=None: obj.stage_id and obj.stage_id. sequence > 1, }, 'user_id': { 'project_issue.mt_issue_assigned': lambda self, cr, uid, obj, ctx=None: obj.user_id and obj.user_id. id, }, 'kanban_state': { 'project_issue.mt_issue_blocked': lambda self, cr, uid, obj, ctx=None: obj.kanban_state == 'blocked', 'project_issue.mt_issue_ready': lambda self, cr, uid, obj, ctx=None: obj.kanban_state == 'done', }, } def _get_default_partner(self, cr, uid, context=None): project_id = self._get_default_project_id(cr, uid, context) if project_id: project = self.pool.get('project.project').browse(cr, uid, project_id, context=context) if project and project.partner_id: return project.partner_id.id return False def _get_default_project_id(self, cr, uid, context=None): """ Gives default project by checking if present in the context """ return self._resolve_project_id_from_context(cr, uid, context=context) def _get_default_stage_id(self, cr, uid, context=None): """ Gives default stage_id """ project_id = self._get_default_project_id(cr, uid, context=context) return self.stage_find(cr, uid, [], project_id, [('fold', '=', False)], context=context) def _resolve_project_id_from_context(self, cr, uid, context=None): """ Returns ID of project based on the value of 'default_project_id' context key, or None if it cannot be resolved to a single project. 
""" if context is None: context = {} if type(context.get('default_project_id')) in (int, int): return context.get('default_project_id') if isinstance(context.get('default_project_id'), str): project_name = context['default_project_id'] project_ids = self.pool.get('project.project').name_search( cr, uid, name=project_name, context=context) if len(project_ids) == 1: return int(project_ids[0][0]) return None def _read_group_stage_ids(self, cr, uid, ids, domain, read_group_order=None, access_rights_uid=None, context=None): access_rights_uid = access_rights_uid or uid stage_obj = self.pool.get('project.task.type') order = stage_obj._order # lame hack to allow reverting search, should just work in the trivial case if read_group_order == 'stage_id desc': order = "%s desc" % order # retrieve section_id from the context and write the domain # - ('id', 'in', 'ids'): add columns that should be present # - OR ('case_default', '=', True), ('fold', '=', False): add default columns that are not folded # - OR ('project_ids', 'in', project_id), ('fold', '=', False) if project_id: add project columns that are not folded search_domain = [] project_id = self._resolve_project_id_from_context(cr, uid, context=context) if project_id: search_domain += ['|', ('project_ids', '=', project_id)] search_domain += [('id', 'in', ids)] # perform search stage_ids = stage_obj._search(cr, uid, search_domain, order=order, access_rights_uid=access_rights_uid, context=context) result = stage_obj.name_get(cr, access_rights_uid, stage_ids, context=context) # restore order of the search result.sort( lambda x, y: cmp(stage_ids.index(x[0]), stage_ids.index(y[0]))) fold = {} for stage in stage_obj.browse(cr, access_rights_uid, stage_ids, context=context): fold[stage.id] = stage.fold or False return result, fold def _compute_day(self, cr, uid, ids, fields, args, context=None): """ @param cr: the current row, from the database cursor, @param uid: the current user’s ID for security checks, @param ids: List of Openday’s IDs @return: difference between current date and log date @param context: A standard dictionary for contextual values """ Calendar = self.pool['resource.calendar'] res = dict((res_id, {}) for res_id in ids) for issue in self.browse(cr, uid, ids, context=context): values = { 'day_open': 0.0, 'day_close': 0.0, 'working_hours_open': 0.0, 'working_hours_close': 0.0, 'days_since_creation': 0.0, 'inactivity_days': 0.0, } # if the working hours on the project are not defined, use default ones (8 -> 12 and 13 -> 17 * 5), represented by None calendar_id = None if issue.project_id and issue.project_id.resource_calendar_id: calendar_id = issue.project_id.resource_calendar_id.id dt_create_date = datetime.strptime(issue.create_date, DEFAULT_SERVER_DATETIME_FORMAT) if issue.date_open: dt_date_open = datetime.strptime( issue.date_open, DEFAULT_SERVER_DATETIME_FORMAT) values['day_open'] = (dt_date_open - dt_create_date ).total_seconds() / (24.0 * 3600) values['working_hours_open'] = Calendar._interval_hours_get( cr, uid, calendar_id, dt_create_date, dt_date_open, timezone_from_uid=issue.user_id.id or uid, exclude_leaves=False, context=context) if issue.date_closed: dt_date_closed = datetime.strptime( issue.date_closed, DEFAULT_SERVER_DATETIME_FORMAT) values['day_close'] = (dt_date_closed - dt_create_date ).total_seconds() / (24.0 * 3600) values['working_hours_close'] = Calendar._interval_hours_get( cr, uid, calendar_id, dt_create_date, dt_date_closed, timezone_from_uid=issue.user_id.id or uid, exclude_leaves=False, context=context) 
days_since_creation = datetime.today() - dt_create_date values['days_since_creation'] = days_since_creation.days if issue.date_action_last: inactive_days = datetime.today() - datetime.strptime( issue.date_action_last, DEFAULT_SERVER_DATETIME_FORMAT) elif issue.date_last_stage_update: inactive_days = datetime.today() - datetime.strptime( issue.date_last_stage_update, DEFAULT_SERVER_DATETIME_FORMAT) else: inactive_days = datetime.today() - datetime.strptime( issue.create_date, DEFAULT_SERVER_DATETIME_FORMAT) values['inactivity_days'] = inactive_days.days # filter only required values for field in fields: res[issue.id][field] = values[field] return res def _hours_get(self, cr, uid, ids, field_names, args, context=None): task_pool = self.pool.get('project.task') res = {} for issue in self.browse(cr, uid, ids, context=context): progress = 0.0 if issue.task_id: progress = task_pool._hours_get( cr, uid, [issue.task_id.id], field_names, args, context=context)[issue.task_id.id]['progress'] res[issue.id] = {'progress': progress} return res def on_change_project(self, cr, uid, ids, project_id, context=None): if project_id: project = self.pool.get('project.project').browse(cr, uid, project_id, context=context) if project and project.partner_id: return { 'value': { 'partner_id': project.partner_id.id, 'email_from': project.partner_id.email } } return {} def _get_issue_task(self, cr, uid, ids, context=None): issues = [] issue_pool = self.pool.get('project.issue') for task in self.pool.get('project.task').browse(cr, uid, ids, context=context): issues += issue_pool.search(cr, uid, [('task_id', '=', task.id)]) return issues def _get_issue_work(self, cr, uid, ids, context=None): issues = [] issue_pool = self.pool.get('project.issue') for work in self.pool.get('project.task.work').browse(cr, uid, ids, context=context): if work.task_id: issues += issue_pool.search( cr, uid, [('task_id', '=', work.task_id.id)]) return issues _columns = { 'id': fields.integer('ID', readonly=True), 'name': fields.char('Issue', required=True), 'active': fields.boolean('Active', required=False), 'create_date': fields.datetime('Creation Date', readonly=True, select=True), 'write_date': fields.datetime('Update Date', readonly=True), 'days_since_creation': fields.function(_compute_day, string='Days since creation date', \ multi='compute_day', type="integer", help="Difference in days between creation date and current date"), 'date_deadline': fields.date('Deadline'), 'section_id': fields.many2one('crm.case.section', 'Sales Team', \ select=True, help='Sales team to which Case belongs to.\ Define Responsible user and Email account for mail gateway.' ), 'partner_id': fields.many2one('res.partner', 'Contact', select=1), 'company_id': fields.many2one('res.company', 'Company'), 'description': fields.text('Private Note'), 'kanban_state': fields.selection([('normal', 'Normal'),('blocked', 'Blocked'),('done', 'Ready for next stage')], 'Kanban State', track_visibility='onchange', help="A Issue's kanban state indicates special situations affecting it:\n" " * Normal is the default situation\n" " * Blocked indicates something is preventing the progress of this issue\n" " * Ready for next stage indicates the issue is ready to be pulled to the next stage", required=False), 'email_from': fields.char('Email', size=128, help="These people will receive email.", select=1), 'email_cc': fields.char('Watchers Emails', size=256, help="These email addresses will be added to the CC field of all inbound and outbound emails for this record before being sent. 
Separate multiple email addresses with a comma"), 'date_open': fields.datetime('Assigned', readonly=True, select=True), # Project Issue fields 'date_closed': fields.datetime('Closed', readonly=True, select=True), 'date': fields.datetime('Date'), 'date_last_stage_update': fields.datetime('Last Stage Update', select=True), 'channel': fields.char('Channel', help="Communication channel."), 'categ_ids': fields.many2many('project.category', string='Tags'), 'priority': fields.selection([('0','Low'), ('1','Normal'), ('2','High')], 'Priority', select=True), 'version_id': fields.many2one('project.issue.version', 'Version'), 'stage_id': fields.many2one ('project.task.type', 'Stage', track_visibility='onchange', select=True, domain="[('project_ids', '=', project_id)]", copy=False), 'project_id': fields.many2one('project.project', 'Project', track_visibility='onchange', select=True), 'duration': fields.float('Duration'), 'task_id': fields.many2one('project.task', 'Task', domain="[('project_id','=',project_id)]"), 'day_open': fields.function(_compute_day, string='Days to Assign', multi='compute_day', type="float", store={'project.issue': (lambda self, cr, uid, ids, c={}: ids, ['date_open'], 10)}), 'day_close': fields.function(_compute_day, string='Days to Close', multi='compute_day', type="float", store={'project.issue': (lambda self, cr, uid, ids, c={}: ids, ['date_closed'], 10)}), 'user_id': fields.many2one('res.users', 'Assigned to', required=False, select=1, track_visibility='onchange'), 'working_hours_open': fields.function(_compute_day, string='Working Hours to assign the Issue', multi='compute_day', type="float", store={'project.issue': (lambda self, cr, uid, ids, c={}: ids, ['date_open'], 10)}), 'working_hours_close': fields.function(_compute_day, string='Working Hours to close the Issue', multi='compute_day', type="float", store={'project.issue': (lambda self, cr, uid, ids, c={}: ids, ['date_closed'], 10)}), 'inactivity_days': fields.function(_compute_day, string='Days since last action', multi='compute_day', type="integer", help="Difference in days between last action and current date"), 'color': fields.integer('Color Index'), 'user_email': fields.related('user_id', 'email', type='char', string='User Email', readonly=True), 'date_action_last': fields.datetime('Last Action', readonly=1), 'date_action_next': fields.datetime('Next Action', readonly=1), 'progress': fields.function(_hours_get, string='Progress (%)', multi='hours', group_operator="avg", help="Computed as: Time Spent / Total Time.", store = { 'project.issue': (lambda self, cr, uid, ids, c={}: ids, ['task_id'], 10), 'project.task': (_get_issue_task, ['work_ids', 'remaining_hours', 'planned_hours', 'state', 'stage_id'], 10), 'project.task.work': (_get_issue_work, ['hours'], 10), }), } _defaults = { 'active': 1, 'stage_id': lambda s, cr, uid, c: s._get_default_stage_id(cr, uid, c), 'company_id': lambda s, cr, uid, c: s.pool.get('res.company')._company_default_get( cr, uid, 'crm.helpdesk', context=c), 'priority': '0', 'kanban_state': 'normal', 'date_last_stage_update': fields.datetime.now, 'user_id': lambda obj, cr, uid, context: uid, } _group_by_full = {'stage_id': _read_group_stage_ids} def copy(self, cr, uid, id, default=None, context=None): issue = self.read(cr, uid, [id], ['name'], context=context)[0] if not default: default = {} default = default.copy() default.update(name=_('%s (copy)') % (issue['name'])) return super(project_issue, self).copy(cr, uid, id, default=default, context=context) def create(self, cr, uid, vals, 
context=None): context = dict(context or {}) if vals.get('project_id') and not context.get('default_project_id'): context['default_project_id'] = vals.get('project_id') if vals.get('user_id') and not vals.get('date_open'): vals['date_open'] = fields.datetime.now() if 'stage_id' in vals: vals.update( self.onchange_stage_id(cr, uid, None, vals.get('stage_id'), context=context)['value']) # context: no_log, because subtype already handle this create_context = dict(context, mail_create_nolog=True) return super(project_issue, self).create(cr, uid, vals, context=create_context) def write(self, cr, uid, ids, vals, context=None): # stage change: update date_last_stage_update if 'stage_id' in vals: vals.update( self.onchange_stage_id(cr, uid, ids, vals.get('stage_id'), context=context)['value']) vals['date_last_stage_update'] = fields.datetime.now() if 'kanban_state' not in vals: vals['kanban_state'] = 'normal' # user_id change: update date_open if vals.get('user_id') and 'date_open' not in vals: vals['date_open'] = fields.datetime.now() return super(project_issue, self).write(cr, uid, ids, vals, context) def onchange_task_id(self, cr, uid, ids, task_id, context=None): if not task_id: return {'value': {}} task = self.pool.get('project.task').browse(cr, uid, task_id, context=context) return { 'value': { 'user_id': task.user_id.id, } } def onchange_partner_id(self, cr, uid, ids, partner_id, context=None): """ This function returns value of partner email address based on partner :param part: Partner's id """ result = {} if partner_id: partner = self.pool['res.partner'].browse(cr, uid, partner_id, context) result['email_from'] = partner.email return {'value': result} def get_empty_list_help(self, cr, uid, help, context=None): context = dict(context or {}) context['empty_list_help_model'] = 'project.project' context['empty_list_help_id'] = context.get('default_project_id') context['empty_list_help_document_name'] = _("issues") return super(project_issue, self).get_empty_list_help(cr, uid, help, context=context) # ------------------------------------------------------- # Stage management # ------------------------------------------------------- def onchange_stage_id(self, cr, uid, ids, stage_id, context=None): if not stage_id: return {'value': {}} stage = self.pool['project.task.type'].browse(cr, uid, stage_id, context=context) if stage.fold: return {'value': {'date_closed': fields.datetime.now()}} return {'value': {'date_closed': False}} def stage_find(self, cr, uid, cases, section_id, domain=[], order='sequence', context=None): """ Override of the base.stage method Parameter of the stage search taken from the issue: - type: stage type must be the same or 'both' - section_id: if set, stages must belong to this section or be a default case """ if isinstance(cases, int): cases = self.browse(cr, uid, cases, context=context) # collect all section_ids section_ids = [] if section_id: section_ids.append(section_id) for task in cases: if task.project_id: section_ids.append(task.project_id.id) # OR all section_ids and OR with case_default search_domain = [] if section_ids: search_domain += [('|')] * (len(section_ids) - 1) for section_id in section_ids: search_domain.append(('project_ids', '=', section_id)) search_domain += list(domain) # perform search, return the first found stage_ids = self.pool.get('project.task.type').search(cr, uid, search_domain, order=order, context=context) if stage_ids: return stage_ids[0] return False def case_escalate( self, cr, uid, ids, context=None): # FIXME rename this method to 
issue_escalate for issue in self.browse(cr, uid, ids, context=context): data = {} esc_proj = issue.project_id.project_escalation_id if not esc_proj: raise osv.except_osv( _('Warning!'), _('You cannot escalate this issue.\nThe relevant Project has not configured the Escalation Project!' )) data['project_id'] = esc_proj.id if esc_proj.user_id: data['user_id'] = esc_proj.user_id.id issue.write(data) if issue.task_id: issue.task_id.write({ 'project_id': esc_proj.id, 'user_id': False }) return True # ------------------------------------------------------- # Mail gateway # ------------------------------------------------------- def message_get_reply_to(self, cr, uid, ids, context=None): """ Override to get the reply_to of the parent project. """ issues = self.browse(cr, SUPERUSER_ID, ids, context=context) project_ids = set( [issue.project_id.id for issue in issues if issue.project_id]) aliases = self.pool['project.project'].message_get_reply_to( cr, uid, list(project_ids), context=context) return dict( (issue.id, aliases.get(issue.project_id and issue.project_id.id or 0, False)) for issue in issues) def message_get_suggested_recipients(self, cr, uid, ids, context=None): recipients = super(project_issue, self).message_get_suggested_recipients( cr, uid, ids, context=context) try: for issue in self.browse(cr, uid, ids, context=context): if issue.partner_id: self._message_add_suggested_recipient( cr, uid, recipients, issue, partner=issue.partner_id, reason=_('Customer')) elif issue.email_from: self._message_add_suggested_recipient( cr, uid, recipients, issue, email=issue.email_from, reason=_('Customer Email')) except ( osv.except_osv, orm.except_orm ): # no read access rights -> just ignore suggested recipients because this imply modifying followers pass return recipients def message_new(self, cr, uid, msg, custom_values=None, context=None): """ Overrides mail_thread message_new that is called by the mailgateway through message_process. This override updates the document according to the email. """ if custom_values is None: custom_values = {} context = dict(context or {}, state_to='draft') defaults = { 'name': msg.get('subject') or _("No Subject"), 'email_from': msg.get('from'), 'email_cc': msg.get('cc'), 'partner_id': msg.get('author_id', False), 'user_id': False, } defaults.update(custom_values) res_id = super(project_issue, self).message_new(cr, uid, msg, custom_values=defaults, context=context) return res_id @api.cr_uid_ids_context def message_post(self, cr, uid, thread_id, body='', subject=None, type='notification', subtype=None, parent_id=False, attachments=None, context=None, content_subtype='html', **kwargs): """ Overrides mail_thread message_post so that we can set the date of last action field when a new message is posted on the issue. """ if context is None: context = {} res = super(project_issue, self).message_post(cr, uid, thread_id, body=body, subject=subject, type=type, subtype=subtype, parent_id=parent_id, attachments=attachments, context=context, content_subtype=content_subtype, **kwargs) if thread_id and subtype: self.write(cr, SUPERUSER_ID, thread_id, {'date_action_last': fields.datetime.now()}, context=context) return res
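# The _compute_day() method above turns create/open/close datetime strings into
# fractional day counts and an inactivity counter.  A minimal standalone sketch of
# that arithmetic (plain datetimes only; the resource.calendar working-hours part is
# omitted and the keys below are illustrative, not the real ORM records):
from datetime import datetime as _dt

_FMT = '%Y-%m-%d %H:%M:%S'  # same layout as DEFAULT_SERVER_DATETIME_FORMAT

def _sketch_issue_days(create_date, date_open=None, date_closed=None, now=None):
    now = now or _dt.now()
    created = _dt.strptime(create_date, _FMT)
    vals = {'day_open': 0.0, 'day_close': 0.0,
            'days_since_creation': (now - created).days}
    if date_open:
        vals['day_open'] = (_dt.strptime(date_open, _FMT) - created).total_seconds() / (24.0 * 3600)
    if date_closed:
        vals['day_close'] = (_dt.strptime(date_closed, _FMT) - created).total_seconds() / (24.0 * 3600)
    return vals

# _sketch_issue_days('2014-01-01 08:00:00', date_open='2014-01-02 20:00:00')
# -> {'day_open': 1.5, 'day_close': 0.0, 'days_since_creation': ...}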
}, help="Adds the Total Weight of all the packages in the Packages Table.", ), 'tot_del_order_weight': fields.function(_total_ord_weight_net, method=True, readonly=True, string='Total Order Weight', store=False, help="Adds the Total Weight of all the packages in the Packages Table."), 'packages_ids': fields.one2many("stock.packages", 'pick_id', 'Packages Table'), 'ship_state': fields.selection([ ('draft', 'Draft'), ('in_process', 'In Process'), ('ready_pick', 'Ready for Pickup'), ('shipped', 'Shipped'), ('delivered', 'Delivered'), ('void', 'Void'), ('hold', 'Hold'), ('cancelled', 'Cancelled') ], 'Shipping Status', readonly=True, help='The current status of the shipment'), 'trade_mark': fields.text('Trademarks AREA'), 'ship_message': fields.text('Message'), 'address_validate': fields.selection([ ('validate', 'Validate'), ('nonvalidate', 'No Validation') ], 'Address Validation', help=''' No Validation = No address validation. Validate = Fail on failed address validation. Defaults to validate. Note: Full address validation is not performed. Therefore, it is the responsibility of the Shipping Tool User to ensure the address entered is correct to avoid an address correction fee.'''), 'ship_description': fields.text('Description'), 'ship_from': fields.boolean('Ship From', help='Required if pickup location is different from the shipper\'s address..'), 'ship_from_tax_id_no': fields.char('Identification Number', size=30 , select=1), 'shipcharge': fields.float('Shipping Cost', readonly=True), 'ship_from_address': fields.many2one('res.partner', 'Ship From Address', size=30), # 'address': fields.many2one('res.partner', 'Ship From Address'),
class container_booking(osv.Model): _inherit = "container.booking" _columns = { 'approval_reason': fields.text('Approval Reason'), 'state': fields.selection([('cancel', 'Cancelled'), ('draft', 'Draft'), ('need_approval', 'Need Approval'), ('booked', 'Booked'), ('instructed', 'Instructed')], 'Status') } _defaults = { "state": 'draft', } def action_booked(self, cr, uid, ids, context=None): """ * Never inherit this method (do not do super) * """ wf_service = netsvc.LocalService("workflow") container_id = self.browse(cr, uid, ids[0], context=context) picking_line = [picking.id for picking in container_id.picking_ids] picking_pool = self.pool.get('stock.picking') credit_limit, limit_c = picking_pool.check_credit_limit( cr, uid, picking_line, context=context) overdue_limit, limit_o = picking_pool.check_overdue_limit( cr, uid, picking_line, context=context) lc_advance_check = picking_pool.lc_advance_check(cr, uid, picking_line, context=context) # print "sales_contract===================",credit_limit,overdue_limit,lc_advance_check reason = "" if not credit_limit or not overdue_limit or not lc_advance_check: if not credit_limit: reason += "The amount of total outstanding receivable + current delivery is %s,\nbut the partner credit limit is %s\n" % ( limit_c, container_id.picking_ids[0].partner_id.credit_limit) if not overdue_limit: reason += "The amount of total overdue receivable + current delivery is %s,\nbut the partner overdue credit limit is %s\n" % ( limit_o, container_id.picking_ids[0].partner_id. credit_overdue_limit) if not lc_advance_check: reason += "LC(s) or Advance(s) have not been paid yet, please check the payment(s)\n" return self.write( cr, uid, ids[0], { 'state': 'need_approval', 'approval_reason': reason, 'need_approval': True }) for picking_id in container_id.picking_ids: wf_service.trg_validate(uid, 'stock.picking', picking_id.id, 'booked', cr) # print "sales_contract2===================" return self.write(cr, uid, ids[0], { 'state': 'booked', 'approved_by': uid }) def action_booked_manager(self, cr, uid, ids, context=None): wf_service = netsvc.LocalService("workflow") container_id = self.browse(cr, uid, ids[0], context=context) for picking_id in container_id.picking_ids: wf_service.trg_validate(uid, 'stock.picking', picking_id.id, 'booked', cr) return self.write(cr, uid, ids, { 'state': 'booked', 'approved_by': uid })
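# action_booked() above runs three checks (credit limit, overdue limit, LC/advance)
# and either books the pickings or stores an approval reason.  A standalone sketch of
# the reason-string assembly only; the check results and limits are plain arguments
# here, not the real check_credit_limit/check_overdue_limit/lc_advance_check calls:
def _sketch_approval_reason(credit_ok, overdue_ok, lc_ok, limit_c=0.0, limit_o=0.0,
                            credit_limit=0.0, overdue_limit=0.0):
    reason = ""
    if not credit_ok:
        reason += ("The amount of total outstanding receivable + current delivery is %s,\n"
                   "but the partner credit limit is %s\n" % (limit_c, credit_limit))
    if not overdue_ok:
        reason += ("The amount of total overdue receivable + current delivery is %s,\n"
                   "but the partner overdue credit limit is %s\n" % (limit_o, overdue_limit))
    if not lc_ok:
        reason += "LC(s) or Advance(s) have not been paid yet, please check the payment(s)\n"
    return reason  # an empty string means the booking can proceed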
if sending: _logger.debug('ServerActionFailed success') except Exception, e: _logger.error("ServerActionFailed failed- exception [%s]" % (e)) _columns = { 'checklist_id': fields.many2one('checklist', 'Checklist', select=True), 'checklist_task_id': fields.many2one('checklist.task', 'Checklist Task', select=True), 'exception_type': fields.selection([ ('field', 'Field'), ('action', 'Action'), ], 'Type', select=True), 'res_id': fields.integer('Resource'), 'action_id': fields.many2one('ir.actions.server', 'Action', select=True), 'field_id': fields.many2one('checklist.task.field', 'Field', select=True), 'exception': fields.text('Exception'), 'stack': fields.text('Stack Trace'), 'create_date': fields.datetime('Creation Date'), } native_orm_init = orm.Model.__init__ native_orm_create = orm.Model.create native_orm_write = orm.Model.write native_orm_fields_view_get = orm.BaseModel.fields_view_get def __init__object_and_checklist(self, pool, cr): """Override __init__ method to update checklist cache""" result = native_orm_init(self, pool, cr) checklist_pool = self.pool.get('checklist')
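# The checklist module above keeps references to the native orm.Model methods
# (native_orm_init, native_orm_create, ...) and installs wrappers around them.
# A standalone sketch of that "save the original, call it, then add behaviour"
# pattern on a plain class (the class and attribute names are illustrative only):
class _SketchModel(object):
    def create(self, vals):
        return dict(vals)

_native_create = _SketchModel.create

def _create_and_refresh_cache(self, vals):
    record = _native_create(self, vals)   # call the original implementation first
    record['checklist_checked'] = True    # then layer on the extra behaviour
    return record

_SketchModel.create = _create_and_refresh_cache
# _SketchModel().create({'name': 'x'}) -> {'name': 'x', 'checklist_checked': True}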
class kg_itemview_ledger(osv.osv): _name = "kg.itemview.ledger" _description = "Item Ledger" _order = "crt_date desc" _columns = { ## Basic Info 'name': fields.text('Name', select=True), 'notes': fields.text('Note', select=True), 'state': fields.selection([('load','Load'),('draft', 'Draft'),('confirm','Waiting For Approval'),('approved','Approved'),('cancel','Cancel')], 'Status', track_visibility='onchange',states={'draft':[('readonly',False)]}), ## Module Requirement Info 'location_id': fields.many2one('stock.location', 'Location Name', domain="[('location_type','=','main'),('active','=',True),('state','in',('approved','draft'))]"), 'from_date':fields.date('From Date'), 'to_date':fields.date('To Date'), 'product_id': fields.many2one('product.product', 'Product Name', domain="[('state','=','approved')]"), 'uom_id': fields.many2one('product.uom', 'UOM', readonly=True), ## Child Tables Declaration 'line_ids' : one2many_sorted.one2many_sorted( 'ch.itemview.ledger.line', 'header_id', 'Detail view', order='sno asc', ), ## Entry Info 'company_id': fields.many2one('res.company', 'Company Name',readonly=True), 'crt_date': fields.datetime('Created Date',readonly=True), 'user_id': fields.many2one('res.users', 'Created By', readonly=True), } _defaults = { 'company_id': lambda self,cr,uid,c: self.pool.get('res.company')._company_default_get(cr, uid, 'kg.itemview.ledger', context=c), 'user_id': lambda obj, cr, uid, context: uid, 'crt_date': lambda * a: time.strftime('%Y-%m-%d'), 'state': 'load', 'name': '/', 'notes': '<table border="0"><tr><td><canvas id="myCanvas" width="20" height="20" style="border:0px solid white;"></canvas><script>var c = document.getElementById("myCanvas");var ctx = c.getContext("2d");ctx.fillStyle = "pink";ctx.fillRect(0, 0, 20, 20);</script></td> <td> <b>Opening</b></td><td> <canvas id="myCanvas1" width="20" height="20" style="border:0px solid white;"></canvas><script>var c = document.getElementById("myCanvas1");var ctx = c.getContext("2d");ctx.fillStyle = "green";ctx.fillRect(0, 0, 20, 20);</script></td><td> <b>Inward</b></td><td> <canvas id="myCanvas2" width="20" height="20" style="border:0px solid white;"></canvas><script>var c = document.getElementById("myCanvas2");var ctx = c.getContext("2d");ctx.fillStyle = "red";ctx.fillRect(0, 0, 20, 20);</script></td><td> <b>Issue</b></td><td> <canvas id="myCanvas6" width="20" height="20" style="border:0px solid white;"></canvas><script>var c = document.getElementById("myCanvas6");var ctx = c.getContext("2d");ctx.fillStyle = "blue";ctx.fillRect(0, 0, 20, 20);</script></td><td> <b>Transfer</b></td></tr></table>', } def onchange_entry_load(self,cr,uid,ids,location_id,product_id,from_date,to_date,crt_date,context=None): if from_date: frm_dt = from_date else: date_str = '2016-03-31' formatter_string = "%Y-%m-%d" frm_dt = datetime.strptime('2016-03-31', "%Y-%m-%d" ).date() if to_date: to_dt = to_date else: to_dt = crt_date if not product_id: raise osv.except_osv(_('Warning!'), _('Please Choose Product!')) if location_id: cr.execute("""select COALESCE(sum(count),0.00) as count from ( select count from (select count(*) from stock_move where location_dest_id = %s and product_id = %s and date::date between '%s'::date and '%s'::date union all select count(*) from stock_move where location_id = %s and move_type!='cons' and product_id = %s and date::date between '%s'::date and '%s'::date) as sample ) as sam"""%(location_id,product_id,frm_dt,to_dt,location_id,product_id,frm_dt,to_dt)) res_data = cr.fetchall(); if res_data[0][0] <= 0: if ids: 
cr.execute('delete from ch_itemview_ledger_line where header_id = %s'%(ids[0])) cr.execute('delete from ch_itemview_ledger_details where header_id in (select id from ch_itemview_ledger_line where header_id = %s) '%(ids[0])) else: line_ids = [] else: pass return {'value':{'line_ids':[]}} def entry_load(self,cr,uid,ids,context=None): rec = self.browse(cr,uid,ids[0]) sql = """delete from ch_itemview_ledger_line where header_id= %s"""%(ids[0]) cr.execute(sql) if rec.from_date: frm_dt = rec.from_date else: frm_dt = datetime.strptime('2016-03-31', "%Y-%m-%d" ).date() if rec.to_date: to_dt = rec.to_date else: to_dt = rec.crt_date if rec.location_id.id: cr.execute("""select COALESCE(sum(count),0.00) as count from ( select count from (select count(*) from stock_move where location_dest_id = %s and product_id = %s and date::date between '%s'::date and '%s'::date union all select count(*) from stock_move where location_id = %s and move_type!='cons' and product_id = %s and date::date between '%s'::date and '%s'::date) as sample ) as sam"""%(rec.location_id.id,rec.product_id.id,frm_dt,to_dt,rec.location_id.id,rec.product_id.id,frm_dt,to_dt)) res_data = cr.fetchall(); if res_data[0][0] <= 0: raise osv.except_osv(_('Warning !'),_('No Record Found !!')) stock_rec = self.pool.get('stock.location').browse(cr,uid,rec.location_id.id) if stock_rec: if rec.product_id.id and stock_rec.location_type == 'counter' : sub_sql = """select fn_itemview_ledger_counter(%s,%s,%s,'%s'::date,'%s'::date)"""%(ids[0],rec.product_id.id,rec.location_id.id,frm_dt,to_dt) cr.execute(sub_sql) data = cr.fetchall(); if rec.product_id.id and stock_rec.location_type == 'main' : main_sql = """select fn_itemview_ledger_main_store(%s,%s,%s,'%s'::date,'%s'::date)"""%(ids[0],rec.product_id.id,rec.location_id.id,frm_dt,to_dt) cr.execute(main_sql) data = cr.fetchall(); return True def onchange_product_id(self,cr,uid,ids,product_id,uom_id,context=None): value = {'uom_id':'','location_id':''} if product_id: pro_rec = self.pool.get('product.product').browse(cr, uid, product_id, context=context) value = {'uom_id': pro_rec.uom_id.id,'location_id': ''} else: value = {'uom_id': '','location_id': ''} return {'value': value} def _check_date(self, cr, uid, ids, context=None): for so in self.browse(cr,uid,ids): cr.execute("""SELECT CURRENT_DATE;""") data = cr.fetchall(); if not (so.from_date <= data[0][0] and so.to_date <= data[0][0]): raise osv.except_osv(_('Warning !'),_('From/To Date should be less than or equal to current date !!')) return False if so.from_date > so.to_date: raise osv.except_osv(_('Warning !'),_('From Date should be less than or equal to To Date !!')) return False return True _constraints = [ (_check_date,'From/To Date Validation !',['order_line']), ]
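# entry_load()/onchange_entry_load() above interpolate ids and dates straight into the
# SQL string.  A standalone sqlite3 sketch of the same date-range count written with
# bound parameters instead; the table layout is illustrative, and with psycopg2 the
# placeholder is %s rather than ?, with the values tuple passed to cr.execute:
import sqlite3

def _sketch_move_count(location_id, product_id, from_date, to_date):
    conn = sqlite3.connect(':memory:')
    conn.execute("CREATE TABLE stock_move (location_dest_id INTEGER, product_id INTEGER, date TEXT)")
    conn.execute("INSERT INTO stock_move VALUES (?, ?, ?)", (location_id, product_id, from_date))
    cur = conn.execute(
        "SELECT COUNT(*) FROM stock_move "
        "WHERE location_dest_id = ? AND product_id = ? AND date BETWEEN ? AND ?",
        (location_id, product_id, from_date, to_date))
    return cur.fetchone()[0]

# _sketch_move_count(1, 7, '2016-01-01', '2016-12-31') -> 1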
import datetime


class manuscrito(orm.Model):
    _name = 'res.manuscrito'
    _description = 'Manuscrito'
    _columns = {
        'autor': fields.many2one('res.partner', 'Autor', track_visibility='onchange',
                                 required=True, select=True, domain="[('author', '=', True)]"),
        'partner_id': fields.many2one('res.partner', 'Partner', ondelete='set null',
                                      track_visibility='onchange', select=True,
                                      help="Linked partner (optional). Usually created when converting the lead."),
        'titulo': fields.char('Título', size=50, required=True),
        'isbn': fields.char('ISBN', size=30, required=True),
        'formato': fields.char('Formato', size=30),
        'genero': fields.selection([('ciencia-ficcion', 'Ciencia-Ficcion'), ('novela', 'Novela'),
                                    ('poesia', 'Poesía'), ('cuento', 'Cuento'), ('historia', 'Historia'),
                                    ('miedo', 'Miedo'), ('otro', 'Otros')], 'Género', required=True),
        'email': fields.char('E-MAIL', size=20),
        'comment': fields.text('Descripción'),
        'image': fields.binary("Image", help="Select image here"),
        'date': fields.date('Date', select=1),
        'idioma': fields.selection([('cas', 'Castellano'), ('en', 'Inglés'), ('fr', 'Francés')], 'Idioma'),
        'state': fields.selection([('recibo', 'Acuse recibo'), ('eval', 'Evaluación'),
                                   ('confirmacion', 'Pendiente confirmación'), ('cancelled', 'Cancelado'),
                                   ('firma', 'Firma Contrato'), ('corregir', 'Corrección'),
                                   ('reenvio', 'Visto bueno autor'), ('envio imprenta', 'Enviado a imprenta'),
                                   ('done', 'Hecho')]),
    }

    def set_recibo(self, cr, uid, ids, context=None):
        return self.write(cr, uid, ids, {'state': 'eval'}, context=context)

    def set_evaluar(self, cr, uid, ids, context=None):
        return self.write(cr, uid, ids, {'state': 'confirmacion'}, context=context)

    def set_aceptar(self, cr, uid, ids, context=None):
        return self.write(cr, uid, ids, {'state': 'firma'}, context=context)

    def set_firmar(self, cr, uid, ids, context=None):
        return self.write(cr, uid, ids, {'state': 'corregir'}, context=context)

    def set_corregir(self, cr, uid, ids, context=None):
        return self.write(cr, uid, ids, {'state': 'reenvio'}, context=context)
class crane_equipment(osv.osv): _name = 'crane.equipment' _description = 'Equipment' def _get_image(self, cr, uid, ids, name, args, context=None): result = dict.fromkeys(ids, False) for obj in self.browse(cr, uid, ids, context=context): result[obj.id] = tools.image_get_resized_images( obj.image, avoid_resize_medium=True) return result def _set_image(self, cr, uid, id, name, value, args, context=None): return self.write(cr, uid, [id], {'image': tools.image_resize_image_big(value)}, context=context) def onchange_type(self, cr, uid, ids, type_id, feature_lines): equipment_type = self.pool.get('crane.equipment.type').browse( cr, uid, type_id) new_feature_lines = [[2, line[1], line[2]] for line in feature_lines if line[0]] for line in equipment_type.specification_ids: new_feature_lines.append( [0, 0, { 'name': line.name, 'sequence': line.sequence, }]) return { 'value': { 'feature_line_ids': new_feature_lines, } } _columns = { 'name': fields.char('Equipment Name', size=64, required=True, translate=True), 'equipment_type_id': fields.many2one('crane.equipment.type', 'Equipment Type', required=True, ondelete='restrict'), 'customer_id': fields.many2one('res.partner', 'Customer', required=True, ondelete='restrict'), 'certificate': fields.char('Certificate Number', size=64), 'image': fields.binary("Image", help="This field holds the image used as image for the asset, limited to 1024x1024px."), 'image_medium': fields.function(_get_image, fnct_inv=_set_image, string="Medium-sized image", type="binary", multi="_get_image", store={ 'crane.equipment': (lambda self, cr, uid, ids, c={}: ids, ['image'], 10), }, help="Medium-sized image of the asset. It is automatically "\ "resized as a 128x128px image, with aspect ratio preserved, "\ "only when the image exceeds one of those sizes. Use this field in form views or some kanban views."), 'image_small': fields.function(_get_image, fnct_inv=_set_image, string="Small-sized image", type="binary", multi="_get_image", store={ 'crane.equipment': (lambda self, cr, uid, ids, c={}: ids, ['image'], 10), }, help="Small-sized image of the asset. It is automatically "\ "resized as a 64x64px image, with aspect ratio preserved. "\ "Use this field anywhere a small image is required."), 'feature_line_ids': fields.one2many('crane.equipment.feature.line', 'equipment_id', 'Feature List'), 'notes': fields.text('Notes'), } def create(self, cr, uid, vals, context=None): if vals.get('certificate', '/') == '/': vals['certificate'] = self.pool.get('ir.sequence').get( cr, uid, 'crane.equipment') or '/' return super(crane_equipment, self).create(cr, uid, vals, context=context)
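# onchange_type() above rebuilds the feature one2many by emitting (2, id, _) commands
# to drop the existing lines and (0, 0, vals) commands to create new ones.  A
# standalone sketch of that command-list construction (the spec dicts are sample data):
def _sketch_rebuild_one2many(existing_line_ids, new_specs):
    commands = [(2, line_id, 0) for line_id in existing_line_ids]      # unlink old lines
    commands += [(0, 0, {'name': spec['name'], 'sequence': spec.get('sequence', 10)})
                 for spec in new_specs]                                # create new lines
    return commands

# _sketch_rebuild_one2many([4, 9], [{'name': 'Hook', 'sequence': 1}])
# -> [(2, 4, 0), (2, 9, 0), (0, 0, {'name': 'Hook', 'sequence': 1})]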
        'cc_order_amt': fields.float('Order Amt', required=True),
        'cc_number': fields.char('Credit Card Number', size=256),
        'cc_v': fields.char('Card Code Verification', size=3),
        'cc_e_d_month': fields.char('Expiration Date MM', size=32),
        'cc_e_d_year': fields.char('Expiration Date YY', size=32),
        'cc_comment': fields.char('Comment', size=128),
        'cc_auth_code': fields.char('Authorization Code', size=32),
        'cc_save_card_details': fields.boolean('Save Credit Card details'),
        'cc_ecommerce_sale': fields.boolean('Ecommerce sale'),
        'cc_p_authorize': fields.boolean('Pre-authorize'),
        'cc_charge': fields.boolean('Charge'),
        'cc_info_hide': fields.boolean('Credit Info Hide'),
        'cc_status': fields.text('Status Message'),
        'cc_details_autofill': fields.boolean('Credit Card Details Auto Fill'),
        'cc_reseller': fields.boolean('Reseller'),
        'rel_sale_order_id': fields.many2one('sale.order', 'Related Sale Order'),
        'cc_trans_id': fields.char('Transaction ID', size=128),
        'cc_bank': fields.many2one('res.bank', 'Bank'),
        'cc_details': fields.many2one('res.partner.bank', 'Bank'),
        'cc_length': fields.integer('CC Length'),
        'cc_transaction': fields.boolean('Transaction Done'),
        'key': fields.char('Encryption Key', size=1024, help="The Key used to Encrypt the Credit Card Number"),
        'cc_refund_amt': fields.float('Refund Amt'),
        'is_charged': fields.boolean('CreditCard Charged'),
        'trans_history_ids': fields.one2many('transaction.details', 'voucher_id', 'Transaction History'),
    }
    _defaults = {
class crane_work_order(osv.osv): _name = 'crane.work.order' _description = 'Work Order' _inherit = ['mail.thread'] STATE_SELECTION = [('draft', 'Draft'), ('in_process', 'In Process'), ('done', 'Done')] def _get_image(self, cr, uid, ids, name, args, context=None): result = dict.fromkeys(ids, False) for obj in self.browse(cr, uid, ids, context=context): result[obj.id] = tools.image_get_resized_images( obj.image, avoid_resize_medium=True) return result def _set_image(self, cr, uid, id, name, value, args, context=None): return self.write(cr, uid, [id], {'image': tools.image_resize_image_big(value)}, context=context) def onchange_customer(self, cr, uid, ids, customer): customer = self.pool.get('res.partner').browse(cr, uid, customer) return { 'value': { 'location': customer.contact_address, } } _columns = { 'name': fields.char('Work Order', size=64), 'state': fields.selection(STATE_SELECTION, 'Status'), 'customer_id': fields.many2one('res.partner', 'Customer', domain="[('is_company','=',1)]", ondelete='restrict'), 'location': fields.related('customer_id', 'contact_address', string="Location", type="char", readonly=True), 'date': fields.date('Date'), 'origin': fields.char('Source Document', size=64), 'po_number': fields.char('PO Number', size=64), 'reviewed': fields.boolean('Reviewed'), 'customer_person_id': fields.many2one('res.partner', 'Reviewed by Customer', domain="[('is_company','=',0)]", ondelete='restrict'), 'image': fields.binary("Image", help="This field holds the image used as image for the sign, limited to 1024x1024px."), 'image_medium': fields.function(_get_image, fnct_inv=_set_image, string="Medium-sized image", type="binary", multi="_get_image", store={ 'crane.work.order': (lambda self, cr, uid, ids, c={}: ids, ['image'], 10), }, help="Medium-sized image of the sign. It is automatically "\ "resized as a 128x128px image, with aspect ratio preserved, "\ "only when the image exceeds one of those sizes. Use this field in form views or some kanban views."), 'image_small': fields.function(_get_image, fnct_inv=_set_image, string="Small-sized image", type="binary", multi="_get_image", store={ 'crane.work.order': (lambda self, cr, uid, ids, c={}: ids, ['image'], 10), }, help="Small-sized image of the sign. It is automatically "\ "resized as a 64x64px image, with aspect ratio preserved. "\ "Use this field anywhere a small image is required."), 'task_ids': fields.one2many('crane.task', 'wo_id', 'Task'), 'task_copy_ids': fields.one2many('crane.task', 'wo_id', 'Task'), 'description': fields.text('Description'), 'company_id': fields.many2one('res.company','Company',required=True), } _defaults = { 'date': fields.date.context_today, 'state': 'draft', 'company_id': lambda self, cr, uid, c: self.pool.get('res.company'). 
_company_default_get(cr, uid, 'crane.work.order', context=c), } def confirm_order(self, cr, uid, ids, context=None): self.write(cr, uid, ids, {'state': 'in_process'}) task = self.pool.get('crane.task') task_ids = task.search(cr, uid, [('wo_id', 'in', ids)]) task.write(cr, uid, task_ids, {'state': 'in_progress'}) return True def create(self, cr, uid, vals, context=None): if vals.get('name', '/') == '/': vals['name'] = self.pool.get('ir.sequence').get( cr, uid, 'crane.work.order') or '/' return super(crane_work_order, self).create(cr, uid, vals, context=context) def send_email(self, cr, uid, ids, context=None): ir_model_data = self.pool.get('ir.model.data') try: template_id = ir_model_data.get_object_reference( cr, uid, 'crane', 'email_template_work_order')[1] except ValueError: template_id = False try: compose_form_id = ir_model_data.get_object_reference( cr, uid, 'crane', 'crane_email_compose_message_wizard_form')[1] except ValueError: compose_form_id = False ctx = dict(context) ctx.update({ 'default_model': 'crane.work.order', 'default_res_id': ids[0], 'default_use_template': bool(template_id), 'default_template_id': template_id, 'default_composition_mode': 'comment', }) return { 'type': 'ir.actions.act_window', 'view_type': 'form', 'view_mode': 'form', 'res_model': 'mail.compose.message', 'views': [(compose_form_id, 'form')], 'view_id': compose_form_id, 'target': 'new', 'context': ctx, }
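# send_email() above resolves a template and a compose form by XML id, falling back to
# False when the reference is missing, and returns an act_window on mail.compose.message.
# A standalone sketch of assembling that action dictionary; the resolved ids are passed
# in as plain arguments here rather than looked up through ir.model.data:
def _sketch_compose_action(res_id, template_id=False, compose_form_id=False):
    ctx = {
        'default_model': 'crane.work.order',
        'default_res_id': res_id,
        'default_use_template': bool(template_id),
        'default_template_id': template_id,
        'default_composition_mode': 'comment',
    }
    return {
        'type': 'ir.actions.act_window',
        'view_type': 'form',
        'view_mode': 'form',
        'res_model': 'mail.compose.message',
        'views': [(compose_form_id, 'form')],
        'view_id': compose_form_id,
        'target': 'new',
        'context': ctx,
    }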
def _save_file(self, path, b64_file): """Save a file encoded in base 64""" self._check_filestore(path) with open(path, 'w') as ofile: ofile.write(base64.b64decode(b64_file)) return True def _set_image(self, cr, uid, id, name, value, arg, context=None): image = self.browse(cr, uid, id, context=context) full_path = self._image_path(cr, uid, image, context=context) if full_path: return self._save_file(full_path, value) return self.write(cr, uid, id, {'file_db_store' : value}, context=context) _columns = { 'name':fields.char('Image Title', size=100, required=True), 'extention': fields.char('file extention', size=6), 'link':fields.boolean('Link?', help="Images can be linked from files on your file system or remote (Preferred)"), 'file_db_store':fields.binary('Image stored in database'), 'file':fields.function(_get_image, fnct_inv=_set_image, type="binary", filters='*.png,*.jpg,*.gif'), 'url':fields.char('File Location', size=250), 'comments':fields.text('Comments'), 'product_id':fields.many2one('product.product', 'Product') } _defaults = { 'link': lambda *a: False, } _sql_constraints = [('uniq_name_product_id', 'UNIQUE(product_id, name)', _('A product can have only one image with the same name'))]
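# _save_file() above writes the decoded payload with open(path, 'w'); binary content
# needs mode 'wb' on Python 3 and on Windows.  A standalone sketch of the same helper
# writing in binary mode to a temporary directory (the path handling is illustrative):
import base64
import os
import tempfile

def _sketch_save_b64(filename, b64_payload):
    path = os.path.join(tempfile.mkdtemp(), filename)
    with open(path, 'wb') as ofile:
        ofile.write(base64.b64decode(b64_payload))
    return path

# _sketch_save_b64('logo.png', base64.b64encode(b'fake-image-bytes'))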
class crane_task(osv.osv): _name = 'crane.task' _description = 'Task' STATE_SELECTION = [('new', 'New'), ('in_progress', 'In Progress'), ('done', 'Done')] TASK_TYPE_SELECTION = [('ins', 'Inspection'), ('serv', 'Service')] def _get_total(self, cr, uid, ids, name, arg, context=None): res = {} for task in self.browse(cr, uid, ids, context=context): total = 0.0 for line in task.labor_line_ids: total += line.duration res[task.id] = total return res def _get_subtype(self, cr, uid, ids, name, arg, context=None): res = {} for task in self.browse(cr, uid, ids, context=context): res[task.id] = len([ insp for insp in task.inspection_line_ids if insp.header == True ]) return res _columns = { 'name': fields.char('Task', size=64), 'state': fields.selection(STATE_SELECTION, 'Status'), 'type': fields.selection(TASK_TYPE_SELECTION, 'Type', required=True), 'wo_id': fields.many2one('crane.work.order', 'Work Order', ondelete='cascade'), 'equipment_id': fields.many2one('crane.equipment', 'Equipment', select=True, required=True, ondelete='restrict'), 'equipment_type': fields.related('equipment_id', 'equipment_type_id', type="many2one", relation="crane.equipment.type", string="Equipment Type", readonly=True), 'certificate': fields.related('equipment_id', 'certificate', type="char", string="Equipment Certificate Number", readonly=True), 'completion_date': fields.date('Completion Date'), 'result': fields.selection(INSPECTION_RESULT_SELECTION, 'Inspection Result'), 'total_labor': fields.function(_get_total, method=True, string='Total Labor h:m'), 'subtype': fields.function(_get_subtype, method=True, string='Subtype'), 'equipment_feature': fields.related('equipment_id', 'feature_line_ids', type="one2many", relation="crane.equipment.feature.line", string="Equipment Features", readonly=True), 'inspection_line_ids': fields.one2many('crane.task.inspection.line', 'task_id', 'Inspection Points'), 'labor_line_ids': fields.one2many('crane.task.labor.line', 'task_id', 'Labor'), 'part_line_ids': fields.one2many('crane.task.part.line', 'task_id', 'Parts'), 'description': fields.text('Description'), 'notes': fields.text('Notes'), 'approve_uid': fields.many2one('res.users', 'Approved by'), } _defaults = { 'state': 'new', 'completion_date': fields.date.context_today, } def onchange_equipment(self, cr, uid, ids, type, equipment_id, inspection_lines): v = {} new_inspection_lines = [[2, line[1], line[2]] for line in inspection_lines if line[0]] if equipment_id: equipment = self.pool.get('crane.equipment').browse( cr, uid, equipment_id) v['certificate'] = equipment.certificate new_feature_lines = [] for line in equipment.feature_line_ids: new_feature_lines.append( [0, 0, { 'name': line.name, 'value': line.value, }]) v['equipment_feature'] = new_feature_lines equipment_type = equipment.equipment_type_id v['equipment_type'] = equipment_type.id if type == 'ins': for line in equipment_type.inspection_ids: new_inspection_lines.append([ 0, 0, { 'name': line.name, 'header': line.header, 'sequence': line.sequence, 'point_type_id': line.point_type_id.id, } ]) v['inspection_line_ids'] = new_inspection_lines return {'value': v} def onchange_labor(self, cr, uid, ids, labor): total = 0 for labor in self.resolve_2many_commands(cr, uid, 'labor_line_ids', labor, context=None): total += labor.get('duration') return { 'value': { 'total_labor': total, } } def done_task(self, cr, uid, ids, context=None): for task in self.browse(cr, uid, ids, context=context): result = 'safe' if task.type == 'ins': safe = True for line in task.inspection_line_ids: if not 
line.header: if not line.point_value_id: raise osv.except_osv( _('Error!'), _('Before work Done, You must fill all Inspection Points!' )) else: if safe and line.point_value_id.result == 'not_safe': safe = False if not safe: result = 'not_safe' self.write( cr, uid, task.id, { 'state': 'done', 'completion_date': time.strftime('%Y-%m-%d'), 'result': result, 'approve_uid': uid }) wos_ids = set() for order in self.read(cr, uid, ids, ['wo_id'], context=context): wos_ids.add(order['wo_id'][0]) wo_ids = [] for id in wos_ids: wo_ids.append(id) wo = self.pool.get('crane.work.order') done_wo_ids = [] for order in wo.browse(cr, uid, wo_ids, context=context): done = True for task in order.task_ids: if task.state != 'done': done = False break if done: done_wo_ids.append(order.id) wo.write(cr, uid, done_wo_ids, { 'state': 'done', 'date': time.strftime('%Y-%m-%d') }) return True def create(self, cr, uid, vals, context=None): if vals.get('name', '/') == '/': vals['name'] = self.pool.get('ir.sequence').get( cr, uid, 'crane.task') or '/' return super(crane_task, self).create(cr, uid, vals, context=context)
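# done_task() above derives the inspection result from the point values and closes the
# work order once every task is done.  A standalone sketch of those two aggregations
# over plain dictionaries (the keys mirror the fields used above but carry no ORM):
def _sketch_inspection_result(inspection_lines):
    """Lines look like {'header': bool, 'result': 'safe'|'not_safe'}; only body lines count."""
    body = [line for line in inspection_lines if not line.get('header')]
    return 'not_safe' if any(line.get('result') == 'not_safe' for line in body) else 'safe'

def _sketch_work_order_done(tasks):
    """True when every task dict reports state == 'done'."""
    return all(task.get('state') == 'done' for task in tasks)

# _sketch_inspection_result([{'header': True}, {'header': False, 'result': 'safe'}]) -> 'safe'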
class account_analytic_account(osv.osv): _name = 'account.analytic.account' _inherit = ['mail.thread'] _description = 'Analytic Account' _track = { 'state': { 'analytic.mt_account_pending': lambda self, cr, uid, obj, ctx=None: obj.state == 'pending', 'analytic.mt_account_closed': lambda self, cr, uid, obj, ctx=None: obj.state == 'close', 'analytic.mt_account_opened': lambda self, cr, uid, obj, ctx=None: obj.state == 'open', }, } def _compute_level_tree(self, cr, uid, ids, child_ids, res, field_names, context=None): currency_obj = self.pool.get('res.currency') recres = {} def recursive_computation(account): result2 = res[account.id].copy() for son in account.child_ids: result = recursive_computation(son) for field in field_names: if (account.currency_id.id != son.currency_id.id) and (field != 'quantity'): result[field] = currency_obj.compute( cr, uid, son.currency_id.id, account.currency_id.id, result[field], context=context) result2[field] += result[field] return result2 for account in self.browse(cr, uid, ids, context=context): if account.id not in child_ids: continue recres[account.id] = recursive_computation(account) return recres def _debit_credit_bal_qtty(self, cr, uid, ids, fields, arg, context=None): res = {} if context is None: context = {} child_ids = tuple( self.search(cr, uid, [('parent_id', 'child_of', ids)])) for i in child_ids: res[i] = {} for n in fields: res[i][n] = 0.0 if not child_ids: return res where_date = '' where_clause_args = [tuple(child_ids)] if context.get('from_date', False): where_date += " AND l.date >= %s" where_clause_args += [context['from_date']] if context.get('to_date', False): where_date += " AND l.date <= %s" where_clause_args += [context['to_date']] cr.execute( """ SELECT a.id, sum( CASE WHEN l.amount > 0 THEN l.amount ELSE 0.0 END ) as debit, sum( CASE WHEN l.amount < 0 THEN -l.amount ELSE 0.0 END ) as credit, COALESCE(SUM(l.amount),0) AS balance, COALESCE(SUM(l.unit_amount),0) AS quantity FROM account_analytic_account a LEFT JOIN account_analytic_line l ON (a.id = l.account_id) WHERE a.id IN %s """ + where_date + """ GROUP BY a.id""", where_clause_args) for row in cr.dictfetchall(): res[row['id']] = {} for field in fields: res[row['id']][field] = row[field] return self._compute_level_tree(cr, uid, ids, child_ids, res, fields, context) def name_get(self, cr, uid, ids, context=None): res = [] if not ids: return res if isinstance(ids, (int, long)): ids = [ids] for id in ids: elmt = self.browse(cr, uid, id, context=context) res.append((id, self._get_one_full_name(elmt))) return res def _get_full_name(self, cr, uid, ids, name=None, args=None, context=None): if context == None: context = {} res = {} for elmt in self.browse(cr, uid, ids, context=context): res[elmt.id] = self._get_one_full_name(elmt) return res def _get_one_full_name(self, elmt, level=6): if level <= 0: return '...' 
if elmt.parent_id and not elmt.type == 'template': parent_path = self._get_one_full_name(elmt.parent_id, level - 1) + " / " else: parent_path = '' return parent_path + elmt.name def _child_compute(self, cr, uid, ids, name, arg, context=None): result = {} if context is None: context = {} for account in self.browse(cr, uid, ids, context=context): result[account.id] = map(lambda x: x.id, [ child for child in account.child_ids if child.state != 'template' ]) return result def _get_analytic_account(self, cr, uid, ids, context=None): company_obj = self.pool.get('res.company') analytic_obj = self.pool.get('account.analytic.account') accounts = [] for company in company_obj.browse(cr, uid, ids, context=context): accounts += analytic_obj.search(cr, uid, [('company_id', '=', company.id)]) return accounts def _set_company_currency(self, cr, uid, ids, name, value, arg, context=None): if isinstance(ids, (int, long)): ids = [ids] for account in self.browse(cr, uid, ids, context=context): if account.company_id: if account.company_id.currency_id.id != value: raise osv.except_osv( _('Error!'), _("If you set a company, the currency selected has to be the same as it's currency. \nYou can remove the company belonging, and thus change the currency, only on analytic account of type 'view'. This can be really useful for consolidation purposes of several companies charts with different currencies, for example." )) if value: return cr.execute( """update account_analytic_account set currency_id=%s where id=%s""", ( value, account.id, )) def _currency(self, cr, uid, ids, field_name, arg, context=None): result = {} for rec in self.browse(cr, uid, ids, context=context): if rec.company_id: result[rec.id] = rec.company_id.currency_id.id else: result[rec.id] = rec.currency_id.id return result _columns = { 'name': fields.char('Account/Contract Name', size=128, required=True, track_visibility='onchange'), 'complete_name': fields.function(_get_full_name, type='char', string='Full Name'), 'code': fields.char('Reference', select=True, track_visibility='onchange'), 'type': fields.selection([('view','Analytic View'), ('normal','Analytic Account'),('contract','Contract or Project'),('template','Template of Contract')], 'Type of Account', required=True, help="If you select the View Type, it means you won\'t allow to create journal entries using that account.\n"\ "The type 'Analytic account' stands for usual accounts that you only want to use in accounting.\n"\ "If you select Contract or Project, it offers you the possibility to manage the validity and the invoicing options for this account.\n"\ "The special type 'Template of Contract' allows you to define a template with default data that you can reuse easily."), 'template_id': fields.many2one('account.analytic.account', 'Template of Contract'), 'description': fields.text('Description'), 'parent_id': fields.many2one('account.analytic.account', 'Parent Analytic Account', select=2), 'child_ids': fields.one2many('account.analytic.account', 'parent_id', 'Child Accounts'), 'child_complete_ids': fields.function(_child_compute, relation='account.analytic.account', string="Account Hierarchy", type='many2many'), 'line_ids': fields.one2many('account.analytic.line', 'account_id', 'Analytic Entries'), 'balance': fields.function(_debit_credit_bal_qtty, type='float', string='Balance', multi='debit_credit_bal_qtty', digits_compute=dp.get_precision('Account')), 'debit': fields.function(_debit_credit_bal_qtty, type='float', string='Debit', multi='debit_credit_bal_qtty', 
digits_compute=dp.get_precision('Account')), 'credit': fields.function(_debit_credit_bal_qtty, type='float', string='Credit', multi='debit_credit_bal_qtty', digits_compute=dp.get_precision('Account')), 'quantity': fields.function(_debit_credit_bal_qtty, type='float', string='Quantity', multi='debit_credit_bal_qtty'), 'quantity_max': fields.float('Prepaid Service Units', help='Sets the higher limit of time to work on the contract, based on the timesheet. (for instance, number of hours in a limited support contract.)'), 'partner_id': fields.many2one('res.partner', 'Customer'), 'user_id': fields.many2one('res.users', 'Project Manager', track_visibility='onchange'), 'manager_id': fields.many2one('res.users', 'Account Manager', track_visibility='onchange'), 'date_start': fields.date('Start Date'), 'date': fields.date('End Date', select=True, track_visibility='onchange'), 'company_id': fields.many2one('res.company', 'Company', required=False), #not required because we want to allow different companies to use the same chart of account, except for leaf accounts. 'state': fields.selection([('template', 'Template'),('draft','New'),('open','In Progress'),('pending','To Renew'),('close','Closed'),('cancelled', 'Cancelled')], 'Status', required=True, track_visibility='onchange'), 'currency_id': fields.function(_currency, fnct_inv=_set_company_currency, #the currency_id field is readonly except if it's a view account and if there is no company store = { 'res.company': (_get_analytic_account, ['currency_id'], 10), }, string='Currency', type='many2one', relation='res.currency'), } def on_change_template(self, cr, uid, ids, template_id, context=None): if not template_id: return {} res = {'value': {}} template = self.browse(cr, uid, template_id, context=context) if template.date_start and template.date: from_dt = datetime.strptime(template.date_start, tools.DEFAULT_SERVER_DATE_FORMAT) to_dt = datetime.strptime(template.date, tools.DEFAULT_SERVER_DATE_FORMAT) timedelta = to_dt - from_dt res['value']['date'] = datetime.strftime( datetime.now() + timedelta, tools.DEFAULT_SERVER_DATE_FORMAT) res['value']['date_start'] = fields.date.today() res['value']['quantity_max'] = template.quantity_max res['value'][ 'parent_id'] = template.parent_id and template.parent_id.id or False res['value']['description'] = template.description return res def on_change_partner_id(self, cr, uid, ids, partner_id, name, context={}): res = {} if partner_id: partner = self.pool.get('res.partner').browse(cr, uid, partner_id, context=context) if partner.user_id: res['manager_id'] = partner.user_id.id if not name: res['name'] = _('Contract: ') + partner.name return {'value': res} def _default_company(self, cr, uid, context=None): user = self.pool.get('res.users').browse(cr, uid, uid, context=context) if user.company_id: return user.company_id.id return self.pool.get('res.company').search( cr, uid, [('parent_id', '=', False)])[0] def _get_default_currency(self, cr, uid, context=None): user = self.pool.get('res.users').browse(cr, uid, uid, context=context) return user.company_id.currency_id.id _defaults = { 'type': 'normal', 'company_id': _default_company, 'code': lambda obj, cr, uid, context: obj.pool.get('ir.sequence').get( cr, uid, 'account.analytic.account'), 'state': 'open', 'user_id': lambda self, cr, uid, ctx: uid, 'partner_id': lambda self, cr, uid, ctx: ctx.get('partner_id', False), 'date_start': lambda *a: time.strftime('%Y-%m-%d'), 'currency_id': _get_default_currency, } def check_recursion(self, cr, uid, ids, context=None, 
parent=None): return super(account_analytic_account, self)._check_recursion(cr, uid, ids, context=context, parent=parent) _order = 'name asc' _constraints = [ (check_recursion, 'Error! You cannot create recursive analytic accounts.', ['parent_id']), ] def name_create(self, cr, uid, name, context=None): raise osv.except_osv(_('Warning'), _("Quick account creation disallowed.")) def copy(self, cr, uid, id, default=None, context=None): if not default: default = {} analytic = self.browse(cr, uid, id, context=context) default.update(code=False, line_ids=[], name=_("%s (copy)") % (analytic['name'])) return super(account_analytic_account, self).copy(cr, uid, id, default, context=context) def on_change_company(self, cr, uid, id, company_id): if not company_id: return {} currency = self.pool.get('res.company').read( cr, uid, [company_id], ['currency_id'])[0]['currency_id'] return {'value': {'currency_id': currency}} def on_change_parent(self, cr, uid, id, parent_id): if not parent_id: return {} parent = self.read(cr, uid, [parent_id], ['partner_id', 'code'])[0] if parent['partner_id']: partner = parent['partner_id'][0] else: partner = False res = {'value': {}} if partner: res['value']['partner_id'] = partner return res def name_search(self, cr, uid, name, args=None, operator='ilike', context=None, limit=100): if not args: args = [] if context is None: context = {} if context.get('current_model') == 'project.project': project_obj = self.pool.get("account.analytic.account") project_ids = project_obj.search(cr, uid, args) return self.name_get(cr, uid, project_ids, context=context) if name: account_ids = self.search(cr, uid, [('code', '=', name)] + args, limit=limit, context=context) if not account_ids: dom = [] for name2 in name.split('/'): name = name2.strip() account_ids = self.search(cr, uid, dom + [('name', 'ilike', name)] + args, limit=limit, context=context) if not account_ids: break dom = [('parent_id', 'in', account_ids)] else: account_ids = self.search(cr, uid, args, limit=limit, context=context) return self.name_get(cr, uid, account_ids, context=context)
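# name_search() above first tries an exact code match and then walks a "Parent / Child"
# style name, narrowing each step to children of the previous matches.  A standalone
# sketch of that narrowing over plain dicts (ids, names and parent links are sample data):
def _sketch_hierarchical_name_search(accounts, name):
    """accounts: list of {'id', 'name', 'parent_id'}; name: e.g. 'Projects / Internal'."""
    matches = None
    for part in (p.strip() for p in name.split('/')):
        candidates = [a for a in accounts
                      if part.lower() in a['name'].lower()
                      and (matches is None or a['parent_id'] in matches)]
        matches = set(a['id'] for a in candidates)
        if not matches:
            break
    return sorted(matches or [])

# _sketch_hierarchical_name_search(
#     [{'id': 1, 'name': 'Projects', 'parent_id': None},
#      {'id': 2, 'name': 'Internal', 'parent_id': 1}], 'Projects / Internal') -> [2]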
class patent_ihce(osv.Model): _name = 'patent.ihce' def _get_consultoria(self, cr, uid, ids, field, arg, context=None): res = {} patente = False lista_ids = "" for row in self.browse(cr, uid, ids, context=context): for ro in row.courses_ids: lista_ids = str(ro.id) + "," if lista_ids: lista_ids = lista_ids[:-1] cr.execute( "SELECT courses_id FROM date_courses WHERE id IN (" + str(lista_ids) + ") AND state = 'done' AND type = 'consultoria' GROUP BY courses_id;" ) cours = cr.fetchall() for ro in cours: data = self.pool.get('courses.ihce').browse( cr, uid, ro[0], context=context) if data.state_cours == True: patente = True break res[row.id] = patente return res _columns = { 'name': fields.char("Registro", size=200), 'description': fields.char("Descripción", size=250), 'date': fields.date("Fecha de registro"), 'company_id': fields.many2one('companies.ihce', 'Beneficiario'), 'letter_company': fields.boolean("Carta de parte de Empresa"), 'letter_company_note': fields.char("Notas"), 'send_impi': fields.boolean("Enviar al IMPI"), 'send_impi_note': fields.char("Notas"), 'mail_company': fields.boolean("Reenviar correo a empresa"), 'mail_company_note': fields.char("Notas"), 'request_patent': fields.boolean("Solicitud de patente"), 'request_patent_note': fields.char("Notas"), 'ingress_impi': fields.boolean("Ingreso al IMPI"), 'ingress_impi_note': fields.char("Notas"), 'notes': fields.text("Observación General"), 'state': fields.selection([ ('draft', 'Nuevo'), ('process', 'Proceso'), ('out_time', 'Fuera de tiempo'), ('espera', 'En espera'), ('done', 'Por aprobar/Rechazar'), ('approved', 'Aprobado'), ('not_approved', 'Rechazado'), ('abandoned', 'Abandonado'), ], 'Estado', select=True), 'cron_id': fields.many2one('ir.cron', "Tarea en proceso"), 'task': fields.selection([ ('1', 'Carta de parte de Empresa'), ('2', 'Enviar al IMPI'), ('3', 'Reenviar correo a empresa'), ('4', 'Solicitud de patente'), ('5', 'Ingreso al IMPI'), ('6', 'Por aprobar/rechazar'), ], 'Etapa', select=True), 'time_task': fields.integer("Tiempo"), 'date_bool': fields.date("Fecha de tarea"), 'percent': fields.integer("Porcentaje"), 'consultoria': fields.function(_get_consultoria, type='boolean', string="Consultoría"), 'servicio': fields.boolean("Servicio"), 'type_patent': fields.many2one('type.patent', 'Tipo de patente'), 'impi_send': fields.selection([ ('aceptado', 'Carta aceptada en el IMPI'), ('rechazado', 'Carta rechazada en el IMPI'), ], 'Status IMPI', select=True), 'date_next_task': fields.date("Fecha de próxima etapa"), 'crm_id': fields.many2one('crm.project.ihce', "Proyecto crm"), 'task_id': fields.integer("Tarea crm"), 'courses_ids': fields.one2many('date.courses', 'patent_id', "Relacion cursos"), 'user_id': fields.many2one( 'res.users', "Responsable", help="Es el usuario al que se le contarán los indicadores."), 'option': fields.selection([('ihce', 'IHCE'), ('emprered', 'Emprered')], 'Oficina'), 'area': fields.many2one('responsible.area', "Departamento"), 'emprered': fields.many2one('emprereds', 'Emprered'), 'change_user': fields.boolean("Cambiar Usuario"), } _defaults = { 'name': 'P', 'date': lambda *a: time.strftime('%Y-%m-%d'), 'state': 'draft', 'time_task': 0, 'percent': 0, 'consultoria': False, 'servicio': False, 'user_id': lambda obj, cr, uid, context: uid, 'option': lambda self, cr, uid, obj, ctx=None: self.pool['res.users'].browse( cr, uid, uid).option, 'area': lambda self, cr, uid, obj, ctx=None: self.pool['res.users'].browse( cr, uid, uid).area.id, 'emprered': lambda self, cr, uid, obj, ctx=None: 
self.pool['res.users'].browse( cr, uid, uid).emprered.id, 'change_user': False, } _order = "date_next_task asc" def create(self, cr, uid, vals, context=None): # Genera referencia de registro (nombre) if vals.get('name', 'P') == 'P': new_seq = self.pool.get('ir.sequence').get(cr, uid, 'patent.ihce') vals.update({'name': new_seq}) return super(patent_ihce, self).create(cr, uid, vals, context) def write(self, cr, uid, ids, vals, context=None): return super(patent_ihce, self).write(cr, uid, ids, vals, context=context) def crm_tareas(self, cr, uid, ids, context=None): row = self.browse(cr, uid, ids[0], context=context) fecha_actual = datetime.now() time_row = self.time_development(cr, uid, ids, context=context) if row.task == False: tarea = "Carta de empresa" dias = time_row.letter_company fecha_siguiente = fecha_actual + timedelta(days=dias) elif row.task == '1': tarea = "Enviar al IMPI" dias = time_row.send_impi fecha_siguiente = fecha_actual + timedelta(days=dias) elif row.task == '2': tarea = "Reenviar correo a empresa" dias = time_row.mail_company fecha_siguiente = fecha_actual + timedelta(days=dias) elif row.task == '3': tarea = "Solicitud de patente" dias = time_row.request_patent fecha_siguiente = fecha_actual + timedelta(days=dias) elif row.task == '4': tarea = "Ingreso al IMPI" dias = time_row.ingress_impi fecha_siguiente = fecha_actual + timedelta(days=dias) elif row.task == '5': tarea = "Por aprobar/rechazar" fecha_siguiente = fecha_actual if row.task_id != 0: self.pool.get('crm.task').terminar(cr, uid, [row.task_id], context=context) datos = { 'name': tarea, 'date_compromise': fecha_siguiente, 'user': uid, 'type_task': 'automatico', 'crm_id': row.crm_id.id, } task_id = self.pool.get('crm.task').create(cr, uid, datos, context=context) self.pool.get('crm.task').comenzar(cr, uid, [task_id], context=context) return task_id def approved(self, cr, uid, ids, context=None): fecha_actual = datetime.now() row = self.browse(cr, uid, ids[0], context=context) #~ Agregamos actividad al historial de la empresa self.pool.get('crm.ihce').create( cr, uid, { 'company_id': row.company_id.id, 'date': fecha_actual, 'name': 'El servicio de Patente ha sido aprobado', 'user': uid, 'date_compromise': fecha_actual, 'state': 'done' }, context=context) self.pool.get('ir.cron').write(cr, uid, [row.cron_id.id], {'active': False}) self.pool.get('ir.cron').unlink(cr, uid, [row.cron_id.id]) self.write(cr, uid, row.id, { 'state': 'approved', 'task': False }, context=context) #~ Si el proyecto es aprobado, finalizamos el proyecto en el crm, solo si todos sus tareas estan finalizadas ban = True crm_data = self.pool.get('crm.project.ihce').browse(cr, uid, row.crm_id.id, context=context) for task in crm_data.task_ids: crm_task = self.pool.get('crm.task').browse(cr, uid, task.id, context=context) if crm_task.state != 'd-done' or crm_task.state != 'f-cancel': ban = False break if ban: self.pool.get('crm.project.ihce').write(cr, uid, [row.crm_id.id], {'state': 'd-done'}) def not_approved(self, cr, uid, ids, context=None): fecha_actual = datetime.now() row = self.browse(cr, uid, ids[0], context=context) #~ Agregamos actividad al historial de la empresa self.pool.get('crm.ihce').create( cr, uid, { 'company_id': row.company_id.id, 'date': fecha_actual, 'name': 'El servicio de Patente ha sido rechazado', 'user': uid, 'date_compromise': fecha_actual, 'state': 'done' }, context=context) self.pool.get('ir.cron').write(cr, uid, [row.cron_id.id], {'active': False}) self.pool.get('ir.cron').unlink(cr, uid, [row.cron_id.id]) 
self.write(cr, uid, row.id, { 'state': 'not_approved', 'task': False }, context=context) def _check_homework(self, cr, uid, ids, context=None): row = self.browse(cr, uid, ids[0], context=context) fecha_creacion = datetime.strptime(row.date_bool, "%Y-%m-%d").date() fecha_ejecucion = datetime.now().date() fecha = fecha_ejecucion - fecha_creacion dias = fecha.days #~ titulo = "Aviso CRM" if row.state == 'process': if row.task == '1': if dias >= row.time_task and not row.letter_company: self.write(cr, uid, row.id, {'state': 'out_time'}, context=context) elif row.task == '2': if dias >= row.time_task and not row.send_impi: self.write(cr, uid, row.id, {'state': 'out_time'}, context=context) elif row.task == '3': if dias >= row.time_task and not row.mail_company: self.write(cr, uid, row.id, {'state': 'out_time'}, context=context) elif row.task == '4': if dias >= row.time_task and not row.request_patent: self.write(cr, uid, row.id, {'state': 'out_time'}, context=context) else: if row.task == '5': if dias >= row.time_task and not row.ingress_impi: self.write(cr, uid, row.id, {'state': 'out_time'}, context=context) return True def time_development(self, cr, uid, ids, context=None): #~ Get the ids of the time settings time_ids = self.pool.get('time.development').search(cr, uid, []) time_row = self.pool.get('time.development').browse(cr, uid, time_ids) return time_row def start_process(self, cr, uid, ids, context=None): #~ As soon as the process starts there is a time limit to complete the first task. row = self.browse(cr, uid, ids[0], context=context) time_row = self.time_development(cr, uid, ids, context=context) fecha_actual = datetime.now().date() dias = time_row.letter_company fecha_siguiente = fecha_actual + timedelta(days=dias) res = { 'name': 'Process : ' + row.name, 'model': 'patent.ihce', 'args': repr([ids]), 'function': '_check_homework', 'priority': 5, 'interval_number': 1, 'interval_type': 'work_days', 'user_id': uid, 'numbercall': -1, 'doall': False, 'active': True } #~ Add an activity to the company history fecha_crm = datetime.now() self.pool.get('crm.ihce').create( cr, uid, { 'company_id': row.company_id.id, 'date': fecha_crm, 'name': 'Se ha iniciado el servicio de Patente', 'user': uid, 'date_compromise': fecha_crm, 'state': 'done' }, context=context) #~ Add a project to the user's crm valores = { 'name': row.description.encode('utf-8'), 'company_id': row.company_id.id, 'state': 'a-draft', 'type_crm': 'automatico', } crm_id = self.pool.get('crm.project.ihce').create(cr, uid, valores, context=context) self.pool.get('crm.project.ihce').comenzar(cr, uid, [crm_id], context=context) id_cron = self.pool.get('ir.cron').create(cr, uid, res) self.write(cr, uid, row.id, { 'cron_id': id_cron, 'state': 'process', 'time_task': time_row.letter_company, 'date_bool': fecha_actual, 'date_next_task': fecha_siguiente, 'crm_id': crm_id }, context=context) #~ Create the task task = self.crm_tareas(cr, uid, ids, context=context) self.write(cr, uid, row.id, { 'task_id': task, 'task': '1' }, context=context) return True def re_start(self, cr, uid, ids, context=None): row = self.browse(cr, uid, ids[0], context=context) time_row = self.time_development(cr, uid, ids, context=context) time_time = 0 fecha_actual = datetime.now().date() if row.task == '1': time_time = time_row.letter_company dias = time_row.letter_company fecha_siguiente = fecha_actual + timedelta(days=dias) elif row.task == '2': time_time = time_row.send_impi dias = time_row.send_impi fecha_siguiente = fecha_actual + timedelta(days=dias)
elif row.task == '3': time_time = time_row.mail_company dias = time_row.mail_company fecha_siguiente = fecha_actual + timedelta(days=dias) elif row.task == '4': time_time = time_row.request_patent dias = time_row.request_patent fecha_siguiente = fecha_actual + timedelta(days=dias) else: if row.task == '5': time_time = time_row.ingress_impi dias = time_row.ingress_impi fecha_siguiente = fecha_actual + timedelta(days=dias) if row.state == 'out_time': self.write(cr, uid, row.id, { 'state': 'process', 'time_task': time_time, 'date_bool': fecha_actual, 'date_next_task': fecha_siguiente }, context=context) #~ Reabrimos el tiempo de la tarea en el crm self.pool.get('crm.task').write(cr, uid, [row.task_id], { 'state': 'b-progress', 'date_compromise': fecha_siguiente }) return True def onchange_task2(self, cr, uid, ids, valor, company_id, context=None): time_row = self.time_development(cr, uid, ids, context=context) fecha_actual = datetime.now().date() fecha = datetime.now() dias = time_row.send_impi fecha_siguiente = fecha_actual + timedelta(days=dias) if ids: if valor == True: row = self.browse(cr, uid, ids[0], context=context) percent = row.percent + time_row.letter_company_percent task_id = self.crm_tareas(cr, uid, ids, context=context) self.write(cr, uid, ids[0], { 'task_id': task_id, 'letter_company': valor, 'task': '2', 'time_task': time_row.send_impi, 'date_bool': fecha_actual, 'percent': percent, 'date_next_task': fecha_siguiente }, context=context) #~ Agregamos actividad al historial de la empresa self.pool.get('crm.ihce').create( cr, uid, { 'company_id': company_id, 'date': fecha, 'name': 'Carta de parte de empresa de Patente', 'user_id': uid, 'date_compromise': fecha, 'state': 'done' }, context=context) else: if valor == False: dias = time_row.letter_company fecha_siguiente = fecha_actual + timedelta(days=dias) row = self.browse(cr, uid, ids[0], context=context) percent = row.percent - time_row.letter_company_percent self.write(cr, uid, ids[0], { 'letter_company': valor, 'task': '1', 'time_task': time_row.letter_company, 'date_bool': fecha_actual, 'percent': percent, 'date_next_task': fecha_siguiente }, context=context) return True def onchange_task3(self, cr, uid, ids, valor, company_id, context=None): time_row = self.time_development(cr, uid, ids, context=context) fecha_actual = datetime.now().date() fecha = datetime.now() dias = time_row.mail_company fecha_siguiente = fecha_actual + timedelta(days=dias) if ids: if valor == True: row = self.browse(cr, uid, ids[0], context=context) percent = row.percent + time_row.send_impi_percent task_id = self.crm_tareas(cr, uid, ids, context=context) self.write(cr, uid, ids[0], { 'task_id': task_id, 'send_impi': valor, 'task': '3', 'time_task': time_row.mail_company, 'date_bool': fecha_actual, 'percent': percent, 'date_next_task': fecha_siguiente }, context=context) #~ Agregamos actividad al historial de la empresa self.pool.get('crm.ihce').create( cr, uid, { 'company_id': company_id, 'date': fecha, 'name': 'Envio al IMPI de Patente', 'user_id': uid, 'date_compromise': fecha, 'state': 'done' }, context=context) else: if valor == False: dias = time_row.send_impi fecha_siguiente = fecha_actual + timedelta(days=dias) row = self.browse(cr, uid, ids[0], context=context) percent = row.percent - time_row.send_impi_percent self.write(cr, uid, ids[0], { 'send_impi': valor, 'task': '2', 'time_task': time_row.send_impi, 'date_bool': fecha_actual, 'percent': percent, 'date_next_task': fecha_siguiente }, context=context) return True def onchange_task4(self, cr, 
uid, ids, valor, company_id, context=None): time_row = self.time_development(cr, uid, ids, context=context) fecha_actual = datetime.now().date() fecha = datetime.now() dias = time_row.request_patent fecha_siguiente = fecha_actual + timedelta(days=dias) if ids: if valor == True: row = self.browse(cr, uid, ids[0], context=context) percent = row.percent + time_row.mail_company_percent task_id = self.crm_tareas(cr, uid, ids, context=context) self.write(cr, uid, ids[0], { 'task_id': task_id, 'mail_company': valor, 'task': '4', 'time_task': time_row.request_patent, 'date_bool': fecha_actual, 'percent': percent, 'date_next_task': fecha_siguiente }, context=context) #~ Agregamos actividad al historial de la empresa self.pool.get('crm.ihce').create( cr, uid, { 'company_id': company_id, 'date': fecha, 'name': 'Reenvio de correo de Patente', 'user_id': uid, 'date_compromise': fecha, 'state': 'done' }, context=context) else: if valor == False: dias = time_row.mail_company fecha_siguiente = fecha_actual + timedelta(days=dias) row = self.browse(cr, uid, ids[0], context=context) percent = row.percent - time_row.mail_company_percent self.write(cr, uid, ids[0], { 'mail_company': valor, 'task': '3', 'time_task': time_row.mail_company, 'date_bool': fecha_actual, 'percent': percent, 'date_next_task': fecha_siguiente }, context=context) return True def onchange_task5(self, cr, uid, ids, valor, company_id, context=None): time_row = self.time_development(cr, uid, ids, context=context) fecha_actual = datetime.now().date() fecha = datetime.now() dias = time_row.ingress_impi fecha_siguiente = fecha_actual + timedelta(days=dias) if ids: if valor: row = self.browse(cr, uid, ids[0], context=context) percent = row.percent + time_row.request_patent_percent task_id = self.crm_tareas(cr, uid, ids, context=context) self.write(cr, uid, ids[0], { 'task_id': task_id, 'request_patent': valor, 'state': 'espera', 'task': '5', 'time_task': time_row.ingress_impi, 'date_bool': fecha_actual, 'percent': percent, 'date_next_task': fecha_siguiente }, context=context) #~ Agregamos actividad al historial de la empresa self.pool.get('crm.ihce').create( cr, uid, { 'company_id': company_id, 'date': fecha, 'name': 'Solicitud de patente', 'user_id': uid, 'date_compromise': fecha, 'state': 'done' }, context=context) else: if valor == False: dias = time_row.request_patent fecha_siguiente = fecha_actual + timedelta(days=dias) row = self.browse(cr, uid, ids[0], context=context) percent = row.percent - time_row.request_patent_percent self.write(cr, uid, ids[0], { 'request_patent': valor, 'state': 'process', 'task': '4', 'time_task': time_row.request_patent, 'date_bool': fecha_actual, 'percent': percent, 'date_next_task': fecha_siguiente }, context=context) return True def onchange_task6(self, cr, uid, ids, valor, company_id, opcion, context=None): time_row = self.time_development(cr, uid, ids, context=context) fecha_actual = datetime.now().date() fecha = datetime.now() anio = datetime.now().year result = {} result['value'] = {} if ids: if valor: row = self.browse(cr, uid, ids[0], context=context) percent = row.percent + time_row.ingress_impi_percent self.pool.get('crm.task').terminar(cr, uid, [row.task_id], context=context) self.write(cr, uid, ids[0], { 'ingress_impi': valor, 'state': 'done', 'percent': percent, 'servicio': True, 'task': '6', 'date_next_task': False }, context=context) self.pool.get('ir.cron').write(cr, uid, [row.cron_id.id], {'active': False}) #~ Agregamos actividad al historial de la empresa self.pool.get('crm.ihce').create( cr, 
uid, { 'company_id': company_id, 'date': fecha, 'name': 'Ingreso al IMPI de Patente', 'user_id': uid, 'date_compromise': fecha, 'state': 'done' }, context=context) else: if valor == False: dias = time_row.ingress_impi fecha_siguiente = fecha_actual + timedelta(days=dias) row = self.browse(cr, uid, ids[0], context=context) percent = row.percent - time_row.ingress_impi_percent self.write(cr, uid, ids[0], { 'ingress_impi': valor, 'state': 'espera', 'task': '5', 'time_task': time_row.ingress_impi, 'date_bool': fecha_actual, 'percent': percent, 'date_next_task': fecha_siguiente }, context=context) return True def abandoned(self, cr, uid, ids, context=None): fecha_actual = datetime.now() row = self.browse(cr, uid, ids[0], context=context) self.write(cr, uid, row.id, {'state': 'abandoned'}, context=context) #~ Si el proyecto es abandonado, abandonamos tambien el proyecto en el crm self.pool.get('crm.project.ihce').abandonar(cr, uid, [row.crm_id.id], context=context) #~ Agregamos actividad al historial de la empresa self.pool.get('crm.ihce').create( cr, uid, { 'company_id': row.company_id.id, 'date': fecha_actual, 'name': 'El servicio de FDA ha sido abandonado', 'user': uid, 'date_compromise': fecha_actual, 'state': 'done' }, context=context) self.pool.get('ir.cron').write(cr, uid, [row.cron_id.id], {'active': False}) self.pool.get('ir.cron').unlink(cr, uid, [row.cron_id.id]) def re_start_all(self, cr, uid, ids, context=None): fecha_actual = datetime.now() row = self.browse(cr, uid, ids[0], context=context) #~ Agregamos actividad al historial de la empresa self.pool.get('crm.ihce').create( cr, uid, { 'company_id': row.company_id.id, 'date': fecha_actual, 'name': 'El servicio de FDA ha sido reiniciado', 'user': uid, 'date_compromise': fecha_actual, 'state': 'done' }, context=context) for ro in row.courses_ids: self.pool.get('date.courses').write(cr, uid, [ro.id], {'patent_id': False}, context=context) self.pool.get('crm.project.ihce').abandonar(cr, uid, [row.crm_id.id], context=context) self.write( cr, uid, row.id, { 'task_id': 0, 'date': fecha_actual, 'state': 'draft', 'consultoria': False, 'servicio': False, 'letter_company': False, 'send_impi': False, 'mail_company': False, 'request_patent': False, 'ingress_impi': False, 'percent': 0, 'task': 0, 'date_next_task': False }) def return_task(self, cr, uid, ids, context=None): time_row = self.time_development(cr, uid, ids, context=context) fecha_actual = datetime.now().date() dias = time_row.letter_company fecha_siguiente = fecha_actual + timedelta(days=dias) percent = time_row.letter_company_percent self.write( cr, uid, ids[0], { 'send_impi': False, 'letter_company': False, 'percent': percent, 'task': '1', 'time_task': time_row.letter_company, 'date_bool': fecha_actual, 'date_next_task': fecha_siguiente }) def unlink(self, cr, uid, ids, context=None): data = self.read(cr, uid, ids, ['state'], context=context) unlink_ids = [] for row in data: if row['state'] in ['draft']: unlink_ids.append(row['id']) else: raise osv.except_osv(_('Acción Invalida!'), _('No puede eliminar el registro.!')) return super(patent_ihce, self).unlink(cr, uid, unlink_ids, context=context) def onchange_user(self, cr, uid, ids, user_id, context=None): result = {} result['value'] = {} if user_id: row = self.pool.get('res.users').browse(cr, uid, user_id) result['value'].update({ 'option': row.option, 'area': row.area.id, 'emprered': row.emprered.id }) return result
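# Illustrative sketch (not part of the module): the ir.cron job created by start_process()
# calls patent_ihce._check_homework(), whose rule reduces to "days elapsed since the stage
# started (date_bool) versus the days allotted for the stage (time_task), unless the stage
# checkbox is already ticked".  The helper below restates that rule as a pure function so
# it can be reasoned about outside the ORM; the name is_stage_overdue and its signature
# are hypothetical, not part of the original model.
from datetime import date, datetime


def is_stage_overdue(date_bool, time_task, stage_done, today=None):
    """Return True when the current stage should be flagged 'out_time'.

    date_bool  -- 'YYYY-MM-DD' string, the day the stage was (re)started
    time_task  -- integer, days allotted for the stage (from time.development)
    stage_done -- boolean stage flag (letter_company, send_impi, ...)
    """
    if stage_done:
        return False
    started = datetime.strptime(date_bool, "%Y-%m-%d").date()
    elapsed = ((today or date.today()) - started).days
    return elapsed >= time_task

# Example: a stage allotted 5 days, started 6 days earlier and still unchecked, is overdue:
#   is_stage_overdue("2014-01-01", 5, False, today=date(2014, 1, 7))  -> True
#   is_stage_overdue("2014-01-01", 5, True,  today=date(2014, 1, 7))  -> False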
class kg_gate_pass_line(osv.osv): _name = "kg.gate.pass.line" _description = "Gate Pass Line" _columns = { ## Basic Info 'gate_id': fields.many2one('kg.gate.pass', 'Gate Pass', ondelete='cascade'), ## Module Requirement Fields 'product_id': fields.many2one('product.product', 'Item Name', domain=[('state', 'not in', ('reject', 'cancel'))]), 'brand_id': fields.many2one( 'kg.brand.master', 'Brand Name', domain= "[('product_ids','in',(product_id)),('state','in',('draft','confirmed','approved'))]" ), 'moc_id': fields.many2one( 'ch.brandmoc.rate.details', 'MOC', domain= "[('brand_id','=',brand_id),('header_id.product_id','=',product_id),('header_id.state','in',('draft','confirmed','approved'))]" ), 'uom': fields.many2one('product.uom', 'UOM'), 'qty': fields.float('Quantity'), 'indent_qty': fields.float('Indent Qty'), 'grn_pending_qty': fields.float('GRN pending Qty'), 'so_pending_qty': fields.float('SO pending Qty'), 'note': fields.text('Remarks'), 'si_line_id': fields.many2one('kg.service.indent.line', 'Service Indent Line'), 'group_flag': fields.boolean('Group By'), 'ser_no': fields.char('Serial No', size=128), 'so_flag': fields.boolean('SO Flag'), 'serial_no': fields.many2one('stock.production.lot', 'Serial No', domain="[('product_id','=',product_id)]"), 'mode': fields.selection([('direct', 'Direct'), ('frm_indent', 'From Indent')], 'Mode'), 'remark_id': fields.many2one('kg.rejection.master', 'Remarks'), 'entry_mode': fields.selection([('auto', 'Auto'), ('manual', 'Manual')], 'Entry Mode', readonly=True), } _defaults = { 'entry_mode': 'manual', } def onchange_uom(self, cr, uid, ids, product_id): if product_id: pro_rec = self.pool.get('product.product').browse( cr, uid, product_id) uom = pro_rec.uom_po_id.id return {'value': {'uom': uom}} else: return {'value': {'uom': False}} def onchange_pending_qty(self, cr, uid, ids, qty, grn_pending_qty, so_pending_qty): value = {'grn_pending_qty': qty, 'so_pending_qty': qty} return {'value': value} def default_get(self, cr, uid, fields, context=None): return super(kg_gate_pass_line, self).default_get(cr, uid, fields, context=context)
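# Illustrative sketch (not part of the module): OpenERP 7 on_change handlers such as
# kg_gate_pass_line.onchange_pending_qty() answer the client with a dict whose 'value'
# key maps field names to the values the form should adopt.  The stand-alone function
# below mimics that contract for the pending-quantity fields so the payload shape is
# easy to see; it is a plain function under assumed inputs, not bound to any model.
def sketch_onchange_pending_qty(qty):
    # Mirror the entered quantity into both pending-quantity fields, i.e. the same
    # value dict the real handler hands back to the web client.
    return {'value': {'grn_pending_qty': qty, 'so_pending_qty': qty}}

# Example:
#   sketch_onchange_pending_qty(4.0)
#   -> {'value': {'grn_pending_qty': 4.0, 'so_pending_qty': 4.0}}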
class partner_vat_list(osv.osv_memory): """ Partner Vat Listing """ _name = "partner.vat.list" _columns = { 'partner_ids': fields.many2many( 'vat.listing.clients', 'vat_partner_rel', 'vat_id', 'partner_id', 'Clients', help= 'You can remove clients/partners which you do not want to show in xml file' ), 'name': fields.char('File Name', size=32), 'file_save': fields.binary('Save File', readonly=True), 'comments': fields.text('Comments'), } def _get_partners(self, cr, uid, context=None): return context.get('partner_ids', []) _defaults = { 'partner_ids': _get_partners, } def _get_datas(self, cr, uid, ids, context=None): obj_vat_lclient = self.pool.get('vat.listing.clients') datas = [] data = self.read(cr, uid, ids)[0] for partner in data['partner_ids']: if isinstance(partner, list) and partner: datas.append(partner[2]) else: client_data = obj_vat_lclient.read(cr, uid, partner, context=context) datas.append(client_data) client_datas = [] seq = 0 sum_tax = 0.00 sum_turnover = 0.00 amount_data = {} for line in datas: if not line: continue seq += 1 sum_tax += line['vat_amount'] sum_turnover += line['turnover'] vat = line['vat'].replace(' ', '').upper() amount_data = { 'seq': str(seq), 'vat': vat, 'only_vat': vat[2:], 'turnover': '%.2f' % line['turnover'], 'vat_amount': '%.2f' % line['vat_amount'], 'sum_tax': '%.2f' % sum_tax, 'sum_turnover': '%.2f' % sum_turnover, 'partner_name': line['name'], } client_datas += [amount_data] return client_datas def create_xml(self, cr, uid, ids, context=None): obj_sequence = self.pool.get('ir.sequence') obj_users = self.pool.get('res.users') obj_partner = self.pool.get('res.partner') obj_model_data = self.pool.get('ir.model.data') seq_declarantnum = obj_sequence.get(cr, uid, 'declarantnum') obj_cmpny = obj_users.browse(cr, uid, uid, context=context).company_id company_vat = obj_cmpny.partner_id.vat if not company_vat: raise osv.except_osv( _('Insufficient Data!'), _('No VAT number associated with the company.')) company_vat = company_vat.replace(' ', '').upper() SenderId = company_vat[2:] issued_by = company_vat[:2] seq_declarantnum = obj_sequence.get(cr, uid, 'declarantnum') dnum = company_vat[2:] + seq_declarantnum[-4:] street = city = country = '' addr = obj_partner.address_get(cr, uid, [obj_cmpny.partner_id.id], ['invoice']) if addr.get('invoice', False): ads = obj_partner.browse(cr, uid, [addr['invoice']], context=context)[0] phone = ads.phone and ads.phone.replace(' ', '') or '' email = ads.email or '' name = ads.name or '' city = ads.city or '' zip = obj_partner.browse(cr, uid, ads.id, context=context).zip or '' if not city: city = '' if ads.street: street = ads.street if ads.street2: street += ' ' + ads.street2 if ads.country_id: country = ads.country_id.code data = self.read(cr, uid, ids)[0] sender_date = time.strftime('%Y-%m-%d') comp_name = obj_cmpny.name if not email: raise osv.except_osv( _('Insufficient Data!'), _('No email address associated with the company.')) if not phone: raise osv.except_osv(_('Insufficient Data!'), _('No phone associated with the company.')) annual_listing_data = { 'issued_by': issued_by, 'company_vat': company_vat, 'comp_name': comp_name, 'street': street, 'zip': zip, 'city': city, 'country': country, 'email': email, 'phone': phone, 'SenderId': SenderId, 'period': context['year'], 'comments': data['comments'] or '' } data_file = """<?xml version="1.0" encoding="ISO-8859-1"?> <ns2:ClientListingConsignment xmlns="http://www.minfin.fgov.be/InputCommon" xmlns:ns2="http://www.minfin.fgov.be/ClientListingConsignment" 
ClientListingsNbr="1"> <ns2:Representative> <RepresentativeID identificationType="NVAT" issuedBy="%(issued_by)s">%(SenderId)s</RepresentativeID> <Name>%(comp_name)s</Name> <Street>%(street)s</Street> <PostCode>%(zip)s</PostCode> <City>%(city)s</City>""" if annual_listing_data['country']: data_file += "\n\t\t<CountryCode>%(country)s</CountryCode>" data_file += """ <EmailAddress>%(email)s</EmailAddress> <Phone>%(phone)s</Phone> </ns2:Representative>""" data_file = data_file % annual_listing_data data_comp = """ <ns2:Declarant> <VATNumber>%(SenderId)s</VATNumber> <Name>%(comp_name)s</Name> <Street>%(street)s</Street> <PostCode>%(zip)s</PostCode> <City>%(city)s</City> <CountryCode>%(country)s</CountryCode> <EmailAddress>%(email)s</EmailAddress> <Phone>%(phone)s</Phone> </ns2:Declarant> <ns2:Period>%(period)s</ns2:Period> """ % annual_listing_data # Turnover and Farmer tags are not included client_datas = self._get_datas(cr, uid, ids, context=context) if not client_datas: raise osv.except_osv(_('Data Insufficient!'), _('No data available for the client.')) data_client_info = '' for amount_data in client_datas: data_client_info += """ <ns2:Client SequenceNumber="%(seq)s"> <ns2:CompanyVATNumber issuedBy="BE">%(only_vat)s</ns2:CompanyVATNumber> <ns2:TurnOver>%(turnover)s</ns2:TurnOver> <ns2:VATAmount>%(vat_amount)s</ns2:VATAmount> </ns2:Client>""" % amount_data amount_data_begin = client_datas[-1] amount_data_begin.update({'dnum': dnum}) data_begin = """ <ns2:ClientListing SequenceNumber="1" ClientsNbr="%(seq)s" DeclarantReference="%(dnum)s" TurnOverSum="%(sum_turnover)s" VATAmountSum="%(sum_tax)s"> """ % amount_data_begin data_end = """ <ns2:Comment>%(comments)s</ns2:Comment> </ns2:ClientListing> </ns2:ClientListingConsignment> """ % annual_listing_data data_file += data_begin + data_comp + data_client_info + data_end file_save = base64.encodestring(data_file.encode('utf8')) self.write(cr, uid, ids, { 'file_save': file_save, 'name': 'vat_list.xml' }, context=context) model_data_ids = obj_model_data.search( cr, uid, [('model', '=', 'ir.ui.view'), ('name', '=', 'view_vat_listing_result')]) resource_id = obj_model_data.read(cr, uid, model_data_ids, fields=['res_id'])[0]['res_id'] return { 'name': _('XML File has been Created'), 'res_id': ids[0], 'view_type': 'form', 'view_mode': 'form', 'res_model': 'partner.vat.list', 'views': [(resource_id, 'form')], 'context': context, 'type': 'ir.actions.act_window', 'target': 'new', } def print_vatlist(self, cr, uid, ids, context=None): if context is None: context = {} obj_vat_lclient = self.pool.get('vat.listing.clients') datas = {'ids': []} datas['model'] = 'res.company' datas['year'] = context['year'] datas['limit_amount'] = context['limit_amount'] datas['client_datas'] = self._get_datas(cr, uid, ids, context=context) if not datas['client_datas']: raise osv.except_osv(_('Error!'), _('No record to print.')) return { 'type': 'ir.actions.report.xml', 'report_name': 'partner.vat.listing.print', 'datas': datas, }
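# Illustrative sketch (not part of the wizard): partner_vat_list.create_xml() assembles the
# Belgian client-listing XML with plain %(...)s string templates and then stores the result
# on the wizard as a base64-encoded binary ('file_save').  The helpers below show only that
# template-substitution and encoding step for one <ns2:Client> element; the element layout
# is copied from the wizard, while the function names and sample values are invented for
# the example.
import base64


def render_client_element(client):
    """client: dict with 'seq', 'only_vat', 'turnover' and 'vat_amount' string values."""
    template = (
        '<ns2:Client SequenceNumber="%(seq)s">\n'
        '  <ns2:CompanyVATNumber issuedBy="BE">%(only_vat)s</ns2:CompanyVATNumber>\n'
        '  <ns2:TurnOver>%(turnover)s</ns2:TurnOver>\n'
        '  <ns2:VATAmount>%(vat_amount)s</ns2:VATAmount>\n'
        '</ns2:Client>'
    )
    return template % client


def encode_for_binary_field(xml_text):
    # Same idea as base64.encodestring(data_file.encode('utf8')) in the wizard, written
    # with b64encode so the sketch also runs on current Python versions.
    return base64.b64encode(xml_text.encode('utf-8'))

# Example:
#   xml = render_client_element({'seq': '1', 'only_vat': '0477472701',
#                                'turnover': '1000.00', 'vat_amount': '210.00'})
#   file_save = encode_for_binary_field(xml)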
class mail_mail(osv.Model): """ Model holding RFC2822 email messages to send. This model also provides facilities to queue and send new email messages. """ _name = 'mail.mail' _description = 'Outgoing Mails' _inherits = {'mail.message': 'mail_message_id'} _order = 'id desc' _columns = { 'mail_message_id': fields.many2one('mail.message', 'Message', required=True, ondelete='cascade'), 'state': fields.selection([ ('outgoing', 'Outgoing'), ('sent', 'Sent'), ('received', 'Received'), ('exception', 'Delivery Failed'), ('cancel', 'Cancelled'), ], 'Status', readonly=True), 'auto_delete': fields.boolean( 'Auto Delete', help="Permanently delete this email after sending it, to save space" ), 'references': fields.text( 'References', help='Message references, such as identifiers of previous messages', readonly=1), 'email_to': fields.text('To', help='Message recipients (emails)'), 'recipient_ids': fields.many2many('res.partner', string='To (Partners)'), 'email_cc': fields.char('Cc', help='Carbon copy message recipients'), 'body_html': fields.text('Rich-text Contents', help="Rich-text/HTML message"), # Auto-detected based on create() - if 'mail_message_id' was passed then this mail is a notification # and during unlink() we will not cascade delete the parent and its attachments 'notification': fields.boolean( 'Is Notification', help= 'Mail has been created to notify people of an existing mail.message' ), } _defaults = { 'state': 'outgoing', } def default_get(self, cr, uid, fields, context=None): # protection for `default_type` values leaking from menu action context (e.g. for invoices) # To remove when automatic context propagation is removed in web client if context and context.get('default_type') and context.get( 'default_type' ) not in self._all_columns['type'].column.selection: context = dict(context, default_type=None) return super(mail_mail, self).default_get(cr, uid, fields, context=context) def create(self, cr, uid, values, context=None): # notification field: if not set, set if mail comes from an existing mail.message if 'notification' not in values and values.get('mail_message_id'): values['notification'] = True return super(mail_mail, self).create(cr, uid, values, context=context) def unlink(self, cr, uid, ids, context=None): # cascade-delete the parent message for all mails that are not created for a notification ids_to_cascade = self.search(cr, uid, [('notification', '=', False), ('id', 'in', ids)]) parent_msg_ids = [ m.mail_message_id.id for m in self.browse(cr, uid, ids_to_cascade, context=context) ] res = super(mail_mail, self).unlink(cr, uid, ids, context=context) self.pool.get('mail.message').unlink(cr, uid, parent_msg_ids, context=context) return res def mark_outgoing(self, cr, uid, ids, context=None): return self.write(cr, uid, ids, {'state': 'outgoing'}, context=context) def cancel(self, cr, uid, ids, context=None): return self.write(cr, uid, ids, {'state': 'cancel'}, context=context) def process_email_queue(self, cr, uid, ids=None, context=None): """Send immediately queued messages, committing after each message is sent - this is not transactional and should not be called during another transaction! :param list ids: optional list of emails ids to send. If passed no search is performed, and these ids are used instead. :param dict context: if a 'filters' key is present in context, this value will be used as an additional filter to further restrict the outgoing messages to send (by default all 'outgoing' messages are sent). 
""" if context is None: context = {} if not ids: filters = [('state', '=', 'outgoing')] if 'filters' in context: filters.extend(context['filters']) ids = self.search(cr, uid, filters, context=context) res = None try: # Force auto-commit - this is meant to be called by # the scheduler, and we can't allow rolling back the status # of previously sent emails! res = self.send(cr, uid, ids, auto_commit=True, context=context) except Exception: _logger.exception("Failed processing mail queue") return res def _postprocess_sent_message(self, cr, uid, mail, context=None, mail_sent=True): """Perform any post-processing necessary after sending ``mail`` successfully, including deleting it completely along with its attachment if the ``auto_delete`` flag of the mail was set. Overridden by subclasses for extra post-processing behaviors. :param browse_record mail: the mail that was just sent :return: True """ if mail_sent and mail.auto_delete: # done with SUPERUSER_ID to avoid giving large unlink access rights self.unlink(cr, SUPERUSER_ID, [mail.id], context=context) return True #------------------------------------------------------ # mail_mail formatting, tools and send mechanism #------------------------------------------------------ def _get_partner_access_link(self, cr, uid, mail, partner=None, context=None): """Generate URLs for links in mails: partner has access (is user): link to action_mail_redirect action that will redirect to doc or Inbox """ if context is None: context = {} if partner and partner.user_ids: base_url = self.pool.get('ir.config_parameter').get_param( cr, uid, 'web.base.url') # the parameters to encode for the query and fragment part of url query = {'db': cr.dbname} fragment = { 'login': partner.user_ids[0].login, 'action': 'mail.action_mail_redirect', } if mail.notification: fragment['message_id'] = mail.mail_message_id.id elif mail.model and mail.res_id: fragment.update(model=mail.model, res_id=mail.res_id) url = urljoin( base_url, "/web?%s#%s" % (urlencode(query), urlencode(fragment))) return _( """<span class='oe_mail_footer_access'><small>about <a style='color:inherit' href="%s">%s %s</a></small></span>""" ) % (url, context.get('model_name', ''), mail.record_name) else: return None def send_get_mail_subject(self, cr, uid, mail, force=False, partner=None, context=None): """If subject is void, set the subject as 'Re: <Resource>' or 'Re: <mail.parent_id.subject>' :param boolean force: force the subject replacement """ if (force or not mail.subject) and mail.record_name: return 'Re: %s' % (mail.record_name) elif (force or not mail.subject) and mail.parent_id and mail.parent_id.subject: return 'Re: %s' % (mail.parent_id.subject) return mail.subject def send_get_mail_body(self, cr, uid, mail, partner=None, context=None): """Return a specific ir_email body. 
The main purpose of this method is to be inherited to add custom content depending on some module.""" body = mail.body_html # generate footer link = self._get_partner_access_link(cr, uid, mail, partner, context=context) if link: body = tools.append_content_to_html(body, link, plaintext=False, container_tag='div') return body def send_get_mail_to(self, cr, uid, mail, partner=None, context=None): """Forge the email_to with the following heuristic: - if 'partner' and mail is a notification on a document: followers (Followers of 'Doc' <email>) - elif 'partner', no notificatoin or no doc: recipient specific (Partner Name <email>) - else fallback on mail.email_to splitting """ if partner and mail.notification and mail.record_name: sanitized_record_name = re.sub(r'[^\w+.]+', '-', mail.record_name) email_to = [ _('"Followers of %s" <%s>') % (sanitized_record_name, partner.email) ] elif partner: email_to = ['%s <%s>' % (partner.name, partner.email)] else: email_to = tools.email_split(mail.email_to) return email_to def send_get_email_dict(self, cr, uid, mail, partner=None, context=None): """Return a dictionary for specific email values, depending on a partner, or generic to the whole recipients given by mail.email_to. :param browse_record mail: mail.mail browse_record :param browse_record partner: specific recipient partner """ body = self.send_get_mail_body(cr, uid, mail, partner=partner, context=context) body_alternative = tools.html2plaintext(body) return { 'body': body, 'body_alternative': body_alternative, 'subject': self.send_get_mail_subject(cr, uid, mail, partner=partner, context=context), 'email_to': self.send_get_mail_to(cr, uid, mail, partner=partner, context=context), } def send(self, cr, uid, ids, auto_commit=False, raise_exception=False, context=None): """ Sends the selected emails immediately, ignoring their current state (mails that have already been sent should not be passed unless they should actually be re-sent). Emails successfully delivered are marked as 'sent', and those that fail to be deliver are marked as 'exception', and the corresponding error mail is output in the server logs. 
:param bool auto_commit: whether to force a commit of the mail status after sending each mail (meant only for scheduler processing); should never be True during normal transactions (default: False) :param bool raise_exception: whether to raise an exception if the email sending process has failed :return: True """ if context is None: context = {} ir_mail_server = self.pool.get('ir.mail_server') for mail in self.browse(cr, SUPERUSER_ID, ids, context=context): try: # TDE note: remove me when model_id field is present on mail.message - done here to avoid doing it multiple times in the sub method if mail.model: model_id = self.pool['ir.model'].search( cr, SUPERUSER_ID, [('model', '=', mail.model)], context=context)[0] model = self.pool['ir.model'].browse(cr, SUPERUSER_ID, model_id, context=context) else: model = None if model: context['model_name'] = model.name # handle attachments attachments = [] for attach in mail.attachment_ids: attachments.append( (attach.datas_fname, base64.b64decode(attach.datas))) # specific behavior to customize the send email for notified partners email_list = [] if mail.email_to: email_list.append( self.send_get_email_dict(cr, uid, mail, context=context)) for partner in mail.recipient_ids: email_list.append( self.send_get_email_dict(cr, uid, mail, partner=partner, context=context)) # headers headers = {} bounce_alias = self.pool['ir.config_parameter'].get_param( cr, uid, "mail.bounce.alias", context=context) catchall_domain = self.pool['ir.config_parameter'].get_param( cr, uid, "mail.catchall.domain", context=context) if bounce_alias and catchall_domain: if mail.model and mail.res_id: headers['Return-Path'] = '%s-%d-%s-%d@%s' % ( bounce_alias, mail.id, mail.model, mail.res_id, catchall_domain) else: headers['Return-Path'] = '%s-%d@%s' % ( bounce_alias, mail.id, catchall_domain) # build an RFC2822 email.message.Message object and send it without queuing res = None for email in email_list: msg = ir_mail_server.build_email( email_from=mail.email_from, email_to=email.get('email_to'), subject=email.get('subject'), body=email.get('body'), body_alternative=email.get('body_alternative'), email_cc=tools.email_split(mail.email_cc), reply_to=mail.reply_to, attachments=attachments, message_id=mail.message_id, references=mail.references, object_id=mail.res_id and ('%s-%s' % (mail.res_id, mail.model)), subtype='html', subtype_alternative='plain', headers=headers) res = ir_mail_server.send_email( cr, uid, msg, mail_server_id=mail.mail_server_id.id, context=context) if res: mail.write({'state': 'sent', 'message_id': res}) mail_sent = True else: mail.write({'state': 'exception'}) mail_sent = False # /!\ can't use mail.state here, as mail.refresh() will cause an error # see revid:[email protected] in 6.1 self._postprocess_sent_message(cr, uid, mail, context=context, mail_sent=mail_sent) except Exception as e: _logger.exception('failed sending mail.mail %s', mail.id) mail.write({'state': 'exception'}) self._postprocess_sent_message(cr, uid, mail, context=context, mail_sent=False) if raise_exception: if isinstance(e, AssertionError): # get the args of the original error, wrap into a value and throw a MailDeliveryException # that is an except_orm, with name and value as arguments value = '. '.join(e.args) raise MailDeliveryException(_("Mail Delivery Failed"), value) raise if auto_commit is True: cr.commit() return True
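# Illustrative sketch (not part of the model): mail_mail.send() builds a VERP-style
# Return-Path header from the 'mail.bounce.alias' and 'mail.catchall.domain' system
# parameters so bounces can be routed back to the originating mail and record:
# 'alias-mailid-model-resid@domain', or 'alias-mailid@domain' when the mail is not
# attached to a document.  The helper below reproduces only that formatting rule;
# its name and signature are invented for the example.
def build_return_path(bounce_alias, catchall_domain, mail_id, model=None, res_id=None):
    if not (bounce_alias and catchall_domain):
        return None  # send() simply leaves the header out in this case
    if model and res_id:
        return '%s-%d-%s-%d@%s' % (bounce_alias, mail_id, model, res_id, catchall_domain)
    return '%s-%d@%s' % (bounce_alias, mail_id, catchall_domain)

# Example:
#   build_return_path('bounce', 'example.com', 42, 'res.partner', 7)
#   -> 'bounce-42-res.partner-7@example.com'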
class hr_expense_expense(osv.osv): def copy(self, cr, uid, id, default=None, context=None): if context is None: context = {} if not default: default = {} default.update({ 'voucher_id': False, 'date_confirm': False, 'date_valid': False, 'user_valid': False }) return super(hr_expense_expense, self).copy(cr, uid, id, default, context=context) def _amount(self, cr, uid, ids, field_name, arg, context=None): res = {} for expense in self.browse(cr, uid, ids, context=context): total = 0.0 for line in expense.line_ids: total += line.unit_amount * line.unit_quantity res[expense.id] = total return res def _get_currency(self, cr, uid, context=None): user = self.pool.get('res.users').browse(cr, uid, [uid], context=context)[0] if user.company_id: return user.company_id.currency_id.id else: return self.pool.get('res.currency').search(cr, uid, [('rate', '=', 1.0)], context=context)[0] _name = "hr.expense.expense" _inherit = ['mail.thread'] _description = "Expense" _order = "id desc" _track = { 'state': { 'hr_expense.mt_expense_approved': lambda self, cr, uid, obj, ctx=None: obj['state'] == 'accepted', 'hr_expense.mt_expense_refused': lambda self, cr, uid, obj, ctx=None: obj['state'] == 'cancelled', 'hr_expense.mt_expense_confirmed': lambda self, cr, uid, obj, ctx=None: obj['state'] == 'confirm', }, } _columns = { 'name': fields.char('Description', size=128, required=True, readonly=True, states={ 'draft': [('readonly', False)], 'confirm': [('readonly', False)] }), 'id': fields.integer('Sheet ID', readonly=True), 'date': fields.date('Date', select=True, readonly=True, states={ 'draft': [('readonly', False)], 'confirm': [('readonly', False)] }), 'journal_id': fields.many2one('account.journal', 'Force Journal', help="The journal used when the expense is done."), 'employee_id': fields.many2one('hr.employee', "Employee", required=True, readonly=True, states={ 'draft': [('readonly', False)], 'confirm': [('readonly', False)] }), 'user_id': fields.many2one('res.users', 'User', required=True), 'date_confirm': fields.date( 'Confirmation Date', select=True, help= "Date of the confirmation of the sheet expense. It's filled when the button Confirm is pressed." ), 'date_valid': fields.date( 'Validation Date', select=True, help= "Date of the acceptation of the sheet expense. It's filled when the button Accept is pressed." 
), 'user_valid': fields.many2one('res.users', 'Validation By', readonly=True, states={ 'draft': [('readonly', False)], 'confirm': [('readonly', False)] }), 'account_move_id': fields.many2one('account.move', 'Ledger Posting'), 'line_ids': fields.one2many('hr.expense.line', 'expense_id', 'Expense Lines', readonly=True, states={'draft': [('readonly', False)]}), 'note': fields.text('Note'), 'amount': fields.function(_amount, string='Total Amount', digits_compute=dp.get_precision('Account')), 'voucher_id': fields.many2one('account.voucher', "Employee's Receipt"), 'currency_id': fields.many2one('res.currency', 'Currency', required=True, readonly=True, states={ 'draft': [('readonly', False)], 'confirm': [('readonly', False)] }), 'department_id': fields.many2one('hr.department', 'Department', readonly=True, states={ 'draft': [('readonly', False)], 'confirm': [('readonly', False)] }), 'company_id': fields.many2one('res.company', 'Company', required=True), 'state': fields.selection( [ ('draft', 'New'), ('cancelled', 'Refused'), ('confirm', 'Waiting Approval'), ('accepted', 'Approved'), ('done', 'Done'), ], 'Status', readonly=True, track_visibility='onchange', help= 'When the expense request is created the status is \'Draft\'.\n It is confirmed by the user and request is sent to admin, the status is \'Waiting Confirmation\'.\ \nIf the admin accepts it, the status is \'Accepted\'.\n If a receipt is made for the expense request, the status is \'Done\'.' ), } _defaults = { 'company_id': lambda s, cr, uid, c: s.pool.get('res.company')._company_default_get( cr, uid, 'hr.employee', context=c), 'date': fields.date.context_today, 'state': 'draft', 'employee_id': _employee_get, 'user_id': lambda cr, uid, id, c={}: id, 'currency_id': _get_currency, } def unlink(self, cr, uid, ids, context=None): for rec in self.browse(cr, uid, ids, context=context): if rec.state != 'draft': raise osv.except_osv(_('Warning!'), _('You can only delete draft expenses!')) return super(hr_expense_expense, self).unlink(cr, uid, ids, context) def onchange_currency_id(self, cr, uid, ids, currency_id=False, company_id=False, context=None): res = {'value': {'journal_id': False}} journal_ids = self.pool.get('account.journal').search( cr, uid, [('type', '=', 'purchase'), ('currency', '=', currency_id), ('company_id', '=', company_id)], context=context) if journal_ids: res['value']['journal_id'] = journal_ids[0] return res def onchange_employee_id(self, cr, uid, ids, employee_id, context=None): emp_obj = self.pool.get('hr.employee') department_id = False company_id = False if employee_id: employee = emp_obj.browse(cr, uid, employee_id, context=context) department_id = employee.department_id.id company_id = employee.company_id.id return { 'value': { 'department_id': department_id, 'company_id': company_id } } def expense_confirm(self, cr, uid, ids, context=None): for expense in self.browse(cr, uid, ids): if expense.employee_id and expense.employee_id.parent_id.user_id: self.message_subscribe_users( cr, uid, [expense.id], user_ids=[expense.employee_id.parent_id.user_id.id]) return self.write(cr, uid, ids, { 'state': 'confirm', 'date_confirm': time.strftime('%Y-%m-%d') }, context=context) def expense_accept(self, cr, uid, ids, context=None): return self.write(cr, uid, ids, { 'state': 'accepted', 'date_valid': time.strftime('%Y-%m-%d'), 'user_valid': uid }, context=context) def expense_canceled(self, cr, uid, ids, context=None): return self.write(cr, uid, ids, {'state': 'cancelled'}, context=context) def account_move_get(self, cr, uid, 
expense_id, context=None): ''' This method prepare the creation of the account move related to the given expense. :param expense_id: Id of voucher for which we are creating account_move. :return: mapping between fieldname and value of account move to create :rtype: dict ''' journal_obj = self.pool.get('account.journal') expense = self.browse(cr, uid, expense_id, context=context) company_id = expense.company_id.id date = expense.date_confirm ref = expense.name journal_id = False if expense.journal_id: journal_id = expense.journal_id.id else: journal_id = journal_obj.search(cr, uid, [('type', '=', 'purchase'), ('company_id', '=', company_id)]) if not journal_id: raise osv.except_osv( _('Error!'), _("No expense journal found. Please make sure you have a journal with type 'purchase' configured." )) journal_id = journal_id[0] return self.pool.get('account.move').account_move_prepare( cr, uid, journal_id, date=date, ref=ref, company_id=company_id, context=context) def line_get_convert(self, cr, uid, x, part, date, context=None): partner_id = self.pool.get('res.partner')._find_accounting_partner( part).id return { 'date_maturity': x.get('date_maturity', False), 'partner_id': partner_id, 'name': x['name'][:64], 'date': date, 'debit': x['price'] > 0 and x['price'], 'credit': x['price'] < 0 and -x['price'], 'account_id': x['account_id'], 'analytic_lines': x.get('analytic_lines', False), 'amount_currency': x['price'] > 0 and abs(x.get('amount_currency', False)) or -abs(x.get('amount_currency', False)), 'currency_id': x.get('currency_id', False), 'tax_code_id': x.get('tax_code_id', False), 'tax_amount': x.get('tax_amount', False), 'ref': x.get('ref', False), 'quantity': x.get('quantity', 1.00), 'product_id': x.get('product_id', False), 'product_uom_id': x.get('uos_id', False), 'analytic_account_id': x.get('account_analytic_id', False), } def compute_expense_totals(self, cr, uid, exp, company_currency, ref, account_move_lines, context=None): ''' internal method used for computation of total amount of an expense in the company currency and in the expense currency, given the account_move_lines that will be created. 
It also do some small transformations at these account_move_lines (for multi-currency purposes) :param account_move_lines: list of dict :rtype: tuple of 3 elements (a, b ,c) a: total in company currency b: total in hr.expense currency c: account_move_lines potentially modified ''' cur_obj = self.pool.get('res.currency') if context is None: context = {} context.update({'date': exp.date_confirm or time.strftime('%Y-%m-%d')}) total = 0.0 total_currency = 0.0 for i in account_move_lines: if exp.currency_id.id != company_currency: i['currency_id'] = exp.currency_id.id i['amount_currency'] = i['price'] i['price'] = cur_obj.compute(cr, uid, exp.currency_id.id, company_currency, i['price'], context=context) else: i['amount_currency'] = False i['currency_id'] = False total -= i['price'] total_currency -= i['amount_currency'] or i['price'] return total, total_currency, account_move_lines def action_receipt_create(self, cr, uid, ids, context=None): ''' main function that is called when trying to create the accounting entries related to an expense ''' move_obj = self.pool.get('account.move') for exp in self.browse(cr, uid, ids, context=context): if not exp.employee_id.address_home_id: raise osv.except_osv( _('Error!'), _('The employee must have a home address.')) if not exp.employee_id.address_home_id.property_account_payable.id: raise osv.except_osv( _('Error!'), _('The employee must have a payable account set on his home address.' )) company_currency = exp.company_id.currency_id.id diff_currency_p = exp.currency_id.id <> company_currency #create the move that will contain the accounting entries move_id = move_obj.create(cr, uid, self.account_move_get(cr, uid, exp.id, context=context), context=context) #one account.move.line per expense line (+taxes..) eml = self.move_line_get(cr, uid, exp.id, context=context) #create one more move line, a counterline for the total on payable account total, total_currency, eml = self.compute_expense_totals( cr, uid, exp, company_currency, exp.name, eml, context=context) acc = exp.employee_id.address_home_id.property_account_payable.id eml.append({ 'type': 'dest', 'name': '/', 'price': total, 'account_id': acc, 'date_maturity': exp.date_confirm, 'amount_currency': diff_currency_p and total_currency or False, 'currency_id': diff_currency_p and exp.currency_id.id or False, 'ref': exp.name }) #convert eml into an osv-valid format lines = map( lambda x: (0, 0, self.line_get_convert(cr, uid, x, exp.employee_id.address_home_id, exp.date_confirm, context=context)), eml) move_obj.write(cr, uid, [move_id], {'line_id': lines}, context=context) self.write(cr, uid, ids, { 'account_move_id': move_id, 'state': 'done' }, context=context) return True def move_line_get(self, cr, uid, expense_id, context=None): res = [] tax_obj = self.pool.get('account.tax') cur_obj = self.pool.get('res.currency') if context is None: context = {} exp = self.browse(cr, uid, expense_id, context=context) company_currency = exp.company_id.currency_id.id for line in exp.line_ids: mres = self.move_line_get_item(cr, uid, line, context) if not mres: continue res.append(mres) tax_code_found = False #Calculate tax according to default tax on product taxes = [] #Taken from product_id_onchange in account.invoice if line.product_id: fposition_id = False fpos_obj = self.pool.get('account.fiscal.position') fpos = fposition_id and fpos_obj.browse( cr, uid, fposition_id, context=context) or False product = line.product_id taxes = product.supplier_taxes_id #If taxes are not related to the product, maybe they are in the 
account if not taxes: a = product.property_account_expense.id #Why is not there a check here? if not a: a = product.categ_id.property_account_expense_categ.id a = fpos_obj.map_account(cr, uid, fpos, a) taxes = a and self.pool.get('account.account').browse( cr, uid, a, context=context).tax_ids or False tax_id = fpos_obj.map_tax(cr, uid, fpos, taxes) if not taxes: continue #Calculating tax on the line and creating move? for tax in tax_obj.compute_all(cr, uid, taxes, line.unit_amount, line.unit_quantity, line.product_id, exp.user_id.partner_id)['taxes']: tax_code_id = tax['base_code_id'] tax_amount = line.total_amount * tax['base_sign'] if tax_code_found: if not tax_code_id: continue res.append(self.move_line_get_item(cr, uid, line, context)) res[-1]['price'] = 0.0 res[-1]['account_analytic_id'] = False elif not tax_code_id: continue tax_code_found = True res[-1]['tax_code_id'] = tax_code_id res[-1]['tax_amount'] = cur_obj.compute( cr, uid, exp.currency_id.id, company_currency, tax_amount, context={'date': exp.date_confirm}) #Will create the tax here as we don't have the access assoc_tax = { 'type': 'tax', 'name': tax['name'], 'price_unit': tax['price_unit'], 'quantity': 1, 'price': tax['amount'] * tax['base_sign'] or 0.0, 'account_id': tax['account_collected_id'], 'tax_code_id': tax['tax_code_id'], 'tax_amount': tax['amount'] * tax['base_sign'], } res.append(assoc_tax) return res def move_line_get_item(self, cr, uid, line, context=None): company = line.expense_id.company_id property_obj = self.pool.get('ir.property') if line.product_id: acc = line.product_id.property_account_expense if not acc: acc = line.product_id.categ_id.property_account_expense_categ if not acc: raise osv.except_osv( _('Error!'), _('No purchase account found for the product %s (or for his category), please configure one.' ) % (line.product_id.name)) else: acc = property_obj.get(cr, uid, 'property_account_expense_categ', 'product.category', context={'force_company': company.id}) if not acc: raise osv.except_osv( _('Error!'), _('Please configure Default Expense account for Product purchase: `property_account_expense_categ`.' )) return { 'type': 'src', 'name': line.name.split('\n')[0][:64], 'price_unit': line.unit_amount, 'quantity': line.unit_quantity, 'price': line.total_amount, 'account_id': acc.id, 'product_id': line.product_id.id, 'uos_id': line.uom_id.id, 'account_analytic_id': line.analytic_account.id, } def action_view_receipt(self, cr, uid, ids, context=None): ''' This function returns an action that display existing account.move of given expense ids. ''' assert len( ids ) == 1, 'This option should only be used for a single id at a time' expense = self.browse(cr, uid, ids[0], context=context) assert expense.account_move_id try: dummy, view_id = self.pool.get( 'ir.model.data').get_object_reference(cr, uid, 'account', 'view_move_form') except ValueError, e: view_id = False result = { 'name': _('Expense Account Move'), 'view_type': 'form', 'view_mode': 'form', 'view_id': view_id, 'res_model': 'account.move', 'type': 'ir.actions.act_window', 'nodestroy': True, 'target': 'current', 'res_id': expense.account_move_id.id, } return result
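# Illustrative sketch (not part of the model): hr_expense_expense.line_get_convert() turns
# the signed 'price' of each prepared move line into the debit/credit pair of the journal
# item (positive prices become debits, negative prices become credits), and the payable
# counterpart appended in action_receipt_create() carries the negated total computed by
# compute_expense_totals().  The helper below isolates that sign convention; its name is
# hypothetical.
def price_to_debit_credit(price):
    """Return (debit, credit) for a signed line amount, following the same sign
    convention as line_get_convert()."""
    debit = price if price > 0 else 0.0
    credit = -price if price < 0 else 0.0
    return debit, credit

# Example:
#   price_to_debit_credit(120.0)   -> (120.0, 0.0)   # expense line
#   price_to_debit_credit(-120.0)  -> (0.0, 120.0)   # payable counterpart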
class res_partner(osv.osv): def fields_view_get(self, cr, uid, view_id=None, view_type=None, context=None, toolbar=False, submenu=False): res = super(res_partner, self).fields_view_get(cr, uid, view_id=view_id, view_type=view_type, context=context, toolbar=toolbar, submenu=submenu) context = context or {} if view_type == 'form' and context.get('Followupfirst'): doc = etree.XML(res['arch'], parser=None, base_url=None) first_node = doc.xpath("//page[@name='followup_tab']") root = first_node[0].getparent() root.insert(0, first_node[0]) res['arch'] = etree.tostring(doc, encoding="utf-8") return res def _get_latest(self, cr, uid, ids, names, arg, context=None, company_id=None): res = {} if company_id == None: company = self.pool.get('res.users').browse( cr, uid, uid, context=context).company_id else: company = self.pool.get('res.company').browse(cr, uid, company_id, context=context) for partner in self.browse(cr, uid, ids, context=context): amls = partner.unreconciled_aml_ids latest_date = False latest_level = False latest_days = False latest_level_without_lit = False latest_days_without_lit = False for aml in amls: if (aml.company_id == company) and (aml.followup_line_id != False) and ( not latest_days or latest_days < aml.followup_line_id.delay): latest_days = aml.followup_line_id.delay latest_level = aml.followup_line_id.id if (aml.company_id == company) and (not latest_date or latest_date < aml.followup_date): latest_date = aml.followup_date if (aml.company_id == company) and (aml.blocked == False) and ( aml.followup_line_id != False and (not latest_days_without_lit or latest_days_without_lit < aml.followup_line_id.delay)): latest_days_without_lit = aml.followup_line_id.delay latest_level_without_lit = aml.followup_line_id.id res[partner.id] = { 'latest_followup_date': latest_date, 'latest_followup_level_id': latest_level, 'latest_followup_level_id_without_lit': latest_level_without_lit } return res def do_partner_manual_action(self, cr, uid, partner_ids, context=None): #partner_ids -> res.partner for partner in self.browse(cr, uid, partner_ids, context=context): #Check action: check if the action was not empty, if not add action_text = "" if partner.payment_next_action: action_text = (partner.payment_next_action or '') + "\n" + ( partner.latest_followup_level_id_without_lit. 
manual_action_note or '') else: action_text = partner.latest_followup_level_id_without_lit.manual_action_note or '' #Check date: only change when it did not exist already action_date = partner.payment_next_action_date or fields.date.context_today( self, cr, uid, context=context) # Check responsible: if partner has not got a responsible already, take from follow-up responsible_id = False if partner.payment_responsible_id: responsible_id = partner.payment_responsible_id.id else: p = partner.latest_followup_level_id_without_lit.manual_action_responsible_id responsible_id = p and p.id or False self.write( cr, uid, [partner.id], { 'payment_next_action_date': action_date, 'payment_next_action': action_text, 'payment_responsible_id': responsible_id }) def do_partner_print(self, cr, uid, wizard_partner_ids, data, context=None): #wizard_partner_ids are ids from special view, not from res.partner if not wizard_partner_ids: return {} data['partner_ids'] = wizard_partner_ids datas = { 'ids': wizard_partner_ids, 'model': 'account_followup.followup', 'form': data } return self.pool['report'].get_action( cr, uid, wizard_partner_ids, 'account_followup.report_followup', data=datas, context=context) def do_partner_mail(self, cr, uid, partner_ids, context=None): if context is None: context = {} ctx = context.copy() ctx['followup'] = True #partner_ids are res.partner ids # If not defined by latest follow-up level, it will be the default template if it can find it mtp = self.pool.get('email.template') unknown_mails = 0 for partner in self.browse(cr, uid, partner_ids, context=ctx): if partner.email and partner.email.strip(): level = partner.latest_followup_level_id_without_lit if level and level.send_email and level.email_template_id and level.email_template_id.id: mtp.send_mail(cr, uid, level.email_template_id.id, partner.id, context=ctx) else: mail_template_id = self.pool.get( 'ir.model.data').get_object_reference( cr, uid, 'account_followup', 'email_template_account_followup_default') mtp.send_mail(cr, uid, mail_template_id[1], partner.id, context=ctx) else: unknown_mails = unknown_mails + 1 action_text = _( "Email not sent because of email address of partner not filled in" ) if partner.payment_next_action_date: payment_action_date = min( fields.date.context_today(self, cr, uid, context=ctx), partner.payment_next_action_date) else: payment_action_date = fields.date.context_today( self, cr, uid, context=ctx) if partner.payment_next_action: payment_next_action = partner.payment_next_action + " \n " + action_text else: payment_next_action = action_text self.write(cr, uid, [partner.id], { 'payment_next_action_date': payment_action_date, 'payment_next_action': payment_next_action }, context=ctx) return unknown_mails def get_followup_table_html(self, cr, uid, ids, context=None): """ Build the html tables to be included in emails send to partners, when reminding them their overdue invoices. :param ids: [id] of the partner for whom we are building the tables :rtype: string """ from report import account_followup_print assert len(ids) == 1 if context is None: context = {} partner = self.browse(cr, uid, ids[0], context=context) #copy the context to not change global context. Overwrite it because _() looks for the lang in local variable 'context'. 
#Set the language to use = the partner language context = dict(context, lang=partner.lang) followup_table = '' if partner.unreconciled_aml_ids: company = self.pool.get('res.users').browse( cr, uid, uid, context=context).company_id current_date = fields.date.context_today(self, cr, uid, context=context) rml_parse = account_followup_print.report_rappel( cr, uid, "followup_rml_parser") final_res = rml_parse._lines_get_with_partner(partner, company.id) for currency_dict in final_res: currency = currency_dict.get( 'line', [{ 'currency_id': company.currency_id }])[0]['currency_id'] followup_table += ''' <table border="2" width=100%%> <tr> <td>''' + _("Invoice Date") + '''</td> <td>''' + _("Description") + '''</td> <td>''' + _("Reference") + '''</td> <td>''' + _("Due Date") + '''</td> <td>''' + _( "Amount") + " (%s)" % (currency.symbol) + '''</td> <td>''' + _("Lit.") + '''</td> </tr> ''' total = 0 for aml in currency_dict['line']: block = aml['blocked'] and 'X' or ' ' total += aml['balance'] strbegin = "<TD>" strend = "</TD>" date = aml['date_maturity'] or aml['date'] if date <= current_date and aml['balance'] > 0: strbegin = "<TD><B>" strend = "</B></TD>" followup_table += "<TR>" + strbegin + str( aml['date'] ) + strend + strbegin + aml[ 'name'] + strend + strbegin + aml[ 'ref'] + strend + strbegin + str( date ) + strend + strbegin + str( aml['balance'] ) + strend + strbegin + block + strend + "</TR>" total = reduce(lambda x, y: x + y['balance'], currency_dict['line'], 0.00) total = rml_parse.formatLang(total, dp='Account', currency_obj=currency) followup_table += '''<tr> </tr> </table> <center>''' + _( "Amount due") + ''' : %s </center>''' % (total) return followup_table def write(self, cr, uid, ids, vals, context=None): if vals.get("payment_responsible_id", False): for part in self.browse(cr, uid, ids, context=context): if part.payment_responsible_id <> vals[ "payment_responsible_id"]: #Find partner_id of user put as responsible responsible_partner_id = self.pool.get("res.users").browse( cr, uid, vals['payment_responsible_id'], context=context).partner_id.id self.pool.get("mail.thread").message_post( cr, uid, 0, body= _("You became responsible to do the next action for the payment follow-up of" ) + " <b><a href='#id=" + str(part.id) + "&view_type=form&model=res.partner'> " + part.name + " </a></b>", type='comment', subtype="mail.mt_comment", context=context, model='res.partner', res_id=part.id, partner_ids=[responsible_partner_id]) return super(res_partner, self).write(cr, uid, ids, vals, context=context) def action_done(self, cr, uid, ids, context=None): return self.write(cr, uid, ids, { 'payment_next_action_date': False, 'payment_next_action': '', 'payment_responsible_id': False }, context=context) def do_button_print(self, cr, uid, ids, context=None): assert (len(ids) == 1) company_id = self.pool.get('res.users').browse( cr, uid, uid, context=context).company_id.id #search if the partner has accounting entries to print. If not, it may not be present in the #psql view the report is based on, so we need to stop the user here. if not self.pool.get('account.move.line').search( cr, uid, [ ('partner_id', '=', ids[0]), ('account_id.type', '=', 'receivable'), ('reconcile_id', '=', False), ('state', '!=', 'draft'), ('company_id', '=', company_id), ], context=context): raise osv.except_osv( _('Error!'), _("The partner does not have any accounting entries to print in the overdue report for the current company." 
)) self.message_post(cr, uid, [ids[0]], body=_('Printed overdue payments report'), context=context) #build the id of this partner in the psql view. Could be replaced by a search with [('company_id', '=', company_id),('partner_id', '=', ids[0])] wizard_partner_ids = [ids[0] * 10000 + company_id] followup_ids = self.pool.get('account_followup.followup').search( cr, uid, [('company_id', '=', company_id)], context=context) if not followup_ids: raise osv.except_osv( _('Error!'), _("There is no followup plan defined for the current company.") ) data = { 'date': fields.date.today(), 'followup_id': followup_ids[0], } #call the print overdue report on this partner return self.do_partner_print(cr, uid, wizard_partner_ids, data, context=context) def _get_amounts_and_date(self, cr, uid, ids, name, arg, context=None): ''' Function that computes values for the followup functional fields. Note that 'payment_amount_due' is similar to 'credit' field on res.partner except it filters on user's company. ''' res = {} company = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id current_date = fields.date.context_today(self, cr, uid, context=context) for partner in self.browse(cr, uid, ids, context=context): worst_due_date = False amount_due = amount_overdue = 0.0 for aml in partner.unreconciled_aml_ids: if (aml.company_id == company): date_maturity = aml.date_maturity or aml.date if not worst_due_date or date_maturity < worst_due_date: worst_due_date = date_maturity amount_due += aml.result if (date_maturity <= current_date): amount_overdue += aml.result res[partner.id] = { 'payment_amount_due': amount_due, 'payment_amount_overdue': amount_overdue, 'payment_earliest_due_date': worst_due_date } return res def _get_followup_overdue_query(self, cr, uid, args, overdue_only=False, context=None): ''' This function is used to build the query and arguments to use when making a search on functional fields * payment_amount_due * payment_amount_overdue Basically, the query is exactly the same except that for overdue there is an extra clause in the WHERE. 
:param args: arguments given to the search in the usual domain notation (list of tuples) :param overdue_only: option to add the extra argument to filter on overdue accounting entries or not :returns: a tuple with * the query to execute as first element * the arguments for the execution of this query :rtype: (string, []) ''' company_id = self.pool.get('res.users').browse( cr, uid, uid, context=context).company_id.id having_where_clause = ' AND '.join( map(lambda x: '(SUM(bal2) %s %%s)' % (x[1]), args)) having_values = [x[2] for x in args] query = self.pool.get('account.move.line')._query_get(cr, uid, context=context) overdue_only_str = overdue_only and 'AND date_maturity <= NOW()' or '' return ('''SELECT pid AS partner_id, SUM(bal2) FROM (SELECT CASE WHEN bal IS NOT NULL THEN bal ELSE 0.0 END AS bal2, p.id as pid FROM (SELECT (debit-credit) AS bal, partner_id FROM account_move_line l WHERE account_id IN (SELECT id FROM account_account WHERE type=\'receivable\' AND active) ''' + overdue_only_str + ''' AND reconcile_id IS NULL AND company_id = %s AND ''' + query + ''') AS l RIGHT JOIN res_partner p ON p.id = partner_id ) AS pl GROUP BY pid HAVING ''' + having_where_clause, [company_id] + having_values) def _payment_overdue_search(self, cr, uid, obj, name, args, context=None): if not args: return [] query, query_args = self._get_followup_overdue_query(cr, uid, args, overdue_only=True, context=context) cr.execute(query, query_args) res = cr.fetchall() if not res: return [('id', '=', '0')] return [('id', 'in', [x[0] for x in res])] def _payment_earliest_date_search(self, cr, uid, obj, name, args, context=None): if not args: return [] company_id = self.pool.get('res.users').browse( cr, uid, uid, context=context).company_id.id having_where_clause = ' AND '.join( map(lambda x: '(MIN(l.date_maturity) %s %%s)' % (x[1]), args)) having_values = [x[2] for x in args] query = self.pool.get('account.move.line')._query_get(cr, uid, context=context) cr.execute('SELECT partner_id FROM account_move_line l '\ 'WHERE account_id IN '\ '(SELECT id FROM account_account '\ 'WHERE type=\'receivable\' AND active) '\ 'AND l.company_id = %s ' 'AND reconcile_id IS NULL '\ 'AND '+query+' '\ 'AND partner_id IS NOT NULL '\ 'GROUP BY partner_id HAVING '+ having_where_clause, [company_id] + having_values) res = cr.fetchall() if not res: return [('id', '=', '0')] return [('id', 'in', [x[0] for x in res])] def _payment_due_search(self, cr, uid, obj, name, args, context=None): if not args: return [] query, query_args = self._get_followup_overdue_query( cr, uid, args, overdue_only=False, context=context) cr.execute(query, query_args) res = cr.fetchall() if not res: return [('id', '=', '0')] return [('id', 'in', [x[0] for x in res])] def _get_partners(self, cr, uid, ids, context=None): #this function search for the partners linked to all account.move.line 'ids' that have been changed partners = set() for aml in self.browse(cr, uid, ids, context=context): if aml.partner_id: partners.add(aml.partner_id.id) return list(partners) _inherit = "res.partner" _columns = { 'payment_responsible_id':fields.many2one('res.users', ondelete='set null', string='Follow-up Responsible', help="Optionally you can assign a user to this field, which will make him responsible for the action.", track_visibility="onchange"), 'payment_note':fields.text('Customer Payment Promise', help="Payment Note", track_visibility="onchange"), 'payment_next_action':fields.text('Next Action', help="This is the next action to be taken. 
It will automatically be set when the partner gets a follow-up level that requires a manual action. ", track_visibility="onchange"), 'payment_next_action_date':fields.date('Next Action Date', help="This is when the manual follow-up is needed. " \ "The date will be set to the current date when the partner gets a follow-up level that requires a manual action. "\ "Can be practical to set manually e.g. to see if he keeps his promises."), 'unreconciled_aml_ids':fields.one2many('account.move.line', 'partner_id', domain=['&', ('reconcile_id', '=', False), '&', ('account_id.active','=', True), '&', ('account_id.type', '=', 'receivable'), ('state', '!=', 'draft')]), 'latest_followup_date':fields.function(_get_latest, method=True, type='date', string="Latest Follow-up Date", help="Latest date that the follow-up level of the partner was changed", store=False, multi="latest"), 'latest_followup_level_id':fields.function(_get_latest, method=True, type='many2one', relation='account_followup.followup.line', string="Latest Follow-up Level", help="The maximum follow-up level", store={ 'res.partner': (lambda self, cr, uid, ids, c: ids,[],10), 'account.move.line': (_get_partners, ['followup_line_id'], 10), }, multi="latest"), 'latest_followup_level_id_without_lit':fields.function(_get_latest, method=True, type='many2one', relation='account_followup.followup.line', string="Latest Follow-up Level without litigation", help="The maximum follow-up level without taking into account the account move lines with litigation", store={ 'res.partner': (lambda self, cr, uid, ids, c: ids,[],10), 'account.move.line': (_get_partners, ['followup_line_id'], 10), }, multi="latest"), 'payment_amount_due':fields.function(_get_amounts_and_date, type='float', string="Amount Due", store = False, multi="followup", fnct_search=_payment_due_search), 'payment_amount_overdue':fields.function(_get_amounts_and_date, type='float', string="Amount Overdue", store = False, multi="followup", fnct_search = _payment_overdue_search), 'payment_earliest_due_date':fields.function(_get_amounts_and_date, type='date', string = "Worst Due Date", multi="followup", fnct_search=_payment_earliest_date_search), }
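# The following standalone sketch (not part of the original module) illustrates how
# _get_followup_overdue_query() above turns a functional-field search domain into the SQL
# HAVING clause plus its parameter list; the sample domain values are hypothetical.
args = [('payment_amount_overdue', '>=', 100.0), ('payment_amount_overdue', '<', 500.0)]
having_where_clause = ' AND '.join(map(lambda x: '(SUM(bal2) %s %%s)' % (x[1],), args))
having_values = [x[2] for x in args]
assert having_where_clause == '(SUM(bal2) >= %s) AND (SUM(bal2) < %s)'
assert having_values == [100.0, 500.0]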
class hr_expense_line(osv.osv): _name = "hr.expense.line" _description = "Expense Line" def _amount(self, cr, uid, ids, field_name, arg, context=None): if not ids: return {} cr.execute( "SELECT l.id,COALESCE(SUM(l.unit_amount*l.unit_quantity),0) AS amount FROM hr_expense_line l WHERE id IN %s GROUP BY l.id ", (tuple(ids), )) res = dict(cr.fetchall()) return res def _get_uom_id(self, cr, uid, context=None): result = self.pool.get('ir.model.data').get_object_reference( cr, uid, 'product', 'product_uom_unit') return result and result[1] or False _columns = { 'name': fields.char('Expense Note', size=128, required=True), 'date_value': fields.date('Date', required=True), 'expense_id': fields.many2one('hr.expense.expense', 'Expense', ondelete='cascade', select=True), 'total_amount': fields.function(_amount, string='Total', digits_compute=dp.get_precision('Account')), 'unit_amount': fields.float('Unit Price', digits_compute=dp.get_precision('Product Price')), 'unit_quantity': fields.float( 'Quantities', digits_compute=dp.get_precision('Product Unit of Measure')), 'product_id': fields.many2one('product.product', 'Product', domain=[('hr_expense_ok', '=', True)]), 'uom_id': fields.many2one('product.uom', 'Unit of Measure', required=True), 'description': fields.text('Description'), 'analytic_account': fields.many2one('account.analytic.account', 'Analytic account'), 'ref': fields.char('Reference', size=32), 'sequence': fields.integer( 'Sequence', select=True, help= "Gives the sequence order when displaying a list of expense lines." ), } _defaults = { 'unit_quantity': 1, 'date_value': lambda *a: time.strftime('%Y-%m-%d'), 'uom_id': _get_uom_id, } _order = "sequence, date_value desc" def onchange_product_id(self, cr, uid, ids, product_id, context=None): res = {} if product_id: product = self.pool.get('product.product').browse(cr, uid, product_id, context=context) res['name'] = product.name amount_unit = product.price_get('standard_price')[product.id] res['unit_amount'] = amount_unit res['uom_id'] = product.uom_id.id return {'value': res} def onchange_uom(self, cr, uid, ids, product_id, uom_id, context=None): res = {'value': {}} if not uom_id or not product_id: return res product = self.pool.get('product.product').browse(cr, uid, product_id, context=context) uom = self.pool.get('product.uom').browse(cr, uid, uom_id, context=context) if uom.category_id.id != product.uom_id.category_id.id: res['warning'] = { 'title': _('Warning'), 'message': _('Selected Unit of Measure does not belong to the same category as the product Unit of Measure' ) } res['value'].update({'uom_id': product.uom_id.id}) return res
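# Standalone sketch (not from the original module) of what the SQL in _amount() above
# computes: one total per expense line as unit_amount * unit_quantity; the ids and
# values below are hypothetical.
sample_lines = {1: (15.0, 2.0), 2: (7.5, 4.0)}  # line id -> (unit_amount, unit_quantity)
res = dict((line_id, amount * qty) for line_id, (amount, qty) in sample_lines.items())
assert res == {1: 30.0, 2: 30.0}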
_columns = { 'name': fields.char('Name', size=64, help="Exchange description like the name of the supplier, bank,...", required=True), 'type': fields.selection([('in','IN'),('out','OUT'),('in-out', 'IN & OUT')], 'Type',help=("IN for files coming from the other system " "and to be imported in the ERP; OUT for files to be " "generated from the ERP and sent to the other system")), 'mapping_id':fields.many2one('external.mapping', 'External Mapping', required=True, domain="[('referential_id', '=', referential_id)]"), 'format' : fields.selection([('csv','CSV'),('csv_no_header','CSV WITHOUT HEADER'), ('xls', 'XLS')], 'File format'), 'referential_id':fields.many2one('external.referential', 'Referential',help="Referential to use for connection and mapping", required=True), 'scheduler':fields.many2one('ir.cron', 'Scheduler',help="Scheduler that will execute the cron task"), 'search_filter': fields.char('Search Filter', size=256), 'filename': fields.char('Filename', size=128, help="Filename will be used to generate the output file name or to read the incoming file. It is possible to use variables (check in sequence for syntax)", required=True), 'folder_path': fields.char('Folder Path', size=128, help="Folder that contains the incoming or the outgoing file"), 'archive_folder_path': fields.char('Archive Folder Path', size=128, help="If a path is set, an imported file will be automatically moved to this folder after import"), 'encoding': fields.selection(_get_encoding, 'Encoding', required=True), 'field_ids': fields.one2many('file.fields', 'file_id', 'Fields'), 'action_before_all': fields.text('Action Before All', help="This python code will be executed before the import/export"), 'action_after_all': fields.text('Action After All', help="This python code will be executed after the import/export"), 'action_before_each': fields.text('Action Before Each', help="This python code will be executed before each element of the import/export"), 'action_after_each': fields.text('Action After Each', help="This python code will be executed after each element of the import/export"), 'check_if_import': fields.text('Check If Import', help="This python code will be executed before each element of the import"), 'delimiter':fields.char('Fields delimiter', size=64, help="Delimiter used in the CSV file"), 'lang': fields.many2one('res.lang', 'Language'), 'import_default_fields':fields.one2many('file.default.import.values', 'file_id', 'Default Field'), 'do_not_update':fields.boolean('Do Not Update'), 'pre_processing': fields.text('Pre-Processing', help="This python code will be executed before merging the elements of the import"), 'mapping_template_id':fields.many2one('external.mapping.template', 'External Mapping Template', required=True), 'notes': fields.text('Notes'), 'related_mapping_ids': fields.function(_get_related_mapping_ids, type="many2many", relation="external.mapping", string='Related Mappings'), 'synchronize_from': fields.selection([('referential', 'Referential'), ('pop_up', 'Pop Up')], string='Synchronize From'), 'linked_task': fields.many2one('file.exchange', 'Linked Task'), }
class res_users(osv.osv): """ User class. A res.users record models an OpenERP user and is different from an employee. res.users class now inherits from res.partner. The partner model is used to store the data related to the partner: lang, name, address, avatar, ... The user model is now dedicated to technical data. """ __admin_ids = {} _uid_cache = {} _inherits = { 'res.partner': 'partner_id', } _name = "res.users" _description = 'Users' def _set_new_password(self, cr, uid, id, name, value, args, context=None): if value is False: # Do not update the password if no value is provided, ignore silently. # For example web client submits False values for all empty fields. return if uid == id: # To change their own password users must use the client-specific change password wizard, # so that the new password is immediately used for further RPC requests, otherwise the user # will face unexpected 'Access Denied' exceptions. raise osv.except_osv( _('Operation Canceled'), _('Please use the change password wizard (in User Preferences or User menu) to change your own password.' )) self.write(cr, uid, id, {'password': value}) def _get_password(self, cr, uid, ids, arg, karg, context=None): return dict.fromkeys(ids, '') _columns = { 'id': fields.integer('ID'), 'login_date': fields.date('Latest connection', select=1), 'partner_id': fields.many2one('res.partner', required=True, string='Related Partner', ondelete='restrict', help='Partner-related data of the user'), 'login': fields.char('Login', size=64, required=True, help="Used to log into the system"), 'password': fields.char('Password', size=64, invisible=False, help="Keep empty if you don't want the user to be able to connect on the system."), 'new_password': fields.function(_get_password, type='char', size=64, fnct_inv=_set_new_password, string='Set Password', help="Specify a value only when creating a user or if you're "\ "changing the user's password, otherwise leave empty. After "\ "a change of password, the user has to login again."), 'signature': fields.text('Signature'), 'active': fields.boolean('Active'), 'action_id': fields.many2one('ir.actions.actions', 'Home Action', help="If specified, this action will be opened at logon for this user, in addition to the standard menu."), 'menu_id': fields.many2one('ir.actions.actions', 'Menu Action', help="If specified, the action will replace the standard menu for this user."), 'groups_id': fields.many2many('res.groups', 'res_groups_users_rel', 'uid', 'gid', 'Groups'), # Special behavior for this field: res.company.search() will only return the companies # available to the current user (should be the user's companies?), when the user_preference # context is set. 'company_id': fields.many2one('res.company', 'Company', required=True, help='The company this user is currently working for.', context={'user_preference': True}), 'company_ids':fields.many2many('res.company','res_company_users_rel','user_id','cid','Companies'), # backward compatibility fields 'user_email': fields.related('email', type='char', deprecated='Use the email field instead of user_email. 
This field will be removed with OpenERP 7.1.'), ### Added by Sangeetha ### 'user_menu_access': fields.many2many('ir.ui.menu', 'ir_ui_menu_user_rel', 'user_id', 'menu_id', 'Access Menu', domain = [('name','!=','')]), } def on_change_company_id(self, cr, uid, ids, company_id): return { 'warning': { 'title': _("Company Switch Warning"), 'message': _("Please keep in mind that documents currently displayed may not be relevant after switching to another company. If you have unsaved changes, please make sure to save and close all forms before switching to a different company. (You can click on Cancel in the User Preferences now)" ), } } def onchange_state(self, cr, uid, ids, state_id, context=None): partner_ids = [ user.partner_id.id for user in self.browse(cr, uid, ids, context=context) ] return self.pool.get('res.partner').onchange_state(cr, uid, partner_ids, state_id, context=context) def onchange_type(self, cr, uid, ids, is_company, context=None): """ Wrapper on the user.partner onchange_type, because some calls to the partner form view applied to the user may trigger the partner.onchange_type method, but applied to the user object. """ partner_ids = [ user.partner_id.id for user in self.browse(cr, uid, ids, context=context) ] return self.pool.get('res.partner').onchange_type(cr, uid, partner_ids, is_company, context=context) def onchange_address(self, cr, uid, ids, use_parent_address, parent_id, context=None): """ Wrapper on the user.partner onchange_address, because some calls to the partner form view applied to the user may trigger the partner.onchange_type method, but applied to the user object. """ partner_ids = [ user.partner_id.id for user in self.browse(cr, uid, ids, context=context) ] return self.pool.get('res.partner').onchange_address( cr, uid, partner_ids, use_parent_address, parent_id, context=context) def _check_company(self, cr, uid, ids, context=None): return all( ((this.company_id in this.company_ids) or not this.company_ids) for this in self.browse(cr, uid, ids, context)) _constraints = [ (_check_company, 'The chosen company is not in the allowed companies for this user', ['company_id', 'company_ids']), ] _sql_constraints = [('login_key', 'UNIQUE (login)', 'You can not have two users with the same login !')] def _get_company(self, cr, uid, context=None, uid2=False): if not uid2: uid2 = uid user = self.pool.get('res.users').read(cr, uid, uid2, ['company_id'], context) company_id = user.get('company_id', False) return company_id and company_id[0] or False def _get_companies(self, cr, uid, context=None): c = self._get_company(cr, uid, context) if c: return [c] return False def _get_menu(self, cr, uid, context=None): dataobj = self.pool.get('ir.model.data') try: model, res_id = dataobj.get_object_reference( cr, uid, 'base', 'action_menu_admin') if model != 'ir.actions.act_window': return False return res_id except ValueError: return False def _get_group(self, cr, uid, context=None): dataobj = self.pool.get('ir.model.data') result = [] try: dummy, group_id = dataobj.get_object_reference( cr, SUPERUSER_ID, 'base', 'group_user') result.append(group_id) dummy, group_id = dataobj.get_object_reference( cr, SUPERUSER_ID, 'base', 'group_partner_manager') result.append(group_id) except ValueError: # If these groups does not exists anymore pass return result _defaults = { 'password': '', 'active': True, 'customer': False, 'menu_id': _get_menu, 'company_id': _get_company, 'company_ids': _get_companies, 'groups_id': _get_group, 'image': lambda self, cr, uid, ctx={}: 
self.pool.get('res.partner'). _get_default_image(cr, uid, False, ctx, colorize=True), } # User can write on a few of his own fields (but not his groups for example) SELF_WRITEABLE_FIELDS = [ 'password', 'signature', 'action_id', 'company_id', 'email', 'name', 'image', 'image_medium', 'image_small', 'lang', 'tz' ] # User can read a few of his own fields SELF_READABLE_FIELDS = [ 'signature', 'company_id', 'login', 'email', 'name', 'image', 'image_medium', 'image_small', 'lang', 'tz', 'tz_offset', 'groups_id', 'partner_id', '__last_update' ] def read(self, cr, uid, ids, fields=None, context=None, load='_classic_read'): def override_password(o): if 'password' in o and ('id' not in o or o['id'] != uid): o['password'] = '******' return o if fields and (ids == [uid] or ids == uid): for key in fields: if not (key in self.SELF_READABLE_FIELDS or key.startswith('context_')): break else: # safe fields only, so we read as super-user to bypass access rights uid = SUPERUSER_ID result = super(res_users, self).read(cr, uid, ids, fields=fields, context=context, load=load) canwrite = self.pool.get('ir.model.access').check( cr, uid, 'res.users', 'write', False) if not canwrite: if isinstance(ids, (int, long)): result = override_password(result) else: result = map(override_password, result) return result def write(self, cr, uid, ids, values, context=None): if not hasattr(ids, '__iter__'): ids = [ids] if ids == [uid]: for key in values.keys(): if not (key in self.SELF_WRITEABLE_FIELDS or key.startswith('context_')): break else: if 'company_id' in values: if not (values['company_id'] in self.read( cr, SUPERUSER_ID, uid, ['company_ids'], context=context)['company_ids']): del values['company_id'] uid = 1 # safe fields only, so we write as super-user to bypass access rights res = super(res_users, self).write(cr, uid, ids, values, context=context) # clear caches linked to the users self.pool.get('ir.model.access').call_cache_clearing_methods(cr) clear = partial(self.pool.get('ir.rule').clear_cache, cr) map(clear, ids) db = cr.dbname if db in self._uid_cache: for id in ids: if id in self._uid_cache[db]: del self._uid_cache[db][id] self.context_get.clear_cache(self) return res def unlink(self, cr, uid, ids, context=None): if 1 in ids: raise osv.except_osv( _('Can not remove root user!'), _('You can not remove the admin user as it is used internally for resources created by OpenERP (updates, module installation, ...)' )) db = cr.dbname if db in self._uid_cache: for id in ids: if id in self._uid_cache[db]: del self._uid_cache[db][id] return super(res_users, self).unlink(cr, uid, ids, context=context) def name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100): if not args: args = [] if not context: context = {} ids = [] if name: ids = self.search(cr, user, [('login', '=', name)] + args, limit=limit, context=context) if not ids: ids = self.search(cr, user, [('name', operator, name)] + args, limit=limit, context=context) return self.name_get(cr, user, ids, context=context) def copy(self, cr, uid, id, default=None, context=None): user2copy = self.read(cr, uid, [id], ['login', 'name'])[0] default = dict(default or {}) if ('name' not in default) and ('partner_id' not in default): default['name'] = _("%s (copy)") % user2copy['name'] if 'login' not in default: default['login'] = _("%s (copy)") % user2copy['login'] return super(res_users, self).copy(cr, uid, id, default, context) @tools.ormcache(skiparg=2) def context_get(self, cr, uid, context=None): user = self.browse(cr, SUPERUSER_ID, uid, 
context) result = {} for k in self._all_columns.keys(): if k.startswith('context_'): context_key = k[8:] elif k in ['lang', 'tz']: context_key = k else: context_key = False if context_key: res = getattr(user, k) or False if isinstance(res, browse_record): res = res.id result[context_key] = res or False return result def action_get(self, cr, uid, context=None): dataobj = self.pool.get('ir.model.data') data_id = dataobj._get_id(cr, SUPERUSER_ID, 'base', 'action_res_users_my') return dataobj.browse(cr, uid, data_id, context=context).res_id def check_super(self, passwd): if passwd == tools.config['admin_passwd']: return True else: raise openerp.exceptions.AccessDenied() def check_credentials(self, cr, uid, password): """ Override this method to plug additional authentication methods""" res = self.search(cr, SUPERUSER_ID, [('id', '=', uid), ('password', '=', password)]) if not res: raise openerp.exceptions.AccessDenied() def login(self, db, login, password): if not password: return False user_id = False cr = pooler.get_db(db).cursor() try: # autocommit: our single update request will be performed atomically. # (In this way, there is no opportunity to have two transactions # interleaving their cr.execute()..cr.commit() calls and have one # of them rolled back due to a concurrent access.) cr.autocommit(True) # check if user exists res = self.search(cr, SUPERUSER_ID, [('login', '=', login)]) if res: user_id = res[0] # check credentials self.check_credentials(cr, user_id, password) # We effectively unconditionally write the res_users line. # Even w/ autocommit there's a chance the user row will be locked, # in which case we can't delay the login just for the purpose of # update the last login date - hence we use FOR UPDATE NOWAIT to # try to get the lock - fail-fast # Failing to acquire the lock on the res_users row probably means # another request is holding it. No big deal, we don't want to # prevent/delay login in that case. It will also have been logged # as a SQL error, if anyone cares. try: cr.execute( "SELECT id FROM res_users WHERE id=%s FOR UPDATE NOWAIT", (user_id, ), log_exceptions=False) cr.execute( "UPDATE res_users SET login_date = now() AT TIME ZONE 'UTC' WHERE id=%s", (user_id, )) except Exception: _logger.debug( "Failed to update last_login for db:%s login:%s", db, login, exc_info=True) except openerp.exceptions.AccessDenied: _logger.info("Login failed for db:%s login:%s", db, login) user_id = False finally: cr.close() return user_id def authenticate(self, db, login, password, user_agent_env): """Verifies and returns the user ID corresponding to the given ``login`` and ``password`` combination, or False if there was no matching user. :param str db: the database on which user is trying to authenticate :param str login: username :param str password: user password :param dict user_agent_env: environment dictionary describing any relevant environment attributes """ uid = self.login(db, login, password) if uid == openerp.SUPERUSER_ID: # Successfully logged in as admin! # Attempt to guess the web base url... 
if user_agent_env and user_agent_env.get('base_location'): cr = pooler.get_db(db).cursor() try: base = user_agent_env['base_location'] ICP = self.pool.get('ir.config_parameter') if not ICP.get_param(cr, uid, 'web.base.url.freeze'): ICP.set_param(cr, uid, 'web.base.url', base) cr.commit() except Exception: _logger.exception( "Failed to update web.base.url configuration parameter" ) finally: cr.close() return uid def check(self, db, uid, passwd): """Verifies that the given (uid, password) is authorized for the database ``db`` and raise an exception if it is not.""" if not passwd: # empty passwords disallowed for obvious security reasons raise openerp.exceptions.AccessDenied() if self._uid_cache.get(db, {}).get(uid) == passwd: return cr = pooler.get_db(db).cursor() try: self.check_credentials(cr, uid, passwd) if self._uid_cache.has_key(db): self._uid_cache[db][uid] = passwd else: self._uid_cache[db] = {uid: passwd} finally: cr.close() def change_password(self, cr, uid, old_passwd, new_passwd, context=None): """Change current user password. Old password must be provided explicitly to prevent hijacking an existing user session, or for cases where the cleartext password is not used to authenticate requests. :return: True :raise: openerp.exceptions.AccessDenied when old password is wrong :raise: except_osv when new password is not set or empty """ self.check(cr.dbname, uid, old_passwd) if new_passwd: return self.write(cr, uid, uid, {'password': new_passwd}) raise osv.except_osv( _('Warning!'), _("Setting empty passwords is not allowed for security reasons!")) def preference_save(self, cr, uid, ids, context=None): return { 'type': 'ir.actions.client', 'tag': 'reload', } def preference_change_password(self, cr, uid, ids, context=None): return { 'type': 'ir.actions.client', 'tag': 'change_password', 'target': 'new', } def has_group(self, cr, uid, group_ext_id): """Checks whether user belongs to given group. :param str group_ext_id: external ID (XML ID) of the group. Must be provided in fully-qualified form (``module.ext_id``), as there is no implicit module to use.. :return: True if the current user is a member of the group with the given external ID (XML ID), else False. """ assert group_ext_id and '.' in group_ext_id, "External ID must be fully qualified" module, ext_id = group_ext_id.split('.') cr.execute( """SELECT 1 FROM res_groups_users_rel WHERE uid=%s AND gid IN (SELECT res_id FROM ir_model_data WHERE module=%s AND name=%s)""", (uid, module, ext_id)) return bool(cr.fetchone())
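# Standalone sketch (not from the original module) of the for/else pattern used in
# read() and write() above: the else branch runs only when the loop completes without
# a break, i.e. when every requested field is in the whitelist; the field names below
# are a shortened, hypothetical subset of SELF_READABLE_FIELDS.
SAFE_FIELDS = ['signature', 'company_id', 'login', 'email', 'name', 'lang', 'tz']

def safe_fields_only(requested):
    for key in requested:
        if not (key in SAFE_FIELDS or key.startswith('context_')):
            break
    else:
        return True  # all fields safe -> the real code switches to SUPERUSER_ID here
    return False

assert safe_fields_only(['login', 'context_tz'])
assert not safe_fields_only(['login', 'password'])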
datas = (attachment.datas).decode('base64') fname = attachment.datas_fname target = open(path+'/'+fname, 'wb') target.write(datas) target.close() paths.append(path+'/'+fname) so_name = self.read(cr, uid, attach_ids, ['res_name'], context=context)[0].get('res_name') self.upload_document(paths, so_name) return True def create(self, cr, uid, vals, context=None): return super(ir_attachment, self).create(cr, uid, vals, context) _columns = { 'url': fields.text('Url'), 'file_upload' : fields.binary('File Upload'), 'datafile_of':fields.char('File of'), } class google_drive(osv.osv): _name = 'google.drive' def connect_google(self, cr, uid, email, password, context=None): source = 'Document List Python Sample' try: self.gd_client = gdata.docs.service.DocsService() self.gd_client.ClientLogin(email, password, source=source) self.gs_client = gdata.spreadsheet.service.SpreadsheetsService() self.gs_client.ClientLogin(email, password, source=source) except:
class groups(osv.osv): _name = "res.groups" _description = "Access Groups" _rec_name = 'full_name' def _get_full_name(self, cr, uid, ids, field, arg, context=None): res = {} for g in self.browse(cr, uid, ids, context): if g.category_id: res[g.id] = '%s / %s' % (g.category_id.name, g.name) else: res[g.id] = g.name return res def _search_group(self, cr, uid, obj, name, args, context=None): operand = args[0][2] operator = args[0][1] values = operand.split('/') group_name = values[0] where = [('name', operator, group_name)] if len(values) > 1: application_name = values[0] group_name = values[1] where = ['|', ('category_id.name', operator, application_name), ('name', operator, group_name)] return where _columns = { 'name': fields.char('Name', size=64, required=True, translate=True), 'users': fields.many2many('res.users', 'res_groups_users_rel', 'gid', 'uid', 'Users'), 'model_access': fields.one2many('ir.model.access', 'group_id', 'Access Controls'), 'rule_groups': fields.many2many('ir.rule', 'rule_group_rel', 'group_id', 'rule_group_id', 'Rules', domain=[('global', '=', False)]), 'menu_access': fields.many2many('ir.ui.menu', 'ir_ui_menu_group_rel', 'gid', 'menu_id', 'Access Menu'), 'view_access': fields.many2many('ir.ui.view', 'ir_ui_view_group_rel', 'group_id', 'view_id', 'Views'), 'comment': fields.text('Comment', size=250, translate=True), 'category_id': fields.many2one('ir.module.category', 'Application', select=True), 'full_name': fields.function(_get_full_name, type='char', string='Group Name', fnct_search=_search_group), 'custom_group': fields.boolean('Custom Group'), } _sql_constraints = [('name_uniq', 'unique (category_id, name)', 'The name of the group must be unique !')] def search(self, cr, uid, args, offset=0, limit=None, order=None, context=None, count=False): # add explicit ordering if search is sorted on full_name if order and order.startswith('full_name'): ids = super(groups, self).search(cr, uid, args, context=context) gs = self.browse(cr, uid, ids, context) gs.sort(key=lambda g: g.full_name, reverse=order.endswith('DESC')) gs = gs[offset:offset + limit] if limit else gs[offset:] return map(int, gs) return super(groups, self).search(cr, uid, args, offset, limit, order, context, count) def copy(self, cr, uid, id, default=None, context=None): group_name = self.read(cr, uid, [id], ['name'])[0]['name'] default = dict(default or {}) default.update({'name': _('%s (copy)') % group_name}) return super(groups, self).copy(cr, uid, id, default, context) def write(self, cr, uid, ids, vals, context=None): if 'name' in vals: if vals['name'].startswith('-'): raise osv.except_osv( _('Error'), _('The name of the group cannot start with "-"')) res = super(groups, self).write(cr, uid, ids, vals, context=context) self.pool.get('ir.model.access').call_cache_clearing_methods(cr) return res
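# Standalone sketch (not from the original module) of the "Application / Group" naming
# convention handled by _get_full_name() and _search_group() above; group and category
# names below are hypothetical.
def full_name(category, name):
    return '%s / %s' % (category, name) if category else name

def split_search_operand(operand):
    values = operand.split('/')
    if len(values) > 1:
        return values[0].strip(), values[1].strip()  # (application, group)
    return None, values[0].strip()

assert full_name('Sales', 'Manager') == 'Sales / Manager'
assert split_search_operand('Sales / Manager') == ('Sales', 'Manager')
assert split_search_operand('Manager') == (None, 'Manager')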
if emps_clock: clock_ids_exist.append(clock_id) resu[order.id] = clock_ids_exist except Exception,e: traceback.print_exc() pass return resu _columns = { 'employee_id': fields.many2one('hr.employee', 'Employee', required=True, select=True), 'department_id':fields.related('employee_id','department_id', type='many2one', relation='hr.department', string='Department', store=True), 'job_id':fields.related('employee_id','job_id', type='many2one', relation='hr.job', string='Title', store=True), 'emp_code':fields.related('employee_id','emp_code', type='char', string='Employee Code', store=True), 'mobile_phone':fields.related('employee_id','mobile_phone', type='char', string='Work Mobile', store=True), 'borrow_money_residual':fields.related('employee_id','money_residual', type='float', string='Borrowed residual', readonly=True), 'dimmission_reason':fields.text('Dimission Reason', required=True), 'advice_to_company':fields.text('Advice to company'), 'employment_start':fields.date('Employment Started'), 'date_request':fields.date('Request Date', required=True), 'date_done':fields.date('Done Date', required=False, readonly=True), 'approve_ids': fields.one2many('hr.dimission.item', 'dimission_id', 'Approvals', domain=[('type','=','approve')]), 'transfer_ids': fields.one2many('hr.dimission.item', 'dimission_id', 'Transfers', domain=[('type','=','transfer')]), 'payslip_id': fields.many2many('hr.emppay', string='Payslip'), 'attrpt_ids': fields.many2many('hr.rpt.attend.month', string='Attendance Reports'), 'hr_clock_ids': fields.function(_emp_clocks, string='HR Clocks', type='many2many', relation='hr.clock', readonly=True), 'attachment_lines': fields.one2many('ir.attachment', 'hr_admission_id','Attachment'), 'company_id':fields.many2one('res.company', 'Company', required=True), 'state': fields.selection([
class hr_payslip_amendment(orm.Model): _name = 'hr.payslip.amendment' _description = 'Pay Slip Amendment' _inherit = ['mail.thread'] _columns = { 'name': fields.char( 'Description', size=128, required=True, readonly=True, states={'draft': [('readonly', False)]}, ), 'input_id': fields.many2one( 'hr.rule.input', 'Salary Rule Input', required=True, readonly=True, states={'draft': [('readonly', False)]}, ), 'employee_id': fields.many2one( 'hr.employee', 'Employee', required=True, readonly=True, states={'draft': [('readonly', False)]}, ), 'amount': fields.float( 'Amount', required=True, readonly=True, states={'draft': [('readonly', False)]}, help="The meaning of this field is dependent on the salary rule " "that uses it."), 'state': fields.selection([ ('draft', 'Draft'), ('validate', 'Confirmed'), ('cancel', 'Cancelled'), ('done', 'Done'), ], 'State', required=True, readonly=True), 'note': fields.text('Memo'), } _defaults = { 'state': 'draft', } def onchange_employee(self, cr, uid, ids, employee_id, context=None): if not employee_id: return {} ee = self.pool.get('hr.employee').browse(cr, uid, employee_id, context=context) name = _('Pay Slip Amendment: %s (%s)') % (ee.name, ee.employee_no) val = {'name': name} return {'value': val} def unlink(self, cr, uid, ids, context=None): for psa in self.browse(cr, uid, ids, context=context): if psa.state in ['validate', 'done']: raise orm.except_orm( _('Invalid Action'), _('A Pay Slip Amendment that has been confirmed cannot be ' 'deleted!')) return super(hr_payslip_amendment, self).unlink(cr, uid, ids, context=context)
res[this.id] = this.invoice_lines and \ all(iline.invoice_id.state != 'cancel' for iline in this.invoice_lines) return res def _order_lines_from_invoice(self, cr, uid, ids, context=None): # direct access to the m2m table is the less convoluted way to achieve this (and is ok ACL-wise) cr.execute("""SELECT DISTINCT sol.id FROM sale_order_invoice_rel rel JOIN sale_order_line sol ON (sol.order_id = rel.order_id) WHERE rel.invoice_id = ANY(%s)""", (list(ids),)) return [i[0] for i in cr.fetchall()] _name = 'sale.order.line' _description = 'Sales Order Line' _columns = { 'order_id': fields.many2one('sale.order', 'Order Reference', required=True, ondelete='cascade', select=True, readonly=True, states={'draft':[('readonly',False)]}), 'name': fields.text('Description', required=True, readonly=True, states={'draft': [('readonly', False)]}), 'sequence': fields.integer('Sequence', help="Gives the sequence order when displaying a list of sales order lines."), 'product_id': fields.many2one('product.product', 'Product', domain=[('sale_ok', '=', True)], change_default=True), 'invoice_lines': fields.many2many('account.invoice.line', 'sale_order_line_invoice_rel', 'order_line_id', 'invoice_id', 'Invoice Lines', readonly=True), 'invoiced': fields.function(_fnct_line_invoiced, string='Invoiced', type='boolean', store={ 'account.invoice': (_order_lines_from_invoice, ['state'], 10), 'sale.order.line': (lambda self,cr,uid,ids,ctx=None: ids, ['invoice_lines'], 10)}), 'price_unit': fields.float('Unit Price', required=True, digits_compute= dp.get_precision('Product Price'), readonly=True, states={'draft': [('readonly', False)]}), 'type': fields.selection([('make_to_stock', 'from stock'), ('make_to_order', 'on order')], 'Procurement Method', required=True, readonly=True, states={'draft': [('readonly', False)]}, help="From stock: When needed, the product is taken from the stock or we wait for replenishment.\nOn order: When needed, the product is purchased or produced."), 'price_subtotal': fields.function(_amount_line, string='Subtotal', digits_compute= dp.get_precision('Account')), 'tax_id': fields.many2many('account.tax', 'sale_order_tax', 'order_line_id', 'tax_id', 'Taxes', readonly=True, states={'draft': [('readonly', False)]}), 'address_allotment_id': fields.many2one('res.partner', 'Allotment Partner',help="A partner to whom the particular product needs to be allotted."), 'product_uom_qty': fields.float('Quantity', digits_compute= dp.get_precision('Product UoS'), required=True, readonly=True, states={'draft': [('readonly', False)]}), 'product_uom': fields.many2one('product.uom', 'Unit of Measure ', required=True, readonly=True, states={'draft': [('readonly', False)]}),
class crm_phonecall(osv.osv): """ Model for CRM phonecalls """ _name = "crm.phonecall" _description = "Phonecall" _order = "id desc" _inherit = ['mail.thread'] _columns = { 'date_action_last': fields.datetime('Last Action', readonly=1), 'date_action_next': fields.datetime('Next Action', readonly=1), 'create_date': fields.datetime('Creation Date' , readonly=True), 'section_id': fields.many2one('crm.case.section', 'Sales Team', \ select=True, help='Sales team to which Case belongs to.'), 'user_id': fields.many2one('res.users', 'Responsible'), 'partner_id': fields.many2one('res.partner', 'Contact'), 'company_id': fields.many2one('res.company', 'Company'), 'description': fields.text('Description'), 'state': fields.selection( [('open', 'Confirmed'), ('cancel', 'Cancelled'), ('pending', 'Pending'), ('done', 'Held') ], string='Status', readonly=True, track_visibility='onchange', help='The status is set to Confirmed, when a case is created.\n' 'When the call is over, the status is set to Held.\n' 'If the callis not applicable anymore, the status can be set to Cancelled.'), 'email_from': fields.char('Email', size=128, help="These people will receive email."), 'date_open': fields.datetime('Opened', readonly=True), # phonecall fields 'name': fields.char('Call Summary', required=True), 'active': fields.boolean('Active', required=False), 'duration': fields.float('Duration', help='Duration in minutes and seconds.'), 'categ_id': fields.many2one('crm.case.categ', 'Category', \ domain="['|',('section_id','=',section_id),('section_id','=',False),\ ('object_id.model', '=', 'crm.phonecall')]" ), 'partner_phone': fields.char('Phone'), 'partner_mobile': fields.char('Mobile'), 'priority': fields.selection([('0','Low'), ('1','Normal'), ('2','High')], 'Priority'), 'date_closed': fields.datetime('Closed', readonly=True), 'date': fields.datetime('Date'), 'opportunity_id': fields.many2one ('crm.lead', 'Lead/Opportunity'), } def _get_default_state(self, cr, uid, context=None): if context and context.get('default_state'): return context.get('default_state') return 'open' _defaults = { 'date': fields.datetime.now, 'priority': '1', 'state': _get_default_state, 'user_id': lambda self, cr, uid, ctx: uid, 'active': 1 } def on_change_partner_id(self, cr, uid, ids, partner_id, context=None): values = {} if partner_id: partner = self.pool.get('res.partner').browse(cr, uid, partner_id, context=context) values = { 'partner_phone': partner.phone, 'partner_mobile': partner.mobile, } return {'value': values} def write(self, cr, uid, ids, values, context=None): """ Override to add case management: open/close dates """ if values.get('state'): if values.get('state') == 'done': values['date_closed'] = fields.datetime.now() self.compute_duration(cr, uid, ids, context=context) elif values.get('state') == 'open': values['date_open'] = fields.datetime.now() values['duration'] = 0.0 return super(crm_phonecall, self).write(cr, uid, ids, values, context=context) def compute_duration(self, cr, uid, ids, context=None): for phonecall in self.browse(cr, uid, ids, context=context): if phonecall.duration <= 0: duration = datetime.now() - datetime.strptime( phonecall.date, DEFAULT_SERVER_DATETIME_FORMAT) values = {'duration': duration.seconds / float(60)} self.write(cr, uid, [phonecall.id], values, context=context) return True def schedule_another_phonecall(self, cr, uid, ids, schedule_time, call_summary, \ user_id=False, section_id=False, categ_id=False, action='schedule', context=None): """ action :('schedule','Schedule a call'), ('log','Log a 
call') """ model_data = self.pool.get('ir.model.data') phonecall_dict = {} if not categ_id: try: res_id = model_data._get_id(cr, uid, 'crm', 'categ_phone2') categ_id = model_data.browse(cr, uid, res_id, context=context).res_id except ValueError: pass for call in self.browse(cr, uid, ids, context=context): if not section_id: section_id = call.section_id and call.section_id.id or False if not user_id: user_id = call.user_id and call.user_id.id or False if not schedule_time: schedule_time = call.date vals = { 'name': call_summary, 'user_id': user_id or False, 'categ_id': categ_id or False, 'description': call.description or False, 'date': schedule_time, 'section_id': section_id or False, 'partner_id': call.partner_id and call.partner_id.id or False, 'partner_phone': call.partner_phone, 'partner_mobile': call.partner_mobile, 'priority': call.priority, 'opportunity_id': call.opportunity_id and call.opportunity_id.id or False, } new_id = self.create(cr, uid, vals, context=context) if action == 'log': self.write(cr, uid, [new_id], {'state': 'done'}, context=context) phonecall_dict[call.id] = new_id return phonecall_dict def _call_create_partner(self, cr, uid, phonecall, context=None): partner = self.pool.get('res.partner') partner_id = partner.create( cr, uid, { 'name': phonecall.name, 'user_id': phonecall.user_id.id, 'comment': phonecall.description, 'address': [] }) return partner_id def on_change_opportunity(self, cr, uid, ids, opportunity_id, context=None): values = {} if opportunity_id: opportunity = self.pool.get('crm.lead').browse(cr, uid, opportunity_id, context=context) values = { 'section_id': opportunity.section_id and opportunity.section_id.id or False, 'partner_phone': opportunity.phone, 'partner_mobile': opportunity.mobile, 'partner_id': opportunity.partner_id and opportunity.partner_id.id or False, } return {'value': values} def _call_set_partner(self, cr, uid, ids, partner_id, context=None): write_res = self.write(cr, uid, ids, {'partner_id': partner_id}, context=context) self._call_set_partner_send_note(cr, uid, ids, context) return write_res def _call_create_partner_address(self, cr, uid, phonecall, partner_id, context=None): address = self.pool.get('res.partner') return address.create( cr, uid, { 'parent_id': partner_id, 'name': phonecall.name, 'phone': phonecall.partner_phone, }) def handle_partner_assignation(self, cr, uid, ids, action='create', partner_id=False, context=None): """ Handle partner assignation during a lead conversion. if action is 'create', create new partner with contact and assign lead to new partner_id. 
otherwise assign lead to specified partner_id :param list ids: phonecalls ids to process :param string action: what has to be done regarding partners (create it, assign an existing one, or nothing) :param int partner_id: partner to assign if any :return dict: dictionary organized as followed: {lead_id: partner_assigned_id} """ #TODO this is a duplication of the handle_partner_assignation method of crm_lead partner_ids = {} # If a partner_id is given, force this partner for all elements force_partner_id = partner_id for call in self.browse(cr, uid, ids, context=context): # If the action is set to 'create' and no partner_id is set, create a new one if action == 'create': partner_id = force_partner_id or self._call_create_partner( cr, uid, call, context=context) self._call_create_partner_address(cr, uid, call, partner_id, context=context) self._call_set_partner(cr, uid, [call.id], partner_id, context=context) partner_ids[call.id] = partner_id return partner_ids def redirect_phonecall_view(self, cr, uid, phonecall_id, context=None): model_data = self.pool.get('ir.model.data') # Select the view tree_view = model_data.get_object_reference( cr, uid, 'crm', 'crm_case_phone_tree_view') form_view = model_data.get_object_reference( cr, uid, 'crm', 'crm_case_phone_form_view') search_view = model_data.get_object_reference( cr, uid, 'crm', 'view_crm_case_phonecalls_filter') value = { 'name': _('Phone Call'), 'view_type': 'form', 'view_mode': 'tree,form', 'res_model': 'crm.phonecall', 'res_id': int(phonecall_id), 'views': [(form_view and form_view[1] or False, 'form'), (tree_view and tree_view[1] or False, 'tree'), (False, 'calendar')], 'type': 'ir.actions.act_window', 'search_view_id': search_view and search_view[1] or False, } return value def convert_opportunity(self, cr, uid, ids, opportunity_summary=False, partner_id=False, planned_revenue=0.0, probability=0.0, context=None): partner = self.pool.get('res.partner') opportunity = self.pool.get('crm.lead') opportunity_dict = {} default_contact = False for call in self.browse(cr, uid, ids, context=context): if not partner_id: partner_id = call.partner_id and call.partner_id.id or False if partner_id: address_id = partner.address_get(cr, uid, [partner_id])['default'] if address_id: default_contact = partner.browse(cr, uid, address_id, context=context) opportunity_id = opportunity.create( cr, uid, { 'name': opportunity_summary or call.name, 'planned_revenue': planned_revenue, 'probability': probability, 'partner_id': partner_id or False, 'mobile': default_contact and default_contact.mobile, 'section_id': call.section_id and call.section_id.id or False, 'description': call.description or False, 'priority': call.priority, 'type': 'opportunity', 'phone': call.partner_phone or False, 'email_from': default_contact and default_contact.email, }) vals = { 'partner_id': partner_id, 'opportunity_id': opportunity_id, 'state': 'done', } self.write(cr, uid, [call.id], vals, context=context) opportunity_dict[call.id] = opportunity_id return opportunity_dict def action_make_meeting(self, cr, uid, ids, context=None): """ Open meeting's calendar view to schedule a meeting on current phonecall. 
:return dict: dictionary value for created meeting view """ partner_ids = [] phonecall = self.browse(cr, uid, ids[0], context) if phonecall.partner_id and phonecall.partner_id.email: partner_ids.append(phonecall.partner_id.id) res = self.pool.get('ir.actions.act_window').for_xml_id( cr, uid, 'calendar', 'action_calendar_event', context) res['context'] = { 'default_phonecall_id': phonecall.id, 'default_partner_ids': partner_ids, 'default_user_id': uid, 'default_email_from': phonecall.email_from, 'default_name': phonecall.name, } return res def action_button_convert2opportunity(self, cr, uid, ids, context=None): """ Convert a phonecall into an opp and then redirect to the opp view. :param list ids: list of calls ids to convert (typically contains a single id) :return dict: containing view information """ if len(ids) != 1: raise osv.except_osv( _('Warning!'), _('It\'s only possible to convert one phonecall at a time.')) opportunity_dict = self.convert_opportunity(cr, uid, ids, context=context) return self.pool.get('crm.lead').redirect_opportunity_view( cr, uid, opportunity_dict[ids[0]], context) # ---------------------------------------- # OpenChatter # ---------------------------------------- def _call_set_partner_send_note(self, cr, uid, ids, context=None): return self.message_post(cr, uid, ids, body=_("Partner has been <b>created</b>."), context=context)
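# Standalone sketch (not from the original module) of the duration computed in
# compute_duration() of crm.phonecall above: minutes elapsed between the stored call
# date and "now". DEFAULT_SERVER_DATETIME_FORMAT is the OpenERP server datetime
# format; the dates below are hypothetical.
from datetime import datetime

DEFAULT_SERVER_DATETIME_FORMAT = '%Y-%m-%d %H:%M:%S'
call_date = '2014-03-01 10:00:00'
now = datetime.strptime('2014-03-01 10:07:30', DEFAULT_SERVER_DATETIME_FORMAT)
duration = now - datetime.strptime(call_date, DEFAULT_SERVER_DATETIME_FORMAT)
assert duration.seconds / float(60) == 7.5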
def create(self, cr, uid, vals, context=None): search_domain = [] def make_search(model,column,inner_type,model_type,no=1,range=False): if inner_type == 'selection' and model_type == 'boolean' : fields = [x for x in self._columns.keys() if x.find(model) >= 0] v = [] for f in fields : if vals[f] : v.append(f.replace(str(column)+'_','')) if v : search_domain.append((column, 'in', v)) if inner_type == 'many2one' and model_type == 'boolean': fields = [x for x in self._columns.keys() if x.find(model) >= 0 and not '_change' in x] ids = [] for f in fields : if vals[f]: id = f.replace(model,'') ids.append(int(id)) if ids: search_domain.append((column, 'in', ids)) if inner_type == 'char' and model_type == 'char': fields = [x for x in self._columns.keys() if x.find(model) >= 0] for f in fields: if vals[f]: search_domain.append((column, 'ilike', vals[f])) if inner_type == 'many2one' and model_type == 'selection': fields = [x for x in self._columns.keys() if x.find(model) >= 0] for f in fields : if vals[f]: search_domain.append((column, '=', vals[f])) if range : orr = 0 fields = [x for x in self._columns.keys() if x.find(model) >= 0] for f in fields: #### if 'from' in f: if vals.get(f, False): orr = orr + 1 #### if orr > 1: while orr != 1: orr-=1 search_domain.append(('|')) tmp = no while tmp != 0: tmp-=1 for f in fields: from_val = 0.0 if str(tmp) in f: if model + '_from' in f and vals[f] : search_domain.append(('&')) search_domain.append((column, '>=', vals[f])) from_val = vals[f] break; for f in fields: to_val = 0.0 if str(tmp) in f: if model + '_to' in f and vals[f] : search_domain.append((column, '<=', vals[f])) to_val = vals[f] break; if from_val > to_val : raise osv.except_osv(_('Error!'), _('%s From value always be less then To Value !!')%(column.capitalize())) return True info = [ {'model':None,'_column_name':'product_name','label':'Stone ID','type':'char','name':'name','product_search_type':'char'}, {'model':None,'_column_name':'product_certificate_no','label':'Certificate No.','type':'char','name':'certificate_no','product_search_type':'char'}, {'model':None,'_column_name':'product_weight','no':3,'label':'Weight','type':'float','name':'weight','product_search_type':'char','range':True}, {'model':None,'_column_name':'product_price_caret','label':'PPC','no':1,'type':'float','name':'price_caret','product_search_type':'char','range':True}, {'model':None,'_column_name':'product_discount','label':'Discount','no':1,'type':'float','name':'discount','product_search_type':'char','range':True}, {'model':'product.shape','_column_name':'product_shape','label':'Shape','type':'many2one','name':'shape_id','product_search_type':'boolean'}, {'model':'product.color','_column_name':'product_color','label':'Color','type':'many2one','name':'color_id','product_search_type':'boolean'}, {'model':'product.fancy.color','_column_name':'product_fancy_color','label':'Fancy Color','type':'many2one','name':'fancy_color_id','product_search_type':'boolean'}, {'model':'product.fancy.color.intensity','_column_name':'product1_fancy_color_intensity','label':'Fancy Color Intensity','type':'many2one','name':'fancy_color_intensity','product_search_type':'boolean'}, {'model':'product.fancy.color.overtone','_column_name':'product2_fancy_color_overtone','label':'Fancy Color Overtone','type':'many2one','name':'fancy_color_overtone','product_search_type':'boolean'}, {'model':'product.clarity','_column_name':'product_clarity','label':'Clarity','type':'many2one','name':'clarity_id','product_search_type':'boolean'}, 
{'model':'product.cut','_column_name':'product_cut','label':'Cut','type':'many2one','name':'cut_id','product_search_type':'boolean'}, {'model':'product.polish','_column_name':'product_polish','label':'Polish','type':'many2one','name':'polish_id','product_search_type':'boolean'}, {'model':'product.symmetry','_column_name':'product_symmetry','label':'Symmetry','type':'many2one','name':'symmetry_id','product_search_type':'boolean'}, {'model':'product.fluorescence.intensity','_column_name':'product_fluorescence_intensity','label':'Fluorescence Intensity','type':'many2one','name':'fluorescence_intensity_id','product_search_type':'boolean'}, {'model':'product.lab','_column_name':'product_lab','label':'Lab','type':'many2one','name':'lab_id','product_search_type':'boolean'}, {'model':'stock.location','_column_name':'stock_location','label':'Location','type':'many2one','name':'location_id','product_search_type':'boolean','domain':[('usage','=','internal')]}, {'model':None,'_column_name':'product_status','label':'Status','type':'selection','name':'product_status','product_search_type':'boolean','selection_val':[('available','Available'), ('hold','Hold'), ('sold','Sold'), ('on_approval','On Approval'), ('on_consignment','On Consignment'), ('offline','Offline'), ('repair','Repair'), ('web_sale','Web Sale'),],}, ] for model_info in info : make_search(model_info['_column_name'],model_info['name'],model_info['type'],model_info['product_search_type'], no=model_info.get('no') or 1, range=model_info.get('range') or False) search_domain.append(('is_certified', '=', True)) p = self.pool.get('product.product').search(cr, uid, search_domain,context=context) product_ids = '' if p : product_ids = str(p).strip('[]') self._columns = {} self._columns['product_ids'] = fields.text('Product IDS') result = super(product_search_ept, self).create(cr, uid, {'product_ids':product_ids}, context=context) return result
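# Standalone sketch (not from the original module) of the many2one/boolean branch of
# make_search() above: checkbox helper columns such as 'product_shape12' encode the
# target record id in their suffix, and the checked ones collapse into a single
# ('shape_id', 'in', ids) domain leaf; the column names and ids below are hypothetical.
vals = {'product_shape12': True, 'product_shape27': False, 'product_shape3': True}

def boolean_columns_to_leaf(vals, prefix, column):
    ids = [int(f.replace(prefix, '')) for f, checked in vals.items()
           if f.startswith(prefix) and checked]
    return (column, 'in', sorted(ids)) if ids else None

assert boolean_columns_to_leaf(vals, 'product_shape', 'shape_id') == ('shape_id', 'in', [3, 12])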
class wizard(osv.TransientModel):
    """ A wizard to manage the modification of a protocollo document """
    _name = 'protocollo.modify.pec.wizard'
    _description = 'Modify Protocollo PEC Management'

    def set_before(self, before, label, value):
        before += label + ': ' + value + '\n'
        return before

    def set_after(self, after, label, value):
        after += label + ': ' + value + '\n'
        return after

    _columns = {
        'name': fields.char('Numero Protocollo', size=256, required=True, readonly=True),
        'sender_receivers': fields.one2many(
            'protocollo.sender_receiver.pec.wizard',
            'wizard_id',
            'Destinatari',
            required=True,
        ),
        'cause': fields.text('Motivo della Modifica', required=False),
        'protocol_sent': fields.boolean('Mail Inviata'),
    }

    def _default_name(self, cr, uid, context):
        protocollo = self.pool.get('protocollo.protocollo').browse(
            cr, uid, context['active_id'], {'skip_check': True})
        return protocollo.name

    def _default_sender_receivers(self, cr, uid, context):
        protocollo = self.pool.get('protocollo.protocollo').browse(
            cr, uid, context['active_id'], {'skip_check': True})
        res = []
        for send_rec in protocollo.sender_receivers:
            if send_rec.pec_errore_consegna_status or send_rec.pec_non_accettazione_status:
                res.append({
                    'sender_receiver_id': send_rec.id,
                    'name': send_rec.name,
                    'pec_mail': send_rec.pec_mail,
                })
        return res

    def _default_protocol_sent(self, cr, uid, context):
        protocollo = self.pool.get('protocollo.protocollo').browse(
            cr, uid, context['active_id'], {'skip_check': True})
        if protocollo.state == 'registered':
            return False
        return True

    _defaults = {
        'name': _default_name,
        'sender_receivers': _default_sender_receivers,
        'protocol_sent': _default_protocol_sent,
    }

    def _process_mail(self, cr, uid, ids, protocollo_obj, context=None):
        # if the protocollo is waiting or in error, re-trigger the PEC mail sending
        protocollo = protocollo_obj.browse(cr, uid, context['active_id'], {'skip_check': True})
        if protocollo.state in ('waiting', 'error'):
            wf_service = netsvc.LocalService('workflow')
            wf_service.trg_validate(uid, 'protocollo.protocollo', context['active_id'], 'resend', cr)
        return True

    def action_save(self, cr, uid, ids, context=None):
        wizard = self.browse(cr, uid, ids[0], context)
        vals = {}
        before = {}
        after = {}
        if not wizard.cause:
            raise osv.except_osv(
                _('Attenzione!'),
                _('Manca la causale della modifica!')
            )
        protocollo_obj = self.pool.get('protocollo.protocollo')
        sender_receiver_obj = self.pool.get('protocollo.sender_receiver')
        protocollo = protocollo_obj.browse(cr, uid, context['active_id'], {'skip_check': True})
        for send_rec in protocollo.sender_receivers:
            before[send_rec.id] = {'name': send_rec.name, 'mail': send_rec.pec_mail}
        for send_rec in wizard.sender_receivers:
            srvals = {'pec_mail': send_rec.pec_mail, 'to_resend': True}
            after[send_rec.sender_receiver_id.id] = {'name': send_rec.name, 'mail': send_rec.pec_mail}
            # after = self.set_after(after, '', 'pec_mail: ' + send_rec.pec_mail + ', ')
            sender_receiver_obj.write(cr, uid, [send_rec.sender_receiver_id.id], srvals)
        protocollo_obj.write(cr, uid, [context['active_id']], vals)
        action_class = "history_icon update"
        body = "<div class='%s'><ul>" % action_class
        for key, after_item in after.items():
            body = body + "<li>%s: <span style='color:#990000'> %s</span> -> <span style='color:#007ea6'> %s </span></li>" \
                % (after_item['name'], before[key]['mail'].encode("utf-8"), after_item['mail'].encode("utf-8"))
        body += "</ul></div>"
        post_vars = {
            'subject': "Modificato indirizzo PEC: %s" % wizard.cause,
            'body': body,
            'model': "protocollo.protocollo",
            'res_id': context['active_id'],
        }
        thread_pool = self.pool.get('protocollo.protocollo')
        thread_pool.message_post(cr, uid, context['active_id'], type="notification", context=context, **post_vars)
        self._process_mail(cr, uid, ids, protocollo_obj, context)
        return {'type': 'ir.actions.act_window_close'}

    def action_resend(self, cr, uid, ids, context=None):
        protocollo_obj = self.pool.get('protocollo.protocollo')
        self._process_mail(cr, uid, ids, protocollo_obj, context)
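# Hypothetical driver sketch, not part of the module: every default and action above
# reads the record to edit from context['active_id'], so the wizard must be opened
# with that key set.  The helper name and the example 'cause' text are made up.
def open_modify_pec_wizard(pool, cr, uid, protocollo_id, context=None):
    ctx = dict(context or {}, active_id=protocollo_id)
    wizard_obj = pool.get('protocollo.modify.pec.wizard')
    # Defaults ('name', 'sender_receivers', 'protocol_sent') are computed from active_id.
    wizard_id = wizard_obj.create(cr, uid, {'cause': 'Corrected a bounced PEC address'}, context=ctx)
    # action_save() rewrites the PEC addresses, logs the change in the chatter and
    # re-triggers the 'resend' workflow signal when the protocollo is waiting/in error.
    return wizard_obj.action_save(cr, uid, [wizard_id], context=ctx)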
        return dict.fromkeys(ids, image)  # ok to use .fromkeys() as the image is the same for all

    _columns = {
        'host': fields.char('Host', size=64, required=True),
        'port': fields.integer('Port', required=True),
        'ooo_restart_cmd': fields.char(
            'OOO restart command', size=256,
            help='Enter the shell command that will be executed to restart the '
                 'LibreOffice/OpenOffice background process. '
                 'The command will be executed as the user of the OpenERP server process, '
                 'so you may need to prefix it with sudo and configure your sudoers file '
                 'to have this command executed without password.'),
        'state': fields.selection([
            ('init', 'Init'),
            ('error', 'Error'),
            ('done', 'Done'),
        ], 'State', select=True, readonly=True),
        'msg': fields.text('Message', readonly=True),
        'error_details': fields.text('Error Details', readonly=True),
        'link': fields.char('Installation Manual', size=128,
                            help='Installation (Dependencies and Base system setup)', readonly=True),
        'config_logo': fields.function(_get_image_fn, string='Image', type='binary', method=True),
    }

    def default_get(self, cr, uid, fields, context=None):
        config_obj = self.pool.get('oo.config')
        data = super(aeroo_config_installer, self).default_get(cr, uid, fields, context=context)
        ids = config_obj.search(cr, 1, [], context=context)
        if ids:
            res = config_obj.read(cr, 1, ids[0], context=context)
            del res['id']
            data.update(res)
        return data
class acquirer(osv.Model):
    _name = 'portal.payment.acquirer'
    _description = 'Online Payment Acquirer'
    _columns = {
        'name': fields.char('Name', required=True),
        'form_template': fields.text('Payment form template (HTML)', translate=True, required=True),
        'visible': fields.boolean('Visible', help="Make this payment acquirer available in portal forms (Customer invoices, etc.)"),
    }
    _defaults = {
        'visible': True,
    }

    def render(self, cr, uid, id, object, reference, currency, amount, context=None, **kwargs):
        """ Renders the form template of the given acquirer as a mako template """
        if not isinstance(id, (int, long)):
            id = id[0]
        this = self.browse(cr, uid, id)
        if context is None:
            context = {}
        try:
            i18n_kind = _(object._description)  # may fail to translate, but at least we try
            result = MakoTemplate(this.form_template).render_unicode(
                object=object,
                reference=reference,
                currency=currency,
                amount=amount,
                kind=i18n_kind,
                quote=quote,
                # context kw would clash with mako internals
                ctx=context,
                format_exceptions=True)
            return result.strip()
        except Exception:
            _logger.exception("failed to render mako template value for payment.acquirer %s: %r",
                              this.name, this.form_template)
            return

    def _wrap_payment_block(self, cr, uid, html_block, amount, currency, context=None):
        if not html_block:
            link = '#action=account.action_account_config'
            payment_header = _('You can finish the configuration in the <a href="%s">Bank&Cash settings</a>') % link
            amount = _('No online payment acquirers configured')
            group_ids = self.pool.get('res.users').browse(cr, uid, uid, context=context).groups_id
            if any(group.is_portal for group in group_ids):
                return ''
        else:
            payment_header = _('Pay safely online')
            amount_str = float_repr(amount, self.pool.get('decimal.precision').precision_get(cr, uid, 'Account'))
            currency_str = currency.symbol or currency.name
            amount = u"%s %s" % ((currency_str, amount_str) if currency.position == 'before' else (amount_str, currency_str))
        result = """<div class="payment_acquirers">
                        <div class="payment_header">
                            <div class="payment_amount">%s</div>
                            %s
                        </div>
                        %%s
                    </div>""" % (amount, payment_header)
        return result % html_block

    def render_payment_block(self, cr, uid, object, reference, currency, amount, context=None, **kwargs):
        """ Renders all visible payment acquirer forms for the given rendering context, and
            return them wrapped in an appropriate HTML block, ready for direct inclusion
            in an OpenERP v7 form view """
        acquirer_ids = self.search(cr, uid, [('visible', '=', True)])
        if not acquirer_ids:
            return
        html_forms = []
        for this in self.browse(cr, uid, acquirer_ids):
            content = this.render(object, reference, currency, amount, context=context, **kwargs)
            if content:
                html_forms.append(content)
        html_block = '\n'.join(filter(None, html_forms))
        return self._wrap_payment_block(cr, uid, html_block, amount, currency, context=context)
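# Hypothetical setup sketch, not part of the module: registers a demo acquirer whose
# mako template only uses the variables documented by render() above (object, reference,
# currency, amount, kind, quote, ctx).  The acquirer name and the payment URL are made up.
def create_example_acquirer(pool, cr, uid, context=None):
    form_template = """
    <form action="https://payment.example.com/pay" method="post">
        <input type="hidden" name="reference" value="${reference}"/>
        <input type="hidden" name="amount" value="${amount}"/>
        <input type="hidden" name="currency" value="${currency.name}"/>
        <button type="submit">Pay ${kind} ${reference}</button>
    </form>
    """
    return pool.get('portal.payment.acquirer').create(cr, uid, {
        'name': 'Example acquirer',
        'visible': True,
        'form_template': form_template,
    }, context=context)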
def function_fn_write(model, cr, uid, id, field_name, field_value, fnct_inv_arg, context):
    """ just so CreatorCase.export can be used """
    pass

models = [
    ('boolean', fields.boolean()),
    ('integer', fields.integer()),
    ('float', fields.float()),
    ('decimal', fields.float(digits=(16, 3))),
    ('string.bounded', fields.char('unknown', size=16)),
    ('string.required', fields.char('unknown', size=None, required=True)),
    ('string', fields.char('unknown', size=None)),
    ('date', fields.date()),
    ('datetime', fields.datetime()),
    ('text', fields.text()),
    ('selection', fields.selection([(1, "Foo"), (2, "Bar"), (3, "Qux"), (4, '')])),
    # here use size=-1 to store the values as integers instead of strings
    ('selection.function', fields.selection(selection_fn, size=-1)),
    # just relate to an integer
    ('many2one', fields.many2one('export.integer')),
    ('one2many', fields.one2many('export.one2many.child', 'parent_id')),
    ('many2many', fields.many2many('export.many2many.other')),
    ('function', fields.function(function_fn, fnct_inv=function_fn_write, type="integer")),
    # related: specialization of fields.function, should work the same way
    # TODO: reference
]

for name, field in models:
    class NewModel(orm.Model):
        _name = 'export.%s' % name
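# For reference, the loop above generates one minimal model per tuple.  A hand-written
# equivalent of the first entry might look like the sketch below; the 'value' column
# name is an assumption, since the real body of NewModel is cut off in this fragment.
class export_boolean_sketch(orm.Model):
    _name = 'export.boolean.sketch'
    _columns = {
        'value': fields.boolean(),
    }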
class gamification_goal_definition(osv.Model):
    """Goal definition

    A goal definition contains the way to evaluate an objective.
    Each module wanting to set goals for its users needs to create a new
    gamification_goal_definition.
    """
    _name = 'gamification.goal.definition'
    _description = 'Gamification goal definition'

    def _get_suffix(self, cr, uid, ids, field_name, arg, context=None):
        res = dict.fromkeys(ids, '')
        for goal in self.browse(cr, uid, ids, context=context):
            if goal.suffix and not goal.monetary:
                res[goal.id] = goal.suffix
            elif goal.monetary:
                # use the current user's company currency
                user = self.pool.get('res.users').browse(cr, uid, uid, context)
                if goal.suffix:
                    res[goal.id] = "%s %s" % (user.company_id.currency_id.symbol, goal.suffix)
                else:
                    res[goal.id] = user.company_id.currency_id.symbol
            else:
                res[goal.id] = ""
        return res

    _columns = {
        'name': fields.char('Goal Definition', required=True, translate=True),
        'description': fields.text('Goal Description'),
        'monetary': fields.boolean('Monetary Value', help="The target and current value are defined in the company currency."),
        'suffix': fields.char('Suffix', help="The unit of the target and current values", translate=True),
        'full_suffix': fields.function(_get_suffix, type="char", string="Full Suffix", help="The currency and suffix field"),
        'computation_mode': fields.selection([
            ('manually', 'Recorded manually'),
            ('count', 'Automatic: number of records'),
            ('sum', 'Automatic: sum on a field'),
            ('python', 'Automatic: execute a specific Python code'),
        ], string="Computation Mode",
            help="Defines how the goals will be computed. The result of the operation will be stored in the field 'Current'.",
            required=True),
        'display_mode': fields.selection([
            ('progress', 'Progressive (using numerical values)'),
            ('boolean', 'Exclusive (done or not-done)'),
        ], string="Displayed as", required=True),
        'model_id': fields.many2one('ir.model', string='Model', help='The model object for the field to evaluate'),
        'model_inherited_model_ids': fields.related('model_id', 'inherited_model_ids', type="many2many", obj="ir.model",
                                                    string="Inherited models", readonly="True"),
        'field_id': fields.many2one('ir.model.fields', string='Field to Sum', help='The field containing the value to evaluate'),
        'field_date_id': fields.many2one('ir.model.fields', string='Date Field', help='The date to use for the time period evaluated'),
        'domain': fields.char("Filter Domain",
            help="Domain for filtering records. General rule, not user depending, e.g. [('state', '=', 'done')]. The expression can contain reference to 'user' which is a browse record of the current user if not in batch mode.",
            required=True),
        'batch_mode': fields.boolean('Batch Mode', help="Evaluate the expression in batch instead of once for each user"),
        'batch_distinctive_field': fields.many2one('ir.model.fields', string="Distinctive field for batch user",
            help="In batch mode, this indicates which field distinguishes one user from the other, e.g. user_id, partner_id..."),
        'batch_user_expression': fields.char("Evaluated expression for batch mode",
            help="The value to compare with the distinctive field. The expression can contain reference to 'user' which is a browse record of the current user, e.g. user.id, user.partner_id.id..."),
        'compute_code': fields.text('Python Code',
            help="Python code to be executed for each user. 'result' should contain the new current value. The evaluated user can be accessed through object.user_id."),
        'condition': fields.selection([
            ('higher', 'The higher the better'),
            ('lower', 'The lower the better')
        ], string='Goal Performance',
            help='A goal is considered as completed when the current value is compared to the value to reach',
            required=True),
        'action_id': fields.many2one('ir.actions.act_window', string="Action",
            help="The action that will be called to update the goal value."),
        'res_id_field': fields.char("ID Field of user",
            help="The field name on the user profile (res.users) containing the value for res_id for action."),
    }

    _defaults = {
        'condition': 'higher',
        'computation_mode': 'manually',
        'domain': "[]",
        'monetary': False,
        'display_mode': 'progress',
    }

    def number_following(self, cr, uid, model_name="mail.thread", context=None):
        """Return the number of 'model_name' objects the user is following

        The model specified in 'model_name' must inherit from mail.thread
        """
        user = self.pool.get('res.users').browse(cr, uid, uid, context=context)
        return self.pool.get('mail.followers').search(
            cr, uid,
            [('res_model', '=', model_name), ('partner_id', '=', user.partner_id.id)],
            count=True, context=context)

    def _check_domain_validity(self, cr, uid, ids, context=None):
        # take admin as it should always be present
        superuser = self.pool['res.users'].browse(cr, uid, SUPERUSER_ID, context=context)
        for definition in self.browse(cr, uid, ids, context=context):
            if definition.computation_mode not in ('count', 'sum'):
                continue
            obj = self.pool[definition.model_id.model]
            try:
                domain = safe_eval(definition.domain, {'user': superuser})
                # dummy search to make sure the domain is valid
                obj.search(cr, uid, domain, context=context, count=True)
            except (ValueError, SyntaxError), e:
                msg = e.message or (e.msg + '\n' + e.text)
                raise osv.except_osv(_('Error!'), _("The domain for the definition %s seems incorrect, please check it.\n\n%s" % (definition.name, msg)))
        return True
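# Hypothetical example, not shipped with the module: a 'count' goal definition that
# counts the contacts created by the evaluated user.  The definition name and the
# target model are made up; the domain follows the help text above ('user' is the
# browse record of the evaluated user when not in batch mode).
def create_contact_count_definition(pool, cr, uid, context=None):
    model_id = pool.get('ir.model').search(cr, uid, [('model', '=', 'res.partner')], context=context)[0]
    # _check_domain_validity() above would dry-run this domain with the superuser
    # to make sure it evaluates and searches cleanly.
    return pool.get('gamification.goal.definition').create(cr, uid, {
        'name': 'New contacts created',
        'computation_mode': 'count',
        'display_mode': 'progress',
        'condition': 'higher',
        'model_id': model_id,
        'domain': "[('create_uid', '=', user.id)]",
        'batch_mode': False,
    }, context=context)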
        'styles_mode': fields.selection([
            ('default', 'Not used'),
            ('global', 'Global'),
            ('specified', 'Specified'),
        ], string='Stylesheet'),
        'stylesheet_id': fields.many2one('report.stylesheets', 'Template Stylesheet'),
        'preload_mode': fields.selection([
            ('static', _('Static')),
            ('preload', _('Preload')),
        ], 'Preload Mode'),
        'tml_source': fields.selection([
            ('database', 'Database'),
            ('file', 'File'),
            ('parser', 'Parser'),
        ], 'Template source', select=True),
        'parser_def': fields.text('Parser Definition'),
        'parser_loc': fields.char('Parser location', size=128,
                                  help="Path to the parser file. The path must start with the module name,\ne.g. {module name}/{path to the parser.py file}"),
        'parser_state': fields.selection([
            ('default', _('Default')),
            ('def', _('Definition')),
            ('loc', _('Location')),
        ], 'State of Parser', select=True),
        'in_format': fields.selection(_get_in_mimetypes, 'Template Mime-type'),
        'out_format': fields.many2one('report.mimetypes', 'Output Mime-type'),
        'report_sxw_content': fields.function(_report_content, fnct_inv=_report_content_inv, method=True, type='binary', string='SXW content'),
        'active': fields.boolean('Active', help='Disables the report if unchecked.'),
        'report_wizard': fields.boolean('Report Wizard'),
        'copies': fields.integer('Number of Copies'),
        'fallback_false': fields.boolean('Disable Format Fallback'),
class project_description_wizard(orm.TransientModel):
    _name = 'project.description.wizard'

    def _get_project_id(self, cr, uid, context=None):
        # Retrieve the id of the project from context
        return context.get('active_id', False)

    def _get_value_ids(self, cr, uid, ids, fieldname, args, context=None):
        project_id = context.get('active_id')
        if not project_id:
            return {id: [] for id in ids}
        query = '''SELECT rel.value_id
                   FROM project_property_to_value AS rel,
                        compassion_translated_value AS val
                   WHERE rel.project_id = %s
                   AND rel.value_id = val.id
                   AND val.is_tag = false
                   ORDER BY val.value_en, val.property_name'''
        # Let the cursor substitute the parameter instead of interpolating it into the SQL.
        cr.execute(query, (project_id,))
        value_ids = [x[0] for x in cr.fetchall()]
        return {id: value_ids for id in ids}

    def _get_default_ids(self, cr, uid, context=None):
        return self._get_value_ids(cr, uid, [0], '', '', context)[0]

    def _write_values(self, cr, uid, id, name, value, inv_arg, context=None):
        value_obj = self.pool.get('compassion.translated.value')
        for line in value:
            if line[0] == 1:  # one2many update
                value_id = line[1]
                value_obj.write(cr, uid, [value_id], line[2])
        return True

    def _get_needs(self, cr, uid, lang, context):
        """ Returns the needs description of the given language. It will
        either generate it from a pattern or retrieve the last saved
        description if one exists.
        """
        project = self.pool.get('compassion.project').browse(
            cr, uid, context.get('active_id'), context)
        res = False
        if lang == 'fr':
            res = project.needs_fr or \
                Project_description_fr._get_needs_pattern_fr(cr, uid, project, context)
        elif lang == 'de':
            res = project.needs_de or \
                Project_description_de._get_needs_pattern_de(cr, uid, project, context)
        elif lang == 'it':
            res = project.needs_it or \
                Project_description_it._get_needs_pattern_it(cr, uid, project, context)
        return res + '\n\n'  # Fix for display of the textfield

    def _get_desc(self, cr, uid, lang, context):
        project = self.pool.get('compassion.project').browse(
            cr, uid, context.get('active_id'), context)
        res = False
        if lang == 'fr':
            res = Project_description_fr.gen_fr_translation(
                cr, uid, project, context)
        elif lang == 'de':
            res = Project_description_de.gen_de_translation(
                cr, uid, project, context)
        elif lang == 'it':
            res = Project_description_it.gen_it_translation(
                cr, uid, project, context)
        elif lang == 'en':
            res = project.description_en
        return res + '\n\n'  # Fix for display of the textfield

    _columns = {
        'project_id': fields.many2one('compassion.project', 'Project code'),
        # Complete descriptions
        'keep_desc_fr': fields.boolean(_('Update french description')),
        'keep_desc_de': fields.boolean(_('Update german description')),
        'keep_desc_it': fields.boolean(_('Update italian description')),
        'desc_fr': fields.html(_('French description')),
        'desc_de': fields.html(_('German description')),
        'desc_it': fields.html(_('Italian description')),
        'desc_en': fields.text(_('English description')),
        # Needs descriptions
        'needs_desc_fr': fields.text(_('French needs description')),
        'needs_desc_de': fields.text(_('German needs description')),
        'needs_desc_it': fields.text(_('Italian needs description')),
        'project_property_value_ids': fields.function(
            _get_value_ids, type='one2many',
            relation='compassion.translated.value', fnct_inv=_write_values),
    }

    _defaults = {
        'project_id': _get_project_id,
        'desc_fr': lambda self, cr, uid, context: self._get_desc(cr, uid, 'fr', context),
        'desc_de': lambda self, cr, uid, context: self._get_desc(cr, uid, 'de', context),
        'desc_it': lambda self, cr, uid, context: self._get_desc(cr, uid, 'it', context),
        'desc_en': lambda self, cr, uid, context: self._get_desc(cr, uid, 'en', context),
        'needs_desc_fr': lambda self, cr, uid, context: self._get_needs(cr, uid, 'fr', context),
        'needs_desc_de': lambda self, cr, uid, context: self._get_needs(cr, uid, 'de', context),
        'needs_desc_it': lambda self, cr, uid, context: self._get_needs(cr, uid, 'it', context),
        'project_property_value_ids': lambda self, cr, uid, context: self._get_default_ids(cr, uid, context),
    }

    def generate_descriptions(self, cr, uid, ids, context=None):
        wizard = self.browse(cr, uid, ids, context)[0]
        project = wizard.project_id
        desc_fr = Project_description_fr.gen_fr_translation(
            cr, uid, project, context)
        desc_de = Project_description_de.gen_de_translation(
            cr, uid, project, context)
        desc_it = Project_description_it.gen_it_translation(
            cr, uid, project, context)
        self.write(cr, uid, ids, {
            'desc_fr': desc_fr,
            'desc_de': desc_de,
            'desc_it': desc_it,
            'desc_en': wizard.desc_en,
        }, context)
        return {
            'name': _('Descriptions generation'),
            'type': 'ir.actions.act_window',
            'res_model': self._name,
            'view_mode': 'auto_description_form',
            'view_type': 'form',
            'context': context,
            'target': 'new',
        }

    def validate_descriptions(self, cr, uid, ids, context=None):
        """ Save the selected descriptions in the project. """
        wizard = self.browse(cr, uid, ids, context)[0]
        vals = dict()
        p = re.compile(r'<.*?>')  # Remove HTML markers
        if wizard.keep_desc_fr:
            vals['description_fr'] = p.sub(
                '', wizard.desc_fr + wizard.needs_desc_fr.strip('\n'))
            vals['needs_fr'] = wizard.needs_desc_fr
        if wizard.keep_desc_de:
            vals['description_de'] = p.sub(
                '', wizard.desc_de + wizard.needs_desc_de.strip('\n'))
            vals['needs_de'] = wizard.needs_desc_de
        if wizard.keep_desc_it:
            vals['description_it'] = p.sub(
                '', wizard.desc_it + wizard.needs_desc_it.strip('\n'))
            vals['needs_it'] = wizard.needs_desc_it
        if not vals:
            raise orm.except_orm(
                'ValueError',
                _('No description selected. Please select one or click cancel'
                  ' to abort current task.'))
        wizard.project_id.write(vals)
        return {
            'type': 'ir.actions.client',
            'tag': 'reload',
        }
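# Standalone illustration of the tag-stripping regex used in validate_descriptions()
# above; the sample HTML string is made up.
if __name__ == '__main__':
    import re
    pattern = re.compile(r'<.*?>')
    sample = "<p>The project supports <b>250</b> children.</p>"
    print pattern.sub('', sample)  # -> The project supports 250 children.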
        return self.write(cr, uid, id, {"file_db_store": value}, context=context)

    _columns = {
        "name": fields.char("Image Title", size=64),
        "filename": fields.char("Filename", size=64),
        "extension": fields.char("File extension", oldname="extention"),
        "link": fields.boolean(
            "Link?",
            help="Images can be linked from files on your file system "
                 "or from a remote location (preferred)"),
        "file_db_store": fields.binary("Image stored in database"),
        "file": fields.function(
            _get_image,
            fnct_inv=_set_image,
            type="binary",
            string="File",
            filters="*.png,*.jpg,*.gif"),
        "url": fields.char("File Location"),
        "url_big": fields.char("File Location Image Size Big"),
        "url_medium": fields.char("File Location Image Size Medium"),
        "url_small": fields.char("File Location Image Size Small"),
        "comments": fields.text("Comments"),
        "product_id": fields.many2one("product.product", "Product"),
    }
    _defaults = {"link": True}
    _sql_constraints = [
        (
            "uniq_name_product_id",
            "UNIQUE(product_id, name)",
            _("A product can have only one image with the same name"),
        )
    ]
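# The getter behind the 'file' function field above is not part of this fragment.
# The sketch below is a hypothetical version of it, assuming linked images are fetched
# from 'url' while the others come from 'file_db_store'; the urllib2 fetch is an
# assumption, not the module's actual code.
def _get_image_sketch(self, cr, uid, ids, field_name, args, context=None):
    import base64
    import urllib2
    res = {}
    for image in self.browse(cr, uid, ids, context=context):
        if image.link and image.url:
            try:
                res[image.id] = base64.b64encode(urllib2.urlopen(image.url).read())
            except Exception:
                res[image.id] = False
        else:
            res[image.id] = image.file_db_store or False
    return res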
logger = logging.getLogger(__name__)


class pattern_model(orm.Model):
    _name = 'tk.json.pattern'

    def validate_json(self, cr, uid, ids, context=None):
        if not ids:
            return
        if not isinstance(ids, list):
            ids = [ids]
        for pattern in self.browse(cr, uid, ids):
            try:
                simplejson.loads(pattern.json_pattern)
            except Exception, e:
                logger.error(e)
                return False
        return True

    _columns = {
        'name': fields.char('Name', size=128, required=False),
        'model_id': fields.many2one('ir.model', 'Model', required=False),
        'json_pattern': fields.text('JSON Pattern'),
    }

pattern_model()
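# Quick standalone check of the same validation logic, with made-up pattern strings;
# it reuses the simplejson import and the module logger already set up above.
def _is_valid_pattern(json_pattern):
    try:
        simplejson.loads(json_pattern)
        return True
    except Exception, e:
        logger.error(e)
        return False

# _is_valid_pattern('{"partner_id": "name", "lines": []}')  -> True
# _is_valid_pattern('{not json}')                           -> False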