def patch_addons_base():
    """Backport missing _columns and methods for the "base" addon.

    Loads the ``base`` addon, then monkey-patches two gaps present in
    older releases: a ``logo_web`` binary column on ``res.company`` and a
    stub ``get_needaction_data`` method on ``ir.ui.menu``.
    """
    openerp.modules.load_openerp_module('base')

    # Stub expected by newer web clients; reports no pending actions.
    def get_needaction_data(self, cr, uid, ids, context=None):
        return {}

    menu_model = openerp.addons.base.ir.ir_ui_menu.ir_ui_menu
    menu_model.get_needaction_data = get_needaction_data

    company_model = openerp.addons.base.res.res_company.res_company
    company_model._columns['logo_web'] = fields.binary("Logo Web")
class cursos_ihce_ejecutivo(osv.osv_memory):
    """Wizard that builds an .xls report of the courses given at the IHCE
    between two dates and attaches it to the wizard record for download.
    """
    _name = "cursos.ihce.ejecutivo"
    _inherit = ['mail.thread', 'ir.needaction_mixin']
    _columns = {
        'name': fields.text('Instrucciones'),
        'date': fields.date('Fecha de reporte'),
        'date_ini': fields.date('Fecha Inicio'),
        'date_fin': fields.date('Fecha Final'),
        'xls_file_name': fields.char('xls file name', size=128),
        'xls_file': fields.binary('Archivo', readonly=True),
        'user_id': fields.many2one('res.users', "Responsable"),
    }
    _defaults = {
        'name': "Se creara un archivo .xls con el reporte seleccionado.",
        'date': lambda *a: time.strftime('%Y-%m-%d'),
        'user_id': lambda obj, cr, uid, context: uid,
    }

    def action_create_report(self, cr, uid, ids, context=None):
        """Create the spreadsheet, attach it via ``ir.attachment`` and
        store it on the wizard record so the user can download it.

        :return: True
        """
        # Build the workbook with a single sheet.
        workbook = xlwt.Workbook(encoding='utf-8')
        sheet_principal = workbook.add_sheet('Cursos IHCE Ejecutivo',
                                             cell_overwrite_ok=True)
        # Fill in the main sheet.
        self.create_principal_sheet(cr, uid, ids, sheet_principal, context)
        # File name and temporary path where the workbook is written.
        name = "Cursos IHCE Ejecutivo.xls"
        root = "/tmp/" + str(name)
        workbook.save(root)
        sprint_file = base64.b64encode(open("/tmp/%s" % (name), 'rb').read())
        # Attach the generated file to this wizard record.
        data_attach = {
            'name': name,
            'datas': sprint_file,
            'datas_fname': name,
            'description': 'Reporte Cursos IHCE Ejecutivo',
            'res_model': 'cursos.ihce.ejecutivo',
            'res_id': ids[0],
        }
        self.pool.get('ir.attachment').create(cr, uid, data_attach,
                                              context=context)
        # Keep the file on the record so it can be downloaded.
        self.write(cr, uid, ids, {'xls_file': sprint_file,
                                  'xls_file_name': name})
        return True

    def create_principal_sheet(self, cr, uid, ids, sheet, context=None):
        """Fill *sheet* with the course data for the selected date range.

        BUGFIX: ``context`` used to default to a mutable ``{}`` shared
        across calls; it now defaults to None.

        :return: the filled xlwt sheet
        """
        data = self.browse(cr, uid, ids[0], context=context)
        # Cell styles: title, green header, plain centered cell.
        styleT = xlwt.easyxf((
            'font: height 260, bold 1, color black; alignment: horizontal center; '
        ))
        style = xlwt.easyxf((
            'font: height 180, bold 1, color white; alignment: horizontal center; pattern: pattern solid, fore_colour green;'
        ))
        style_n = xlwt.easyxf(
            ('font: height 160, color black; alignment: horizontal center'))
        # Report header.
        sheet.write_merge(0, 0, 0, 7,
                          ("SUBPROGRAMA DE FORMACIÓN DE CAPITAL HUMANO"),
                          styleT)
        sheet.write_merge(1, 1, 0, 7, ("Cursos impartidos en el IHCE "),
                          styleT)
        sheet.write_merge(
            2, 2, 0, 7,
            ("Reporte correspondiente del " + time.strftime(
                '%d-%m-%Y', time.strptime(data.date_ini, '%Y-%m-%d')) +
             " al " + time.strftime('%d-%m-%Y',
                                    time.strptime(data.date_fin, '%Y-%m-%d'))),
            styleT)
        # Column titles.
        sheet.write(6, 1, 'No. Cursos', style)
        sheet.write(6, 2, 'Horas', style)
        sheet.write(6, 3, 'Asistentes', style)
        sheet.write(6, 4, 'Hombres', style)
        sheet.write(6, 5, 'Mujeres', style)
        sheet.write(9, 0, 'No.', style)
        sheet.write_merge(9, 9, 1, 4, 'Curso/Taller', style)
        sheet.write_merge(9, 9, 5, 6, 'Institución', style)
        i = 10          # current spreadsheet row
        a = 1           # running course number
        horas = 0
        asistentes = 0
        mujeres = 0
        hombres = 0
        courses_ids = self.pool.get('date.courses').search(
            cr, uid, [('state', '=', 'done'), ('date', '>=', data.date_ini),
                      ('date', '<=', data.date_fin),
                      ('dependence', '=', 'ihce')],
            order='date ASC')
        sheet.write(7, 1, len(courses_ids), style_n)
        for row in self.pool.get('date.courses').browse(
                cr, uid, courses_ids, context):
            sheet.write(i, 0, a, style_n)
            sheet.write_merge(i, i, 1, 4,
                              (row.courses_id.name.encode('utf-8')) or '',
                              style_n)
            sheet.write_merge(i, i, 5, 6, row.supplier_id.name or '', style_n)
            horas += row.hours_training
            asistentes += row.number_attendees
            # Tally attendee gender from registered company contacts.
            for line in self.pool.get('company.line').search(
                    cr, uid, [('course_id', '=', row.id)]):
                li = self.pool.get('company.line').browse(cr, uid, line)
                ro = self.pool.get('companies.ihce').browse(
                    cr, uid, li.contact_id.id, context)
                if ro.sexo == 'M':
                    hombres += 1
                elif ro.sexo == 'F':
                    mujeres += 1
            # Tally attendee gender from walk-in persons.
            for line in self.pool.get('list.new.persons').search(
                    cr, uid, [('course_id', '=', row.id)]):
                li = self.pool.get('list.new.persons').browse(cr, uid, line)
                if li.sexo == 'M':
                    hombres += 1
                elif li.sexo == 'F':
                    mujeres += 1
            a += 1
            i += 1
        # Totals row.
        sheet.write(7, 2, horas, style_n)
        sheet.write(7, 3, asistentes, style_n)
        sheet.write(7, 4, hombres, style_n)
        sheet.write(7, 5, mujeres, style_n)
        return sheet
class upload_file_meeting(osv.osv_memory):
    """Wizard used to upload the minutes (PDF) of a meeting, replacing any
    previous file, and mark the meeting as done.
    """
    _name = 'upload.file.meeting.wizard'
    _description = 'Valida XML Factura'

    def onchange_meeting(self, cr, uid, ids, meeting_id, context=None):
        """Check whether a file was already attached to the meeting and
        propose a default file name built from the meeting and its project.
        """
        file_obj = self.pool.get('project.phase.file.meeting')
        meeting_obj = self.pool.get('crm.meeting')
        if context is None:
            context = {}
        res = False
        fname = ''
        if meeting_id:
            file_ids = file_obj.search(
                cr, uid, [('meeting_id', '=', meeting_id or False)])
            if file_ids:
                res = True
            # Build the suggested file name.
            meeting = meeting_obj.browse(cr, uid, meeting_id, context=context)
            fname = "%s-%s.pdf" % (meeting.name, meeting.project_id.code)
        return {'value': {'check_files': res, 'file_name': fname}}

    def get_week_next(self, cr, uid, num_week=1, context=None):
        """Return the next ISO weekday number, cycling 1..7."""
        if num_week == 7:
            num_week = 1
        else:
            num_week += 1
        return num_week

    def get_timezone(self, cr, uid, context=None):
        """Return the timezone name from the context, falling back to the
        user's configured timezone, and finally to 'UTC'.
        """
        if context is None:
            context = {}
        # An empty/missing context tz is treated as "not set".
        tz = context.get('tz') or 'UTC'
        if tz == 'UTC':
            zone = self.pool.get('res.users').browse(cr, uid, uid,
                                                     context=context).tz
            # Only override when the user actually has a timezone.
            if zone:
                tz = zone
        return tz

    def get_date_next(self, cr, uid, date, value=0, unit='hours',
                      context=None):
        """Add *value* *unit*s to *date* (a '%Y-%m-%d %H:%M:%S' UTC string),
        doing the arithmetic in the configured timezone, and return the
        result formatted back in UTC.
        """
        tz_utc = pytz.utc
        tz = self.get_timezone(cr, uid, context=context)
        if not date:
            # BUGFIX: the original called datetime.strftime(fmt) unbound
            # (TypeError); default to the current time instead.
            date = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        datet = datetime.strptime(date,
                                  '%Y-%m-%d %H:%M:%S').replace(tzinfo=tz_utc)
        if tz != 'UTC':
            datet = datet.astimezone(timezone(tz))
        if unit == 'hours':
            datet = datet + timedelta(hours=value)
        elif unit == 'days':
            datet = datet + timedelta(days=value)
        elif unit == 'minutes':
            datet = datet + timedelta(minutes=value)
        # Convert back to UTC before formatting.
        datet = datet.astimezone(tz_utc)
        return datet.strftime('%Y-%m-%d %H:%M:%S')

    def get_config_week(self, cr, uid, context=None):
        """Base configuration of working days: Mon-Fri worked (True),
        Sat/Sun not worked (False); keys are ISO weekday numbers.
        """
        return {
            'week1': True,
            'week2': True,
            'week3': True,
            'week4': True,
            'week5': True,
            'week6': False,
            'week7': False,
        }

    def get_date_next_zone(self, cr, uid, date, value=0, unit='hours',
                           context=None):
        """Compute the next date skipping non-working days, taking the
        configured timezone into account.

        For 'hours'/'days' the delay is first converted to hours, then
        each non-working day encountered adds 24 extra hours; finally, if
        the resulting date itself lands on a non-working day it is pushed
        forward to the next working day.
        """
        # Working-day configuration for the week.
        config_week = self.get_config_week(cr, uid, context=context)
        hours_date = 24
        next_value = 0
        validate_hours = 0
        tz_utc = pytz.utc
        tz = self.get_timezone(cr, uid, context=context)
        if unit != 'minutes':
            # Determine the weekday of the starting date in local time.
            datet = datetime.strptime(
                date, '%Y-%m-%d %H:%M:%S').replace(tzinfo=tz_utc)
            if tz != 'UTC':
                datet = datet.astimezone(timezone(tz))
            num_week = datetime.isoweekday(datet)
            # Total delay expressed in hours.
            total_hours = value
            if unit == 'days':
                total_hours = total_hours * hours_date
            validate_week = 'week%s' % (num_week, )
            # Walk day by day, accumulating 24h of penalty per day off.
            while validate_hours != total_hours:
                if not config_week.get(validate_week, False):
                    # Non-working day: push the deadline by a full day.
                    next_value += hours_date
                else:
                    if (total_hours - validate_hours) < hours_date:
                        # Less than a day left: consume the remainder.
                        validate_hours = total_hours
                    else:
                        # Working day: consume 24h of the delay.
                        validate_hours += hours_date
                num_week = self.get_week_next(cr, uid, num_week,
                                              context=context)
                validate_week = 'week%s' % (num_week, )
            # Delegate the actual date arithmetic.
            res = self.get_date_next(cr, uid, date,
                                     value=(total_hours + next_value),
                                     unit='hours', context=context)
        else:
            res = self.get_date_next(cr, uid, date, value=value, unit=unit,
                                     context=context)
        # If the computed date falls on a non-working day, push it forward.
        datet = datetime.strptime(res,
                                  '%Y-%m-%d %H:%M:%S').replace(tzinfo=tz_utc)
        if tz != 'UTC':
            datet = datet.astimezone(timezone(tz))
        num_week = datetime.isoweekday(datet)
        next_value = 0
        validate_week = 'week%s' % (num_week, )
        while not config_week.get(validate_week, False):
            next_value += hours_date
            num_week = self.get_week_next(cr, uid, num_week, context=context)
            validate_week = 'week%s' % (num_week, )
        if next_value > 0:
            datet = datet + timedelta(hours=next_value)
            datet = datet.astimezone(tz_utc)
            res = datet.strftime('%Y-%m-%d %H:%M:%S')
        return res

    def import_file(self, cr, uid, ids, context=None):
        """Upload the file that completes the deliverable: validates the
        user type and the upload deadline, replaces any previous file,
        stores it on the meeting and closes the meeting.

        :raises osv.except_osv: when the user is not allowed to upload or
            the time limit has been exceeded.
        :return: True
        """
        file_obj = self.pool.get('project.phase.file.meeting')
        meeting_obj = self.pool.get('crm.meeting')
        # Current date/time used to validate the deadline.
        cur_date = time.strftime('%Y-%m-%d %H:%M:%S')
        wizard = self.browse(cr, uid, ids[0], context=context)
        # Validate which kind of user is uploading the file.
        user = self.pool.get('res.users').browse(cr, uid, uid,
                                                 context=context)
        if user.type_contact == 'emp':
            raise osv.except_osv(
                _('Error!'),
                _("No esta autorizado para agregar minutas sobre reuniones!"))
        # Contractors must upload within the allowed window after the
        # meeting (52h plus the meeting duration, skipping days off).
        if user.type_contact == 'con':
            date = self.get_date_next_zone(
                cr, uid, wizard.meeting_id.date,
                value=(52 + wizard.meeting_id.duration), unit='hours',
                context=context)
            if cur_date > date:
                raise osv.except_osv(
                    _('Error!'),
                    _("Se ha excedido el tiempo limite para la carga del archivo!"
                      ))
        # Remove files previously attached for this meeting.
        file_ids = file_obj.search(
            cr, uid, [('meeting_id', '=', wizard.meeting_id.id or False)])
        if file_ids:
            file_obj.unlink(cr, uid, file_ids)
        # Create the new file record.
        file_obj.create(
            cr, uid, {
                'name': wizard.file_name,
                'file': wizard.file,
                'meeting_id': wizard.meeting_id.id or False,
                'phase_id': wizard.phase_id.id or False
            },
            context=context)
        # Store the file on the meeting itself.
        meeting_obj.write(cr, uid, [wizard.meeting_id.id], {
            'file_name': wizard.file_name,
            'file': wizard.file,
        }, context=context)
        # Close the meeting.
        meeting_obj.meeting_done(cr, uid, [wizard.meeting_id.id or False],
                                 context=context)
        return True

    _columns = {
        'meeting_id': fields.many2one('crm.meeting', 'Reunion',
                                      readonly=True, select=1,
                                      ondelete='cascade'),
        'phase_id': fields.many2one('project.phase', 'Fase', readonly=True,
                                    select=1, ondelete='cascade'),
        'file_name': fields.char('Nombre Archivo'),
        'file': fields.binary('Archivo', required=True,
                              help='Archivo a actualizar', filters="*.pdf"),
        'check_files': fields.boolean('Ya adjuntado'),
    }
    _defaults = {'file_name': 'minuta.pdf', 'check_files': False}
class product_product(osv.osv):
    """Product variant model, delegating shared data to product.template."""

    def view_header_get(self, cr, uid, view_id, view_type, context=None):
        """Prefix the view header with the category name when the view is
        opened from a product category.
        """
        if context is None:
            context = {}
        res = super(product_product, self).view_header_get(
            cr, uid, view_id, view_type, context)
        if (context.get('categ_id', False)):
            return _('Products: ') + self.pool.get('product.category').browse(
                cr, uid, context['categ_id'], context=context).name
        return res

    def _product_price(self, cr, uid, ids, name, arg, context=None):
        """Function field: price computed from the pricelist in context
        (quantity/partner aware); 0.0 when no pricelist or on failure.
        """
        res = {}
        if context is None:
            context = {}
        quantity = context.get('quantity') or 1.0
        pricelist = context.get('pricelist', False)
        partner = context.get('partner', False)
        if pricelist:
            for id in ids:
                try:
                    price = self.pool.get('product.pricelist').price_get(
                        cr, uid, [pricelist], id, quantity,
                        partner=partner, context=context)[pricelist]
                # BUGFIX: was a bare except:, which also swallowed
                # KeyboardInterrupt/SystemExit.
                except Exception:
                    price = 0.0
                res[id] = price
        for id in ids:
            res.setdefault(id, 0.0)
        return res

    # Stubbed stock-availability function fields (stock addon not loaded):
    # every variant reports 0.0 regardless of states/moves.
    def _get_product_available_func(states, what):
        def _product_available(self, cr, uid, ids, name, arg, context=None):
            return {}.fromkeys(ids, 0.0)
        return _product_available
    _product_qty_available = _get_product_available_func(
        ('done',), ('in', 'out'))
    _product_virtual_available = _get_product_available_func(
        ('confirmed', 'waiting', 'assigned', 'done'), ('in', 'out'))
    _product_outgoing_qty = _get_product_available_func(
        ('confirmed', 'waiting', 'assigned'), ('out',))
    _product_incoming_qty = _get_product_available_func(
        ('confirmed', 'waiting', 'assigned'), ('in',))

    def _product_lst_price(self, cr, uid, ids, name, arg, context=None):
        """Function field: public price = list price (converted to the
        UoM in context when given) * price_margin + price_extra.
        """
        res = {}
        # BUGFIX: guard against context=None before the 'uom' in context
        # membership test below.
        if context is None:
            context = {}
        product_uom_obj = self.pool.get('product.uom')
        for id in ids:
            res.setdefault(id, 0.0)
        for product in self.browse(cr, uid, ids, context=context):
            if 'uom' in context:
                uom = product.uos_id or product.uom_id
                res[product.id] = product_uom_obj._compute_price(
                    cr, uid, uom.id, product.list_price, context['uom'])
            else:
                res[product.id] = product.list_price
            res[product.id] = (res[product.id] or 0.0) * \
                (product.price_margin or 1.0) + product.price_extra
        return res

    def _get_partner_code_name(self, cr, uid, ids, product, partner_id,
                               context=None):
        """Return the code/name/variants to display for *product*, using
        the supplier-specific code/name when *partner_id* is a supplier.
        """
        for supinfo in product.seller_ids:
            if supinfo.name.id == partner_id:
                return {'code': supinfo.product_code or product.default_code,
                        'name': supinfo.product_name or product.name,
                        'variants': ''}
        res = {'code': product.default_code, 'name': product.name,
               'variants': product.variants}
        return res

    def _product_code(self, cr, uid, ids, name, arg, context=None):
        """Function field: partner-aware internal reference."""
        res = {}
        if context is None:
            context = {}
        for p in self.browse(cr, uid, ids, context=context):
            res[p.id] = self._get_partner_code_name(
                cr, uid, [], p, context.get('partner_id', None),
                context=context)['code']
        return res

    def _product_partner_ref(self, cr, uid, ids, name, arg, context=None):
        """Function field: "[code] name - variants" display string,
        partner-aware.
        """
        res = {}
        if context is None:
            context = {}
        for p in self.browse(cr, uid, ids, context=context):
            data = self._get_partner_code_name(
                cr, uid, [], p, context.get('partner_id', None),
                context=context)
            if not data['variants']:
                data['variants'] = p.variants
            if not data['code']:
                data['code'] = p.code
            if not data['name']:
                data['name'] = p.name
            res[p.id] = (data['code'] and ('[' + data['code'] + '] ') or '') + \
                (data['name'] or '') + \
                (data['variants'] and (' - ' + data['variants']) or '')
        return res

    def _get_main_product_supplier(self, cr, uid, product, context=None):
        """Determines the main (best) product supplier for ``product``,
        returning the corresponding ``supplierinfo`` record, or False if
        none were found. The default strategy is to select the supplier
        with the highest priority (i.e. smallest sequence).

        :param browse_record product: product to supply
        :rtype: product.supplierinfo browse_record or False
        """
        sellers = [(seller_info.sequence, seller_info)
                   for seller_info in product.seller_ids or []
                   if seller_info and isinstance(seller_info.sequence,
                                                 (int, long))]
        # BUGFIX: was sellers[0][1], which returned the first supplier in
        # storage order instead of the smallest-sequence one promised above.
        return sellers and min(sellers)[1] or False

    def _calc_seller(self, cr, uid, ids, fields, arg, context=None):
        """Multi function field: main-supplier info (id, delay, qty,
        partner) for each product.
        """
        result = {}
        for product in self.browse(cr, uid, ids, context=context):
            main_supplier = self._get_main_product_supplier(
                cr, uid, product, context=context)
            result[product.id] = {
                'seller_info_id':
                    main_supplier and main_supplier.id or False,
                'seller_delay': main_supplier.delay if main_supplier else 1,
                'seller_qty': main_supplier and main_supplier.qty or 0.0,
                'seller_id':
                    main_supplier and main_supplier.name.id or False
            }
        return result

    def _get_image(self, cr, uid, ids, name, args, context=None):
        """Function field getter: derive medium/small images from 'image'."""
        result = dict.fromkeys(ids, False)
        for obj in self.browse(cr, uid, ids, context=context):
            result[obj.id] = tools.image_get_resized_images(
                obj.image, avoid_resize_medium=True)
        return result

    def _set_image(self, cr, uid, id, name, value, args, context=None):
        """Function field setter: store any written image as the big one."""
        return self.write(
            cr, uid, [id],
            {'image': tools.image_resize_image_big(value)}, context=context)

    _defaults = {
        'active': lambda *a: 1,
        'price_extra': lambda *a: 0.0,
        'price_margin': lambda *a: 1.0,
        'color': 0,
    }

    _name = "product.product"
    _description = "Product"
    _table = "product_product"
    _inherits = {'product.template': 'product_tmpl_id'}
    _inherit = ['mail.thread']
    _order = 'default_code,name_template'
    _columns = {
        'qty_available': fields.function(
            _product_qty_available, type='float',
            string='Quantity On Hand'),
        'virtual_available': fields.function(
            _product_virtual_available, type='float',
            string='Quantity Available'),
        'incoming_qty': fields.function(
            _product_incoming_qty, type='float', string='Incoming'),
        'outgoing_qty': fields.function(
            _product_outgoing_qty, type='float', string='Outgoing'),
        'price': fields.function(
            _product_price, type='float', string='Price',
            digits_compute=dp.get_precision('Product Price')),
        'lst_price': fields.function(
            _product_lst_price, type='float', string='Public Price',
            digits_compute=dp.get_precision('Product Price')),
        'code': fields.function(
            _product_code, type='char', string='Internal Reference'),
        'partner_ref': fields.function(
            _product_partner_ref, type='char', string='Customer ref'),
        'default_code': fields.char('Internal Reference', size=64,
                                    select=True),
        'active': fields.boolean(
            'Active',
            help="If unchecked, it will allow you to hide the product without removing it."),
        'variants': fields.char('Variants', size=64),
        'product_tmpl_id': fields.many2one(
            'product.template', 'Product Template', required=True,
            ondelete="cascade", select=True),
        'ean13': fields.char(
            'EAN13 Barcode', size=13,
            help="International Article Number used for product identification."),
        'packaging': fields.one2many(
            'product.packaging', 'product_id', 'Logistical Units',
            help="Gives the different ways to package the same product. This has no impact on the picking order and is mainly used if you use the EDI module."),
        'price_extra': fields.float(
            'Variant Price Extra',
            digits_compute=dp.get_precision('Product Price')),
        'price_margin': fields.float(
            'Variant Price Margin',
            digits_compute=dp.get_precision('Product Price')),
        'pricelist_id': fields.dummy(
            string='Pricelist', relation='product.pricelist',
            type='many2one'),
        'name_template': fields.related(
            'product_tmpl_id', 'name', string="Template Name", type='char',
            size=128, store=True, select=True),
        'color': fields.integer('Color Index'),
        # image: all image fields are base64 encoded and PIL-supported
        'image': fields.binary(
            "Image",
            help="This field holds the image used as image for the product, limited to 1024x1024px."),
        'image_medium': fields.function(
            _get_image, fnct_inv=_set_image, string="Medium-sized image",
            type="binary", multi="_get_image",
            store={
                'product.product': (
                    lambda self, cr, uid, ids, c={}: ids, ['image'], 10),
            },
            help="Medium-sized image of the product. It is automatically "
                 "resized as a 128x128px image, with aspect ratio preserved, "
                 "only when the image exceeds one of those sizes. Use this field in form views or some kanban views."),
        'image_small': fields.function(
            _get_image, fnct_inv=_set_image, string="Small-sized image",
            type="binary", multi="_get_image",
            store={
                'product.product': (
                    lambda self, cr, uid, ids, c={}: ids, ['image'], 10),
            },
            help="Small-sized image of the product. It is automatically "
                 "resized as a 64x64px image, with aspect ratio preserved. "
                 "Use this field anywhere a small image is required."),
        'seller_info_id': fields.function(
            _calc_seller, type='many2one',
            relation="product.supplierinfo", string="Supplier Info",
            multi="seller_info"),
        'seller_delay': fields.function(
            _calc_seller, type='integer', string='Supplier Lead Time',
            multi="seller_info",
            help="This is the average delay in days between the purchase order confirmation and the reception of goods for this product and for the default supplier. It is used by the scheduler to order requests based on reordering delays."),
        'seller_qty': fields.function(
            _calc_seller, type='float', string='Supplier Quantity',
            multi="seller_info",
            help="This is minimum quantity to purchase from Main Supplier."),
        'seller_id': fields.function(
            _calc_seller, type='many2one', relation="res.partner",
            string='Main Supplier',
            help="Main Supplier who has highest priority in Supplier List.",
            multi="seller_info"),
    }

    def unlink(self, cr, uid, ids, context=None):
        """Delete variants; templates left with no other variant are
        deleted along with them.
        """
        unlink_ids = []
        unlink_product_tmpl_ids = []
        for product in self.browse(cr, uid, ids, context=context):
            tmpl_id = product.product_tmpl_id.id
            # Check if the product is the last variant of this template.
            other_product_ids = self.search(
                cr, uid, [('product_tmpl_id', '=', tmpl_id),
                          ('id', '!=', product.id)], context=context)
            if not other_product_ids:
                unlink_product_tmpl_ids.append(tmpl_id)
            unlink_ids.append(product.id)
        self.pool.get('product.template').unlink(
            cr, uid, unlink_product_tmpl_ids, context=context)
        return super(product_product, self).unlink(cr, uid, unlink_ids,
                                                   context=context)

    def onchange_uom(self, cursor, user, ids, uom_id, uom_po_id):
        """Force the purchase UoM back to the default UoM when their
        categories differ.
        """
        if uom_id and uom_po_id:
            uom_obj = self.pool.get('product.uom')
            uom = uom_obj.browse(cursor, user, [uom_id])[0]
            uom_po = uom_obj.browse(cursor, user, [uom_po_id])[0]
            if uom.category_id.id != uom_po.category_id.id:
                return {'value': {'uom_po_id': uom_id}}
        return False

    def _check_ean_key(self, cr, uid, ids, context=None):
        """Constraint: every product's EAN13 barcode must be valid.

        BUGFIX: the original returned the check result of only the LAST
        record in ``ids``; now any invalid barcode fails the constraint.
        """
        for product in self.read(cr, uid, ids, ['ean13'], context=context):
            if not check_ean(product['ean13']):
                return False
        return True

    _constraints = [(
        _check_ean_key,
        'You provided an invalid "EAN13 Barcode" reference. You may use the "Internal Reference" field instead.',
        ['ean13'])]

    def on_order(self, cr, uid, ids, orderline, quantity):
        # Hook for other addons; intentionally a no-op here.
        pass

    def name_get(self, cr, user, ids, context=None):
        """Display products as "[code] name - variants", using the
        supplier-specific code/name when a partner_id is in context.
        """
        if context is None:
            context = {}
        if isinstance(ids, (int, long)):
            ids = [ids]
        if not len(ids):
            return []

        def _name_get(d):
            name = d.get('name', '')
            code = d.get('default_code', False)
            if code:
                name = '[%s] %s' % (code, name)
            if d.get('variants'):
                name = name + ' - %s' % (d['variants'],)
            return (d['id'], name)

        partner_id = context.get('partner_id', False)
        result = []
        for product in self.browse(cr, user, ids, context=context):
            sellers = filter(lambda x: x.name.id == partner_id,
                             product.seller_ids)
            if sellers:
                for s in sellers:
                    mydict = {
                        'id': product.id,
                        'name': s.product_name or product.name,
                        'default_code':
                            s.product_code or product.default_code,
                        'variants': product.variants
                    }
                    result.append(_name_get(mydict))
            else:
                mydict = {
                    'id': product.id,
                    'name': product.name,
                    'default_code': product.default_code,
                    'variants': product.variants
                }
                result.append(_name_get(mydict))
        return result

    def name_search(self, cr, user, name='', args=None, operator='ilike',
                    context=None, limit=100):
        """Search by internal reference, then EAN13, then code/name; as a
        last resort parse a leading "[code]" out of *name*.
        """
        if not args:
            args = []
        if name:
            ids = self.search(cr, user, [('default_code', '=', name)] + args,
                              limit=limit, context=context)
            if not ids:
                ids = self.search(cr, user, [('ean13', '=', name)] + args,
                                  limit=limit, context=context)
            if not ids:
                # Do not merge the 2 next lines into one single search, SQL search performance would be abysmal
                # on a database with thousands of matching products, due to the huge merge+unique needed for the
                # OR operator (and given the fact that the 'name' lookup results come from the ir.translation table
                # Performing a quick memory merge of ids in Python will give much better performance
                ids = set()
                ids.update(self.search(
                    cr, user, args + [('default_code', operator, name)],
                    limit=limit, context=context))
                if not limit or len(ids) < limit:
                    # we may underrun the limit because of dupes in the results, that's fine
                    ids.update(self.search(
                        cr, user, args + [('name', operator, name)],
                        limit=(limit and (limit - len(ids)) or False),
                        context=context))
                ids = list(ids)
            if not ids:
                ptrn = re.compile(r'(\[(.*?)\])')
                res = ptrn.search(name)
                if res:
                    ids = self.search(
                        cr, user,
                        [('default_code', '=', res.group(2))] + args,
                        limit=limit, context=context)
        else:
            ids = self.search(cr, user, args, limit=limit, context=context)
        result = self.name_get(cr, user, ids, context=context)
        return result

    #
    # Could be overrided for variants matrices prices
    #
    def price_get(self, cr, uid, ids, ptype='list_price', context=None):
        """Return {product_id: price} for the given price-type field,
        applying variant margin/extra for list prices, optional UoM
        conversion, and optional currency conversion.
        """
        if context is None:
            context = {}
        if 'currency_id' in context:
            pricetype_obj = self.pool.get('product.price.type')
            price_type_id = pricetype_obj.search(
                cr, uid, [('field', '=', ptype)])[0]
            price_type_currency_id = pricetype_obj.browse(
                cr, uid, price_type_id).currency_id.id
        res = {}
        product_uom_obj = self.pool.get('product.uom')
        for product in self.browse(cr, uid, ids, context=context):
            res[product.id] = product[ptype] or 0.0
            if ptype == 'list_price':
                res[product.id] = (res[product.id] *
                                   (product.price_margin or 1.0)) + \
                    product.price_extra
            if 'uom' in context:
                uom = product.uom_id or product.uos_id
                res[product.id] = product_uom_obj._compute_price(
                    cr, uid, uom.id, res[product.id], context['uom'])
            # Convert from price_type currency to asked one
            if 'currency_id' in context:
                # Take the price_type currency from the product field
                # This is right cause a field cannot be in more than one currency
                res[product.id] = self.pool.get('res.currency').compute(
                    cr, uid, price_type_currency_id, context['currency_id'],
                    res[product.id], context=context)
        return res

    def copy(self, cr, uid, id, default=None, context=None):
        """Duplicate a product; renames to "<name> (copy)". With
        context['variant'] set, only variant-level fields are copied onto
        the same template.
        """
        if context is None:
            context = {}
        if not default:
            default = {}
        # Craft our own `<name> (copy)` in en_US (self.copy_translation()
        # will do the other languages).
        context_wo_lang = context.copy()
        context_wo_lang.pop('lang', None)
        product = self.read(cr, uid, id, ['name'], context=context_wo_lang)
        default = default.copy()
        default.update(name=_("%s (copy)") % (product['name']))
        if context.get('variant', False):
            fields = ['product_tmpl_id', 'active', 'variants',
                      'default_code', 'price_margin', 'price_extra']
            data = self.read(cr, uid, id, fields=fields, context=context)
            for f in fields:
                if f in default:
                    data[f] = default[f]
            data['product_tmpl_id'] = data.get('product_tmpl_id', False) \
                and data['product_tmpl_id'][0]
            del data['id']
            return self.create(cr, uid, data)
        else:
            return super(product_product, self).copy(
                cr, uid, id, default=default, context=context)

    def search(self, cr, uid, args, offset=0, limit=None, order=None,
               context=None, count=False):
        """Restrict searches to the default category subtree when
        'search_default_categ_id' is in context.
        """
        if context is None:
            context = {}
        if context and context.get('search_default_categ_id', False):
            args.append((('categ_id', 'child_of',
                          context['search_default_categ_id'])))
        # BUGFIX: the original hard-coded count=False, so count queries
        # (count=True) silently returned a record list instead of a count.
        return super(product_product, self).search(
            cr, uid, args, offset=offset, limit=limit, order=order,
            context=context, count=count)
class partner_vat_intra(osv.osv_memory):
    """ Partner Vat Intra

    Wizard that collects intracommunity transactions per partner for the
    selected fiscal period(s) and renders them either as the XML declaration
    file expected by the tax administration (create_xml) or as a printable
    report (preview).
    """
    _name = "partner.vat.intra"
    _description = 'Partner VAT Intra'

    def _get_xml_data(self, cr, uid, context=None):
        # Default for 'file_save': if create_xml() left the generated XML in
        # the context, hand it back base64-encoded for the binary field.
        # NOTE(review): `context` is dereferenced without a None guard; if
        # this default is ever evaluated with context=None it raises.
        if context.get('file_save', False):
            return base64.encodestring(context['file_save'].encode('utf8'))
        return ''

    def _get_europe_country(self, cursor, user, context=None):
        # Default for 'country_ids': ids of every EU member state.
        return self.pool.get('res.country').search(cursor, user, [('code', 'in', ['AT', 'BG', 'CY', 'CZ', 'DK', 'EE', 'FI', 'FR', 'DE', 'GR', 'HU', 'IE', 'IT', 'LV', 'LT', 'LU', 'MT', 'NL', 'PL', 'PT', 'RO', 'SK', 'SI', 'ES', 'SE', 'GB'])])

    _columns = {
        'name': fields.char('File Name', size=32),
        'period_code': fields.char('Period Code', size=6, required=True, help='''This is where you have to set the period code for the intracom declaration using the format: ppyyyy
              PP can stand for a month: from '01' to '12'.
              PP can stand for a trimester: '31','32','33','34'
              The first figure means that it is a trimester,
              The second figure identify the trimester.
              PP can stand for a complete fiscal year: '00'.
              YYYY stands for the year (4 positions).
              '''),
        'period_ids': fields.many2many('account.period', 'account_period_rel', 'acc_id', 'period_id', 'Period (s)', help='Select here the period(s) you want to include in your intracom declaration'),
        'tax_code_id': fields.many2one('account.tax.code', 'Company', domain=[('parent_id', '=', False)], help="Keep empty to use the user's company", required=True),
        'test_xml': fields.boolean('Test XML file', help="Sets the XML output as test file"),
        'mand_id': fields.char('Reference', size=14, help="Reference given by the Representative of the sending company."),
        'msg': fields.text('File created', size=14, readonly=True),
        'no_vat': fields.text('Partner With No VAT', size=14, readonly=True, help="The Partner whose VAT number is not defined and they are not included in XML File."),
        'file_save': fields.binary('Save File', readonly=True),
        'country_ids': fields.many2many('res.country', 'vat_country_rel', 'vat_id', 'country_id', 'European Countries'),
        'comments': fields.text('Comments'),
    }

    def _get_tax_code(self, cr, uid, context=None):
        # Default for 'tax_code_id': the root tax code of the user's company.
        obj_tax_code = self.pool.get('account.tax.code')
        obj_user = self.pool.get('res.users')
        company_id = obj_user.browse(cr, uid, uid, context=context).company_id.id
        tax_code_ids = obj_tax_code.search(cr, uid, [('company_id', '=', company_id), ('parent_id', '=', False)], context=context)
        return tax_code_ids and tax_code_ids[0] or False

    _defaults = {
        'country_ids': _get_europe_country,
        'file_save': _get_xml_data,
        'name': 'vat_intra.xml',
        'tax_code_id': _get_tax_code,
    }

    def _get_datas(self, cr, uid, ids, context=None):
        """Collects require data for vat intra xml
        :param ids: id of wizard.
        :return: dict of all data to be used to generate xml for Partner VAT Intra.
        :rtype: dict
        """
        if context is None:
            context = {}

        obj_user = self.pool.get('res.users')
        obj_sequence = self.pool.get('ir.sequence')
        obj_partner = self.pool.get('res.partner')

        xmldict = {}
        post_code = street = city = country = data_clientinfo = ''
        seq = amount_sum = 0

        wiz_data = self.browse(cr, uid, ids[0], context=context)
        comments = wiz_data.comments

        # Declaring company: either taken from the chosen tax code or from
        # the current user's company.
        if wiz_data.tax_code_id:
            data_company = wiz_data.tax_code_id.company_id
        else:
            data_company = obj_user.browse(cr, uid, uid, context=context).company_id

        # Get Company vat
        company_vat = data_company.partner_id.vat
        if not company_vat:
            raise osv.except_osv(_('Insufficient Data!'), _('No VAT number associated with your company.'))
        company_vat = company_vat.replace(' ', '').upper()
        issued_by = company_vat[:2]

        # NOTE(review): only the length of the period code is validated here,
        # not its ppyyyy structure.
        if len(wiz_data.period_code) != 6:
            raise osv.except_osv(_('Error!'), _('Period code is not valid.'))

        if not wiz_data.period_ids:
            raise osv.except_osv(_('Insufficient Data!'), _('Please select at least one Period.'))

        p_id_list = obj_partner.search(cr, uid, [('vat', '!=', False)], context=context)
        if not p_id_list:
            raise osv.except_osv(_('Insufficient Data!'), _('No partner has a VAT number associated with him.'))

        seq_declarantnum = obj_sequence.get(cr, uid, 'declarantnum')
        # Declaration reference: VAT digits + last 4 digits of the sequence.
        dnum = company_vat[2:] + seq_declarantnum[-4:]

        addr = obj_partner.address_get(cr, uid, [data_company.partner_id.id], ['invoice'])
        email = data_company.partner_id.email or ''
        phone = data_company.partner_id.phone or ''

        if addr.get('invoice', False):
            ads = obj_partner.browse(cr, uid, [addr['invoice']])[0]
            city = (ads.city or '')
            post_code = (ads.zip or '')
            if ads.street:
                street = ads.street
            if ads.street2:
                street += ' '
                street += ads.street2
            if ads.country_id:
                country = ads.country_id.code

        if not country:
            # Fall back to the country prefix of the VAT number.
            country = company_vat[:2]
        if not email:
            raise osv.except_osv(_('Insufficient Data!'), _('No email address associated with the company.'))
        if not phone:
            raise osv.except_osv(_('Insufficient Data!'), _('No phone associated with the company.'))
        xmldict.update({
            'company_name': data_company.name,
            'company_vat': company_vat,
            'vatnum': company_vat[2:],
            'mand_id': wiz_data.mand_id,
            'sender_date': str(time.strftime('%Y-%m-%d')),
            'street': street,
            'city': city,
            'post_code': post_code,
            'country': country,
            'email': email,
            'phone': phone.replace('/', '').replace('.', '').replace('(', '').replace(')', '').replace(' ', ''),
            'period': wiz_data.period_code,
            'clientlist': [],
            'comments': comments,
            'issued_by': issued_by,
        })

        #tax code 44: services
        #tax code 46L: normal good deliveries
        #tax code 46T: ABC good deliveries
        #tax code 48xxx: credite note on tax code xxx
        codes = ('44', '46L', '46T', '48s44', '48s46L', '48s46T')
        # Aggregate move lines per partner and intra code; the 48s* credit
        # note codes are folded back into their base code with negated amount.
        cr.execute('''SELECT p.name As partner_name, l.partner_id AS partner_id, p.vat AS vat,
                      (CASE WHEN t.code = '48s44' THEN '44'
                            WHEN t.code = '48s46L' THEN '46L'
                            WHEN t.code = '48s46T' THEN '46T'
                            ELSE t.code END) AS intra_code,
                      SUM(CASE WHEN t.code in ('48s44','48s46L','48s46T') THEN -l.tax_amount ELSE l.tax_amount END) AS amount
                      FROM account_move_line l
                      LEFT JOIN account_tax_code t ON (l.tax_code_id = t.id)
                      LEFT JOIN res_partner p ON (l.partner_id = p.id)
                      WHERE t.code IN %s
                       AND l.period_id IN %s
                       AND t.company_id = %s
                      GROUP BY p.name, l.partner_id, p.vat, intra_code''', (codes, tuple([p.id for p in wiz_data.period_ids]), data_company.id))

        p_count = 0

        for row in cr.dictfetchall():
            if not row['vat']:
                row['vat'] = ''
                p_count += 1

            seq += 1
            amt = row['amount'] or 0.0
            amount_sum += amt

            # Map the DB tax code to the single-letter XML code.
            intra_code = row['intra_code'] == '44' and 'S' or (row['intra_code'] == '46L' and 'L' or (row['intra_code'] == '46T' and 'T' or ''))

            xmldict['clientlist'].append({
                'partner_name': row['partner_name'],
                'seq': seq,
                'vatnum': row['vat'][2:].replace(' ', '').upper(),
                'vat': row['vat'],
                'country': row['vat'][:2],
                'amount': round(amt, 2),
                'intra_code': row['intra_code'],
                'code': intra_code})

        xmldict.update({'dnum': dnum, 'clientnbr': str(seq), 'amountsum': round(amount_sum, 2), 'partner_wo_vat': p_count})
        return xmldict

    def create_xml(self, cursor, user, ids, context=None):
        """Creates xml that is to be exported and sent to estate for partner vat intra.
        :return: Value for next action.
        :rtype: dict
        """
        mod_obj = self.pool.get('ir.model.data')
        xml_data = self._get_datas(cursor, user, ids, context=context)
        month_quarter = xml_data['period'][:2]
        year = xml_data['period'][2:]
        data_file = ''

        # Can't we do this by etree?
        data_head = """<?xml version="1.0" encoding="ISO-8859-1"?>
<ns2:IntraConsignment xmlns="http://www.minfin.fgov.be/InputCommon" xmlns:ns2="http://www.minfin.fgov.be/IntraConsignment" IntraListingsNbr="1">
	<ns2:Representative>
		<RepresentativeID identificationType="NVAT" issuedBy="%(issued_by)s">%(company_vat)s</RepresentativeID>
		<Name>%(company_name)s</Name>
		<Street>%(street)s</Street>
		<PostCode>%(post_code)s</PostCode>
		<City>%(city)s</City>
		<CountryCode>%(country)s</CountryCode>
		<EmailAddress>%(email)s</EmailAddress>
		<Phone>%(phone)s</Phone>
	</ns2:Representative>""" % (xml_data)

        if xml_data['mand_id']:
            data_head += '\n\t\t<ns2:RepresentativeReference>%(mand_id)s</ns2:RepresentativeReference>' % (xml_data)

        data_comp_period = '\n\t\t<ns2:Declarant>\n\t\t\t<VATNumber>%(vatnum)s</VATNumber>\n\t\t\t<Name>%(company_name)s</Name>\n\t\t\t<Street>%(street)s</Street>\n\t\t\t<PostCode>%(post_code)s</PostCode>\n\t\t\t<City>%(city)s</City>\n\t\t\t<CountryCode>%(country)s</CountryCode>\n\t\t\t<EmailAddress>%(email)s</EmailAddress>\n\t\t\t<Phone>%(phone)s</Phone>\n\t\t</ns2:Declarant>' % (xml_data)
        # Period element: '3x' means quarter x, '00' a whole year, otherwise
        # a plain month code.
        if month_quarter.startswith('3'):
            data_comp_period += '\n\t\t<ns2:Period>\n\t\t\t<ns2:Quarter>' + month_quarter[1] + '</ns2:Quarter> \n\t\t\t<ns2:Year>' + year + '</ns2:Year>\n\t\t</ns2:Period>'
        elif month_quarter.startswith('0') and month_quarter.endswith('0'):
            data_comp_period += '\n\t\t<ns2:Period>\n\t\t\t<ns2:Year>' + year + '</ns2:Year>\n\t\t</ns2:Period>'
        else:
            data_comp_period += '\n\t\t<ns2:Period>\n\t\t\t<ns2:Month>' + month_quarter + '</ns2:Month> \n\t\t\t<ns2:Year>' + year + '</ns2:Year>\n\t\t</ns2:Period>'

        data_clientinfo = ''
        for client in xml_data['clientlist']:
            if not client['vatnum']:
                raise osv.except_osv(_('Insufficient Data!'), _('No vat number defined for %s.') % client['partner_name'])
            data_clientinfo += '\n\t\t<ns2:IntraClient SequenceNumber="%(seq)s">\n\t\t\t<ns2:CompanyVATNumber issuedBy="%(country)s">%(vatnum)s</ns2:CompanyVATNumber>\n\t\t\t<ns2:Code>%(code)s</ns2:Code>\n\t\t\t<ns2:Amount>%(amount)s</ns2:Amount>\n\t\t</ns2:IntraClient>' % (client)

        data_decl = '\n\t<ns2:IntraListing SequenceNumber="1" ClientsNbr="%(clientnbr)s" DeclarantReference="%(dnum)s" AmountSum="%(amountsum)s">' % (xml_data)

        data_file += data_head + data_decl + data_comp_period + data_clientinfo + '\n\t\t<ns2:Comment>%(comments)s</ns2:Comment>\n\t</ns2:IntraListing>\n</ns2:IntraConsignment>' % (xml_data)
        # NOTE(review): the caller's context is mutated here so that the
        # _get_xml_data default can pick the file up in the save dialog.
        context['file_save'] = data_file

        model_data_ids = mod_obj.search(cursor, user, [('model', '=', 'ir.ui.view'), ('name', '=', 'view_vat_intra_save')], context=context)
        resource_id = mod_obj.read(cursor, user, model_data_ids, fields=['res_id'], context=context)[0]['res_id']

        return {
            'name': _('Save'),
            'context': context,
            'view_type': 'form',
            'view_mode': 'form',
            'res_model': 'partner.vat.intra',
            'views': [(resource_id, 'form')],
            'view_id': 'view_vat_intra_save',
            'type': 'ir.actions.act_window',
            'target': 'new',
        }

    def preview(self, cr, uid, ids, context=None):
        """Render the collected declaration data as a printable report."""
        xml_data = self._get_datas(cr, uid, ids, context=context)
        datas = {
            'ids': [],
            'model': 'partner.vat.intra',
            'form': xml_data
        }
        return {
            'type': 'ir.actions.report.xml',
            'report_name': 'partner.vat.intra.print',
            'datas': datas,
        }
class account_asset_asset(osv.osv):
    """Fixed-asset extension: sequencing/numbering per category and year, QR
    labels, responsible-user workflow with history, art/IT/real-estate/vehicle
    attribute sets, and a customised depreciation board."""
    _inherit = ['account.asset.asset', 'mail.thread']
    _name = 'account.asset.asset'
    _order = 'id desc'
    _mail_post_access = 'read'

    # Given the asset's category, collect the notification groups associated
    # to it, taking the operating unit into account.
    def _get_partners_notification(self, cr, uid, asset_id, context=None):
        res_users_obj = self.pool.get('res.users')
        group_ids = []
        asset = self.browse(cr, uid, asset_id, context=context)
        for categoria in asset.category_id.categoria_ue_notificacion_ids:
            if categoria.operating_unit_id == asset.operating_unit_id and categoria.group_id.id not in group_ids:
                group_ids.append(categoria.group_id.id)
        user_ids = res_users_obj.search(cr, uid, [('groups_id', 'in', group_ids)], context=context)
        # Deduplicate partners of all users in those groups.
        partner_ids = list(set(u.partner_id.id for u in res_users_obj.browse(cr, SUPERUSER_ID, user_ids, context=context)))
        return partner_ids

    def _default_category_ids(self, cr, uid, context=None):
        # Default for 'domain_category_ids': every category.
        return self.pool.get('account.asset.category').search(cr, uid, [], context=context)

    def _es_resp(self, cr, uid, ids, object, arg, context=None):
        # Function field: is the current user the asset's responsible?
        res = {}
        for activo in self.browse(cr, uid, ids, context=context):
            resultado = (uid == activo.user_id.id)
            res[activo.id] = resultado
        return res

    def _get_category_ids(self, cr, uid, ids, name, args, context=None):
        # Function field: categories selectable for the asset. Invoiced
        # assets are restricted to categories sharing the current code.
        result = {}
        asset_category_obj = self.pool.get('account.asset.category')
        for rec in self.browse(cr, uid, ids):
            if rec.invoice_id.id:
                result[rec.id] = asset_category_obj.search(cr, uid, [('codigo', '=', rec.category_id.codigo)], context=context)
            else:
                result[rec.id] = asset_category_obj.search(cr, uid, [], context=context)
        return result

    # NOTE(review): mutable default argument `context={}` - shared between
    # calls; the usual convention is context=None.
    def _concatenar(self, cr, uid, ids, name, arg, context={}):
        # Function field: asset number "code / year / seq" once confirmed.
        result = {}
        for rec in self.browse(cr, uid, ids, context):
            if rec.state in ['open', 'close', 'baja']:
                result[rec.id] = str(rec.category_id.codigo) + " / " + rec.purchase_date[:4] + " / " + str(rec.secuencia_activo).zfill(4)
            else:
                result[rec.id] = False
        return result

    def _generar_qr(self, cr, uid, ids, field_name, args=None, context=None):
        # Function field: PNG QR image encoding name, asset number and the
        # record's form URL.
        res = {}
        # Build the QR payload with the url carrying the id, model and form view.
        web_base_url = self.pool.get('ir.config_parameter').get_param(cr, uid, 'web.base.url')
        for activo_rec in self.browse(cr, uid, ids, context):
            if activo_rec.numero_activo:
                str_registro_id = str(activo_rec.id)
                str_url = web_base_url + '/#id=' + str_registro_id + '&view_type=form&model=account.asset.asset'
                str_qr = activo_rec.name + '\n' + activo_rec.numero_activo + '\n' + '\n' + str_url
            else:
                str_qr = ''
            url = pyqrcode.create(str_qr)
            buffer = io.BytesIO()
            url.png(buffer, scale=2, module_color=[0, 0, 0, 128], background=[0xff, 0xff, 0xff])
            imagen = buffer.getvalue()
            res[activo_rec.id] = imagen
        return res

    def _generar_qr_url(self, cr, uid, ids, field_name, args=None, context=None):
        # Function field: PNG QR image encoding only the record's form URL.
        res = {}
        # Build the QR payload with the url carrying the id, model and form view.
        web_base_url = self.pool.get('ir.config_parameter').get_param(cr, uid, 'web.base.url')
        for activo_rec in self.browse(cr, uid, ids, context):
            if activo_rec.numero_activo:
                str_registro_id = str(activo_rec.id)
                str_url = web_base_url + '/#id=' + str_registro_id + '&view_type=form&model=account.asset.asset'
            else:
                str_url = ''
            url = pyqrcode.create(str_url)
            buffer = io.BytesIO()
            url.png(buffer, scale=2, module_color=[0, 0, 0, 128], background=[0xff, 0xff, 0xff])
            imagen = buffer.getvalue()
            res[activo_rec.id] = imagen
        return res

    @api.model
    def _default_res_country(self):
        # Default country: Uruguay (code matched case-insensitively).
        return self.env['res.country'].search([('code', 'in', ['UY', 'Uy', 'uy', 'uY'])], limit=1)

    @api.onchange('country_id')
    def _onchange_country_id(self):
        # Reset dependent location fields and narrow their domains.
        self.localidad = False
        self.country_state_id = False
        if self.country_id:
            return {'domain': {'country_state_id': [('country_id', '=', self.country_id.id)], 'localidad': [('country_id', '=', self.country_id.id)]}}

    @api.onchange('country_state_id')
    def _onchange_country_state_id(self):
        # Reset the town and narrow its domain to the chosen state.
        self.localidad = False
        if self.country_state_id:
            return {'domain': {'localidad': [('country_id', '=', self.country_id.id), ('state_id', '=', self.country_state_id.id)]}}

    @api.onchange('surface_mts2')
    def _onchange_surface_mts2(self):
        # Cap the surface at 9 digits; reset and warn otherwise.
        if len(str(self.surface_mts2)) > 9:
            self.surface_mts2 = False
            return {
                'warning': {
                    'title': 'Error',
                    'message': u'Se superó la cantidad permitida de superficie'
                },
            }

    @api.onchange('cant_pasajero')
    def _onchange_cant_pasajero(self):
        # Cap the passenger count at 3 digits; reset and warn otherwise.
        if len(str(self.cant_pasajero)) > 3:
            self.cant_pasajero = False
            return {
                'warning': {
                    'title': 'Error',
                    'message': u'Se superó la cantidad permitida de pasajeros'
                },
            }

    @api.depends('method_period', 'depreciation_line_ids', 'depreciation_line_ids.move_check', 'depreciation_line_ids.amount', 'depreciation_line_ids.depreciation_date')
    def _compute_amortizacion(self):
        # Daily depreciation derived from the earliest unposted line.
        for record in self:
            record.amortizacion = 0
            for line in record.depreciation_line_ids.search([('asset_id', '=', record.id), ('move_check', '=', False)], limit=1, order='depreciation_date'):
                if record.method_period > 0:
                    # method_period is in months; approximate a month as 30 days.
                    record.amortizacion = line.amount / (record.method_period * 30)
                break

    @api.depends('depreciation_line_ids', 'depreciation_line_ids.depreciated_value', 'depreciation_line_ids.move_check', 'depreciation_line_ids.depreciation_date', 'amortizacion_ac_baja')
    def _compute_amortizacion_ac(self):
        # Accumulated depreciation: posted lines plus the disposal carry-over.
        # NOTE(review): unlike _compute_amortizacion, the field is never reset
        # to 0 before the += loop - verify it cannot double-count on recompute.
        for record in self:
            for line in record.depreciation_line_ids.search([('asset_id', '=', record.id), ('move_check', '=', True)]):
                record.amortizacion_ac += line.amount
            record.amortizacion_ac += record.amortizacion_ac_baja

    @api.one
    @api.constrains('inf_fecha_ini', 'inf_fecha_fin')
    def _check_garantia(self):
        # Warranty end date must not precede its start date.
        if self.inf_fecha_ini and self.inf_fecha_fin and (self.inf_fecha_ini > self.inf_fecha_fin):
            raise exceptions.ValidationError('La fecha de fin de la garantia no puede ser menor que la fecha de inicio')

    # 001 - start
    def _amount_residual(self, cr, uid, ids, name, args, context=None):
        # Function field: residual value = purchase - posted moves - salvage.
        # PCAR: SUM(abs(l.debit-l.credit)) replaced by abs(SUM(l.debit-l.credit)) in the query.
        # NOTE(review): an empty `ids` would produce "IN ()" and a SQL error.
        cr.execute("""SELECT
                l.asset_id as id, abs(SUM(l.debit-l.credit)) AS amount
            FROM
                account_move_line l
            WHERE
                l.asset_id IN %s GROUP BY l.asset_id """, (tuple(ids), ))
        res = dict(cr.fetchall())
        for asset in self.browse(cr, uid, ids, context):
            company_currency = asset.company_id.currency_id.id
            current_currency = asset.currency_id.id
            amount = self.pool['res.currency'].compute(cr, uid, company_currency, current_currency, res.get(asset.id, 0.0), context=context)
            res[asset.id] = asset.purchase_value - amount - asset.salvage_value
        for id in ids:
            res.setdefault(id, 0.0)
        return res
    # 001 - end

    _columns = {
        'secuencia_activo': fields.integer('Secuencia'),
        'numero_activo': fields.function(_concatenar, type='char', method=True, string=u'Número', store=True),
        'image_medium': fields.binary("Medium-sized photo"),
        # 'unidades_originales': fields.integer('Unidades originales', readonly=True, states={'draft': [('readonly', False)]}),
        # 'unidades_actuales': fields.integer('Unidades actuales', readonly=True, states={'draft': [('invisible', True)]}),
        'fecha_baja': fields.date('Fecha de baja', select="1", readonly=True, states={'draft': [('invisible', True)], 'open': [('invisible', True)]}),
        # 001 - start
        'name': fields.char('Name', size=50, required=True, select=1),
        'department_id': fields.many2one('hr.department', u'Ubicación', select="1", track_visibility='onchange'),
        'user_id': fields.many2one('res.users', u'Responsable de la dirección', select="1"),
        'estado_activo': fields.selection(_ESTADO_ACTIVO, 'Estado del activo', track_visibility='onchange'),
        'estado_responsable': fields.selection(_ESTADO_HISTORIAL_RESP, 'Estado responsable'),
        'es_resp': fields.function(_es_resp, string='Es responsable?', type="boolean"),
        'invoice_id': fields.many2one('account.invoice', 'Nro. factura GRP', select="1"),
        'domain_category_ids': fields.function(_get_category_ids, method=True, type='many2many', relation='account.asset.category', string=u'Lista domain categorías'),
        'tenencia': fields.selection(_TENENCIA_ACTIVO, 'Tenencia', track_visibility='onchange'),
        'prestado_a_de': fields.char('Prestado a/de', size=30),
        'nombre_contacto': fields.char('Nombre-Contacto responsable'),
        # PCARBALLO - registration date and value fields added
        'fecha_alta': fields.date('Fecha de alta'),
        'valor_alta': fields.float('Valor de alta'),
        # --------------------------------------------------------------------------------------------
        # Artwork-related fields
        # --------------------------------------------------------------------------------------------
        'obra_arte_fecha': fields.date("Fecha de la obra"),
        'obra_arte_propietario': fields.char('Propietario', size=240),
        'obra_arte_codigo': fields.char(u'Código', size=64),
        'obra_arte_categoria_id': fields.many2one('grp.cat_obras_arte', u'Categoría', ondelete='restrict'),
        'obra_arte_genero_id': fields.many2one('grp.gen_obras_arte', u'Género', ondelete='restrict'),
        'obra_arte_autor': fields.char(u'Autor', size=64),
        'obra_arte_firma': fields.char(u'Firma', size=64),
        'obra_arte_firma_ubicacion': fields.selection(_FIRMA_UBICACION, u'Ubicación de la firma'),
        'obra_arte_estado_firma': fields.selection(_ESTADO_FIRMA, u'Estado de la firma'),
        'obra_arte_forma_id': fields.many2one('grp.forma_obras_arte', u'Forma', ondelete='restrict'),
        'obra_arte_dimension_alto': fields.char('Alto', size=20),
        'obra_arte_dimension_ancho': fields.char('Ancho', size=20),
        'obra_arte_dimension_diametro': fields.char(u'Diámetro', size=20),
        # TECHNIQUE label field (pending)
        'obra_arte_tecnicas_ids': fields.many2many('grp.etiquetas_obras_arte', 'grp_etiquetas_obras_activos_rel', 'asset_id', 'obra_tecnica_id', u'Técnica'),
        'obra_arte_soporte': fields.selection(_SOPORTE, 'Soporte'),
        'obra_arte_modo_adquisicion': fields.selection(_MODO_ADQUISICION, u'Modo de adquisición'),
        'obra_arte_para_restaurar': fields.boolean('Para restaurar'),
        # --------------------------------------------------------------------------------------------
        # IT-related fields
        # --------------------------------------------------------------------------------------------
        'inf_nuc': fields.char('NUC', size=64),
        'inf_tipo_id': fields.many2one('grp.tipos_bien_informatica', u'Tipo', ondelete='restrict'),
        'inf_marca': fields.char('Marca', size=64),
        'inf_modelo': fields.char('Modelo', size=64),
        'inf_ip': fields.char('IP', size=15),
        'inf_serial_num': fields.char(u'Número de serie', size=64),
        'inf_garantia_duracion': fields.char(u'Duración', size=64),
        'inf_fecha_ini': fields.date('Fecha inicio'),
        'inf_fecha_fin': fields.date('Fecha fin'),
        # --------------------------------------------------------------
        # "Characteristics" separator
        # --------------------------------------------------------------
        'inf_tipo_name': fields.related('inf_tipo_id', 'name', type="char", relation="grp.tipos_bien_informatica", string="Nombre tipo"),
        'inf_prestaciones': fields.many2one('grp.tipos_impresoras', 'Prestaciones', ondelete='restrict'),
        'inf_contador_pag': fields.integer(u'Contador pág.'),
        'inf_carac_fecha': fields.date('Fecha'),
        'inf_contador_tot': fields.integer('Contador total'),
        'inf_disco': fields.char('Disco', size=64),
        'inf_memoria': fields.char('Memoria', size=64),
        'inf_procesador': fields.char('Procesador', size=64),
        'amortizacion': fields.float(u'Amortización', compute="_compute_amortizacion", readonly=True),
        'amortizacion_ac': fields.float(u'Amortización Acumulada', compute="_compute_amortizacion_ac", readonly=True),
        'amortizacion_ac_baja': fields.float(u'Amortización Acumulada', copy=False, readonly=True),
        # --------------------------------------------------------------
        # "General" separator (pending)
        # --------------------------------------------------------------
        'gral_atributo_1': fields.many2one('grp.atributos', 'Atributo', ondelete='restrict'),
        'gral_atributo_2': fields.many2one('grp.atributos', 'Atributo', ondelete='restrict'),
        'gral_atributo_3': fields.many2one('grp.atributos', 'Atributo', ondelete='restrict'),
        'gral_valor_1': fields.many2one('grp.valores_atributos', 'Valor', ondelete='restrict'),
        'gral_valor_2': fields.many2one('grp.valores_atributos', 'Valor', ondelete='restrict'),
        'gral_valor_3': fields.many2one('grp.valores_atributos', 'Valor', ondelete='restrict'),
        # Other attributes
        'gral_otro_atributo1': fields.char('Atributo1', size=100),
        'gral_otro_atributo2': fields.char('Atributo2', size=100),
        # Is this a parent asset?
        'es_padre': fields.boolean(u'Es activo padre?'),
        # MVARELA 26_03 - standard fields redefined to add track_visibility
        'note': fields.text('Note', track_visibility='onchange'),
        'state': fields.selection([('draft', 'Draft'), ('check', 'En revisión'), ('open', 'Running'), ('close', 'Amortizado'), ('baja', 'Dado de baja')], 'Status', required=True,
                                  help="When an asset is created, the status is 'Draft'.\n" \
                                       "If the asset is confirmed, the status goes in 'Running' and the depreciation lines can be posted in the accounting.\n" \
                                       "You can manually close an asset when the depreciation is over. If the last line of depreciation is posted, the asset automatically goes in that status.",
                                  track_visibility='onchange'),
        'purchase_value_date': fields.date(u'Fecha primera amortización'),
        # one2many / many2many fields
        # Disposal history
        'historial': fields.one2many('grp.historial_baja_activo', 'grp_account_asset_id', 'Historial de baja'),
        # Responsible history
        'historial_resp': fields.one2many('grp.historial_responsable', 'hist_resp_id', 'Historial responsable'),
        # QR code
        'grafico_qr': fields.function(_generar_qr, type='binary', store=False),
        'grafico_qr_url': fields.function(_generar_qr_url, type='binary', store=False),
        # --------------------------------------------------------------------------------------------
        # Real-estate fields
        # --------------------------------------------------------------------------------------------
        # TODO: the many2one fields for localidad and contract number are
        # missing because those classes are not in the core
        'direction': fields.char(u'Dirección', size=60),
        'name_inmueble': fields.char('Nombre', size=40),
        'padron': fields.char(u'Nro. padrón', size=10),
        'property': fields.selection(_PROPIEDAD, 'Propiedad'),
        'state_inciso': fields.selection(_ESTADO_INCISO, 'Estado'),
        'surface_mts2': fields.integer(u'Superficie (mts2)', size=5),
        # 'contract_number': fields.char('Nro. contrato', size=200),
        'inventory_ids': fields.one2many("grp.account_asset_inventory_line", 'activo_id', string="Inventarios", domain=[('state', '=', 'validado')]),
        'country_id': fields.many2one('res.country', u'País', default=_default_res_country),
        'localidad': fields.many2one('grp.localidad', 'Localidad'),
        'country_state_id': fields.many2one('res.country.state', 'Departamento'),
        # -------------------------------------------------
        # Vehicles Fields
        # -------------------------------------------------
        # May change if fleet.vehicle is used
        'tipo_vehiculo': fields.selection([('auto', 'Auto'), ('camioneta', 'Camioneta'), ('minibus', 'Minibus')], string=u'Tipo de vehículo'),
        'cant_pasajero': fields.integer('Cantidad de pasajeros'),
        'cilindrada': fields.char('Cilindrada', size=20),
        'tipo_combustible': fields.char('Tipo de combustible', size=20),
        # 001 - start
        'value_residual': fields.function(_amount_residual, method=True, digits_compute=dp.get_precision('Account'), string='Residual Value'),
        # 001 - end
    }

    _defaults = {
        # 'unidades_originales': 1,
        # 'unidades_actuales': 1,
        'estado_activo': 'B',
        # 'purchase_value_date': _default_purchase_value_date,
        'domain_category_ids': _default_category_ids,
        'property': 'inciso',
    }

    def onchange_inf_tipo_id(self, cr, uid, ids, inf_tipo_id, context=None):
        # Mirror the IT type's name into the related char field.
        inf_tipo_name = False
        if inf_tipo_id:
            inf_tipo_name = self.pool.get('grp.tipos_bien_informatica').browse(cr, uid, inf_tipo_id, context).name
        return {'value': {'inf_tipo_name': inf_tipo_name}}

    def onchange_category_id(self, cr, uid, ids, category_id, context=None):
        res = super(account_asset_asset, self).onchange_category_id(cr, uid, ids, category_id, context=context)
        # MVARELA - if the asset comes from an invoice, only the subcategory may change
        if len(ids) > 0 and category_id:
            activo = self.browse(cr, uid, ids[0], context)
            if activo.invoice_id:
                nuevo_codigo = self.pool.get('account.asset.category').browse(cr, uid, category_id, context=context).codigo
                if nuevo_codigo != activo.category_id.codigo:
                    res['value']['category_id'] = False
                    res['warning'] = {
                        'title': 'Error',
                        'message': u'Sólo puede cambiar la subcategoría del activo. Este activo fue generado a partir de una factura.'
                    }
        return res

    def _default_purchase_value_date(self, cr, uid, context=None):
        # First-half of the year -> June 30th, otherwise December 31st.
        # NOTE(review): the literal 06 is Python-2-only syntax.
        ret = False
        if date.today().month >= 6:
            year_def = date.today().year
            ret = date(year_def, 12, 31)
        else:
            year_def = date.today().year
            ret = date(year_def, 06, 30)
        return ret.strftime('%Y-%m-%d')

    def aceptar_responsable(self, cr, uid, ids, context=None):
        # Responsible accepts the assignment; log it in the history.
        super(account_asset_asset, self).write(cr, uid, ids, {'estado_responsable': 'AC'}, context=context)
        for activo in self.browse(cr, uid, ids, context):
            valores = {
                'department_id': activo.department_id.id,
                'user_id': activo.user_id.id,
                'estado_responsable': 'AC',
                'hist_resp_id': activo.id,
            }
            self.pool.get('grp.historial_responsable').create(cr, uid, valores, context)

    def rechazar_responsable(self, cr, uid, ids, context=None):
        # Responsible rejects the assignment; log it in the history.
        super(account_asset_asset, self).write(cr, uid, ids, {'estado_responsable': 'RE'}, context=context)
        for activo in self.browse(cr, uid, ids, context):
            valores = {
                'department_id': activo.department_id.id,
                'user_id': activo.user_id.id,
                'estado_responsable': 'RE',
                'hist_resp_id': activo.id,
            }
            self.pool.get('grp.historial_responsable').create(cr, uid, valores, context)

    # MVARELA 07_01_2016: on confirmation the depreciation board is recomputed
    def validate(self, cr, uid, ids, context=None):
        """Confirm assets: allocate the next per-category/per-year sequence
        number, then delegate and rebuild the depreciation board."""
        for activo in self.browse(cr, uid, ids, context):
            if not activo.es_padre:
                anio = datetime.strptime(activo.purchase_date, "%Y-%m-%d").date().year
                cat = activo.category_id
                codigo = cat.codigo
                company_id = activo.company_id.id
                # NOTE(review): SQL built with Python %-interpolation instead
                # of cursor parameters - `codigo` is a string and is injected
                # unquoted (breakage and SQL-injection risk). Should be
                # cr.execute(query, (codigo, anio, company_id)).
                cr.execute("""select max(secuencia_activo)
                              from account_asset_asset a, account_asset_category ac
                              where a.category_id = ac.id
                              and ac.codigo = %s
                              and EXTRACT(YEAR FROM purchase_date) = %s
                              and a.company_id = %s """ % (codigo, anio, company_id))
                maxima = cr.fetchone()[0]
                if not maxima:
                    maxima = 0
                self.write(cr, uid, [activo.id], {'secuencia_activo': maxima + 1}, context=context)
        res = super(account_asset_asset, self).validate(cr, uid, ids, context=context)
        self.compute_depreciation_board(cr, uid, ids, context=context)
        return res

    def set_to_draft(self, cr, uid, ids, context=None):
        # Back to draft: release the allocated sequence number.
        res = super(account_asset_asset, self).set_to_draft(cr, uid, ids, context=context)
        self.write(cr, uid, ids, {'secuencia_activo': 0}, context=context)
        return res

    # PCARBALLO
    def _compute_board_amount(self, cr, uid, asset, i, residual_amount, amount_to_depr, undone_dotation_number, posted_depreciation_line_ids, total_days, depreciation_date, context=None):
        """Amount of depreciation line *i* (1-based) of the board.

        Linear and degressive methods; with prorata, the first and last lines
        are scaled by the number of days actually covered.
        """
        # by default amount = 0
        amount = 0
        if i == undone_dotation_number:
            # Last line absorbs whatever residual remains (rounding safety).
            amount = residual_amount
        else:
            if asset.method == 'linear':
                amount = amount_to_depr / (undone_dotation_number - len(posted_depreciation_line_ids))
                if asset.prorata:
                    amount = amount_to_depr / asset.method_number
                    days = total_days - float(depreciation_date.strftime('%j'))
                    fecha_compra = datetime.strptime(asset.purchase_date, '%Y-%m-%d')
                    fecha_valor = datetime.strptime(asset.purchase_value_date, '%Y-%m-%d')
                    # Days between purchase and first-valuation dates drive
                    # the prorated first line.
                    days_first_time = fecha_valor - fecha_compra
                    month_days = calendar.monthrange(fecha_compra.year, fecha_compra.month)[1]
                    _logger.info("Days First Time: %s", days_first_time.days)
                    if i == 1:
                        # amount = (amount_to_depr / asset.method_number) / total_days * days
                        # amount = (asset.purchase_value / ((asset.method_number * asset.method_period))/total_days) * days_first_time.days
                        amount = (asset.purchase_value / ((asset.method_number * asset.method_period)) / month_days) * days_first_time.days
                    elif i == undone_dotation_number:
                        amount = (amount_to_depr / asset.method_number) / total_days * (total_days - days)
            elif asset.method == 'degressive':
                amount = residual_amount * asset.method_progress_factor
                if asset.prorata:
                    days = total_days - float(depreciation_date.strftime('%j'))
                    if i == 1:
                        amount = (residual_amount * asset.method_progress_factor) / total_days * days
                    elif i == undone_dotation_number:
                        amount = (residual_amount * asset.method_progress_factor) / total_days * (total_days - days)
        return amount

    def compute_depreciation_board(self, cr, uid, ids, context=None):
        """Rebuild the unposted depreciation lines of each asset, starting
        from purchase_value_date and stepping by method_period months."""
        depreciation_lin_obj = self.pool.get('account.asset.depreciation.line')
        currency_obj = self.pool.get('res.currency')
        for asset in self.browse(cr, uid, ids, context=context):
            if asset.value_residual == 0.0:
                continue
            posted_depreciation_line_ids = depreciation_lin_obj.search(cr, uid, [('asset_id', '=', asset.id), ('move_check', '=', True)], order='depreciation_date desc')
            old_depreciation_line_ids = depreciation_lin_obj.search(cr, uid, [('asset_id', '=', asset.id), ('move_id', '=', False)])
            if old_depreciation_line_ids:
                # Drop stale unposted lines before regenerating them.
                depreciation_lin_obj.unlink(cr, uid, old_depreciation_line_ids, context=context)
            amount_to_depr = residual_amount = asset.value_residual
            depreciation_date = datetime.strptime(asset.purchase_value_date, '%Y-%m-%d')
            day = depreciation_date.day
            month = depreciation_date.month
            year = depreciation_date.year
            # NOTE(review): `(year % 4) and 365 or 366` ignores the century
            # rule (1900 is not a leap year, 2100 will not be).
            total_days = (year % 4) and 365 or 366
            precision_digits = self.pool.get('decimal.precision').precision_get(cr, uid, 'Account')
            undone_dotation_number = self._compute_board_undone_dotation_nb(cr, uid, asset, depreciation_date, total_days, context=context)
            for x in range(len(posted_depreciation_line_ids), undone_dotation_number):
                i = x + 1
                amount = self._compute_board_amount(cr, uid, asset, i, residual_amount, amount_to_depr, undone_dotation_number, posted_depreciation_line_ids, total_days, depreciation_date, context=context)
                amount = round(amount, 2)
                if float_is_zero(amount, precision_digits=precision_digits):
                    continue
                residual_amount -= amount
                vals = {
                    'amount': amount,
                    'asset_id': asset.id,
                    'sequence': i,
                    'name': str(asset.id) + '/' + str(i),
                    'remaining_value': residual_amount,
                    'depreciated_value': (asset.purchase_value - asset.salvage_value) - (residual_amount + amount),
                    'depreciation_date': depreciation_date.strftime('%Y-%m-%d'),
                }
                depreciation_lin_obj.create(cr, uid, vals, context=context)
                # Considering Depr. Period as months
                depreciation_date = (datetime(year, month, day) + relativedelta(months=+asset.method_period))
                day = depreciation_date.day
                month = depreciation_date.month
                year = depreciation_date.year
        return True

    def create(self, cr, uid, values, context=None):
        """Create an asset: resize the photo, auto-accept the first
        responsible, and notify the category's notification groups."""
        if 'image_medium' in values and values['image_medium']:
            mediana = tools.image_resize_image_medium(values['image_medium'])
            values['image_medium'] = mediana
        # if 'unidades_originales' in values:
        #     values['unidades_actuales'] = values['unidades_originales']
        if 'user_id' in values and values['user_id']:
            values['estado_responsable'] = 'AC'
        asset_id = super(account_asset_asset, self).create(cr, uid, values, context=context)
        # group_id = self._get_groups_by_category(cr, uid, asset_id, context)
        # res_users = self.pool['res.users']
        # user_ids = res_users.search(cr, uid, [('groups_id', 'in', group_id)], context=context)
        # partner_ids = list(set(u.partner_id.id for u in res_users.browse(cr, SUPERUSER_ID, user_ids, context=context)))
        partner_ids = self._get_partners_notification(cr, uid, asset_id, context=context)
        body = "Se ha creado un activo fijo, favor de ingresar los datos adicionales que\
 corresponden para que el mismo pueda ser dado de alta en el sistema"
        self.message_post(cr, uid, [asset_id], body=body, partner_ids=partner_ids, context=context)
        return asset_id

    def write(self, cr, uid, ids, values, context=None):
        """Write: resize the photo, drive the responsible-acceptance state
        machine, log responsible/department changes and notify the new
        responsible."""
        if 'image_medium' in values and values['image_medium']:
            mediana = tools.image_resize_image_medium(values['image_medium'])
            values['image_medium'] = mediana
        # if 'unidades_originales' in values:
        #     values['unidades_actuales'] = values['unidades_originales']
        # TODO: review - this procedure only checks the first record's value
        if 'user_id' in values:
            # if a responsible state is already set, check only the first record
            if len(ids) > 0 and self.browse(cr, uid, ids[0], context).estado_responsable:
                values['estado_responsable'] = 'EP'
            # the first assignment is marked as accepted
            else:
                values['estado_responsable'] = 'AC'
        # ---------------------------------------------------------------------------------------------------------
        # Delegate to super
        res = super(account_asset_asset, self).write(cr, uid, ids, values, context=context)
        if 'user_id' in values:
            for activo in self.browse(cr, uid, ids, context=context):
                valores = {
                    'department_id': activo.department_id.id,
                    'user_id': activo.user_id.id,
                    'estado_responsable': 'EP',
                    'hist_resp_id': activo.id,
                }
                self.pool.get('grp.historial_responsable').create(cr, uid, valores, context=context)
                if activo.state == 'open' and activo.estado_responsable == 'EP':
                    if activo.user_id:
                        body = "Tiene un activo asignado para aceptar."
                        self.message_post(cr, uid, ids, type="notification", subtype='mt_comment', body=body, partner_ids=[activo.user_id.partner_id.id], context=context)
        elif 'department_id' in values:
            for activo in self.browse(cr, uid, ids, context=context):
                self.pool.get('grp.historial_responsable').create(cr, uid, {
                    'department_id': activo.department_id.id,
                    'user_id': activo.user_id.id,
                    'estado_responsable': 'AC',
                    'hist_resp_id': activo.id,
                }, context=context)
        return res
def _save_file(self, path, b64_file): """Save a file encoded in base 64""" self._check_filestore(path) with open(path, 'w') as ofile: ofile.write(base64.b64decode(b64_file)) return True def _set_image(self, cr, uid, id, name, value, arg, context=None): image = self.browse(cr, uid, id, context=context) full_path = self._image_path(cr, uid, image, context=context) if full_path: return self._save_file(full_path, value) return self.write(cr, uid, id, {'file_db_store' : value}, context=context) _columns = { 'name':fields.char('Image Title', size=100, required=True), 'extention': fields.char('file extention', size=6), 'link':fields.boolean('Link?', help="Images can be linked from files on your file system or remote (Preferred)"), 'file_db_store':fields.binary('Image stored in database'), 'file':fields.function(_get_image, fnct_inv=_set_image, type="binary", filters='*.png,*.jpg,*.gif'), 'url':fields.char('File Location', size=250), 'comments':fields.text('Comments'), 'product_id':fields.many2one('product.product', 'Product') } _defaults = { 'link': lambda *a: False, } _sql_constraints = [('uniq_name_product_id', 'UNIQUE(product_id, name)', _('A product can have only one image with the same name'))]
)[0] report = self.pool.get("ir.actions.report.xml").browse(cr, uid, rep_id, context=context) return report.report_name def print_invoice(self, cr, uid, ids, context=None): """ Method called by button print report in wizard, to print report in pdf @param self: The object pointer. @param cr: A database cursor @param uid: ID of the user currently logged in @param ids: Ids to invoice's to print ticket @param context: A standard dictionary @return : retrun report pdf generated """ if context is None: context = {} if len(context.get("active_ids", [])) > 1: return {} return { "type": "ir.actions.report.xml", "report_name": self._get_report_name(cr, uid, context=context), "datas": {"ids": context["active_ids"]}, } _columns = { "journal": fields.char("Journal", 64, readonly=True, requied=True), "report_format": fields.binary("Report", readonly=True, required=True), } _defaults = {"journal": _get_journal, "report_format": _get_report}
class purchase_order(osv.osv):
    """Purchase order extension: extra workflow state, CSV export/import of
    order lines through the ``upload_file`` binary field, and propagation of
    the PO creator onto its pickings.
    """
    _inherit = "purchase.order"
    _name = "purchase.order"

    STATE_SELECTION = [('draft', 'Draft PO'),
                       ('sent', 'RFQ'),
                       ('manager_confirm', 'Manager Confirm'),
                       ('bid', 'Bid Received'),
                       ('confirmed', 'Waiting Approval'),
                       ('approved', 'Purchase Confirmed'),
                       ('except_picking', 'Shipping Exception'),
                       ('except_invoice', 'Invoice Exception'),
                       ('done', 'Done'),
                       ('cancel', 'Cancelled')]

    _columns = {
        'dest_address_id': fields.many2one(
            'res.partner', u'送货地址',
            states={'confirmed': [('readonly', True)],
                    'approved': [('readonly', True)],
                    'done': [('readonly', True)]},
            help="Put an address if you want to deliver directly from the supplier to the customer. " \
                 "Otherwise, keep empty to deliver to your own company."
        ),
        'upload_file': fields.binary('Up&Download Order Lines'),
        'state': fields.selection(
            STATE_SELECTION, 'Status', readonly=True,
            help="The status of the purchase order or the quotation request. "
                 "A request for quotation is a purchase order in a 'Draft' status. "
                 "Then the order has to be confirmed by the user, the status switch "
                 "to 'Confirmed'. Then the supplier must confirm the order to change "
                 "the status to 'Approved'. When the purchase order is paid and "
                 "received, the status becomes 'Done'. If a cancel action occurs in "
                 "the invoice or in the receipt of goods, the status becomes "
                 "in exception.",
            select=True, copy=False),
    }

    def export_csv(self, cr, uid, ids, context=None):
        """Serialize this order's lines to CSV and store the result
        (base64-encoded) in ``upload_file``."""
        this = self.browse(cr, uid, ids, context=context)[0]
        buf = cStringIO.StringIO()
        writer = csv.writer(buf)
        for line in this.order_line:
            writer.writerow([
                line.product_id.id, line.name, line.product_qty,
                line.price_unit, line.product_uom.id
            ])
        out = base64.encodestring(buf.getvalue())
        this.write({'upload_file': out})
        return

    #@api.one
    def import_csv(self, cr, uid, ids, context=None):
        """Read ``upload_file`` as CSV and insert one purchase.order.line per
        row (columns: product_id, name, product_qty, price_unit, product_uom).

        SECURITY/REVIEW: this bypasses the ORM through a second, hard-coded
        psycopg2 connection — credentials should come from configuration, and
        ideally the lines should be created via the ORM instead.
        """
        this = self.browse(cr, uid, ids, context=context)[0]
        buf = cStringIO.StringIO(base64.decodestring(this.upload_file))
        reader = csv.reader(buf)
        # FIXME: hard-coded connection credentials.
        conn_string = "host='localhost' dbname='LoewieHK' user='******' password='******'"
        conn = psycopg2.connect(conn_string)
        cursor = conn.cursor()
        try:
            # BUGFIX: values are now passed as query parameters instead of
            # %-interpolated into the SQL string, which was an SQL-injection
            # vector and broke on product names containing a quote.
            statement = ("insert into purchase_order_line"
                         "(order_id,product_id,name,product_qty,price_unit,"
                         "product_uom,date_planned,state) "
                         "values(%s,%s,%s,%s,%s,%s,%s,'draft')")
            for line in reader:
                cursor.execute(statement, (
                    this.id, line[0], line[1], line[2], line[3], line[4],
                    datetime.date.today().strftime("%m/%d/%Y")))
            conn.commit()
        finally:
            # BUGFIX: the cursor/connection were previously leaked.
            cursor.close()
            conn.close()
        return

    def view_picking(self, cr, uid, ids, context=None):
        """Stamp each picking's create_uid with the PO creator, then delegate
        to the standard action."""
        if context is None:
            context = {}
        for po in self.browse(cr, uid, ids, context=context):
            for picking in po.picking_ids:
                picking.create_uid = po.create_uid.id
        # BUGFIX: the caller's context was previously discarded
        # (super was called with context=None).
        return super(purchase_order, self).view_picking(cr, uid, ids,
                                                        context=context)
class asset_asset(osv.osv):
    """ Assets """
    _name = 'asset.asset'
    _description = 'Asset'
    _inherit = ['mail.thread']

    def _get_image(self, cr, uid, ids, name, args, context=None):
        # Function-field getter: returns resized variants of `image`
        # (medium resize skipped, see avoid_resize_medium).
        result = dict.fromkeys(ids, False)
        for obj in self.browse(cr, uid, ids, context=context):
            result[obj.id] = tools.image_get_resized_images(
                obj.image, avoid_resize_medium=True)
        return result

    def _set_image(self, cr, uid, id, name, value, args, context=None):
        # Function-field inverse: any resized variant written back is stored
        # as the big image; the getter re-derives the other sizes.
        return self.write(cr, uid, [id],
                          {'image': tools.image_resize_image_big(value)},
                          context=context)

    def _read_group_state_ids(self, cr, uid, ids, domain,
                              read_group_order=None, access_rights_uid=None,
                              context=None, team='3'):
        # Shared _group_by_full helper: list all asset.state stages of the
        # given team (plus any already-present ids) so kanban columns show
        # empty stages too.
        access_rights_uid = access_rights_uid or uid
        stage_obj = self.pool.get('asset.state')
        order = stage_obj._order
        # lame hack to allow reverting search, should just work in the trivial case
        if read_group_order == 'stage_id desc':
            order = "%s desc" % order
        # write the domain
        # - ('id', 'in', 'ids'): add columns that should be present
        # - OR ('team','=',team): add default columns that belongs team
        search_domain = []
        search_domain += ['|', ('team', '=', team)]
        search_domain += [('id', 'in', ids)]
        stage_ids = stage_obj._search(cr, uid, search_domain, order=order,
                                      access_rights_uid=access_rights_uid,
                                      context=context)
        result = stage_obj.name_get(cr, access_rights_uid, stage_ids,
                                    context=context)
        # restore order of the search
        result.sort(
            lambda x, y: cmp(stage_ids.index(x[0]), stage_ids.index(y[0])))
        return result, {}

    # Team-specific wrappers ('0'=finance, '1'=warehouse, '2'=manufacture,
    # '3'=maintenance).
    def _read_group_finance_state_ids(self, cr, uid, ids, domain,
                                      read_group_order=None,
                                      access_rights_uid=None, context=None):
        return self._read_group_state_ids(cr, uid, ids, domain,
                                          read_group_order,
                                          access_rights_uid, context, '0')

    def _read_group_warehouse_state_ids(self, cr, uid, ids, domain,
                                        read_group_order=None,
                                        access_rights_uid=None, context=None):
        return self._read_group_state_ids(cr, uid, ids, domain,
                                          read_group_order,
                                          access_rights_uid, context, '1')

    def _read_group_manufacture_state_ids(self, cr, uid, ids, domain,
                                          read_group_order=None,
                                          access_rights_uid=None,
                                          context=None):
        return self._read_group_state_ids(cr, uid, ids, domain,
                                          read_group_order,
                                          access_rights_uid, context, '2')

    def _read_group_maintenance_state_ids(self, cr, uid, ids, domain,
                                          read_group_order=None,
                                          access_rights_uid=None,
                                          context=None):
        return self._read_group_state_ids(cr, uid, ids, domain,
                                          read_group_order,
                                          access_rights_uid, context, '3')

    CRITICALITY_SELECTION = [('0', 'General'),
                             ('1', 'Important'),
                             ('2', 'Very important'),
                             ('3', 'Critical')]

    _columns = {
        'name': fields.char('Asset Name', size=64, required=True,
                            translate=True),
        # One state column per team; domains keep each pointed at its own
        # team's stages.
        'finance_state_id': fields.many2one('asset.state', 'State',
                                            domain=[('team', '=', '0')]),
        'warehouse_state_id': fields.many2one('asset.state', 'State',
                                              domain=[('team', '=', '1')]),
        'manufacture_state_id': fields.many2one('asset.state', 'State',
                                                domain=[('team', '=', '2')]),
        'maintenance_state_id': fields.many2one('asset.state', 'State',
                                                domain=[('team', '=', '3')]),
        # STATE_COLOR_SELECTION is defined elsewhere in this module.
        'maintenance_state_color': fields.related(
            'maintenance_state_id', 'state_color', type="selection",
            selection=STATE_COLOR_SELECTION, string="Color", readonly=True),
        'criticality': fields.selection(CRITICALITY_SELECTION, 'Criticality'),
        'property_stock_asset': fields.property(
            type='many2one',
            relation='stock.location',
            string="Asset Location",
            store=True,
            help="This location will be used as the destination location for installed parts during asset life."),
        'user_id': fields.many2one('res.users', 'Assigned to',
                                   track_visibility='onchange'),
        'active': fields.boolean('Active'),
        'asset_number': fields.char('Asset Number', size=64),
        'model': fields.char('Model', size=64),
        'serial': fields.char('Serial no.', size=64),
        'vendor_id': fields.many2one('res.partner', 'Vendor'),
        'manufacturer_id': fields.many2one('res.partner', 'Manufacturer'),
        'start_date': fields.date('Start Date'),
        'purchase_date': fields.date('Purchase Date'),
        'warranty_start_date': fields.date('Warranty Start'),
        'warranty_end_date': fields.date('Warranty End'),
        # image: all image fields are base64 encoded and PIL-supported
        'image': fields.binary(
            "Image",
            help="This field holds the image used as image for the asset, limited to 1024x1024px."),
        'image_medium': fields.function(
            _get_image, fnct_inv=_set_image,
            string="Medium-sized image", type="binary", multi="_get_image",
            store={
                'asset.asset': (lambda self, cr, uid, ids, c={}: ids,
                                ['image'], 10),
            },
            help="Medium-sized image of the asset. It is automatically "\
                 "resized as a 128x128px image, with aspect ratio preserved, "\
                 "only when the image exceeds one of those sizes. Use this field in form views or some kanban views."),
        'image_small': fields.function(
            _get_image, fnct_inv=_set_image,
            string="Small-sized image", type="binary", multi="_get_image",
            store={
                'asset.asset': (lambda self, cr, uid, ids, c={}: ids,
                                ['image'], 10),
            },
            help="Small-sized image of the asset. It is automatically "\
                 "resized as a 64x64px image, with aspect ratio preserved. "\
                 "Use this field anywhere a small image is required."),
        'category_ids': fields.many2many('asset.category', id1='asset_id',
                                         id2='category_id', string='Tags'),
    }
    _defaults = {
        'active': True,
    }
    # Wire the per-team state columns to their kanban group expanders.
    _group_by_full = {
        'finance_state_id': _read_group_finance_state_ids,
        'warehouse_state_id': _read_group_warehouse_state_ids,
        'manufacture_state_id': _read_group_manufacture_state_ids,
        'maintenance_state_id': _read_group_maintenance_state_ids,
    }

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
class account_coda_import(osv.osv_memory):
    # Wizard that parses a CODA file (Belgian bank statement interchange
    # format) and creates the corresponding bank statements and lines.
    _name = 'account.coda.import'
    _description = 'Import CODA File'
    _columns = {
        'coda_data': fields.binary('CODA File', required=True),
        'coda_fname': fields.char('CODA Filename', required=True),
        'note': fields.text('Log'),
    }
    _defaults = {
        'coda_fname': lambda *a: '',
    }

    def coda_parsing(self, cr, uid, ids, context=None, batch=False,
                     codafile=None, codafilename=None):
        """Parse a base64-encoded CODA file and create bank statements.

        In batch mode the file is passed in directly; otherwise it is read
        from the wizard record. Each CODA record type ('0'..'9', see the
        official CODA layout) updates the statement dict being built; the
        second phase creates account.bank.statement(.line) records and
        returns the bank-statement tree action.
        """
        if context is None:
            context = {}
        if batch:
            codafile = str(codafile)
            codafilename = codafilename  # no-op: already supplied by caller
        else:
            data = self.browse(cr, uid, ids)[0]
            try:
                codafile = data.coda_data
                codafilename = data.coda_fname
            except:
                raise osv.except_osv(
                    _('Error'),
                    _('Wizard in incorrect state. Please hit the Cancel button'
                      ))
                return {}  # unreachable (after raise); kept as-is
        # CODA files are windows-1252 encoded, one fixed-width record per line.
        recordlist = unicode(base64.decodestring(codafile), 'windows-1252',
                             'strict').split('\n')
        statements = []
        for line in recordlist:
            if not line:
                pass
            elif line[0] == '0':
                #Begin of a new Bank statement
                statement = {}
                statements.append(statement)
                statement['version'] = line[127]
                if statement['version'] not in ['1', '2']:
                    raise osv.except_osv(
                        _('Error') + ' R001',
                        _('CODA V%s statements are not supported, please contact your bank'
                          ) % statement['version'])
                statement['globalisation_stack'] = []
                statement['lines'] = []
                statement['date'] = time.strftime(
                    tools.DEFAULT_SERVER_DATE_FORMAT,
                    time.strptime(rmspaces(line[5:11]), '%d%m%y'))
                statement['separateApplication'] = rmspaces(line[83:88])
            elif line[0] == '1':
                #Statement details
                if statement['version'] == '1':
                    statement['acc_number'] = rmspaces(line[5:17])
                    statement['currency'] = rmspaces(line[18:21])
                elif statement['version'] == '2':
                    if line[1] == '0':
                        # Belgian bank account BBAN structure
                        statement['acc_number'] = rmspaces(line[5:17])
                        statement['currency'] = rmspaces(line[18:21])
                    elif line[1] == '1':
                        # foreign bank account BBAN structure
                        raise osv.except_osv(
                            _('Error') + ' R1001',
                            _('Foreign bank accounts with BBAN structure are not supported '
                              ))
                    elif line[1] == '2':
                        # Belgian bank account IBAN structure
                        statement['acc_number'] = rmspaces(line[5:21])
                        statement['currency'] = rmspaces(line[39:42])
                    elif line[1] == '3':
                        # foreign bank account IBAN structure
                        raise osv.except_osv(
                            _('Error') + ' R1002',
                            _('Foreign bank accounts with IBAN structure are not supported '
                              ))
                    else:
                        # Something else, not supported
                        raise osv.except_osv(
                            _('Error') + ' R1003',
                            _('Unsupported bank account structure '))
                statement['journal_id'] = False
                statement['bank_account'] = False
                # Belgian Account Numbers are composed of 12 digits.
                # In OpenERP, the user can fill the bank number in any format: With or without IBan code, with or without spaces, with or without '-'
                # The two following sql requests handle those cases.
                if len(statement['acc_number']) >= 12:
                    # If the Account Number is >= 12 digits, it is mostlikely a Belgian Account Number (With or without IBAN).
                    # The following request try to find the Account Number using a 'like' operator.
                    # So, if the Account Number is stored with IBAN code, it can be found thanks to this.
                    cr.execute(
                        "select id from res_partner_bank where replace(replace(acc_number,' ',''),'-','') like %s",
                        ('%' + statement['acc_number'] + '%', ))
                else:
                    # This case is necessary to avoid cases like the Account Number in the CODA file is set to a single or few digits,
                    # and so a 'like' operator would return the first account number in the database which matches.
                    cr.execute(
                        "select id from res_partner_bank where replace(replace(acc_number,' ',''),'-','') = %s",
                        (statement['acc_number'], ))
                bank_ids = [id[0] for id in cr.fetchall()]
                # Filter bank accounts which are not allowed
                bank_ids = self.pool.get('res.partner.bank').search(
                    cr, uid, [('id', 'in', bank_ids)])
                if bank_ids and len(bank_ids) > 0:
                    bank_accs = self.pool.get('res.partner.bank').browse(
                        cr, uid, bank_ids)
                    # Keep the first bank account whose journal currency (or,
                    # failing that, company currency) matches the statement's.
                    for bank_acc in bank_accs:
                        if bank_acc.journal_id.id and (
                                (bank_acc.journal_id.currency.id
                                 and bank_acc.journal_id.currency.name ==
                                 statement['currency'])
                                or (not bank_acc.journal_id.currency.id
                                    and bank_acc.journal_id.company_id.
                                    currency_id.name ==
                                    statement['currency'])):
                            statement['journal_id'] = bank_acc.journal_id
                            statement['bank_account'] = bank_acc
                            break
                if not statement['bank_account']:
                    raise osv.except_osv(
                        _('Error') + ' R1004',
                        _("No matching Bank Account (with Account Journal) found.\n\nPlease set-up a Bank Account with as Account Number '%s' and as Currency '%s' and an Account Journal."
                          ) % (statement['acc_number'],
                               statement['currency']))
                statement['description'] = rmspaces(line[90:125])
                statement['balance_start'] = float(rmspaces(
                    line[43:58])) / 1000
                if line[42] == '1':
                    #1 = Debit, the starting balance is negative
                    statement['balance_start'] = -statement['balance_start']
                statement['balance_start_date'] = time.strftime(
                    tools.DEFAULT_SERVER_DATE_FORMAT,
                    time.strptime(rmspaces(line[58:64]), '%d%m%y'))
                statement['accountHolder'] = rmspaces(line[64:90])
                statement['paperSeqNumber'] = rmspaces(line[2:5])
                statement['codaSeqNumber'] = rmspaces(line[125:128])
            elif line[0] == '2':
                if line[1] == '1':
                    #New statement line (movement record 2.1)
                    statementLine = {}
                    statementLine['ref'] = rmspaces(line[2:10])
                    statementLine['ref_move'] = rmspaces(line[2:6])
                    statementLine['ref_move_detail'] = rmspaces(line[6:10])
                    statementLine['sequence'] = len(statement['lines']) + 1
                    statementLine['transactionRef'] = rmspaces(line[10:31])
                    statementLine['debit'] = line[31]  # 0 = Credit, 1 = Debit
                    statementLine['amount'] = float(rmspaces(
                        line[32:47])) / 1000
                    if statementLine['debit'] == '1':
                        statementLine['amount'] = -statementLine['amount']
                    statementLine['transactionDate'] = time.strftime(
                        tools.DEFAULT_SERVER_DATE_FORMAT,
                        time.strptime(rmspaces(line[47:53]), '%d%m%y'))
                    statementLine['transaction_family'] = rmspaces(line[54:56])
                    statementLine['transaction_code'] = rmspaces(line[56:58])
                    statementLine['transaction_category'] = rmspaces(
                        line[58:61])
                    if line[61] == '1':
                        #Structured communication
                        statementLine['communication_struct'] = True
                        statementLine['communication_type'] = line[62:65]
                        statementLine['communication'] = '+++' + line[
                            65:68] + '/' + line[68:72] + '/' + line[
                                72:77] + '+++'
                    else:
                        #Non-structured communication
                        statementLine['communication_struct'] = False
                        statementLine['communication'] = rmspaces(line[62:115])
                    statementLine['entryDate'] = time.strftime(
                        tools.DEFAULT_SERVER_DATE_FORMAT,
                        time.strptime(rmspaces(line[115:121]), '%d%m%y'))
                    statementLine['type'] = 'normal'
                    statementLine['globalisation'] = int(line[124])
                    # Prefix detail lines with the communication of the open
                    # globalisation (grouped transaction), if any.
                    if len(statement['globalisation_stack']
                           ) > 0 and statementLine['communication'] != '':
                        statementLine['communication'] = "\n".join([
                            statement['globalisation_stack'][-1]
                            ['communication'], statementLine['communication']
                        ])
                    if statementLine['globalisation'] > 0:
                        if len(statement['globalisation_stack']
                               ) > 0 and statement['globalisation_stack'][-1][
                                   'globalisation'] == statementLine[
                                       'globalisation']:
                            # Destack
                            statement['globalisation_stack'].pop()
                        else:
                            #Stack
                            statementLine['type'] = 'globalisation'
                            statement['globalisation_stack'].append(
                                statementLine)
                    statement['lines'].append(statementLine)
                elif line[1] == '2':
                    # Movement record 2.2: continuation of the last line.
                    if statement['lines'][-1]['ref'][0:4] != line[2:6]:
                        raise osv.except_osv(
                            _('Error') + 'R2004',
                            _('CODA parsing error on movement data record 2.2, seq nr %s! Please report this issue via your OpenERP support channel.'
                              ) % line[2:10])
                    statement['lines'][-1]['communication'] += rmspaces(
                        line[10:63])
                    statement['lines'][-1]['payment_reference'] = rmspaces(
                        line[63:98])
                    statement['lines'][-1]['counterparty_bic'] = rmspaces(
                        line[98:109])
                elif line[1] == '3':
                    # Movement record 2.3: counterparty details.
                    if statement['lines'][-1]['ref'][0:4] != line[2:6]:
                        raise osv.except_osv(
                            _('Error') + 'R2005',
                            _('CODA parsing error on movement data record 2.3, seq nr %s! Please report this issue via your OpenERP support channel.'
                              ) % line[2:10])
                    if statement['version'] == '1':
                        statement['lines'][-1][
                            'counterpartyNumber'] = rmspaces(line[10:22])
                        statement['lines'][-1]['counterpartyName'] = rmspaces(
                            line[47:73])
                        statement['lines'][-1][
                            'counterpartyAddress'] = rmspaces(line[73:125])
                        statement['lines'][-1]['counterpartyCurrency'] = ''
                    else:
                        if line[22] == ' ':
                            statement['lines'][-1][
                                'counterpartyNumber'] = rmspaces(line[10:22])
                            statement['lines'][-1][
                                'counterpartyCurrency'] = rmspaces(
                                    line[23:26])
                        else:
                            statement['lines'][-1][
                                'counterpartyNumber'] = rmspaces(line[10:44])
                            statement['lines'][-1][
                                'counterpartyCurrency'] = rmspaces(
                                    line[44:47])
                        statement['lines'][-1]['counterpartyName'] = rmspaces(
                            line[47:82])
                        statement['lines'][-1]['communication'] += rmspaces(
                            line[82:125])
                else:
                    # movement data record 2.x (x != 1,2,3)
                    raise osv.except_osv(
                        _('Error') + 'R2006',
                        _('\nMovement data records of type 2.%s are not supported '
                          ) % line[1])
            elif line[0] == '3':
                if line[1] == '1':
                    # Information record 3.1: free-text info for last movement.
                    infoLine = {}
                    infoLine['entryDate'] = statement['lines'][-1]['entryDate']
                    infoLine['type'] = 'information'
                    infoLine['sequence'] = len(statement['lines']) + 1
                    infoLine['ref'] = rmspaces(line[2:10])
                    infoLine['transactionRef'] = rmspaces(line[10:31])
                    infoLine['transaction_family'] = rmspaces(line[32:34])
                    infoLine['transaction_code'] = rmspaces(line[34:36])
                    infoLine['transaction_category'] = rmspaces(line[36:39])
                    infoLine['communication'] = rmspaces(line[40:113])
                    statement['lines'].append(infoLine)
                elif line[1] == '2':
                    if infoLine['ref'] != rmspaces(line[2:10]):
                        raise osv.except_osv(
                            _('Error') + 'R3004',
                            _('CODA parsing error on information data record 3.2, seq nr %s! Please report this issue via your OpenERP support channel.'
                              ) % line[2:10])
                    statement['lines'][-1]['communication'] += rmspaces(
                        line[10:100])
                elif line[1] == '3':
                    if infoLine['ref'] != rmspaces(line[2:10]):
                        raise osv.except_osv(
                            _('Error') + 'R3005',
                            _('CODA parsing error on information data record 3.3, seq nr %s! Please report this issue via your OpenERP support channel.'
                              ) % line[2:10])
                    statement['lines'][-1]['communication'] += rmspaces(
                        line[10:100])
            elif line[0] == '4':
                # Free communication record.
                comm_line = {}
                comm_line['type'] = 'communication'
                comm_line['sequence'] = len(statement['lines']) + 1
                comm_line['ref'] = rmspaces(line[2:10])
                comm_line['communication'] = rmspaces(line[32:112])
                statement['lines'].append(comm_line)
            elif line[0] == '8':
                # new balance record
                statement['debit'] = line[41]
                statement['paperSeqNumber'] = rmspaces(line[1:4])
                statement['balance_end_real'] = float(rmspaces(
                    line[42:57])) / 1000
                statement['balance_end_realDate'] = time.strftime(
                    tools.DEFAULT_SERVER_DATE_FORMAT,
                    time.strptime(rmspaces(line[57:63]), '%d%m%y'))
                if statement['debit'] == '1':
                    # 1=Debit
                    statement[
                        'balance_end_real'] = -statement['balance_end_real']
                # Resolve the accounting period of the closing-balance date
                # (falls back to the statement date).
                if statement['balance_end_realDate']:
                    period_id = self.pool.get('account.period').search(
                        cr, uid,
                        [('company_id', '=',
                          statement['journal_id'].company_id.id),
                         ('date_start', '<=',
                          statement['balance_end_realDate']),
                         ('date_stop', '>=',
                          statement['balance_end_realDate'])
                         ])
                    else_branch = None  # NOTE(review): placeholder removed; see else below
                else:
                    period_id = self.pool.get('account.period').search(
                        cr, uid,
                        [('company_id', '=',
                          statement['journal_id'].company_id.id),
                         ('date_start', '<=', statement['date']),
                         ('date_stop', '>=', statement['date'])])
                if not period_id and len(period_id) == 0:
                    raise osv.except_osv(
                        _('Error') + 'R0002',
                        _("The CODA Statement New Balance date doesn't fall within a defined Accounting Period! Please create the Accounting Period for date %s for the company %s."
                          ) % (statement['balance_end_realDate'],
                               statement['journal_id'].company_id.name))
                statement['period_id'] = period_id[0]
            elif line[0] == '9':
                # Trailer record: total debit/credit amounts.
                statement['balanceMin'] = float(rmspaces(line[22:37])) / 1000
                statement['balancePlus'] = float(rmspaces(line[37:52])) / 1000
                if not statement.get('balance_end_real'):
                    statement['balance_end_real'] = statement[
                        'balance_start'] + statement[
                            'balancePlus'] - statement['balanceMin']
        # ------------------------------------------------------------------
        # Phase 2: create bank statements and their lines from parsed data.
        for i, statement in enumerate(statements):
            statement['coda_note'] = ''
            balance_start_check_date = (
                len(statement['lines']) > 0
                and statement['lines'][0]['entryDate']) or statement['date']
            # Previous statement's closing balance on this journal, for a
            # consistency check against this statement's opening balance.
            cr.execute(
                'SELECT balance_end_real \
                       FROM account_bank_statement \
                       WHERE journal_id = %s and date <= %s \
                       ORDER BY date DESC,id DESC LIMIT 1',
                (statement['journal_id'].id, balance_start_check_date))
            res = cr.fetchone()
            balance_start_check = res and res[0]
            if balance_start_check == None:
                if statement['journal_id'].default_debit_account_id and (
                        statement['journal_id'].default_credit_account_id ==
                        statement['journal_id'].default_debit_account_id):
                    balance_start_check = statement[
                        'journal_id'].default_debit_account_id.balance
                else:
                    raise osv.except_osv(
                        _('Error'),
                        _("Configuration Error in journal %s!\nPlease verify the Default Debit and Credit Account settings."
                          ) % statement['journal_id'].name)
            if balance_start_check != statement['balance_start']:
                statement['coda_note'] = _(
                    "The CODA Statement %s Starting Balance (%.2f) does not correspond with the previous Closing Balance (%.2f) in journal %s!"
                ) % (statement['description'] + ' #' +
                     statement['paperSeqNumber'], statement['balance_start'],
                     balance_start_check, statement['journal_id'].name)
            if not (statement.get('period_id')):
                raise osv.except_osv(
                    _('Error') + ' R3006',
                    _(' No transactions or no period in coda file !'))
            data = {
                'name': statement['paperSeqNumber'],
                'date': statement['date'],
                'journal_id': statement['journal_id'].id,
                'period_id': statement['period_id'],
                'balance_start': statement['balance_start'],
                'balance_end_real': statement['balance_end_real'],
            }
            statement['id'] = self.pool.get('account.bank.statement').create(
                cr, uid, data, context=context)
            for line in statement['lines']:
                if line['type'] == 'information':
                    # Information lines only feed the statement's log note.
                    statement['coda_note'] = "\n".join([
                        statement['coda_note'],
                        line['type'].title() + ' with Ref. ' +
                        str(line['ref']),
                        'Date: ' + str(line['entryDate']),
                        'Communication: ' + line['communication'], ''
                    ])
                elif line['type'] == 'communication':
                    statement['coda_note'] = "\n".join([
                        statement['coda_note'],
                        line['type'].title() + ' with Ref. ' +
                        str(line['ref']), 'Ref: ',
                        'Communication: ' + line['communication'], ''
                    ])
                elif line['type'] == 'normal':
                    # Real movement: build description, resolve counterparty
                    # bank account / partner, then create the statement line.
                    note = []
                    if 'counterpartyName' in line and line[
                            'counterpartyName'] != '':
                        note.append(
                            _('Counter Party') + ': ' +
                            line['counterpartyName'])
                    else:
                        line['counterpartyName'] = False
                    if 'counterpartyNumber' in line and line[
                            'counterpartyNumber'] != '':
                        try:
                            # All-zero account numbers are treated as absent.
                            if int(line['counterpartyNumber']) == 0:
                                line['counterpartyNumber'] = False
                        except:
                            pass
                        if line['counterpartyNumber']:
                            note.append(
                                _('Counter Party Account') + ': ' +
                                line['counterpartyNumber'])
                    else:
                        line['counterpartyNumber'] = False
                    if 'counterpartyAddress' in line and line[
                            'counterpartyAddress'] != '':
                        note.append(
                            _('Counter Party Address') + ': ' +
                            line['counterpartyAddress'])
                    line['name'] = "\n".join(
                        filter(
                            None,
                            [line['counterpartyName'],
                             line['communication']]))
                    partner_id = None
                    structured_com = ""
                    bank_account_id = False
                    if line['communication_struct'] and 'communication_type' in line and line[
                            'communication_type'] == '101':
                        structured_com = line['communication']
                    if 'counterpartyNumber' in line and line[
                            'counterpartyNumber']:
                        ids = self.pool.get('res.partner.bank').search(
                            cr, uid, [('acc_number', '=', str(
                                line['counterpartyNumber']))])
                        if ids:
                            bank_account_id = ids[0]
                            partner_id = self.pool.get(
                                'res.partner.bank').browse(
                                    cr, uid, bank_account_id,
                                    context=context).partner_id.id
                        else:
                            #create the bank account, not linked to any partner. The reconciliation will link the partner manually
                            #chosen at the bank statement final confirmation time.
                            try:
                                type_model, type_id = self.pool.get(
                                    'ir.model.data').get_object_reference(
                                        cr, uid, 'base', 'bank_normal')
                                type_id = self.pool.get(
                                    'res.partner.bank.type').browse(
                                        cr, uid, type_id, context=context)
                                bank_code = type_id.code
                            except ValueError:
                                bank_code = 'bank'
                            bank_account_id = self.pool.get(
                                'res.partner.bank').create(
                                    cr, uid, {
                                        'acc_number':
                                        str(line['counterpartyNumber']),
                                        'state': bank_code
                                    }, context=context)
                    if 'communication' in line and line['communication'] != '':
                        note.append(
                            _('Communication') + ': ' +
                            line['communication'])
                    data = {
                        'name': line['name'],
                        'note': "\n".join(note),
                        'date': line['entryDate'],
                        'amount': line['amount'],
                        'partner_id': partner_id,
                        'statement_id': statement['id'],
                        'ref': structured_com,
                        'sequence': line['sequence'],
                        'bank_account_id': bank_account_id,
                    }
                    self.pool.get('account.bank.statement.line').create(
                        cr, uid, data, context=context)
            if statement['coda_note'] != '':
                self.pool.get('account.bank.statement').write(
                    cr, uid, [statement['id']],
                    {'coda_note': statement['coda_note']},
                    context=context)
        # Open the standard bank-statement tree view on the created records.
        model, action_id = self.pool.get('ir.model.data').get_object_reference(
            cr, uid, 'account', 'action_bank_statement_tree')
        action = self.pool[model].browse(cr, uid, action_id, context=context)
        return {
            'name': action.name,
            'view_type': action.view_type,
            'view_mode': action.view_mode,
            'res_model': action.res_model,
            'domain': action.domain,
            'context': action.context,
            'type': 'ir.actions.act_window',
            'search_view_id': action.search_view_id.id,
            'views': [(v.view_id.id, v.view_mode) for v in action.view_ids]
        }
class hr_holidays(osv.osv):
    """Leave-request extension: fiscal-year tracking, medical-certificate
    attachment, and eligibility validation against the employee's
    appointment date."""
    _inherit = 'hr.holidays'
    _order = 'id desc'

    _columns = {
        'holiday': fields.related('holiday_status_id', 'holiday',
                                  type='boolean', string='Holiday',
                                  store=True),
        'fiscalyear_id': fields.many2one('hr.fiscalyear', 'Fiscal Year'),
        'certificate': fields.binary(
            'Certificate',
            help="Attach the proof against the leave request"),
        'need_certificate': fields.related('holiday_status_id',
                                           'need_certificate', type='boolean',
                                           string='Need Certificate',
                                           store=True),
    }

    def _get_fiscalyear(self, cr, uid, context=None):
        """Return the id of the draft HR fiscal year containing today, or
        False when none exists."""
        today = time.strftime("%Y-%m-%d")
        fiscalyear = self.pool.get('hr.fiscalyear').search(
            cr, uid, [('date_start', '<=', today), ('date_stop', '>=', today),
                      ('state', '=', 'draft')])
        return fiscalyear and fiscalyear[0] or False

    _defaults = {
        'fiscalyear_id': _get_fiscalyear,
    }

    def onchange_holiday_status_id(self, cr, uid, ids, holiday_status_id):
        """Mirror the selected leave type's holiday/need_certificate flags
        onto the request."""
        result = {'value': {'holiday': False, 'need_certificate': False}}
        if holiday_status_id:
            status = self.pool.get('hr.holidays.status').browse(
                cr, uid, holiday_status_id)
            result['value'] = {'holiday': status.holiday,
                               'need_certificate': status.need_certificate}
        return result

    def months_between(self, date1, date2):
        """Return the number of whole months between two datetimes
        (order-insensitive); a partial trailing month is not counted."""
        if date1 > date2:
            date1, date2 = date2, date1
        m1 = date1.year * 12 + date1.month
        m2 = date2.year * 12 + date2.month
        months = m2 - m1
        if date1.day > date2.day:
            months -= 1
        elif date1.day == date2.day:
            # BUGFIX: minutes were not converted to seconds (was
            # hour*3600 + minute + second), so e.g. 10:59:00 and 10:00:59
            # compared equal. Convert to true seconds-of-day.
            seconds1 = date1.hour * 3600 + date1.minute * 60 + date1.second
            seconds2 = date2.hour * 3600 + date2.minute * 60 + date2.second
            if seconds1 > seconds2:
                months -= 1
        return months

    def validate_request(self, cr, uid, ids, context=None):
        """Validate a leave request against its leave type's rules.

        Raises when the employee lacks an appointment date, exceeds the
        type's allocation cap, misses a required medical certificate, or is
        not yet eligible (unless context carries 'no_check').
        """
        context = context and dict(context) or {}
        for leave in self.browse(cr, uid, ids, context=context):
            status = leave.holiday_status_id
            employee = leave.employee_id
            if not status.holiday:
                if not employee.appointed_on:
                    raise osv.except_osv(
                        _('Configuration Error!'),
                        _('Appointment Date not found for Employee - %s.')
                        % (employee.name))
                if leave.type == 'add':
                    if leave.number_of_days_temp > status.max_days:
                        raise osv.except_osv(
                            _('Not Allowed!'),
                            _('Maximum Allocation for this Leave Type is %s days.')
                            % (status.max_days))
                else:
                    if status.need_certificate and (not leave.certificate):
                        raise osv.except_osv(
                            _('Not Attached!!'),
                            _('Please attach medical certificate.'))
                    # 'in' instead of deprecated dict.has_key().
                    if 'no_check' not in context:
                        emp_joining = datetime.strptime(
                            employee.appointed_on, '%Y-%m-%d')
                        today = datetime.today()
                        months = self.months_between(today, emp_joining)
                        if months < status.eligible_months:
                            raise osv.except_osv(
                                _('Not Eligible!!'),
                                _('You are not yet eligible for this leave type.'))
        return True

    def write(self, cr, uid, ids, vals, context=None):
        """Re-validate the request after every write; skip the eligibility
        check (already enforced at creation)."""
        context = context and dict(context) or {}
        super(hr_holidays, self).write(cr, uid, ids, vals, context=context)
        context.update({'no_check': True})
        self.validate_request(cr, uid, ids, context=context)
        return True

    def create(self, cr, uid, values, context=None):
        """ Override to avoid automatic logging of creation """
        if context is None:
            context = {}
        context = dict(context, mail_create_nolog=True)
        # Only HR officers may create requests already in a validated state.
        if values.get('state') and values['state'] not in [
                'draft', 'confirm', 'cancel'] and not self.pool[
                    'res.users'].has_group(cr, uid, 'base.group_hr_user'):
            raise osv.except_osv(
                _('Warning!'),
                _('You cannot set a leave request as \'%s\'. Contact a human resource manager.')
                % values.get('state'))
        leave = super(hr_holidays, self).create(cr, uid, values,
                                                context=context)
        self.validate_request(cr, uid, [leave], context=context)
        return leave
# NOTE(review): fragment of l10n_ar_wsafip.loadcert_config — begins inside the
# error handling of a certificate-loading method whose head is outside this
# view.
                    _('Wrong Certificate file format'),
                    _('Be sure you have BEGIN CERTIFICATE string in your first line.'
                      ))
            else:
                # Unrecognized X509 error: surface the library's message.
                raise osv.except_osv(
                    _('Unknown error'),
                    _('X509 return this message:\n %s') % e[0])
        # Certificate accepted: mark the request as confirmed.
        wz.wsafip_request_id.write({'state': 'confirmed'})

    _name = 'l10n_ar_wsafip.loadcert_config'
    _inherit = 'res.config.installer'
    _columns = {
        'wsafip_request_id': fields.many2one('crypto.certificate',
                                             'Certificate Request',
                                             required=True),
        'wsafip_request_file': fields.binary(
            'Download Signed Certificate Request', readonly=True),
        'wsafip_request_filename': fields.char('Filename', readonly=True),
        'wsafip_response_file': fields.binary('Upload Certificate',
                                              required=True),
    }
    _defaults = {
        'wsafip_request_filename': 'request.csr',
    }

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
fname = attachment.datas_fname target = open (path+'/'+fname, 'a') print "\n targetttttttttttttttttttttttt", target target.write(datas) target.close() paths.append(path+'/'+fname) so_name = self.read(cr, uid, attach_ids, ['res_name'], context=context)[0].get('res_name') print "paths*****",paths self.upload_document(paths, so_name) return True def create(self, cr, uid, vals, context=None): return super(ir_attachment, self).create(cr, uid, vals, context) _columns = { 'url': fields.text('Url'), 'file_upload' : fields.binary('File Upload'), 'datafile_of':fields.char('File of'), } class google_drive(osv.osv): _name = 'google.drive' def connect_google(self, cr, uid, email, password, context=None): source = 'Document List Python Sample' try: self.gd_client = gdata.docs.service.DocsService() self.gd_client.ClientLogin(email, password, source=source) self.gs_client = gdata.spreadsheet.service.SpreadsheetsService() self.gs_client.ClientLogin(email, password, source=source) except: raise osv.except_osv(_('Error!'),_("Authentication Failed..."))
# NOTE(review): fragment of a document/PLM model — begins inside a
# name-uniqueness check (_check_duplication-style) whose head is outside this
# view.
        # Any other record with the same name/parent/resource/revision makes
        # the current name a duplicate.
        res = self.search(cr, uid,
                          [('id', '<>', directory.id), ('name', '=', name),
                           ('parent_id', '=', parent_id),
                           ('ressource_parent_type_id', '=',
                            ressource_parent_type_id),
                           ('ressource_id', '=', ressource_id),
                           ('revisionid', '=', revisionid)])
        if len(res):
            return False
        if op == 'create':
            # On create there is no current record to exclude; search as
            # superuser so records hidden by access rules still count.
            res = self.search(cr, SUPERUSER_ID,
                              [('name', '=', name),
                               ('parent_id', '=', parent_id),
                               ('ressource_parent_type_id', '=',
                                ressource_parent_type_id),
                               ('ressource_id', '=', ressource_id),
                               ('revisionid', '=', revisionid)])
            if len(res):
                return False
        return True

    # Overridden methods for this entity
    _columns = {
        'usedforspare': fields.boolean(
            'Used for Spare',
            help="Drawings marked here will be used printing Spare Part Manual report."),
        'revisionid': fields.integer('Revision Index', required=True),
        'writable': fields.boolean('Writable'),
        'datas': fields.function(_data_get, method=True, fnct_inv=_data_set,
                                 string='File Content', type="binary"),
        'printout': fields.binary('Printout Content',
                                  help="Print PDF content."),
        'preview': fields.binary('Preview Content', help="Static preview."),
        'state': fields.selection(USED_STATES, 'Status',
                                  help="The status of the product.",
                                  readonly="True", required=True),
    }
    _defaults = {
        'usedforspare': lambda *a: False,
        'revisionid': lambda *a: 0,
        'writable': lambda *a: True,
        'state': lambda *a: 'draft',
    }
    _sql_constraints = [
        ('name_unique', 'unique (name,revisionid)',
         'File name has to be unique!')
        # here we guarantee the uniqueness of the file name
    ]
class hr_employee(osv.osv):
    """Employee directory entry.

    Delegates its resource data to ``resource.resource`` via ``_inherits``
    and mixes in ``mail.thread`` so every employee record carries a chatter
    (followers, messages). ``_mail_post_access = 'read'`` lets any user who
    can read an employee post on it.

    Fixes vs. previous revision:
      * ``defaults`` renamed to ``_defaults`` — the old name was ignored by
        the ORM, so ``active``/``image``/``color`` defaults never applied;
      * "Smal-sized photo" label typo corrected.
    """
    _name = "hr.employee"
    _description = "Employee"
    _order = 'name_related'
    _inherits = {'resource.resource': "resource_id"}
    _inherit = ['mail.thread']
    _mail_post_access = 'read'

    def _get_image(self, cr, uid, ids, name, args, context=None):
        # Function-field getter: {id: resized variants} derived from the
        # stored full-size ``image``.
        result = dict.fromkeys(ids, False)
        for obj in self.browse(cr, uid, ids, context=context):
            result[obj.id] = tools.image_get_resized_images(obj.image)
        return result

    def _set_image(self, cr, uid, id, name, value, args, context=None):
        # Function-field setter: writing any variant normalizes it back
        # into the big ``image`` field.
        return self.write(cr, uid, [id],
                          {'image': tools.image_resize_image_big(value)},
                          context=context)

    _columns = {
        # we need a related field in order to be able to sort the employee by name
        'name_related': fields.related('resource_id', 'name', type='char',
                                       string='Name', readonly=True, store=True),
        'country_id': fields.many2one('res.country', 'Nationality'),
        'birthday': fields.date("Date of Birth"),
        'ssnid': fields.char('SSN No', size=32, help='Social Security Number'),
        'sinid': fields.char('SIN No', size=32, help="Social Insurance Number"),
        'identification_id': fields.char('Identification No', size=32),
        'otherid': fields.char('Other Id', size=64),
        'gender': fields.selection([('male', 'Male'), ('female', 'Female')],
                                   'Gender'),
        'marital': fields.selection([('single', 'Single'),
                                     ('married', 'Married'),
                                     ('widower', 'Widower'),
                                     ('divorced', 'Divorced')],
                                    'Marital Status'),
        'department_id': fields.many2one('hr.department', 'Department'),
        'address_id': fields.many2one('res.partner', 'Working Address'),
        'address_home_id': fields.many2one('res.partner', 'Home Address'),
        'bank_account_id': fields.many2one(
            'res.partner.bank', 'Bank Account Number',
            domain="[('partner_id','=',address_home_id)]",
            help="Employee bank salary account"),
        'work_phone': fields.char('Work Phone', size=32, readonly=False),
        'mobile_phone': fields.char('Work Mobile', size=32, readonly=False),
        'work_email': fields.char('Work Email', size=240),
        'work_location': fields.char('Office Location', size=32),
        'notes': fields.text('Notes'),
        'parent_id': fields.many2one('hr.employee', 'Manager'),
        'category_ids': fields.many2many('hr.employee.category',
                                         'employee_category_rel',
                                         'emp_id', 'category_id', 'Tags'),
        'child_ids': fields.one2many('hr.employee', 'parent_id', 'Subordinates'),
        # ondelete='cascade': deleting the resource deletes the employee row.
        'resource_id': fields.many2one('resource.resource', 'Resource',
                                       ondelete='cascade', required=True),
        'coach_id': fields.many2one('hr.employee', 'Coach'),
        'job_id': fields.many2one('hr.job', 'Job Title'),
        # image: all image fields are base64 encoded and PIL-supported
        'image': fields.binary(
            "Photo",
            help="This field holds the image used as photo for the employee, "
                 "limited to 1024x1024px."),
        'image_medium': fields.function(
            _get_image, fnct_inv=_set_image,
            string="Medium-sized photo", type="binary", multi="_get_image",
            store={
                'hr.employee': (lambda self, cr, uid, ids, c={}: ids, ['image'], 10),
            },
            help="Medium-sized photo of the employee. It is automatically "
                 "resized as a 128x128px image, with aspect ratio preserved. "
                 "Use this field in form views or some kanban views."),
        'image_small': fields.function(
            _get_image, fnct_inv=_set_image,
            string="Small-sized photo", type="binary", multi="_get_image",
            store={
                'hr.employee': (lambda self, cr, uid, ids, c={}: ids, ['image'], 10),
            },
            help="Small-sized photo of the employee. It is automatically "
                 "resized as a 64x64px image, with aspect ratio preserved. "
                 "Use this field anywhere a small image is required."),
        'passport_id': fields.char('Passport No', size=64),
        'color': fields.integer('Color Index'),
        'city': fields.related('address_id', 'city', type='char', string='City'),
        'login': fields.related('user_id', 'login', type='char',
                                string='Login', readonly=1),
        'last_login': fields.related('user_id', 'date', type='datetime',
                                     string='Latest Connection', readonly=1),
    }

    def _get_default_image(self, cr, uid, context=None):
        # Placeholder avatar shipped with the hr module, base64-encoded as
        # the binary field expects.
        image_path = get_module_resource('hr', 'static/src/img',
                                         'default_image.png')
        return tools.image_resize_image_big(
            open(image_path, 'rb').read().encode('base64'))

    # FIX: was ``defaults`` (no underscore), which the ORM silently ignored.
    _defaults = {
        'active': 1,
        'image': _get_default_image,
        'color': 0,
    }

    def copy_data(self, cr, uid, ids, default=None, context=None):
        # Never duplicate the subordinates list when copying an employee.
        if default is None:
            default = {}
        default.update({'child_ids': False})
        return super(hr_employee, self).copy_data(cr, uid, ids, default,
                                                  context=context)

    def _broadcast_welcome(self, cr, uid, employee_id, context=None):
        """ Broadcast the welcome message to all users in the employee company. """
        employee = self.browse(cr, uid, employee_id, context=context)
        partner_ids = []
        _model, group_id = self.pool['ir.model.data'].get_object_reference(
            cr, uid, 'base', 'group_user')
        # Resolve the company to notify, in decreasing order of specificity.
        if employee.user_id:
            company_id = employee.user_id.company_id.id
        elif employee.company_id:
            company_id = employee.company_id.id
        elif employee.job_id:
            company_id = employee.job_id.company_id.id
        elif employee.department_id:
            company_id = employee.department_id.company_id.id
        else:
            company_id = self.pool['res.company']._company_default_get(
                cr, uid, 'hr.employee', context=context)
        res_users = self.pool['res.users']
        user_ids = res_users.search(
            cr, SUPERUSER_ID,
            [('company_id', '=', company_id), ('groups_id', 'in', group_id)],
            context=context)
        partner_ids = list(
            set(u.partner_id.id for u in res_users.browse(
                cr, SUPERUSER_ID, user_ids, context=context)))
        self.message_post(
            cr, uid, [employee_id],
            body=_('Welcome to %s! Please help him/her take the first steps '
                   'with OpenERP!') % (employee.name),
            partner_ids=partner_ids,
            subtype='mail.mt_comment', context=context)
        return True

    def create(self, cr, uid, data, context=None):
        # With ``mail_broadcast`` in context, suppress the default creation
        # log and post a company-wide welcome message instead.
        if context is None:
            context = {}
        if context.get("mail_broadcast"):
            context['mail_create_nolog'] = True
        employee_id = super(hr_employee, self).create(cr, uid, data,
                                                      context=context)
        if context.get("mail_broadcast"):
            self._broadcast_welcome(cr, uid, employee_id, context=context)
        return employee_id

    def unlink(self, cr, uid, ids, context=None):
        # Delete the underlying resources; the employees themselves follow
        # through the ondelete='cascade' on resource_id.
        resource_ids = []
        for employee in self.browse(cr, uid, ids, context=context):
            resource_ids.append(employee.resource_id.id)
        return self.pool.get('resource.resource').unlink(
            cr, uid, resource_ids, context=context)

    def onchange_address_id(self, cr, uid, ids, address, context=None):
        # Prefill phone numbers from the selected working address.
        if address:
            address = self.pool.get('res.partner').browse(cr, uid, address,
                                                          context=context)
            return {'value': {'work_phone': address.phone,
                              'mobile_phone': address.mobile}}
        return {'value': {}}

    def onchange_company(self, cr, uid, ids, company, context=None):
        # Prefill the working address from the company's default address.
        address_id = False
        if company:
            company_id = self.pool.get('res.company').browse(cr, uid, company,
                                                             context=context)
            address = self.pool.get('res.partner').address_get(
                cr, uid, [company_id.partner_id.id], ['default'])
            address_id = address and address['default'] or False
        return {'value': {'address_id': address_id}}

    def onchange_department_id(self, cr, uid, ids, department_id,
                               context=None):
        # The department manager becomes the default direct manager.
        value = {'parent_id': False}
        if department_id:
            department = self.pool.get('hr.department').browse(
                cr, uid, department_id)
            value['parent_id'] = department.manager_id.id
        return {'value': value}

    def onchange_user(self, cr, uid, ids, user_id, context=None):
        # Prefill the work email from the linked user account.
        work_email = False
        if user_id:
            work_email = self.pool.get('res.users').browse(
                cr, uid, user_id, context=context).email
        return {'value': {'work_email': work_email}}

    def action_follow(self, cr, uid, ids, context=None):
        """ Wrapper because message_subscribe_users take a user_ids=None
            that receive the context without the wrapper. """
        return self.message_subscribe_users(cr, uid, ids, context=context)

    def action_unfollow(self, cr, uid, ids, context=None):
        """ Wrapper because message_unsubscribe_users take a user_ids=None
            that receive the context without the wrapper. """
        return self.message_unsubscribe_users(cr, uid, ids, context=context)

    def get_suggested_thread(self, cr, uid, removed_suggested_threads=None,
                             context=None):
        """Show the suggestion of employees if display_employees_suggestions
        in the user preference allows it. """
        user = self.pool.get('res.users').browse(cr, uid, uid, context)
        if not user.display_employees_suggestions:
            return []
        else:
            return super(hr_employee, self).get_suggested_thread(
                cr, uid, removed_suggested_threads, context)

    def _message_get_auto_subscribe_fields(self, cr, uid, updated_fields,
                                           auto_follow_fields=None,
                                           context=None):
        """ Overwrite of the original method to always follow user_id field,
        even when not track_visibility so that a user will follow it's
        employee """
        if auto_follow_fields is None:
            auto_follow_fields = ['user_id']
        user_field_lst = []
        for name, column_info in self._all_columns.items():
            if name in auto_follow_fields and name in updated_fields \
                    and column_info.column._obj == 'res.users':
                user_field_lst.append(name)
        return user_field_lst

    def _check_recursion(self, cr, uid, ids, context=None):
        # Walk up the manager chain; a cycle never terminates on its own,
        # so bail out (invalid) after 100 levels.
        level = 100
        while len(ids):
            cr.execute(
                'SELECT DISTINCT parent_id FROM hr_employee '
                'WHERE id IN %s AND parent_id!=id', (tuple(ids), ))
            ids = filter(None, map(lambda x: x[0], cr.fetchall()))
            if not level:
                return False
            level -= 1
        return True

    _constraints = [
        (_check_recursion,
         'Error! You cannot create recursive hierarchy of Employee(s).',
         ['parent_id']),
    ]
# NOTE(review): truncated chunk from a product-image model (the class header,
# `_get_url_image`/`_set_url_image`, and the end of the `_columns` dict — the
# `image_small` store mapping is cut mid-literal — are outside this view).
# Visible here: the tail of a function-field setter persisting `file_db_store`,
# resized-image getter/setter helpers mirroring the hr/product pattern
# (`avoid_resize_medium=True` keeps the medium variant un-upscaled), and most of
# `_columns`. The misspelled 'extention' field name is load-bearing (a rename
# would require a migration), so it is left byte-identical.
return self.write(cr, uid, id, {'file_db_store' : value}, context=context) def _get_image(self, cr, uid, ids, name, args, context=None): result = dict.fromkeys(ids, False) for obj in self.browse(cr, uid, ids, context=context): result[obj.id] = tools.image_get_resized_images(obj.image, avoid_resize_medium=True) return result def _set_image(self, cr, uid, id, name, value, args, context=None): return self.write(cr, uid, [id], {'image': tools.image_resize_image_big(value)}, context=context) _columns = { 'name':fields.char('Image Title', translate=True, size=128, required=True), 'extention': fields.char('file extention', size=6), 'is_link':fields.boolean('Link?', help="Images can be linked from files on your file system or remote (Preferred)"), 'file_db_store':fields.binary('Image stored in database'), 'file':fields.function(_get_url_image, fnct_inv=_set_url_image, type="binary", method=True, filters='*.png,*.jpg,*.jpeg,*.gif'), 'url':fields.char('File Location', size=255), 'image': fields.binary("Image", help="This field holds the image used as image for the product, limited to 1024x1024px."), 'image_medium': fields.function(_get_image, fnct_inv=_set_image, string="Medium-sized image", type="binary", multi="_get_image", store={ 'product.images': (lambda self, cr, uid, ids, c={}: ids, ['image'], 10), }, help="Medium-sized image of the product. It is automatically "\ "resized as a 128x128px image, with aspect ratio preserved, "\ "only when the image exceeds one of those sizes. Use this field in form views or some kanban views."), 'image_small': fields.function(_get_image, fnct_inv=_set_image, string="Small-sized image", type="binary", multi="_get_image", store={
class ir_import(orm.TransientModel):
    """base_import wizard: reads an uploaded CSV/ODS/XLS(X) file, matches its
    columns to the target model's importable fields, and builds a preview.

    Module-level names used below (`FIELDS_RECURSION_LIMIT`, `FILE_TYPE_DICT`,
    `EXTENSIONS`, `ERROR_PREVIEW_BYTES`, `guess_mimetype`, `_logger`) are
    defined outside this view.
    """
    _inherit = 'base_import.import'

    _columns = {
        'res_model': fields.char('Model'),
        # NOTE: raw bytes, not base64 — callers must not decode this field.
        'file': fields.binary(
            'File', help="File to check and/or import, raw binary (not base64)"),
        'file_name': fields.char('File Name'),
        'file_type': fields.char(string='File Type'),
    }

    def get_fields(self, cr, uid, model, context=None,
                   depth=FIELDS_RECURSION_LIMIT):
        """ Recursively get fields for the provided model (through
        fields_get) and filter them according to importability

        The output format is a list of ``Field``, with ``Field`` defined as:

        .. class:: Field

            .. attribute:: id (str)

                A non-unique identifier for the field, used to compute
                the span of the ``required`` attribute: if multiple
                ``required`` fields have the same id, only one of them
                is necessary.

            .. attribute:: name (str)

                The field's logical (Odoo) name within the scope of
                its parent.

            .. attribute:: string (str)

                The field's human-readable name (``@string``)

            .. attribute:: required (bool)

                Whether the field is marked as required in the
                model. Clients must provide non-empty import values
                for all required fields or the import will error out.

            .. attribute:: fields (list(Field))

                The current field's subfields. The database and
                external identifiers for m2o and m2m fields; a
                filtered and transformed fields_get for o2m fields (to
                a variable depth defined by ``depth``).

                Fields with no sub-fields will have an empty list of
                sub-fields.

        :param str model: name of the model to get fields form
        :param int depth: depth of recursion into o2m fields
        """
        model_obj = self.pool[model]
        # The pseudo-field "id" (external identifier) is always offered.
        fields = [{
            'id': 'id',
            'name': 'id',
            'string': _("External ID"),
            'required': False,
            'fields': [],
        }]
        fields_got = model_obj.fields_get(cr, uid, context=context)
        blacklist = orm.MAGIC_COLUMNS + [model_obj.CONCURRENCY_CHECK_FIELD]
        for name, field in fields_got.iteritems():
            if name in blacklist:
                continue
            # an empty string means the field is deprecated, @deprecated must
            # be absent or False to mean not-deprecated
            if field.get('deprecated', False) is not False:
                continue
            if field.get('readonly'):
                states = field.get('states')
                if not states:
                    continue
                # states = {state: [(attr, value), (attr2, value2)], state2:...}
                # keep the field only if some state flips readonly off
                if not any(attr == 'readonly' and value is False
                           for attr, value in itertools.chain.from_iterable(
                               states.itervalues())):
                    continue
            f = {
                'id': name,
                'name': name,
                'string': field['string'],
                # Y U NO ALWAYS HAS REQUIRED
                'required': bool(field.get('required')),
                'fields': [],
            }
            if field['type'] in ('many2many', 'many2one'):
                # relational columns can be set by external or database id
                f['fields'] = [
                    dict(f, name='id', string=_("External ID")),
                    dict(f, name='.id', string=_("Database ID")),
                ]
            elif field['type'] == 'one2many' and depth:
                f['fields'] = self.get_fields(cr, uid, field['relation'],
                                              context=context, depth=depth - 1)
                if self.user_has_groups(cr, uid, 'base.group_no_one'):
                    f['fields'].append({
                        'id': '.id', 'name': '.id',
                        'string': _("Database ID"),
                        'required': False, 'fields': []})
            fields.append(f)
        # TODO: cache on model?
        return fields

    def _read_file(self, file_type, record, options):
        """Dispatch to a ``_read_<ext>`` reader, trying (1) the mimetype
        guessed from content, (2) the user-provided mimetype, (3) the file
        extension; raises if nothing can parse the file."""
        # guess mimetype from file content
        mimetype = guess_mimetype(record.file)
        (file_extension, handler, req) = FILE_TYPE_DICT.get(
            mimetype, (None, None, None))
        if handler:
            try:
                return getattr(self, '_read_' + file_extension)(record, options)
            except Exception:
                _logger.warn(
                    "Failed to read file '%s' (transient id %d) using guessed mimetype %s",
                    record.file_name or '<unknown>', record.id, mimetype)
        # try reading with user-provided mimetype
        (file_extension, handler, req) = FILE_TYPE_DICT.get(
            file_type, (None, None, None))
        if handler:
            try:
                return getattr(self, '_read_' + file_extension)(record, options)
            except Exception:
                _logger.warn(
                    "Failed to read file '%s' (transient id %d) using user-provided mimetype %s",
                    record.file_name or '<unknown>', record.id, file_type)
        # fallback on file extensions as mime types can be unreliable (e.g.
        # software setting incorrect mime types, or non-installed software
        # leading to browser not sending mime types)
        if record.file_name:
            p, ext = os.path.splitext(record.file_name)
            if ext in EXTENSIONS:
                try:
                    return getattr(self, '_read_' + ext[1:])(record, options)
                except Exception:
                    _logger.warn(
                        "Failed to read file '%s' (transient id %s) using file extension",
                        record.file_name, record.id)
        # ``req`` names the missing optional Python module for this format.
        if req:
            raise ImportError(
                _("Unable to load \"{extension}\" file: requires Python module \"{modname}\""
                  ).format(extension=file_extension, modname=req))
        raise ValueError(
            _("Unsupported file format \"{}\", import only supports CSV, ODS, XLS and XLSX"
              ).format(file_type))

    def _read_xls(self, record, options):
        # xlrd parses the raw bytes directly; only the first sheet is read.
        book = xlrd.open_workbook(file_contents=record.file)
        return self._read_xls_book(book)

    def _read_xls_book(self, book):
        """Yield each non-empty row of the first sheet as a list of unicode
        strings, normalizing numbers, dates and booleans."""
        sheet = book.sheet_by_index(0)
        # emulate Sheet.get_rows for pre-0.9.4
        for row in itertools.imap(sheet.row, range(sheet.nrows)):
            values = []
            for cell in row:
                if cell.ctype is xlrd.XL_CELL_NUMBER:
                    # integral floats are rendered without a decimal part
                    is_float = cell.value % 1 != 0.0
                    values.append(
                        unicode(cell.value)
                        if is_float
                        else unicode(int(cell.value)))
                elif cell.ctype is xlrd.XL_CELL_DATE:
                    is_datetime = cell.value % 1 != 0.0
                    # emulate xldate_as_datetime for pre-0.9.3
                    dt = datetime.datetime(*xlrd.xldate.xldate_as_tuple(
                        cell.value, book.datemode))
                    values.append(
                        dt.strftime(DEFAULT_SERVER_DATETIME_FORMAT)
                        if is_datetime
                        else dt.strftime(DEFAULT_SERVER_DATE_FORMAT))
                elif cell.ctype is xlrd.XL_CELL_BOOLEAN:
                    values.append(u'True' if cell.value else u'False')
                elif cell.ctype is xlrd.XL_CELL_ERROR:
                    raise ValueError(
                        _("Error cell found while reading XLS/XLSX file: %s")
                        % xlrd.error_text_from_code.get(
                            cell.value, "unknown error code %s" % cell.value))
                else:
                    values.append(cell.value)
            if any(x for x in values if x.strip()):
                yield values

    # xlrd reads both formats through the same entry point.
    _read_xlsx = _read_xls

    def _read_ods(self, record, options):
        # odf_ods_reader is an optional dependency resolved at module level.
        doc = odf_ods_reader.ODSReader(file=io.BytesIO(record.file))
        return (row for row in doc.getFirstSheet()
                if any(x for x in row if x.strip()))

    def _read_csv(self, record, options):
        """ Returns a CSV-parsed iterator of all non-empty lines in the file

        :throws csv.Error: if an error is detected during CSV parsing
        :throws UnicodeDecodeError: if ``options.encoding`` is incorrect
        """
        csv_data = record.file
        # TODO: guess encoding with chardet? Or https://github.com/aadsm/jschardet
        encoding = options.get('encoding', 'utf-8')
        if encoding != 'utf-8':
            # csv module expect utf-8, see http://docs.python.org/2/library/csv.html
            csv_data = csv_data.decode(encoding).encode('utf-8')
        csv_iterator = csv.reader(StringIO(csv_data),
                                  quotechar=str(options['quoting']),
                                  delimiter=str(options['separator']))
        return ([item.decode('utf-8') for item in row]
                for row in csv_iterator
                if any(x for x in row if x.strip()))

    def _match_header(self, header, fields, options):
        """ Attempts to match a given header to a field of the imported model.

        :param str header: header name from the CSV file
        :param fields:
        :param dict options:
        :returns: an empty list if the header couldn't be matched, or all the
            fields to traverse
        :rtype: list(Field)
        """
        string_match = None
        for field in fields:
            # FIXME: should match all translations & original
            # TODO: use string distance (levenshtein? hamming?)
            if header.lower() == field['name'].lower():
                return [field]
            if header.lower() == field['string'].lower():
                # matching string are not reliable way because
                # strings have no unique constraint
                string_match = field
        if string_match:
            # this behavior is only applied if there is no matching field['name']
            return [string_match]

        if '/' not in header:
            return []

        # relational field path
        traversal = []
        subfields = fields
        # Iteratively dive into fields tree
        for section in header.split('/'):
            # Strip section in case spaces are added around '/' for
            # readability of paths
            match = self._match_header(section.strip(), subfields, options)
            # Any match failure, exit
            if not match:
                return []
            # prep subfields for next iteration within match[0]
            field = match[0]
            subfields = field['fields']
            traversal.append(field)
        return traversal

    def _match_headers(self, rows, fields, options):
        """ Attempts to match the imported model's fields to the
        titles of the parsed CSV file, if the file is supposed to have
        headers.

        Will consume the first line of the ``rows`` iterator.

        Returns a pair of (None, None) if headers were not requested
        or the list of headers and a dict mapping cell indices to key
        paths in the ``fields`` tree

        :param Iterator rows:
        :param dict fields:
        :param dict options:
        :rtype: (None, None) | (list(str), dict(int: list(str)))
        """
        if not options.get('headers'):
            return None, None

        headers = next(rows)
        return headers, {
            index: [field['name']
                    for field in self._match_header(header, fields, options)]
            or None
            for index, header in enumerate(headers)
        }

    def parse_preview(self, cr, uid, id, options, count=10, context=None):
        """ Generates a preview of the uploaded files, and performs
        fields-matching between the import's file data and the model's
        columns.

        If the headers are not requested (not options.headers),
        ``matches`` and ``headers`` are both ``False``.

        :param id: identifier of the import
        :param int count: number of preview lines to generate
        :param options: format-specific options.
                        CSV: {encoding, quoting, separator, headers}
        :type options: {str, str, str, bool}
        :returns: {fields, matches, headers, preview} | {error, preview}
        :rtype: {dict(str: dict(...)), dict(int, list(str)), list(str),
                 list(list(str))} | {str, str}
        """
        (record, ) = self.browse(cr, uid, [id], context=context)
        fields = self.get_fields(cr, uid, record.res_model, context=context)
        try:
            rows = self._read_file(record.file_type, record, options)
            headers, matches = self._match_headers(rows, fields, options)
            # Match should have consumed the first row (iif headers), get
            # the ``count`` next rows for preview
            preview = list(itertools.islice(rows, count))
            assert preview, "CSV file seems to have no content"
            return {
                'fields': fields,
                'matches': matches or False,
                'headers': headers or False,
                'preview': preview,
            }
        except Exception, e:
            # Due to lazy generators, UnicodeDecodeError (for
            # instance) may only be raised when serializing the
            # preview to a list in the return.
            _logger.debug("Error during parsing preview", exc_info=True)
            preview = None
            if record.file_type == 'text/csv':
                preview = record.file[:ERROR_PREVIEW_BYTES].decode(
                    'iso-8859-1')
            return {
                'error': str(e),
                # iso-8859-1 ensures decoding will always succeed,
                # even if it yields non-printable characters. This is
                # in case of UnicodeDecodeError (or csv.Error
                # compounded with UnicodeDecodeError)
                'preview': preview,
            }
class report_retrocession_wizard(orm.TransientModel):
    """Wizard generating retrocession reports (payment certificates or a
    fractionation XLS workbook) for a selection of mandates.

    Module-level names used below (`AVAILABLE_REPORTS`,
    `FRACTIONATION_REPORTS_MANDATES_HEADER`,
    `FRACTIONATION_REPORTS_INSTANCE_HEADER`) are defined outside this view.
    """
    _name = "report.retrocession.wizard"
    _description = 'Retrocessions Reports Generator'

    _columns = {
        'model': fields.char('Model', size=128, required=True),
        'report': fields.selection(
            AVAILABLE_REPORTS,
            'Report',
            required=True,
        ),
        # NOTE(review): a python-literal string of ids, later re-read with
        # eval(); a many2many would be safer but changes the stored format.
        'mandate_ids': fields.text('IDS', required=True),
        'year': fields.char('Year', size=128, required=True),
        'mandate_selected': fields.integer('Selected Mandates'),
        'yearly_count': fields.integer('Yearly Retrocessions Selected'),
        'monthly_count': fields.integer('Monthly Retrocessions Selected'),
        'yearly_print': fields.integer('Yearly Retrocessions To Print'),
        'monthly_print': fields.integer('Monthly Retrocessions To Print'),
        'total_mandates': fields.integer('Mandates To Print'),
        'data': fields.binary('XLS', readonly=True),
        'export_filename': fields.char('Export XLS Filename', size=128),
    }

    def default_get(self, cr, uid, flds, context=None):
        """ To get default values for the object. """
        context = context or {}
        ids = []
        model = context.get('active_model', False)
        # Prefer the full active_domain selection over explicit active_ids.
        if context.get('active_domain'):
            active_domain = context.get('active_domain')
            ids = self.pool.get(model).search(cr, uid, active_domain,
                                              context=context)
        elif context.get('active_ids'):
            ids = context.get('active_ids') or (
                context.get('active_id') and [context.get('active_id')]) or []
        res = {
            'model': model,
            'mandate_ids': str(ids),
            'year': datetime.date.today().strftime("%Y"),
            'mandate_selected': len(ids),
            'report': context.get('document', 'certificates'),
        }
        return res

    def mandate_selection_analysis(self, cr, uid, year, model, ids,
                                   mode='onchange', context=None):
        """
        =============== mandate_selection_analysis ==============)
        Analyse mandate selection and give an overview of expected results
        """
        # ('active', '<=', True) also matches archived records.
        monthly_ids = self.pool[model].search(cr, uid, [
            ('retrocession_mode', '=', 'month'),
            ('id', 'in', ids),
            ('active', '<=', True),
        ])
        yearly_ids = self.pool[model].search(cr, uid, [
            ('retrocession_mode', '=', 'year'),
            ('id', 'in', ids),
            ('active', '<=', True),
        ])
        retro_pool = self.pool['retrocession']
        foreign_key = retro_pool.get_relation_column_name(cr, uid, model,
                                                          context=context)
        # Keep only mandates with at least one paid, validated/done
        # retrocession for the requested year.
        monthly_print_ids = [
            data[foreign_key][0] for data in retro_pool.search_read(
                cr, uid, [('year', '=', year),
                          (foreign_key, 'in', monthly_ids),
                          ('state', 'in', ['validated', 'done']),
                          ('active', '<=', True),
                          ('amount_paid', '>', 0)],
                fields=[foreign_key], context=context)
        ]
        yearly_print_ids = [
            data[foreign_key][0] for data in retro_pool.search_read(
                cr, uid, [('year', '=', year),
                          (foreign_key, 'in', yearly_ids),
                          ('state', 'in', ['validated', 'done']),
                          ('active', '<=', True),
                          ('amount_paid', '>', 0)],
                fields=[foreign_key], context=context)
        ]
        if mode == 'ids':
            res = monthly_print_ids + yearly_print_ids
        else:
            total = len(yearly_print_ids) + len(monthly_print_ids)
            res = {
                'total_mandates': total,
                'yearly_count': len(yearly_ids),
                'yearly_print': len(yearly_print_ids),
                'monthly_count': len(monthly_ids),
                'monthly_print': len(monthly_print_ids),
            }
        return res

    def onchange_year(self, cr, uid, ids, year, model, mandate_ids,
                      context=None):
        # Refresh the selection counters when the year changes.
        return {
            'value': self.mandate_selection_analysis(
                cr, uid, year, model, eval(mandate_ids), context=context)
        }

    def print_report(self, cr, uid, ids, context=None):
        """
        ======================= print_report =======================
        Print report for valid selected mandates
        """
        wizard = self.browse(cr, uid, ids, context=context)[0]
        if wizard.report == 'certificates':
            return self.print_certificates(cr, uid, wizard, context=context)
        elif wizard.report == 'fractionations':
            return self.print_fractionations(cr, uid, wizard, context=context)

    def _get_mandate_retrocession_amounts(self, cr, uid, mandate_model,
                                          mandate_id, year, context=None):
        """
        ================================= _get_mandate_retrocession_amounts
        =================================
        Return amount_to_pay and amount_paid total for all retrocession of a
        given mandate for a specific year
        """
        retro_pool = self.pool.get('retrocession')
        foreign_key = retro_pool.get_relation_column_name(
            cr, uid, mandate_model, context=context)
        data = retro_pool.search_read(
            cr, uid, [(foreign_key, '=', mandate_id),
                      ('year', '=', year),
                      ('state', 'in', ['validated', 'done']),
                      ('active', '<=', True)],
            ['amount_total', 'amount_paid'], context=context)
        amount_total = sum([record['amount_total'] for record in data])
        amount_paid = sum([record['amount_paid'] for record in data])
        return amount_total, amount_paid

    def _get_fractionation_data(self, cr, uid, mandate_ids, mandate_model,
                                assembly_model, year, context=None):
        """
        ======================= _get_fractionation_data =======================
        Return mandate data and instance data (aggregation) needed to generate
        Excel sheets
        """
        assembly_key = self.pool[mandate_model].get_relation_column_name(
            cr, uid, assembly_model, context=context)
        mandates_data = {}
        instances_data = {}
        for mandate in self.pool[mandate_model].browse(cr, uid, mandate_ids,
                                                       context=context):
            # Assembly-level fractionation wins over the category default.
            fractionation = mandate[assembly_key].fractionation_id \
                or mandate.mandate_category_id.fractionation_id
            if not fractionation:
                continue
            level_dict = {}
            for line in fractionation.fractionation_line_ids:
                level_dict[line.power_level_id.id] = line.percentage
            instance_id = mandate.partner_id.int_instance_id.id
            amount_total, amount_paid = \
                self._get_mandate_retrocession_amounts(
                    cr, uid, mandate_model, mandate.id, year, context=context)
            inst_split, pl_split = self._split_amount(
                cr, uid, amount_paid, level_dict, instance_id, context=context)
            # Aggregate the per-instance amounts across mandates.
            for instance_id in inst_split:
                data = inst_split[instance_id]
                if not instances_data.get(instance_id, False):
                    instances_data[instance_id] = data
                else:
                    instances_data[instance_id]['Amount'] += data['Amount']
            # Keys below are report column titles, consumed verbatim by the
            # XLS writer in print_fractionations.
            data = {}
            data['Mandate Category'] = mandate.mandate_category_id.name
            data['Assembly'] = mandate[assembly_key].name
            data['Start Date'] = self.pool['res.lang'].format_date(
                cr, uid, mandate.start_date, context=context)
            data['End Date'] = self.pool['res.lang'].format_date(
                cr, uid, mandate.end_date or mandate.deadline_date,
                context=context)
            data['Representative'] = mandate.partner_id.name
            data['Retrocession Mode'] = mandate.retrocession_mode
            data['Amount Due'] = amount_total
            data['Amount Paid'] = amount_paid
            data['split'] = pl_split
            mandates_data[mandate.id] = data
        return instances_data, mandates_data

    def _split_amount(self, cr, uid, amount, rules, instance_id, context=None):
        """
        ============================ _split_amount_by_power_level
        ============================
        Return amount splitted by power level
        """
        inst_split, pl_split = {}, {}
        rest = amount
        # Walk up the instance hierarchy, allocating the percentage defined
        # for each power level encountered; whatever is left at the top is
        # reported as 'Unfractioned'.
        while True:
            instance = self.pool['int.instance'].browse(cr, uid, instance_id,
                                                        context=context)
            power_level_id = instance.power_level_id.id
            power_level_name = instance.power_level_id.name
            if power_level_id in rules:
                percentage = rules[power_level_id]
                split_value = round((amount * percentage) / 100, 2)
                rest -= split_value
                pl_split[power_level_id] = dict(id=instance.id,
                                                name=instance.name,
                                                amount=split_value)
                inst_split[instance_id] = {
                    'Instance': instance.name,
                    'Amount': split_value,
                    'Power Level': power_level_name
                }
            if not instance.parent_id:
                break
            else:
                instance_id = instance.parent_id.id
        if rest > 0:
            pl_split['Unfractioned'] = rest
            inst_split['Unfractioned'] = {
                'Instance': 'Unfractioned Amount',
                'Amount': rest,
                'Power Level': ''
            }
        return inst_split, pl_split

    def _extract_power_level_ids(self, cr, uid, mandates_data, context=None):
        """
        ========================= _extract_power_level_ids
        =========================
        Return list of power level ids needed for report header
        """
        ids = []
        for mandate_key in mandates_data:
            ids.extend([
                key for key in mandates_data[mandate_key]['split']
                if key != 'Unfractioned'
            ])
        return list(set(ids))

    def print_certificates(self, cr, uid, wizard, context=None):
        """
        ======================= print_certificates =======================
        Print certificates for valid selected mandates
        """
        # NOTE(review): eval() on wizard.mandate_ids — the value is produced
        # by default_get (str(ids)) so it is trusted, but ast.literal_eval
        # would be safer.
        mandate_ids = self.mandate_selection_analysis(
            cr, uid, wizard.year, wizard.model, eval(wizard.mandate_ids),
            mode='ids', context=context)
        context['active_ids'] = mandate_ids
        secretariat_dict = {}
        need_signature = {}
        retro_amounts = {}
        for mandate in self.pool[wizard.model].read(
                cr, uid, mandate_ids, ['retro_instance_id'], context=context):
            instance_id = mandate['retro_instance_id'][0]
            default_instance_id = self.pool.get('int.instance').get_default(
                cr, uid, context=context)
            # A signature is required only for the default instance.
            need_signature[mandate['id']] = instance_id == default_instance_id
            int_ass_pool = self.pool['int.assembly']
            assembly_ids = int_ass_pool.search(
                cr, uid, [('instance_id', '=', instance_id),
                          ('is_secretariat', '=', True)], context=context)
            if not assembly_ids:
                secretariat_name = False
            else:
                secretariat_name = int_ass_pool.read(
                    cr, uid, [assembly_ids[0]], ['name'],
                    context=context)[0]['name']
            secretariat_dict[mandate['id']] = secretariat_name
            amount_total, amount_paid = \
                self._get_mandate_retrocession_amounts(
                    cr, uid, wizard.model, mandate['id'], wizard.year,
                    context=context)
            # Certify at most what was due, even if more was paid.
            retro_amounts[mandate['id']] = amount_paid \
                if amount_paid <= amount_total else amount_total
        data = {
            'model': wizard.model,
            'year': wizard.year,
            'secretariat': secretariat_dict,
            'signature': need_signature,
            'amounts': retro_amounts,
        }
        return self.pool['report'].get_action(
            cr, uid, [], 'mozaik_retrocession.report_payment_certificate',
            data=data, context=context)

    def print_fractionations(self, cr, uid, wizard, context=None):
        """
        ======================= print_fractionations =======================
        Generate xls file about fractionations for valid selected mandates
        """
        if wizard.model == 'sta.mandate':
            assembly_model = 'sta.assembly'
        else:
            assembly_model = 'ext.assembly'
        mandate_ids = self.mandate_selection_analysis(
            cr, uid, wizard.year, wizard.model, eval(wizard.mandate_ids),
            mode='ids', context=context)
        context['active_ids'] = mandate_ids
        inst_data, mandates_data = self._get_fractionation_data(
            cr, uid, mandate_ids, wizard.model, assembly_model, wizard.year,
            context=context)
        power_level_ids = self._extract_power_level_ids(
            cr, uid, mandates_data, context=context)
        xls_wbk = xlwt.Workbook()
        xls_sheet = xls_wbk.add_sheet(_('Mandates'))
        style_string = "font: bold on; border: top thin, right thin,\
 bottom thin, left thin;align: horiz center"
        headerstyle = xlwt.easyxf(style_string)
        # Fixed columns first, then two columns (name, amount) per power
        # level, then a single 'Unfractioned' column.
        report_header = FRACTIONATION_REPORTS_MANDATES_HEADER[:]
        pl_start_index = len(report_header)
        pl_index = {}
        index = 0
        for power_level in self.pool['int.power.level'].read(
                cr, uid, power_level_ids, ['name'], context=context):
            report_header.append(power_level['name'])
            pl_index[power_level['id']] = pl_start_index + index
            index += 2
        report_header.append('Unfractioned')
        pl_index['Unfractioned'] = pl_start_index + index
        step = 0
        for icol, column in enumerate(report_header):
            if icol < pl_start_index or column == 'Unfractioned':
                icol += step
                xls_sheet.write(0, icol, column, headerstyle)
            else:
                # power level data need 2 columns
                icol += step
                next_col = icol + 1
                xls_sheet.write_merge(0, 0, icol, next_col, column,
                                      headerstyle)
                step += 1
        # Freeze the header row.
        xls_sheet.set_panes_frozen(True)
        xls_sheet.set_horz_split_pos(1)
        xls_sheet.set_remove_splits(True)
        step = 0
        for idata, key in enumerate(mandates_data):
            data = mandates_data[key]
            irow = idata + 1
            for icol, column in enumerate(report_header):
                if icol < pl_start_index:
                    xls_sheet.write(irow, icol, data.get(column, ''))
                else:
                    break
            # write power level data
            for pl_id in data['split']:
                icol = pl_index[pl_id]
                value = data['split'][pl_id]
                if pl_id != 'Unfractioned':
                    xls_sheet.write(irow, icol, value.get('name', ''))
                    icol += 1
                    xls_sheet.write(irow, icol, value.get('amount', ''))
                else:
                    xls_sheet.write(irow, icol, value)
        # Second sheet: amounts aggregated per instance.
        xls_sheet = xls_wbk.add_sheet(_('Instances'))
        report_header = FRACTIONATION_REPORTS_INSTANCE_HEADER[:]
        for icol, column in enumerate(report_header):
            xls_sheet.write(0, icol, column, headerstyle)
        xls_sheet.set_panes_frozen(True)
        xls_sheet.set_horz_split_pos(1)
        xls_sheet.set_remove_splits(True)
        irow = 1
        for instance_id in inst_data:
            data = inst_data[instance_id]
            for icol, column in enumerate(report_header):
                xls_sheet.write(irow, icol, data[column])
            irow += 1
        # Serialize the workbook and store it (base64) on the wizard so the
        # reopened form exposes it as a download.
        file_data = StringIO()
        xls_wbk.save(file_data)
        out = base64.encodestring(file_data.getvalue())
        filename = _("Fractionations") + '_' + wizard.year + '.xls'
        self.write(cr, uid, wizard.id, {
            'data': out,
            'export_filename': filename
        }, context=context)
        return {
            'name': 'Fractionations Report',
            'type': 'ir.actions.act_window',
            'res_model': 'report.retrocession.wizard',
            'view_mode': 'form',
            'view_type': 'form',
            'res_id': wizard.id,
            'views': [(False, 'form')],
            'target': 'new',
        }
class asset_extra(osv.osv):
    """Extra-comptable asset register: assets tracked outside the accounting
    depreciation engine.

    Workflow: draft -> open (Running) -> approve (Waiting Approval) -> close.
    Most fields are editable only in the 'draft' state.
    """
    _name = "asset.extra"
    _columns = {
        'name': fields.char('Asset Extra Comptable Name', required=True, readonly=True,
                            states={'draft': [('readonly', False)]}),
        'code': fields.char('Reference', size=32, readonly=True,
                            states={'draft': [('readonly', False)]}),
        'asset_desc': fields.char("Description", size=512, readonly=True,
                                  states={'draft': [('readonly', False)]}),
        'purchase_value': fields.float('Purchase Value', required=True, readonly=True,
                                       states={'draft': [('readonly', False)]}),
        'currency_id': fields.many2one('res.currency', 'Currency', required=False, readonly=True,
                                       states={'draft': [('readonly', False)]}),
        'company_id': fields.many2one('res.company', 'Company', required=True, readonly=True,
                                      states={'draft': [('readonly', False)]}),
        'note': fields.text('Note'),
        'purchase_date': fields.date('Purchase Date', required=True, readonly=True,
                                     states={'draft': [('readonly', False)]}),
        'state': fields.selection([('draft', 'Draft'),
                                   ('open', 'Running'),
                                   ('approve', "Waiting Approval"),
                                   ('close', 'Close'),
                                   ], 'Status', required=True, copy=False,
                                  help="When an asset is created, the status is 'Draft'.\n"
                                       "If the asset is confirmed, the status goes in 'Running' and the depreciation lines can be posted in the accounting.\n"
                                       "You can manually close an asset when the depreciation is over. If the last line of depreciation is posted, the asset automatically goes in that status."),
        # FIX: 'active' was declared twice with identical definitions in the
        # original _columns dict literal (the later one silently won); the
        # duplicate key has been removed — resulting schema is unchanged.
        'active': fields.boolean('Active'),
        'partner_id': fields.many2one('res.partner', 'Partner', readonly=True,
                                      states={'draft': [('readonly', False)]}),
        'group_id': fields.many2one('asset.extra.group', 'Asset Group', change_default=True, readonly=True,
                                    states={'draft': [('readonly', False)]}),
        'asset_class_id': fields.many2one('account.asset.class', 'Asset Class', change_default=True, readonly=True,
                                          states={'draft': [('readonly', False)]}),
        'purchase_date1': fields.date('Purchase Date', required=True, readonly=True,
                                      states={'draft': [('readonly', False)]}),
        'image': fields.binary("Image", help="This field holds the image used as image for the Asset, limited to 1024x1024px."),
        'asset_number': fields.char('Asset Number', required=True, copy=False, readonly=True,
                                    states={'draft': [('readonly', False)]}),
        'location': fields.char('Location', readonly=True,
                                states={'draft': [('readonly', False)]}),
        'user_id': fields.many2one('res.users', 'Responsible ', readonly=True,
                                   states={'draft': [('readonly', False)]}),
        'asset_location_id': fields.many2one('asset.location', 'Location'),
        'employee_id': fields.many2one("hr.employee", "PIC Asset", required=False, readonly=True,
                                       states={"draft": [("readonly", False)]}),
        'department_id': fields.many2one("hr.department", "Department"),
        'distrik_id': fields.many2one("hr.distrik", "Distrik"),
        'log_asset_ids': fields.one2many('asset.extra.log', 'asset_extra_id', 'Log'),
        'close_reason': fields.text("To Close Reasons", required=False,
                                    states={"approve": [("required", True)]}),
        'date_last_check': fields.date('Last Check'),
        'origin': fields.char("Source Document", size=128),
    }
    _defaults = {
        'purchase_date1': lambda obj, cr, uid, context: time.strftime('%Y-%m-%d'),
        'asset_number': lambda obj, cr, uid, context: '/',
        'user_id': lambda obj, cr, uid, context: uid,
        'active': True,
        'state': 'draft',
        'company_id': lambda self, cr, uid, context: self.pool.get('res.company')._company_default_get(
            cr, uid, 'asset.extra', context=context),
    }

    def onchange_company_id(self, cr, uid, ids, company_id=False, context=None):
        """Propose the company's currency when the company changes.

        Clears currency_id when the company currency belongs to a different
        company; otherwise proposes the company's own currency.
        """
        val = {}
        if company_id:
            company = self.pool.get('res.company').browse(cr, uid, company_id, context=context)
            if company.currency_id.company_id and company.currency_id.company_id.id != company_id:
                val['currency_id'] = False
            else:
                val['currency_id'] = company.currency_id.id
        return {'value': val}

    def onchange_employee_id(self, cr, uid, ids, employee_id):
        """Follow department/distrik from the selected PIC employee.

        FIX: the original returned browse records as the onchange values
        (many2one values must be database ids), left a debug ``print`` in
        place, and crashed on ``browse([False])`` when the employee was
        cleared.  Now returns plain ids and handles an empty selection.
        """
        if not employee_id:
            return {'value': {}}
        employee = self.pool.get('hr.employee').browse(cr, uid, employee_id)
        return {'value': {
            'department_id': employee.department_id and employee.department_id.id or False,
            'distrik_id': employee.distrik_id and employee.distrik_id.id or False,
        }}

    def validate(self, cr, uid, ids, context=None):
        """Confirm the asset: move it to 'open' and pull the real number
        from the 'asset.extra' sequence (replacing the '/' placeholder)."""
        if context is None:
            context = {}
        return self.write(cr, uid, ids, {
            'state': 'open',
            'asset_number': self.pool.get('ir.sequence').get(cr, uid, 'asset.extra') or '/',
        }, context)

    def set_to_close(self, cr, uid, ids, context=None):
        """Request closing: move to 'approve' (Waiting Approval)."""
        return self.write(cr, uid, ids, {'state': 'approve'}, context=context)

    def appr_to_close(self, cr, uid, ids, context=None):
        """Approve the close request: move to 'close'."""
        return self.write(cr, uid, ids, {'state': 'close'}, context=context)

    def set_to_draft(self, cr, uid, ids, context=None):
        """Reset the asset back to 'draft'."""
        return self.write(cr, uid, ids, {'state': 'draft'}, context=context)
class stock_import_inventory(osv.osv_memory): _name = "stock.import.inventory" _description = "Import Inventory" def _default_location(self, cr, uid, ids, context=None): try: loc_model, location_id = self.pool.get("ir.model.data").get_object_reference( cr, uid, "stock", "stock_location_stock" ) except ValueError, e: return False return location_id or False _columns = { "location_id": fields.many2one("stock.location", "Location", required=True), "import_file": fields.binary("File", filters="*.xls"), # to consider the product current inventory or not, if yes then add the current inventory to the upload excel quantity as the quantity to do physical inventory "consider_inventory": fields.boolean("Consider Current Inventory", select=True), "all_done": fields.boolean("All Data Imported", readonly=True, select=True), "result_line": fields.one2many("stock.import.inventory.result", "import_id", "Importing Result", readonly=True), } _defaults = {"location_id": _default_location} def view_init(self, cr, uid, fields_list, context=None): """ Creates view dynamically and adding fields at runtime. @param self: The object pointer. @param cr: A database cursor @param uid: ID of the user currently logged in @param context: A standard dictionary @return: New arch of view with new columns.
class LibraryBook(osv.osv):
    """A book in the library: master data, resized images and borrow state.

    State transitions: in_store <-> borrowed (driven by borrow records via
    :meth:`sync_book_status`), plus terminal scrap/lost set manually.
    """
    _name = 'library.book.book'
    _description = 'Book Info'
    _log_access = True

    def _image_resize_image_medium(self, cr, uid, ids, name, args, context=None):
        # NOTE(review): apparently unused — _columns wires image_medium to
        # _get_image, not to this method.  Kept for backward compatibility.
        # Unlike the usual multi-field convention it stores a one-key dict
        # per record id.
        result = dict.fromkeys(ids, False)
        for obj in self.browse(cr, uid, ids, context=context):
            result[obj.id] = {
                'image_medium': tools.image_resize_image_medium(obj.image, size=(275, 145)),
            }
        return result

    def _get_image(self, cr, uid, ids, name, args, context=None):
        """Function-field getter: medium/small variants derived from `image`."""
        result = dict.fromkeys(ids, False)
        for obj in self.browse(cr, uid, ids, context=context):
            result[obj.id] = tools.image_get_resized_images(obj.image)
        return result

    def _set_image(self, cr, uid, ids, name, value, args, context=None):
        """Function-field inverse: store the full-size image back on `image`."""
        # `ids` is a single id here (old-API fnct_inv convention).
        return self.write(cr, uid, [ids], {'image': tools.image_resize_image_big(value)},
                          context=context)

    def _has_image(self, cr, uid, ids, name, args, context=None):
        """True when the record carries a (non-empty) image."""
        result = {}
        for obj in self.browse(cr, uid, ids, context=context):
            result[obj.id] = bool(obj.image)
        return result

    def _is_borrowable(self, cr, uid, ids, name, args, context=None):
        """A book is borrowable when it has no open (unreturned) borrow record."""
        result = {}
        for book_id in ids:
            result[book_id] = not self.get_current_borrow_record(cr, uid, book_id, context)
        return result

    def get_current_borrow_record(self, cr, uid, ids, context=None):
        """Return ids of open borrow records for the given book id(s).

        Accepts a single id or a list of ids.
        """
        # FIX: Python 2 database ids may be `long`; `isinstance(ids, int)`
        # alone would wrap a list around a long incorrectly never — but fail
        # to wrap a bare long id, passing it straight into the domain.
        ids = [ids] if isinstance(ids, (int, long)) else ids
        return self.pool.get('library.book.record').search(
            cr, uid, [('book_id', 'in', ids), ('is_returned', '=', False)])

    _columns = {
        'code': fields.char(size=64, string='Code', required=True),
        'name': fields.char(size=256, string='Book Name', required=True),
        'image': fields.binary("Image", help="This field holds the image used as avatar for this book, limited to 1024x1024px"),
        'image_medium': fields.function(_get_image, fnct_inv=_set_image,
                                        string="Medium-sized image", type="binary", multi="_get_image",
                                        store={
                                            'library.book.book': (lambda self, cr, uid, ids, c={}: ids, ['image'], 10),
                                        }),
        'image_small': fields.function(_get_image, fnct_inv=_set_image,
                                       string="Small-sized image", type="binary", multi="_get_image",
                                       store={
                                           'library.book.book': (lambda self, cr, uid, ids, c={}: ids, ['image'], 10),
                                       }),
        'has_image': fields.function(_has_image, type="boolean", string='Have Image'),
        'category': fields.many2one('library.book.category', required=True, string='Category'),
        'type': fields.many2one('library.book.type', required=True, string='Type'),
        'author': fields.char(size=256, string='Book Author'),
        'publisher': fields.char(size=256, string='Publisher'),
        'price': fields.float(digits=(16, 2), string='Price'),
        'purchase_date': fields.date(string='Purchase Date'),
        'comment': fields.text(string='Comment'),
        'state': fields.selection(string='State',
                                  selection=[('in_store', 'In Store'), ('borrowed', 'Borrowed'),
                                             ('scrap', 'Scrap'), ('lost', 'Lost')]),
        'record_ids': fields.one2many('library.book.record', 'book_id', string='Records'),
        'is_borrowable': fields.function(_is_borrowable, type='boolean', string='Is Borrowable'),
    }
    _defaults = {
        'state': 'in_store',
        'purchase_date': lambda *a: str(datetime.date.today()),
    }
    _sql_constraints = [
        ('book_code_unique', 'unique (code)', 'The code of the book must be unique!')
    ]

    def mark_as_scrap(self, cr, uid, ids, context=None):
        """Mark the books as scrapped (terminal state)."""
        self.write(cr, uid, ids, {'state': 'scrap'}, context=context)
        return True

    def mark_as_lost(self, cr, uid, ids, context=None):
        """Mark the books as lost (terminal state)."""
        self.write(cr, uid, ids, {'state': 'lost'}, context=context)
        return True

    def sync_book_status(self, cr, uid, ids, recover=False, context=None):
        """Recompute in_store/borrowed from open borrow records.

        With ``recover=True`` the terminal scrap/lost states are overridden.
        NOTE(review): without ``recover``, hitting a scrap/lost book returns
        False immediately and skips any remaining ids — behaviour preserved,
        but callers passing several ids should be aware of it.
        """
        ids = [ids] if isinstance(ids, (int, long)) else ids
        for book in self.browse(cr, uid, ids, context=context):
            record_ids = self.get_current_borrow_record(cr, uid, book.id, context=context)
            if recover or book.state not in ['scrap', 'lost']:
                new_state = 'borrowed' if record_ids else 'in_store'
                self.write(cr, uid, book.id, {'state': new_state}, context=context)
            else:
                return False
        return True

    def recover(self, cr, uid, ids, context=None):
        """Bring scrap/lost books back, re-deriving their state from records."""
        return self.sync_book_status(cr, uid, ids, recover=True, context=context)
class mail_group(osv.Model):
    """ A mail_group is a collection of users sharing messages in a discussion
        group. The group mechanics are based on the followers. """
    _description = 'Discussion group'
    _name = 'mail.group'
    _mail_flat_thread = False
    _inherit = ['mail.thread']
    _inherits = {'mail.alias': 'alias_id'}

    def _get_image(self, cr, uid, ids, name, args, context=None):
        """Function-field getter: resized variants derived from `image`."""
        result = dict.fromkeys(ids, False)
        for obj in self.browse(cr, uid, ids, context=context):
            result[obj.id] = tools.image_get_resized_images(obj.image)
        return result

    def _set_image(self, cr, uid, id, name, value, args, context=None):
        """Function-field inverse: store the full-size photo back on `image`."""
        return self.write(cr, uid, [id], {'image': tools.image_resize_image_big(value)},
                          context=context)

    _columns = {
        'name': fields.char('Name', required=True, translate=True),
        'description': fields.text('Description'),
        'menu_id': fields.many2one('ir.ui.menu', string='Related Menu',
                                   required=True, ondelete="cascade"),
        'public': fields.selection([('public', 'Public'), ('private', 'Private'),
                                    ('groups', 'Selected Group Only')], 'Privacy', required=True,
                                   help='This group is visible by non members. '
                                        'Invisible groups can add members through the invite button.'),
        'group_public_id': fields.many2one('res.groups', string='Authorized Group'),
        'group_ids': fields.many2many('res.groups', rel='mail_group_res_group_rel',
                                      id1='mail_group_id', id2='groups_id', string='Auto Subscription',
                                      help="Members of those groups will automatically added as followers. "
                                           "Note that they will be able to manage their subscription manually "
                                           "if necessary."),
        # image: all image fields are base64 encoded and PIL-supported
        'image': fields.binary("Photo",
                               help="This field holds the image used as photo for the group, limited to 1024x1024px."),
        'image_medium': fields.function(_get_image, fnct_inv=_set_image,
                                        string="Medium-sized photo", type="binary", multi="_get_image",
                                        store={
                                            'mail.group': (lambda self, cr, uid, ids, c={}: ids, ['image'], 10),
                                        },
                                        help="Medium-sized photo of the group. It is automatically "
                                             "resized as a 128x128px image, with aspect ratio preserved. "
                                             "Use this field in form views or some kanban views."),
        'image_small': fields.function(_get_image, fnct_inv=_set_image,
                                       string="Small-sized photo", type="binary", multi="_get_image",
                                       store={
                                           'mail.group': (lambda self, cr, uid, ids, c={}: ids, ['image'], 10),
                                       },
                                       help="Small-sized photo of the group. It is automatically "
                                            "resized as a 64x64px image, with aspect ratio preserved. "
                                            "Use this field anywhere a small image is required."),
        'alias_id': fields.many2one('mail.alias', 'Alias', ondelete="restrict", required=True,
                                    help="The email address associated with this group. New emails received will automatically "
                                         "create new topics."),
    }

    def _get_default_employee_group(self, cr, uid, context=None):
        """Default authorized group: base 'Employee' group."""
        ref = self.pool.get('ir.model.data').get_object_reference(
            cr, uid, 'base', 'group_user')
        return ref and ref[1] or False

    def _get_default_image(self, cr, uid, context=None):
        """Default group photo, read from the mail module's static assets."""
        image_path = openerp.modules.get_module_resource(
            'mail', 'static/src/img', 'groupdefault.png')
        # FIX: close the file handle (the original leaked it).
        with open(image_path, 'rb') as image_file:
            return tools.image_resize_image_big(image_file.read().encode('base64'))

    _defaults = {
        'public': 'groups',
        'group_public_id': _get_default_employee_group,
        'image': _get_default_image,
    }

    def _generate_header_description(self, cr, uid, group, context=None):
        """Build the client-action header: description plus, when the alias is
        fully configured, the group's email gateway address."""
        header = ''
        if group.description:
            header = '%s' % group.description
        if group.alias_id and group.alias_name and group.alias_domain:
            if header:
                header = '%s<br/>' % header
            return '%sGroup email gateway: %s@%s' % (header, group.alias_name, group.alias_domain)
        return header

    def _subscribe_users(self, cr, uid, ids, context=None):
        """Subscribe the partners of all users in the groups' auto-subscription
        res.groups as followers."""
        for mail_group in self.browse(cr, uid, ids, context=context):
            partner_ids = []
            for group in mail_group.group_ids:
                partner_ids += [user.partner_id.id for user in group.users]
            self.message_subscribe(cr, uid, ids, partner_ids, context=context)

    def create(self, cr, uid, vals, context=None):
        """Create the group plus its menu entry, mail alias and client action."""
        if context is None:
            context = {}
        # get parent menu
        menu_parent = self.pool.get('ir.model.data').get_object_reference(
            cr, uid, 'mail', 'mail_group_root')
        menu_parent = menu_parent and menu_parent[1] or False
        # Create menu id
        mobj = self.pool.get('ir.ui.menu')
        menu_id = mobj.create(cr, SUPERUSER_ID, {
            'name': vals['name'],
            'parent_id': menu_parent
        }, context=context)
        vals['menu_id'] = menu_id
        # Create group and alias
        create_context = dict(context,
                              alias_model_name=self._name,
                              alias_parent_model_name=self._name,
                              mail_create_nolog=True)
        mail_group_id = super(mail_group, self).create(cr, uid, vals, context=create_context)
        group = self.browse(cr, uid, mail_group_id, context=context)
        # Point the alias at the newly created thread.
        self.pool.get('mail.alias').write(
            cr, uid, [group.alias_id.id], {
                "alias_force_thread_id": mail_group_id,
                'alias_parent_thread_id': mail_group_id
            }, context)
        group = self.browse(cr, uid, mail_group_id, context=context)
        # Create client action for this group and link the menu to it
        ref = self.pool.get('ir.model.data').get_object_reference(
            cr, uid, 'mail', 'action_mail_group_feeds')
        if ref:
            search_ref = self.pool.get('ir.model.data').get_object_reference(
                cr, uid, 'mail', 'view_message_search')
            params = {
                'search_view_id': search_ref and search_ref[1] or False,
                'domain': [
                    ('model', '=', 'mail.group'),
                    ('res_id', '=', mail_group_id),
                ],
                'context': {
                    'default_model': 'mail.group',
                    'default_res_id': mail_group_id,
                },
                'res_model': 'mail.message',
                'thread_level': 1,
                'header_description': self._generate_header_description(cr, uid, group, context=context),
                'view_mailbox': True,
                'compose_placeholder': 'Send a message to the group',
            }
            cobj = self.pool.get('ir.actions.client')
            newref = cobj.copy(cr, SUPERUSER_ID, ref[1], default={
                'params': str(params),
                'name': vals['name']
            }, context=context)
            mobj.write(cr, SUPERUSER_ID, menu_id, {
                'action': 'ir.actions.client,' + str(newref),
                'mail_group_id': mail_group_id
            }, context=context)
        if vals.get('group_ids'):
            self._subscribe_users(cr, uid, [mail_group_id], context=context)
        return mail_group_id

    def unlink(self, cr, uid, ids, context=None):
        """Delete the group plus its alias and menu entry (they should not
        outlive the group)."""
        groups = self.browse(cr, uid, ids, context=context)
        # Cascade-delete mail aliases as well, as they should not exist without the mail group.
        mail_alias = self.pool.get('mail.alias')
        alias_ids = [group.alias_id.id for group in groups if group.alias_id]
        # Delete mail_group
        res = super(mail_group, self).unlink(cr, uid, ids, context=context)
        # Delete alias
        mail_alias.unlink(cr, SUPERUSER_ID, alias_ids, context=context)
        # Cascade-delete menu entries as well
        self.pool.get('ir.ui.menu').unlink(
            cr, SUPERUSER_ID,
            [group.menu_id.id for group in groups if group.menu_id],
            context=context)
        return res

    def write(self, cr, uid, ids, vals, context=None):
        """Write, then keep followers, the client action header and the menu
        name in sync with the new values."""
        result = super(mail_group, self).write(cr, uid, ids, vals, context=context)
        if vals.get('group_ids'):
            self._subscribe_users(cr, uid, ids, context=context)
        # if description, name or alias is changed: update client action
        if vals.get('description') or vals.get('name') or vals.get(
                'alias_id') or vals.get('alias_name'):
            cobj = self.pool.get('ir.actions.client')
            # FIX: the original iterated a list comprehension of actions and
            # then used the leaked comprehension variable `group`, so every
            # action got the header of the LAST group.  Iterate the groups
            # themselves so each action is rebuilt from its own group.
            for group in self.browse(cr, uid, ids, context=context):
                action = group.menu_id.action
                new_params = action.params
                new_params['header_description'] = self._generate_header_description(
                    cr, uid, group, context=context)
                cobj.write(cr, SUPERUSER_ID, [action.id],
                           {'params': str(new_params)}, context=context)
        # if name is changed: update menu
        if vals.get('name'):
            mobj = self.pool.get('ir.ui.menu')
            mobj.write(
                cr, SUPERUSER_ID,
                [group.menu_id.id for group in self.browse(cr, uid, ids, context=context)],
                {'name': vals.get('name')}, context=context)
        return result

    def action_follow(self, cr, uid, ids, context=None):
        """ Wrapper because message_subscribe_users take a user_ids=None
            that receive the context without the wrapper. """
        return self.message_subscribe_users(cr, uid, ids, context=context)

    def action_unfollow(self, cr, uid, ids, context=None):
        """ Wrapper because message_unsubscribe_users take a user_ids=None
            that receive the context without the wrapper. """
        return self.message_unsubscribe_users(cr, uid, ids, context=context)

    def get_suggested_thread(self, cr, uid, removed_suggested_threads=None, context=None):
        """Show the suggestion of groups if display_groups_suggestions if the
        user perference allows it."""
        user = self.pool.get('res.users').browse(cr, uid, uid, context)
        if not user.display_groups_suggestions:
            return []
        else:
            return super(mail_group, self).get_suggested_thread(
                cr, uid, removed_suggested_threads, context)

    def message_get_email_values(self, cr, uid, id, notif_mail=None, context=None):
        """Add mailing-list headers (Precedence/List-Id/List-Post) to outgoing
        notification emails for this group."""
        res = super(mail_group, self).message_get_email_values(
            cr, uid, id, notif_mail=notif_mail, context=context)
        group = self.browse(cr, uid, id, context=context)
        try:
            # NOTE(review): headers are stored as a repr'd dict; eval here is
            # on internally-generated data, not user input.
            headers = eval(res.get('headers', '{}'))
        except Exception:
            headers = {}
        headers['Precedence'] = 'list'
        if group.alias_domain and group.alias_name:
            headers['List-Id'] = '%s.%s' % (group.alias_name, group.alias_domain)
            headers['List-Post'] = '<mailto:%s@%s>' % (group.alias_name, group.alias_domain)
        res['headers'] = '%s' % headers
        return res
class LibraryBookWizard(osv.osv_memory):
    """Transient wizard used to create a library.book.book, optionally
    pre-filled from a library.book.wish (context key 'to_bought')."""
    _name = 'library.book.book.wizard'
    _inherit = 'library.book.book'
    _description = 'Book Info Wizard'

    def _get_image(self, cr, uid, ids, name, args, context=None):
        """Function-field getter: resized variants derived from `image`."""
        result = dict.fromkeys(ids, False)
        for obj in self.browse(cr, uid, ids, context=context):
            result[obj.id] = tools.image_get_resized_images(obj.image)
        return result

    def _set_image(self, cr, uid, ids, name, value, args, context=None):
        """Function-field inverse: store the full-size image back on `image`."""
        # `ids` is a single id here (old-API fnct_inv convention).
        return self.write(cr, uid, [ids], {'image': tools.image_resize_image_big(value)},
                          context=context)

    def _has_image(self, cr, uid, ids, name, args, context=None):
        """True when the record carries a (non-empty) image."""
        result = {}
        for obj in self.browse(cr, uid, ids, context=context):
            result[obj.id] = bool(obj.image)
        return result

    _columns = {
        'image': fields.binary("Image", help="This field holds the image used as avatar for this book, limited to 1024x1024px"),
        'image_medium': fields.function(_get_image, fnct_inv=_set_image,
                                        string="Medium-sized image", type="binary", multi="_get_image",
                                        store={
                                            'library.book.book.wizard': (
                                                lambda self, cr, uid, ids, c={}: ids, ['image'], 10),
                                        }),
        'image_small': fields.function(_get_image, fnct_inv=_set_image,
                                       string="Small-sized image", type="binary", multi="_get_image",
                                       store={
                                           'library.book.book.wizard': (
                                               lambda self, cr, uid, ids, c={}: ids, ['image'], 10),
                                       }),
        'has_image': fields.function(_has_image, type="boolean", string='Have Image'),
    }

    def default_get(self, cr, uid, fields, context=None):
        """Prefill the wizard from the wish record when opened in
        'to_bought' mode (active_id is then a library.book.wish id)."""
        res = super(LibraryBookWizard, self).default_get(cr, uid, fields, context=context)
        if context is None:
            context = {}
        record_id = context and context.get('active_id', False) or False
        if not record_id:
            return res
        # FIX: the original used context['to_bought'], raising KeyError any
        # time the wizard was opened without that key; use .get() instead.
        if context.get('to_bought'):
            wish_book = self.pool.get('library.book.wish').browse(
                cr, uid, record_id, context=context)
            if 'name' in fields:
                res['name'] = wish_book.name
            if 'author' in fields:
                res['author'] = wish_book.author
            if 'publisher' in fields:
                res['publisher'] = wish_book.publisher
            if 'price' in fields:
                res['price'] = wish_book.price
            if 'comment' in fields:
                res['comment'] = wish_book.comment
            res['image'] = wish_book.image
        return res

    def save(self, cr, uid, ids, context):
        """Create the real book from the wizard values and flag the source
        wish as bought, linking it to the new book."""
        self_record = self.browse(cr, uid, ids[0], context)
        book_obj = self.pool.get('library.book.book')
        values = {
            'code': self_record.code,
            'name': self_record.name,
            'category': self_record.category.id if self_record.category else None,
            'type': self_record.type.id if self_record.type else None,
            'author': self_record.author,
            'publisher': self_record.publisher,
            'price': self_record.price,
            'purchase_date': self_record.purchase_date,
            'comment': self_record.comment,
            'image': self_record.image,
        }
        book_id = book_obj.create(cr, uid, values, context=context)
        self.pool.get('library.book.wish').write(
            cr, uid, context['active_id'],
            {'state': 'bought', 'link_book_id': book_id}, context=context)
        return True
class res_partner(osv.Model):
    """Extension of res.partner with commercial contact details, schedule
    fields and extra document photos."""
    _inherit = 'res.partner'

    # Kept as a plain method (the function field below stayed disabled
    # because it conflicted with the year handling — see note inside).
    def _bithday_date(self, cr, uid, ids, field, arg, context=None):
        """Derive a birth date from the RFC-like `vat_split` value.

        Companies carry the date starting at position 3, persons at
        position 4; the six digits are read as YY-MM-DD.
        NOTE(review): strptime is given "%Y-%m-%d" but only a two-digit
        year is available — confirm the intended century handling.
        """
        result = {}
        for partner in self.browse(cr, uid, ids, context=context):
            offset = 3 if partner.is_company else 4
            digits = partner.vat_split[offset:]
            final = '-'.join((digits[0:2], digits[2:4], digits[4:6]))
            result[partner.id] = datetime.strptime(final, "%Y-%m-%d")
        return result

    def _days(self, cursor, user_id, context=None):
        """Selection values: the six working days (Monday..Saturday)."""
        return (('1', _('Monday')),
                ('2', _('Tuesday')),
                ('3', _('Wednesday')),
                ('4', _('Thursday')),
                ('5', _('Friday')),
                ('6', _('Saturday')))

    _columns = {
        'business_name': fields.char(string="Business Name", size=256),
        'commercial_email': fields.char(string="Commercial Email", size=256),
        'curp': fields.char(string="CURP", size=18),
        'course_business': fields.char(string="Course of Business", size=256),
        'business_manager': fields.char(string="Business Manager", size=256),
        'purchasing_manager': fields.char(string="Purchasing Manager", size=256),
        'payment_responsible': fields.char(string="Responsible for Payments", size=256),
        'photo_business': fields.binary(string="Photo Business"),
        'photo_IFE': fields.binary(string="Photo IFE"),
        'photo_authorized_signature': fields.binary(string="Photo Authorized Signature"),
        # Disabled function field kept for reference:
        # 'birthday_date': fields.function(_bithday_date, type='date', string="Birthday Date"),
        'birthday_date': fields.date(type='date', string="Birthday Date"),
        'aniversary_date': fields.date(type='date', string="Aniversary Date"),
        'geophysics_address': fields.char(string="Geophysics Address", size=256),
        'review_morning_hours': fields.char(string="review in the morning hours", size=5),
        'review_afternoon_hours': fields.char(string="review in the afternoon hours", size=5),
        'payment_morning_hours': fields.char(string="payment in the morning hours", size=5),
        'payment_afternoon_hours': fields.char(string="payment in the afternoon hours", size=5),
        'review_date': fields.selection(_days, 'Dias'),
        'payment_date': fields.selection(_days, 'Dias'),
        'clasificacion': fields.many2one('clasificacion', ondelete='set null', string="Clasificacion"),
    }
class complete_sales(orm.TransientModel):
    """Wizard that exports an "integral sales" report (by invoice or by
    product) for a date range to a CSV file and attaches the base64 payload
    to the wizard record for download."""
    _name = 'complete.sales'
    _description = 'Complete Sales Form'
    _defaults = {
        'report_type' : 'invoice',
        #'company_id' : '1',
        'date_rep' : lambda *a: datetime.date.today().strftime('%Y-%m-%d'),
        'date_ini' : lambda *a: datetime.date.today().strftime('%Y-%m-%d'),
        'date_fin' : lambda *a: datetime.date.today().strftime('%Y-%m-%d'),
    }
    _columns = {
        'report_type' : fields.selection([('invoice', 'Invoice'), ('product', 'Product')], 'Report Type', required=True),
        'company_id' : fields.many2one('res.company', 'Company', required=True),
        'date_rep' : fields.date('Close date', required=True),
        'date_ini' : fields.date('Initial date', required=True),
        'date_fin' : fields.date('Finish date', required=True),
        'filename' : fields.char('File Name', size=255, readonly=True),
        'data' : fields.binary('File', readonly=True),
    }

    def default_get(self, cr, uid, fields, context=None):
        # NOTE(review): the company default is hard-coded to id 1 — presumably
        # the main company; confirm for multi-company databases.
        res = super(complete_sales, self).default_get(cr, uid, fields, context=context)
        res['company_id'] = 1
        return res

    # Replace accented Spanish characters with plain ASCII for the CSV output.
    # NOTE(review): declared without an explicit `self` — the `x` parameter
    # receives the instance when called as self.eSg(value), and is then
    # immediately shadowed inside the loop; this works only by accident.
    # Also `elif x == u'ñ': x = 'ñ'` keeps the ñ (re-encoded as a str) rather
    # than transliterating it — possibly intentional, confirm.
    def eSg(x, cadena):
        nueva_cadena = ''
        if cadena > '':
            for l in cadena:
                x = l
                if x == u'Ñ': x = 'N'
                elif x == u'ñ': x = 'ñ'
                elif x == u'Á': x = 'A'
                elif x == u'É': x = 'E'
                elif x == u'Í': x = 'I'
                elif x == u'Ó': x = 'O'
                elif x == u'Ú': x = 'U'
                elif x == u'Ü': x = 'U'
                elif x == u'á': x = 'a'
                elif x == u'é': x = 'e'
                elif x == u'í': x = 'i'
                elif x == u'ó': x = 'o'
                elif x == u'ú': x = 'u'
                nueva_cadena = nueva_cadena + x
        # Non-string inputs (numbers, None, dates) fall through to str().
        if nueva_cadena == '':
            nueva_cadena = str(cadena)
        return nueva_cadena

    def execute_query(self, cr, uid, ids, context=None):
        """Run the selected report query, write the rows to a CSV file on
        disk and store the base64-encoded content on the wizard record.

        Returns an act_window re-opening the wizard (download view), or the
        'warning' popup when the query yields no rows.
        """
        # Load the wizard record holding the report parameters.
        obj_sales = self.pool['complete.sales'].browse(cr, uid, ids[0], context=context)
        if not obj_sales:
            raise osv.except_osv(_("Sales"), _("You can't generate csv file, because this document don't have sales."))
        report = obj_sales.report_type
        comp_id = obj_sales.company_id.id
        date_ini = obj_sales.date_ini
        date_fin = obj_sales.date_fin
        date_cls = obj_sales.date_rep
        # -- Query 1: integral sales grouped by invoice (out_invoice plus
        # negated out_refund rows, UNIONed).
        # NOTE(review): dates are concatenated straight into the SQL —
        # they come from date fields, but parameterized queries would be
        # safer; fn_get_payment / get_categ_* are database-side functions.
        strquery_1 = (
            "SELECT * FROM ( "
            "SELECT DISTINCT rp2.name as Vendedor, ccs.name Zona, ai.move_id, ai.partner_id, rp.name Cliente, rp.vat_split RFC, aj.name Sucursal, "
            "ai.number Factura, ai.type TipoDocto, ai.state Estatus, ai.date_invoice FechaFactura, ai.date_due Vencimiento, "
            "ai.amount_tax / CASE ai.amount_untaxed WHEN 0 THEN 1 ELSE ai.amount_untaxed END TasaImpuestos, ai.amount_untaxed Subtotal, "
            "ai.amount_tax Impuestos, ai.amount_total Total, ai.residual SaldoActual, payment.last_rec_date DiaPago, "
            "ai.amount_total - fn_get_payment_acum(ai.move_id, payment.date_payment) SaldoCorte, get_categ_firstlevel(pt.categ_id) Familia "
            "FROM account_invoice ai INNER JOIN res_partner rp ON (ai.partner_id = rp.id) INNER JOIN account_invoice_line ail ON (ail.invoice_id = ai.id) "
            "INNER JOIN product_template pt ON (ail.product_id = pt.id) INNER JOIN product_category pc ON (pt.categ_id = pc.id) "
            "INNER JOIN res_users ru ON (ai.user_id = ru.id) INNER JOIN res_partner rp2 ON (ru.partner_id = rp2.id) "
            "LEFT JOIN crm_case_section ccs ON (ai.section_id = ccs.id) INNER JOIN account_journal aj ON (ai.journal_id = aj.id)"
            "LEFT JOIN fn_get_payment(ai.move_id, '" + date_cls + "') Payment ON (ai.move_id = payment.move_id) "
            "WHERE ai.date_invoice BETWEEN '" + date_ini + "' AND '" + date_fin + "' AND ai.type IN ('out_invoice') AND ai.state IN ('open','paid') "
            "UNION "
            "SELECT DISTINCT rp2.name as Vendedor, ccs.name Zona, ai.move_id, ai.partner_id, rp.name Cliente, rp.vat_split RFC, aj.name Sucursal, "
            "ai.number Factura, ai.type TipoDocto, ai.state Estatus, ai.date_invoice FechaFactura, ai.date_due Vencimiento, "
            "-ai.amount_tax / CASE ai.amount_untaxed WHEN 0 THEN 1 ELSE ai.amount_untaxed END TasaImpuestos, -ai.amount_untaxed Subtotal, "
            "-ai.amount_tax Impuestos, -ai.amount_total Total, -ai.residual SaldoActual, payment.last_rec_date DiaPago, "
            "-(ai.amount_total - fn_get_payment_acum(ai.move_id, payment.date_payment)) SaldoCorte, get_categ_firstlevel(pt.categ_id) Familia "
            "FROM account_invoice ai INNER JOIN res_partner rp ON (ai.partner_id = rp.id) INNER JOIN account_invoice_line ail ON (ail.invoice_id = ai.id) "
            "INNER JOIN product_template pt ON (ail.product_id = pt.id) INNER JOIN product_category pc ON (pt.categ_id = pc.id) "
            "INNER JOIN res_users ru ON (ai.user_id = ru.id) INNER JOIN res_partner rp2 ON (ru.partner_id = rp2.id) "
            "LEFT JOIN crm_case_section ccs ON (ai.section_id = ccs.id) INNER JOIN account_journal aj ON (ai.journal_id = aj.id) "
            "LEFT JOIN fn_get_payment(ai.move_id, '" + date_cls + "') Payment ON (ai.move_id = payment.move_id) "
            "WHERE ai.date_invoice BETWEEN '" + date_ini + "' AND '" + date_fin + "' AND ai.type IN ('out_refund') AND ai.state IN ('open','paid')) tbl "
            "ORDER BY Cliente, FechaFactura, DiaPago"
        )
        # -- Query 2: integral sales detailed by product (refund rows split
        # into Devolucion vs Bonificacion depending on ai.origin).
        strquery_2 = (
            "SELECT * FROM ("
            "SELECT rp2.name as Vendedor, ccs.name Zona, ai.move_id, ai.partner_id, rp.name Cliente, rp.vat_split RFC, aj.name Sucursal, ai.number Factura, "
            "ai.type TipoDocto, ai.state Estatus, ai.date_invoice FechaFactura, ai.date_due Vencimiento, get_categ_firstlevel(pt.categ_id) Familia, "
            "pt.name NombreProducto, get_categ_secondlevel(pt.categ_id,1) Linea, pc.name Proveedor, ail.quantity Cantidad, "
            "ail.price_subtotal Venta, 0 Devolucion, 0 Bonificacion "
            "FROM account_invoice ai INNER JOIN res_partner rp ON (ai.partner_id = rp.id) INNER JOIN account_invoice_line ail ON (ail.invoice_id = ai.id) "
            "INNER JOIN product_template pt ON (ail.product_id = pt.id) INNER JOIN product_category pc ON (pt.categ_id = pc.id) "
            "INNER JOIN res_users ru ON (ai.user_id = ru.id) INNER JOIN res_partner rp2 ON (ru.partner_id = rp2.id) "
            "LEFT JOIN crm_case_section ccs ON (ai.section_id = ccs.id) "
            "INNER JOIN account_journal aj ON (ai.journal_id = aj.id) "
            "LEFT JOIN fn_get_payment(ai.move_id, '" + date_cls + "') Payment ON (ai.move_id = payment.move_id) "
            "WHERE ai.date_invoice BETWEEN '" + date_ini + "' AND '" + date_fin + "' AND ai.type IN ('out_invoice') AND ai.state IN ('open','paid') "
            "UNION "
            "SELECT rp2.name as Vendedor, ccs.name Zona, ai.move_id, ai.partner_id, rp.name Cliente, rp.vat_split RFC, aj.name Sucursal, ai.number Factura, "
            "ai.type TipoDocto, ai.state Estatus, ai.date_invoice FechaFactura, ai.date_due Vencimiento, get_categ_firstlevel(pt.categ_id) Familia, "
            "pt.name NombreProducto, get_categ_secondlevel(pt.categ_id,1) Linea, pc.name Proveedor, CASE WHEN ai.origin IS NULL THEN 0 ELSE -ail.quantity END Cantidad, "
            "0 Venta, CASE WHEN ai.origin IS NOT NULL THEN -ail.price_subtotal ELSE 0 END Devolucion, CASE WHEN ai.origin IS NULL THEN -ail.price_subtotal ELSE 0 END Bonificacion "
            "FROM account_invoice ai INNER JOIN res_partner rp ON (ai.partner_id = rp.id) INNER JOIN account_invoice_line ail ON (ail.invoice_id = ai.id) "
            "INNER JOIN product_template pt ON (ail.product_id = pt.id) INNER JOIN product_category pc ON (pt.categ_id = pc.id) "
            "INNER JOIN res_users ru ON (ai.user_id = ru.id) INNER JOIN res_partner rp2 ON (ru.partner_id = rp2.id) "
            "LEFT JOIN crm_case_section ccs ON (ai.section_id = ccs.id) INNER JOIN account_journal aj ON (ai.journal_id = aj.id) "
            "LEFT JOIN fn_get_payment(ai.move_id, '" + date_cls + "') Payment ON (ai.move_id = payment.move_id) "
            "WHERE ai.date_invoice BETWEEN '" + date_ini + "' AND '" + date_fin + "' AND ai.type IN ('out_refund') AND ai.state IN ('open','paid')) tbl "
            "ORDER BY Cliente, FechaFactura"
        )
        strquery = ''
        output = ''
        result = {}
        encabezado = ''
        # Pick the query and the matching CSV header row.
        # NOTE(review): in the invoice header 'rfc,sucursal' is one fused
        # gettext token filling a single %s, so 19 placeholders still yield
        # the expected 20 comma-separated column names — confirm intended.
        if report == 'invoice':
            strquery = strquery_1
            encabezado = "%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s" % (
                _('vendedor'), _('zona'),_('move_id'),_('partner_id'),_('cliente'),_('rfc,sucursal'),_('factura'),_('tipodocto'),_('estatus'),_('fechafactura'),_('vencimiento'),_('tasaimpuestos'),_('subtotal'),_('impuestos'),_('total'),_('saldoactual'),_('diapago'),_('saldocorte'),_('familia'))
        else:
            strquery = strquery_2
            encabezado = "%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s" % (_('vendedor'),_('zona'),_('move_id'),_('partner_id'),_('cliente'),_('rfc'),_('sucursal'),_('factura'),_('tipodocto'),_('estatus'),_('fechafactura'),_('vencimiento'),_('familia'),_('nombreproducto'),_('linea'),_('proveedor'),_('cantidad'),_('venta'),_('devolucion'),_('bonificacion'))
        cr.execute(strquery)
        registros = cr.fetchall()
        filename = ''
        if registros:
            #asign name of file
            #filepath = "%s" % ('/home/jorge-medina/pyerp7/parts/extras/dme/complete_sales/data/')
            # NOTE(review): path is relative to the server's working
            # directory (os.path.abspath('')) — fragile, confirm deployment.
            filepath = '%s/%s/' % (os.path.dirname(os.path.abspath('')),'local_modules/complete_sales/data')
            filename = "%s.%s" % ('reporte_' + report + '_' + date_cls, 'csv')
            filefull = "%s" % (filepath + filename)
            #Header
            output = encabezado
            output += "\n"
            # Open the CSV output file.
            csvsalida = open(filefull, 'w')
            #iterate move lines
            for move in registros:
                # Build the output line from the first 20 columns, quoting
                # non-empty values (empty cells end up as an extra comma).
                #line = "%s, %s, ..." % (move[0].decode('UTF-8'), move[1].decode('UTF-8'), ...)
                line = ''
                x=0
                while x<20:
                    dato = self.eSg(move[x])
                    if move[x] != '':
                        dato = '"' + dato + '"'
                        line += dato
                    else:
                        line += ','
                    if x<=18:
                        line += ','
                    x = x + 1
                line = self.eSg(line)
                csvsalida.write(line + '\n')
                output += line + '\n'
            csvsalida.close()
            #encode base64
            out = base64.encodestring(output)
            #save in data base
            self.write(cr, uid, ids, {'data':out, 'filename':filename}, context=context)
            dummy, view_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'complete_sales', 'complete_sales_view_data')
            # Re-open the wizard on its download view.
            return {'name': _("Complete Sales"),
                    'res_model':"complete.sales",
                    'src_model':"complete.sales",
                    'view_mode':"form",
                    'target':"new",
                    'key2':"client_action_multi",
                    'multi':"True",
                    'res_id':ids[0],
                    'view_id':view_id,
                    'type': 'ir.actions.act_window',
                    }
        else:
            return self.pool.get('warning').info(cr, uid, title='Reports', message=("there is no information with parameters provided."))
class account_voucher(osv.osv):
    """Voucher extended with CSV payment-file import.

    The imported file is a CSV with one ``<reference>,<amount>`` pair per
    line.  References are matched against the voucher's move lines and the
    allocation is recomputed (withholding-aware) from the file amounts.
    """
    _inherit = 'account.voucher'

    _columns = {
        'import_file': fields.binary(
            'Import File (*.csv)', readonly=True,
            states={'draft': [('readonly', False)]}),
        'import_amount': fields.float(
            'Import Amount', digits_compute=dp.get_precision('Account'),
            readonly=True, states={'draft': [('readonly', False)]}),
        'mismatch': fields.integer(
            'Import Mismatched', readonly=True,
            states={'draft': [('readonly', False)]}),
        'mismatch_list': fields.char(
            'Mismatch List', readonly=True,
            states={'draft': [('readonly', False)]}),
    }

    def create(self, cr, uid, vals, context=None):
        """Force ``amount`` to follow ``import_amount`` when a file was
        imported.

        NOTE: the original code also "updated" ``mismatch`` and
        ``mismatch_list`` with their own values — a no-op, removed.
        """
        if vals.get('import_amount', False):
            vals.update({'amount': vals['import_amount']})
        return super(account_voucher, self).create(cr, uid, vals,
                                                   context=context)

    def write(self, cr, uid, ids, vals, context=None):
        """Keep ``amount`` in sync with ``import_amount`` on write."""
        if vals.get('import_amount', False):
            vals.update({'amount': vals['import_amount']})
        return super(account_voucher, self).write(cr, uid, ids, vals,
                                                  context=context)

    def onchange_import_file(self, cr, uid, ids, import_file, rate,
                             partner_id, journal_id, currency_id, ttype,
                             date, payment_rate_currency_id, company_id,
                             context=None):
        """Parse the uploaded CSV and recompute the voucher lines.

        :param import_file: base64-encoded CSV (``reference,amount`` rows)
        :return: standard onchange dict; also sets ``import_amount``,
                 ``mismatch`` and ``mismatch_list``.
        """
        if not import_file:
            # File cleared: reset everything derived from it.
            return {'value': {'import_amount': False,
                              'mismatch': False,
                              'mismatch_list': False}}
        # Read file: one "reference,amount" pair per non-empty line.
        file_list = base64.decodestring(import_file).split('\n')
        payment_lines = {}
        amount = 0.0
        for line in file_list:
            if line != '':
                data = line.split(',')
                payment_lines[data[0]] = float(data[1])
                amount += float(data[1])
        if context is None:
            context = {}
        ctx = context.copy()
        ctx.update({'date': date})
        # Read the voucher rate with the right date in the context.
        currency_id = currency_id or self.pool.get('res.company').browse(
            cr, uid, company_id, context=ctx).currency_id.id
        voucher_rate = self.pool.get('res.currency').read(
            cr, uid, currency_id, ['rate'], context=ctx)['rate']
        ctx.update({
            'voucher_special_currency': payment_rate_currency_id,
            'voucher_special_currency_rate': rate * voucher_rate,
        })
        res = self.recompute_voucher_lines_csv(
            cr, uid, ids, partner_id, journal_id, amount, currency_id,
            ttype, date, payment_lines, context=ctx)
        vals = self.onchange_rate(cr, uid, ids, rate, amount, currency_id,
                                  payment_rate_currency_id, company_id,
                                  context=ctx)
        for key in vals.keys():
            res[key].update(vals[key])
        vals = {'value': {'import_amount': amount}}
        for key in vals.keys():
            res[key].update(vals[key])
        return res

    # The original recompute_voucher_lines() is not aware of withholding or
    # of the CSV import file; re-adjust its result here.  Allocations are
    # reduced per line and the remainder carries to the next lines.
    def recompute_voucher_lines_csv(self, cr, uid, ids, partner_id,
                                    journal_id, price, currency_id, ttype,
                                    date, payment_lines, context=None):
        """Recompute voucher lines, then overwrite each line's allocation
        with the amount imported from the CSV (withholding deducted).

        :param payment_lines: dict {reference/name: amount} from the CSV
        """
        res = super(account_voucher, self).recompute_voucher_lines(
            cr, uid, ids, partner_id, journal_id, price, currency_id,
            ttype, date, context=context)
        line_cr_ids = res['value']['line_cr_ids']
        line_dr_ids = res['value']['line_dr_ids']
        lines = line_cr_ids + line_dr_ids
        # Match payment_lines against the proposed lines' move_line_id.
        move_line_obj = self.pool.get('account.move.line')
        payment_lines, mismatch, mismatch_list = self.matched_payment_lines(
            payment_lines, lines)
        for line in lines:
            amount, amount_wht = 0.0, 0.0
            if line['move_line_id'] in payment_lines:
                # Amount to reconcile: always positive -> abs(..).
                amount_alloc = abs(payment_lines[line['move_line_id']]) or 0.0
                # Only if amount_alloc > 0, calculate withholding amount.
                if amount_alloc:
                    adv_disc_param = {}
                    move_line = move_line_obj.browse(cr, uid,
                                                     line['move_line_id'])
                    invoice = move_line.invoice
                    if invoice:
                        adv_disc_param = self.pool.get(
                            'account.voucher.line').get_adv_disc_param(
                                cr, uid, invoice)
                    # Probe with the full original amount to get the full
                    # withholding first, then scale to the allocation.
                    original_amount, original_wht_amt = self.pool.get(
                        'account.voucher.line')._get_amount_wht(
                            cr, uid, partner_id, line['move_line_id'],
                            line['amount_original'],
                            line['amount_original'],
                            adv_disc_param, context=context)
                    amount, amount_wht = self._get_amount_wht_ex(
                        cr, uid, partner_id, line['move_line_id'],
                        line['amount_original'], original_wht_amt,
                        amount_alloc, adv_disc_param, context=context)
            # Adjust remaining.
            line['amount'] = amount + amount_wht
            line['amount_wht'] = -amount_wht
            line['reconcile'] = line['amount'] == line['amount_unreconciled']
        vals = {'value': {'mismatch': mismatch,
                          'mismatch_list': mismatch_list}}
        for key in vals.keys():
            res[key].update(vals[key])
        return res

    def matched_payment_lines(self, payment_lines, move_lines):
        """Re-key ``payment_lines`` from CSV references to move-line ids.

        A CSV key matches a move line when it equals the line's ``name``
        or ``reference``.  Unmatched keys are counted and collected in a
        comma-separated string.

        :return: (new_payment_lines, mismatch count, mismatch_list or False)
        """
        new_payment_lines = {}
        mismatch = 0
        mismatch_list = False
        for key in payment_lines.keys():
            matched = False
            for move_line in move_lines:
                if key == move_line['name'] or key == move_line['reference']:
                    new_payment_lines[move_line['move_line_id']] = \
                        payment_lines[key]
                    matched = True
                    break
            if not matched:
                if not mismatch_list:
                    mismatch_list = key
                else:
                    mismatch_list = ('%s,%s') % (mismatch_list, key)
                mismatch += 1
        return new_payment_lines, mismatch, mismatch_list
class crm_lead(format_address, osv.osv):
    """CRM Lead Case, extended with dealer, appointment, partner and
    company (comp1..comp4) data used by the lead-to-dealer workflow."""
    _inherit = "crm.lead"
    _rec_name = 'lead_full_name'

    _defaults = {
        # "<sequence> / <Month, Year>" (the original round-tripped
        # date.today() through strptime for the same result).
        'sequence': lambda self, cr, uid, context=None: (
            self.pool.get('ir.sequence').get(cr, uid, 'crm.demo')
            + ' / ' + date.today().strftime("%B, %Y")),
    }

    def create(self, cr, uid, vals, context=None):
        """Standard create (debug prints removed)."""
        return super(crm_lead, self).create(cr, uid, vals, context=context)

    def onchange_cust_state_2(self, cr, uid, ids, state_id, context=None):
        """Derive the customer-address country from the selected state."""
        if state_id:
            state = self.pool.get('res.country.state').browse(
                cr, uid, state_id, context)
            return {'value': {'cust_country_id_2': state.country_id.id}}
        return {}

    def convert_lead_to_dealer(self, cr, uid, ids, context):
        """Create a res.dealer record from the first lead in ``ids``.

        :return: empty dict (callers only use it as an action result)
        """
        result = {}
        if ids:
            # Browse a single id, not the whole list, so attribute access
            # is unambiguous across old-API versions.
            lead_obj = self.browse(cr, uid, ids[0], context=context)
            values = {
                'name': lead_obj.name,
                'last_name': lead_obj.last_name,
                'comp_title': lead_obj.partner_title.id,
                'marital_status': lead_obj.marital_status,
                'personal_no': lead_obj.personal_no,
                'street': lead_obj.street,
                'street2': lead_obj.street2,
                'city': lead_obj.city,
                'state_id': lead_obj.state_id.id,
                'zip': lead_obj.zip,
                'country_id': lead_obj.country_id.id,
                'home_phone': lead_obj.phone,
                'home_phone2': lead_obj.home_phone,
                'work_phone': lead_obj.work_phone,
                'fname': lead_obj.partner_first_name,
                'lname': lead_obj.partner_last_name,
                'address_same_yes': lead_obj.address_same_yes,
                'street_2': lead_obj.street_2,
                'street2_2': lead_obj.street2_2,
                'city_2': lead_obj.city_2,
                'state_id_2': lead_obj.state_id_2.id,
                'zip_2': lead_obj.zip_2,
                'country_id_2': lead_obj.country_id_2.id,
                'partner_work_phone': lead_obj.partner_work_phone,
                'partner_home_phone': lead_obj.partner_home_phone,
                'partner_cell_phone': lead_obj.partner_cell_phone,
                'partner_email': lead_obj.partner_email,
                'allergies_yes': lead_obj.allergies_yes,
                'pets_yes': lead_obj.pets_yes,
                'children': lead_obj.children,
                'child_1': lead_obj.child_1,
                'child_2': lead_obj.child_2,
                'child_3': lead_obj.child_3,
                'child_4': lead_obj.child_4,
            }
            self.pool.get('res.dealer').create(cr, uid, values)
        return result

    def onchange_state_id_2(self, cr, uid, ids, state_id, context=None):
        """Derive the partner-address country from the selected state."""
        if state_id:
            state = self.pool.get('res.country.state').browse(
                cr, uid, state_id, context)
            return {'value': {'country_id_2': state.country_id.id}}
        return {}

    def onchange_stage_id(self, cr, uid, ids, stage_id, context=None):
        """Propagate probability, appointment flag and closing date from
        the selected stage (only when the stage has on_change set)."""
        if not stage_id:
            return {'value': {}}
        stage = self.pool.get('crm.case.stage').browse(cr, uid, stage_id,
                                                       context=context)
        if not stage.on_change:
            return {'value': {}}
        vals = {
            'probability': stage.probability,
            'apt_attrs': stage.name == 'Appointment Set',
        }
        if stage.probability >= 100 or (stage.probability == 0
                                        and stage.sequence > 1):
            vals['date_closed'] = fields.datetime.now()
        return {'value': vals}

    def _get_stage_id(self, cr, uid, ids, field_name, arg, context=None):
        """Function field: current stage name (for the stage button)."""
        res = {}
        for lead in self.browse(cr, uid, ids):
            res[lead.id] = lead.stage_id.name
        return res

    def _select_objects(self, cr, uid, context=None):
        """Selection list of all installed models plus an empty choice."""
        model_pool = self.pool.get('ir.model')
        model_ids = model_pool.search(cr, uid, [], limit=None)
        models = model_pool.read(cr, uid, model_ids, ['model', 'name'])
        return [(m['model'], m['name']) for m in models] + [('', '')]

    def _combine(self, cr, uid, ids, field_name, args, context=None):
        """Function field: "<first> <last>" full display name."""
        values = {}

        def _ctx_is(t):
            # Matches the original's two-key context test exactly.
            return context and (context.get('default_type') == t
                                or context.get('stage_type') == t)

        if _ctx_is('lead') or _ctx_is('opportunity'):
            for lead in self.browse(cr, uid, ids, context=context):
                values[lead.id] = '%s %s' % (lead.name, lead.last_name)
        # Always return a dict: the original fell through to None when the
        # context carried neither type, which breaks function-field reads.
        return values

    def _appointment_records_of_lead(self, cr, uid, ids, field_name, arg,
                                     context=None):
        """Return the appointments for each lead and its referred leads."""
        res = {}
        Calendar_event_pool = self.pool.get('calendar.event')
        for lead in self.browse(cr, uid, ids, context=context):
            lead_ref = 'crm.lead' + ',' + str(lead.id)
            lead_ids = self.search(cr, uid,
                                   [('referred_by', '=', lead_ref)])
            lead_ids.append(lead.id)
            res[lead.id] = Calendar_event_pool.search(
                cr, uid, [('lead_id', 'in', lead_ids)])
        return res

    def _lead_records_of_lead(self, cr, uid, ids, field_name, arg,
                              context=None):
        """Return, per lead, the referring crm.lead ids (if any)."""
        res = {}
        for lead in self.browse(cr, uid, ids, context=context):
            # Fresh list per record: the original shared one accumulator
            # across all records, leaking ids between leads.
            lead_ids = []
            if lead.referred_by and \
                    str(lead.referred_by).split('(')[0] == 'crm.lead':
                lead_ids.append(lead.referred_by.id)
            res[lead.id] = lead_ids
        return res

    def _check_country_code(self, cr, uid, ids, context=None):
        """Constraint stub — country-code validation not implemented yet."""
        return True

    _constraints = [
        (_check_country_code,
         'Error! Wrong format for country code.',
         ['home_phone_code', 'phone_code']),
    ]

    # NOTE(review): the original dict contained duplicate keys; Python
    # keeps only the LAST literal for each key, so the surviving (i.e.
    # effective) definitions are kept here and the dead earlier duplicates
    # of 'lead_type', 'dealer_id', 'assistant_id' and 'ride_along_id'
    # were removed.
    _columns = {
        'priority': fields.selection(AVAILABLE_PRIORITIES, 'Priority',
                                     select=True),
        'name': fields.char('Name', select=1, readonly=False),
        'sequence': fields.char('Name', select=1, readonly=True),
        'last_name': fields.char('Name', select=1, readonly=False,
                                 required=True),
        'lead_full_name': fields.function(
            _combine, string="Name", type='char', method=True,
            arg=('name', 'last_name'),
            store={
                'crm.lead': (lambda self, cr, uid, ids, c=None: ids,
                             ['name', 'last_name'], 10),
            },),
        'partner_title': fields.many2one('res.partner.title', 'Title'),
        'company_owner': fields.boolean('Company Owner?'),
        'company_name': fields.many2one('res.company', 'Company'),
        'customer_code': fields.char('Existing Customer Code'),
        'home_phone_code': fields.char('Code'),
        'phone_code': fields.char('Code'),
        'mobile_code': fields.char('Code'),
        'home_phone': fields.char('Home Phone (2)'),
        'mobile_phone_code': fields.char('Code'),
        'mobile_phone': fields.char('Mobile Phone (2)'),
        'work_phone_code': fields.char('Code'),
        'work_phone': fields.char('Work Phone'),
        'email_2': fields.char('Email (2)'),
        'place_emp': fields.char('Place of Employment'),
        'job_title': fields.char('Job Title'),
        'marital_status': fields.selection(
            [('single', 'Single'), ('engaged', 'Engaged'),
             ('married', 'Married'), ('partners', 'Partners'),
             ('couple', 'Couple'), ('widower', 'Widower'),
             ('divorced', 'Divorced')],
            string='Marital Status', copy=False),
        'personal_no_code': fields.char('Code'),
        'personal_no': fields.char('Personal No'),
        'cust_street_2': fields.char('Street'),
        'cust_street2_2': fields.char('Street2'),
        'cust_city_2': fields.char('City'),
        'cust_country_id_2': fields.many2one('res.country', 'Country',
                                             ondelete='restrict'),
        'cust_state_id_2': fields.many2one("res.country.state", 'State',
                                           ondelete='restrict'),
        'cust_zip_2': fields.char('Zip', size=24, change_default=True),
        'image': fields.binary(
            "Image",
            help="This field holds the image used as avatar for this "
                 "contact, limited to 1024x1024px"),
        'source_id': fields.many2one('crm.tracking.source',
                                     string="Lead Source"),
        'lead_type': fields.many2one('crm.lead.type', 'Lead type'),
        'own_rent': fields.selection(
            [('own', 'Own'), ('rent', 'Rent')],
            string='Own or Rent?', copy=False),
        'more_comp': fields.boolean('More Companies?'),
        'partner_first_name': fields.char('First Name'),
        'partner_last_name': fields.char('Last Name'),
        'partner_email': fields.char('Email'),
        'partner_function': fields.char('Job Title'),
        'partner_place_emp': fields.char('Place of Employment'),
        'partner_cell_phone_code': fields.char('Code'),
        'partner_cell_phone': fields.char('Cell Phone'),
        'partner_home_phone_code': fields.char('Code'),
        'partner_home_phone': fields.char('Home Phone'),
        'partner_work_phone_code': fields.char('Code'),
        'partner_work_phone': fields.char('Work Phone'),
        'allergies_yes': fields.selection([('yes', 'Yes'), ('no', 'No')],
                                          string="Allergies"),
        'pets_yes': fields.selection([('yes', 'Yes'), ('no', 'No')],
                                     string="Pets"),
        'children': fields.boolean('Children'),
        'child_1': fields.char('Child 1'),
        'child_2': fields.char('Child 2'),
        'child_3': fields.char('Child 3'),
        'child_4': fields.char('Child 4'),
        'appointment_type': fields.selection(
            [('owner', 'Owner')], 'Appointment Type', copy=False),
        'referred_by': fields.reference(
            'Referred By',
            [('res.partner', 'Partner'), ('res.dealer', 'Dealer'),
             ('crm.lead', 'Lead')]),
        'dealer': fields.many2one('res.dealer', 'Dealer'),
        'sponsor': fields.many2one('res.partner', 'Sponsor'),
        'event_source': fields.many2one('crm.tracking.source',
                                        string='Event'),
        'section_id': fields.many2one('crm.case.section',
                                      string='Sales Team'),
        'comment_other': fields.text('Notes'),
        'comment': fields.text('Notes'),
        'company_id_1': fields.many2one('res.company', 'Company'),
        'comp1_title': fields.many2one('res.partner.title', 'Title'),
        'company_act_1': fields.text('Company Activity Domain'),
        'comp1_fiscal_id': fields.char('Fiscal ID'),
        'comp1_registry': fields.char('Registration Number'),
        'comp1_street': fields.char('Street'),
        'comp1_street2': fields.char('Street2'),
        'comp1_country_id': fields.many2one('res.country', 'Country',
                                            ondelete='restrict'),
        'comp1_state_id': fields.many2one("res.country.state", 'State',
                                          ondelete='restrict'),
        'comp1_zip': fields.char('ZIP', size=24, change_default=True),
        'comp1_city': fields.char('CITY'),
        'company_id_2': fields.many2one('res.company', 'Company'),
        'comp2_title': fields.many2one('res.partner.title', 'Title'),
        'company_act_2': fields.text('Company Activity Domain'),
        'comp2_fiscal_id': fields.char('Fiscal ID'),
        'comp2_registry': fields.char('Registration Number'),
        'comp2_street': fields.char('Street'),
        'comp2_street2': fields.char('Street2'),
        'comp2_country_id': fields.many2one('res.country', 'Country',
                                            ondelete='restrict'),
        'comp2_state_id': fields.many2one("res.country.state", 'State',
                                          ondelete='restrict'),
        'comp2_zip': fields.char('ZIP', size=24, change_default=True),
        'comp2_city': fields.char('CITY'),
        'company_id_3': fields.many2one('res.company', 'Company'),
        'comp3_title': fields.many2one('res.partner.title', 'Title'),
        'company_act_3': fields.text('Company Activity Domain'),
        'comp3_fiscal_id': fields.char('Fiscal ID'),
        'comp3_registry': fields.char('Registration Number'),
        'comp3_street': fields.char('Street'),
        'comp3_street2': fields.char('Street2'),
        'comp3_country_id': fields.many2one('res.country', 'Country',
                                            ondelete='restrict'),
        'comp3_state_id': fields.many2one("res.country.state", 'State',
                                          ondelete='restrict'),
        'comp3_zip': fields.char('ZIP', size=24, change_default=True),
        'comp3_city': fields.char('CITY'),
        'company_id_4': fields.many2one('res.company', 'Company'),
        'comp4_title': fields.many2one('res.partner.title', 'Title'),
        'company_act_4': fields.text('Company Activity Domain'),
        'comp4_fiscal_id': fields.char('Fiscal ID'),
        'comp4_registry': fields.char('Registration Number'),
        'comp4_street': fields.char('Street'),
        'comp4_street2': fields.char('Street2'),
        'comp4_country_id': fields.many2one('res.country', 'Country',
                                            ondelete='restrict'),
        'comp4_state_id': fields.many2one("res.country.state", 'State',
                                          ondelete='restrict'),
        'comp4_zip': fields.char('ZIP', size=24, change_default=True),
        'comp4_city': fields.char('CITY'),
        'address_same_yes': fields.selection([('yes', 'Yes'),
                                              ('no', 'No')]),
        'street_2': fields.char('Street'),
        'street2_2': fields.char('Street2'),
        'city_2': fields.char('City'),
        'country_id_2': fields.many2one('res.country', 'Country',
                                        ondelete='restrict'),
        'state_id_2': fields.many2one("res.country.state", 'State',
                                      ondelete='restrict'),
        'zip_2': fields.char('Zip', size=24, change_default=True),
        'result_id': fields.many2one('appointment.result', 'Result'),
        'lead_id': fields.many2one('crm.lead', 'Leads'),
        'appt_type': fields.many2one('appt.type', 'Appt Type'),
        'date_set': fields.date('Date Set'),
        'date_sold': fields.date('Date Sold'),
        'serial_no': fields.many2one('stock.production.lot', 'Serial #'),
        'dealer_id': fields.many2one('res.dealer', 'Dealer'),
        'dealer_position': fields.many2one('dealer.position', 'Position'),
        'advance_date': fields.date('Advance Date'),
        'sale_amt': fields.char('Sale'),
        'tax_rate': fields.char('Tax Rate'),
        'total_tax': fields.char('Total Tax'),
        'total_sale': fields.char('Total Sale'),
        'sponsor_id_2': fields.many2one('res.partner', 'Sponsor'),
        'lead_dealer_id': fields.many2one('res.dealer', 'Lead Dealer'),
        'set_by': fields.many2one('res.users', 'Set By'),
        'prob_with_sale': fields.many2one('sale.problem',
                                          'Problem With Sale'),
        'sale_comment': fields.text('Sale Comments'),
        'pay_type': fields.many2one('account.payment.term',
                                    'Payment Type'),
        'option': fields.char('Options'),
        'down_payment': fields.char('Down Payment'),
        'down_type': fields.many2one('payment.type', 'Down Type'),
        'amount_financed': fields.char('Amount Financed'),
        'filling_fee': fields.char('Filling Fee'),
        'reserve_amt': fields.char('Reserve Amount'),
        'discount_amt': fields.char('Discount Amount'),
        'sac_disc': fields.char('S.A.C Discount'),
        'balance_due': fields.char('Balance Due'),
        'net_sale': fields.char('Net Sale'),
        'comment_warranty': fields.text('Notes'),
        'comment_service': fields.text('Notes'),
        'leads_ids': fields.function(
            _lead_records_of_lead, type="one2many", obj='crm.lead',
            string="Leads"),
        'appointment_ids': fields.function(
            _appointment_records_of_lead, type="one2many",
            obj='calendar.event', string="Appointments"),
        'active_campaign_ids': fields.one2many(
            'crm.campaign', 'lead_id', string="Campaigns",
            domain=[('days_left', '!=', 0)]),
        'past_campaign_ids': fields.one2many(
            'crm.campaign', 'lead_id', string="Campaigns",
            domain=[('days_left', '=', 0)]),
        'call_history_ids': fields.one2many(
            'crm.phonecall', 'opportunity_id', string="Logged Calls",
            readonly=True),
        'gift_ids': fields.one2many('gift.line', 'lead_id', 'Gifts',
                                    copy=False),
        'customer_acc_inv': fields.one2many(
            'account.invoice', 'lead_id', string="Customer Invoice Info",
            readonly=True, domain=[('type', '=', 'out_invoice')]),
        'customer_payment': fields.one2many(
            'account.voucher', 'lead_id', string="Customer Payment Info",
            readonly=True, domain=[('type', '=', 'receipt')]),
        'prod_warr_ids': fields.one2many(
            'product.warranty', 'lead_warr_id', 'Warranty Revision',
            copy=False),
        'prod_service_ids': fields.one2many(
            'product.warranty', 'lead_service_id', 'Service Revision',
            copy=False),
        'lead_type_button': fields.char('Lead Type'),
        'next_action': fields.date('Next Action', readonly=True),
        'button_stage': fields.function(_get_stage_id, type="char",
                                        readonly=True, string='Stage'),
        'km': fields.char('KM'),
        'apt_attrs': fields.boolean('Apt Attrs'),
        'lead_source': fields.selection(
            [('owner', 'Owner'), ('non owner', 'Non Owner'),
             ('mail exibits', 'Mail Exibits'), ('personal', 'Personal')],
            string='Lead Source'),
        'default_add_1': fields.boolean('Default'),
        'default_add_2': fields.boolean('Default'),
        'default_add_3': fields.boolean('Default'),
        'supervisor_id': fields.many2one('res.dealer', 'Supervisor'),
        # NOTE(review): earlier duplicates declared these two against
        # res.users; the later (winning) declarations below are kept —
        # confirm the intended comodels.
        'assistant_id': fields.many2one('res.dealer', 'Assistant'),
        'ride_along_id': fields.many2one('crm.lead', 'Ride-Along'),
        'start_datetime': fields.datetime('Starting at', required=True),
        'stop_datetime': fields.datetime('Ending at', required=True),
        'alarm_ids': fields.many2many(
            'calendar.alarm', 'calendar_alarm_crm_lead_rel',
            string='Reminders', ondelete="restrict", copy=False),
        'attendee_ids': fields.one2many(
            'calendar.attendee', 'event_id', 'Attendees',
            ondelete='cascade'),
        'order_line': fields.one2many('product.order.line', 'lead_id',
                                      'Products'),
    }

    def show_google_map(self, cr, uid, ids, context):
        """Open the map form view for this lead in a dialog."""
        view_ref = self.pool.get('ir.model.data').get_object_reference(
            cr, uid, 'crm_lead_extended', 'view_crm_lead_map')
        # BUGFIX: the original line ended with a stray comma, making
        # view_id a 1-tuple and breaking the returned action.
        view_id = view_ref and view_ref[1] or False
        return {
            'type': 'ir.actions.act_window',
            'name': 'Lead Location on Map',
            'res_model': 'crm.lead',
            'res_id': ids[0],
            'view_id': view_id,
            'view_type': 'form',
            'view_mode': 'form',
            'target': 'new',
            'nodestroy': True,
        }

    def onchange_lead_type(self, cr, uid, ids, lead_type, context=None):
        """Mirror the lead type's name into the button-label field."""
        values = {}
        if lead_type:
            values['lead_type_button'] = self.pool.get(
                'crm.lead.type').browse(cr, uid, lead_type).name
        return {'value': values}

    def onchange_referred_by(self, cr, uid, ids, referred_by, context=None):
        """Set owner/non-owner lead source from the referrer's model."""
        result = {}
        if referred_by:
            model = referred_by.split(",")[0]
            result['value'] = {
                'lead_source': 'owner' if model == 'res.partner'
                               else 'non owner',
            }
        return result

    def onchange_add(self, cr, uid, ids, address_same_yes, context=None):
        """Copy the customer address when "same address" is selected."""
        result = {}
        if address_same_yes == 'yes' and ids:
            lead = self.browse(cr, uid, ids[0])
            result['value'] = {
                'street_2': lead.street,
                'street2_2': lead.street2,
                'city_2': lead.city,
                # m2o onchange values must be ids, not browse records.
                'country_id_2': lead.country_id.id,
                'state_id_2': lead.state_id.id,
                'zip_2': lead.zip,
            }
        return result

    def onchange_comp1(self, cr, uid, ids, company, context=None):
        """Fill the comp1_* address block from the selected company."""
        result = {}
        if company:
            comp = self.pool.get('res.company').browse(cr, uid, company)
            result['value'] = {
                'comp1_street': comp.street,
                'comp1_street2': comp.street2,
                'comp1_country_id': comp.country_id.id,
                'comp1_state_id': comp.state_id.id,
                'comp1_zip': comp.zip,
                'comp1_city': comp.city,
                'comp1_registry': comp.company_registry,
                'company_id_1': company,
                # NOTE(review): 'lead_type1' is not a declared column and
                # was assigned the city in the original — kept verbatim,
                # confirm before removing.
                'lead_type1': comp.city,
            }
        return result

    def onchange_comp2(self, cr, uid, ids, company, context=None):
        """Fill the comp2_* address block from the selected company."""
        result = {}
        if company:
            comp = self.pool.get('res.company').browse(cr, uid, company)
            result['value'] = {
                'comp2_street': comp.street,
                'comp2_street2': comp.street2,
                'comp2_country_id': comp.country_id.id,
                'comp2_state_id': comp.state_id.id,
                'comp2_zip': comp.zip,
                'comp2_city': comp.city,
                'comp2_registry': comp.company_registry,
            }
        return result

    def onchange_comp3(self, cr, uid, ids, company, context=None):
        """Fill the comp3_* address block from the selected company."""
        result = {}
        if company:
            comp = self.pool.get('res.company').browse(cr, uid, company)
            result['value'] = {
                'comp3_street': comp.street,
                'comp3_street2': comp.street2,
                'comp3_country_id': comp.country_id.id,
                'comp3_state_id': comp.state_id.id,
                'comp3_zip': comp.zip,
                'comp3_city': comp.city,
                'comp3_vat': comp.vat,
                'comp3_registry': comp.company_registry,
            }
        return result

    def onchange_comp4(self, cr, uid, ids, company, context=None):
        """Fill the comp4_* address block from the selected company."""
        result = {}
        if company:
            comp = self.pool.get('res.company').browse(cr, uid, company)
            result['value'] = {
                'comp4_street': comp.street,
                'comp4_street2': comp.street2,
                'comp4_country_id': comp.country_id.id,
                'comp4_state_id': comp.state_id.id,
                'comp4_zip': comp.zip,
                'comp4_city': comp.city,
                'comp4_vat': comp.vat,
                'comp4_registry': comp.company_registry,
            }
        return result

    def action_schedule_meeting(self, cr, uid, ids, context=None):
        """Delegate to the standard scheduler (kept as extension point)."""
        return super(crm_lead, self).action_schedule_meeting(
            cr, uid, ids, context=context)

    def onchange_dealer(self, cr, uid, ids, dealer, context=None):
        """Default sponsor and sales team from the selected dealer."""
        result = {}
        if dealer:
            dealer_obj = self.pool.get('res.dealer').browse(cr, uid, dealer)
            result['value'] = {
                'sponsor': dealer_obj.sponsor.id,
                'section_id': dealer_obj.team_name.id,
            }
        return result

    def do_sendmail(self, cr, uid, ids, context=None):
        """E-mail the invitation to the attendees of each lead and log a
        note on the lead.  Does nothing if the current user has no
        e-mail address."""
        # Loop-invariant: the sender is the same for every record.
        current_user = self.pool['res.users'].browse(cr, uid, uid,
                                                     context=context)
        if not current_user.email:
            return
        for demo in self.browse(cr, uid, ids, context):
            attendee_ids = [att.id for att in demo.attendee_ids]
            if self.pool['calendar.attendee']._send_mail_to_attendees(
                    cr, uid, attendee_ids,
                    email_from=current_user.email, context=context):
                self.message_post(
                    cr, uid, demo.id,
                    body=_("An invitation email has been sent to "
                           "attendee(s)"),
                    subtype="calendar.subtype_invitation",
                    context=context)
        return
class riba_file_export(orm.TransientModel):
    """Wizard serializing a riba.distinta into the Italian CBI Ri.Ba.
    fixed-width text format (records IB / 14 / 20 / 30 / 40 / 50 / 51 /
    70 / EF, 120 chars each, CRLF-terminated)."""
    _name = "riba.file.export"

    _columns = {
        'state': fields.selection((
            ('choose', 'choose'),   # choose accounts
            ('get', 'get'),         # get the file
        )),
        'riba_txt': fields.binary('File', readonly=True),
    }
    _defaults = {
        'state': lambda *a: 'choose',
    }

    # Serialization state.  NOTE(review): these live on the CLASS, so
    # they are shared between concurrent exports; _creaFile() resets the
    # counters when it finishes, but this is not re-entrant — confirm
    # acceptable for a TransientModel wizard.
    _progressivo = 0
    _assuntrice = 0
    _sia = 0
    _data = 0
    _valuta = 0
    _supporto = 0
    _totale = 0
    _creditore = 0
    _descrizione = ''
    _codice = ''
    _comune_provincia_debitor = ''

    def _RecordIB(self, sia_assuntrice, abi_assuntrice, data_creazione,
                  nome_supporto, codice_divisa):
        """Header record (IB); also caches SIA/ABI/date/currency used by
        the detail records and the trailer."""
        self._sia = sia_assuntrice.rjust(5, '0')
        self._assuntrice = abi_assuntrice.rjust(5, '0')
        self._data = data_creazione.rjust(6, '0')
        self._valuta = codice_divisa[0:1]
        self._supporto = nome_supporto.ljust(20, ' ')
        return (" IB" + self._sia + self._assuntrice + self._data
                + self._supporto + " " * 74 + self._valuta + " " * 6
                + "\r\n")

    def _Record14(self, scadenza, importo, abi_assuntrice, cab_assuntrice,
                  conto, abi_domiciliataria, cab_domiciliataria,
                  sia_credit, codice_cliente):
        """Detail record (14): one receivable — due date, amount in cents,
        creditor and debtor bank coordinates.  Accumulates the total for
        the trailer."""
        self._totale += importo
        return (" 14" + str(self._progressivo).rjust(7, '0') + " " * 12
                + scadenza + "30000"
                + str(int(round(importo * 100))).rjust(13, '0') + "-"
                + abi_assuntrice.rjust(5, '0')
                + cab_assuntrice.rjust(5, '0')
                + conto.ljust(12, '0')
                + abi_domiciliataria.rjust(5, '0')
                + cab_domiciliataria.rjust(5, '0')
                + " " * 12 + str(sia_credit).rjust(5, '0') + "4"
                + codice_cliente.ljust(16) + " " * 6 + self._valuta
                + "\r\n")

    def _Record20(self, ragione_soc1_creditore, indirizzo_creditore,
                  cap_citta_creditore, ref_creditore):
        """Record 20: creditor name and address (24-char columns)."""
        self._creditore = ragione_soc1_creditore.ljust(24)
        return (" 20" + str(self._progressivo).rjust(7, '0')
                + self._creditore[0:24]
                + indirizzo_creditore.ljust(24)[0:24]
                + cap_citta_creditore.ljust(24)[0:24]
                + ref_creditore.ljust(24)[0:24] + " " * 14 + "\r\n")

    def _Record30(self, nome_debitore, codice_fiscale_debitore):
        """Record 30: debtor name and fiscal code."""
        return (" 30" + str(self._progressivo).rjust(7, '0')
                + nome_debitore.ljust(60)[0:60]
                + codice_fiscale_debitore.ljust(16, ' ') + " " * 34
                + "\r\n")

    def _Record40(self, indirizzo_debitore, cap_debitore, comune_debitore,
                  provincia_debitore, descrizione_domiciliataria=""):
        """Record 40: debtor address and domiciliary bank description."""
        # 25-char "city + province" column, province right-aligned.
        self._comune_provincia_debitor = (
            comune_debitore
            + provincia_debitore.rjust(25 - len(comune_debitore), ' '))
        return (" 40" + str(self._progressivo).rjust(7, '0')
                + indirizzo_debitore.ljust(30)[0:30]
                + str(cap_debitore).rjust(5, '0')
                + self._comune_provincia_debitor
                + descrizione_domiciliataria.ljust(50)[0:50] + "\r\n")

    def _Record50(self, importo_debito, invoice_ref, data_invoice,
                  partita_iva_creditore):
        """Record 50: free-text invoice reference and creditor VAT."""
        self._descrizione = ('PER LA FATTURA N. ' + invoice_ref + ' DEL '
                             + data_invoice + ' IMP '
                             + str(importo_debito))
        return (" 50" + str(self._progressivo).rjust(7, '0')
                + self._descrizione.ljust(80)[0:80] + " " * 10
                + partita_iva_creditore.ljust(16, ' ') + " " * 4 + "\r\n")

    def _Record51(self, numero_ricevuta_creditore):
        """Record 51: receipt number and creditor name."""
        return (" 51" + str(self._progressivo).rjust(7, '0')
                + str(numero_ricevuta_creditore).rjust(10, '0')
                + self._creditore[0:20] + " " * 80 + "\r\n")

    def _Record70(self):
        """Record 70: filler record."""
        return (" 70" + str(self._progressivo).rjust(7, '0') + " " * 110
                + "\r\n")

    def _RecordEF(self):
        """Trailer record (EF): receipt count, total amount in cents and
        total record count (7 records per receipt + header + trailer)."""
        return (" EF" + self._sia + self._assuntrice + self._data
                + self._supporto + " " * 6
                + str(self._progressivo).rjust(7, '0')
                + str(int(round(self._totale * 100))).rjust(15, '0')
                + "0" * 15
                + str(int(self._progressivo) * 7 + 2).rjust(7, '0')
                + " " * 24 + self._valuta + " " * 6 + "\r\n")

    def _creaFile(self, intestazione, ricevute_bancarie):
        """Assemble the full Ri.Ba. flow: header, 7 records per receipt,
        trailer.  Resets the shared counters afterwards."""
        accumulatore = self._RecordIB(intestazione[0], intestazione[1],
                                      intestazione[4], intestazione[5],
                                      intestazione[6])
        for value in ricevute_bancarie:
            self._progressivo = self._progressivo + 1
            accumulatore += self._Record14(
                value[1], value[2], intestazione[1], intestazione[2],
                intestazione[3], value[9], value[10], intestazione[0],
                value[12])
            accumulatore += self._Record20(
                intestazione[7], intestazione[8], intestazione[9],
                intestazione[10])
            accumulatore += self._Record30(value[3], value[4])
            accumulatore += self._Record40(
                value[5], value[6], value[7], value[8], value[11])
            accumulatore += self._Record50(
                value[2], value[13], value[14], intestazione[11])
            accumulatore += self._Record51(value[0])
            accumulatore += self._Record70()
        accumulatore += self._RecordEF()
        self._progressivo = 0
        self._totale = 0
        return accumulatore

    def act_getfile(self, cr, uid, ids, context=None):
        """Build the Ri.Ba. file for the active riba.distinta, store it
        base64-encoded on the wizard and reopen the wizard in 'get'
        state for download.

        :raises orm.except_orm: on missing IBAN, SIA code, or VAT/fiscal
            code for the company or any debtor line.
        """
        active_ids = context and context.get('active_ids', [])
        order_obj = self.pool.get('riba.distinta').browse(
            cr, uid, active_ids, context=context)[0]
        credit_bank = order_obj.config.bank_id
        name_company = order_obj.config.company_id.partner_id.name
        if not credit_bank.iban:
            raise orm.except_orm('Error', _('No IBAN specified'))
        # Italian IBAN layout: ABI at [5:10], CAB at [10:15], account at
        # the end.
        credit_abi = credit_bank.iban[5:10]
        credit_cab = credit_bank.iban[10:15]
        credit_conto = credit_bank.iban[-12:]
        if not credit_bank.codice_sia:
            raise orm.except_orm(
                'Error', _('No SIA Code specified for: ') + name_company)
        credit_sia = credit_bank.codice_sia
        dataemissione = datetime.datetime.now().strftime("%d%m%y")
        nome_supporto = datetime.datetime.now().strftime(
            "%d%m%y%H%M%S") + credit_sia
        creditor_address = order_obj.config.company_id.partner_id
        creditor_city = creditor_address.city or ''
        if not order_obj.config.company_id.partner_id.vat and \
                not order_obj.config.company_id.partner_id.fiscalcode:
            raise orm.except_orm(
                'Error',
                _('No VAT or Fiscalcode specified for: ') + name_company)
        array_testata = [
            credit_sia,
            credit_abi,
            credit_cab,
            credit_conto,
            dataemissione,
            nome_supporto,
            'E',
            name_company,
            creditor_address.street or '',
            # BUGFIX: the original read
            #   creditor_address.zip or '' + ' ' + creditor_city
            # which, by operator precedence, dropped the city whenever a
            # zip was set.
            (creditor_address.zip or '') + ' ' + creditor_city,
            order_obj.config.company_id.partner_id.ref or '',
            order_obj.config.company_id.partner_id.vat
            and order_obj.config.company_id.partner_id.vat[2:]
            or order_obj.config.company_id.partner_id.fiscalcode,
        ]
        arrayRiba = []
        for line in order_obj.line_ids:
            debit_bank = line.bank_id
            debitor_address = line.partner_id
            debitor_street = debitor_address.street or ''
            debitor_zip = debitor_address.zip or ''
            if not debit_bank.iban:
                raise orm.except_orm(
                    'Error',
                    _('No IBAN specified for ') + line.partner_id.name)
            debit_abi = debit_bank.iban[5:10]
            debit_cab = debit_bank.iban[10:15]
            debitor_city = debitor_address.city and \
                debitor_address.city.ljust(23)[0:23] or ''
            debitor_province = debitor_address.province.code or ''
            if not line.due_date:
                # No due date available: emit a zero date placeholder.
                due_date = '000000'
            else:
                due_date = datetime.datetime.strptime(
                    line.due_date[:10], '%Y-%m-%d').strftime("%d%m%y")
            if not line.partner_id.vat and not line.partner_id.fiscalcode:
                raise orm.except_orm(
                    'Error',
                    _('No VAT or Fiscal code specified for ')
                    + line.partner_id.name)
            if not (debit_bank.bank and debit_bank.bank.name
                    or debit_bank.bank_name):
                raise orm.except_orm(
                    'Error',
                    _('No debit_bank specified for ')
                    + line.partner_id.name)
            arrayRiba.append([
                line.sequence,
                due_date,
                line.amount,
                line.partner_id.name,
                line.partner_id.vat and line.partner_id.vat[2:]
                or line.partner_id.fiscalcode,
                debitor_street,
                debitor_zip,
                debitor_city,
                debitor_province,
                debit_abi,
                debit_cab,
                debit_bank.bank and debit_bank.bank.name
                or debit_bank.bank_name,
                line.partner_id.ref or '',
                line.invoice_number,
                line.invoice_date,
            ])
        out = base64.encodestring(
            self._creaFile(array_testata, arrayRiba).encode("utf8"))
        self.write(cr, uid, ids, {'state': 'get', 'riba_txt': out},
                   context=context)
        model_data_obj = self.pool.get('ir.model.data')
        view_rec = model_data_obj.get_object_reference(
            cr, uid, 'account_ricevute_bancarie', 'wizard_riba_file_export')
        view_id = view_rec and view_rec[1] or False
        return {
            'view_type': 'form',
            'view_id': [view_id],
            'view_mode': 'form',
            'res_model': 'riba.file.export',
            'res_id': ids[0],
            'type': 'ir.actions.act_window',
            'target': 'new',
            'context': context,
        }
class ReportSelection(osv.TransientModel):
    """Wizard that exports the active invoice to one of three xlwt Excel
    reports (additional / monitoring / tax break-up) and stores the
    resulting .xls on the wizard record for download."""
    _name = 'wiz.report.selection'
    _description = 'for excel report selection'
    _columns = {
        'select': fields.selection([
            ('additional', 'Additional Invoice Report'),
            ('monitoring', 'Monitoring Report'),
            ('tax_break', 'Tax Break-Up Invoice'),
        ], required="1"),
        'report': fields.binary('report data', readonly=True),
        'report_name': fields.char('file name', store=True, readonly=True),
        # 'data_invoice': fields.many2one('account.invoice', string="Invoice Ref",),
    }

    @api.multi
    def select_report_type(self):
        """Dispatch to the report builder matching the selected type.

        :raises ValueError: if no report type is selected.
        """
        if self.select == 'additional':
            return self.additional_excel_report()
        elif self.select == 'monitoring':
            return self.monitoring_invoive()
        elif self.select == 'tax_break':
            return self.tax_break_inv()
        else:
            raise ValueError('No Report Type Selected')

    def _get_active_invoice(self):
        # Invoice record the wizard was launched from (active_id in context).
        active_id = self.env.context.get('active_id', False)
        return self.env['account.invoice'].search([('id', '=', active_id)])

    def _make_styles(self, border_width):
        """Build the xlwt styles shared by all three reports.

        :param border_width: xlwt border line code drawn around the merged
            title cell (3 = thick for the additional report, 1 = thin for
            the others) -- the only styling difference between reports.
        :return: (header_style, total_format, sub_header_style, value_style)
        """
        borders_header = Borders()
        borders_header.left = border_width
        borders_header.right = border_width
        borders_header.top = border_width
        borders_header.bottom = border_width
        fnt_header = Font()
        fnt_header.name = 'Arial'
        fnt_header.colour_index = 4
        fnt_header.bold = True
        a_header = Alignment()
        a_header.horz = Alignment.HORZ_CENTER
        a_header.vert = Alignment.VERT_CENTER
        header_style = XFStyle()
        header_style.font = fnt_header
        header_style.borders = borders_header
        header_style.alignment = a_header
        # Right-aligned, top-bordered cell used for grand totals.
        a_total = Alignment()
        a_total.horz = Alignment.HORZ_RIGHT
        b_total = Borders()
        b_total.top = 1
        total_format = XFStyle()
        total_format.borders = b_total
        total_format.alignment = a_total
        sub_header_style = xlwt.easyxf(
            "font: name Calibri size 13 px, bold on, height 200;")
        value_style = xlwt.easyxf(
            "font: name Calibri size 15 px, height 200 ;")
        return header_style, total_format, sub_header_style, value_style

    def _reopen_wizard(self):
        # Action dict that re-opens this wizard form so the user can
        # download the freshly generated file.
        return {
            'type': 'ir.actions.act_window',
            'res_model': 'wiz.report.selection',
            'view_mode': 'form',
            'view_type': 'form',
            'res_id': self.id,
            'views': [(False, 'form')],
        }

    # additional report function excel report
    @api.multi
    def additional_excel_report(self):
        """Render the 'Additional Invoice' sheet and store it on the wizard."""
        records = self._get_active_invoice()
        header_style, total_format, sub_header_style, value_style = \
            self._make_styles(3)
        wb = xlwt.Workbook(encoding='utf-8')
        ws = wb.add_sheet('Additional')
        ws.protect = True
        fp = cStringIO.StringIO()
        row = 10
        ws.write_merge(0, 0, 0, 10, 'Additional Invoice', header_style)
        ws.write(2, 1, records.partner_id.name, sub_header_style)
        ws.write(5, 5, 'STN', sub_header_style)
        ws.write(5, 6, '17-00-3764-757-19', sub_header_style)
        ws.write(5, 1, 'NTN', sub_header_style)
        ws.write(5, 2, records.NTN, value_style)
        ws.write(7, 1, 'INVOICE REF #', sub_header_style)
        ws.write(7, 4, records.id, value_style)
        ws.write(9, 0, 'Description', sub_header_style)
        ws.write(9, 6, 'Quantity', sub_header_style)
        ws.write(9, 8, 'Unit Price', sub_header_style)
        ws.write(9, 10, 'Amount', sub_header_style)
        # One spreadsheet row per invoice line, starting below the headers.
        for data in records.invoice_line:
            if data:
                ws.write(row, 0, data.name, value_style)
                ws.write(row, 6, data.quantity, value_style)
                ws.write(row, 8, data.price_unit, value_style)
                ws.write(row, 10, data.price_subtotal, value_style)
                row += 1
        ws.write(24, 8, 'Total', sub_header_style)
        ws.write(24, 10, round(records.amount_total), total_format)
        wb.save(fp)
        out = base64.encodestring(fp.getvalue())
        self.write({'report': out, 'report_name': 'additional.xls'})
        return self._reopen_wizard()

    # monitoring excel report funtion
    @api.multi
    def monitoring_invoive(self):
        """Render the 'Monitoring Invoice' sheet and store it on the wizard.

        (Method name typo kept -- it may be referenced from XML views.)
        """
        records = self._get_active_invoice()
        header_style, total_format, sub_header_style, value_style = \
            self._make_styles(1)
        wb = xlwt.Workbook(encoding='utf-8')
        ws = wb.add_sheet('Monitoring')
        ws.protect = True
        fp = cStringIO.StringIO()
        row = 10
        ws.write_merge(0, 0, 0, 8, 'Monitoring Invoice', header_style)
        ws.write(2, 1, records.partner_id.name, sub_header_style)
        ws.write(5, 1, 'NTN', sub_header_style)
        ws.write(5, 2, records.NTN, value_style)
        ws.write(7, 1, 'INVOICE REF #', sub_header_style)
        ws.write(7, 3, records.id, value_style)
        # one to many field data
        ws.write(9, 0, 'Description', sub_header_style)
        ws.write(9, 6, 'Billing Period', sub_header_style)
        ws.write(9, 8, 'Amount', sub_header_style)
        # Description column. The original had a second, unreachable elif
        # with a condition identical to this if (and the bug ``row = +1``,
        # which rebinds row to 1 instead of incrementing); the dead branch
        # has been removed.
        for line in records.invoice_line:
            if (line.product_id.name == 'Monitoring charges' and
                    records.partner_id.name == 'National Bank of Pakistan'):
                ws.write(row, 0,
                         'Monitoring charges Including Provincial Sales Tax',
                         value_style)
                row += 1
        # Billing-period column. NOTE(review): ``row`` deliberately keeps
        # counting from where the loop above stopped, and both cells write
        # ``records.to`` -- the first probably should be the period start;
        # confirm field names against the account.invoice extension.
        for line in records.invoice_line:
            if line.product_id.name == 'Monitoring charges':
                ws.write(row, 6, records.to, value_style)
                ws.write(row, 7, records.to, value_style)
                row += 1
        ws.write(24, 6, 'Total', sub_header_style)
        ws.write(24, 8, round(records.amount_total), total_format)
        wb.save(fp)
        out = base64.encodestring(fp.getvalue())
        self.write({'report': out, 'report_name': 'monitoring invoice.xls'})
        return self._reopen_wizard()

    # tax excel report funtion
    @api.multi
    def tax_break_inv(self):
        """Render the 'Tax Break-Up' sheet with per-line tax details."""
        records = self._get_active_invoice()
        header_style, total_format, sub_header_style, value_style = \
            self._make_styles(1)
        wb = xlwt.Workbook(encoding='utf-8')
        ws = wb.add_sheet('Tax Break-Up')
        ws.protect = True
        fp = cStringIO.StringIO()
        row = 10
        ws.write_merge(0, 0, 0, 14, 'Tax Break-UP Invoice', header_style)
        ws.write(2, 1, records.partner_id.name, sub_header_style)
        ws.write(5, 1, 'NTN', sub_header_style)
        ws.write(5, 2, records.NTN, value_style)
        ws.write(7, 1, 'INVOICE REF #', sub_header_style)
        ws.write(7, 4, records.id, value_style)
        # one to many field data
        ws.write(9, 0, 'Description', sub_header_style)
        ws.write(9, 6, 'Quantity', sub_header_style)
        ws.write(9, 8, 'Unit Price', sub_header_style)
        ws.write(9, 10, 'Tax Rate', sub_header_style)
        ws.write(9, 12, 'Tax Amount', sub_header_style)
        ws.write(9, 14, 'Amount', sub_header_style)
        for data in records.invoice_line:
            if data:
                ws.write(row, 0, data.name, value_style)
                ws.write(row, 6, data.quantity, value_style)
                ws.write(row, 8, data.price_unit, value_style)
                ws.write(row, 10, data.invoice_line_tax_id.name, value_style)
                # Tax amount = qty * unit price * tax rate.
                ws.write(row, 12,
                         data.quantity * data.price_unit *
                         data.invoice_line_tax_id.amount,
                         value_style)
                ws.write(row, 14, data.price_subtotal + data.tax_amount,
                         value_style)
                row += 1
        ws.write(20, 10, 'Total Without Tax', sub_header_style)
        ws.write(20, 12, round(records.amount_untaxed), total_format)
        ws.write(22, 10, 'Taxes', sub_header_style)
        ws.write(22, 12, round(records.amount_tax), total_format)
        ws.write(24, 10, 'Total', sub_header_style)
        ws.write(24, 12, round(records.amount_total), total_format)
        wb.save(fp)
        out = base64.encodestring(fp.getvalue())
        self.write({'report': out,
                    'report_name': 'Tax Break-Up Invoice.xls'})
        return self._reopen_wizard()
class wizard_invoice_facturae_txt_v6(osv.TransientModel):
    """Wizard that builds the monthly electronic-invoice (facturae) TXT
    report for the Mexican localization, filtering customer invoices
    either by month/year or by an arbitrary datetime range."""
    _name = 'wizard.invoice.facturae.txt.v6'

    def _get_month_selection(self, cr, uid, context=None):
        """Return (number, translated name) pairs for the month field."""
        months_selection = [
            (1, _('January')), (2, _('February')), (3, _('March')),
            (4, _('April')), (5, _('May')), (6, _('June')),
            (7, _('July')), (8, _('August')), (9, _('September')),
            (10, _('October')), (11, _('November')), (12, _('December')),
        ]
        return months_selection

    _columns = {
        'month': fields.selection(_get_month_selection, 'Month',
                                  type="integer", help='Month to filter'),
        'year': fields.integer('Year', help='Year to filter'),
        'date_start': fields.datetime('Initial Date',
                                      help='Initial date for filter'),
        'date_end': fields.datetime('Finished date',
                                    help='Finished date for filter'),
        'invoice_ids': fields.many2many(
            'account.invoice', 'invoice_facturae_txt_rel', 'invoice_id',
            'facturae_id', "Invoice's",
            domain="[('type', 'in', ['out_invoice', 'out_refund'] )]",
            help="Invoice's that meet with the filter"),
        'facturae': fields.binary("File Electronic Invoice's", readonly=True),
        'facturae_fname': fields.char('File Name', size=64),
        'note': fields.text('Log', readonly=True),
    }

    def _get_facturae_fname(self, cr, uid, context=None):
        """Default file name, carried over via context when re-opened."""
        if context is None:
            context = {}
        return context.get('facturae_fname', 0)

    def _get_facturae(self, cr, uid, context=None):
        """Default file content, carried over via context when re-opened."""
        if context is None:
            context = {}
        return context.get('facturae', 0)

    def _get_note(self, cr, uid, context=None):
        """Default log note, carried over via context when re-opened."""
        if context is None:
            context = {}
        return context.get('note', 0)

    _defaults = {
        # Default to the previous month. The original ``int(%m) - 1``
        # produced the invalid value 0 during January; deriving both month
        # and year from "now minus one month" also keeps the year correct
        # across the December/January boundary.
        'month': lambda *a: (datetime.datetime.now() -
                             relativedelta(months=1)).month,
        'year': lambda *a: (datetime.datetime.now() -
                            relativedelta(months=1)).year,
        'date_start': lambda *a: (datetime.datetime.strptime(time.strftime(
            '%Y-%m-01 00:00:00'), '%Y-%m-%d 00:00:00') - relativedelta(
            months=1)).strftime('%Y-%m-%d %H:%M:%S'),
        'date_end': lambda *a: time.strftime('%Y-%m-%d 23:59:59'),
        'facturae_fname': _get_facturae_fname,
        'facturae': _get_facturae,
        'note': _get_note,
    }

    def get_invoices_date(self, cr, uid, ids, context=None):
        """Fill ``invoice_ids`` with open/paid/cancelled customer invoices
        and refunds whose ``invoice_datetime`` falls inside
        [date_start, date_end), then re-open the wizard form."""
        if not context:
            context = {}
        data = self.read(cr, uid, ids, context=context)[0]
        invoice_obj = self.pool.get('account.invoice')
        # Keep any invoices already selected by hand. The original test
        # was inverted (``if data['invoice_ids']: invoice_ids = []``) and
        # always discarded them; get_invoices_month keeps them, so this
        # now behaves consistently.
        invoice_ids = list(data['invoice_ids'] or [])
        date_start = data['date_start']
        date_end = data['date_end']
        invoice_ids.extend(
            invoice_obj.search(cr, uid, [
                ('type', 'in', ['out_invoice', 'out_refund']),
                ('state', 'in', ['open', 'paid', 'cancel']),
                ('invoice_datetime', '>=', date_start),
                ('invoice_datetime', '<', date_end),
                ('internal_number', '<>', False),
            ], order='invoice_datetime', context=context)
        )
        # Pass the live context (the original passed context=None).
        self.write(cr, uid, ids, {'invoice_ids': [(6, 0, invoice_ids)]},
                   context=context)
        return self._reopen_form(cr, uid, ids, context=context)

    def get_invoices_month(self, cr, uid, ids, context=None):
        """Same as :meth:`get_invoices_date` but the range is the calendar
        month given by the ``month``/``year`` fields."""
        if not context:
            context = {}
        data = self.read(cr, uid, ids, context=context)[0]
        invoice_obj = self.pool.get('account.invoice')
        # Guard against a falsy read() value before extending.
        invoice_ids = list(data['invoice_ids'] or [])
        year = data['year']
        month = int(data['month'])
        date_start = datetime.datetime(year, month, 1, 0, 0, 0)
        date_end = date_start + relativedelta(months=1)
        context.update({'date': date_start.strftime("%Y-%m-%d")})
        invoice_ids.extend(
            invoice_obj.search(cr, uid, [
                ('type', 'in', ['out_invoice', 'out_refund']),
                ('state', 'in', ['open', 'paid', 'cancel']),
                ('invoice_datetime', '>=', date_start.strftime(
                    "%Y-%m-%d %H:%M:%S")),
                ('invoice_datetime', '<', date_end.strftime(
                    "%Y-%m-%d %H:%M:%S")),
                ('internal_number', '<>', False),
            ], order='invoice_datetime', context=context)
        )
        self.write(cr, uid, ids, {'invoice_ids': [(6, 0, invoice_ids)]},
                   context=context)
        return self._reopen_form(cr, uid, ids, context=context)

    def _reopen_form(self, cr, uid, ids, context=None):
        # Action dict that re-opens this wizard's main form (shared by
        # both filter methods).
        ir_model_data = self.pool.get('ir.model.data')
        form_res = ir_model_data.get_object_reference(
            cr, uid, 'l10n_mx_facturae',
            'view_wizard_invoice_facturae_txt_v6_form')
        form_id = form_res and form_res[1] or False
        return {
            'type': 'ir.actions.act_window',
            'name': 'Electronic Invoice - Report Monthly TXT',
            'res_model': 'wizard.invoice.facturae.txt.v6',
            'nodestroy': True,
            'target': 'new',
            'res_id': ids[0],
            'views': [(form_id, 'form')],
        }

    def create_facturae_txt(self, cr, uid, ids, context=None):
        """Build the TXT report for the selected invoices and open the
        result form (file + advisory note passed through the context).
        Returns True when there is nothing to export."""
        obj_model = self.pool.get('ir.model.data')
        if not context:
            context = {}
        data = self.read(cr, uid, ids, context=context)[0]
        invoice_obj = self.pool.get('account.invoice')
        invoice_ids = data['invoice_ids']
        if invoice_ids:
            txt_data, fname = invoice_obj._get_facturae_invoice_txt_data(
                cr, uid, invoice_ids, context=context)
            if txt_data:
                txt_data = base64.encodestring(txt_data)
                context.update({'facturae': txt_data,
                                'facturae_fname': fname,
                                'note': _("Open the file & check that the information is \
correct, folios, RFC, amounts & reported status. \nPlease \
make sure that not this reporting folios that not belong \
to electronic invoice's (you can delete in the file \
directly).\nTIP: Remember that this file too contains \
folios of credit note.")})
            form_res = obj_model.get_object_reference(
                cr, uid, 'l10n_mx_facturae',
                'view_wizard_invoice_facturae_txt_v6_form2')
            form_id = form_res and form_res[1] or False
            return {
                'name': 'Monthly Report Ready',
                'view_mode': 'form',
                'res_model': 'wizard.invoice.facturae.txt.v6',
                'views': [(form_id, 'form')],
                'type': 'ir.actions.act_window',
                'target': 'new',
                'context': context,
            }
        return True
class reports_laboratory(osv.osv_memory):
    """Transient wizard that builds the Design Laboratory monthly .xls
    report with xlwt, attaches it via ir.attachment and stores it on the
    wizard record for download."""
    _name = "reports.laboratory"
    _inherit = ['mail.thread', 'ir.needaction_mixin']
    _columns = {
        'name': fields.text('Instrucciones'),  # on-screen instructions
        'date': fields.date('Fecha de reporte'),  # report date
        'date_ini': fields.date('Fecha Inicio'),  # period start
        'date_fin': fields.date('Fecha Final'),  # period end
        'xls_file_name': fields.char('xls file name', size=128),
        'xls_file': fields.binary('Archivo', readonly=True),  # generated file
        'user_id': fields.many2one('res.users', "Responsable"),
    }
    _defaults = {
        'name': "Se creara un archivo .xls con el reporte seleccionado.",
        'date': lambda *a: time.strftime('%Y-%m-%d'),
        'user_id': lambda obj, cr, uid, context: uid,
    }

    #~ Builds the spreadsheet for the monthly report
    def action_create_report(self, cr, uid, ids, context=None):
        """Generate the monthly laboratory report, save it under /tmp,
        attach it as an ir.attachment, and store it on the wizard so the
        user can download it. Returns True."""
        data = self.browse(cr, uid, ids[0], context=context)
        # Create the workbook
        workbook = xlwt.Workbook(encoding='utf-8')
        sheet_principal = workbook.add_sheet('Informe Laboratorio de Diseño',
                                             cell_overwrite_ok=True)
        # Fill in the main sheet
        self.create_principal_sheet(cr, uid, sheet_principal, data, context)
        # Build the file name
        name = "Informe Laboratorio de Diseño.xls"
        # Build the path where the file will be saved
        root = "/tmp/" + str(name)
        # Save the workbook to that path
        workbook.save(root)
        sprint_file = base64.b64encode(open("/tmp/%s" % (name), 'rb').read())
        # Create the attachment linked to this wizard record
        data_attach = {
            'name': name,
            'datas': sprint_file,
            'datas_fname': name,
            'description': 'Informe Mensual Laboratorio',
            'res_model': 'reports.laboratory',
            'res_id': ids[0],
        }
        self.pool.get('ir.attachment').create(cr, uid, data_attach,
                                              context=context)
        # Store the file on the wizard so it can be downloaded
        self.write(cr, uid, ids, {
            'xls_file': sprint_file,
            'xls_file_name': name
        })
        return True

    #~ Fills the sheet with the data for the monthly report
    def create_principal_sheet(self, cr, uid, sheet, data, context={}):
        """Write the report header, advisory/service counters and the
        relevant CRM activity notes (with attached photos) into ``sheet``.

        NOTE(review): ``context={}`` is a mutable default argument; kept
        as-is to preserve behaviour. ``horas``/``horas_con``/``asistentes``
        /``company_ids``/``ban`` are initialized but never used here.
        """
        horas = 0
        horas_con = 0
        asistentes = 0
        company_ids = []
        ban = False
        # STYLES
        styleT = xlwt.easyxf((
            'font: height 260, bold 1, color black; alignment: horizontal center;'
        ))
        styleTa = xlwt.easyxf(
            ('font: height 200, color black; alignment: horizontal center;'))
        styleTT = xlwt.easyxf((
            'font: height 220, bold 1, color black; alignment: horizontal center;'
        ))
        styleGA = xlwt.easyxf((
            'font: height 220, bold 1, color white; alignment: horizontal center; pattern: pattern solid, fore_colour blue_gray;'
        ))
        styleG = xlwt.easyxf((
            'font: height 220, bold 1, color black; alignment: horizontal center; pattern: pattern solid, fore_colour yellow;'
        ))
        styleV = xlwt.easyxf((
            'font: height 220, bold 1, color black; alignment: horizontal center; pattern: pattern solid, fore_colour green;'
        ))
        style_n = xlwt.easyxf(
            ('font: height 160, color black; alignment: horizontal left'))
        style_B = xlwt.easyxf((
            'font: height 190, bold 1, color black; alignment: horizontal left'
        ))
        # HEADER: institute name, report date and covered period
        #~ locale.setlocale(locale.LC_ALL, "es_ES")
        sheet.write_merge(
            0, 0, 0, 11,
            ("INSTITUTO HIDALGUENSE DE COMPETITIVIDAD EMPRESARIAL"), styleT)
        sheet.write_merge(2, 2, 0, 11, (time.strftime(
            '%d de %B del %Y', time.strptime(data.date, '%Y-%m-%d'))), styleT)
        sheet.write_merge(
            4, 4, 0, 11, ("Reporte correspondiente del " + time.strftime(
                '%d-%m-%Y', time.strptime(data.date_ini, '%Y-%m-%d')) +
                " al " + time.strftime('%d-%m-%Y', time.strptime(
                    data.date_fin, '%Y-%m-%d'))), styleT)
        #~ LABORATORY: business advisory counters
        sheet.write_merge(6, 6, 2, 9,
                          ("Laboratorio de Diseño y desarrollo de producto"),
                          styleGA)
        sheet.write_merge(7, 7, 2, 9, "Asesorías empresariales", styleG)
        advices_ids = self.pool.get('advices.laboratory').search(
            cr, uid, [('date', '>=', data.date_ini),
                      ('date', '<=', data.date_fin)], context=None)
        # Per-service counters (naming, corporate image, logo, label,
        # packaging, photo, ... ) plus gender tallies.
        app = 0
        nami = 0
        cor = 0
        logo = 0
        eti = 0
        enva = 0
        foto = 0
        emba = 0
        empa = 0
        ven = 0
        tresd = 0
        pro = 0
        cata = 0
        hombres = 0
        mujeres = 0
        for line in self.pool.get('advices.laboratory').browse(
                cr, uid, advices_ids, context=None):
            ro = self.pool.get('companies.ihce').browse(
                cr, uid, line.company_id.id, context)
            # Gender tally of the advised company's contact
            if ro.sexo == 'M':
                hombres = hombres + 1
            if ro.sexo == 'F':
                mujeres = mujeres + 1
            # Classify the advice by service id. NOTE(review): the
            # id-to-service mapping below is inferred from the row labels
            # written further down -- confirm against advices.laboratory.
            if line.option == '1':
                app += 1
            elif line.service.id == 17:
                nami += 1
            elif line.service.id == 1:
                cor += 1
            elif line.service.id == 2:
                logo += 1
            elif line.service.id == 12:
                logo += 1
            elif line.service.id == 5:
                eti += 1
            elif line.service.id == 15:
                eti += 1
            elif line.service.id == 11:
                enva += 1
            elif line.service.id == 9:
                foto += 1
            elif line.service.id == 6:
                emba += 1
            elif line.service.id == 16:
                empa += 1
            elif line.service.id == 4:
                ven += 1
            elif line.service.id == 7:
                tresd += 1
            elif line.service.id == 10:
                cata += 1
            else:
                if line.service.id == 3:
                    pro += 1
        # Advisory counter rows (8..21)
        sheet.write_merge(8, 8, 2, 8, "Naming", style_n)
        sheet.write(8, 9, nami, style_n)
        sheet.write_merge(9, 9, 2, 8, "Imágen Corporativa", style_n)
        sheet.write(9, 9, cor, style_n)
        sheet.write_merge(10, 10, 2, 8, "Diseño y rediseño de logotipo",
                          style_n)
        sheet.write(10, 9, logo, style_n)
        sheet.write_merge(11, 11, 2, 8, "Diseño y rediseño de etiqueta",
                          style_n)
        sheet.write(11, 9, eti, style_n)
        sheet.write_merge(12, 12, 2, 8, "Diseño de envase", style_n)
        sheet.write(12, 9, enva, style_n)
        sheet.write_merge(13, 13, 2, 8, "Fotografía del producto", style_n)
        sheet.write(13, 9, foto, style_n)
        sheet.write_merge(14, 14, 2, 8, "Embalaje", style_n)
        sheet.write(14, 9, emba, style_n)
        sheet.write_merge(15, 15, 2, 8, "Empaque", style_n)
        sheet.write(15, 9, empa, style_n)
        sheet.write_merge(16, 16, 2, 8, "Punto de venta", style_n)
        sheet.write(16, 9, ven, style_n)
        sheet.write_merge(17, 17, 2, 8, "Diseño 3D", style_n)
        sheet.write(17, 9, tresd, style_n)
        sheet.write_merge(18, 18, 2, 8, "Prototipado rápido", style_n)
        sheet.write(18, 9, pro, style_n)
        sheet.write_merge(19, 19, 2, 8, "Diseño de Folletos y Catálogos",
                          style_n)
        sheet.write(19, 9, cata, style_n)
        sheet.write_merge(20, 20, 2, 8, "Aplicaciones", style_n)
        sheet.write(20, 9, app, style_n)
        sheet.write_merge(21, 21, 2, 8, "Total", style_B)
        sheet.write(21, 9, len(advices_ids), style_B)
        sheet.write_merge(
            23, 23, 6, 9, "H: " + str(hombres) + " M: " + str(mujeres),
            style_B)
        # Second section: completed design services in the period
        sheet.write_merge(25, 25, 2, 9,
                          ("Laboratorio de Diseño y desarrollo de producto"),
                          styleGA)
        sheet.write_merge(26, 26, 2, 9, "Servicios empresariales", styleG)
        services_ids = self.pool.get('desing.laboratory').search(
            cr, uid, [('date_fin', '>=', data.date_ini),
                      ('date_fin', '<=', data.date_fin)], context=None)
        # Reset counters for the services section
        app = 0
        cor = 0
        logo = 0
        eti = 0
        enva = 0
        foto = 0
        emba = 0
        empa = 0
        ven = 0
        tresd = 0
        pro = 0
        cata = 0
        total = 0
        hombres2 = 0
        mujeres2 = 0
        my_list = []
        for row in self.pool.get('desing.laboratory').browse(
                cr, uid, services_ids, context=None):
            # Only finished (or pre-finished) services count
            if row.state == 'done' or row.state == 'pre_done':
                # Count each company's gender only once
                if row.company_id.id not in my_list:
                    my_list.append(row.company_id.id)
                    ro2 = self.pool.get('companies.ihce').browse(
                        cr, uid, row.company_id.id, context)
                    if ro2.sexo == 'M':
                        hombres2 = hombres2 + 1
                    if ro2.sexo == 'F':
                        mujeres2 = mujeres2 + 1
                if row.app:
                    app += 1
                elif row.service.id == 1:
                    cor += 1
                elif row.service.id == 2:
                    logo += 1
                elif row.service.id == 12:
                    logo += 1
                elif row.service.id == 5:
                    eti += 1
                elif row.service.id == 15:
                    eti += 1
                elif row.service.id == 11:
                    enva += 1
                elif row.service.id == 9:
                    foto += 1
                elif row.service.id == 6:
                    emba += 1
                elif row.service.id == 16:
                    empa += 1
                elif row.service.id == 4:
                    ven += 1
                elif row.service.id == 7:
                    tresd += 1
                elif row.service.id == 10:
                    cata += 1
                else:
                    if row.service.id == 3:
                        pro += 1
                total = total + 1
        # Service counter rows (27..40)
        sheet.write_merge(27, 27, 2, 8, "Imagen Corporativa", style_n)
        sheet.write(27, 9, cor, style_n)
        sheet.write_merge(28, 28, 2, 8, "Diseño y rediseño de logotipo",
                          style_n)
        sheet.write(28, 9, logo, style_n)
        sheet.write_merge(29, 29, 2, 8, "Diseño y rediseño de etiqueta",
                          style_n)
        sheet.write(29, 9, eti, style_n)
        sheet.write_merge(30, 30, 2, 8, "Diseño de Envase", style_n)
        sheet.write(30, 9, enva, style_n)
        sheet.write_merge(31, 31, 2, 8, "Fotografía de producto", style_n)
        sheet.write(31, 9, foto, style_n)
        sheet.write_merge(32, 32, 2, 8, "Embalaje", style_n)
        sheet.write(32, 9, emba, style_n)
        sheet.write_merge(33, 33, 2, 8, "Empaque", style_n)
        sheet.write(33, 9, empa, style_n)
        sheet.write_merge(34, 34, 2, 8, "Punto de Venta", style_n)
        sheet.write(34, 9, ven, style_n)
        sheet.write_merge(35, 35, 2, 8, "Diseño 3D", style_n)
        sheet.write(35, 9, tresd, style_n)
        sheet.write_merge(36, 36, 2, 8, "Prototipado rápido", style_n)
        sheet.write(36, 9, pro, style_n)
        sheet.write_merge(37, 37, 2, 8, "Diseño de Folletos y Catálogosjkjk",
                          style_n)
        sheet.write(37, 9, cata, style_n)
        sheet.write_merge(38, 38, 2, 8, "Aplicaciones opo", style_n)
        sheet.write(38, 9, app, style_n)
        sheet.write_merge(39, 39, 2, 8, "Total", style_B)
        sheet.write(39, 9, total, style_B)
        sheet.write_merge(
            40, 40, 6, 9, "H: " + str(hombres2) + " M: " + str(mujeres2),
            style_B)
        #~ From here on, CRM notes flagged as important and the photos
        #~ attached to the CRM project are appended
        actividades = self.pool.get('crm.project.ihce').search(
            cr, uid, [('date', '>=', data.date_ini),
                      ('date', '<=', data.date_fin), ('priority', '=', '1'),
                      ('area', '=', 10), ('state', '=', 'd-done')],
            context=None)
        if actividades:
            sheet.write_merge(42, 42, 1, 10, "ACTIVIDADES RELEVANTES",
                              styleTT)
            con = 1  # running activity number
            col = 43  # current spreadsheet row (named "col" in original)
            style_na = xlwt.easyxf(
                ('font: height 175, color black; alignment: horizontal left'))
            for row in self.pool.get('crm.project.ihce').browse(
                    cr, uid, actividades, context=None):
                # Activity line: "N.- name date [notes]"
                if not row.notes:
                    sheet.write_merge(
                        col, (col + 2), 1, 10,
                        str(con) + ".- " + row.name.encode('utf-8') + " " +
                        str(row.date), style_na)
                else:
                    sheet.write_merge(
                        col, (col + 2), 1, 10,
                        str(con) + ".- " + row.name.encode('utf-8') + " " +
                        str(row.date) + " " + row.notes.encode('utf-8'),
                        style_na)
                col = col + 4
                j = 1  # photos placed in the current strip (max 3)
                fotos_ids = self.pool.get('ir.attachment').search(
                    cr, uid, [('res_model', '=', 'crm.project.ihce'),
                              ('res_id', '=', row.id)], context=None)
                ro = 1  # column offset for the next bitmap
                for line in self.pool.get('ir.attachment').browse(
                        cr, uid, fotos_ids):
                    name = line.name.split('.')
                    #~ Convert the attachment image to .bmp (the only
                    #~ bitmap format xlwt can embed) and save it
                    try:
                        img = Image.open(
                            "/var/www/img/%s" % (line.name)).convert(
                            "RGB").save(
                            "/var/www/img/" + str(name[0]) + ".bmp")
                        #~ Insert the image into the sheet; start a new
                        #~ strip every 3 photos
                        if j == 3:
                            ro = 1
                            j = 1
                            col = col + 16
                        sheet.insert_bitmap(
                            "/var/www/img/" + str(name[0]) + ".bmp", col, ro)
                        ro = ro + 4
                        j = j + 1
                    except:
                        # NOTE(review): bare except silently swallows any
                        # conversion/insertion failure (kept as-is)
                        print ""
                col = col + 16
                con = con + 1
        return sheet

    #~ Returns the Spanish month name for a two-digit month string
    def meses(self, cr, uid, val, context=None):
        """Map '01'..'12' to the uppercase Spanish month name; returns an
        empty string for any other value."""
        mes = ""
        if val == '01':
            mes = "ENERO"
        elif val == '02':
            mes = "FEBRERO"
        elif val == '03':
            mes = "MARZO"
        elif val == '04':
            mes = "ABRIL"
        elif val == '05':
            mes = "MAYO"
        elif val == '06':
            mes = "JUNIO"
        elif val == '07':
            mes = "JULIO"
        elif val == '08':
            mes = "AGOSTO"
        elif val == '09':
            mes = "SEPTIEMBRE"
        elif val == '10':
            mes = "OCTUBRE"
        elif val == '11':
            mes = "NOVIEMBRE"
        elif val == '12':
            mes = "DICIEMBRE"
        return mes
class res_partner(osv.osv, format_address): _description = 'Partner' _name = "res.partner" def _address_display(self, cr, uid, ids, name, args, context=None): res = {} for partner in self.browse(cr, uid, ids, context=context): res[partner.id] = self._display_address(cr, uid, partner, context=context) return res def _get_image(self, cr, uid, ids, name, args, context=None): result = dict.fromkeys(ids, False) for obj in self.browse(cr, uid, ids, context=context): result[obj.id] = tools.image_get_resized_images(obj.image) return result def _get_tz_offset(self, cr, uid, ids, name, args, context=None): result = dict.fromkeys(ids, False) for obj in self.browse(cr, uid, ids, context=context): result[obj.id] = datetime.datetime.now(pytz.timezone(obj.tz or 'GMT')).strftime('%z') return result def _set_image(self, cr, uid, id, name, value, args, context=None): return self.write(cr, uid, [id], {'image': tools.image_resize_image_big(value)}, context=context) def _has_image(self, cr, uid, ids, name, args, context=None): result = {} for obj in self.browse(cr, uid, ids, context=context): result[obj.id] = obj.image != False return result def _commercial_partner_compute(self, cr, uid, ids, name, args, context=None): """ Returns the partner that is considered the commercial entity of this partner. 
The commercial entity holds the master data for all commercial fields (see :py:meth:`~_commercial_fields`) """ result = dict.fromkeys(ids, False) for partner in self.browse(cr, uid, ids, context=context): current_partner = partner while not current_partner.is_company and current_partner.parent_id: current_partner = current_partner.parent_id result[partner.id] = current_partner.id return result def _display_name_compute(self, cr, uid, ids, name, args, context=None): context = dict(context or {}) context.pop('show_address', None) context.pop('show_address_only', None) context.pop('show_email', None) return dict(self.name_get(cr, uid, ids, context=context)) # indirections to avoid passing a copy of the overridable method when declaring the function field _commercial_partner_id = lambda self, *args, **kwargs: self._commercial_partner_compute(*args, **kwargs) _display_name = lambda self, *args, **kwargs: self._display_name_compute(*args, **kwargs) _commercial_partner_store_triggers = { 'res.partner': (lambda self,cr,uid,ids,context=None: self.search(cr, uid, [('id','child_of',ids)], context=dict(active_test=False)), ['parent_id', 'is_company'], 10) } _display_name_store_triggers = { 'res.partner': (lambda self,cr,uid,ids,context=None: self.search(cr, uid, [('id','child_of',ids)], context=dict(active_test=False)), ['parent_id', 'is_company', 'name'], 10) } _order = "display_name" _columns = { 'name': fields.char('Name', size=128, required=True, select=True), 'display_name': fields.function(_display_name, type='char', string='Name', store=_display_name_store_triggers, select=True), 'date': fields.date('Date', select=1), 'title': fields.many2one('res.partner.title', 'Title'), 'parent_id': fields.many2one('res.partner', 'Related Company', select=True), 'child_ids': fields.one2many('res.partner', 'parent_id', 'Contacts', domain=[('active','=',True)]), # force "active_test" domain to bypass _search() override 'ref': fields.char('Contact Reference', size=64, select=1), 'lang': 
fields.selection(_lang_get, 'Language', help="If the selected language is loaded in the system, all documents related to this contact will be printed in this language. If not, it will be English."), 'tz': fields.selection(_tz_get, 'Timezone', size=64, help="The partner's timezone, used to output proper date and time values inside printed reports. " "It is important to set a value for this field. You should use the same timezone " "that is otherwise used to pick and render date and time values: your computer's timezone."), 'tz_offset': fields.function(_get_tz_offset, type='char', size=5, string='Timezone offset', invisible=True), 'user_id': fields.many2one('res.users', 'Salesperson', help='The internal user that is in charge of communicating with this contact if any.'), 'vat': fields.char('TIN', size=32, help="Tax Identification Number. Check the box if this contact is subjected to taxes. Used by the some of the legal statements."), 'bank_ids': fields.one2many('res.partner.bank', 'partner_id', 'Banks'), 'website': fields.char('Website', size=64, help="Website of Partner or Company"), 'comment': fields.text('Notes'), 'category_id': fields.many2many('res.partner.category', id1='partner_id', id2='category_id', string='Tags'), 'credit_limit': fields.float(string='Credit Limit'), 'ean13': fields.char('EAN13', size=13), 'active': fields.boolean('Active'), 'customer': fields.boolean('Customer', help="Check this box if this contact is a customer."), 'supplier': fields.boolean('Supplier', help="Check this box if this contact is a supplier. 
If it's not checked, purchase people will not see it when encoding a purchase order."), 'employee': fields.boolean('Employee', help="Check this box if this contact is an Employee."), 'function': fields.char('Job Position', size=128), 'type': fields.selection([('default', 'Default'), ('invoice', 'Invoice'), ('delivery', 'Shipping'), ('contact', 'Contact'), ('other', 'Other')], 'Address Type', help="Used to select automatically the right address according to the context in sales and purchases documents."), 'street': fields.char('Street', size=128), 'street2': fields.char('Street2', size=128), 'zip': fields.char('Zip', change_default=True, size=24), 'city': fields.char('City', size=128), 'state_id': fields.many2one("res.country.state", 'State', ondelete='restrict'), 'country_id': fields.many2one('res.country', 'Country', ondelete='restrict'), 'email': fields.char('Email', size=240), 'phone': fields.char('Phone', size=64), 'fax': fields.char('Fax', size=64), 'mobile': fields.char('Mobile', size=64), 'birthdate': fields.char('Birthdate', size=64), 'is_company': fields.boolean('Is a Company', help="Check if the contact is a company, otherwise it is a person"), 'use_parent_address': fields.boolean('Use Company Address', help="Select this if you want to set company's address information for this contact"), # image: all image fields are base64 encoded and PIL-supported 'image': fields.binary("Image", help="This field holds the image used as avatar for this contact, limited to 1024x1024px"), 'image_medium': fields.function(_get_image, fnct_inv=_set_image, string="Medium-sized image", type="binary", multi="_get_image", store={ 'res.partner': (lambda self, cr, uid, ids, c={}: ids, ['image'], 10), }, help="Medium-sized image of this contact. It is automatically "\ "resized as a 128x128px image, with aspect ratio preserved. 
"\ "Use this field in form views or some kanban views."), 'image_small': fields.function(_get_image, fnct_inv=_set_image, string="Small-sized image", type="binary", multi="_get_image", store={ 'res.partner': (lambda self, cr, uid, ids, c={}: ids, ['image'], 10), }, help="Small-sized image of this contact. It is automatically "\ "resized as a 64x64px image, with aspect ratio preserved. "\ "Use this field anywhere a small image is required."), 'has_image': fields.function(_has_image, type="boolean"), 'company_id': fields.many2one('res.company', 'Company', select=1), 'color': fields.integer('Color Index'), 'user_ids': fields.one2many('res.users', 'partner_id', 'Users'), 'contact_address': fields.function(_address_display, type='char', string='Complete Address'), # technical field used for managing commercial fields 'commercial_partner_id': fields.function(_commercial_partner_id, type='many2one', relation='res.partner', string='Commercial Entity', store=_commercial_partner_store_triggers) } def _default_category(self, cr, uid, context=None): if context is None: context = {} if context.get('category_id'): return [context['category_id']] return False def _get_default_image(self, cr, uid, is_company, context=None, colorize=False): img_path = openerp.modules.get_module_resource('base', 'static/src/img', ('company_image.png' if is_company else 'avatar.png')) with open(img_path, 'rb') as f: image = f.read() # colorize user avatars if not is_company: image = tools.image_colorize(image) return tools.image_resize_image_big(image.encode('base64')) def fields_view_get(self, cr, user, view_id=None, view_type='form', context=None, toolbar=False, submenu=False): if (not view_id) and (view_type=='form') and context and context.get('force_email', False): view_id = self.pool['ir.model.data'].get_object_reference(cr, user, 'base', 'view_partner_simple_form')[1] res = super(res_partner,self).fields_view_get(cr, user, view_id, view_type, context, toolbar=toolbar, submenu=submenu) if 
view_type == 'form': res['arch'] = self.fields_view_get_address(cr, user, res['arch'], context=context) return res _defaults = { 'active': True, 'lang': lambda self, cr, uid, ctx: ctx.get('lang', 'en_US'), 'tz': lambda self, cr, uid, ctx: ctx.get('tz', False), 'customer': True, 'category_id': _default_category, 'company_id': lambda self, cr, uid, ctx: self.pool['res.company']._company_default_get(cr, uid, 'res.partner', context=ctx), 'color': 0, 'is_company': False, 'type': 'contact', # type 'default' is wildcard and thus inappropriate 'use_parent_address': False, 'image': False, } _constraints = [ (osv.osv._check_recursion, 'You cannot create recursive Partner hierarchies.', ['parent_id']), ] def copy(self, cr, uid, id, default=None, context=None): if default is None: default = {} default['user_ids'] = False name = self.read(cr, uid, [id], ['name'], context)[0]['name'] default.update({'name': _('%s (copy)') % name}) return super(res_partner, self).copy(cr, uid, id, default, context) def onchange_type(self, cr, uid, ids, is_company, context=None): value = {} value['title'] = False if is_company: value['use_parent_address'] = False domain = {'title': [('domain', '=', 'partner')]} else: domain = {'title': [('domain', '=', 'contact')]} return {'value': value, 'domain': domain} def onchange_address(self, cr, uid, ids, use_parent_address, parent_id, context=None): def value_or_id(val): """ return val or val.id if val is a browse record """ return val if isinstance(val, (bool, int, long, float, basestring)) else val.id result = {} if parent_id: if ids: partner = self.browse(cr, uid, ids[0], context=context) if partner.parent_id and partner.parent_id.id != parent_id: result['warning'] = {'title': _('Warning'), 'message': _('Changing the company of a contact should only be done if it ' 'was never correctly set. If an existing contact starts working for a new ' 'company then a new contact should be created under that new ' 'company. 
You can use the "Discard" button to abandon this change.')} if use_parent_address: parent = self.browse(cr, uid, parent_id, context=context) address_fields = self._address_fields(cr, uid, context=context) result['value'] = dict((key, value_or_id(parent[key])) for key in address_fields) else: result['value'] = {'use_parent_address': False} return result def onchange_state(self, cr, uid, ids, state_id, context=None): if state_id: country_id = self.pool['res.country.state'].browse(cr, uid, state_id, context).country_id.id return {'value':{'country_id':country_id}} return {} def _check_ean_key(self, cr, uid, ids, context=None): for partner_o in self.pool['res.partner'].read(cr, uid, ids, ['ean13',]): thisean=partner_o['ean13'] if thisean and thisean!='': if len(thisean)!=13: return False sum=0 for i in range(12): if not (i % 2): sum+=int(thisean[i]) else: sum+=3*int(thisean[i]) if math.ceil(sum/10.0)*10-sum!=int(thisean[12]): return False return True # _constraints = [(_check_ean_key, 'Error: Invalid ean code', ['ean13'])] def _update_fields_values(self, cr, uid, partner, fields, context=None): """ Returns dict of write() values for synchronizing ``fields`` """ values = {} for field in fields: column = self._all_columns[field].column if column._type == 'one2many': raise AssertionError('One2Many fields cannot be synchronized as part of `commercial_fields` or `address fields`') if column._type == 'many2one': values[field] = partner[field].id if partner[field] else False elif column._type == 'many2many': values[field] = [(6,0,[r.id for r in partner[field] or []])] else: values[field] = partner[field] return values def _address_fields(self, cr, uid, context=None): """ Returns the list of address fields that are synced from the parent when the `use_parent_address` flag is set. 
""" return list(ADDRESS_FIELDS) def update_address(self, cr, uid, ids, vals, context=None): address_fields = self._address_fields(cr, uid, context=context) addr_vals = dict((key, vals[key]) for key in address_fields if key in vals) if addr_vals: return super(res_partner, self).write(cr, uid, ids, addr_vals, context) def _commercial_fields(self, cr, uid, context=None): """ Returns the list of fields that are managed by the commercial entity to which a partner belongs. These fields are meant to be hidden on partners that aren't `commercial entities` themselves, and will be delegated to the parent `commercial entity`. The list is meant to be extended by inheriting classes. """ return ['vat'] def _commercial_sync_from_company(self, cr, uid, partner, context=None): """ Handle sync of commercial fields when a new parent commercial entity is set, as if they were related fields """ if partner.commercial_partner_id != partner: commercial_fields = self._commercial_fields(cr, uid, context=context) sync_vals = self._update_fields_values(cr, uid, partner.commercial_partner_id, commercial_fields, context=context) partner.write(sync_vals) def _commercial_sync_to_children(self, cr, uid, partner, context=None): """ Handle sync of commercial fields to descendants """ commercial_fields = self._commercial_fields(cr, uid, context=context) sync_vals = self._update_fields_values(cr, uid, partner.commercial_partner_id, commercial_fields, context=context) sync_children = [c for c in partner.child_ids if not c.is_company] for child in sync_children: self._commercial_sync_to_children(cr, uid, child, context=context) return self.write(cr, uid, [c.id for c in sync_children], sync_vals, context=context) def _fields_sync(self, cr, uid, partner, update_values, context=None): """ Sync commercial fields and address fields from company and to children after create/update, just as if those were all modeled as fields.related to the parent """ # 1. 
From UPSTREAM: sync from parent if update_values.get('parent_id') or update_values.get('use_parent_address'): # 1a. Commercial fields: sync if parent changed if update_values.get('parent_id'): self._commercial_sync_from_company(cr, uid, partner, context=context) # 1b. Address fields: sync if parent or use_parent changed *and* both are now set if partner.parent_id and partner.use_parent_address: onchange_vals = self.onchange_address(cr, uid, [partner.id], use_parent_address=partner.use_parent_address, parent_id=partner.parent_id.id, context=context).get('value', {}) partner.update_address(onchange_vals) # 2. To DOWNSTREAM: sync children if partner.child_ids: # 2a. Commercial Fields: sync if commercial entity if partner.commercial_partner_id == partner: commercial_fields = self._commercial_fields(cr, uid, context=context) if any(field in update_values for field in commercial_fields): self._commercial_sync_to_children(cr, uid, partner, context=context) # 2b. Address fields: sync if address changed address_fields = self._address_fields(cr, uid, context=context) if any(field in update_values for field in address_fields): domain_children = [('parent_id', '=', partner.id), ('use_parent_address', '=', True)] update_ids = self.search(cr, uid, domain_children, context=context) self.update_address(cr, uid, update_ids, update_values, context=context) def _handle_first_contact_creation(self, cr, uid, partner, context=None): """ On creation of first contact for a company (or root) that has no address, assume contact address was meant to be company address """ parent = partner.parent_id address_fields = self._address_fields(cr, uid, context=context) if parent and (parent.is_company or not parent.parent_id) and len(parent.child_ids) == 1 and \ any(partner[f] for f in address_fields) and not any(parent[f] for f in address_fields): addr_vals = self._update_fields_values(cr, uid, partner, address_fields, context=context) parent.update_address(addr_vals) if not parent.is_company: 
parent.write({'is_company': True}) def _clean_website(self, website): (scheme, netloc, path, params, query, fragment) = urlparse.urlparse(website) if not scheme: if not netloc: netloc, path = path, '' website = urlparse.urlunparse(('http', netloc, path, params, query, fragment)) return website def write(self, cr, uid, ids, vals, context=None): if isinstance(ids, (int, long)): ids = [ids] #res.partner must only allow to set the company_id of a partner if it #is the same as the company of all users that inherit from this partner #(this is to allow the code from res_users to write to the partner!) or #if setting the company_id to False (this is compatible with any user company) if vals.get('website'): vals['website'] = self._clean_website(vals['website']) if vals.get('company_id'): for partner in self.browse(cr, uid, ids, context=context): if partner.user_ids: user_companies = set([user.company_id.id for user in partner.user_ids]) if len(user_companies) > 1 or vals['company_id'] not in user_companies: raise osv.except_osv(_("Warning"),_("You can not change the company as the partner/user has multiple user linked with different companies.")) result = super(res_partner,self).write(cr, uid, ids, vals, context=context) for partner in self.browse(cr, uid, ids, context=context): self._fields_sync(cr, uid, partner, vals, context) return result def create(self, cr, uid, vals, context=None): if vals.get('website'): vals['website'] = self._clean_website(vals['website']) new_id = super(res_partner, self).create(cr, uid, vals, context=context) partner = self.browse(cr, uid, new_id, context=context) self._fields_sync(cr, uid, partner, vals, context) self._handle_first_contact_creation(cr, uid, partner, context) return new_id def open_commercial_entity(self, cr, uid, ids, context=None): """ Utility method used to add an "Open Company" button in partner views """ partner = self.browse(cr, uid, ids[0], context=context) return {'type': 'ir.actions.act_window', 'res_model': 
'res.partner', 'view_mode': 'form', 'res_id': partner.commercial_partner_id.id, 'target': 'new', 'flags': {'form': {'action_buttons': True}}} def open_parent(self, cr, uid, ids, context=None): """ Utility method used to add an "Open Parent" button in partner views """ partner = self.browse(cr, uid, ids[0], context=context) return {'type': 'ir.actions.act_window', 'res_model': 'res.partner', 'view_mode': 'form', 'res_id': partner.parent_id.id, 'target': 'new', 'flags': {'form': {'action_buttons': True}}} def name_get(self, cr, uid, ids, context=None): if context is None: context = {} if isinstance(ids, (int, long)): ids = [ids] res = [] for record in self.browse(cr, uid, ids, context=context): name = record.name if record.parent_id and not record.is_company: name = "%s, %s" % (record.parent_id.name, name) if context.get('show_address_only'): name = self._display_address(cr, uid, record, without_company=True, context=context) if context.get('show_address'): name = name + "\n" + self._display_address(cr, uid, record, without_company=True, context=context) name = name.replace('\n\n','\n') name = name.replace('\n\n','\n') if context.get('show_email') and record.email: name = "%s <%s>" % (name, record.email) res.append((record.id, name)) return res def _parse_partner_name(self, text, context=None): """ Supported syntax: - 'Raoul <*****@*****.**>': will find name and email address - otherwise: default, everything is set as the name """ emails = tools.email_split(text) if emails: email = emails[0] name = text[:text.index(email)].replace('"', '').replace('<', '').strip() else: name, email = text, '' return name, email def name_create(self, cr, uid, name, context=None): """ Override of orm's name_create method for partners. The purpose is to handle some basic formats to create partners using the name_create. If only an email address is received and that the regex cannot find a name, the name will have the email value. 
If 'force_email' key in context: must find the email address. """ if context is None: context = {} name, email = self._parse_partner_name(name, context=context) if context.get('force_email') and not email: raise osv.except_osv(_('Warning'), _("Couldn't create contact without email address!")) if not name and email: name = email rec_id = self.create(cr, uid, {self._rec_name: name or email, 'email': email or False}, context=context) return self.name_get(cr, uid, [rec_id], context)[0] def _search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False, access_rights_uid=None): """ Override search() to always show inactive children when searching via ``child_of`` operator. The ORM will always call search() with a simple domain of the form [('parent_id', 'in', [ids])]. """ # a special ``domain`` is set on the ``child_ids`` o2m to bypass this logic, as it uses similar domain expressions if len(args) == 1 and len(args[0]) == 3 and args[0][:2] == ('parent_id','in') \ and args[0][2] != [False]: context = dict(context or {}, active_test=False) return super(res_partner, self)._search(cr, user, args, offset=offset, limit=limit, order=order, context=context, count=count, access_rights_uid=access_rights_uid) def name_search(self, cr, uid, name, args=None, operator='ilike', context=None, limit=100): if not args: args = [] if name and operator in ('=', 'ilike', '=ilike', 'like', '=like'): self.check_access_rights(cr, uid, 'read') where_query = self._where_calc(cr, uid, args, context=context) self._apply_ir_rules(cr, uid, where_query, 'read', context=context) from_clause, where_clause, where_clause_params = where_query.get_sql() where_str = where_clause and (" WHERE %s AND " % where_clause) or ' WHERE ' # search on the name of the contacts and of its company search_name = name if operator in ('ilike', 'like'): search_name = '%%%s%%' % name if operator in ('=ilike', '=like'): operator = operator[1:] unaccent = get_unaccent_wrapper(cr) query = """SELECT id 
FROM res_partner {where} ({email} {operator} {percent} OR {display_name} {operator} {percent}) ORDER BY {display_name} """.format(where=where_str, operator=operator, email=unaccent('email'), display_name=unaccent('display_name'), percent=unaccent('%s')) where_clause_params += [search_name, search_name] if limit: query += ' limit %s' where_clause_params.append(limit) cr.execute(query, where_clause_params) ids = map(lambda x: x[0], cr.fetchall()) if ids: return self.name_get(cr, uid, ids, context) else: return [] return super(res_partner,self).name_search(cr, uid, name, args, operator=operator, context=context, limit=limit) def find_or_create(self, cr, uid, email, context=None): """ Find a partner with the given ``email`` or use :py:method:`~.name_create` to create one :param str email: email-like string, which should contain at least one email, e.g. ``"Raoul Grosbedon <*****@*****.**>"``""" assert email, 'an email is required for find_or_create to work' emails = tools.email_split(email) if emails: email = emails[0] ids = self.search(cr, uid, [('email','ilike',email)], context=context) if not ids: return self.name_create(cr, uid, email, context=context)[0] return ids[0] def _email_send(self, cr, uid, ids, email_from, subject, body, on_error=None): partners = self.browse(cr, uid, ids) for partner in partners: if partner.email: tools.email_send(email_from, [partner.email], subject, body, on_error) return True def email_send(self, cr, uid, ids, email_from, subject, body, on_error=''): while len(ids): self.pool['ir.cron'].create(cr, uid, { 'name': 'Send Partner Emails', 'user_id': uid, 'model': 'res.partner', 'function': '_email_send', 'args': repr([ids[:16], email_from, subject, body, on_error]) }) ids = ids[16:] return True def address_get(self, cr, uid, ids, adr_pref=None, context=None): """ Find contacts/addresses of the right type(s) by doing a depth-first-search through descendants within company boundaries (stop at entities flagged ``is_company``) then continuing 
the search at the ancestors that are within the same company boundaries. Defaults to partners of type ``'default'`` when the exact type is not found, or to the provided partner itself if no type ``'default'`` is found either. """ adr_pref = set(adr_pref or []) if 'default' not in adr_pref: adr_pref.add('default') result = {} visited = set() for partner in self.browse(cr, uid, filter(None, ids), context=context): current_partner = partner while current_partner: to_scan = [current_partner] # Scan descendants, DFS while to_scan: record = to_scan.pop(0) visited.add(record) if record.type in adr_pref and not result.get(record.type): result[record.type] = record.id if len(result) == len(adr_pref): return result to_scan = [c for c in record.child_ids if c not in visited if not c.is_company] + to_scan # Continue scanning at ancestor if current_partner is not a commercial entity if current_partner.is_company or not current_partner.parent_id: break current_partner = current_partner.parent_id # default to type 'default' or the partner itself default = result.get('default', partner.id) for adr_type in adr_pref: result[adr_type] = result.get(adr_type) or default return result def view_header_get(self, cr, uid, view_id, view_type, context): res = super(res_partner, self).view_header_get(cr, uid, view_id, view_type, context) if res: return res if not context.get('category_id', False): return False return _('Partners: ')+self.pool['res.partner.category'].browse(cr, uid, context['category_id'], context).name def main_partner(self, cr, uid): ''' Return the id of the main partner ''' model_data = self.pool['ir.model.data'] return model_data.browse(cr, uid, model_data.search(cr, uid, [('module','=','base'), ('name','=','main_partner')])[0], ).res_id def _display_address(self, cr, uid, address, without_company=False, context=None): ''' The purpose of this function is to build and return an address formatted accordingly to the standards of the country where it belongs. 
:param address: browse record of the res.partner to format :returns: the address formatted in a display that fit its country habits (or the default ones if not country is specified) :rtype: string ''' # get the information that will be injected into the display format # get the address format address_format = address.country_id and address.country_id.address_format or \ "%(street)s\n%(street2)s\n%(city)s %(state_code)s %(zip)s\n%(country_name)s" args = { 'state_code': address.state_id and address.state_id.code or '', 'state_name': address.state_id and address.state_id.name or '', 'country_code': address.country_id and address.country_id.code or '', 'country_name': address.country_id and address.country_id.name or '', 'company_name': address.parent_id and address.parent_id.name or '', } for field in self._address_fields(cr, uid, context=context): args[field] = getattr(address, field) or '' if without_company: args['company_name'] = '' elif address.parent_id: address_format = '%(company_name)s\n' + address_format return address_format % args
class reporte_consultorias(osv.osv_memory):
    """Wizard that builds the "Consultorias" (consultancy) XLS report.

    It gathers the finished consultancy courses (``date.courses`` records in
    state ``done``), writes them month by month into an ``xlwt`` spreadsheet,
    attaches the resulting file to the wizard record, and stores it in
    ``xls_file`` so the user can download it from the form.
    """
    _name = "reporte.consultorias"
    _inherit = ['mail.thread', 'ir.needaction_mixin']
    _columns = {
        'name': fields.text('Instrucciones'),
        # 'completo' exports every course; 'rango' restricts to date_ini..date_fin
        'type': fields.selection([('completo', 'Completo'), ('rango', 'Por fecha')], 'Tipo de reporte'),
        'date': fields.date('Fecha de reporte'),
        'date_ini': fields.date('Fecha Inicio'),
        'date_fin': fields.date('Fecha Final'),
        'xls_file_name': fields.char('xls file name', size=128),
        'xls_file': fields.binary('Archivo', readonly=True),
        'user_id': fields.many2one('res.users', "Responsable"),
    }
    _defaults = {
        'name': "Se creara un archivo .xls con el reporte seleccionado.",
        'date': lambda *a: time.strftime('%Y-%m-%d'),
        'user_id': lambda obj, cr, uid, context: uid,
    }

    # Builds the spreadsheet, attaches it and stores it on the wizard.
    def action_create_report(self, cr, uid, ids, context=None):
        """Create the XLS report file for this wizard.

        :return: True (plain button handler)
        """
        workbook = xlwt.Workbook(encoding='utf-8')
        sheet_principal = workbook.add_sheet('Consultorias', cell_overwrite_ok=True)
        # Fill the main sheet with the report data.
        self.create_principal_sheet(cr, uid, ids, sheet_principal, context)
        name = "Consultorias.xls"
        root = "/tmp/" + str(name)
        workbook.save(root)
        # Read the file back base64-encoded (binary-field format).
        # FIX: the file handle was previously left unclosed.
        with open(root, 'rb') as xls_fh:
            sprint_file = base64.b64encode(xls_fh.read())
        # Attach the generated file to this wizard record.
        data_attach = {
            'name': name,
            'datas': sprint_file,
            'datas_fname': name,
            'description': 'Reporte Consultorias',
            'res_model': 'reporte.consultorias',
            'res_id': ids[0],
        }
        self.pool.get('ir.attachment').create(cr, uid, data_attach, context=context)
        # Also store the file on the wizard so it can be downloaded directly.
        self.write(cr, uid, ids, {'xls_file': sprint_file, 'xls_file_name': name}, context=context)
        return True

    # Fills the given sheet with the consultancy rows, grouped by month.
    def create_principal_sheet(self, cr, uid, ids, sheet, context=None):
        """Write headers, titles and one row per participant into ``sheet``.

        Fixes vs. previous revision: ``context`` defaulted to a shared mutable
        ``{}``; the month tracker ``mes`` was read before assignment (NameError
        on the first iterated course, since the guarding ``i == 5`` branch was
        unreachable — ``i`` starts at 7 and only grows); unused locals removed.
        """
        if context is None:
            context = {}
        data = self.browse(cr, uid, ids[0], context=context)
        # Column widths.
        widths = [1500, 1500, 10000, 10000, 2000, 4000, 4000, 7000, 3000, 7000, 3500]
        for col, width in enumerate(widths):
            sheet.col(col).width = width
        # Styles.
        styleT = xlwt.easyxf((
            'font: height 260, bold 1, color black; alignment: horizontal center;'
        ))
        styleG = xlwt.easyxf((
            'font: height 200, bold 1, color black; alignment: horizontal center; pattern: pattern solid, fore_colour yellow;'
        ))
        style = xlwt.easyxf((
            'font: height 180, bold 1, color black; alignment: horizontal center; pattern: pattern solid, fore_colour gray25;'
        ))
        style_n = xlwt.easyxf(
            ('font: height 160, color black; alignment: horizontal center'))
        # Header banner.
        sheet.write_merge(
            0, 0, 0, 10,
            ("SECRETARÍA DE DESARROLLO ECONÓMICO DEL ESTADO DE HIDALGO"), styleT)
        sheet.write_merge(
            1, 1, 0, 10,
            ("INSTITUTO HIDALGUENSE DE COMPETITIVIDAD EMPRESARIAL"), styleT)
        sheet.write_merge(2, 2, 0, 10, ("DIRECCIÓN DE ACOMPAÑAMIENTO EMPRESARIAL"), styleT)
        sheet.write_merge(3, 3, 0, 10, ("Formación de Capital Humano"), styleT)
        sheet.write_merge(4, 4, 0, 10, ("Consultorías"), styleT)
        # Column titles.
        titles = [
            'CONTROL', 'No.', 'NOMBRE DE LA EMPRESA', 'NOMBRE DEL PARTICIPANTE',
            'SEXO', 'MUNICIPIO', 'SECTOR',
            'DESCRIPCION DEL SERVICIO DE CONSULTORIA ESPECIALIZADA', 'HORAS',
            ' NOMBRE DEL CONSULTOR', 'MES',
        ]
        for col, title in enumerate(titles):
            sheet.write(6, col, title, style)
        i = 7     # current spreadsheet row
        a = 1     # global sequence number (CONTROL column)
        b = 1     # per-month sequence number (No. column)
        mes = ""  # month currently being written (FIX: was read unassigned)
        course_obj = self.pool.get('date.courses')
        if data.type == 'completo':
            domain = [('state', '=', 'done'), ('dependence', '=', 'ihce'),
                      ('type', '=', 'consultoria')]
        else:
            domain = [('state', '=', 'done'), ('date', '>=', data.date_ini),
                      ('date', '<=', data.date_fin), ('dependence', '=', 'ihce'),
                      ('type', '=', 'consultoria')]
        courses_ids = course_obj.search(cr, uid, domain, order='date ASC')
        line_obj = self.pool.get('company.line')
        company_obj = self.pool.get('companies.ihce')
        persons_obj = self.pool.get('list.new.persons')
        for row in course_obj.browse(cr, uid, courses_ids, context):
            # row.date is 'YYYY-MM-DD'; [5:7] is the month, [0:4] the year.
            mes1 = self.month(cr, uid, row.date[5:7], context)
            if mes != mes1:
                # New month: write a merged banner row and restart numbering.
                sheet.write_merge(i, i, 0, 10, (mes1), styleG)
                i += 1
                b = 1
            # One row per registered company participant.
            for line in line_obj.search(cr, uid, [('course_id', '=', row.id)]):
                li = line_obj.browse(cr, uid, line)
                ro = company_obj.browse(cr, uid, li.contact_id.id, context)
                sheet.write(i, 0, a, style_n)
                sheet.write(i, 1, b, style_n)
                sheet.write(i, 2, (ro.parent_id.name.encode('utf-8')) or '', style_n)
                sheet.write(i, 3, ro.name.encode('utf-8') or '', style_n)
                sheet.write(i, 4, ro.sexo or '', style_n)
                sheet.write(i, 5, ro.town.name or '', style_n)
                sheet.write(i, 6, ro.parent_id.sector.name or '', style_n)
                sheet.write(i, 7, row.name.encode('utf-8') or '', style_n)
                sheet.write(i, 8, row.hours_training or '', style_n)
                sheet.write(i, 9, row.supplier_id.name.encode('utf-8') or '', style_n)
                sheet.write(i, 10, mes1 + '-' + str(row.date[0:4]) or '', style_n)
                b = b + 1
                a = a + 1
                i = i + 1
            # One row per unregistered ("new") participant; company cells empty.
            for line in persons_obj.search(cr, uid, [('course_id', '=', row.id)]):
                li = persons_obj.browse(cr, uid, line)
                sheet.write(i, 0, a, style_n)
                sheet.write(i, 1, b, style_n)
                sheet.write(i, 2, li.name.encode('utf-8') or '', style_n)
                sheet.write(i, 3, '', style_n)
                sheet.write(i, 4, li.sexo or '', style_n)
                sheet.write(i, 5, li.town.name or '', style_n)
                sheet.write(i, 6, '', style_n)
                sheet.write(i, 7, row.name.encode('utf-8') or '', style_n)
                sheet.write(i, 8, row.hours_training or '', style_n)
                sheet.write(i, 9, row.supplier_id.name.encode('utf-8') or '', style_n)
                sheet.write(i, 10, mes1 + '-' + str(row.date[0:4]) or '', style_n)
                b = b + 1
                a = a + 1
                i = i + 1
            mes = mes1
        return sheet

    # Returns the Spanish month name for a 'MM' string.
    def month(self, cr, uid, val, context=None):
        """Return the upper-case Spanish month name for ``val`` ('01'..'12').

        Unknown values yield '' (same fallback as the old if/elif ladder).
        """
        names = {
            '01': "ENERO", '02': "FEBRERO", '03': "MARZO", '04': "ABRIL",
            '05': "MAYO", '06': "JUNIO", '07': "JULIO", '08': "AGOSTO",
            '09': "SEPTIEMBRE", '10': "OCTUBRE", '11': "NOVIEMBRE",
            '12': "DICIEMBRE",
        }
        return names.get(val, "")
class manuscrito(orm.Model):
    """Manuscript submitted by an author, tracked through an editorial flow.

    The ``state`` field models the pipeline (acknowledgement -> evaluation ->
    confirmation -> contract signature -> correction -> author approval ->
    sent to press -> done); each ``set_*`` method advances a record one step.
    """
    _name = 'res.manuscrito'
    _description = 'Manuscrito'
    _columns = {
        # FIX: ``domain`` was dangling outside the fields.many2one(...) call
        # with an unbalanced parenthesis (syntax error); it now restricts the
        # selectable partners to those flagged as authors.
        'autor': fields.many2one('res.partner', 'Autor', track_visibility='onchange', required=True, select=True, domain="[('author','=',True)]"),
        'partner_id': fields.many2one('res.partner', 'Partner', ondelete='set null', track_visibility='onchange', select=True, help="Linked partner (optional). Usually created when converting the lead."),
        'titulo': fields.char('Título', size=50, required=True),
        'isbn': fields.char('ISBN', size=30, required=True),
        'formato': fields.char('Formato', size=30),
        'genero': fields.selection([('ciencia-ficcion', 'Ciencia-Ficcion'), ('novela', 'Novela'), ('poesia', 'Poesía'), ('cuento', 'Cuento'), ('historia', 'Historia'), ('miedo', 'Miedo'), ('otro', 'Otros')], 'Género', required=True),
        'email': fields.char('E-MAIL', size=20),
        'comment': fields.text('Descripción'),
        'image': fields.binary("Image", help="Select image here"),
        'date': fields.date('Date', select=1),
        'idioma': fields.selection([('cas', 'Castellano'), ('en', 'Inglés'), ('fr', 'Francés')], 'Idioma'),
        'state': fields.selection([('recibo', 'Acuse recibo'), ('eval', 'Evaluación'), ('confirmacion', 'Pendiente confirmación'), ('cancelled', 'Cancelado'), ('firma', 'Firma Contrato'), ('corregir', 'Corrección'), ('reenvio', 'Visto bueno autor'), ('envio imprenta', 'Enviado a imprenta'), ('done', 'Hecho')]),
    }

    # Workflow helpers: each button advances the record to the next state.
    def set_recibo(self, cr, uid, ids, context=None):
        return self.write(cr, uid, ids, {'state': 'eval'}, context=context)

    def set_evaluar(self, cr, uid, ids, context=None):
        return self.write(cr, uid, ids, {'state': 'confirmacion'}, context=context)

    def set_aceptar(self, cr, uid, ids, context=None):
        return self.write(cr, uid, ids, {'state': 'firma'}, context=context)

    def set_firmar(self, cr, uid, ids, context=None):
        return self.write(cr, uid, ids, {'state': 'corregir'}, context=context)

    def set_corregir(self, cr, uid, ids, context=None):
        return self.write(cr, uid, ids, {'state': 'reenvio'}, context=context)

    def set_visto(self, cr, uid, ids, context=None):
        # NOTE(review): this method's body was missing/truncated in the
        # previous revision. Following the pattern of the other set_* methods
        # and the declared state sequence, it advances to 'envio imprenta' —
        # confirm against the workflow buttons in the views.
        return self.write(cr, uid, ids, {'state': 'envio imprenta'}, context=context)
class export_csv(orm.TransientModel):
    """Wizard exporting coordinate/virtual-target data to a CSV file.

    VIP records are obfuscated for users lacking the VIP-reader group.
    """
    _name = 'export.csv'
    _description = 'Export CSV Wizard'

    _columns = {
        'export_file': fields.binary('Csv', readonly=True),
        'export_filename': fields.char('Export CSV Filename', size=128),
    }

    def _get_csv_rows(self, cr, uid, context=None):
        """ Get the rows (header) for the specified model. """
        hdr = [
            _('Number'), _('Name'), _('Lastname'), _('Firstname'),
            _('Usual Lastname'), _('Usual Firstname'),
            _('Co-residency Line 1'), _('Co-residency Line 2'),
            _('Internal Instance'), _('Power Level'), _('State'),
            _('Reference'), _('Birth Date'), _('Gender'), _('Tongue'),
            _('Main Address'), _('Unauthorized Address'), _('Vip Address'),
            _('Street2'), _('Street'), _('Zip'), _('City'),
            _('Country Code'), _('Country'),
            _('Main Phone'), _('Unauthorized Phone'), _('Vip Phone'),
            _('Phone'),
            _('Main Mobile'), _('Unauthorized Mobile'), _('Vip Mobile'),
            _('Mobile'),
            _('Main Fax'), _('Unauthorized Fax'), _('Vip Fax'), _('Fax'),
            _('Main Email'), _('Unauthorized Email'), _('Vip Email'),
            _('Email'),
            _('Website'), _('Secondary Website'),
            _('Local voluntary'), _('Regional voluntary'),
            _('National voluntary'), _('Local only'),
        ]
        return [_get_utf8(col) for col in hdr]

    def _get_order_by(self, order_by):
        """Translate the requested sort key into a SQL ORDER BY clause.

        Only the two whitelisted column names are ever interpolated into
        the SQL string, so this is not injectable.
        """
        r_order_by = "ORDER BY p.id"
        if order_by:
            if order_by == "identifier" or order_by == "technical_name":
                r_order_by = "ORDER BY p.%s" % order_by
            else:
                r_order_by = \
                    "ORDER BY country_name, final_zip, p.technical_name"
        return r_order_by

    @api.cr_uid_context
    def _get_csv_values(self, cr, uid, obj, obfuscation, context=None):
        """ Get the values of the specified obj taking into account the
        VIP obfuscation principle

        Each ``*_vip and obfuscation or value`` expression outputs the
        obfuscation marker instead of the real value when the record is
        VIP and the current user may not read VIP data (``obfuscation``
        is falsy for authorized readers, so the real value is kept).
        """
        export_values = [
            obj.get('identifier'),
            _get_utf8(obj.get('name')),
            _get_utf8(obj.get('lastname')),
            _get_utf8(obj.get('firstname')),
            _get_utf8(obj.get('usual_lastname')),
            _get_utf8(obj.get('usual_firstname')),
            _get_utf8(obj.get('printable_name')),
            _get_utf8(obj.get('co_residency')),
            _get_utf8(obj.get('instance')),
            _get_utf8(obj.get('power_name')),
            _get_utf8(obj.get('state')),
            _get_utf8(obj.get('reference')),
            obj.get('birth_date'),
            _get_utf8(obj.get('gender')),
            _get_utf8(obj.get('tongue')),
            obj.get('adr_main'),
            obj.get('adr_unauthorized'),
            obj.get('adr_vip'),
            obj.get('adr_vip') and obfuscation or
            _get_utf8(obj.get('street2')),
            obj.get('adr_vip') and obfuscation or
            _get_utf8(obj.get('street')),
            obj.get('adr_vip') and obfuscation or obj.get('final_zip'),
            obj.get('adr_vip') and obfuscation or
            _get_utf8(obj.get('city')),
            obj.get('country_code'),
            _get_utf8(obj.get('country_name')),
            obj.get('fix_main'),
            obj.get('fix_unauthorized'),
            obj.get('fix_vip'),
            obj.get('fix_vip') and obfuscation or _get_utf8(obj.get('fix')),
            obj.get('mobile_main'),
            obj.get('mobile_unauthorized'),
            obj.get('mobile_vip'),
            obj.get('mobile_vip') and obfuscation or
            _get_utf8(obj.get('mobile')),
            obj.get('fax_main'),
            obj.get('fax_unauthorized'),
            obj.get('fax_vip'),
            obj.get('fax_vip') and obfuscation or _get_utf8(obj.get('fax')),
            obj.get('email_main'),
            obj.get('email_unauthorized'),
            obj.get('email_vip'),
            obj.get('email_vip') and obfuscation or
            _get_utf8(obj.get('email')),
            _get_utf8(obj.get('website')),
            _get_utf8(obj.get('secondary_website')),
            _get_utf8(obj.get('local_voluntary')),
            _get_utf8(obj.get('regional_voluntary')),
            _get_utf8(obj.get('national_voluntary')),
            _get_utf8(obj.get('local_only')),
        ]
        return export_values

    def _prefetch_csv_datas(self, cr, uid, model, model_ids, context=None):
        """Yield one raw dict row per exported record (generator).

        :raises orm.except_orm: when ``model`` is not one of the three
            supported coordinate models.
        """
        queries_obj = self.pool['export.csv.queries']
        if not model_ids:
            return
        if model == 'email.coordinate':
            query = """
                %s
                WHERE ec.id IN %%s
            """ % queries_obj.email_coordinate_request(cr, uid)
        elif model == 'postal.coordinate':
            query = """
                %s
                WHERE pc.id IN %%s
            """ % queries_obj.postal_coordinate_request(cr, uid)
        elif model == 'virtual.target':
            query = """
                %s
                WHERE vt.id IN %%s
            """ % queries_obj.virtual_target_request(cr, uid)
        else:
            raise orm.except_orm(
                _('Error'),
                _('Model %s not supported for csv export!') % model)
        # Fix: the signature allows context=None but the original called
        # context.get(...) unconditionally, crashing on a None context.
        order_by = self._get_order_by((context or {}).get('sort_by'))
        query = "%s %s" % (query, order_by)
        # The ids are passed as a bound parameter, never interpolated.
        cr.execute(query, (tuple(model_ids), ))
        for row in cr.dictfetchall():
            yield row

    def get_csv(self, cr, uid, model, model_ids, group_by=False,
                context=None):
        """ Build a CSV file related to a coordinate model """
        tmp = tempfile.NamedTemporaryFile(
            prefix='Extract', suffix=".csv", delete=False)
        # Fix: the original left the NamedTemporaryFile's own fd open
        # (it only ever used the path), leaking one fd per export.
        tmp.close()
        f = open(tmp.name, "r+")
        try:
            writer = csv.writer(f)
            hdr = self._get_csv_rows(cr, uid, context=context)
            writer.writerow(hdr)
            co_residencies = []
            if model_ids:
                # Preload id->name maps once instead of per exported row.
                state_ids = self.pool['membership.state'].search(
                    cr, uid, [], context=context)
                states = self.pool['membership.state'].browse(
                    cr, uid, state_ids, context=context)
                states = {st.id: st.name for st in states}
                country_ids = self.pool['res.country'].search(
                    cr, uid, [], context=context)
                countries = self.pool['res.country'].browse(
                    cr, uid, country_ids, context=context)
                countries = {cnt.id: cnt.name for cnt in countries}
                selections = self.pool['res.partner'].fields_get(
                    cr, uid, allfields=['gender', 'tongue'],
                    context=context)
                genders = {k: v
                           for k, v in selections['gender']['selection']}
                tongues = {k: v
                           for k, v in selections['tongue']['selection']}
                # Users without the VIP-reader group see 'VIP' markers.
                viper = self.pool['res.users'].has_group(
                    cr, uid, 'mozaik_base.mozaik_res_groups_vip_reader')
                obfuscation = False if viper else 'VIP'
                for data in self._prefetch_csv_datas(
                        cr, uid, model, model_ids, context=context):
                    if data.get('state_id'):
                        data['state'] = states[data['state_id']]
                    if data.get('country_id'):
                        data['country_name'] = countries[data['country_id']]
                    if data.get('gender'):
                        data['gender'] = genders.get(data['gender'])
                    if data.get('tongue'):
                        data['tongue'] = tongues.get(data['tongue'])
                    if model == 'postal.coordinate' and group_by:
                        # when grouping by co_residency, output only one row
                        # by co_residency
                        co_id = data.get('co_residency_id')
                        if co_id and co_id in co_residencies:
                            continue
                        co_residencies.append(co_id)
                    export_values = self._get_csv_values(
                        cr, uid, data, obfuscation, context=context)
                    assert len(hdr) == len(export_values)
                    writer.writerow(export_values)
        finally:
            f.close()
        f = open(tmp.name, "r")
        try:
            csv_content = f.read()
        finally:
            f.close()
        return csv_content

    def export(self, cr, uid, ids, context=None):
        """Generate the CSV, store it on the wizard and redisplay it."""
        # Guard against a None context before the .get() calls below.
        context = context or {}
        model = context.get('active_model', False)
        model_ids = context.get('active_ids', False)
        csv_content = self.get_csv(cr, uid, model, model_ids,
                                   context=context)
        csv_content = base64.encodestring(csv_content)
        self.write(cr, uid, ids[0], {
            'export_file': csv_content,
            'export_filename': 'Extract.csv',
        }, context=context)
        return {
            'name': 'Export Csv',
            'type': 'ir.actions.act_window',
            'res_model': 'export.csv',
            'view_mode': 'form',
            'view_type': 'form',
            'res_id': ids[0],
            'views': [(False, 'form')],
            'target': 'new',
        }
raise osv.except_osv( _('Wrong Certificate file format'), _('Be sure you have BEGIN CERTIFICATE string in' ' your first line.')) else: raise osv.except_osv( _('Unknown error'), _('X509 return this message:\n %s') % e[0]) wz.wsafip_request_id.write({'state': 'confirmed'}) _name = 'l10n_ar_wsafip.loadcert_config' _inherit = 'res.config' _columns = { 'wsafip_request_id': fields.many2one( 'crypto.certificate', 'Certificate Request', required=True), 'wsafip_request_file': fields.binary( 'Download Signed Certificate Request', readonly=True), 'wsafip_request_filename': fields.char( 'Filename', readonly=True), 'wsafip_response_file': fields.binary( 'Upload Certificate', required=True), } _defaults = { 'wsafip_request_filename': 'request.csr', } l10n_ar_wsafip_loadcert_config() # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
# Interior of the stock-import wizard model (class header not in view).
_description = "Import Inventory"

def _default_location(self, cr, uid, ids, context=None):
    # Default the wizard's location to the standard "Stock" location
    # shipped by the ``stock`` module (XML id stock.stock_location_stock).
    try:
        loc_model, location_id = self.pool.get(
            'ir.model.data').get_object_reference(cr, uid,
                                                  'stock',
                                                  'stock_location_stock')
    # get_object_reference raises ValueError when the XML id is missing;
    # in that case there is no sensible default.
    except ValueError, e:
        return False
    return location_id or False

_columns = {
    'location_id': fields.many2one('stock.location', 'Location',
                                   required=True),
    # Uploaded spreadsheet with the quantities to import.
    'import_file': fields.binary('File', filters="*.xls"),
    # to consider the product current inventory or not, if yes then add
    # the current inventory to the upload excel quantity as the quantity
    # to do physical inventory
    'consider_inventory': fields.boolean('Consider Current Inventory',
                                         select=True),
    # Flag set once every row of the file has been processed.
    'all_done': fields.boolean('All Data Imported', readonly=True,
                               select=True),
    'result_line': fields.one2many('stock.import.inventory.result',
                                   'import_id', 'Importing Result',
                                   readonly=True),
}
_defaults = {
    'location_id': _default_location,
}
import os


class stock_import_inventory(osv.osv_memory):
    """Wizard importing physical-inventory quantities from an .xls file."""
    _name = "stock.import.inventory"
    _description = "Import Inventory"

    def _default_location(self, cr, uid, ids, context=None):
        """Default to the stock module's standard "Stock" location.

        Returns False when the ``stock.stock_location_stock`` XML id is
        not available (e.g. demo data removed).
        """
        try:
            loc_model, location_id = self.pool.get(
                'ir.model.data').get_object_reference(
                    cr, uid, 'stock', 'stock_location_stock')
        except ValueError:
            # Fix: the original bound the exception (``except ValueError,
            # e``) without ever using it.
            return False
        return location_id or False

    _columns = {
        'location_id': fields.many2one('stock.location', 'Location',
                                       required=True),
        'import_file': fields.binary('File', filters="*.xls"),
        # to consider the product current inventory or not, if yes then
        # add the current inventory to the upload excel quantity as the
        # quantity to do physical inventory
        'consider_inventory': fields.boolean('Consider Current Inventory',
                                             select=True),
        'all_done': fields.boolean('All Data Imported', readonly=True,
                                   select=True),
        'result_line': fields.one2many('stock.import.inventory.result',
                                       'import_id', 'Importing Result',
                                       readonly=True),
        # Downloadable template spreadsheet served by _get_template.
        'file_template': fields.binary('Template File', readonly=True),
        'file_template_name': fields.char('Template File Name'),
    }

    def _get_template(self, cr, uid, context):
        """Return the bundled import template, base64-encoded."""
        cur_path = os.path.split(os.path.realpath(__file__))[0]
        path = os.path.join(cur_path, 'stock_import_template.xls')
        # Fix: the original leaked the file handle (open().read() with no
        # close). Keep the py2 str.encode('base64') so the stored payload
        # format is unchanged.
        with open(path, 'rb') as template_file:
            data = template_file.read().encode('base64')
        return data
class mmr_pembayaranpembelian(osv.osv):
    """Supplier payment for PT. MMR purchases.

    One payment always targets a single supplier but may cover several
    invoices. Payments are checked by the finance department; once
    approved (``setuju``) the payment is locked against deletion.
    """
    _name = "mmr.pembayaranpembelian"
    _description = "Modul Pembayaran Pembelian untuk PT. MMR."

    def setuju(self, cr, uid, ids, context):
        """Approve the payment: stamp the approver's login on it."""
        userclass = self.pool.get("res.users")
        userobj = userclass.browse(cr, uid, uid)
        # NOTE(review): ids is browsed as-is before reading tanggalbayar,
        # so callers appear to pass a single id — confirm.
        if self.browse(cr, uid, ids).tanggalbayar == False:
            raise osv.except_osv(_('Tidak Dapat Melanjutkan'),
                                 _("Tanggal Bayar Masih Kosong"))
        self.write(cr, uid, ids, {'disetujui': userobj.login})
        return {'type': 'ir.actions.client', 'tag': 'reload'}

    def revisi(self, cr, uid, ids, context):
        """Revoke the approval so the payment can be edited again."""
        self.write(cr, uid, ids, {'disetujui': False})
        return {'type': 'ir.actions.client', 'tag': 'reload'}

    # Compute the totals over every selected invoice line, then apply the
    # surcharge/shortage adjustments to the grand total.
    @api.one
    @api.depends("pembayaranpembeliandetil",
                 "pembayaranpembeliandetil.hutang", "kelebihan",
                 "kekurangan", "biayatransfer", "biayalain")
    def _hitung_hutang(self):
        for semuafaktur in self.pembayaranpembeliandetil:
            self.hutang += semuafaktur.hutang
            self.bayar += semuafaktur.bayar
            self.bayartotal += semuafaktur.bayar
        # NOTE(review): the collapsed source does not show whether these
        # adjustments sat inside the loop; applying them once per record
        # is the reading consistent with their semantics — confirm.
        self.bayartotal += self.kelebihan
        self.bayartotal -= self.kekurangan
        self.bayartotal += self.biayatransfer
        self.bayartotal += self.biayalain

    # Fill the journal entries from the configured account-rule template.
    @api.one
    @api.onchange("pembayaranpembeliandetil", "aturanakun", "akunotomatis",
                  "kelebihan", "kekurangan", "biayatransfer", "biayalain")
    def _isi_akun(self):
        if self.bayar != False and self.aturanakun != False \
                and self.akunotomatis != False:
            self.akunterkena = False
            # Map every rule field name to the current amount it denotes.
            data = {
                'bayar': self.bayar,
                'hutang': self.hutang,
                'kelebihan': self.kelebihan,
                'kekurangan': self.kekurangan,
                'biayatransfer': self.biayatransfer,
                'biayalain': self.biayalain,
                'bayartotal': self.bayartotal,
            }
            for semuaakundetil in self.aturanakun.aturanakundetil:
                if semuaakundetil.debitkredit == "debit":
                    if data[semuaakundetil.field.name] != 0:
                        self.akunterkena += self.env['mmr.akundetil'].new({
                            "idakun": semuaakundetil.noakun.id,
                            "tanggal": self.tanggalbayar,
                            "kredit": 0,
                            "debit": data[semuaakundetil.field.name],
                            "sumberpembayaranpembelian": self.id,
                            "notes": False,
                        })
                else:
                    if data[semuaakundetil.field.name] != 0:
                        self.akunterkena += self.env['mmr.akundetil'].new({
                            "idakun": semuaakundetil.noakun.id,
                            "tanggal": self.tanggalbayar,
                            "debit": 0,
                            "kredit": data[semuaakundetil.field.name],
                            "sumberpembayaranpembelian": self.id,
                            "notes": False,
                        })
        return self

    def onchange_metode(self, cr, uid, ids, metode, context=None):
        """Clear the destination that does not match the payment method.

        Transfers go to a bank account; every other method goes to a
        contact person.
        """
        hasil = {}
        if metode != False:
            if metode == 'transfer':
                hasil['tujuancp'] = False
            else:
                hasil['tujuanrekening'] = False
        return {'value': hasil}

    def _set_status(self, cr, uid, ids, field_name, field_value,
                    args=None, context=None):
        """Functional getter for ``status``.

        Not approved / approved / journal out of balance (debits minus
        credits do not round to zero).
        """
        res = {}
        for biaya in self.browse(cr, uid, ids):
            res[biaya.id] = "Belum Diterima"
            if biaya.disetujui != False:
                res[biaya.id] = "Sudah Diterima"
            total = 0
            for semuaakundetil in biaya.akunterkena:
                total += semuaakundetil.debit
                total -= semuaakundetil.kredit
            if round(total, 2) != 0:
                res[biaya.id] = "Jurnal Tidak Balance"
        return res

    _columns = {
        "trigger": fields.char("Trigger", compute="_isi_akun"),
        "supplier": fields.many2one("mmr.supplier", "Supplier",
                                    required=True),
        "tanggalbayar": fields.date("Tanggal Bayar", required=True),
        "metode": fields.selection(
            [('cash', 'Cash'), ('bg', 'BG'), ('cek', 'Cek'),
             ('transfer', 'Transfer')],
            "Metode Pembayaran", required=True),
        "tujuanrekening": fields.many2one(
            "mmr.nomorrekening", "Rekening Tujuan",
            domain="[('supplier', '=', supplier)]"),
        "tujuancp": fields.many2one(
            "mmr.cp", "CP Tujuan",
            domain="[('supplier', '=', supplier)]"),
        "hutang": fields.float("Total Hutang", compute="_hitung_hutang",
                               digits=(12, 2)),
        "bayar": fields.float(
            "Total Pembayaran Sebelum Biaya Tambahan", digits=(12, 2),
            compute="_hitung_hutang",
            help="Total Pembayaran Sebelum dikenai Biaya Tambahan"),
        'bayartotal': fields.float(
            "Total Pembayaran Setelah Biaya Tambahan", digits=(12, 2),
            compute="_hitung_hutang",
            help="Total Pembayaran Setelah dikenai Biaya Tambahan"),
        "pembayaranpembeliandetil": fields.one2many(
            "mmr.pembayaranpembeliandetil", "idpembayaranpembelian",
            "List Faktur"),
        'akunotomatis': fields.boolean(
            "Otomatisasi Jurnal",
            help=
            "Apabila tercentang, jurnal akan diisi otomatis sesuai data yang ada! Sebaliknya, jurnal tidak akan diisi otomatis dan user dapat mengubah jurnal secara manual!"
        ),
        "aturanakun": fields.many2one(
            "mmr.aturanakun", "Aturan Jurnal",
            domain="[('model', '=', namamodel),('aktif', '=', True)]"),
        "akunterkena": fields.one2many(
            "mmr.akundetil", "sumberpembayaranpembelian", "Jurnal"),
        'kelebihan': fields.float("Kelebihan", digits=(12, 2)),
        # Fix: the original read ``fields.float("Kekurangan, digits=(12,2)")``
        # — a misplaced quote swallowed the digits kwarg into the label.
        'kekurangan': fields.float("Kekurangan", digits=(12, 2)),
        'biayatransfer': fields.float("Biaya Transfer", digits=(12, 2)),
        'biayalain': fields.float("Biaya Lain", digits=(12, 2)),
        'diedit': fields.char("Diedit", readonly=True),
        "disetujui": fields.char("Disetujui", readonly=True),
        "bukti": fields.binary("Bukti",
                               help="Masukkan Foto Bukti di Sini"),
        "notes": fields.text("Notes"),
        'namamodel': fields.char("NamaModel"),
        'status': fields.function(_set_status, type="char", method=True,
                                  string="Status"),
    }

    def create(self, cr, uid, vals, context=None):
        """Create the payment, then run the module's validation hook."""
        id = super(mmr_pembayaranpembelian, self).create(cr, uid, vals,
                                                         context)
        objini = self.browse(cr, uid, id)
        validasipembayaranpembelian(self, cr, uid, id)
        return id

    def write(self, cr, uid, id, vals, context=None):
        """Write, stamping the editor's login when payment data changed."""
        res = super(mmr_pembayaranpembelian, self).write(cr, uid, id, vals,
                                                         context)
        objini = self.browse(cr, uid, id)
        hasil = {}
        # Mark the editor whenever a business field was touched.
        if 'supplier' in vals or 'tanggalbayar' in vals \
                or 'metode' in vals or 'tujuanrekening' in vals \
                or 'tujuancp' in vals \
                or 'pembayaranpembeliandetil' in vals \
                or 'akunotomatis' in vals or 'aturanakun' in vals \
                or 'akunterkena' in vals or 'bukti' in vals \
                or 'notes' in vals:
            userclass = self.pool.get("res.users")
            userobj = userclass.browse(cr, uid, uid)
            hasil['diedit'] = userobj.login
            res = super(mmr_pembayaranpembelian, self).write(cr, uid, id,
                                                             hasil, context)
        validasipembayaranpembelian(self, cr, uid, id)
        return res

    def unlink(self, cr, uid, ids, context):
        """Delete the payment and its detail lines.

        Approved payments may only be deleted when the caller passes the
        'ijindelete' key in the context.
        """
        pembayaranpembeliandetilClass = self.pool.get(
            "mmr.pembayaranpembeliandetil")
        # NOTE(review): ids is unconditionally wrapped, so callers appear
        # to pass a single id — confirm before changing.
        ids = [ids]
        for id in ids:
            pembayaranpembelian = self.browse(cr, uid, id)
            if pembayaranpembelian.disetujui != False \
                    and 'ijindelete' not in context:
                raise osv.except_osv(_('Tidak Dapat Menghapus'),
                                     _("Pembayaran Telah Disetujui!"))
            for semuapembayaranpembeliandetil in \
                    pembayaranpembelian.pembayaranpembeliandetil:
                pembayaranpembeliandetilClass.unlink(
                    cr, uid, semuapembayaranpembeliandetil.id)
            super(mmr_pembayaranpembelian, self).unlink(cr, uid, id,
                                                        context)
        return True

    def copy(self, cr, uid, id, default=None, context=None):
        """Duplicating a payment is forbidden — always raises."""
        # Fix: removed the unreachable ``return True`` after the raise.
        raise osv.except_osv(
            _('Tidak Dapat Duplikasi'),
            _('Dilarang melakukan duplikasi data Pembayaran.'))

    _defaults = {
        'namamodel': "mmr.pembayaranpembelian",
        'akunotomatis': True,
    }
{"url": url, "url_big": url_big, "url_medium": url_medium, "url_small": url_small}, context=context, ) else: return False return self.write(cr, uid, id, {"file_db_store": value}, context=context) _columns = { "name": fields.char("Image Title", size=64), "filename": fields.char("Filename", size=64), "extension": fields.char("file extension", oldname="extention"), "link": fields.boolean( "Link?", help="Images can be linked from files on " "your file system or remote (Preferred)" ), "file_db_store": fields.binary("Image stored in database"), "file": fields.function( _get_image, fnct_inv=_set_image, type="binary", string="File", filters="*.png,*.jpg,*.gif" ), "url": fields.char("File Location"), "url_big": fields.char("File Location Image Size Big"), "url_medium": fields.char("File Location Image Size Medium"), "url_small": fields.char("File Location Image Size Small"), "comments": fields.text("Comments"), "product_id": fields.many2one("product.product", "Product"), } _defaults = {"link": True} _sql_constraints = [ (
if statement.journal_id.name == 'Other': other_amount += statement.amount detail_record = detail_record +'12345678'+'|'+str(batch_id)+'|'+str(datetime.strftime(next_date, "%d%m%Y"))+'|'+str(hour)+'|'+str(recipt_count)+'|'+str(gto_sale)+'|'+str(amount_gst)+'|'+str(amount_discount)+'|'+str(service_charge)+'|'+str(number_of_pax)+'|'+str(cash_amount)+'|'+str(nets_amount)+'|'+str(visa_amount)+'|'+str(mastercard_amount)+'|'+str(amax_amount)+'|'+str(voucher_amount)+'|'+str(other_amount)+'|'+'N'+'\r\n' else: detail_record = detail_record +'12345678'+'|'+str(batch_id)+'|'+str(datetime.strftime(next_date, "%d%m%Y"))+'|'+str(hour)+'|'+str(recipt_count)+'|'+str(gto_sale)+'|'+str(amount_gst)+'|'+str(amount_discount)+'|'+str(service_charge)+'|'+str(number_of_pax)+'|'+str(cash_amount)+'|'+str(nets_amount)+'|'+str(visa_amount)+'|'+str(mastercard_amount)+'|'+str(amax_amount)+'|'+str(voucher_amount)+'|'+str(other_amount)+'|'+'N'+'\r\n' time += timedelta(hours=1) # time_prev += timedelta(hours=1) next_date += timedelta(days=1) tmp_file.write(detail_record) finally: if tmp_file: tmp_file.close() file = open(tgz_tmp_filename, "rb") out = file.read() file.close() return base64.b64encode(out) _columns = { 'name': fields.char('Name', size=64), 'sale_report_txt_file': fields.binary('Click On Download Link To Download File', readonly=True), } _defaults = { 'name': 'Sale Report.txt', 'sale_report_txt_file': _generate_sale_report_file, } binary_sale_report_text_file_wizard()