class product_icecat_wizard(osv.osv_memory): _name = 'product.icecat.wizard' _columns = { 'name': fields.boolean('Name'), 'description': fields.boolean('Description'), 'description_sale': fields.boolean('Description Śale'), 'attributes': fields.boolean('Attributes'), 'language_id': fields.many2one('res.lang', 'Language'), 'image': fields.boolean('Image'), 'html': fields.boolean('HTML Code'), 'result': fields.text('Result', readonly=True), 'resimg': fields.text('Image', readonly=True), 'state': fields.selection([ ('first', 'First'), ('done', 'Done'), ], 'State'), } _defaults = { 'state': lambda *a: 'first', 'name': lambda *a: 1, 'description': lambda *a: 1, 'description_sale': lambda *a: 1, 'attributes': lambda *a: 1, 'html': lambda *a: 1, } # ========================================== # save XML file into product_icecat/xml dir # ========================================== def save_file(self, name, value): path = os.path.abspath(os.path.dirname(__file__)) path += '/icecat/%s' % name path = re.sub('wizard/', '', path) f = open(path, 'w') try: f.write(value) finally: f.close() return path # ========================================== # Convert HTML to text # ========================================== def StripTags(self, text): finished = 0 while not finished: finished = 1 start = text.find("<") if start >= 0: stop = text[start:].find(">") if stop >= 0: text = text[:start] + text[start + stop + 1:] finished = 0 return text # ========================================== # Convert icecat values to OpenERP mapline # ========================================== def icecat2oerp(self, cr, uid, form, product, icecat, pathxml, language, data, context): #check if attributes product exists. If exists, raise error. 
Not update attributes values if form.attributes: attributes_ids = self.pool.get( 'product.manufacturer.attribute').search( cr, uid, [('product_id', '=', product.id)]) if len(attributes_ids) > 0: raise osv.except_osv( _('Error'), _("There are attributes avaible in this product. Delete this attributes or uncheck attributes option" )) if form.language_id.code: language = form.language_id.code doc = libxml2.parseFile(pathxml) for prod in doc.xpathEval('//Product'): if prod.xpathEval('@ErrorMessage'): if prod.xpathEval('@ErrorMessage')[0].content: return prod.xpathEval('@ErrorMessage')[0].content exit # product info short_summary = doc.xpathEval( '//SummaryDescription//ShortSummaryDescription') long_summary = doc.xpathEval( '//SummaryDescription//LongSummaryDescription') short_description = short_summary[0].content description = long_summary[0].content name = description.split('.')[0] for prod in doc.xpathEval('//ProductDescription'): if prod.xpathEval('@ShortDesc'): short_description = prod.xpathEval('@ShortDesc')[0].content if prod.xpathEval('@LongDesc'): description = prod.xpathEval('@LongDesc')[0].content # product details category categoryId = [] categoryName = [] for cat in doc.xpathEval('//CategoryFeatureGroup'): categoryId.append(cat.xpathEval('@ID')[0].content) for cat in doc.xpathEval('//CategoryFeatureGroup//FeatureGroup//Name'): categoryName.append(cat.xpathEval('@Value')[0].content) # join categorys lists category = zip(categoryId, categoryName) # product details feature prodFeatureId = [] prodFeatureName = [] values = {} for prod in doc.xpathEval('//ProductFeature'): prodFeatureId.append( prod.xpathEval('@CategoryFeatureGroup_ID')[0].content + "#" + prod.xpathEval('@Presentation_Value')[0].content) for prod in doc.xpathEval('//ProductFeature//Feature//Name'): prodFeatureName.append(prod.xpathEval('@Value')[0].content) # ordered id, name & description Product Feature prodFeature = {} i = 0 for feature in prodFeatureId: if not prodFeatureName[i] == 'Source 
data-sheet': values = feature.split('#') if values[1] == "Y": value = _("Yes") elif values[1] == "N": value = _("No") else: value = values[1] if values[0] not in prodFeature: prodFeature[values[0]] = [] prodFeature[values[0]].append('<strong>' + prodFeatureName[i] + ':</strong>' + ' ' + value) i += 1 mapline_ids = self.pool.get('product.icecat.mapline').search( cr, uid, [('icecat_id', '=', icecat.id)]) mapline_fields = [] for mapline_id in mapline_ids: mapline = self.pool.get('product.icecat.mapline').browse( cr, uid, mapline_id) mapline_fields.append({ 'icecat': mapline.name, 'oerp': mapline.field_id.name }) #show details product #TODO: HTML template use Mako template for not hardcode HTML tags mapline_values = [] attributes_values = [] sequence = 0 for cat in category: catID = cat[0] catName = cat[1] #product_manufacturer if form.attributes: attributes_values.append({ 'name': catName, 'icecat_category': catID, 'product_id': product.id, 'sequence': sequence }) sequence + 1 if catID in prodFeature and len(prodFeature[catID]): for feature in prodFeature[catID]: prod_value = feature.split(":") if len(prod_value) > 0: attributes_values.append({ 'name': '>' + self.StripTags(prod_value[0]), 'value': self.StripTags(prod_value[1]), 'icecat_category': catID, 'product_id': product.id, 'sequence': sequence }) sequence + 1 for mapline_field in mapline_fields: if mapline_field['icecat'] == catID: source = '<h3>%s</h3>' % catName i = True for feature in prodFeature[catID]: if i == True: source += '<ul>' source += '<li>%s</li>' % feature i = False source += '</ul>' if not form.html: source = self.StripTags(source) mapline_values.append({ 'field': mapline_field['oerp'], 'source': source }) # This is not hardcode. Short description is avaible in antother fields, for example meta_description website fields (magento, djnago,...) 
if mapline_field['icecat'] == 'ShortSummaryDescription': mapline_values.append({ 'field': mapline_field['oerp'], 'source': short_description }) # update icecat values at product # default values. It is not hardcode ;) values = {} if form.name: trans_name_id = self.pool.get('ir.translation').search( cr, uid, [('lang', '=', language), ('name', '=', 'product.template,name'), ('res_id', '=', product.id)]) if trans_name_id: self.pool.get('ir.translation').write(cr, uid, trans_name_id, {'value': name}, context) else: values['name'] = name if form.description_sale: trans_descsale_id = self.pool.get('ir.translation').search( cr, uid, [('lang', '=', language), ('name', '=', 'product.template,description_sale'), ('res_id', '=', product.id)]) if trans_descsale_id: self.pool.get('ir.translation').write( cr, uid, trans_descsale_id, {'value': short_description}, context) else: values['description_sale'] = short_description if form.description: if not form.html: description = self.StripTags(description) trans_description_id = self.pool.get('ir.translation').search( cr, uid, [('lang', '=', language), ('name', '=', 'product.template,description'), ('res_id', '=', product.id)]) if trans_description_id: self.pool.get('ir.translation').write(cr, uid, trans_description_id, {'value': description}, context) else: values['description'] = description #manufacturer product manufacturers = [] for supplier in doc.xpathEval('//Supplier'): manufacturers.append(supplier.xpathEval('@Name')[0].content) if len(manufacturers) > 0: partner_id = self.pool.get('res.partner').search( cr, uid, [('name', 'ilike', manufacturers[len(manufacturers) - 1])]) if len(partner_id) > 0: values['manufacturer'] = partner_id[0] values['manufacturer_pname'] = name ref = [] for prod in doc.xpathEval('//Product'): ref.append(prod.xpathEval('@Prod_id')[0].content) values['manufacturer_pref'] = ref[0] # add mapline values calculated for mapline_value in mapline_values: values[mapline_value['field']] = 
mapline_value['source'] self.pool.get('product.product').write(cr, uid, [product.id], values, context) #create manufacturer attribute if form.attributes: for values in attributes_values: self.pool.get('product.manufacturer.attribute').create( cr, uid, values, context) result = _("Product %s XML Import successfully") % name return result # ========================================== # Convert icecat values to OpenERP mapline # ========================================== def iceimg2oerpimg(self, cr, uid, form, product, icecat, pathxml, data, context): doc = libxml2.parseFile(pathxml) #product image for prod in doc.xpathEval('//Product'): if prod.xpathEval('@HighPic'): image = prod.xpathEval('@HighPic')[0].content if image: fname = image.split('/') fname = fname[len(fname) - 1] path = os.path.abspath(os.path.dirname(__file__)) path += '/icecat/%s' % fname path = re.sub('wizard/', '', path) #download image urllib.urlretrieve(image, path) #send ftp server ftp = FTP(icecat.ftpip) ftp.login(icecat.ftpusername, icecat.ftppassword) ftp.cwd(icecat.ftpdirectory) f = file(path, 'rb') ftp.storbinary('STOR ' + os.path.basename(path), f) ftp.quit() # add values into product_image # product info long_summary = doc.xpathEval( '//SummaryDescription//LongSummaryDescription') description = long_summary[0].content name = description.split('.')[0] values = { 'name': name, 'link': 1, 'filename': icecat.ftpurl + fname, 'product_id': product.id, } self.pool.get('product.images').create(cr, uid, values, context) return icecat.ftpurl + fname else: return _("Not exist %s image") % fname # ========================================== # wizard # ========================================= def import_xml(self, cr, uid, ids, data, context={}): icecat_id = self.pool.get('product.icecat').search( cr, uid, [('active', '=', 1)]) if not icecat_id: raise osv.except_osv(_('Error'), _("Configure your icecat preferences!")) icecat = self.pool.get('product.icecat').browse(cr, uid, icecat_id[0]) form = 
self.browse(cr, uid, ids[0]) if not form.language_id: language = self.pool.get('res.users').browse(cr, uid, uid).context_lang lang = language.split('_')[0] else: language = form.language_id.code lang = language.split('_')[0] resimg = '' for prod in data['active_ids']: product = self.pool.get('product.product').browse(cr, uid, prod) ean = product.ean13 if ean: url = 'http://data.icecat.biz/xml_s3/xml_server3.cgi?ean_upc=%s;lang=%s;output=productxml' % ( ean, lang) fileName = '%s.xml' % ean passman = urllib2.HTTPPasswordMgrWithDefaultRealm() # this creates a password manager passman.add_password(None, url, icecat.username, icecat.password) authhandler = urllib2.HTTPBasicAuthHandler(passman) # create the AuthHandler openerp = urllib2.build_opener(authhandler) urllib2.install_opener(openerp) # All calls to urllib2.urlopen will now use our handler try: pagehandle = urllib2.urlopen(url) req = urllib2.Request(url) handle = urllib2.urlopen(req) content = handle.read() #save file pathxml = self.save_file(fileName, content) #import values icecat2oerp result = self.icecat2oerp(cr, uid, form, product, icecat, pathxml, language, data, context) #import image icecat2oerp if icecat.ftp and form.image: resimg += self.iceimg2oerpimg(cr, uid, form, product, icecat, pathxml, data, context) resimg += "\n" else: resimg += _("Import image not avaible") resimg += "\n" except URLError, e: result = e.code else: result = _("EAN not avaible") resimg = False values = { 'state': 'done', 'result': result, 'resimg': resimg, } self.write(cr, uid, ids, values) return True
return 'need-provide-manually' else: return 'need-download' return 'need-install' def _is_remotewarehouse(self, cr, uid, context=None): try: entity = self.pool.get('sync.client.entity').get_entity(cr, uid) if entity.usb_instance_type == 'remote_warehouse': return True except Exception, e: pass return False _columns = { 'message' : fields.text("Caption", readonly=True), 'state' : fields.selection([ ('need-provide-manually','Need To Provide Manually The Files'), ('need-download','Need Download'), ('up-to-date','Up-To-Date'), ('need-install','Need Install'), ('need-restart','Need Restart'), ('blocked','Blocked') ], string="Status"), 'patch' : fields.binary("Patch"), 'error': fields.text('Error', readonly="1"), } _defaults = { 'message' : _generate, 'state' : _get_state,
class OeMedicalPatient(osv.Model):
    """Patient record: demographic and medical master data, delegating
    partner data to ``res.partner`` via ``_inherits``.
    """

    def _compute_age(self, cr, uid, ids, field_name, field_value, arg,
                     context=None):
        """fields.function compute: whole years elapsed since ``dob``.

        Returns {id: str(years)} — empty string when dob is not set.
        """
        result = {}
        now = datetime.now()
        for r in self.browse(cr, uid, ids, context=context):
            if r.dob:
                # BUGFIX: format was '%Y-%M-%d' — %M is *minutes*; the date
                # field is stored as '%Y-%m-%d' (month), so ages were
                # computed from a wrong date.
                dob = datetime.strptime(r.dob, '%Y-%m-%d')
                delta = relativedelta(now, dob)
                result[r.id] = str(delta.years)
            else:
                result[r.id] = ""
        return result

    _name = 'oemedical.patient'
    _inherits = {
        'res.partner': 'partner_id',
    }
    _columns = {
        'partner_id': fields.many2one(
            'res.partner', 'Related Partner', required=True,
            ondelete='cascade',
            help='Partner-related data of the patient'),
        'family': fields.many2one('oemedical.family', string='Family',
                                  help='Family Code'),
        'photo': fields.binary(string='Picture'),
        'sex': fields.selection([('m', 'Male'), ('f', 'Female'), ],
                                string='Sex', required=True),
        'blood_type': fields.selection([('A', 'A'), ('B', 'B'),
                                        ('AB', 'AB'), ('O', 'O'), ],
                                       string='Blood Type'),
        'general_info': fields.text(
            string='General Information',
            help='General information about the patient'),
        'primary_care_doctor': fields.many2one(
            'oemedical.physician', 'Primary Care Doctor',
            help='Current primary care / family doctor'),
        'childbearing_age': fields.boolean('Potential for Childbearing'),
        'medications': fields.one2many('oemedical.patient.medication',
                                       'patient_id', string='Medications',),
        'evaluations': fields.one2many('oemedical.patient.evaluation',
                                       'patient_id', string='Evaluations',),
        'critical_info': fields.text(
            string='Important disease, allergy or procedures information',
            help='Write any important information on the patient\'s disease,'
                 ' surgeries, allergies, ...'),
        'rh': fields.selection([('+', '+'), ('-', '-'), ], string='Rh'),
        'current_address': fields.many2one(
            'res.partner', string='Address',
            help='Contact information. You may choose from the different'
                 ' contacts and addresses this patient has.'),
        # NOTE(review): this help text looks copy-pasted from 'deceased';
        # left unchanged to avoid altering user-visible strings — confirm
        # against upstream module.
        'diseases': fields.one2many('oemedical.patient.disease',
                                    'patient_id', string='Diseases',
                                    help='Mark if the patient has died'),
        'lastname': fields.char(size=256, string='Lastname',),
        'slastname': fields.char(size=256, string='Second Lastname',),
        'ethnic_group': fields.many2one('oemedical.ethnicity',
                                        string='Ethnic group',),
        'ssn': fields.char(size=256, string='SSN',),
        'vaccinations': fields.one2many('oemedical.vaccination',
                                        'patient_id', 'Vaccinations',),
        'dob': fields.date(string='DoB'),
        # Computed, non-stored age in whole years (see _compute_age).
        'age': fields.function(
            _compute_age, string="Age", arg=None, fnct_inv=None,
            fnct_inv_arg=None, type="char", fnct_search=None, obj=None,
            method=True, store=False, multi=False,),
        'marital_status': fields.selection(
            [('s', 'Single'), ('m', 'Married'), ('w', 'Widowed'),
             ('d', 'Divorced'), ('x', 'Separated'), ],
            string='Marital Status', sort=False),
        'dod': fields.datetime(string='Date of Death'),
        'current_insurance': fields.many2one(
            'oemedical.insurance', string='Insurance',
            help='Insurance information. You may choose from the different'
                 ' insurances belonging to the patient'),
        'cod': fields.many2one('oemedical.pathology',
                               string='Cause of Death',),
        'identification_code': fields.char(
            size=256, string='ID',
            help='Patient Identifier provided by the Health Center.Is not the'
                 ' Social Security Number'),
        'deceased': fields.boolean(string='Deceased'),
    }
    _defaults = {
        # Internal reference taken from the dedicated ir.sequence.
        'ref': lambda obj, cr, uid, context: obj.pool.get('ir.sequence').get(
            cr, uid, 'oemedical.patient'),
    }
'intervention_assignement_id':fields.many2one('openstc.intervention.assignement', 'Assignement'), 'absent_type_id':fields.many2one('openstc.absent.type', 'Type d''abscence'), 'category_id':fields.many2one('openstc.task.category', 'Category'), 'state': fields.selection([('absent', 'Absent'),('draft', 'New'),('open', 'In Progress'),('pending', 'Pending'), ('done', 'Done'), ('cancelled', 'Cancelled')], 'State', readonly=True, required=True, help='If the task is created the state is \'Draft\'.\n If the task is started, the state becomes \'In Progress\'.\n If review is needed the task is in \'Pending\' state.\ \n If the task is over, the states is set to \'Done\'.'), 'team_id': fields.many2one('openstc.team', 'Team'), 'km': fields.integer('Km', select=1), 'oil_qtity': fields.float('oil quantity', select=1), 'oil_price': fields.float('oil price', select=1), 'site1':fields.related('project_id','site1',type='many2one',relation='openstc.site', string='Site',store={'project.task':[lambda self,cr,uid,ids,ctx={}:ids, ['project_id'], 10], 'project.project':[_get_task_from_inter, ['site1'],11]}), 'inter_desc': fields.related('project_id', 'description', type='char'), 'inter_equipment': fields.related('project_id', 'equipment_id', type='many2one',relation='openstc.equipment'), 'cancel_reason': fields.text('Cancel reason'), 'agent_or_team_name':fields.function(_get_agent_or_team_name, type='char', method=True, store=False), 'cost':fields.float('Cost', type='float', digits=(5,2)), 'hr_cost':fields.float('Cost', type='float', digits=(5,2)), 'equipment_cost':fields.float('Cost', type='float', digits=(5,2)), 'consumable_cost':fields.float('Cost', type='float', digits=(5,2)), } _defaults = {'active': lambda *a: True, 'user_id':None} """ Creates an orphan task : not attached to intervention """ def createOrphan(self, cr, uid, ids, params, context=None):
'warranty_duration':fields.selection([ ('no_warranty', 'No Warranty'), ('30_days','30 Days'), ('60_days','60 Days'), ('90_days','90 Days'), ('180_days','180 Days'), ('1_year','1 Year'), ('2_years','2 Years'), ('3_years','3 Years'), ('4_years','4 Years'), ('5_years','5 Years')], 'Duration', help="Duration of the Warranty"), 'warranty_history':fields.one2many('mttl.warranty.history','serial_id', 'Warranty Claims History'), #+++ HoangTK - 02/18/2016 : Link warranty module to serials 'warranty_ids': fields.one2many('warranty.cases','serial_id','Warranty Cases'), #Notes 'notes':fields.text('Notes'), 'image':fields.text('Images'), #attachments 'attachment_lines': fields.one2many('ir.attachment', 'mttl_serials_id', 'Attachment'), } _defaults = { 'serial_number':_generate_serial_number, 'serial':'New Number', } def _check_sum(self, serial_no): weights=[8,7,6,5,4,3,2,0,10,9,8,7,6]
cr, uid, id, {"history": (history or "") + "\n" + time.strftime("%Y-%m-%d %H:%M:%S") + ": " + tools.ustr(message)}, context, ) _columns = { "email_from": fields.char("From", size=64), "email_to": fields.char("Recipient (To)", size=250), "email_cc": fields.char("CC", size=250), "email_bcc": fields.char("BCC", size=250), "reply_to": fields.char("Reply-To", size=250), "message_id": fields.char("Message-ID", size=250), "subject": fields.char("Subject", size=200), "body_text": fields.text("Standard Body (Text)"), "body_html": fields.text("Body (Rich Text Clients Only)"), "attachments_ids": fields.many2many( "ir.attachment", "mail_attachments_rel", "mail_id", "att_id", "Attachments" ), "account_id": fields.many2one("email_template.account", "User account", required=True), "user": fields.related("account_id", "user", type="many2one", relation="res.users", string="User"), "server_ref": fields.integer("Server Reference of mail", help="Applicable for inward items only"), "mail_type": fields.selection( [ ("multipart/mixed", "Has Attachments"), ("multipart/alternative", "Plain Text & HTML with no attachments"), ("multipart/related", "Intermixed content"), ("text/plain", "Plain Text"), ("text/html", "HTML Body"), ],
'pem_from':fields.char( 'From', size=64), 'pem_to':fields.char( 'Recepient (To)', size=250,), 'pem_cc':fields.char( ' CC', size=250), 'pem_bcc':fields.char( ' BCC', size=250), 'pem_subject':fields.char( ' Subject', size=200,), 'pem_body_text':fields.text( 'Standard Body (Text)'), 'pem_body_html':fields.text( 'Body (Text-Web Client Only)'), 'pem_attachments_ids':fields.many2many( 'ir.attachment', 'mail_attachments_rel', 'mail_id', 'att_id', 'Attachments'), 'pem_account_id' :fields.many2one( 'poweremail.core_accounts', 'User account', required=True), 'pem_user':fields.related( 'pem_account_id', 'user',
ids, image) # ok to use .fromkeys() as the image is same for all _columns = { 'host':fields.char('Host', size=64, required=True), 'port':fields.integer('Port', required=True), 'ooo_restart_cmd': fields.char('OOO restart command', size=256, \ help='Enter the shell command that will be executed to restart the LibreOffice/OpenOffice background process.'+ \ 'The command will be executed as the user of the OpenERP server process,'+ \ 'so you may need to prefix it with sudo and configure your sudoers file to have this command executed without password.'), 'state':fields.selection([ ('init','Init'), ('error','Error'), ('done','Done'), ],'State', select=True, readonly=True), 'msg': fields.text('Message', readonly=True), 'error_details': fields.text('Error Details', readonly=True), 'link':fields.char('Installation Manual', size=128, help='Installation (Dependencies and Base system setup)', readonly=True), 'config_logo': fields.function(_get_image_fn, string='Image', type='binary', method=True), } def default_get(self, cr, uid, fields, context=None): config_obj = self.pool.get('oo.config') data = super(aeroo_config_installer, self).default_get(cr, uid, fields, context=context) ids = config_obj.search(cr, 1, [], context=context) if ids: res = config_obj.read(cr, 1, ids[0], context=context)
class project_task_employee(osv.osv):
    """Assignment of an employee to a project task, with worked periods,
    per-unit prices and computed subtotals, plus a small contract workflow
    (selected -> pending -> discharged -> ... -> paid / cancel).
    """
    _name = 'project.task.employee'
    _rec_name = 'employee_id'

    def _calculate_hours(self, cr, uid, ids, field_name, arg, context=None):
        """fields.function compute: total hours over all work periods."""
        res = {}
        for obj in self.browse(cr, uid, ids, context=context):
            hours = 0
            for period in obj.work_period_ids:
                end_date = datetime.strptime(period.end_date,
                                             "%Y-%m-%d %H:%M:%S")
                start_date = datetime.strptime(period.start_date,
                                               "%Y-%m-%d %H:%M:%S")
                # BUGFIX: the original used relativedelta and summed only
                # its .hours/.minutes components, silently dropping the
                # days/months/years parts — any period spanning 24h or more
                # was under-counted. Use the true elapsed time instead.
                delta = end_date - start_date
                hours += delta.days * 24.0 + delta.seconds / 3600.0
            res[obj.id] = hours
        return res

    def _calculate_dates(self, cr, uid, ids, field_name, arg, context=None):
        """fields.function compute (multi): earliest start / latest end
        over all work periods of each record."""
        res = {}
        for obj in self.browse(cr, uid, ids, context=context):
            res[obj.id] = {
                'start_date': None,
                'end_date': None,
            }
            min_start_date = False
            max_end_date = False
            for period in obj.work_period_ids:
                # First period seen seeds both extremes.
                if not min_start_date and not max_end_date:
                    min_start_date = period.start_date
                    max_end_date = period.end_date
                    continue
                min_date = datetime.strptime(min_start_date,
                                             "%Y-%m-%d %H:%M:%S")
                max_date = datetime.strptime(max_end_date,
                                             "%Y-%m-%d %H:%M:%S")
                start_date = datetime.strptime(period.start_date,
                                               "%Y-%m-%d %H:%M:%S")
                end_date = datetime.strptime(period.end_date,
                                             "%Y-%m-%d %H:%M:%S")
                if min_date > start_date:
                    min_start_date = period.start_date
                if end_date > max_date:
                    max_end_date = period.end_date
            res[obj.id]['start_date'] = min_start_date
            res[obj.id]['end_date'] = max_end_date
        return res

    def _calculate_totals(self, cr, uid, ids, field_name, arg, context=None):
        """fields.function compute (multi): quantity * price subtotals."""
        res = {}
        for emp in self.browse(cr, uid, ids, context=context):
            res[emp.id] = {
                'subtotal_hour': emp.hours * emp.price_hour,
                'subtotal_km': emp.km * emp.price_km,
                'subtotal_extra_hour': emp.extra_hour * emp.price_extra_hour,
                'subtotal_diet': emp.price_diet * emp.diets,
            }
        return res

    def _get_task_employee(self, cr, uid, ids, context=None):
        """store trigger: map changed work periods to their task-employee
        records so 'hours' is recomputed."""
        result = {}
        for period in self.pool.get('employee.work.period').browse(
                cr, uid, ids, context=context):
            result[period.employee_task_id.id] = True
        return result.keys()

    # States in which fields guarded by these maps become editable again.
    ro_states_pending = {
        'draft': [('readonly', False)],
        'selected': [('readonly', False)],
        'confirmed': [('readonly', False)],
        'pending': [('readonly', False)],
    }
    ro_states_payment_pending = {
        'draft': [('readonly', False)],
        'selected': [('readonly', False)],
        'confirmed': [('readonly', False)],
        'pending': [('readonly', False)],
        'discharged': [('readonly', False)],
        'worked': [('readonly', False)],
    }
    _columns = {
        'task_id': fields.many2one('project.task', 'Task', select=True),
        'employee_id': fields.many2one(
            'hr.employee', 'Employee', required=True, readonly=True,
            states={'draft': [('readonly', False)],
                    'selected': [('readonly', False)]},
            select=True),
        'employee_phone': fields.related(
            'employee_id', 'mobile_phone', type="char", readonly=True,
            string="Mobile Phone"),
        'employee_mail': fields.related(
            'employee_id', 'work_email', type="char", readonly=True,
            string="E-mail"),
        'work_address': fields.text('Work Address', readonly=True,
                                    states=ro_states_pending),
        'work_activity': fields.text('Activity', readonly=True,
                                     states=ro_states_pending),
        'uniform': fields.text('Uniform', readonly=True,
                               states=ro_states_pending),
        'instructions': fields.text('Instructions', readonly=True,
                                    states=ro_states_pending),
        'work_period_ids': fields.one2many(
            'employee.work.period', 'employee_task_id', 'Periods',
            readonly=True, states=ro_states_pending),
        'hours': fields.function(
            _calculate_hours, method=True, type='float', string="Hours",
            store={
                'project.task.employee': (
                    lambda self, cr, uid, ids, c={}: ids,
                    ['work_period_ids'], 10),
                'employee.work.period': (
                    _get_task_employee, ['start_date', 'end_date'], 10),
            }),
        'price_hour': fields.float(
            'Price Per Hour', digits_compute=dp.get_precision('Sale Price'),
            readonly=True, states=ro_states_pending),
        'subtotal_hour': fields.function(
            _calculate_totals, multi='sums', type='float',
            digits_compute=dp.get_precision('Sale Price'),
            string="Subtotal (Hours)"),
        'km': fields.float('Km.', readonly=True,
                           states=ro_states_payment_pending),
        'price_km': fields.float(
            'Price per Km', digits_compute=dp.get_precision('Sale Price'),
            readonly=True, states=ro_states_payment_pending),
        'subtotal_km': fields.function(
            _calculate_totals, multi='sums', type='float',
            digits_compute=dp.get_precision('Sale Price'),
            string="Subtotal (Km)"),
        'extra_hour': fields.float('Extra Hour', readonly=True,
                                   states=ro_states_payment_pending),
        'price_extra_hour': fields.float(
            'Price per Extra Hour',
            digits_compute=dp.get_precision('Sale Price'),
            readonly=True, states=ro_states_payment_pending),
        'subtotal_extra_hour': fields.function(
            _calculate_totals, multi='sums', type='float',
            digits_compute=dp.get_precision('Sale Price'),
            string="Subtotal (Extra Hour)"),
        'diets': fields.float('Diets', readonly=True,
                              states=ro_states_payment_pending),
        'price_diet': fields.float(
            'Price per diet', digits_compute=dp.get_precision('Sale Price'),
            readonly=True, states=ro_states_payment_pending),
        'subtotal_diet': fields.function(
            _calculate_totals, multi='sums', type='float',
            digits_compute=dp.get_precision('Sale Price'),
            string='Subtotal (Diets)'),
        'other_cost': fields.float('Other Cost', readonly=True,
                                   states=ro_states_payment_pending),
        'observations': fields.text('observations', readonly=True,
                                    states=ro_states_payment_pending),
        'self_employed': fields.boolean('Self Employeed', readonly=True,
                                        states=ro_states_pending),
        'coordinator': fields.many2one(
            'hr.employee', 'Coordinator', readonly=True,
            states=ro_states_payment_pending, select=True),
        'start_date': fields.function(
            _calculate_dates, type='date', multi='sums',
            string="Start Date", select=True),
        'end_date': fields.function(
            _calculate_dates, type='date', multi='sums',
            string="End Date", select=True),
        'create_date': fields.datetime('Creation Date', readonly=True),
        'state': fields.selection(
            (('draft', 'Draft'),
             ('selected', 'Selected'),
             ('reserve', 'Reserve'),
             ('confirmed', 'Confirmed'),
             ('pending', 'Contract Pending'),
             ('discharged', 'Discharged'),
             ('worked', 'Worked'),
             ('pending_payment', 'Pending Payment'),
             ('paid', 'Paid'),
             ('cancel', 'Void')),
            'state', readonly=True, select=True),
    }
    _defaults = {
        'state': 'selected',
    }

    def copy(self, cr, uid, id, default=None, context=None):
        """Duplicate the record with workflow state and quantity fields
        reset, so the copy starts as a fresh 'selected' assignment."""
        if context is None:
            context = {}
        # BUGFIX: mutable default argument ({}) replaced by None sentinel.
        default = default or {}
        default['state'] = 'selected'
        default['work_period_ids'] = []
        default['km'] = 0.0
        default['diets'] = 0.0
        default['other_cost'] = 0.0
        default['extra_hour'] = 0.0
        return super(project_task_employee, self).copy(
            cr, uid, id, default=default, context=context)

    def contract_pending(self, cr, uid, ids, context=None):
        """Create an hr.contract for each assignment and move it to the
        'pending' (contract pending) state."""
        contract_obj = self.pool.get('hr.contract')
        for emp in self.browse(cr, uid, ids, context=context):
            contract_val = {
                'name': "%s - %s" % (emp.task_id.name, emp.employee_id.name),
                'employee_id': emp.employee_id.id,
                # NOTE(review): wage is set to the hours subtotal — confirm
                # this is the intended contract wage semantics.
                'wage': emp.subtotal_hour,
                'date_start': emp.start_date,
                'date_end': emp.end_date,
            }
            contract_obj.create(cr, uid, contract_val)
        self.write(cr, uid, ids, {'state': 'pending'})
        return True

    def discharge_employee(self, cr, uid, ids, context=None):
        self.write(cr, uid, ids, {'state': 'discharged'})
        return True

    def action_cancel(self, cr, uid, ids, context=None):
        self.write(cr, uid, ids, {'state': 'cancel'}, context=context)
        return True

    def unlink(self, cr, uid, ids, context=None):
        """Only draft/cancelled assignments may be deleted; raise on any
        other state."""
        task_employee = self.read(cr, uid, ids, ['state'], context=context)
        unlink_ids = []
        for t in task_employee:
            if t['state'] in ['draft', 'cancel']:
                unlink_ids.append(t['id'])
            else:
                raise osv.except_osv(
                    _('Invalid action !'),
                    _('In order to delete an employee record, you must cancel it before !'))
        return osv.osv.unlink(self, cr, uid, unlink_ids, context=context)

    def button_dummy(self, cr, uid, ids, context=None):
        # No-op button handler referenced from the view.
        return True
class agreement(osv.osv):
    """Recurring orders agreement.

    An agreement holds a customer, a term/prolongation policy and a set of
    agreement lines; sale orders are generated periodically from those lines
    (see generate_agreement_orders / create_order) and confirmed through the
    OpenERP workflow service.
    """
    _name = 'sale.recurring_orders.agreement'
    _inherit = ['mail.thread']
    _description = "Recurring orders agreement"

    def __get_next_term_date(self, date, unit, interval):
        """ Get the date that results on incrementing given date an interval of time in time unit.
        @param date: Original date.
        @param unit: Interval time unit.
        @param interval: Quantity of the time unit.
        @rtype: date
        @return: The date incremented in 'interval' units of 'unit'.
        """
        # NOTE(review): returns None for any unit outside the four handled
        # values -- callers always pass one of 'days'/'weeks'/'months'/'years'
        # (the prolong_unit selection), so that branch should be unreachable.
        if unit == 'days':
            return date + timedelta(days=interval)
        elif unit == 'weeks':
            return date + timedelta(weeks=interval)
        elif unit == 'months':
            return date + relativedelta(months=interval)
        elif unit == 'years':
            return date + relativedelta(years=interval)

    def __get_next_expiration_date(self, cr, uid, ids, field_name, arg, context=None):
        """ Get next expiration date of the agreement. For unlimited agreements, get max date

        Function-field getter for 'next_expiration_date'.
        @rtype: dict
        @return: agreement id -> next expiration date.
        """
        if not ids:
            return {}
        res = {}
        for agreement in self.browse(cr, uid, ids):
            if agreement.prolong == 'fixed':
                # Fixed term: the expiration is simply the configured end date.
                res[agreement.id] = agreement.end_date
            elif agreement.prolong == 'unlimited':
                # Unlimited: walk forward from start_date in prolongation
                # steps until we pass 'now'; that step boundary is the next
                # (virtual) expiration.
                now = datetime.now()
                date = self.__get_next_term_date(
                    datetime.strptime(agreement.start_date, "%Y-%m-%d"),
                    agreement.prolong_unit, agreement.prolong_interval)
                while (date < now):
                    date = self.__get_next_term_date(
                        date, agreement.prolong_unit,
                        agreement.prolong_interval)
                res[agreement.id] = date
            else:
                # for renewable fixed term: one prolongation step after the
                # last renovation date (or the start date if never renewed).
                res[agreement.id] = self.__get_next_term_date(datetime.strptime( \
                    agreement.last_renovation_date if agreement.last_renovation_date else agreement.start_date, "%Y-%m-%d"), \
                    agreement.prolong_unit, agreement.prolong_interval)
        return res

    _columns = {
        'name': fields.selection([('Clasea', 'Clase A'), ('claseb', 'Clase B'),
                                  ('clasec', 'Clase C'), ('canje', 'CANJE')],
                                 string="Nombre", required=True,
                                 help='Name that helps to identify the agreement'),
        'number': fields.char(
            'Agreement number', select=1, size=32,
            help="Number of agreement. Keep empty to get the number assigned by a sequence."),
        'active': fields.boolean(
            'Active', help='Unchecking this field, quotas are not generated'),
        'partner_id': fields.many2one(
            'res.partner', 'Customer', select=1, change_default=True,
            required=True, help="Customer you are making the agreement with"),
        'company_id': fields.many2one(
            'res.company', 'Company', required=True,
            help="Company that signs the agreement"),
        'start_date': fields.date(
            'Start date', select=1,
            help="Beginning of the agreement. Keep empty to use the current date"),
        'prolong': fields.selection(
            [('recurrent', 'Anual renovable'), ('unlimited', 'Vitalicia'),
             ('fixed', 'Fixed term')], 'Prolongation',
            help="Sets the term of the agreement. 'Renewable fixed term': It sets a fixed term, but with possibility of manual renew; 'Unlimited term': Renew is made automatically; 'Fixed term': The term is fixed and there is no possibility to renew.",
            required=True),
        'end_date': fields.date('End date', help="End date of the agreement"),
        'prolong_interval': fields.integer(
            'Interval',
            help="Interval in time units to prolong the agreement until new renewable (that is automatic for unlimited term, manual for renewable fixed term)."),
        'prolong_unit': fields.selection(
            [('days', 'days'), ('weeks', 'weeks'), ('months', 'months'),
             ('years', 'years')], 'Intervalo de renovacion',
            help='Time unit for the prolongation interval'),
        'agreement_line': fields.one2many(
            'sale.recurring_orders.agreement.line', 'agreement_id',
            'Agreement lines'),
        'order_line': fields.one2many(
            'sale.recurring_orders.agreement.order', 'agreement_id',
            'Order lines', readonly=True),
        'renewal_line': fields.one2many(
            'sale.recurring_orders.agreement.renewal', 'agreement_id',
            'Renewal lines', readonly=True),
        'last_renovation_date': fields.date(
            'Last renovation date',
            help="Last date when agreement was renewed (same as start date if not renewed)"),
        'next_expiration_date': fields.function(
            __get_next_expiration_date, string='Next expiration date',
            type='date', method=True, store=True),
        # TODO: Add the possibility to keep following when an invoice is
        # generated, using _track = {}
        'state': fields.selection(
            [('empty', 'Without orders'), ('first', 'First order created'),
             ('orders', 'With orders')], 'State', readonly=True),
        'renewal_state': fields.selection(
            [('not_renewed', 'Agreement not renewed'),
             ('renewed', 'Agreement renewed')], 'Renewal state',
            readonly=True),
        'notes': fields.text('Observaciones'),
        'usuario_id': fields.many2one(
            'res.users', 'Vendedor', help="Seleccione el Nombre del Vendedor"),
        'numeromeses': fields.integer(
            string="Anticipo de Cuotas Mensuales",
            help="anticipo de cuotas Mensuales."),
        'fecha_ic': fields.date(
            'Fecha de Inscripcion',
            help="Fecha en la que el contrato fue realizado"),
        'excusas_id': fields.one2many(
            'excusa', 'recurrente_id', string="Excusas",
            help="Historial de Excusas dejadas por los clientes"),
    }
    _defaults = {
        'active': lambda *a: 1,
        'company_id': lambda s, cr, uid, c:
            s.pool.get('res.company')._company_default_get(
                cr, uid, 'sale', context=c),
        'prolong': lambda *a: 'unlimited',
        'prolong_interval': lambda *a: 1,
        'prolong_unit': lambda *a: 'years',
        'state': lambda *a: 'empty',
        'renewal_state': lambda *a: 'not_renewed',
        'numeromeses': 1,
    }
    _sql_constraints = [
        ('number_uniq', 'unique(number)', 'Agreement number must be unique !'),
        # One agreement per customer.
        ('name_partner_unique', 'unique(partner_id)',
         'El cliente ya posee un contrato'),
    ]

    def _check_related(self, cr, uid, ids):
        # Returns True as soon as one of the given agreements has exactly one
        # agreement line, False otherwise.
        # NOTE(review): not referenced from _constraints in the visible code;
        # presumably kept for a constraint defined elsewhere -- verify.
        for i in self.browse(cr, uid, ids):
            if (len(i.agreement_line) == 1):
                return True
        return False

    def _check_dates(self, cr, uid, ids, context=None):
        """ Check correct dates. When prolongation is unlimited or renewal,
        end_date is False, so doesn't apply
        @rtype: boolean
        @return: True if dates are correct or don't apply, False otherwise
        """
        if context == None:
            context = {}
        agreements = self.browse(cr, uid, ids, context=context)
        val = True
        for agreement in agreements:
            if agreement.end_date:
                # String comparison works here because dates are ISO-formatted.
                val = val and agreement.end_date > agreement.start_date
        return val

    _constraints = [
        (_check_dates,
         'Agreement end date must be greater than start date',
         ['start_date', 'end_date']),
    ]

    def create(self, cr, uid, vals, context=None):
        # Set start date if empty
        if not vals.get('start_date'):
            vals['start_date'] = datetime.now()
        # Set agreement number if empty
        if not vals.get('number'):
            vals['number'] = self.pool.get('ir.sequence').get(
                cr, uid, 'sale.r_o.agreement.sequence')
        return super(agreement, self).create(cr, uid, vals, context=context)

    def write(self, cr, uid, ids, vals, context=None):
        value = super(agreement, self).write(cr, uid, ids, vals,
                                             context=context)
        # unlink all future orders when any field that affects future order
        # generation has been modified
        if vals.has_key('active') or vals.has_key('number') or (
                vals.has_key('agreement_line') and len(vals['agreement_line'])) \
                or vals.has_key('prolong') or vals.has_key('end_date') \
                or vals.has_key('prolong_interval') or vals.has_key('prolong_unit'):
            self.unlink_orders(cr, uid, ids, datetime.date(datetime.now()),
                               context)
        return value

    def copy(self, cr, uid, orig_id, default={}, context=None):
        # NOTE(review): mutable default argument 'default={}' is mutated below
        # and shared across calls -- harmless only while callers pass their
        # own dict; consider default=None in a behavior-changing revision.
        if context is None:
            context = {}
        agreement_record = self.browse(cr, uid, orig_id)
        # Reset state and generated-order data on the duplicate.
        default.update({
            'state': 'empty',
            'number': False,
            'active': True,
            'name': '%s*' % agreement_record['name'],
            'start_date': False,
            'order_line': [],
            'renewal_line': [],
        })
        return super(agreement, self).copy(cr, uid, orig_id, default, context)

    def unlink(self, cr, uid, ids, context=None):
        """Delete agreements, refusing any that has a confirmed order."""
        unlink_ids = []
        for agreement in self.browse(cr, uid, ids, context=context):
            confirmedOrders = False
            for order_line in agreement.order_line:
                if order_line.confirmed:
                    confirmedOrders = True
            if not confirmedOrders:
                unlink_ids.append(agreement.id)
            else:
                raise osv.except_osv(
                    _('Invalid action!'),
                    _('You cannot remove agreements with confirmed orders!'))
        # Remove the (unconfirmed) generated orders before the agreements.
        self.unlink_orders(cr, uid, unlink_ids,
                           datetime.date(datetime.now()), context=context)
        return osv.osv.unlink(self, cr, uid, unlink_ids, context=context)

    def onchange_start_date(self, cr, uid, ids, start_date=False):
        """ It changes last renovation date to the new start date.
        @rtype: dictionary
        @return: field last_renovation_date with new start date
        """
        if not start_date:
            return {}
        result = {}
        result['value'] = {'last_renovation_date': start_date}
        return result

    def revise_agreements_expirations_planned(self, cr, uid, context={}):
        """ Check each active agreement to see if the end is near

        Intended to be run from a scheduled action (ir.cron).
        """
        ids = self.search(cr, uid, [])
        revise_ids = []
        for agreement in self.browse(cr, uid, ids, context=context):
            if not agreement.active:
                continue
            next_expiration_date = datetime.date(
                datetime.strptime(agreement.next_expiration_date, "%Y-%m-%d"))
            if agreement.prolong == 'unlimited' and \
                    next_expiration_date <= datetime.date(datetime.now()):
                # add to a list for reviewing expiration date
                revise_ids.append(agreement.id)
        if revise_ids:
            # force recalculate next_expiration_date (empty write triggers the
            # stored function field)
            self.write(cr, uid, revise_ids, {}, context=context)
        return True

    def create_order(self, cr, uid, agreement, date, agreement_lines,
                     confirmed_flag, context={}):
        """ Method that creates an order from given data.
        @param agreement: Agreement method get data from.
        @param date: Date of created order.
        @param agreement_lines: Lines that will generate order lines.
        @confirmed_flag: Confirmed flag in agreement order line will be set
            to this value.
        @return: id of the created sale.order.
        """
        order_obj = self.pool.get('sale.order')
        order_line_obj = self.pool.get('sale.order.line')
        # Create order object
        # NOTE(review): mutates the caller's context dict ('context={}'
        # default is also shared) -- verify before refactoring.
        context['company_id'] = agreement.company_id.id
        order = {
            'date_order': date.strftime('%Y-%m-%d'),
            'date_confirm': date.strftime('%Y-%m-%d'),
            'origin': agreement.number,
            'partner_id': agreement.partner_id.id,
            'state': 'draft',
            'company_id': agreement.company_id.id,
            'from_agreement': True,
        }
        # Get other order values from agreement partner
        order.update(sale.sale.sale_order.onchange_partner_id(
            order_obj, cr, uid, [], agreement.partner_id.id)['value'])
        order['user_id'] = agreement.partner_id.user_id.id
        order_id = order_obj.create(cr, uid, order, context=context)
        # Create order lines objects
        agreement_lines_ids = []
        for agreement_line in agreement_lines:
            order_line = {
                'cliente_linea': agreement_line.cliente_linea_agreement.id,
                'order_id': order_id,
                'product_id': agreement_line.product_id.id,
                'product_uom_qty': agreement_line.quantity,
                'discount': agreement_line.discount,
            }
            # get other order line values from agreement line product
            order_line.update(sale.sale.sale_order_line.product_id_change(order_line_obj, cr, uid, [], order['pricelist_id'], \
                product=agreement_line.product_id.id, qty=agreement_line.quantity, partner_id=agreement.partner_id.id, fiscal_position=order['fiscal_position'])['value'])
            # Put line taxes (many2many 'replace' command)
            order_line['tax_id'] = [(6, 0, tuple(order_line['tax_id']))]
            # Put custom description
            if agreement_line.additional_description:
                order_line['name'] += " " + agreement_line.additional_description
            order_line_obj.create(cr, uid, order_line, context=context)
            agreement_lines_ids.append(agreement_line.id)
        # Update last order date for lines
        self.pool.get('sale.recurring_orders.agreement.line').write(
            cr, uid, agreement_lines_ids,
            {'last_order_date': date.strftime('%Y-%m-%d')}, context=context)
        # Update agreement state
        if agreement.state != 'orders':
            self.pool.get('sale.recurring_orders.agreement').write(
                cr, uid, [agreement.id], {'state': 'orders'}, context=context)
        # Create order agreement record (link between agreement and order)
        agreement_order = {
            'agreement_id': agreement.id,
            'order_id': order_id,
        }
        self.pool.get('sale.recurring_orders.agreement.order').create(
            cr, uid, agreement_order, context=context)
        return order_id

    def _order_created(self, cr, uid, agreement, agreement_lines_ordered,
                       order_id, context={}):
        """ It triggers actions after order is created.
        This method can be overriden for extending its functionality thanks
        to its parameters.
        @param agreement: Agreement object whose order has been created
        @param agreement_lines_ordered: List of agreement lines objects used
            in the creation of the order.
        @param order_id: ID of the created order.
        """
        pass

    def _order_confirmed(self, cr, uid, agreement, order_id, context={}):
        """ It triggers actions after order is confirmed.
        This method can be overriden for extending its functionality thanks
        to its parameters.
        @param agreement: Agreement object whose order has been confirmed
        @param order_id: ID of the confirmed order.
        """
        pass

    def _get_next_order_date(self, agreement, line, startDate, context={}):
        """ Get next date starting from given date when an order is generated.
        @param line: Agreement line
        @param startDate: Start date from which next order date is calculated.
        @rtype: datetime
        @return: Next order date starting from the given date.
        """
        # Begin at the first day of the month after the agreement start, then
        # step by the line's ordering interval until we pass startDate.
        next_date = datetime.strptime(
            agreement.start_date, '%Y-%m-%d') + relativedelta(day=1, months=1)
        while next_date <= startDate:
            next_date = self.__get_next_term_date(
                next_date, line.ordering_unit, line.ordering_interval)
        return next_date

    def generate_agreement_orders(self, cr, uid, agreement, startDate,
                                  endDate, context={}):
        """ Check if there is any pending order to create for given agreement.

        Creates one order per date for all lines due on that date, within
        [startDate, endDate), capped at the agreement expiration for
        non-unlimited agreements.
        """
        if not agreement.active:
            return
        lines_to_order = {}
        agreement_expiration_date = datetime.strptime(
            agreement.next_expiration_date, '%Y-%m-%d')
        if (agreement_expiration_date < endDate) and (agreement.prolong != 'unlimited'):
            endDate = agreement_expiration_date
        for line in agreement.agreement_line:
            # Check if there is any agreement line to order
            if line.active_chk:
                # Check future orders for this line until endDate
                next_order_date = self._get_next_order_date(
                    agreement, line, startDate)
                while next_order_date < endDate:
                    # Add to a list to order all lines together
                    if not lines_to_order.get(next_order_date):
                        lines_to_order[next_order_date] = []
                    lines_to_order[next_order_date].append(line)
                    next_order_date = self._get_next_order_date(
                        agreement, line, next_order_date)
        # Order all pending lines
        dates = lines_to_order.keys()
        dates.sort()
        agreement_order_obj = self.pool.get(
            'sale.recurring_orders.agreement.order')
        for date in dates:
            # Check if an order exists for that date
            if not len(
                    agreement_order_obj.search(
                        cr, uid, [('date', '=', date),
                                  ('agreement_id', '=', agreement['id'])])):
                # create it if not exists
                order_id = self.create_order(cr, uid, agreement, date,
                                             lines_to_order[date], False,
                                             context=context)
                # Call 'event' method
                # NOTE(review): passes the whole lines_to_order dict, while
                # the hook documents a list of lines (presumably
                # lines_to_order[date]); default hook is a no-op so this is
                # latent -- confirm against overrides before fixing.
                self._order_created(cr, uid, agreement, lines_to_order,
                                    order_id, context=context)

    def generate_initial_order(self, cr, uid, ids, context={}):
        """ Method that creates an initial order with all the agreement lines

        Confirms the created order through the workflow and returns an action
        dictionary opening it.
        """
        agreement = self.browse(cr, uid, ids, context=context)[0]
        agreement_lines = []
        # Add only active lines
        for line in agreement.agreement_line:
            if line.active_chk:
                agreement_lines.append(line)
        order_id = self.create_order(
            cr, uid, agreement,
            datetime.strptime(agreement.start_date, '%Y-%m-%d'),
            agreement_lines, True, context=context)
        # Update agreement state
        self.write(cr, uid, agreement.id, {'state': 'first'}, context=context)
        # Confirm order
        wf_service = netsvc.LocalService("workflow")
        wf_service.trg_validate(uid, 'sale.order', order_id, 'order_confirm',
                                cr)
        # Get view to show
        data_obj = self.pool.get('ir.model.data')
        result = data_obj._get_id(cr, uid, 'sale', 'view_order_form')
        view_id = data_obj.browse(cr, uid, result).res_id
        # Return view with order created
        return {
            'domain': "[('id','=', " + str(order_id) + ")]",
            'view_type': 'form',
            'view_mode': 'form',
            'res_model': 'sale.order',
            'context': context,
            'res_id': order_id,
            'view_id': [view_id],
            'type': 'ir.actions.act_window',
            'nodestroy': True
        }

    def generate_next_year_orders_planned(self, cr, uid, context={}):
        """ Check if there is any pending order to create for each agreement.

        Scheduled-action entry point over all active agreements.
        """
        if context is None:
            context = {}
        ids = self.search(cr, uid, [('active', '=', True)])
        self.generate_next_year_orders(cr, uid, ids, context)

    def generate_next_year_orders(self, cr, uid, ids, context={}):
        """ Method that generates all the orders of the given agreements for
        the next year, counting from current date.
        """
        result = {}
        for meses in self.browse(cr, uid, ids, context=context):
            result[meses.id] = meses.numeromeses
            # Window: from today to today + numeromeses months.
            # NOTE(review): startDate/endDate are rebound each iteration, so
            # the loop below uses the window of the LAST agreement for all of
            # them -- presumably unintended; confirm before changing.
            startDate = datetime.now()
            endDate = datetime(
                startDate.year, startDate.month,
                startDate.day) + relativedelta(months=result[meses.id])
        for agreement in self.browse(cr, uid, ids, context=context):
            self.generate_agreement_orders(cr, uid, agreement, startDate,
                                           endDate, context)
        return True

    def confirm_current_orders_planned(self, cr, uid, context={}):
        """Confirm every generated order whose date has been reached.

        Scheduled-action entry point; marks the agreement order link as
        confirmed after pushing the workflow signal.
        """
        if context is None:
            context = {}
        ids = self.search(cr, uid, [])
        now = datetime.now()
        wf_service = netsvc.LocalService("workflow")
        for agreement in self.browse(cr, uid, ids, context=context):
            for agreement_order in agreement.order_line:
                if datetime.strptime(
                        agreement_order['date'],
                        '%Y-%m-%d') <= now and not agreement_order.confirmed:
                    order = agreement_order.order_id
                    if order:
                        wf_service.trg_validate(uid, 'sale.order', order.id,
                                                'order_confirm', cr)
                        self._order_confirmed(cr, uid, agreement, order.id,
                                              context)
                        # NOTE(review): writes the string 'True' (not the
                        # boolean) into a presumably boolean field; truthy
                        # either way -- verify field type before changing.
                        self.pool.get(
                            'sale.recurring_orders.agreement.order').write(
                                cr, uid, agreement_order.id,
                                {'confirmed': 'True'}, context=context)

    def unlink_orders(self, cr, uid, ids, startDate, context={}):
        """ Remove generated orders from given date.

        Deletes both the agreement/order link records and the unconfirmed
        sale orders themselves.
        """
        agreement_order_obj = self.pool.get(
            'sale.recurring_orders.agreement.order')
        ordersToRemove = []
        for agreement in self.browse(cr, uid, ids, context=context):
            for order in agreement['order_line']:
                order_date = datetime.date(
                    datetime.strptime(order['date'], '%Y-%m-%d'))
                if order_date >= startDate and not order.confirmed:
                    if order.order_id.id:
                        ordersToRemove.append(order.order_id.id)
                    agreement_order_obj.unlink(cr, uid, order['id'], context)
        self.pool.get('sale.order').unlink(cr, uid, ordersToRemove, context)
class partner_vat_intra(osv.osv_memory):
    """ Partner Vat Intra

    Wizard that collects intracom (intra-EU) VAT amounts per partner from
    account move lines and renders them as the Belgian Intervat XML
    declaration.
    """
    _name = "partner.vat.intra"
    _description = 'Partner VAT Intra'

    def _get_xml_data(self, cr, uid, context=None):
        # Default for 'file_save': re-encode the XML previously stashed in
        # the context by create_xml(), base64-encoded for the binary field.
        # NOTE(review): no guard for context=None before context.get --
        # presumably always called with a context dict; verify.
        if context.get('file_save', False):
            return base64.encodestring(context['file_save'].encode('utf8'))
        return ''

    def _get_europe_country(self, cursor, user, context=None):
        # Default for 'country_ids': ids of the EU member countries.
        return self.pool.get('res.country').search(cursor, user, [
            ('code', 'in', [
                'AT', 'BG', 'CY', 'CZ', 'DK', 'EE', 'FI', 'FR', 'DE', 'GR',
                'HU', 'IE', 'IT', 'LV', 'LT', 'LU', 'MT', 'NL', 'PL', 'PT',
                'RO', 'SK', 'SI', 'ES', 'SE', 'GB'
            ])
        ])

    _columns = {
        'name': fields.char('File Name', size=32),
        'period_code': fields.char(
            'Period Code', size=6, required=True,
            help='''This is where you have to set the period code for the intracom declaration using the format: ppyyyy
      PP can stand for a month: from '01' to '12'.
      PP can stand for a trimester: '31','32','33','34'
          The first figure means that it is a trimester,
          The second figure identify the trimester.
      PP can stand for a complete fiscal year: '00'.
      YYYY stands for the year (4 positions).
    '''),
        'period_ids': fields.many2many(
            'account.period', 'account_period_rel', 'acc_id', 'period_id',
            'Period (s)',
            help='Select here the period(s) you want to include in your intracom declaration'),
        'tax_code_id': fields.many2one(
            'account.tax.code', 'Company',
            domain=[('parent_id', '=', False)],
            help="Keep empty to use the user's company", required=True),
        'test_xml': fields.boolean(
            'Test XML file', help="Sets the XML output as test file"),
        'mand_id': fields.char(
            'Reference', size=14,
            help="Reference given by the Representative of the sending company."),
        'msg': fields.text('File created', size=14, readonly=True),
        'no_vat': fields.text(
            'Partner With No VAT', size=14, readonly=True,
            help="The Partner whose VAT number is not defined they doesn't include in XML File."),
        'file_save': fields.binary('Save File', readonly=True),
        'country_ids': fields.many2many(
            'res.country', 'vat_country_rel', 'vat_id', 'country_id',
            'European Countries'),
        'comments': fields.text('Comments'),
    }

    def _get_tax_code(self, cr, uid, context=None):
        # Default for 'tax_code_id': first root tax code of the user's
        # company, or False when none exists.
        obj_tax_code = self.pool.get('account.tax.code')
        obj_user = self.pool.get('res.users')
        company_id = obj_user.browse(cr, uid, uid,
                                     context=context).company_id.id
        tax_code_ids = obj_tax_code.search(cr, uid,
                                           [('company_id', '=', company_id),
                                            ('parent_id', '=', False)],
                                           context=context)
        return tax_code_ids and tax_code_ids[0] or False

    _defaults = {
        'country_ids': _get_europe_country,
        'file_save': _get_xml_data,
        'name': 'vat_intra.xml',
        'tax_code_id': _get_tax_code,
    }

    def _get_datas(self, cr, uid, ids, context=None):
        """Collects require data for vat intra xml
        :param ids: id of wizard.
        :return: dict of all data to be used to generate xml for Partner VAT Intra.
        :rtype: dict
        """
        if context is None:
            context = {}
        obj_user = self.pool.get('res.users')
        obj_sequence = self.pool.get('ir.sequence')
        obj_partner = self.pool.get('res.partner')
        obj_partner_add = self.pool.get('res.partner.address')
        xmldict = {}
        post_code = street = city = country = data_clientinfo = ''
        seq = amount_sum = 0
        wiz_data = self.browse(cr, uid, ids[0], context=context)
        comments = wiz_data.comments
        # The declaring company comes from the selected tax code, falling
        # back to the current user's company.
        if wiz_data.tax_code_id:
            data_company = wiz_data.tax_code_id.company_id
        else:
            data_company = obj_user.browse(cr, uid, uid,
                                           context=context).company_id
        # Get Company vat
        company_vat = data_company.partner_id.vat
        if not company_vat:
            raise osv.except_osv(
                _('Data Insufficient'),
                _('No VAT Number Associated with Main Company!'))
        company_vat = company_vat.replace(' ', '').upper()
        issued_by = company_vat[:2]
        # Period code must be exactly 'ppyyyy' (see field help).
        if len(wiz_data.period_code) != 6:
            raise osv.except_osv(
                _('Wrong Period Code'),
                _('The period code you entered is not valid.'))
        if not wiz_data.period_ids:
            raise osv.except_osv(_('Data Insufficient!'),
                                 _('Please select at least one Period.'))
        p_id_list = obj_partner.search(cr, uid, [('vat', '!=', False)],
                                       context=context)
        if not p_id_list:
            raise osv.except_osv(
                _('Data Insufficient!'),
                _('No partner has a VAT Number asociated with him.'))
        # Declarant reference: company VAT digits + last 4 of the sequence.
        seq_declarantnum = obj_sequence.get(cr, uid, 'declarantnum')
        dnum = company_vat[2:] + seq_declarantnum[-4:]
        addr = obj_partner.address_get(cr, uid, [data_company.partner_id.id],
                                       ['invoice'])
        email = data_company.partner_id.email or ''
        phone = data_company.partner_id.phone or ''
        # Pull street/city/zip/country from the company's invoice address.
        if addr.get('invoice', False):
            ads = obj_partner_add.browse(cr, uid, [addr['invoice']])[0]
            city = (ads.city or '')
            post_code = (ads.zip or '')
            if ads.street:
                street = ads.street
            if ads.street2:
                street += ' '
                street += ads.street2
            if ads.country_id:
                country = ads.country_id.code
        if not country:
            # Fall back to the country prefix of the company VAT number.
            country = company_vat[:2]
        if not email:
            raise osv.except_osv(
                _('Data Insufficient!'),
                _('No email address associated with the company.'))
        if not phone:
            raise osv.except_osv(_('Data Insufficient!'),
                                 _('No phone associated with the company.'))
        xmldict.update({
            'company_name': data_company.name,
            'company_vat': company_vat,
            'vatnum': company_vat[2:],
            'mand_id': wiz_data.mand_id,
            'sender_date': str(time.strftime('%Y-%m-%d')),
            'street': street,
            'city': city,
            'post_code': post_code,
            'country': country,
            'email': email,
            # Phone stripped of separators for the XML file.
            'phone': phone.replace('/', '').replace('.', '').replace(
                '(', '').replace(')', '').replace(' ', ''),
            'period': wiz_data.period_code,
            'clientlist': [],
            'comments': comments,
            'issued_by': issued_by,
        })
        # Sum intracom amounts per partner and per intra code; the '48s*'
        # correction codes are folded (negated) into their base codes.
        codes = ('44', '46L', '46T', '48s44', '48s46L', '48s46T')
        cr.execute(
            '''SELECT p.name As partner_name, l.partner_id AS partner_id, p.vat AS vat,
                      (CASE WHEN t.code = '48s44' THEN '44'
                            WHEN t.code = '48s46L' THEN '46L'
                            WHEN t.code = '48s46T' THEN '46T'
                            ELSE t.code END) AS intra_code,
                      SUM(CASE WHEN t.code in ('48s44','48s46L','48s46T') THEN -l.tax_amount ELSE l.tax_amount END) AS amount
               FROM account_move_line l
               LEFT JOIN account_tax_code t ON (l.tax_code_id = t.id)
               LEFT JOIN res_partner p ON (l.partner_id = p.id)
               WHERE t.code IN %s
                AND l.period_id IN %s
                AND t.company_id = %s
               GROUP BY p.name, l.partner_id, p.vat, intra_code''',
            (codes, tuple([p.id for p in wiz_data.period_ids]),
             data_company.id))
        p_count = 0
        for row in cr.dictfetchall():
            if not row['vat']:
                row['vat'] = ''
                p_count += 1
            seq += 1
            amt = row['amount'] or 0.0
            amount_sum += amt
            # Map the tax code to the single-letter Intervat code.
            intra_code = row['intra_code'] == '44' and 'S' or (
                row['intra_code'] == '46L' and 'L' or
                (row['intra_code'] == '46T' and 'T' or ''))
            xmldict['clientlist'].append({
                'partner_name': row['partner_name'],
                'seq': seq,
                'vatnum': row['vat'][2:].replace(' ', '').upper(),
                'vat': row['vat'],
                'country': row['vat'][:2],
                'amount': amt,
                'intra_code': row['intra_code'],
                'code': intra_code
            })
        xmldict.update({
            'dnum': dnum,
            'clientnbr': str(seq),
            'amountsum': amount_sum,
            'partner_wo_vat': p_count
        })
        return xmldict

    def create_xml(self, cursor, user, ids, context=None):
        """Creates xml that is to be exported and sent to estate for partner vat intra.
        :return: Value for next action.
        :rtype: dict
        """
        mod_obj = self.pool.get('ir.model.data')
        xml_data = self._get_datas(cursor, user, ids, context=context)
        month_quarter = xml_data['period'][:2]
        year = xml_data['period'][2:]
        data_file = ''
        # Can't we do this by etree?
        data_head = """<?xml version="1.0" encoding="ISO-8859-1"?>
<ns2:IntraConsignment xmlns="http://www.minfin.fgov.be/InputCommon" xmlns:ns2="http://www.minfin.fgov.be/IntraConsignment" IntraListingsNbr="1">
	<ns2:Representative>
		<RepresentativeID identificationType="NVAT" issuedBy="%(issued_by)s">%(company_vat)s</RepresentativeID>
		<Name>%(company_name)s</Name>
		<Street>%(street)s</Street>
		<PostCode>%(post_code)s</PostCode>
		<City>%(city)s</City>
		<CountryCode>%(country)s</CountryCode>
		<EmailAddress>%(email)s</EmailAddress>
		<Phone>%(phone)s</Phone>
	</ns2:Representative>""" % (xml_data)
        if xml_data['mand_id']:
            data_head += '\n\t\t<ns2:RepresentativeReference>%(mand_id)s</ns2:RepresentativeReference>' % (
                xml_data)
        data_comp_period = '\n\t\t<ns2:Declarant>\n\t\t\t<VATNumber>%(vatnum)s</VATNumber>\n\t\t\t<Name>%(company_name)s</Name>\n\t\t\t<Street>%(street)s</Street>\n\t\t\t<PostCode>%(post_code)s</PostCode>\n\t\t\t<City>%(city)s</City>\n\t\t\t<CountryCode>%(country)s</CountryCode>\n\t\t\t<EmailAddress>%(email)s</EmailAddress>\n\t\t\t<Phone>%(phone)s</Phone>\n\t\t</ns2:Declarant>' % (
            xml_data)
        # Period element: '3x' = quarter x, '00' = whole fiscal year,
        # otherwise a month.
        if month_quarter.startswith('3'):
            data_comp_period += '\n\t\t<ns2:Period>\n\t\t\t<ns2:Quarter>' + month_quarter[
                1] + '</ns2:Quarter> \n\t\t\t<ns2:Year>' + year + '</ns2:Year>\n\t\t</ns2:Period>'
        elif month_quarter.startswith('0') and month_quarter.endswith('0'):
            data_comp_period += '\n\t\t<ns2:Period>\n\t\t\t<ns2:Year>' + year + '</ns2:Year>\n\t\t</ns2:Period>'
        else:
            data_comp_period += '\n\t\t<ns2:Period>\n\t\t\t<ns2:Month>' + month_quarter + '</ns2:Month> \n\t\t\t<ns2:Year>' + year + '</ns2:Year>\n\t\t</ns2:Period>'
        data_clientinfo = ''
        for client in xml_data['clientlist']:
            if not client['vatnum']:
                raise osv.except_osv(
                    _('Data Insufficient!'),
                    _('No vat number defined for %s') % client['partner_name'])
            data_clientinfo += '\n\t\t<ns2:IntraClient SequenceNumber="%(seq)s">\n\t\t\t<ns2:CompanyVATNumber issuedBy="%(country)s">%(vatnum)s</ns2:CompanyVATNumber>\n\t\t\t<ns2:Code>%(code)s</ns2:Code>\n\t\t\t<ns2:Amount>%(amount)s</ns2:Amount>\n\t\t</ns2:IntraClient>' % (
                client)
        data_decl = '\n\t<ns2:IntraListing SequenceNumber="1" ClientsNbr="%(clientnbr)s" DeclarantReference="%(dnum)s" AmountSum="%(amountsum)s">' % (
            xml_data)
        data_file += data_head + data_decl + data_comp_period + data_clientinfo + '\n\t\t<ns2:Comment>%(comments)s</ns2:Comment>\n\t</ns2:IntraListing>\n</ns2:IntraConsignment>' % (
            xml_data)
        # Stash the generated XML in the context; _get_xml_data picks it up
        # as the default of 'file_save' when the save view opens.
        context['file_save'] = data_file
        model_data_ids = mod_obj.search(cursor, user,
                                        [('model', '=', 'ir.ui.view'),
                                         ('name', '=', 'view_vat_intra_save')],
                                        context=context)
        resource_id = mod_obj.read(cursor, user, model_data_ids,
                                   fields=['res_id'],
                                   context=context)[0]['res_id']
        # Re-open the wizard on the 'save file' view.
        return {
            'name': _('Save'),
            'context': context,
            'view_type': 'form',
            'view_mode': 'form',
            'res_model': 'partner.vat.intra',
            'views': [(resource_id, 'form')],
            'view_id': 'view_vat_intra_save',
            'type': 'ir.actions.act_window',
            'target': 'new',
        }

    def preview(self, cr, uid, ids, context=None):
        """Return a report action rendering the collected data on paper."""
        xml_data = self._get_datas(cr, uid, ids, context=context)
        datas = {'ids': [], 'model': 'partner.vat.intra', 'form': xml_data}
        return {
            'type': 'ir.actions.report.xml',
            'report_name': 'partner.vat.intra.print',
            'datas': datas,
        }
class document_file(osv.osv):
    # Extends ir.attachment so that every attachment lives inside a
    # document.directory tree and its payload flows through the document
    # "nodes" abstraction (and possibly an external document.storage
    # backend) instead of being a plain base64 column.
    _inherit = 'ir.attachment'
    _rec_name = 'datas_fname'

    def _attach_parent_id(self, cr, uid, ids=None, context=None):
        """Migrate ir.attachments to the document module.

        When the 'document' module is loaded on a db that has had plain
        attachments, they will need to be attached to some parent folder,
        and be converted from base64-in-bytea to raw-in-bytea format.
        This function performs the internal migration, once and forever,
        for these attachments. It cannot be done through the nominal ORM
        maintenance code, because the root folder is only created after
        the document_data.xml file is loaded.
        It also establishes the parent_id NOT NULL constraint that
        ir.attachment should have had (but would have failed if plain
        attachments contained null values).
        It also updates the File Size for the previously created
        attachments.
        """
        parent_id = self.pool.get('document.directory')._get_root_directory(
            cr, uid)
        if not parent_id:
            logging.getLogger('document').warning(
                "at _attach_parent_id(), still not able to set the parent!")
            return False

        # 'ids' is deliberately unsupported: the migration always covers
        # every orphaned attachment (parent_id IS NULL), never a subset.
        if ids is not None:
            raise NotImplementedError(
                "Ids is just there by convention! Don't use it yet, please.")

        # Adopt all orphan attachments under the root directory and convert
        # their payload from base64 text to raw bytea in one UPDATE.
        cr.execute("UPDATE ir_attachment " \
                   "SET parent_id = %s, db_datas = decode(encode(db_datas,'escape'), 'base64') " \
                   "WHERE parent_id IS NULL", (parent_id,))

        # Backfill file_size for pre-existing binary attachments.
        cr.execute(
            "UPDATE ir_attachment SET file_size=length(db_datas) WHERE file_size = 0 and type = 'binary'"
        )

        # From now on, every attachment must have a parent directory.
        cr.execute("ALTER TABLE ir_attachment ALTER parent_id SET NOT NULL")
        return True

    def _get_filestore(self, cr):
        # Per-database directory under the DMS root where files are stored.
        return os.path.join(DMS_ROOT_PATH, cr.dbname)

    def _data_get(self, cr, uid, ids, name, arg, context=None):
        """fields.function getter for 'datas'.

        Returns {id: base64 payload}, or {id: payload length} when the
        caller set context['bin_size'] (list views only need the size).
        """
        if context is None:
            context = {}
        fbrl = self.browse(cr, uid, ids, context=context)
        nctx = nodes.get_node_context(cr, uid, context={})
        # nctx will /not/ inherit the caller's context. Most of
        # it would be useless, anyway (like active_id, active_model,
        # bin_size etc.)
        result = {}
        bin_size = context.get('bin_size', False)
        for fbro in fbrl:
            fnode = nodes.node_file(None, None, nctx, fbro)
            if not bin_size:
                data = fnode.get_data(cr, fbro)
                result[fbro.id] = base64.encodestring(data or '')
            else:
                result[fbro.id] = fnode.get_data_len(cr, fbro)
        return result

    #
    # This code can be improved
    #
    def _data_set(self, cr, uid, id, name, value, arg, context=None):
        """fields.function setter for 'datas': decode the base64 payload
        and hand it to the node layer, which decides where to store it.
        """
        if not value:
            return True
        fbro = self.browse(cr, uid, id, context=context)
        nctx = nodes.get_node_context(cr, uid, context={})
        fnode = nodes.node_file(None, None, nctx, fbro)
        res = fnode.set_data(cr, base64.decodestring(value), fbro)
        return res

    _columns = {
        # Columns from ir.attachment:
        'create_date': fields.datetime('Date Created', readonly=True),
        'create_uid': fields.many2one('res.users', 'Creator', readonly=True),
        'write_date': fields.datetime('Date Modified', readonly=True),
        'write_uid': fields.many2one('res.users', 'Last Modification User',
                                     readonly=True),
        'res_model': fields.char('Attached Model', size=64, readonly=True,
                                 change_default=True),
        'res_id': fields.integer('Attached ID', readonly=True),
        # If ir.attachment contained any data before document is installed,
        # preserve the data, don't drop the column!
        'db_datas': fields.binary('Data', oldname='datas'),
        # Virtual payload field backed by the node layer (see _data_get /
        # _data_set above).
        'datas': fields.function(_data_get, fnct_inv=_data_set,
                                 string='File Content', type="binary",
                                 nodrop=True),
        # Fields of document:
        'user_id': fields.many2one('res.users', 'Owner', select=1),
        # 'group_ids': fields.many2many('res.groups', 'document_group_rel', 'item_id', 'group_id', 'Groups'),
        # the directory id now is mandatory. It can still be computed
        # automatically.
        'parent_id': fields.many2one('document.directory', 'Directory',
                                     select=1, required=True,
                                     change_default=True),
        'index_content': fields.text('Indexed Content'),
        'partner_id': fields.many2one('res.partner', 'Partner', select=1),
        'file_size': fields.integer('File Size', required=True),
        'file_type': fields.char('Content Type', size=128),
        # fields used for file storage
        'store_fname': fields.char('Stored Filename', size=200),
    }
    _order = "id desc"

    def __get_def_directory(self, cr, uid, context=None):
        # Default for parent_id: the root document directory.
        dirobj = self.pool.get('document.directory')
        return dirobj._get_root_directory(cr, uid, context)

    _defaults = {
        'user_id': lambda self, cr, uid, ctx: uid,
        'file_size': lambda self, cr, uid, ctx: 0,
        'parent_id': __get_def_directory
    }
    _sql_constraints = [
        # filename_uniq is not possible in pure SQL
    ]

    def _check_duplication(self, cr, uid, vals, ids=[], op='create'):
        """Return False when another attachment already has the same
        (name, parent_id, res_model, res_id) tuple, True otherwise.

        NOTE(review): 'ids=[]' is a mutable default; it is only iterated,
        never mutated, so it is harmless here — but worth cleaning up.
        """
        name = vals.get('name', False)
        parent_id = vals.get('parent_id', False)
        res_model = vals.get('res_model', False)
        res_id = vals.get('res_id', 0)
        if op == 'write':
            # For writes, fall back to each record's current values for any
            # key not present in vals.
            for file in self.browse(cr, uid, ids):  # FIXME fields_only
                if not name:
                    name = file.name
                if not parent_id:
                    parent_id = file.parent_id and file.parent_id.id or False
                if not res_model:
                    res_model = file.res_model and file.res_model or False
                if not res_id:
                    res_id = file.res_id and file.res_id or 0
                res = self.search(cr, uid,
                                  [('id', '<>', file.id), ('name', '=', name),
                                   ('parent_id', '=', parent_id),
                                   ('res_model', '=', res_model),
                                   ('res_id', '=', res_id)])
                if len(res):
                    return False
        if op == 'create':
            res = self.search(cr, uid,
                              [('name', '=', name),
                               ('parent_id', '=', parent_id),
                               ('res_id', '=', res_id),
                               ('res_model', '=', res_model)])
            if len(res):
                return False
        return True

    def check(self, cr, uid, ids, mode, context=None, values=None):
        """Check access wrt. res_model, relax the rule of ir.attachment
        parent.

        With 'document' installed, everybody will have access to
        attachments of any resources they can *read*.
        """
        # Deliberately force mode='read' on the parent check, whatever mode
        # the caller asked for.
        return super(document_file, self).check(cr, uid, ids, mode='read',
                                                context=context,
                                                values=values)

    def search(self, cr, uid, args, offset=0, limit=None, order=None,
               context=None, count=False):
        # Grab ids, bypassing 'count'
        ids = super(document_file, self).search(cr, uid, args, offset=offset,
                                                limit=limit, order=order,
                                                context=context, count=False)
        if not ids:
            return 0 if count else []

        # Filter out documents that are in directories that the user is not
        # allowed to read.
        # Must use pure SQL to avoid access rules exceptions (we want to
        # remove the records, not fail), and the records have been filtered
        # in parent's search() anyway.
        cr.execute(
            'SELECT id, parent_id from "%s" WHERE id in %%s' % self._table,
            (tuple(ids), ))
        doc_pairs = cr.fetchall()
        # zip() returns a list on Python 2, so indexing the transpose works.
        parent_ids = set(zip(*doc_pairs)[1])
        visible_parent_ids = self.pool.get('document.directory').search(
            cr, uid, [('id', 'in', list(parent_ids))])
        disallowed_parents = parent_ids.difference(visible_parent_ids)
        for doc_id, parent_id in doc_pairs:
            if parent_id in disallowed_parents:
                ids.remove(doc_id)
        return len(ids) if count else ids

    def copy(self, cr, uid, id, default=None, context=None):
        # Suffix the name with "(copy)" unless the caller supplied one,
        # keeping the filename_uniq convention from _check_duplication.
        if not default:
            default = {}
        if 'name' not in default:
            name = self.read(cr, uid, [id], ['name'])[0]['name']
            default.update({'name': name + " " + _("(copy)")})
        return super(document_file, self).copy(cr, uid, id, default,
                                               context=context)

    def write(self, cr, uid, ids, vals, context=None):
        """Write attachment values; renames/moves go through the node layer
        so the stored file follows the record."""
        result = False
        if not isinstance(ids, list):
            ids = [ids]
        # Re-search to drop ids the user may not see (see search() above).
        res = self.search(cr, uid, [('id', 'in', ids)])
        if not len(res):
            return False
        if not self._check_duplication(cr, uid, vals, ids, 'write'):
            raise osv.except_osv(_('ValidateError'),
                                 _('File name must be unique!'))

        # if nodes call this write(), they must skip the code below
        from_node = context and context.get('__from_node', False)
        if (('parent_id' in vals) or ('name' in vals)) and not from_node:
            # perhaps this file is renaming or changing directory
            nctx = nodes.get_node_context(cr, uid, context={})
            dirobj = self.pool.get('document.directory')
            if 'parent_id' in vals:
                dbro = dirobj.browse(cr, uid, vals['parent_id'],
                                     context=context)
                dnode = nctx.get_dir_node(cr, dbro)
            else:
                dbro = None
                dnode = None
            ids2 = []
            for fbro in self.browse(cr, uid, ids, context=context):
                # No effective rename/move for this record: keep it in the
                # normal write path.
                if ('parent_id' not in vals or fbro.parent_id.id == vals['parent_id']) \
                    and ('name' not in vals or fbro.name == vals['name']):
                    ids2.append(fbro.id)
                    continue
                fnode = nctx.get_file_node(cr, fbro)
                res = fnode.move_to(cr, dnode or fnode.parent,
                                    vals.get('name', fbro.name), fbro, dbro,
                                    True)
                if isinstance(res, dict):
                    # move_to() handled the move itself and returned extra
                    # values to merge into this record's write.
                    vals2 = vals.copy()
                    vals2.update(res)
                    wid = res.get('id', fbro.id)
                    result = super(document_file, self).write(cr, uid, wid,
                                                              vals2,
                                                              context=context)
                    # TODO: how to handle/merge several results?
                elif res == True:
                    ids2.append(fbro.id)
                elif res == False:
                    pass
            ids = ids2
        if 'file_size' in vals:  # only write that field using direct SQL calls
            del vals['file_size']
        if ids and vals:
            result = super(document_file, self).write(cr, uid, ids, vals,
                                                      context=context)
        return result

    def create(self, cr, uid, vals, context=None):
        """Create an attachment, defaulting parent_id/res_model/res_id from
        the context, linking a partner when one can be discovered, and
        de-duplicating by updating the existing record instead of failing.
        """
        if context is None:
            context = {}
        vals['parent_id'] = context.get('parent_id', False) or vals.get(
            'parent_id', False)
        if not vals['parent_id']:
            vals['parent_id'] = self.pool.get(
                'document.directory')._get_root_directory(cr, uid, context)
        if not vals.get('res_id', False) and context.get(
                'default_res_id', False):
            vals['res_id'] = context.get('default_res_id', False)
        if not vals.get('res_model', False) and context.get(
                'default_res_model', False):
            vals['res_model'] = context.get('default_res_model', False)
        if vals.get('res_id', False) and vals.get('res_model', False) \
                and not vals.get('partner_id', False):
            vals['partner_id'] = self.__get_partner_id(cr, uid, \
                vals['res_model'], vals['res_id'], context)

        datas = None
        if vals.get('link', False):
            # Fetch the payload from the given URL instead of 'datas'.
            import urllib
            datas = base64.encodestring(urllib.urlopen(vals['link']).read())
        else:
            datas = vals.get('datas', False)

        if datas:
            # len() of the base64 string, not the decoded payload — kept
            # as-is since it mirrors the original sizing behaviour.
            vals['file_size'] = len(datas)
        else:
            if vals.get('file_size'):
                del vals['file_size']
        result = self._check_duplication(cr, uid, vals)
        if not result:
            # Duplicate found: overwrite its content rather than raising.
            domain = [
                ('res_id', '=', vals['res_id']),
                ('res_model', '=', vals['res_model']),
                ('datas_fname', '=', vals['datas_fname']),
            ]
            attach_ids = self.search(cr, uid, domain, context=context)
            super(document_file, self).write(cr, uid, attach_ids,
                                             {'datas': vals['datas']},
                                             context=context)
            result = attach_ids[0]
        else:
            #raise osv.except_osv(_('ValidateError'), _('File name must be unique!'))
            result = super(document_file, self).create(cr, uid, vals, context)
        return result

    def __get_partner_id(self, cr, uid, res_model, res_id, context=None):
        """ A helper to retrieve the associated partner from any
            res_model+id. It is a hack that will try to discover if the
            mentioned record is clearly associated with a partner record.
        """
        obj_model = self.pool.get(res_model)
        if obj_model._name == 'res.partner':
            return res_id
        elif 'partner_id' in obj_model._columns and obj_model._columns[
                'partner_id']._obj == 'res.partner':
            bro = obj_model.browse(cr, uid, res_id, context=context)
            return bro.partner_id.id
        elif 'address_id' in obj_model._columns and obj_model._columns[
                'address_id']._obj == 'res.partner.address':
            bro = obj_model.browse(cr, uid, res_id, context=context)
            return bro.address_id.partner_id.id
        return False

    def unlink(self, cr, uid, ids, context=None):
        """Delete attachments and their stored files.

        We have to do the unlink in 2 stages: prepare a list of actual
        files to be unlinked, update the db (safer to do first, can be
        rolled back) and then unlink the files. The list wouldn't exist
        after we discard the objects.
        """
        stor = self.pool.get('document.storage')
        unres = []
        # Re-search to drop ids the user may not see (see search() above).
        ids = self.search(cr, uid, [('id', 'in', ids)])
        for f in self.browse(cr, uid, ids, context=context):
            # TODO: update the node cache
            # Walk up the directory tree to find the storage backend that
            # holds this file.
            par = f.parent_id
            storage_id = None
            while par:
                if par.storage_id:
                    storage_id = par.storage_id
                    break
                par = par.parent_id
            #assert storage_id, "Strange, found file #%s w/o storage!" % f.id
            #TOCHECK: after run yml, it's fail
            if storage_id:
                r = stor.prepare_unlink(cr, uid, storage_id, f)
                if r:
                    unres.append(r)
            else:
                logging.getLogger('document').warning(
                    "Unlinking attachment #%s %s that has no storage",
                    f.id, f.name)
        res = super(document_file, self).unlink(cr, uid, ids, context)
        # Only now, physically remove the prepared files.
        stor.do_unlink(cr, uid, unres)
        return res
class OemedicalPerinatal(osv.Model):
    """Perinatal information: data collected around a patient's labor and
    delivery (admission, fetal presentation, complications, outcome).
    """
    _name = 'oemedical.perinatal'
    _description = 'Perinatal Information'

    _columns = {
        # Patient this perinatal record belongs to (used as record name).
        # Label typo fixed: "Infomation" -> "Information".
        'name': fields.many2one('oemedical.patient',
                                string='Perinatal Information'),
        'admission_code': fields.char('Admission Code', size=64),
        'gravida_number': fields.integer('Gravida #'),
        'abortion': fields.boolean('Abortion'),
        'admission_date': fields.datetime(
            'Admission date',
            help="Date when she was admitted to give birth"),
        'prenatal_evaluations': fields.integer(
            'Prenatal evaluations',
            help="Number of visits to the doctor during pregnancy"),
        'start_labor_mode': fields.selection([
            ('n', 'Normal'),
            ('i', 'Induced'),
            ('c', 'c-section'),
        ], 'Labor mode', select=True),
        'gestational_weeks': fields.integer('Gestational weeks'),
        'gestational_days': fields.integer('Gestational days'),
        'fetus_presentation': fields.selection([
            ('n', 'Correct'),
            ('o', 'Occiput / Cephalic Posterior'),
            ('fb', 'Frank Breech'),
            ('cb', 'Complete Breech'),
            ('t', 'Transverse Lie'),
            # Bug fix: this option previously reused the 't' key, so
            # 'Footling Breech' could never be stored or distinguished from
            # 'Transverse Lie'. It now has its own key.
            # NOTE(review): if production data exists, confirm no rows rely
            # on the old duplicated key before deploying.
            ('fo', 'Footling Breech'),
        ], 'Fetus Presentation', select=True),
        'dystocia': fields.boolean('Dystocia'),
        'laceration': fields.selection([
            ('perineal', 'Perineal'),
            ('vaginal', 'Vaginal'),
            ('cervical', 'Cervical'),
            ('broad_ligament', 'Broad Ligament'),
            ('vulvar', 'Vulvar'),
            ('rectal', 'Rectal'),
            ('bladder', 'Bladder'),
            ('urethral', 'Urethral'),
        ], 'Lacerations', sort=False),
        'hematoma': fields.selection([
            ('vaginal', 'Vaginal'),
            ('vulvar', 'Vulvar'),
            ('retroperitoneal', 'Retroperitoneal'),
        ], 'Hematoma', sort=False),
        'placenta_incomplete': fields.boolean('Incomplete Placenta'),
        'placenta_retained': fields.boolean('Retained Placenta'),
        'abruptio_placentae': fields.boolean('Abruptio Placentae',
                                             help='Abruptio Placentae'),
        'episiotomy': fields.boolean('Episiotomy'),
        'vaginal_tearing': fields.boolean('Vaginal tearing'),
        'forceps': fields.boolean('Use of forceps'),
        # One2many lines keyed back to this record via their 'name' m2o.
        'monitoring': fields.one2many('oemedical.perinatal.monitor', 'name',
                                      string='Monitors'),
        'puerperium_monitor': fields.one2many('oemedical.puerperium.monitor',
                                              'name', 'Puerperium monitor'),
        'medications': fields.one2many('oemedical.patient.medication',
                                       'patient_id', string='Medications',),
        'dismissed': fields.datetime('Dismissed from hospital'),
        'place_of_death': fields.selection([
            ('ho', 'Hospital'),
            ('dr', 'At the delivery room'),
            ('hh', 'in transit to the hospital'),
            ('th', 'Being transferred to other hospital'),
        ], 'Place of Death'),
        'mother_deceased': fields.boolean(
            'Deceased', help="Mother died in the process"),
        'notes': fields.text('Notes'),
    }
class jasper_document(osv.osv):
    """Definition of a report printable through JasperServer.

    Each record is mirrored into an ir.actions.report.xml entry (plus an
    ir.values binding) so it appears as a print action on the target model.
    """
    _name = 'jasper.document'
    _description = 'Jasper Document'
    _order = 'sequence'

    def _get_formats(self, cr, uid, context=None):
        """Return the list of all types of document that can be generated
        by JasperServer, as (jasper_code, label) selection tuples.
        """
        if not context:
            context = {}
        extension_obj = self.pool.get('jasper.document.extension')
        ext_ids = extension_obj.search(cr, uid, [])
        # Consistency fix: reuse extension_obj instead of fetching the same
        # model from the pool a second time.
        extensions = extension_obj.read(cr, uid, ext_ids)
        return [(ext['jasper_code'],
                 ext['name'] + " (*." + ext['extension'] + ")")
                for ext in extensions]

    # TODO: Add One2many with model list and depth for each, use for ban process
    # TODO: Implement the possibility to dynamically generate a wizard
    _columns = {
        # button name
        'name': fields.char('Name', size=128, translate=True, required=True),
        'service': fields.char(
            'Service name', size=64, required=True,
            help='Enter the service name register at start by OpenERP Server'),
        'enabled': fields.boolean(
            'Active', help="Indicates if this document is active or not"),
        # object model in ir.model
        'model_id': fields.many2one('ir.model', 'Object Model',
                                    required=True),
        # jasper filename
        'jasper_file': fields.char('Jasper file', size=128),
        'group_ids': fields.many2many(
            'res.groups', 'jasper_wizard_group_rel', 'document_id',
            'group_id', 'Groups',
        ),
        'depth': fields.integer('Depth', required=True),
        'format_choice': fields.selection(
            [('mono', 'Single Format'), ('multi', 'Multi Format')],
            'Format Choice', required=True),
        'format': fields.selection(_get_formats, 'Formats'),
        'report_unit': fields.char(
            'Report Unit', size=128,
            help='Enter the name for report unit in Jasper Server'),
        'mode': fields.selection(
            [('sql', 'SQL'), ('xml', 'XML'), ('multi', 'Multiple Report')],
            'Mode', required=True),
        'before': fields.text(
            'Before',
            help=
            'This field must be filled with a valid SQL request and will be executed BEFORE the report edition',
        ),
        'after': fields.text(
            'After',
            help=
            'This field must be filled with a valid SQL request and will be executed AFTER the report edition',
        ),
        'attachment': fields.char(
            'Save As Attachment Prefix', size=255,
            help=
            'This is the filename of the attachment used to store the printing result. Keep empty to not save the printed reports. You can use a python expression with the object and time variables.'
        ),
        'attachment_use': fields.boolean(
            'Reload from Attachment',
            help=
            'If you check this, then the second time the user prints with same attachment name, it returns the previous report.'
        ),
        'toolbar': fields.boolean(
            'Hide in toolbar',
            help='Check this if you want to hide button in toolbar'),
        'param_ids': fields.one2many(
            'jasper.document.parameter', 'document_id', 'Parameters',
        ),
        'ctx': fields.char(
            'Context', size=128,
            help=
            "Enter condition with context does match to see the print action\neg: context.get('foo') == 'bar'"
        ),
        'sql_view': fields.text(
            'SQL View',
            help='Insert your SQL view, if the report is base on it'),
        'sql_name': fields.char('Name of view', size=128,),
        'child_ids': fields.many2many(
            'jasper.document', 'jasper_document_multi_rel', 'source_id',
            'destin_id', 'Child report',
            help='Select reports to launch when this report is called'),
        'sequence': fields.integer(
            'Sequence',
            help=
            'The sequence is used when launch a multple report, to select the order to launch'
        ),
        'only_one': fields.boolean(
            'Launch one time for all ids',
            help='Launch the report only one time on multiple id'),
        'duplicate': fields.char(
            'Duplicate', size=256,
            help=
            "Indicate the number of duplicate copie, use o as object to evaluate\neg: o.partner_id.copy\nor\n'1'",
        ),
        'lang': fields.char(
            'Lang', size=256,
            help=
            "Indicate the lang to use for this report, use o as object to evaluate\neg: o.partner_id.lang\nor\n'fr_FR'\ndefault use user's lang"
        ),
        'report_id': fields.many2one(
            'ir.actions.report.xml', 'Report link', readonly=True,
            help='Link to the report in ir.actions.report.xml'),
        'check_sel': fields.selection(
            [('none', 'None'), ('simple', 'Simple'), ('func', 'Function')],
            'Checking type',
            help=
            'if None, no check\nif Simple, define on Check Simple the condition\n if function, the object have check_print function'
        ),
        'check_simple': fields.char(
            'Check Simple', size=256,
            help=
            "This code inside this field must return True to send report execution\neg o.state in ('draft', 'open')"
        ),
        'message_simple': fields.char(
            'Return message', size=256, translate=True,
            help="Error message when check simple doesn't valid"),
        'label_ids': fields.one2many('jasper.document.label', 'document_id',
                                     'Labels'),
    }
    _defaults = {
        'format_choice': lambda *a: 'mono',
        'mode': lambda *a: 'sql',
        'attachment': lambda *a: False,
        'toolbar': lambda *a: True,
        'depth': lambda *a: 0,
        'sequence': lambda *a: 100,
        'format': lambda *a: 'PDF',
        'duplicate': lambda *a: "'1'",
        'lang': lambda *a: False,
        'report_id': lambda *a: False,
        'check_sel': lambda *a: 'none',
        'check_simple': lambda *a: False,
        'message_simple': lambda *a: False,
    }

    def __init__(self, pool, cr):
        """Automatically register the service when the server starts."""
        super(jasper_document, self).__init__(pool, cr)

    def make_action(self, cr, uid, id, context=None):
        """Create or update the ir.actions.report.xml entry (and ir.values
        binding on first creation), then register the Jasper service.
        """
        act_report_obj = self.pool.get('ir.actions.report.xml')
        # Fix: the original browsed the same record twice; the first result
        # ('b') was never used.
        doc = self.browse(cr, uid, id, context=context)
        if doc.report_id:
            _logger.info('Update "%s" service' % doc.name)
            args = {
                'name': doc.name,
                'report_name': 'jasper.' + doc.service,
                'model': doc.model_id.model,
                'groups_id': [(6, 0, [x.id for x in doc.group_ids])],
                'header': False,
                'multi': doc.toolbar,
            }
            act_report_obj.write(cr, uid, [doc.report_id.id], args,
                                 context=context)
        else:
            _logger.info('Create "%s" service' % doc.name)
            args = {
                'name': doc.name,
                'report_name': 'jasper.' + doc.service,
                'model': doc.model_id.model,
                'report_type': 'jasper',
                'groups_id': [(6, 0, [x.id for x in doc.group_ids])],
                'header': False,
                'multi': doc.toolbar,
            }
            report_id = act_report_obj.create(cr, uid, args, context=context)
            # Plain SQL to avoid recursing through this model's write().
            cr.execute(
                """UPDATE jasper_document SET report_id=%s WHERE id=%s""",
                (report_id, id))
            value = 'ir.actions.report.xml,' + str(report_id)
            ir.ir_set(cr, uid, 'action', 'client_print_multi', doc.name,
                      [doc.model_id.model], value, replace=False,
                      isobject=True)
        registered_report('jasper.' + doc.service)

    def action_values(self, cr, uid, report_id, context=None):
        """Return the ir.values ids that bind the given report as a
        client_print_multi action.
        """
        args = [
            ('key2', '=', 'client_print_multi'),
            ('value', '=', 'ir.actions.report.xml,%d' % report_id),
            ('object', '=', True),
        ]
        return self.pool.get('ir.values').search(cr, uid, args,
                                                 context=context)

    def create_values(self, cr, uid, id, context=None):
        """Ensure an ir.values binding exists for this document's report."""
        doc = self.browse(cr, uid, id, context=context)
        if not self.action_values(cr, uid, doc.report_id.id, context=context):
            value = 'ir.actions.report.xml,%d' % doc.report_id.id
            ir.ir_set(cr, uid, 'action', 'client_print_multi', doc.name,
                      [doc.model_id.model], value, replace=False,
                      isobject=True)
        return True

    def unlink_values(self, cr, uid, id, context=None):
        """Only remove the link in ir.values, not the report itself."""
        doc = self.browse(cr, uid, id, context=context)
        for v in self.action_values(cr, uid, doc.report_id.id,
                                    context=context):
            ir.ir_del(cr, uid, v)
        return True

    def create(self, cr, uid, vals, context=None):
        """Dynamically declare the print action for this document and
        create the backing SQL view when one is provided.
        """
        if context is None:
            context = {}
        doc_id = super(jasper_document, self).create(cr, uid, vals,
                                                     context=context)
        self.make_action(cr, uid, doc_id, context=context)
        # Check if view and create it in the database
        if vals.get('sql_name') and vals.get('sql_view'):
            drop_view_if_exists(cr, vals.get('sql_name'))
            sql_query = 'CREATE OR REPLACE VIEW %s AS\n%s' % (
                vals['sql_name'], vals['sql_view'])
            cr.execute(sql_query)
        return doc_id

    def write(self, cr, uid, ids, vals, context=None):
        """If the description changes, update the action; if the SQL view
        changes, recreate it.
        """
        if context is None:
            context = {}
        if vals.get('sql_name') or vals.get('sql_view'):
            # Browse once instead of twice for the unchanged counterpart.
            doc = self.browse(cr, uid, ids[0])
            sql_name = vals.get('sql_name', doc.sql_name)
            sql_view = vals.get('sql_view', doc.sql_view)
            drop_view_if_exists(cr, sql_name)
            sql_query = 'CREATE OR REPLACE VIEW %s AS\n%s' % (sql_name,
                                                              sql_view)
            # Bug fix: do not pass query parameters here. The statement has
            # no placeholders, and the spurious (ids,) argument made
            # psycopg2 treat any literal '%' inside the view SQL as a
            # format marker and fail (create() already executes it bare).
            cr.execute(sql_query)
        res = super(jasper_document, self).write(cr, uid, ids, vals,
                                                 context=context)
        if not context.get('action'):
            for id in ids:
                self.make_action(cr, uid, id, context=context)
        if 'enabled' in vals:
            if vals['enabled']:
                for id in ids:
                    self.create_values(cr, uid, id, context)
            else:
                for id in ids:
                    self.unlink_values(cr, uid, id, context)
        return res

    def copy(self, cr, uid, id, default=None, context=None):
        """When duplicating, reset the report link and derive a new service
        name so the copy registers its own action.
        """
        if context is None:
            context = {}
        if default is None:
            default = {}
        doc = self.browse(cr, uid, id, context=context)
        default['report_id'] = False
        default['service'] = doc.service + '_copy'
        default['name'] = doc.name + _(' (copy)')
        return super(jasper_document, self).copy(cr, uid, id, default,
                                                 context=context)

    def unlink(self, cr, uid, ids, context=None):
        """Remove the related ir.actions.report.xml and ir.values entries
        before deleting the document itself.
        """
        if context is None:
            context = {}
        for doc in self.browse(cr, uid, ids, context=context):
            if doc.report_id:
                self.unlink_values(cr, uid, doc.id, context)
                self.pool.get('ir.actions.report.xml').unlink(
                    cr, uid, [doc.report_id.id], context=context)
        return super(jasper_document, self).unlink(cr, uid, ids,
                                                   context=context)
class admin_affairs_account(osv.osv):
    """ To manage admin affairs accounts. """

    def create(self, cr, user, vals, context=None):
        """ Create new entry sequence for every new admin affairs account
        record.

        @param vals: record to be created
        @return: return a result that create a new record in the database
        """
        # Only assign a sequence number when the caller did not supply a
        # real reference ('/' is the "unset" placeholder).
        if ('name' not in vals) or (vals.get('name') == '/'):
            seq_obj_name = 'admin_affairs.account'
            vals['name'] = self.pool.get('ir.sequence').get(cr, user,
                                                            seq_obj_name)
        return super(admin_affairs_account, self).create(cr, user, vals,
                                                         context)

    _name = "admin_affairs.account"
    _description = 'Admin affairs account'
    _columns = {
        # Sequence-generated unique reference (see create()).
        'name': fields.char('Reference', size=64, required=True, select=True,
                            readonly=True, help="unique number of the record"),
        'date': fields.date('Date', required=True, readonly=True,),
        'model_id': fields.many2one('admin.affairs.model', 'Model',
                                    required=True),
        'templet_id': fields.many2one('account.account.template',
                                      'Account Templet'),
        # Mirror of the template's code, stored for searching/reporting.
        'code': fields.related('templet_id', 'code', type='char',
                               relation='account.account.template',
                               string='Code', store=True, readonly=True),
        'name_type': fields.many2one('account.account.type', 'Account_type'),
        # Company-dependent settings (fields.property).
        'journal_id': fields.property('account.journal', required=True,
                                      type='many2one',
                                      relation='account.journal',
                                      string='Journal', method=True,
                                      view_load=True),
        'account_id': fields.property('account.account', type='many2one',
                                      relation='account.account',
                                      string='Account', method=True,
                                      view_load=True),
        'analytic_id': fields.property('account.analytic.account',
                                       type='many2one',
                                       relation='account.analytic.account',
                                       string='Analytic account', method=True,
                                       view_load=True),
        'notes': fields.text('Notes', size=256),
    }
    _sql_constraints = [
        ('model_uniq', 'unique(model_id)', 'Model must be unique!'),
        ('name_uniq', 'unique(name)', 'Reference must be unique !'),
    ]
    _defaults = {
        # Bug fix: this was time.strftime('%Y-%m-%d'), evaluated ONCE when
        # the module is imported, freezing the default date at server-start
        # day. A callable is re-evaluated for every new record instead.
        'date': lambda *a: time.strftime('%Y-%m-%d'),
        'name': '/',
    }
class liquidation_shipping(osv.osv): _name = "liquidation.shipping" _description = "Liquidation Shiping" _columns = { 'name': fields.char('Liquidation Number', size=64, required=True, readonly=True), 'carrier_company_id': fields.many2one('res.partner', 'Carrier Company', states={'confirmed':[('readonly',True)], 'approved':[('readonly',True)],'cancel':[('readonly',True)]}), 'date_liquidation':fields.date('Date Liquidation', states={'confirmed':[('readonly',True)], 'approved':[('readonly',True)],'cancel':[('readonly',True)],'done':[('readonly',True)]}), 'driver_id': fields.many2one('res.partner', 'Driver', states={'confirmed':[('readonly',True)], 'approved':[('readonly',True)],'cancel':[('readonly',True)]}), 'guide_id': fields.many2one('delivery.guide', 'Guide', required=True, states={'confirmed':[('readonly',True)], 'approved':[('readonly',True)],'cancel':[('readonly',True)],'done':[('readonly',True)]}), 'warehouse_id': fields.many2one('stock.warehouse', 'Warehouse', states={'confirmed':[('readonly',True)], 'approved':[('readonly',True)],'cancel':[('readonly',True)]}), 'ruta_id': fields.many2one('guide.ruta', 'Ruta', states={'confirmed':[('readonly',True)], 'approved':[('readonly',True)],'cancel':[('readonly',True)]}), 'vehiculo_id': fields.many2one('guide.vehiculo', 'Vehiculo Carga', states={'confirmed':[('readonly',True)], 'approved':[('readonly',True)],'cancel':[('readonly',True)]}), 'base_amount': fields.float('Base Amount', required=True, states={'confirmed':[('readonly',True)], 'approved':[('readonly',True)],'cancel':[('readonly',True)],'done':[('readonly',True)]}, digits=(16, int(config['price_accuracy']))), 'extra_amount': fields.float('Extra Amount', states={'confirmed':[('readonly',True)], 'approved':[('readonly',True)],'cancel':[('readonly',True)],'done':[('readonly',True)]}, digits=(16, int(config['price_accuracy']))), 'manual_amount': fields.float('Extra Amount', digits=(16, int(config['price_accuracy'])), states={'confirmed':[('readonly',True)], 
'approved':[('readonly',True)],'cancel':[('readonly',True)],'done':[('readonly',True)]}), 'number': fields.char('Invoice Number', size=64, states={'done':[('readonly',True)],'cancel':[('readonly',True)]}), 'notes': fields.text('Notes'), 'comment': fields.text('Comment'), 'comment_manual': fields.text('Comment Manual', states={'confirmed':[('readonly',True)], 'approved':[('readonly',True)]}), 'state': fields.selection([ ('draft', 'Draft'), ('denied','Denied'), ('except', 'Except'), ('confirmed', 'Confirmed'), ('approved', 'Approved'), ('done', 'Done'), ('cancel', 'Cancelled')], 'Liquidation State', readonly=True, select=True), 'liquidation_line': fields.one2many('liquidation.shipping.line', 'liquidation_id', 'Invoice Refund Lines'), 'liquidation_fletes': fields.one2many('liquidation.shipping.line.fl', 'liquidation_ids', 'Fletes Lines'), 'liquidation_esp': fields.boolean('Liquidacion Especial', states={'done':[('readonly',True)],'cancel':[('readonly',True)]}), 'liquidation_manual': fields.boolean('Liquidacion Manual', states={'done':[('readonly',True)],'cancel':[('readonly',True)]}), } _defaults = { 'name': lambda obj, cr, uid, context: obj.pool.get('liquidation.shipping').liquidation_seq_get(cr, uid), 'date_liquidation': lambda *a: time.strftime('%Y-%m-%d'), 'state': lambda *a: 'draft', } ##liquidation_seq_get------------------------------------------------------------------------------------------------------- #Asigna el numero de liquidacion, de forma temporal, al guardar se obtiene el definitivo # def liquidation_seq_get(self, cr, uid): pool_seq=self.pool.get('ir.sequence') cr.execute("select id,number_next,number_increment,prefix,suffix,padding from ir_sequence where code='liquidation.shipping' and active=True") res = cr.dictfetchone() if res: if res['number_next']: return pool_seq._process(res['prefix']) + '%%0%sd' % res['padding'] % res['number_next'] + pool_seq._process(res['suffix']) else: return pool_seq._process(res['prefix']) + 
pool_seq._process(res['suffix']) return False ##write--------------------------------------------------------------------------------------------------------------------- # # def write(self, cursor, user, ids, vals, context=None): if vals.has_key('guide_id') and vals['guide_id']: idguide = vals['guide_id'] infoguide = self.pool.get('delivery.guide').read(cursor, user, [idguide], ['carrier_company_id','driver_id','warehouse_id','ruta_id','vehiculo_id'])[0] if infoguide: vals['carrier_company_id']= infoguide['carrier_company_id'][0] vals['driver_id']= infoguide['driver_id'][0] vals['warehouse_id']= infoguide['warehouse_id'][0] vals['ruta_id']= infoguide['ruta_id'][0] vals['vehiculo_id']= infoguide['vehiculo_id'][0] return super(liquidation_shipping, self).write(cursor, user, ids, vals,context=context) ##create--------------------------------------------------------------------------------------------------------------------- #Asigna el numero definitivi a la liquidacion, y aumenta el contador de la sequencia 'liquidation.shipping' # def create(self, cursor, user, vals, context=None): vals['name']=self.pool.get('ir.sequence').get(cursor, user, 'liquidation.shipping') #Las assignacion de los datos siguientes se debe a que la vista al momento de crear una #liq. da fletes estan en modo solo lectura, por lo tanto debe ser llenados antes de cear la liquidacion... 
idguide = vals['guide_id'] infoguide = self.pool.get('delivery.guide').read(cursor, user, [idguide], ['carrier_company_id','driver_id','warehouse_id','ruta_id','vehiculo_id'])[0] if infoguide: vals['carrier_company_id']= infoguide['carrier_company_id'][0] vals['driver_id']= infoguide['driver_id'][0] vals['warehouse_id']= infoguide['warehouse_id'][0] vals['ruta_id']= infoguide['ruta_id'][0] vals['vehiculo_id']= infoguide['vehiculo_id'][0] lq_id=super(liquidation_shipping, self).create(cursor, user, vals,context=context) return lq_id ##button_compute_shipping---------------------------------------------------------------------------------------------------- #Se Calcula el monto de la liquidacion # def button_compute_shipping(self, cr, uid, ids, context={}): totalflete = 0 dtotalflete = 0 tipovh = 0 if not ids: return {} #Datos Requiridos: se obtienen los datos necesario para realizar el calculo del Flete. infoshipping = self.pool.get('liquidation.shipping').read(cr, uid, ids, ['guide_id','ruta_id','vehiculo_id'])[0] if infoshipping: guide = infoshipping['guide_id'][0] ruta = infoshipping['ruta_id'][0] vh = infoshipping['vehiculo_id'][0] if vh: vhinfo = self.pool.get('guide.vehiculo').read(cr, uid, [vh], ['tipo_id'])[0] tipovh = vhinfo['tipo_id'][0] else: return {} #Procesando Guia de Despacho: se obtiene los datos de la guia a procesar infoguide = self.pool.get('delivery.guide').read(cr, uid, [guide], ['carrier_company_id','driver_id','warehouse_id','ruta_id','vehiculo_id'])[0] if infoguide: #Procesando Rutas - Tarifas: #se obtiene los valores de las tarifas de la ruta asignadas al tipo de vehiculo sqlg = """ SELECT r.category_fle_id,r.price,c.name FROM guide_ruta_line AS r INNER JOIN product_category_fle AS c ON r.category_fle_id=c.id WHERE r.ruta_id=%d AND r.tipo_vehiculo_id=%d ;"""%(ruta,tipovh) cr.execute (sqlg) datos_tarifa = cr.fetchall() #Se obtienen los productos y sus catidades agrupados por categoria de fletes sqlp = """ SELECT SUM(i.quantity) as 
cantidad,p.id_flete FROM delivery_guide_line AS l INNER JOIN account_invoice_line AS i ON l.invoice_id=i.invoice_id INNER JOIN product_product AS p ON i.product_id=p.id WHERE l.guide_id=%d GROUP BY p.id_flete ;"""%guide cr.execute (sqlp) resultSQL = cr.fetchall() if not resultSQL: return False #Se borrar los fletes fl_obj = self.pool.get('liquidation.shipping.line.fl') for id in ids: cr.execute("DELETE FROM liquidation_shipping_line_fl WHERE liquidation_ids=%s", (id,)) #Se calculo el monto neto del felte, segun se la tarifa que le corresponda lo cual depende de la categoria for product in resultSQL: costo = 0 total = 0 cantidad = 0 for tarf in datos_tarifa: if tarf[0] == product[1]: #Se valida la categoria del flete de la tarifa con la del producto costo = tarf[1] #Costo de la Tarifa cantidad = product[0] #Cantidad de Productos total = cantidad * costo totalflete += total #Se crea el flete en la linea de fletes vals_fl = { 'liquidation_fletes':[(0,0,{'name': tarf[2],'id_flete':tarf[0] ,'price':costo ,'quantity':cantidad})] } #fl_obj.create(cr, uid, vals_fl) self.pool.get('liquidation.shipping').write(cr, uid, ids, vals_fl) break #Monto Reconcido: se almacena el monto neto del flete if totalflete > 0: self.pool.get('liquidation.shipping').write(cr, uid, ids, {'base_amount': totalflete}) #Se obtienen las Notas de Credito del Flete dtotalflete = 0 sqlnc = """ SELECT SUM(i.quantity) as cantidad,p.id_flete,a.reference,l.liquidation_esp FROM liquidation_shipping_line AS l INNER JOIN account_invoice AS a ON l.invoice_id=a.id INNER JOIN account_invoice_line AS i ON l.invoice_id=i.invoice_id INNER JOIN product_product AS p ON i.product_id=p.id WHERE l.liquidation_id=%d AND a.state !='cancel' GROUP BY p.id_flete,a.reference,liquidation_esp ;"""%ids[0] cr.execute (sqlnc) liq_line = cr.fetchall() #Validar si la Facturas poseen Notas de Credito sqlnotasc = """ SELECT i.id,i.name,i.reference,i.parent_id FROM account_invoice AS i WHERE i.type='out_refund' and i.adjustment=False 
and i.manual=False and i.internal=False AND i.state !='cancel' AND i.parent_id in (SELECT i.id FROM delivery_guide_line AS l INNER JOIN account_invoice AS i ON l.invoice_id=i.id WHERE l.guide_id=%d) ORDER BY i.reference;"""%guide cr.execute (sqlnotasc) notas_cred = cr.fetchall() #Se valida que las notas correspondientes a las facturas esten asignadas al flete. #Si existen notas y aun no estan asignadas se procede a la aignacion automatica #print "NC",notas_cred,"LQ";liq_line if not liq_line and notas_cred: for new in notas_cred: #Se obtiene nro de pedido de la factura para completar los valores a ingresar en la linea de fletes. invoic = self.pool.get('account.invoice').read(cr, uid, [new[3]], ['reference']) nro_fact = invoic[0]['reference'] vals ={ 'liquidation_esp':True, 'liquidation_line':[(0,0,{ 'invoice_id':new[0],'name': new[2],'sale_order':nro_fact, 'liquidation_esp':True})] } self.pool.get('liquidation.shipping').write(cr, uid, ids, vals) cr.execute (sqlnc) liq_line = cr.fetchall() #Calculando el total neto de la notas de credito for inf in liq_line: costo = 0 total = 0 if inf[3]: for tarf in datos_tarifa: if tarf[0] == inf[1]: costo = tarf[1] total = inf[0] * costo dtotalflete += total break self.pool.get('liquidation.shipping').write(cr, uid, ids, {'extra_amount': dtotalflete}) return True ##button_compute_shipping---------------------------------------------------------------------------------------------------- #Se Calcula el monto de la liquidacion # def button_compute_shipping_tras(self, cr, uid, ids, context={}): totalflete = 0 dtotalflete = 0 tipovh = 0 if not ids: return {} #Datos Control: se obtienen los datos de control para realizar el calculo del Flete. 
infoshipping = self.pool.get('liquidation.shipping').read(cr, uid, ids, ['guide_id','ruta_id','vehiculo_id'])[0] if infoshipping: guide = infoshipping['guide_id'][0] ruta = infoshipping['ruta_id'][0] vh = infoshipping['vehiculo_id'][0] if vh: vhinfo = self.pool.get('guide.vehiculo').read(cr, uid, [vh], ['tipo_id'])[0] tipovh = vhinfo['tipo_id'][0] else: return {} #Procesando Guia de Despacho: se obtiene los datos de la guia a procesar infoguide = self.pool.get('delivery.guide').read(cr, uid, [guide], ['carrier_company_id','driver_id','warehouse_id','ruta_id','vehiculo_id'])[0] if infoguide: #Procesando Rutas - Tarifas: #se obtiene los valores de las tarifas de la ruta asignadas al tipo de vehiculo sqlg = """ SELECT r.category_fle_id,r.price,c.name FROM guide_ruta_line AS r INNER JOIN product_category_fle AS c ON r.category_fle_id=c.id WHERE r.ruta_id=%d AND r.tipo_vehiculo_id=%d ;"""%(ruta,tipovh) cr.execute (sqlg) datos_tarifa = cr.fetchall() #Se obtienen los productos y sus catidades agrupados por categoria de fletes sqlp = """ SELECT SUM(s.product_qty) as cantidad,p.id_flete FROM delivery_guide_picking_line AS l INNER JOIN stock_move AS s ON l.picking_id=s.picking_id INNER JOIN product_product AS p ON s.product_id=p.id WHERE l.guide_id=%d GROUP BY p.id_flete ;"""%guide cr.execute (sqlp) resultSQL = cr.fetchall() if not resultSQL: return False #Se borrar los fletes fl_obj = self.pool.get('liquidation.shipping.line.fl') for id in ids: cr.execute("DELETE FROM liquidation_shipping_line_fl WHERE liquidation_ids=%s", (id,)) #Se calculo el monto neto del felte, segun se la tarifa que le corresponda lo cual depende de la categoria for product in resultSQL: costo = 0 total = 0 cantidad = 0 for tarf in datos_tarifa: if tarf[0] == product[1]: #Se valida la categoria del flete de la tarifa con la del producto costo = tarf[1] #Costo de la Tarifa cantidad = product[0] #Cantidad de Productos total = cantidad * costo totalflete += total #Se crea el flete en la linea de fletes 
vals_fl = { 'liquidation_fletes':[(0,0,{'name': tarf[2],'id_flete':tarf[0] ,'price':costo ,'quantity':cantidad})] } #fl_obj.create(cr, uid, vals_fl) self.pool.get('liquidation.shipping').write(cr, uid, ids, vals_fl) break #Monto Reconcido: se almacena el monto neto del flete if totalflete > 0: self.pool.get('liquidation.shipping').write(cr, uid, ids, {'base_amount': totalflete}) return True ##guide_change---------------------------------------------------------------------------------------------------- #Asignando Guia: #cuando se asigna la guia de despacho al flete, se obtienen otros valores automaticamente def guide_change(self, cr, uid, ids,guide): infoguide = {} if not guide: return {} infoguide = self.pool.get('delivery.guide').read(cr, uid, [guide], ['carrier_company_id','driver_id','warehouse_id','ruta_id','vehiculo_id'])[0] if infoguide: res = {'value': { 'carrier_company_id':infoguide['carrier_company_id'][0], 'driver_id':infoguide['driver_id'][0], 'warehouse_id':infoguide['warehouse_id'][0], 'ruta_id':infoguide['ruta_id'][0], 'vehiculo_id':infoguide['vehiculo_id'][0], }} return res return res ##WORKFLOWS---------------------------------------------------------------------------------------------------- # # def wkf_confirm_liquidation(self, cr, uid, ids): obj_flete = pooler.get_pool(cr.dbname).get('liquidation.shipping') flete = obj_flete.browse(cr, uid, ids[0]) idguide = flete.guide_id.id sql = "SELECT id FROM liquidation_shipping where guide_id=%d "%idguide cr.execute (sql) shipping_ids = cr.fetchall() for sp in shipping_ids: if ids[0] != sp[0]: raise osv.except_osv(_('Alerta :'),_('Guia ya procesada con otro flete!!!')) self.write(cr, uid, ids, {'state': 'confirmed'}) self.pool.get('delivery.guide').write(cr, uid, [idguide], {'paid':True}) return True def wkf_cancel_liquidation(self, cr, uid, ids): self.write(cr, uid, ids, {'state': 'cancel'}) return True
    # -- tail of an Amazon search method whose start lies outside this chunk --
        else:
            raise osv.except_osv(
                _('Warning !'),
                'No products found on Amazon as per your search query. Please try again')
        return True

    # Amazon-connector columns added to product.product.
    _columns = {
        # Seller SKU / ASIN identifiers on the marketplace.
        'amazon_sku': fields.char('Amazon SKU', size=126),
        'amazon_asin': fields.char('ASIN', size=16, readonly=True),
        'orderitemid': fields.char('Orderitemid', size=16),
        'product_order_item_id': fields.char('Order_item_id', size=256),
        'amazon_export': fields.boolean('Exported to Amazon'),
        'amazon_category': fields.many2one('amazon.category', 'Amazon Category'),
        # Standard product id type + value ('IBSN' typo preserved: stored data
        # may depend on it).
        'amz_type': fields.selection([('', ''), ('IBSN', 'IBSN'), ('UPC', 'UPC'), ('EAN', 'EAN'), ('ASIN', 'ASIN')], 'Type'),
        'amz_type_value': fields.char('Amazon Type Value', size=126),
        'amzn_condtn': fields.selection([('', ''), ('New', 'New'), ('UsedLikeNew', 'Used Like New'), ('UsedVeryGood', 'Used Very Good'), ('UsedGood', 'UsedGood'),
                                         ('UsedAcceptable', 'Used Acceptable'), ('CollectibleLikeNew', 'Collectible Like New'), ('CollectibleVeryGood', 'Collectible Very Good'), ('CollectibleGood', 'Collectible Good'),
                                         ('CollectibleAcceptable', 'Collectible Acceptable'), ('Refurbished', 'Refurbished'), ('Club', 'Club')], 'Amazon Condition'),
        'prod_query': fields.char('Product Search Query', size=200, help="A search string with the same support as that is provided on Amazon marketplace websites."),
        'prod_query_contextid': fields.selection(_amazon_browse_node_get, 'Query ContextId', help="An identifier for the context within which the given search will be performed."),
        'amazon_instance_id': fields.selection(_amazon_instance_get, 'Amazon Instance', help="Select the Amazon instance where you want to perform search."),
        'amazon_products_ids': fields.one2many('amazon.products.master', 'product_id', 'Amazon Searched Products'),
        # NOTE(review): readonly="True" passes a *string*, not the boolean
        # True -- works because any non-empty value is truthy, but fragile.
        'amazon_prod_status': fields.selection([('active', 'Active'), ('fail', 'Failure')], 'Status', readonly="True"),
        'operation_performed': fields.char('Operation Performed', size=126),
        'submit_feed_result': fields.text('Submit Feed Result', readonly=True),
        'amazon_updated_price': fields.float('Amazon Updated Price', digits=(16, 2)),
        'condition_note': fields.text('Condition Note'),
    }
    _defaults = {
        'amzn_condtn': '',
        'amazon_instance_id': _assign_default_amazon_instance
    }


product_product()
class tcv_stock_changes(osv.osv):
    """Stock adjustment document: re-measures production lots and generates
    the compensating stock pickings and/or accounting entries."""

    _name = 'tcv.stock.changes'
    _description = ''
    _order = 'ref desc'

    ##-------------------------------------------------------------------------
    ##------------------------------------------------------- _internal methods
    def _check_stock_production_lot(self, cr, lot_ids, sql):
        """Run *sql* (which contains a single ``%s`` placeholder for the lot
        id list) and return a comma-joined string of matching lot names, or a
        falsy value when there is no match.

        NOTE(review): the ids are spliced into the SQL by string formatting;
        ``.replace('L', '')`` strips Python-2 long-literal suffixes from
        ``str(lot_ids)``.
        """
        cr.execute(sql % str(lot_ids)[1:-1].replace('L', ''))
        res = cr.fetchall()
        return res and str([str(x[0]) for x in res])[1:-1]

    ##--------------------------------------------------------- function fields

    _columns = {
        # '/' until the sequence assigns the definitive reference in create().
        'ref': fields.char('ref', size=64, required=False, readonly=True),
        'name': fields.char('Name', size=64, required=True, readonly=True,
                            states={'draft': [('readonly', False)]},
                            select=True),
        'date': fields.date('Date', required=True, readonly=True,
                            states={'draft': [('readonly', False)]},
                            select=True),
        'method_id': fields.many2one('tcv.stock.changes.method', 'Adj. method',
                                     readonly=True,
                                     states={'draft': [('readonly', False)]},
                                     select=True, required=True,
                                     ondelete='restrict'),
        # Generated pickings (in/out) once the document is done.
        'picking_in_id': fields.many2one('stock.picking', 'Picking IN',
                                         readonly=True, ondelete='restrict'),
        'picking_out_id': fields.many2one('stock.picking', 'Picking OUT',
                                          readonly=True, ondelete='restrict'),
        'move_id': fields.many2one('account.move', 'Accounting entries',
                                   ondelete='restrict',
                                   help="The move of this entry line.",
                                   select=True, readonly=True),
        'state': fields.selection(_TCV_STOCK_CHANGES_STATES, 'State',
                                  required=True, readonly=True),
        'narration': fields.text('Notes', readonly=False),
        'confirm_user_id': fields.many2one('res.users', 'Confirmed by',
                                           readonly=True, select=True,
                                           ondelete='restrict'),
        'done_user_id': fields.many2one('res.users', 'Doned by',
                                        readonly=True, select=True,
                                        ondelete='restrict'),
        'company_id': fields.many2one('res.company', 'Company', required=True,
                                      readonly=True, ondelete='restrict'),
        # Optional override for the inventory-variation account.
        'account_id': fields.many2one(
            'account.account', 'Account', ondelete='restrict', required=False,
            readonly=True, states={'draft': [('readonly', False)]},
            help="The account for inventory variation, use only when " +
                 "need to replace default account"),
        'line_ids': fields.one2many('tcv.stock.changes.lines', 'line_id',
                                    'Production lots', readonly=True,
                                    states={'draft': [('readonly', False)]}),
    }

    _defaults = {
        'ref': lambda *a: '/',
        'date': lambda *a: time.strftime('%Y-%m-%d'),
        'state': lambda *a: 'draft',
        'company_id': lambda self, cr, uid, c: self.pool.get('res.company').
        _company_default_get(cr, uid, self._name, context=c),
    }

    _sql_constraints = []

    ##-------------------------------------------------------------------------
    ##---------------------------------------------------------- public methods

    def create_stock_move_lines(self, cr, uid, item, context=None):
        """Build the (0, 0, vals) stock-move tuples for one adjustment
        document *item*: the old lot quantity is moved out to the scrap
        location and the re-measured quantity moved back in.

        Returns (lines_in, lines_out).  Also updates the production lot's
        dimensions and, for 'stock'-type methods, its cost price so total lot
        value is preserved.
        """
        lines_in = []
        lines_out = []
        obj_lot = self.pool.get('stock.production.lot')
        obj_uom = self.pool.get('product.uom')
        for line in item.line_ids:
            # Use the line's location if set, otherwise the lot's current one.
            if not line.location_id:
                act_loc_id = obj_lot.get_actual_lot_location(
                    cr, uid, line.prod_lot_id.id, context)[0]
            else:
                act_loc_id = line.location_id.id
            locations = {
                'internal': act_loc_id,
                'scrap': item.method_id.location_id.id
            }
            # Common values for both the out and in moves.
            sml = {
                'product_id': line.product_id.id,
                'prodlot_id': line.prod_lot_id.id,
                'name': item.name or '',
                'date': item.date,
                'product_uom': line.product_id.uom_id.id,
                'state': 'draft'
            }
            lot = {}
            # Slabs/blocks carry physical dimensions ('heigth' spelling is
            # the project's field name).
            if line.stock_driver in ('slab', 'block'):
                lot.update({
                    'length': line.new_length,
                    'heigth': line.new_heigth,
                    'width': line.new_width
                })
            # Areas before/after the re-measurement (0 when no pieces).
            lot_area = obj_uom._compute_area(
                cr, uid, line.stock_driver, line.pieces, line.length,
                line.heigth, line.width,
                context=context) if line.pieces else 0
            new_area = obj_uom._compute_area(
                cr, uid, line.stock_driver, line.new_pieces, line.new_length,
                line.new_heigth, line.new_width,
                context=context) if line.new_pieces else 0
            if item.method_id.type == 'stock':
                # Keep total lot value constant: new unit cost = old value /
                # new area.
                lot_value = lot_area * line.cost_price
                new_cost = (lot_value / new_area) if new_area else 0
                if new_cost:
                    lot.update({'property_cost_price': new_cost})
            if lot:
                obj_lot.write(cr, uid, [line.prod_lot_id.id], lot,
                              context=context)
            # Send out all stock (=0) then create new stock
            if lot_area > 0:
                sml.update({
                    'location_id': locations['internal'],
                    'location_dest_id': locations['scrap'],
                    'pieces_qty': line.pieces,
                    'product_qty': lot_area,
                    'product_uos_qty': lot_area,
                })
                lines_out.append((0, 0, sml.copy()))
            if new_area > 0:
                sml.update({
                    'location_id': locations['scrap'],
                    'location_dest_id': locations['internal'],
                    'pieces_qty': line.new_pieces,
                    'product_qty': new_area,
                    'product_uos_qty': new_area,
                })
                lines_in.append((0, 0, sml.copy()))
            if lot_area == 0 and new_area == 0 and line.qty_diff:
                # To clear any remaing stock
                sml.update({
                    'location_id': locations['internal'],
                    'location_dest_id': locations['scrap'],
                    'pieces_qty': line.pieces,
                    'product_qty': abs(line.qty_diff),
                    'product_uos_qty': abs(line.qty_diff),
                })
                lines_out.append((0, 0, sml.copy()))
        return lines_in, lines_out

    def create_stock_picking(self, cr, uid, ids, context=None):
        """Create the OUT (old stock) and IN (re-measured stock) pickings for
        the adjustment documents and return their ids in a dict
        {'picking_in_id': ..., 'picking_out_id': ...} (0 when not created)."""
        if context is None:
            context = {}
        res = {'picking_in_id': 0, 'picking_out_id': 0}
        obj_pck = self.pool.get('stock.picking')
        ids = isinstance(ids, (int, long)) and [ids] or ids
        for item in self.browse(cr, uid, ids, context={}):
            picking = {
                'name': '/',
                'origin': '[%s] %s' % (item.ref, item.name or ''),
                'date': item.date,
                'invoice_state': 'none',
                'stock_journal_id': item.method_id.stock_journal_id.id,
                'company_id': item.company_id.id,
                'auto_picking': False,
                # 'move_type': 'one',
                'partner_id': item.company_id.partner_id.id,
                'state_rw': 0,
                'note': item.narration,
            }
            picking_in = picking.copy()
            picking_out = picking.copy()
            lines_in, lines_out = self.create_stock_move_lines(
                cr, uid, item, context)
            #~ Out first (00:00:00 sorts the OUT picking before the IN one)
            if lines_out:
                picking_out.update({
                    'date': '%s 00:00:00' % item.date,
                    'type': 'out',
                    'move_lines': lines_out
                })
                res['picking_out_id'] = obj_pck.create(cr, uid, picking_out,
                                                       context)
            #~ Then in...
            if lines_in:
                picking_in.update({
                    'date': '%s 00:00:01' % item.date,
                    'type': 'in',
                    'move_lines': lines_in
                })
                res['picking_in_id'] = obj_pck.create(cr, uid, picking_in,
                                                      context)
        return res

    def create_account_move_lines(self, cr, uid, item, context=None):
        """Build balanced debit/credit (0, 0, vals) move-line tuples for each
        adjustment line, using the category's stock accounts (or the
        document's override account).  Raises when accounting info is missing
        or both accounts are equal."""
        res = []
        for line in item.line_ids:
            categ = line.product_id.categ_id
            name = '(%s) %s' % (line.prod_lot_id.name, line.product_id.name)
            pre_line = {
                'auto': True,
                'company_id': item.company_id.id,
                'name': name,
                'debit': 0,
                'credit': 0,
                'reconcile': False,
            }
            # -stock(cr) -> +scrap(db)
            if not categ.property_stock_variation or \
                    not categ.property_stock_account_output_categ:
                raise osv.except_osv(
                    _('Error!'),
                    _('Can\'t find accounting info for categ: %s\n' +
                      '(stock output & stock variation)') % categ.name)
            accs = [
                item.account_id and item.account_id.id or
                categ.property_stock_variation.id,
                categ.property_stock_account_output_categ.id
                if line.qty_diff < 0
                else categ.property_stock_account_input_categ.id
            ]
            if accs[0] == accs[1]:
                raise osv.except_osv(
                    _('Error!'),
                    _('Debtor and creditor accounts can not be the same, ' +
                      'You must enter the account manually'))
            if line.qty_diff < 0:
                # -scrap(cr) -> +stock(db)
                accs.reverse()
            amt_fld = ['credit', 'debit']  # Yes: credit, debit
            amount = abs(line.qty_diff) * line.prod_lot_id.property_cost_price
            # One credit and one debit line per adjustment line.
            for x in range(2):
                move_line = pre_line.copy()
                move_line.update({
                    'account_id': accs[x],
                    amt_fld[x]: round(amount, 2)
                })
                res.append((0, 0, move_line))
        res.reverse()
        return res

    def create_account_move(self, cr, uid, ids, context=None):
        """Create and post the accounting move for 'account'-type adjustment
        methods; returns {'move_id': id} (0 when the method is not
        account-typed)."""
        if context is None:
            context = {}
        res = {'move_id': 0}
        obj_move = self.pool.get('account.move')
        obj_per = self.pool.get('account.period')
        for item in self.browse(cr, uid, ids, context={}):
            if item.method_id.type != 'account':
                return res
            period_id = obj_per.find(cr, uid, item.date)[0]
            move = {
                'ref': '[%s] %s' % (item.ref, item.name or ''),
                'journal_id': item.method_id.journal_id.id,
                'date': item.date,
                'min_date': item.date,
                'company_id': item.company_id.id,
                'state': 'draft',
                'to_check': False,
                'period_id': period_id,
                'narration': item.narration,
            }
            move.update({
                'line_id': self.create_account_move_lines(cr, uid, item,
                                                          context)
            })
            move_id = obj_move.create(cr, uid, move, context)
            if move_id:
                obj_move.post(cr, uid, [move_id], context=context)
            res['move_id'] = move_id
        return res

    ##-------------------------------------------------------- buttons (object)

    ##------------------------------------------------------------ on_change...

    ##----------------------------------------------------- create write unlink

    def create(self, cr, uid, vals, context=None):
        """Replace the '/' placeholder reference with the next value of the
        'tcv.stock.changes' sequence."""
        if not vals.get('ref') or vals.get('ref') == '/':
            vals.update({
                'ref': self.pool.get('ir.sequence').get(cr, uid,
                                                        'tcv.stock.changes')
            })
        res = super(tcv_stock_changes, self).create(cr, uid, vals, context)
        return res

    def unlink(self, cr, uid, ids, context=None):
        """Only cancelled documents may be deleted.

        NOTE(review): ``item.state in ('cancel')`` is a *substring* test on
        the string 'cancel' (the parentheses do not make a tuple).  It works
        for the exact value 'cancel' but would also accept e.g. 'can'.
        """
        unlink_ids = []
        for item in self.browse(cr, uid, ids, context={}):
            if item.state in ('cancel'):
                unlink_ids.append(item['id'])
            else:
                raise osv.except_osv(
                    _('Invalid action !'),
                    _('Cannot delete a record that aren\'t cancelled!'))
        res = super(tcv_stock_changes, self).\
            unlink(cr, uid, unlink_ids, context)
        return res

    ##---------------------------------------------------------------- Workflow

    def button_draft(self, cr, uid, ids, context=None):
        # Reset to draft and clear the audit users.
        vals = {'state': 'draft', 'done_user_id': 0, 'confirm_user_id': 0}
        return self.write(cr, uid, ids, vals, context)

    def button_confirm(self, cr, uid, ids, context=None):
        vals = {'state': 'confirm', 'confirm_user_id': uid}
        return self.write(cr, uid, ids, vals, context)

    def button_done(self, cr, uid, ids, context=None):
        # Generates the pickings and the accounting move, then stores their
        # ids on the document.
        vals = {'state': 'done', 'done_user_id': uid}
        vals.update(self.create_stock_picking(cr, uid, ids, context))
        vals.update(self.create_account_move(cr, uid, ids, context))
        return self.write(cr, uid, ids, vals, context)

    def button_cancel(self, cr, uid, ids, context=None):
        vals = {'state': 'cancel'}
        return self.write(cr, uid, ids, vals, context)

    def test_draft(self, cr, uid, ids, *args):
        # Workflow guard: always allowed.
        return True

    def test_confirm(self, cr, uid, ids, *args):
        """Workflow guard: lines must exist, every line must have a non-zero
        difference, and zero quantities are only allowed with an
        'account'-type method."""
        ids = isinstance(ids, (int, long)) and [ids] or ids
        for item in self.browse(cr, uid, ids, context={}):
            if not item.line_ids:
                raise osv.except_osv(_('Error!'),
                                     _('Please add some lines first'))
            for line in item.line_ids:
                if not line.qty_diff:
                    raise osv.except_osv(
                        _('Error!'),
                        _('The Diff must be <> 0 (%s)') %
                        line.prod_lot_id.name)
                if (not line.qty or not line.new_qty) and \
                        item.method_id.type != 'account':
                    raise osv.except_osv(
                        _('Error!'),
                        _('Must set method to accounting if actual or ' +
                          'new quantity = 0'))
        return True

    def test_done(self, cr, uid, ids, *args):
        """Workflow guard: the recorded quantity must still match the lot's
        available stock, and no lot may sit on a draft sale order, a draft
        invoice, or a pending stock move."""
        ids = isinstance(ids, (int, long)) and [ids] or ids
        for item in self.browse(cr, uid, ids, context={}):
            lot_ids = []
            for line in item.line_ids:
                lot_ids.append(line.prod_lot_id.id)
                if line.qty != line.prod_lot_id.stock_available:
                    raise osv.except_osv(
                        _('Error!'),
                        _('Lot stock changed: %s please remove and reload ' +
                          'data before process') % line.prod_lot_id.name)
            if lot_ids:
                #~ Check if lot in sale_order
                sql = '''
                    select distinct lot.name
                    from sale_order_line l
                    left join sale_order o on l.order_id = o.id
                    left join stock_production_lot lot
                        on l.prod_lot_id = lot.id
                    where l.prod_lot_id in (%s) and o.state = 'draft'
                    '''
                res = self._check_stock_production_lot(cr, lot_ids, sql)
                if res:
                    raise osv.except_osv(
                        _('Error!'),
                        _('Can\'t adjust a lot while is in a sale order:' +
                          '\n%s') % res)
                sql = '''
                    select distinct lot.name
                    from account_invoice_line l
                    left join account_invoice i on l.invoice_id = i.id
                    left join stock_production_lot lot
                        on l.prod_lot_id = lot.id
                    where l.prod_lot_id in (%s) and i.state = 'draft'
                    '''
                res = self._check_stock_production_lot(cr, lot_ids, sql)
                if res:
                    raise osv.except_osv(
                        _('Error!'),
                        _('Can\'t adjust a lot while is in a invoice:' +
                          '\n%s') % res)
                sql = '''
                    select distinct lot.name
                    from stock_move m
                    left join stock_production_lot lot
                        on m.prodlot_id = lot.id
                    where m.prodlot_id in (%s)
                        and m.state not in ('done', 'cancel')
                    '''
                res = self._check_stock_production_lot(cr, lot_ids, sql)
                if res:
                    raise osv.except_osv(
                        _('Error!'),
                        _('Can\'t adjust a lot while is in a stock move:' +
                          '\n%s') % res)
        return True

    def test_cancel(self, cr, uid, ids, *args):
        # Workflow guard: always allowed.
        return True
    # -- tail of an import-run method whose ``def`` lies before this chunk:
    # summarizes imported record counts into the log and marks the run done --
        warning = self.make_warning_message()
        res_text = '\n'
        for i in sorted(self.imported_records):
            res_text += i + ': ' + str(len(self.imported_records[i])) + '\n'
        self.imported_records.clear()
        self.warning_text = []
        self.write(cr, uid, self_id, {'log': warning + res_text,
                                      'state': 'done'})
        return

    _columns = {
        'name': fields.char('Name', size=64),
        'date': fields.date('Date', required=True),
        # Models/actions this scheduled migration run should process.
        'import_model_ids': fields.many2many('migration.import_models',
                                             'schedule_models_rel',
                                             'schedule_id', 'import_model_id',
                                             'Import Models'),
        'actions_ids': fields.many2many('migration.model_actions',
                                        'schedule_actions_rel',
                                        'schedule_id', 'action_id', 'Actions'),
        'state': fields.selection([('ready', 'Ready'), ('running', 'Running'),
                                   ('error', 'Error'), ('done', 'Done'),
                                   ('stop', 'Stopped')], 'State'),
        'log': fields.text('Log'),
        'print_log': fields.boolean('Print Log to Console'),
        # ir.cron record driving this schedule (re-armed by set_start()).
        'cron_id': fields.many2one('ir.cron', 'Scheduler', readonly=True),
    }

    _defaults = {
        'date': lambda *a: time.strftime('%Y-%m-%d'),
        'state': lambda *a: 'ready',
    }

    def set_start(self, cr, uid, ids, context={}):
        """Re-arm the schedule: reset state to 'ready' and program the linked
        cron for a single call ~30 seconds from now.

        NOTE(review): mutable default ``context={}`` and only ``ids[0]`` is
        used -- assumes a single-record call from the form view.
        """
        self.write(cr, uid, ids, {'state': 'ready'})
        cron_id = self.browse(cr, uid, ids[0], {}).cron_id.id
        nextcall = (now() + DateTime.RelativeDateTime(seconds=30)).strftime(
            '%Y-%m-%d %H:%M:%S')
        self.pool.get('ir.cron').write(cr, uid, cron_id,
                                       {'numbercall': 1, 'active': True,
                                        'nextcall': nextcall})
        return True
class reminder(osv.osv):
    """Periodically e-mails employees whose timesheets are missing or still
    in draft (driven by an ir.cron record)."""

    _name = "c2c_timesheet_reports.reminder"
    _description = "Handle the scheduling of messages"

    _columns = {
        'reply_to': fields.char('Reply To', size=100),
        'message': fields.text('Message'),
        'subject': fields.char('Subject', size=200),
    }

    #default cron (the one created if missing)
    cron = {
        'active': False,
        'priority': 1,
        'interval_number': 1,
        'interval_type': 'weeks',
        # tomorrow same time
        'nextcall': time.strftime(
            "%Y-%m-%d %H:%M:%S",
            (datetime.today() + timedelta(days=1)).timetuple()),
        'numbercall': -1,
        'doall': True,
        'model': 'c2c_timesheet_reports.reminder',
        'function': 'run',
        'args': '()',
    }

    #default message (the one created if missing)
    message = {'reply_to': '*****@*****.**'}

    def run(self, cr, uid):
        """ find the reminder recipients and send them an email """
        #get all companies
        companies = self.pool.get('res.company').browse(
            cr, uid, self.pool.get('res.company').search(cr, uid, []))
        #for each company, get all recipients
        recipients = []
        for c in companies:
            recipients += self.get_recipients(cr, uid, {}, c)
        #get the message to send
        # NOTE(review): get_message_id is not visible in this chunk --
        # presumably defined further down in this class.
        message_id = self.get_message_id(cr, uid, {})
        message_data = self.browse(cr, uid, message_id)
        #send them email if they have an email defined
        emails = []
        for r in recipients:
            if r.work_email:
                emails.append(r.work_email)
        if emails:
            # Recipients go in BCC so they do not see each other.
            tools.email_send(message_data.reply_to, [], message_data.subject,
                             message_data.message, email_bcc=emails)

    def get_recipients(self, cr, uid, context, company):
        """return the list of users that must recieve the email """
        #get the whole list of employees
        employees = self.compute_employees_list(cr, uid, context, company)
        #periods (the 13 most recent timesheet periods)
        periods = self.compute_periods(company, time.gmtime(), 13)
        #remove the first one because it's the current one
        del periods[0]
        recipients = []
        # for each employee
        for e in employees:
            #if the user still in the company?
            in_company = True  # NOTE(review): assigned but never read
            if (e.started != False and
                    e.started > time.strftime('%Y-%m-%d')) or (
                    e.ended != False and
                    e.ended < time.strftime('%Y-%m-%d')):
                #do nothing... this user is not concerned anymore by
                #timesheets
                pass
            else:
                #and for each periods
                for p_index in range(len(periods)):
                    p = periods[p_index]
                    status = self.compute_timesheet_status(
                        cr, uid, context, company, e, p)
                    # if there is a missing sheet or a draft sheet
                    # and the user can receive alerts
                    #then we must alert the user
                    if status in ['Missing', 'Draft'
                                  ] and e.receive_timesheet_alerts:
                        recipients.append(e)
                        break  # no need to go further for this user, he is now added in the list, go to the next one
        return recipients

    def compute_periods(self, company, date, periods_number=5):
        """ return the timeranges to display. This is the 5 last timesheets
        (depending on the timerange defined for the company) """
        periods = []
        (last_start_date,
         last_end_date) = self.get_last_period_dates(company, date)
        for cpt in range(periods_number):
            #find the delta between last_XXX_date to XXX_date
            if company.timesheet_range == 'month':
                delta = DateTime.RelativeDateTime(months=-cpt)
            elif company.timesheet_range == 'week':
                delta = DateTime.RelativeDateTime(weeks=-cpt)
            elif company.timesheet_range == 'year':
                delta = DateTime.RelativeDateTime(years=-cpt)
            start_date = last_start_date + delta
            end_date = last_end_date + delta
            periods.append((start_date, end_date))
        return periods

    def get_last_period_dates(self, company, date):
        """ return the start date and end date of the last period to display
        """
        # return the first day and last day of the month
        if company.timesheet_range == 'month':
            start_date = DateTime.Date(date.tm_year, date.tm_mon, 1)
            end_date = start_date + DateTime.RelativeDateTime(months=+1)
        #return the first and last days of the week
        elif company.timesheet_range == 'week':
            start_date = DateTime.Date(
                date.tm_year, date.tm_mon,
                date.tm_mday) + DateTime.RelativeDateTime(
                    weekday=(DateTime.Monday, 0))
            end_date = DateTime.Date(
                date.tm_year, date.tm_mon,
                date.tm_mday) + DateTime.RelativeDateTime(
                    weekday=(DateTime.Sunday, 0))
        # return the first and last days of the year
        elif company.timesheet_range == 'year':
            start_date = DateTime.Date(date.tm_year, 1, 1)
            end_date = DateTime.Date(date.tm_year, 12, 31)
        # NOTE(review): if timesheet_range has any other value, start_date /
        # end_date are unbound and this raises UnboundLocalError.
        return (start_date, end_date)

    def compute_employees_list(self, cr, uid, context, company):
        """ return a dictionnary of lists of employees ids linked to the
        companies (param company) """
        hr_employee_object = self.pool.get('hr.employee')
        employees = []  # NOTE(review): assigned but never read
        #employees associated with a Tinyerp user
        users_ids = self.pool.get('res.users').search(
            cr, uid, [('company_id', '=', company.id)], context=context)
        employees_users_ids = hr_employee_object.search(
            cr, uid, [('user_id', 'in', users_ids)])
        #combine the two employees list, remove duplicates and order by name
        #DESC
        employees_ids = hr_employee_object.search(
            cr, uid, [('id', 'in', employees_users_ids)], order="name ASC",
            context=context)
        return hr_employee_object.browse(cr, uid, employees_ids,
                                         context=context)

    def compute_timesheet_status(self, cr, uid, context, obj, employee,
                                 period):
        """ return the timesheet status for a user and a period

        Possible values: 'Not in Company', 'Missing', 'Draft', 'Confirmed',
        or 'Error' (default when nothing matches).
        """
        status = 'Error'
        time_from = time.strptime(str(period[0]), "%Y-%m-%d %H:%M:%S.00")
        time_to = time.strptime(str(period[1]), "%Y-%m-%d %H:%M:%S.00")
        #if the starting date is defined and is greater than the date_to, it
        #means the employee wasn't one at this period
        if employee.started != None and (
                employee.started != False) and time.strptime(
                employee.started, "%Y-%m-%d") > time_to:
            status = 'Not in Company'
        #if the ending date is defined and is earlier than the date_from, it
        #means the employee wasn't one at this period
        elif employee.ended != None and (
                employee.ended != False) and time.strptime(
                employee.ended, "%Y-%m-%d") < time_from:
            status = 'Not in Company'
        #the employee was in the company at this period
        else:
            # does the timesheet exsists in db and what is its status?
            timeformat = "%Y-%m-%d"
            date_from = time.strftime(timeformat, time_from)
            date_to = time.strftime(timeformat, time_to)
            sheet = []  # NOTE(review): assigned but never read
            if employee.user_id.id != False:
                # NOTE(review): dates are %-interpolated into the SQL; values
                # come from internal period computation, not user input.
                query = """SELECT state, date_from, date_to
                    FROM hr_timesheet_sheet_sheet
                    WHERE user_id = %s
                    AND date_from >= '%s'
                    AND date_to <= '%s' """ % (employee.user_id.id,
                                               date_from, date_to)
                cr.execute(query)
                sheets = cr.dictfetchall()
                #the tiemsheet does not exists in db
                if len(sheets) == 0:
                    status = 'Missing'
                if len(sheets) > 0:
                    status = 'Confirmed'
                    # any draft sheet downgrades the whole period to 'Draft'
                    for s in sheets:
                        if s['state'] == 'draft':
                            status = 'Draft'
        return status

    def get_cron_id(self, cr, uid, context):
        """return the reminder cron's id. Create one if the cron does not
        exists """
        cron_id = 0
        cron_obj = self.pool.get('ir.cron')
        try:
            #find the cron that send messages
            cron_id = cron_obj.search(
                cr, uid,
                [('function', 'ilike', self.cron['function']),
                 ('model', 'ilike', self.cron['model'])],
                context={'active_test': False})
            cron_id = int(cron_id[0])
        except Exception, e:
            print e
            print 'warning cron not found one will be created'
            pass  # ignore if the cron is missing cause we are going to create it in db
        #the cron does not exists
        if not cron_id:
            #translate
            self.cron['name'] = _('timesheet status reminder')
            cron_id = cron_obj.create(cr, uid, self.cron, context)
        return cron_id
    # -- tail of _save_file (its ``def`` lies before this chunk) --
        finally:
            ofile.close()
        return True

    def _set_image(self, cr, uid, id, name, value, arg, context=None):
        """Inverse of the 'file' function field: store the uploaded image in
        the company's local media repository when one is configured,
        otherwise fall back to the binary column in the database."""
        local_media_repository = self.pool.get(
            'res.company').get_local_media_repository(cr, uid,
                                                      context=context)
        if local_media_repository:
            image = self.browse(cr, uid, id, context=context)
            # File is stored under <repo>/<product default_code>/<name><ext>.
            # NOTE(review): 'extention' spelling matches the field name below.
            return self._save_file(
                os.path.join(local_media_repository,
                             image.product_id.default_code),
                '%s%s' % (image.name, image.extention), value)
        return self.write(cr, uid, id, {'file_db_store': value},
                          context=context)

    _columns = {
        'name': fields.char('Image Title', size=100, required=True),
        'extention': fields.char('file extention', size=6),
        'link': fields.boolean(
            'Link?',
            help="Images can be linked from files on your file system or remote (Preferred)"),
        'file_db_store': fields.binary('Image stored in database'),
        # Function field dispatching to repository or database storage.
        'file': fields.function(_get_image, fnct_inv=_set_image,
                                type="binary", method=True,
                                filters='*.png,*.jpg,*.gif'),
        'url': fields.char('File Location', size=250),
        'comments': fields.text('Comments'),
        'product_id': fields.many2one('product.product', 'Product')
    }

    _defaults = {
        'link': lambda *a: False,
    }

    _sql_constraints = [
        ('uniq_name_product_id', 'UNIQUE(product_id, name)',
         _('A product can have only one image with the same name'))]


product_images()
        # --- fragment of an Aeroo-reports ``_columns`` dict: the dict's
        # opening brace and the enclosing class lie outside this chunk ---
        'styles_mode': fields.selection([
            ('default', 'Not used'),
            ('global', 'Global'),
            ('specified', 'Specified'),
        ], string='Stylesheet'),
        'stylesheet_id': fields.many2one('report.stylesheets',
                                         'Template Stylesheet'),
        'preload_mode': fields.selection([
            ('static', _('Static')),
            ('preload', _('Preload')),
        ], 'Preload Mode'),
        'tml_source': fields.selection([
            ('database', 'Database'),
            ('file', 'File'),
            ('parser', 'Parser'),
        ], 'Template source', select=True),
        'parser_def': fields.text('Parser Definition'),
        'parser_loc': fields.char(
            'Parser location', size=128,
            help="Path to the parser location. Beginning of the path must be start with the module name!\nLike this: {module name}/{path to the parser.py file}"),
        'parser_state': fields.selection([
            ('default', _('Default')),
            ('def', _('Definition')),
            ('loc', _('Location')),
        ], 'State of Parser', select=True),
        'in_format': fields.selection(_get_in_mimetypes,
                                      'Template Mime-type'),
        'out_format': fields.many2one('report.mimetypes', 'Output Mime-type'),
        # Function field: template content read/written via the two handlers.
        'report_sxw_content': fields.function(_report_content,
                                              fnct_inv=_report_content_inv,
                                              method=True, type='binary',
                                              string='SXW content',),
        'active': fields.boolean('Active',
                                 help='Disables the report if unchecked.'),
        'report_wizard': fields.boolean('Report Wizard'),
        'copies': fields.integer('Number of Copies'),
        'fallback_false': fields.boolean('Disable Format Fallback'),
        # -- tail of a method (its ``def`` lies before this chunk): maps each
        # res_id to its fully-qualified "module.name" external id --
        result[record["res_id"]] = "%(module)s.%(name)s" % record
        return result

    # NOTE(review): this ``_columns`` dict is truncated in this chunk -- its
    # closing brace lies beyond the visible text.
    _columns = {
        "charset": fields.selection(_get_encodings, string="Charset",
                                    required=True),
        "content_fname": fields.char(
            "Override Extension", size=64,
            help="Here you can override output file extension"),
        "styles_mode": fields.selection(
            [("default", "Not used"), ("global", "Global"),
             ("specified", "Specified")], string="Stylesheet"
        ),
        #'report_styles' : fields.binary('Template Styles', help='OpenOffice stylesheet (.odt)'),
        "stylesheet_id": fields.many2one("report.stylesheets",
                                         "Template Stylesheet"),
        "preload_mode": fields.selection(
            [("static", _("Static")), ("preload", _("Preload"))],
            "Preload Mode"),
        "tml_source": fields.selection(
            [("database", "Database"), ("file", "File"),
             ("parser", "Parser")], "Template source", select=True
        ),
        "parser_def": fields.text("Parser Definition"),
        "parser_loc": fields.char(
            "Parser location",
            size=128,
            help="Path to the parser location. Beginning of the path must be start with the module name!\nLike this: {module name}/{path to the parser.py file}",
        ),
        "parser_state": fields.selection(
            [("default", _("Default")), ("def", _("Definition")),
             ("loc", _("Location"))],
            "State of Parser",
            select=True,
        ),
        "in_format": fields.selection(_get_in_mimetypes,
                                      "Template Mime-type"),
        "out_format": fields.many2one("report.mimetypes",
                                      "Output Mime-type"),
        "report_sxw_content": fields.function(
            _report_content, fnct_inv=_report_content_inv, method=True,
            type="binary", string="SXW content"
        ),
class account_analytic_account(osv.osv):
    """Analytic account (cost/profit center).

    Accounts form a parent/child tree. The debit / credit / balance /
    quantity figures are computed from the analytic lines of the whole
    subtree, with child amounts converted into each parent's currency.
    """
    _name = 'account.analytic.account'
    _description = 'Analytic Account'

    def _compute_level_tree(self, cr, uid, ids, child_ids, res, field_names, context=None):
        # Roll the per-account figures in `res` up the account tree.
        # `child_ids` is the full set of accounts that have a row in `res`;
        # accounts in `ids` but not in `child_ids` are skipped.
        # Amounts (but never 'quantity') are converted from the child's
        # currency into the parent's currency before being added.
        currency_obj = self.pool.get('res.currency')
        recres = {}
        def recursive_computation(account):
            # Returns this account's own figures plus the (converted)
            # figures of its whole subtree.
            result2 = res[account.id].copy()
            for son in account.child_ids:
                result = recursive_computation(son)
                for field in field_names:
                    if (account.currency_id.id != son.currency_id.id) and (field != 'quantity'):
                        result[field] = currency_obj.compute(cr, uid, son.currency_id.id, account.currency_id.id, result[field], context=context)
                    result2[field] += result[field]
            return result2
        for account in self.browse(cr, uid, ids, context=context):
            if account.id not in child_ids:
                continue
            recres[account.id] = recursive_computation(account)
        return recres

    def _debit_credit_bal_qtty(self, cr, uid, ids, fields, arg, context=None):
        # Function-field: compute debit/credit/balance/quantity for `ids`,
        # aggregating analytic lines over each account's whole subtree.
        # Honors optional context keys 'from_date' / 'to_date' to restrict
        # the line date range.
        res = {}
        if context is None:
            context = {}
        child_ids = tuple(self.search(cr, uid, [('parent_id', 'child_of', ids)]))
        # Pre-fill every account with zeros so accounts without lines
        # still get a complete result dict.
        for i in child_ids:
            res[i] = {}
            for n in fields:
                res[i][n] = 0.0
        if not child_ids:
            return res
        where_date = ''
        where_clause_args = [tuple(child_ids)]
        if context.get('from_date', False):
            where_date += " AND l.date >= %s"
            where_clause_args += [context['from_date']]
        if context.get('to_date', False):
            where_date += " AND l.date <= %s"
            where_clause_args += [context['to_date']]
        # Positive line amounts count as debit, negative ones as credit;
        # parameters are passed separately (no SQL injection from values).
        cr.execute("""
              SELECT a.id,
                     sum(
                         CASE WHEN l.amount > 0
                         THEN l.amount
                         ELSE 0.0
                         END
                          ) as debit,
                     sum(
                         CASE WHEN l.amount < 0
                         THEN -l.amount
                         ELSE 0.0
                         END
                          ) as credit,
                     COALESCE(SUM(l.amount),0) AS balance,
                     COALESCE(SUM(l.unit_amount),0) AS quantity
              FROM account_analytic_account a
                  LEFT JOIN account_analytic_line l ON (a.id = l.account_id)
              WHERE a.id IN %s
              """ + where_date + """
              GROUP BY a.id""", where_clause_args)
        for row in cr.dictfetchall():
            res[row['id']] = {}
            for field in fields:
                res[row['id']][field] = row[field]
        # Aggregate the raw per-account figures up the tree.
        return self._compute_level_tree(cr, uid, ids, child_ids, res, fields, context)

    def name_get(self, cr, uid, ids, context=None):
        # Display name is the full path from the root: "Parent / Child / ...".
        if not ids:
            return []
        res = []
        for account in self.browse(cr, uid, ids, context=context):
            data = []
            acc = account
            while acc:
                data.insert(0, acc.name)
                acc = acc.parent_id
            data = ' / '.join(data)
            res.append((account.id, data))
        return res

    def _complete_name_calc(self, cr, uid, ids, prop, unknow_none, unknow_dict):
        # Function-field backing 'complete_name': reuse name_get().
        res = self.name_get(cr, uid, ids)
        return dict(res)

    def _child_compute(self, cr, uid, ids, name, arg, context=None):
        # Function-field backing 'child_complete_ids': direct children,
        # excluding template accounts.
        result = {}
        if context is None:
            context = {}
        for account in self.browse(cr, uid, ids, context=context):
            result[account.id] = map(lambda x: x.id, [child for child in account.child_ids if child.state != 'template'])
        return result

    def _get_analytic_account(self, cr, uid, ids, context=None):
        # store= trigger for 'currency_id': when a company's currency
        # changes, return all analytic accounts of that company so the
        # function field is recomputed.
        company_obj = self.pool.get('res.company')
        analytic_obj = self.pool.get('account.analytic.account')
        accounts = []
        for company in company_obj.browse(cr, uid, ids, context=context):
            accounts += analytic_obj.search(cr, uid, [('company_id', '=', company.id)])
        return accounts

    def _set_company_currency(self, cr, uid, ids, name, value, arg, context=None):
        # Inverse of the 'currency_id' function field: the currency may
        # only be written when it matches the company's currency (or when
        # no company is set).
        # NOTE(review): `type(ids) != type([])` would be clearer as
        # isinstance(ids, list).
        if type(ids) != type([]):
            ids=[ids]
        for account in self.browse(cr, uid, ids, context=context):
            if account.company_id:
                if account.company_id.currency_id.id != value:
                    raise osv.except_osv(_('Error !'), _("If you set a company, the currency selected has to be the same as it's currency. \nYou can remove the company belonging, and thus change the currency, only on analytic account of type 'view'. This can be really usefull for consolidation purposes of several companies charts with different currencies, for example."))
        # Bypass the ORM and write the currency directly (function fields
        # have no physical column write path).
        return cr.execute("""update account_analytic_account set currency_id=%s where id=%s""", (value, account.id, ))

    def _currency(self, cr, uid, ids, field_name, arg, context=None):
        # Function-field getter: the effective currency is the company's
        # currency when a company is set, else the stored one.
        result = {}
        for rec in self.browse(cr, uid, ids, context=context):
            if rec.company_id:
                result[rec.id] = rec.company_id.currency_id.id
            else:
                result[rec.id] = rec.currency_id.id
        return result

    _columns = {
        'name': fields.char('Account Name', size=128, required=True),
        'complete_name': fields.function(_complete_name_calc, type='char', string='Full Account Name'),
        'code': fields.char('Code/Reference', size=24, select=True),
        'type': fields.selection([('view','View'), ('normal','Normal')], 'Account Type', help='If you select the View Type, it means you won\'t allow to create journal entries using that account.'),
        'description': fields.text('Description'),
        'parent_id': fields.many2one('account.analytic.account', 'Parent Analytic Account', select=2),
        'child_ids': fields.one2many('account.analytic.account', 'parent_id', 'Child Accounts'),
        'child_complete_ids': fields.function(_child_compute, relation='account.analytic.account', string="Account Hierarchy", type='many2many'),
        'line_ids': fields.one2many('account.analytic.line', 'account_id', 'Analytic Entries'),
        # The four figures below share one multi-computation
        # (_debit_credit_bal_qtty) so they are computed in a single pass.
        'balance': fields.function(_debit_credit_bal_qtty, type='float', string='Balance', multi='debit_credit_bal_qtty', digits_compute=dp.get_precision('Account')),
        'debit': fields.function(_debit_credit_bal_qtty, type='float', string='Debit', multi='debit_credit_bal_qtty', digits_compute=dp.get_precision('Account')),
        'credit': fields.function(_debit_credit_bal_qtty, type='float', string='Credit', multi='debit_credit_bal_qtty', digits_compute=dp.get_precision('Account')),
        'quantity': fields.function(_debit_credit_bal_qtty, type='float', string='Quantity', multi='debit_credit_bal_qtty'),
        'quantity_max': fields.float('Maximum Time', help='Sets the higher limit of time to work on the contract.'),
        'partner_id': fields.many2one('res.partner', 'Partner'),
        'contact_id': fields.many2one('res.partner.address', 'Contact'),
        'user_id': fields.many2one('res.users', 'Account Manager'),
        'date_start': fields.date('Date Start'),
        'date': fields.date('Date End', select=True),
        'company_id': fields.many2one('res.company', 'Company', required=False), #not required because we want to allow different companies to use the same chart of account, except for leaf accounts.
        'state': fields.selection([('template', 'Template'),('draft','New'),('open','Open'), ('pending','Pending'),('cancelled', 'Cancelled'),('close','Closed')], 'State', required=True,
                                  help='* When an account is created its in \'Draft\' state.\
\n* If any associated partner is there, it can be in \'Open\' state.\
\n* If any pending balance is there it can be in \'Pending\'. \
\n* And finally when all the transactions are over, it can be in \'Close\' state. \
\n* The project can be in either if the states \'Template\' and \'Running\'.\n If it is template then we can make projects based on the template projects. If its in \'Running\' state it is a normal project.\
\n If it is to be reviewed then the state is \'Pending\'.\n When the project is completed the state is set to \'Done\'.'),
        'currency_id': fields.function(_currency, fnct_inv=_set_company_currency,
            # Recompute when the owning company's currency changes.
            store = {
                'res.company': (_get_analytic_account, ['currency_id'], 10),
            }, string='Currency', type='many2one', relation='res.currency'),
    }

    def _default_company(self, cr, uid, context=None):
        # Default company: the user's own company, else the first root company.
        user = self.pool.get('res.users').browse(cr, uid, uid, context=context)
        if user.company_id:
            return user.company_id.id
        return self.pool.get('res.company').search(cr, uid, [('parent_id', '=', False)])[0]

    def _get_default_currency(self, cr, uid, context=None):
        # Default currency: the current user's company currency.
        user = self.pool.get('res.users').browse(cr, uid, uid, context=context)
        return user.company_id.currency_id.id

    _defaults = {
        'type': 'normal',
        'company_id': _default_company,
        'state': 'open',
        'user_id': lambda self, cr, uid, ctx: uid,
        'partner_id': lambda self, cr, uid, ctx: ctx.get('partner_id', False),
        'contact_id': lambda self, cr, uid, ctx: ctx.get('contact_id', False),
        'date_start': lambda *a: time.strftime('%Y-%m-%d'),
        'currency_id': _get_default_currency,
    }

    def check_recursion(self, cr, uid, ids, context=None, parent=None):
        # Constraint helper: delegate to the ORM's generic recursion check
        # on parent_id.
        return super(account_analytic_account, self)._check_recursion(cr, uid, ids, context=context, parent=parent)

    _order = 'name asc'
    _constraints = [
        (check_recursion, 'Error! You can not create recursive analytic accounts.', ['parent_id']),
    ]

    def copy(self, cr, uid, id, default=None, context=None):
        # Duplicates must not reuse the unique code nor carry over the
        # analytic entries.
        if not default:
            default = {}
        default['code'] = False
        default['line_ids'] = []
        return super(account_analytic_account, self).copy(cr, uid, id, default, context=context)

    def on_change_partner_id(self, cr, uid, id, partner_id, context={}):
        # onchange: propose the partner's invoice address as contact.
        # NOTE(review): mutable default `context={}` — conventionally this
        # should be `context=None`; confirm no caller relies on it.
        if not partner_id:
            return {'value': {'contact_id': False}}
        addr = self.pool.get('res.partner').address_get(cr, uid, [partner_id], ['invoice'])
        return {'value': {'contact_id': addr.get('invoice', False)}}

    def on_change_company(self, cr, uid, id, company_id):
        # onchange: align the currency with the selected company's currency.
        if not company_id:
            return {}
        currency = self.pool.get('res.company').read(cr, uid, [company_id], ['currency_id'])[0]['currency_id']
        return {'value': {'currency_id': currency}}

    def on_change_parent(self, cr, uid, id, parent_id):
        # onchange: inherit the partner from the chosen parent account.
        if not parent_id:
            return {}
        parent = self.read(cr, uid, [parent_id], ['partner_id','code'])[0]
        if parent['partner_id']:
            partner = parent['partner_id'][0]
        else:
            partner = False
        res = {'value': {}}
        if partner:
            res['value']['partner_id'] = partner
        return res

    def onchange_partner_id(self, cr, uid, ids, partner, context=None):
        # onchange (alternate signature used by some views): propose the
        # partner's 'contact' address.
        partner_obj = self.pool.get('res.partner')
        if not partner:
            return {'value':{'contact_id': False}}
        address = partner_obj.address_get(cr, uid, [partner], ['contact'])
        return {'value':{'contact_id': address['contact']}}

    def name_search(self, cr, uid, name, args=None, operator='ilike', context=None, limit=100):
        # Search by code first (exact), then by "/"-separated name path
        # (each segment narrowed to children of the previous match); the
        # final result also includes all descendants of the matches.
        if not args:
            args=[]
        if context is None:
            context={}
        if context.get('current_model') == 'project.project':
            # Special case: when called from projects, restrict to the
            # analytic accounts actually linked to a project.
            cr.execute("select analytic_account_id from project_project")
            project_ids = [x[0] for x in cr.fetchall()]
            return self.name_get(cr, uid, project_ids, context=context)
        if name:
            account = self.search(cr, uid, [('code', '=', name)] + args, limit=limit, context=context)
            if not account:
                names=map(lambda i : i.strip(),name.split('/'))
                for i in range(len(names)):
                    dom=[('name', operator, names[i])]
                    if i>0:
                        dom+=[('id','child_of',account)]
                    account = self.search(cr, uid, dom, limit=limit, context=context)
                # Add every descendant of the matched accounts, level by level.
                newacc = account
                while newacc:
                    newacc = self.search(cr, uid, [('parent_id', 'in', newacc)], limit=limit, context=context)
                    account += newacc
            if args:
                account = self.search(cr, uid, [('id', 'in', account)] + args, limit=limit, context=context)
        else:
            account = self.search(cr, uid, args, limit=limit, context=context)
        return self.name_get(cr, uid, account, context=context)
except AttributeError, e: _logger.warning( 'Data not found for items of %s %s', module_rec.name, str(e)) except Exception, e: _logger.warning('Unknown error while fetching data of %s', module_rec.name, exc_info=True) for key, value in res.iteritems(): for k, v in res[key].iteritems(): res[key][k] = "\n".join(sorted(v)) return res _columns = { 'name': fields.char("Name", size=128, readonly=True, required=True, select=True), 'category_id': fields.many2one('ir.module.category', 'Category', readonly=True, select=True), 'shortdesc': fields.char('Short Description', size=256, readonly=True, translate=True), 'description': fields.text("Description", readonly=True, translate=True), 'author': fields.char("Author", size=128, readonly=True), 'maintainer': fields.char('Maintainer', size=128, readonly=True), 'contributors': fields.text('Contributors', readonly=True), 'website': fields.char("Website", size=256, readonly=True), # attention: Incorrect field names !! # installed_version refer the latest version (the one on disk) # latest_version refer the installed version (the one in database) # published_version refer the version available on the repository 'installed_version': fields.function(_get_latest_version, string='Latest version', type='char'), 'latest_version': fields.char('Installed version', size=64, readonly=True), 'published_version': fields.char('Published Version', size=64, readonly=True), 'url': fields.char('URL', size=128, readonly=True),
class perintah_kerja(osv.osv):
    """Work order ("perintah kerja").

    Tracks a customer work order (pabrikasi / man power / service / other)
    from draft through approval to done, and on validation of a
    'pabrikasi' order generates the internal stock pickings for raw
    material consumption and finished goods.
    """
    _name = "perintah.kerja"
    _columns = {
        # Most fields are read-only except in draft state.
        'name': fields.char('Work Order', required=True, size=64, readonly=True, states={'draft': [('readonly', False)]}),
        'date' : fields.date('Order Date', required=True, readonly=True, states={'draft': [('readonly', False)]}),
        'type': fields.selection([('other', 'Others'), ('pabrikasi', 'Pabrikasi'), ('man', 'Man Power'), ('service', 'Service')], 'Type', readonly=True, states={'draft': [('readonly', False)]}),
        'sale_id': fields.many2one('sale.order', 'Sale Order', required=False, readonly=True, domain=[('state','in', ('progress','manual'))], states={'draft': [('readonly', False)]}),
        'partner_id': fields.many2one('res.partner', 'Customer', domain=[('customer','=', True)], readonly=True, states={'draft': [('readonly', False)]}),
        'kontrak': fields.char('Contract No', size=64, readonly=True, states={'draft': [('readonly', False)]}),
        'kontrakdate' : fields.date('Contract Date', required=True, readonly=True, states={'draft': [('readonly', False)]}),
        'workshop': fields.char('Working Place', size=64, readonly=True, states={'draft': [('readonly', False)]}),
        'state': fields.selection([('draft', 'Draft'), ('approve', 'Approved'), ('done', 'Done'), ('cancel', 'Cancel')], 'State', readonly=True),
        'perintah_lines': fields.one2many('perintah.kerja.line', 'perintah_id', 'Work Lines', readonly=True, states={'draft': [('readonly', False)]}),
        'material_lines': fields.one2many('raw.material.line', 'perintah_id', 'Material Consumption', readonly=True, states={'draft': [('readonly', False)]}),
        'delivery_date' : fields.date('Delivery Date', required=False, readonly=True, states={'draft': [('readonly', False)]}),
        'write_date': fields.datetime('Date Modified', readonly=True),
        'write_uid': fields.many2one('res.users', 'Last Modification User', readonly=True),
        'create_date': fields.datetime('Date Created', readonly=True),
        'create_uid': fields.many2one('res.users', 'Creator', readonly=True),
        # Audit trail of the approval chain.
        'creator' : fields.many2one('res.users', 'Created by'),
        'checker' : fields.many2one('res.users', 'Checked by'),
        'approver' : fields.many2one('res.users', 'Approved by'),
        'note': fields.text('Notes'),
        'terms':fields.text('Terms & Condition'),
        'location_src_id': fields.many2one('stock.location', 'Raw Materials Location', required=True, readonly=True, states={'draft':[('readonly',False)]}),
        'location_dest_id': fields.many2one('stock.location', 'Finished Products Location', required=True, readonly=True, states={'draft':[('readonly',False)]}),
    }
    _defaults = {
        'name': '/',
        'note': '-',
        'type': 'pabrikasi',
        'state': 'draft',
        # NOTE(review): hardcoded stock.location database id (12) —
        # database-specific; breaks on other installations. Confirm.
        'location_src_id': 12,
        'location_dest_id': 12,
        'date': time.strftime('%Y-%m-%d'),
        'kontrakdate': time.strftime('%Y-%m-%d'),
    }
    _order = "name desc"

    def create(self, cr, uid, vals, context=None):
        # Build the work-order reference from the 'perintah.kerja' sequence:
        # "<seq>A/SBM-ADM/<salesman initials>-<creator initials>/<roman month>/<year>".
        # The salesman comes from the 'pr' record when vals['special'] is
        # True, otherwise from the linked sale order.
        # NOTE(review): vals['special'] / vals['pr_id'] / vals['sale_id']
        # are accessed without .get() — a KeyError is raised if the caller
        # omits them; confirm all callers always pass them.
        # print vals
        if vals['special']==True:
            person = self.pool.get('res.users').browse(cr, uid, uid)
            # Roman numerals indexed by month number (index 0 unused).
            rom = [0, 'I', 'II', 'III', 'IV', 'V', 'VI', 'VII', 'VIII', 'IX', 'X', 'XI', 'XII']
            # usa = 'SPC'
            usa = str(self.pool.get('pr').browse(cr, uid, vals['pr_id']).salesman_id.initial)
            val = self.pool.get('ir.sequence').get(cr, uid, 'perintah.kerja').split('/')
            use = str(person.initial)
            vals['creator'] = person.id
            vals['name'] = val[-1]+'A/SBM-ADM/'+usa+'-'+use+'/'+rom[int(val[2])]+'/'+val[1]
            return super(perintah_kerja, self).create(cr, uid, vals, context=context)
        else:
            person = self.pool.get('res.users').browse(cr, uid, uid)
            rom = [0, 'I', 'II', 'III', 'IV', 'V', 'VI', 'VII', 'VIII', 'IX', 'X', 'XI', 'XII']
            usa = str(self.pool.get('sale.order').browse(cr, uid, vals['sale_id']).user_id.initial)
            val = self.pool.get('ir.sequence').get(cr, uid, 'perintah.kerja').split('/')
            use = str(person.initial)
            vals['creator'] = person.id
            vals['name'] = val[-1]+'A/SBM-ADM/'+usa+'-'+use+'/'+rom[int(val[2])]+'/'+val[1]
            return super(perintah_kerja, self).create(cr, uid, vals, context=context)
        # old implementation, kept for reference:
        # person = self.pool.get('res.users').browse(cr, uid, uid)
        # rom = [0, 'I', 'II', 'III', 'IV', 'V', 'VI', 'VII', 'VIII', 'IX', 'X', 'XI', 'XII']
        # usa = str(self.pool.get('sale.order').browse(cr, uid, vals['sale_id']).user_id.initial)
        # val = self.pool.get('ir.sequence').get(cr, uid, 'perintah.kerja').split('/')
        # use = str(person.initial)
        # vals['creator'] = person.id
        # vals['name'] = val[-1]+'A/SBM-ADM/'+usa+'-'+use+'/'+rom[int(val[2])]+'/'+val[1]
        # return super(perintah_kerja, self).create(cr, uid, vals, context=context)
        # return False

    def sale_change(self, cr, uid, ids, sale):
        # onchange on sale_id: copy the sale order lines into the work
        # lines and mirror the contract/customer/date information.
        if sale:
            res = {}; line = []
            obj_sale = self.pool.get('sale.order').browse(cr, uid, sale)
            for x in obj_sale.order_line:
                line.append({
                    'product_id' : x.product_id.id,
                    'product_qty': x.product_uom_qty,
                    'product_uom': x.product_uom.id,
                    'name': x.name
                    # 'name': '['+str(x.product_id.code)+']' + ' ' + x.product_id.name
                })
            res['perintah_lines'] = line
            res['kontrak'] = obj_sale.client_order_ref
            res['partner_id'] = obj_sale.partner_id.id
            res['kontrakdate'] = obj_sale.date_order
            res['delivery_date'] = obj_sale.delivery_date
            return {'value': res}
        return True

    def work_cancel(self, cr, uid, ids, context=None):
        # Button: reset to draft.
        self.write(cr, uid, ids, {'state': 'draft'})
        return True

    def btn_cancel(self, cr, uid, ids, context=None):
        # Button: cancel the work order.
        self.write(cr, uid, ids, {'state': 'cancel'})
        return True

    def work_confirm(self, cr, uid, ids, context=None):
        # Button: confirm the order (draft -> approve); requires at least
        # one work line and records the confirming user as checker.
        val = self.browse(cr, uid, ids)[0]
        if not val.perintah_lines:
            raise osv.except_osv(('Perhatian !'), ('Tabel work line harus diisi !'))
        self.write(cr, uid, ids, {'state': 'approve', 'checker': self.pool.get('res.users').browse(cr, uid, uid).id})
        return True

    def work_validate(self, cr, uid, ids, context=None):
        # Button: validate (approve -> done). For 'pabrikasi' orders,
        # create two internal pickings: one moving raw materials to the
        # production location, one moving finished goods out of it; then
        # confirm and force-assign both through the workflow service.
        val = self.browse(cr, uid, ids, context={})[0]
        if val.type == 'pabrikasi' :
            seq_out_mnfct = self.pool.get('ir.sequence').get(cr, uid, 'stock.picking.out.manufacture')
            seq_from_mnfct = self.pool.get('ir.sequence').get(cr, uid, 'stock.picking.from.manufacture')
            if not seq_out_mnfct:
                raise osv.except_osv(_('Error'), _('stock.picking.out.manufacture Sequence not exist.\nPlease contact system administrator'))
            if not seq_from_mnfct:
                raise osv.except_osv(_('Error'), _('stock.picking.from.manufacture Sequence not exist.\nPlease contact system administrator.'))
            # Picking consuming the raw materials.
            # NOTE(review): company_id=1 and location id 7 are hardcoded
            # database ids — database-specific; confirm.
            material_id = self.pool.get('stock.picking').create(cr,uid, {
                'name': seq_out_mnfct,
                'origin': val.name,
                'type': 'internal',
                'move_type': 'one',
                'state': 'draft',
                'date': val.date,
                'auto_picking': True,
                'company_id': 1,
            })
            # Picking producing the finished goods.
            goods_id = self.pool.get('stock.picking').create(cr,uid, {
                'name': seq_from_mnfct,
                'origin': val.name,
                'type': 'internal',
                'move_type': 'one',
                'state': 'draft',
                'date': val.date,
                'auto_picking': True,
                'company_id': 1,
            })
            # One move per material line: raw-material location -> location 7.
            for x in val.material_lines:
                self.pool.get('stock.move').create(cr,uid, {
                    'name': x.product_id.default_code + x.product_id.name_template,
                    'picking_id': material_id,
                    'product_id': x.product_id.id,
                    'product_qty': x.product_qty,
                    'product_uom': x.product_uom.id,
                    'date': val.date,
                    'location_id': val.location_src_id.id,
                    'location_dest_id': 7,
                    'state': 'waiting',
                    'company_id': 1})
            prodlot = self.pool.get('stock.production.lot')
            # One move per work line: location 7 -> finished-goods location,
            # attaching (creating if needed) a "<code>-WS" manufacture lot
            # for products tracked in production.
            for x in val.perintah_lines:
                prodlot_obj_id = False
                if x.product_id.track_production:
                    # check if manufacture lot exists
                    lot_name_ws = x.product_id.default_code+'-WS'
                    get_lot = prodlot.search(cr, uid, [('product_id','=',x.product_id.id), ('name','=',lot_name_ws)])
                    if not get_lot:
                        # set new serial
                        prodlot_obj_id = prodlot.create(
                            cr, uid, {
                                'name': lot_name_ws,
                                'product_id': x.product_id.id,
                                'desc': 'Manufacture Lot',
                            }, context=context
                        )
                    else:
                        prodlot_obj_id = get_lot[0]
                # set serial number for manufacture lot
                print prodlot_obj_id,">>>>>>>>>>>>>>"
                self.pool.get('stock.move').create(cr,uid, {
                    'name': x.product_id.default_code + x.product_id.name_template,
                    'picking_id': goods_id,
                    'product_id': x.product_id.id,
                    'product_qty': x.product_qty,
                    'product_uom': x.product_uom.id,
                    'date': val.date,
                    'location_id': 7,
                    'location_dest_id': val.location_dest_id.id,
                    'state': 'waiting',
                    'company_id': 1
                    ,'prodlot_id':prodlot_obj_id or False})
            # Push both pickings through the workflow and reserve stock.
            wf_service = netsvc.LocalService("workflow")
            wf_service.trg_validate(uid, 'stock.picking', goods_id, 'button_confirm', cr)
            wf_service.trg_validate(uid, 'stock.picking', material_id, 'button_confirm', cr)
            self.pool.get('stock.picking').force_assign(cr, uid, [goods_id, material_id], context)
        self.write(cr, uid, ids, {'state': 'done', 'approver': self.pool.get('res.users').browse(cr, uid, uid).id})
        return True

    def unlink(self, cr, uid, ids, context=None):
        # Only draft work orders may be deleted.
        val = self.browse(cr, uid, ids, context={})[0]
        if val.state != 'draft':
            raise osv.except_osv(('Invalid action !'), ('Cannot delete a work order which is in state \'%s\'!') % (val.state,))
        return super(perintah_kerja, self).unlink(cr, uid, ids, context=context)

    def print_perintah(self, cr, uid, ids, context=None):
        # Button: build the data dict for the 'perintah.A4' report —
        # quantities and product names are pre-rendered as multi-line text.
        data = {}
        val = self.browse(cr, uid, ids)[0]
        data['form'] = {}
        data['ids'] = context.get('active_ids',[])
        data['form']['data'] = self.read(cr, uid, ids)[0]
        qty = ''
        product = ''
        for x in val.perintah_lines:
            qty = qty + str(x.product_qty) + ' ' + x.product_uom.name + '\n\n'
            product = product + x.name + '\n\n'
        product = product + '\n\n' + val.note
        data['form']['data']['qty'] = qty
        data['form']['data']['product'] = product
        data['form']['data']['creator'] = val.creator.name
        data['form']['data']['checker'] = val.checker.name
        data['form']['data']['approver'] = val.approver.name
        return {
            'type': 'ir.actions.report.xml',
            'report_name': 'perintah.A4',
            'datas': data,
            'nodestroy':True
        }
class account_balance_reporting_line(osv.osv):
    """
    Account balance report line / Accounting concept
    One line of detail of the balance report representing an accounting
    concept with its values.
    The accounting concepts follow a parent-children hierarchy.
    Its values (current and previous) are calculated based on the 'value'
    formula of the linked template line.
    """
    _name = "account.balance.reporting.line"
    _columns = {
        # Parent report of this line
        'report_id': fields.many2one('account.balance.reporting', 'Report', ondelete='cascade'),
        # Concept official code (as specified by normalized models, will be used when printing)
        'code': fields.char('Code', size=64, required=True, select=True),
        # Concept official name (will be used when printing)
        'name': fields.char('Name', size=256, required=True, select=True),
        # Notes value (references to the notes)
        'notes': fields.text('Notes'),
        # Concept value in this fiscal year
        'current_value': fields.float('Fiscal year 1', digits=(16, 2)),
        # Concept value on the previous fiscal year
        'previous_value': fields.float('Fiscal year 2', digits=(16, 2)),
        # Date of the last calculation
        'calc_date': fields.datetime("Calculation date"),
        # Order sequence, it's also used for grouping into sections, that's why it is a char
        'sequence': fields.char('Sequence', size=32, required=False),
        # CSS class, used when printing to set the style of the line
        'css_class': fields.selection(CSS_CLASSES, 'CSS Class', required=False),
        # Linked template line used to calculate this line values
        'template_line_id': fields.many2one('account.balance.reporting.template.line', 'Line template', ondelete='set null'),
        # Parent accounting concept
        'parent_id': fields.many2one('account.balance.reporting.line', 'Parent', ondelete='cascade'),
        # Children accounting concepts
        'child_ids': fields.one2many('account.balance.reporting.line', 'parent_id', 'Children'),
        # Per-account detail rows, split by fiscal year via the domain.
        'current_line_account_ids': fields.one2many('account.balance.reporting.line.account', 'report_line_id', 'Line Accounts', domain=[('fiscal_year', '=', 'current')]),
        'previous_line_account_ids': fields.one2many('account.balance.reporting.line.account', 'report_line_id', 'Line Accounts', domain=[('fiscal_year', '=', 'previous')]),
    }
    _defaults = {
        # Use context report_id as the the parent report
        'report_id': lambda self, cr, uid, context: context.get('report_id', None),
        # Default css class (so we always have a class)
        'css_class': lambda *a: 'default',
    }
    # Lines are sorted by its sequence and code
    _order = "sequence, code"
    # Don't let the user repeat codes in the report (the codes will be used to look up accounting concepts)
    _sql_constraints = [('report_code_uniq', 'unique (report_id,code)', _("The code must be unique for this report!"))]

    def name_get(self, cr, uid, ids, context=None):
        """
        Redefine the name_get method to show the code in the name ("[code] name").
        """
        if not len(ids):
            return []
        res = []
        for item in self.browse(cr, uid, ids):
            res.append((item.id, "[%s] %s" % (item.code, item.name)))
        return res

    def name_search(self, cr, uid, name, args=[], operator='ilike', context={}, limit=80):
        """
        Redefine the name_search method to allow searching by code.
        """
        # NOTE(review): mutable defaults `args=[]` / `context={}` —
        # conventionally these should be None; confirm callers.
        ids = []
        if name:
            ids = self.search(cr, uid, [('code', 'ilike', name)] + args, limit=limit)
        if not ids:
            ids = self.search(cr, uid, [('name', operator, name)] + args, limit=limit)
        return self.name_get(cr, uid, ids, context=context)

    def refresh_values(self, cr, uid, ids, context=None):
        """
        Recalculates the values of this report line using the linked line
        template values formulas:

        Depending on this formula the final value is calculated as follows:
        - Empy template value: sum of (this concept) children values.
        - Number with decimal point ("10.2"): that value (constant).
        - Account numbers separated by commas ("430,431,(437)"): Sum of the account balances.
            (The sign of the balance depends on the balance mode)
        - Concept codes separated by "+" ("11000+12000"): Sum of those concepts values.
        """
        for line in self.browse(cr, uid, ids):
            current_value = 0.0
            previous_value = 0.0
            #
            # We use the same code to calculate both fiscal year values,
            # just iterating over them.
            #
            for fyear in ('current', 'previous'):
                value = 0
                if fyear == 'current':
                    template_value = line.template_line_id.current_value
                elif fyear == 'previous':
                    template_value = line.template_line_id.previous_value
                # Remove characters after a ";" (we use ; for comments)
                if template_value and len(template_value):
                    template_value = template_value.split(';')[0]
                if (fyear == 'current' and not line.report_id.current_fiscalyear_id) \
                        or (fyear == 'previous' and not line.report_id.previous_fiscalyear_id):
                    # No fiscal year configured for this column: value stays 0.
                    value = 0
                else:
                    #
                    # Calculate the value
                    #
                    if not template_value or not len(template_value):
                        #
                        # Empy template value => sum of the children, of this concept, values.
                        #
                        for child in line.child_ids:
                            if child.calc_date != child.report_id.calc_date:
                                # Tell the child to refresh its values
                                child.refresh_values()
                                # Reload the child data
                                child = self.browse(cr, uid, [child.id])[0]
                            if fyear == 'current':
                                value += float(child.current_value)
                            elif fyear == 'previous':
                                value += float(child.previous_value)
                    elif re.match(r'^\-?[0-9]*\.[0-9]*$', template_value):
                        #
                        # Number with decimal points => that number value (constant).
                        #
                        value = float(template_value)
                    elif re.match(r'^[0-9a-zA-Z,\(\)\*_]*$', template_value):
                        #
                        # Account numbers separated by commas => sum of the account balances.
                        #
                        # We will use the context to filter the accounts by fiscalyear
                        # and periods.
                        #
                        if fyear == 'current':
                            ctx = {
                                'fiscalyear': line.report_id.current_fiscalyear_id.id,
                                'periods': [p.id for p in line.report_id.current_period_ids],
                            }
                        elif fyear == 'previous':
                            ctx = {
                                'fiscalyear': line.report_id.previous_fiscalyear_id.id,
                                'periods': [p.id for p in line.report_id.previous_period_ids],
                            }
                        # Get the mode of balance calculation from the template
                        balance_mode = line.template_line_id.report_id.balance_mode
                        # Get the balance
                        ctx['period'] = fyear
                        value = line._get_account_balance(
                            template_value, balance_mode, ctx)
                    elif re.match(r'^[\+\-0-9a-zA-Z_\*]*$', template_value):
                        #
                        # Account concept codes separated by "+" => sum of the concept (report lines) values.
                        #
                        for line_code in re.findall(r'(-?\(?[0-9a-zA-Z_]*\)?)', template_value):
                            # Check the sign of the code (substraction)
                            if line_code.startswith('-') or line_code.startswith('('):
                                sign = -1.0
                            else:
                                sign = 1.0
                            line_code = line_code.strip('-()*')
                            # Check if the code is valid (findall might return empty strings)
                            if len(line_code) > 0:
                                # Search for the line (perfect match)
                                line_ids = self.search(cr, uid, [
                                    ('report_id', '=', line.report_id.id),
                                    ('code', '=', line_code),
                                ])
                                for child in self.browse(cr, uid, line_ids):
                                    if child.calc_date != child.report_id.calc_date:
                                        # Tell the child to refresh its values
                                        child.refresh_values()
                                        # Reload the child data
                                        child = self.browse(cr, uid, [child.id])[0]
                                    if fyear == 'current':
                                        value += float(child.current_value) * sign
                                    elif fyear == 'previous':
                                        value += float(child.previous_value) * sign
                #
                # Negate the value if needed
                #
                if line.template_line_id.negate:
                    value = -value
                if fyear == 'current':
                    current_value = value
                elif fyear == 'previous':
                    previous_value = value
            # Write the values
            self.write(cr, uid, [line.id], {
                'current_value': current_value,
                'previous_value': previous_value,
                'calc_date': line.report_id.calc_date
            })
        return True

    def _get_account_balance(self, cr, uid, ids, code, balance_mode=0, context=None):
        """
        It returns the (debit, credit, balance*) tuple for a account with the
        given code, or the sum of those values for a set of accounts
        when the code is in the form "400,300,(323)"

        Depending on the balance_mode, the balance is calculated as follows:
          Mode 0: debit-credit for all accounts (default);
          Mode 1: debit-credit, credit-debit for accounts in brackets;
          Mode 2: credit-debit for all accounts;
          Mode 3: credit-debit, debit-credit for accounts in brackets.

        Also the user may specify to use only the debit or credit of the account
        instead of the balance writing "debit(551)" or "credit(551)".
        """
        acc_facade = self.pool.get('account.account')
        report_line_account_obj = self.pool.get('account.balance.reporting.line.account')
        res = 0.0
        line = self.browse(cr, uid, ids)[0]
        # NOTE(review): the assert expects the *string* values '0'..'3'
        # while the keyword default is the int 0 — the default would fail
        # this assert; confirm callers always pass a string mode.
        assert balance_mode in ('0', '1', '2', '3'), "balance_mode should be in [0..3]"
        # We iterate over the accounts listed in "code", so code can be
        # a string like "430+431+432-438"; accounts split by "+" will be added,
        # accounts split by "-" will be substracted.
        #
        # We also take in consideration the balance_mode:
        # Mode 0: credit-debit for all accounts
        # Mode 1: debit-credit, credit-debit for accounts in brackets
        # Mode 2: credit-debit, debit-credit for accounts in brackets
        # Mode 3: credit-debit, debit-credit for accounts in brackets.
        #
        # And let the user get just the credit or debit if he specifies so.
        #
        for account_code in re.findall('(-?\w*\(?[0-9a-zA-Z_]*\)?)', code):
            # Check if the code is valid (findall might return empty strings)
            if len(account_code) > 0:
                #
                # Check the sign of the code (substraction)
                #
                if account_code.startswith('-'):
                    sign = -1.0
                    account_code = account_code[1:]  # Strip the sign
                else:
                    sign = 1.0
                if re.match(r'^debit\(.*\)$', account_code):
                    # Use debit instead of balance
                    mode = 'debit'
                    account_code = account_code[6:-1]  # Strip debit()
                    if balance_mode == '2':
                        # We use credit-debit in the balance
                        sign = -1.0 * sign
                elif re.match(r'^credit\(.*\)$', account_code):
                    # Use credit instead of balance
                    mode = 'credit'
                    account_code = account_code[7:-1]  # Strip credit()
                    if balance_mode == '2':
                        # We use credit-debit in the balance
                        sign = -1.0 * sign
                else:
                    mode = 'balance'
                    #
                    # Calculate the balance, as given by the balance mode
                    #
                    if balance_mode == '1':
                        # We use debit-credit as default balance,
                        # but for accounts in brackets we use credit-debit
                        if account_code.startswith('(') and account_code.endswith(')'):
                            sign = -1.0 * sign
                    elif balance_mode == '2':
                        # We use credit-debit as the balance,
                        sign = -1.0 * sign
                    elif balance_mode == '3':
                        # We use credit-debit as default balance,
                        # but for accounts in brackets we use debit-credit
                        if not account_code.startswith('(') and account_code.endswith(')'):
                            sign = -1.0 * sign
                # Strip the brackets (if there are brackets)
                if account_code.startswith('(') and account_code.endswith(')'):
                    account_code = account_code[1:-1]
                # Search for the account (perfect match)
                account_ids = acc_facade.search(cr, uid, [
                    ('code', '=', account_code),
                    ('company_id', '=', line.report_id.company_id.id)
                ], context=context)
                if not account_ids:
                    # We didn't find the account, search for a subaccount ending with '0'
                    account_ids = acc_facade.search(cr, uid, [
                        ('code', '=like', '%s%%0' % account_code),
                        ('company_id', '=', line.report_id.company_id.id)
                    ], context=context)
                if account_ids:
                    # Expand to all non-view descendants and sum their balances.
                    account_ids = acc_facade.search(cr, uid, [
                        ('parent_id', 'child_of', account_ids),
                        ('type', '!=', 'view'),
                    ], context=context)
                    for account in acc_facade.browse(cr, uid, account_ids, context):
                        balance = account.balance
                        if ((mode == 'debit' and balance > 0.0)
                                or (mode == 'credit' and balance < 0.0)
                                or (mode == 'balance')):
                            res += balance * sign
                            # Record the per-account detail row once per
                            # (account, fiscal year, line), only for accounts
                            # with actual activity.
                            if (not report_line_account_obj.search(cr, uid, [
                                    ('account_id', '=', account.id),
                                    ('fiscal_year', '=', context['period']),
                                    ('report_line_id', '=', line.id),
                                ], context=context)
                                    and (account.debit or account.credit)):
                                vals = {
                                    'account_id': account.id,
                                    'debit': account.debit,
                                    'credit': account.credit,
                                    'report_line_id': line.id,
                                    'fiscal_year': context['period'],
                                }
                                report_line_account_obj.create(cr, uid, vals, context)
                else:
                    netsvc.Logger().notifyChannel(
                        'account_balance_reporting', netsvc.LOG_WARNING,
                        "Account with code '%s' not found!" % account_code)
        return res
class ir_values(osv.osv): _name = 'ir.values' def _value_unpickle(self, cursor, user, ids, name, arg, context=None): res = {} for report in self.browse(cursor, user, ids, context=context): value = report[name[:-9]] if not report.object and value: try: value = str(pickle.loads(value)) except: pass res[report.id] = value return res def _value_pickle(self, cursor, user, id, name, value, arg, context=None): if context is None: context = {} ctx = context.copy() if self.CONCURRENCY_CHECK_FIELD in ctx: del ctx[self.CONCURRENCY_CHECK_FIELD] if not self.browse(cursor, user, id, context=context).object: value = pickle.dumps(value) self.write(cursor, user, id, {name[:-9]: value}, context=ctx) def onchange_object_id(self, cr, uid, ids, object_id, context={}): if not object_id: return {} act = self.pool.get('ir.model').browse(cr, uid, object_id, context=context) return {'value': {'model': act.model}} def onchange_action_id(self, cr, uid, ids, action_id, context={}): if not action_id: return {} act = self.pool.get('ir.actions.actions').browse(cr, uid, action_id, context=context) return {'value': {'value_unpickle': act.type + ',' + str(act.id)}} _columns = { 'name': fields.char('Name', size=128), 'model_id': fields.many2one( 'ir.model', 'Object', size=128, help= "This field is not used, it only helps you to select a good model." ), 'model': fields.char('Object Name', size=128, select=True), 'action_id': fields.many2one( 'ir.actions.actions', 'Action', help= "This field is not used, it only helps you to select the right action." 
), 'value': fields.text('Value'), 'value_unpickle': fields.function(_value_unpickle, fnct_inv=_value_pickle, method=True, type='text', string='Value'), 'object': fields.boolean('Is Object'), 'key': fields.selection([('action', 'Action'), ('default', 'Default')], 'Type', size=128, select=True), 'key2': fields.char( 'Event Type', help= "The kind of action or button in the client side that will trigger the action.", size=128, select=True), 'meta': fields.text('Meta Datas'), 'meta_unpickle': fields.function(_value_unpickle, fnct_inv=_value_pickle, method=True, type='text', string='Metadata'), 'res_id': fields.integer( 'Object ID', help="Keep 0 if the action must appear on all resources.", select=True), 'user_id': fields.many2one('res.users', 'User', ondelete='cascade', select=True), 'company_id': fields.many2one('res.company', 'Company', select=True) } _defaults = { 'key': lambda *a: 'action', 'key2': lambda *a: 'tree_but_open', 'company_id': lambda *a: False } def _auto_init(self, cr, context=None): super(ir_values, self)._auto_init(cr, context) cr.execute( 'SELECT indexname FROM pg_indexes WHERE indexname = \'ir_values_key_model_key2_res_id_user_id_idx\'' ) if not cr.fetchone(): cr.execute( 'CREATE INDEX ir_values_key_model_key2_res_id_user_id_idx ON ir_values (key, model, key2, res_id, user_id)' ) def set(self, cr, uid, key, key2, name, models, value, replace=True, isobject=False, meta=False, preserve_user=False, company=False): if isinstance(value, unicode): value = value.encode('utf8') if not isobject: value = pickle.dumps(value) if meta: meta = pickle.dumps(meta) ids_res = [] for model in models: if isinstance(model, (list, tuple)): model, res_id = model else: res_id = False if replace: search_criteria = [('key', '=', key), ('key2', '=', key2), ('model', '=', model), ('res_id', '=', res_id), ('user_id', '=', preserve_user and uid)] if key in ('meta', 'default'): search_criteria.append(('name', '=', name)) else: search_criteria.append(('value', '=', value)) 
self.unlink(cr, uid, self.search(cr, uid, search_criteria)) vals = { 'name': name, 'value': value, 'model': model, 'object': isobject, 'key': key, 'key2': key2 and key2[:200], 'meta': meta, 'user_id': preserve_user and uid, } if company: cid = self.pool.get('res.users').browse( cr, uid, uid, context={}).company_id.id vals['company_id'] = cid if res_id: vals['res_id'] = res_id ids_res.append(self.create(cr, uid, vals)) return ids_res def get(self, cr, uid, key, key2, models, meta=False, context={}, res_id_req=False, without_user=True, key2_req=True): result = [] for m in models: if isinstance(m, (list, tuple)): m, res_id = m else: res_id = False where = ['key=%s', 'model=%s'] params = [key, str(m)] if key2: where.append('key2=%s') params.append(key2[:200]) elif key2_req and not meta: where.append('key2 is null') if res_id_req and (models[-1][0] == m): if res_id: where.append('res_id=%s') params.append(res_id) else: where.append('(res_id is NULL)') elif res_id: if (models[-1][0] == m): where.append('(res_id=%s or (res_id is null))') params.append(res_id) else: where.append('res_id=%s') params.append(res_id) where.append('(user_id=%s or (user_id IS NULL)) order by id') params.append(uid) clause = ' and '.join(where) cr.execute( 'select id,name,value,object,meta, key from ir_values where ' + clause, params) result = cr.fetchall() if result: break if not result: return [] def _result_get(x, keys): if x[1] in keys: return False keys.append(x[1]) if x[3]: model, id = x[2].split(',') # FIXME: It might be a good idea to opt-in that kind of stuff # FIXME: instead of arbitrarily removing random fields fields = [ field for field in self.pool.get(model).fields_get_keys(cr, uid) if field not in EXCLUDED_FIELDS ] try: datas = self.pool.get(model).read(cr, uid, [int(id)], fields, context) except except_orm, e: return False datas = datas and datas[0] if not datas: return False else:
'view_mode': 'form', 'res_model': 'calendar.event.subscribe', 'views': [(id2, 'form'), (False, 'tree'), (False, 'calendar'), (False, 'graph')], 'type': 'ir.actions.act_window', 'target': 'new' } return value _name = "calendar.event.subscribe" _description = "Event subscribe" _columns = { 'url_path': fields.char('Provide path for remote calendar', size=124, required=True), 'msg': fields.text('', readonly=True), } _defaults = {'msg': lambda *a: 'Import Sucessful.'} calendar_event_subscribe() # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
class hr_applicant(osv.osv):
    '''
    override the message_new for adding job code,state, stage rename the file name
    '''
    _inherit = 'hr.applicant'
    _columns = {
        'imp_pending': fields.boolean('IMP Empoyee'),
        # Internal marker written by message_new() ("TEST"/"NOTEST"); the raw
        # SQL below filters on it.  Name looks like a typo for 'not_visible'.
        'not_visiblel': fields.char('test', size=56),
        # Plain text extracted from the resume attachment.
        'resume_note': fields.text('Resume'),
    }

    def message_new(self, cr, uid, msg, custom_values=None, context=None):
        """Automatically called when new email message arrives.

        Creates an applicant from an inbound mail: normalises the body,
        special-cases Naukri.com notification mails, converts doc/docx/pdf
        attachments to text via external tools, matches the subject against
        hr.job job codes, and decides the record's state/stage from any
        earlier applicant with the same sender address.
        """
        subject = msg.get('subject') or _("No Subject")
        body = msg.get('body_text')
        # body=smart_str(body)
        # NOTE(review): the normalised/ASCII-encoded result is discarded --
        # this call has no effect on `body` as written.
        unicodedata.normalize('NFKD', body).encode('ascii', 'ignore')
        # NOTE(review): the string literal below contains a raw control
        # character that was mangled in this copy of the file (probably
        # "\r") -- confirm the intended literals against version control.
        if body.find("
") > -1:
            body = body.replace(" ", "")
            body = body.replace("\n", "#")
        body2 = body.split('####')
        list1 = []
        if subject.find("Naukri.com") == -1:
            # Not a Naukri mail: keep the body, restoring newlines.
            body = body
            body = body.replace("#", "\n")
        else:
            # Naukri mail: scrape the known "Label #: #value" sections into
            # list1 as single-entry {label: value} dicts.
            for i in body2:
                dict = {}
                if i.find("Resume Headline") > -1:
                    dict['Resume Headline'] = i.replace(
                        "#Resume Headline #: #", "")
                    list1.append(dict)
                if i.find("Key Skills ") > -1:
                    dict['Key Skills'] = i.replace("Key Skills #: ##", "")
                    list1.append(dict)
                if i.find("Name") > -1:
                    dict['Name'] = i.replace("Name #: #", "")
                    list1.append(dict)
                if i.find("Total Experience ") > -1:
                    dict['Total Experience'] = i.replace(
                        "Total Experience #: #", "")
                    list1.append(dict)
                if i.find("CTC ") > -1:
                    dict['CTC'] = i.replace("CTC #: #", "")
                    list1.append(dict)
                if i.find("Current Employer ") > -1:
                    dict['Current Employer'] = i.replace(
                        "Current Employer #: #", "")
                    list1.append(dict)
                if i.find("Current Designation ") > -1:
                    dict['Current Designation'] = i.replace(
                        "Current Designation #: #", "")
                    list1.append(dict)
                if i.find("Last Employer ") > -1:
                    dict['Last Employer'] = i.replace("Last Employer ##: #",
                                                      "")
                    list1.append(dict)
                if i.find("Preferred Location ") > -1:
                    dict['Preferred Location'] = i.replace(
                        "Preferred Location #: #", "")
                    list1.append(dict)
                if i.find("Current Location ") > -1:
                    dict['Current Location'] = i.replace(
                        "Current Location #: #", "")
                    list1.append(dict)
                if i.find("Education ") > -1:
                    dict['Education'] = i.replace("Education #: #", "")
                    list1.append(dict)
                if i.find("Mobile ") > -1:
                    dict['Mobile'] = i.replace("Mobile #: #", "")
                    list1.append(dict)
                if i.find("Landline ") > -1:
                    dict['Landline'] = i.replace("Landline #: #", "")
                    list1.append(dict)
                if i.find("Recommendations ") > -1:
                    dict['Recommendations'] = i.replace(
                        "Recommendations #: #", "")
                    list1.append(dict)
                if i.find("Last modified on ") > -1:
                    dict['Last modified on'] = i.replace(
                        "Last modified on #: #", "")
                    list1.append(dict)
            # Rebuild the body as "Label: value" lines.
            body = ""
            for l in list1:
                body += l.keys()[0] + ":" + " " + l.values()[0] + "\n\n"
        msg_from = msg.get('from')
        priority = msg.get('priority')
        # NOTE(review): bare expression with no effect.
        msg['attachments']
        resume = ""
        # Convert each attachment to plain text (external command-line
        # converters) and timestamp-rename the attachment tuples in place.
        for i in range(len(msg['attachments'])):
            l = list(msg['attachments'][i])
            att_name = msg['attachments'][i][0]
            a = att_name.split('.')
            #======================================Find Current working path==================================
            cwd_path = os.getcwd()
            report_path = cwd_path + '/temp'
            if not os.path.exists(report_path):
                os.makedirs(report_path)
            #print report_path
            text_file = report_path + '/' + "resume.txt"
            resume_file = report_path + '/' + "resume." + a[-1]
            #=================================================================================================
            if a[-1] == 'doc':
                # .doc -> text via catdoc
                filename = open(resume_file, 'w')
                filename.write(str(msg['attachments'][i][1]))
                filename.close()
                commands.getoutput("catdoc '%s'> '%s'" %
                                   (resume_file, text_file))
                resume_file = open(text_file, 'r')
                resume = resume_file.read()
            elif a[-1] == 'docx':
                # .docx -> text via docx2txt.pl
                filename = open(resume_file, 'w')
                filename.write(str(msg['attachments'][i][1]))
                filename.close()
                commands.getoutput("docx2txt.pl '%s' '%s' " %
                                   (resume_file, text_file))
                resume_file = open(text_file, 'r')
                resume = resume_file.read()
            elif a[-1] == 'pdf':
                # .pdf -> text via pdftotext; presumably it writes
                # resume.txt next to the pdf, which is read below -- verify.
                filename = open(resume_file, 'w')
                filename.write(str(msg['attachments'][i][1]))
                filename.close()
                commands.getoutput("pdftotext '%s' " % (resume_file))
                resume_file = open(text_file, 'r')
                resume = resume_file.read()
            name = msg['attachments'][i][0] + ' ' + str(
                time.strftime('%Y-%m-%d %H:%M:%S'))
            l[0] = name
            msg['attachments'][i] = tuple(l)
        # Collect the hr.job rows whose job_code appears in the subject.
        cr.execute('SELECT * FROM hr_job')
        ids = map(lambda x: x[0], cr.fetchall())
        rows = self.pool.get('hr.job').browse(cr, uid, ids, context=None)
        k = []
        for row in rows:
            job_code = row.job_code
            if job_code:
                if subject.find(job_code) > -1:
                    code = subject.find(job_code)
                    k.append(row)
        # Reuse state/stage of an earlier applicant from the same sender.
        cr.execute(
            'SELECT state,stage_id FROM hr_applicant where email_from=%s and not_visiblel=%s ',
            (msg_from, "TEST"))
        hr_applicant_ids = cr.fetchall()
        stage_id = False
        state = 'draft'
        if hr_applicant_ids:
            state = hr_applicant_ids[0][0]
            stage_id = hr_applicant_ids[0][1]
        hr_app_ids = self.pool.get('hr.applicant').search(
            cr, uid, [('email_from', '=', msg_from), ('state', '=', 'cancel'),
                      ('not_visiblel', '=', 'TEST')])
        if hr_app_ids and k:
            # Sender was cancelled before: only re-open (state 'draft') once
            # job.month months have elapsed since the old record's creation;
            # otherwise create it already cancelled.
            hr_app_obj = self.pool.get('hr.applicant').browse(
                cr, uid, hr_app_ids[0])
            create_date = hr_app_obj.create_date
            c1 = create_date.split(' ')
            c_main = c1[0].split('-')
            create_date_last = datetime.datetime(int(c_main[0]),
                                                 int(c_main[1]),
                                                 int(c_main[2]), 0, 0, 0, 0)
            today = datetime.datetime.today()
            mon = int(k[0].month)
            # NOTE(review): despite the name, the offset is job.month
            # months, not necessarily six.
            after_six_months = create_date_last + relativedelta(months=mon)
            if after_six_months <= today:
                vals = {
                    'name': subject,
                    'email_from': msg_from,
                    'email_cc': msg.get('cc'),
                    'job_id': k[0].id,
                    'state': 'draft',
                    'description': body,
                    'user_id': False,
                    'resume_note': resume,
                    'not_visiblel': "TEST",
                }
            else:
                vals = {
                    'name': subject,
                    'email_from': msg_from,
                    'email_cc': msg.get('cc'),
                    'job_id': k[0].id,
                    'state': 'cancel',
                    'description': body,
                    'resume_note': resume,
                    'user_id': False,
                    'not_visiblel': "TEST",
                }
            if priority:
                vals['priority'] = priority
            vals.update(
                self.message_partner_by_email(cr, uid,
                                              msg.get('from', False)))
            res_id = super(hr_applicant, self).message_new(
                cr, uid, msg, custom_values=custom_values, context=context)
            self.write(cr, uid, [res_id], vals, context)
            if after_six_months <= today:
                # Also re-open the previously cancelled applicants.
                self.write(cr, uid, hr_app_ids, {'state': 'draft'}, context)
            return res_id
        elif k and not hr_app_ids:
            # Job matched, no cancelled history: inherit the state/stage
            # found via SQL above.
            vals = {
                'name': subject,
                'email_from': msg_from,
                'email_cc': msg.get('cc'),
                'job_id': k[0].id,
                'state': state,
                'stage_id': stage_id or False,
                'description': body,
                'resume_note': resume,
                'user_id': False,
                'not_visiblel': "TEST",
            }
            if priority:
                vals['priority'] = priority
            vals.update(
                self.message_partner_by_email(cr, uid,
                                              msg.get('from', False)))
            res_id = super(hr_applicant, self).message_new(
                cr, uid, msg, custom_values=custom_values, context=context)
            self.write(cr, uid, [res_id], vals, context)
            return res_id
        else:
            # No job code matched: plain record flagged "NOTEST".
            vals = {
                'name': subject,
                'email_from': msg_from,
                'email_cc': msg.get('cc'),
                'stage_id': stage_id or False,
                'description': body,
                'resume_note': resume,
                'user_id': False,
                'not_visiblel': "NOTEST",
            }
            if priority:
                vals['priority'] = priority
            vals.update(
                self.message_partner_by_email(cr, uid,
                                              msg.get('from', False)))
            res_id = super(hr_applicant, self).message_new(
                cr, uid, msg, custom_values=custom_values, context=context)
            self.write(cr, uid, [res_id], vals, context)
            return res_id
class OeMedicalAppointment(osv.Model):
    """Medical appointment.

    Implements a small state machine
    (draft -> confirm -> waiting -> in_consultation -> done, plus canceled).
    Every state change is journaled as an
    ``oemedical.appointment.history`` line via :meth:`_log_history`.

    Fixes vs. the previous revision:
    * ``create()`` passed a bare dict to the one2many ``history_ids``;
      a one2many must be written with ORM commands ``[(0, 0, values)]``.
    * removed a leftover debug ``print``.
    * "Comfirm"/"Wating" typos fixed in user-visible strings.
    * ``button_back()`` no longer creates a history line missing its
      ``action`` when the record is in an unknown state.
    * the six copies of the history-creation code are factored into one
      helper.
    """
    _name = 'oemedical.appointment'

    _columns = {
        'patient_id': fields.many2one('oemedical.patient', string='Patient',
                                      required=True, select=True,
                                      help='Patient Name'),
        'name': fields.char(size=256, string='Appointment ID',
                            readonly=True),
        'appointment_date': fields.datetime(string='Date and Time'),
        'appointment_day': fields.date(string='Date'),
        # Zero-padded string values so they sort/display consistently;
        # generated instead of hand-written ('01'..'23', '05'..'55').
        'appointment_hour': fields.selection(
            [('%02d' % h, '%02d' % h) for h in range(1, 24)],
            string='Hour'),
        'appointment_minute': fields.selection(
            [('%02d' % m, '%02d' % m) for m in range(5, 60, 5)],
            string='Minute'),
        'duration': fields.float('Duration'),
        'doctor': fields.many2one('oemedical.physician', string='Physician',
                                  select=True, help='Physician\'s Name'),
        'alias': fields.char(
            size=256,
            string='Alias',
        ),
        'comments': fields.text(string='Comments'),
        'appointment_type': fields.selection([
            ('ambulatory', 'Ambulatory'),
            ('outpatient', 'Outpatient'),
            ('inpatient', 'Inpatient'),
        ], string='Type'),
        'institution': fields.many2one(
            'res.partner', string='Health Center', help='Medical Center',
            domain="[('category_id', '=', 'Doctor Office')]"),
        'consultations': fields.many2one(
            'product.product', string='Consultation Services',
            help='Consultation Services',
            domain="[('type', '=', 'service'), ]"),
        'urgency': fields.selection([
            ('a', 'Normal'),
            ('b', 'Urgent'),
            ('c', 'Medical Emergency'),
        ], string='Urgency Level'),
        'speciality': fields.many2one('oemedical.specialty',
                                      string='Specialty',
                                      help='Medical Specialty / Sector'),
        'state': fields.selection([
            ('draft', 'Draft'),
            ('confirm', 'Confirm'),
            ('waiting', 'Waiting'),
            ('in_consultation', 'In consultation'),
            ('done', 'Done'),
            ('canceled', 'Canceled'),
        ], string='State'),
        'history_ids': fields.one2many(
            'oemedical.appointment.history', 'appointment_id_history',
            'History lines', states={'start': [('readonly', True)]}),
    }

    _defaults = {
        'name': lambda obj, cr, uid, context: obj.pool.get(
            'ir.sequence').get(cr, uid, 'oemedical.appointment'),
        'duration': 30.00,
        'urgency': 'a',
        'state': 'draft',
    }

    # Banner template for history lines (kept in the original hand-drawn
    # format).
    _HISTORY_ACTION = ("-------------------------------- Changed to %s "
                       "------------------------------------\n")

    # state -> (previous state, history label) used by button_back()
    _BACKWARD = {
        'confirm': ('draft', 'Draft'),
        'waiting': ('confirm', 'Confirm'),
        'in_consultation': ('waiting', 'Waiting'),
        'done': ('in_consultation', 'In Consultation'),
        'canceled': ('draft', 'Draft'),
    }

    def _log_history(self, cr, uid, appointment_id, label, context=None):
        """Create one history line saying the appointment changed to `label`.

        :param appointment_id: id of the appointment being journaled
        :param label: human readable target state, e.g. 'Confirm'
        """
        self.pool.get('oemedical.appointment.history').create(cr, uid, {
            'appointment_id_history': appointment_id,
            'name': uid,  # user who made the change
            'date': time.strftime('%Y-%m-%d %H:%M:%S'),
            'action': self._HISTORY_ACTION % label,
        })

    def create(self, cr, uid, vals, context=None):
        """Create the appointment, seeding its history with one line."""
        first_line = {
            'name': uid,
            'date': time.strftime('%Y-%m-%d %H:%M:%S'),
            'action': self._HISTORY_ACTION % 'Confirm',
        }
        # one2many columns take a list of ORM commands, not a bare dict.
        vals['history_ids'] = [(0, 0, first_line)]
        return super(OeMedicalAppointment, self).create(cr, uid, vals,
                                                        context=context)

    def button_back(self, cr, uid, ids, context=None):
        """Move the appointment(s) one step back in the workflow and log it."""
        label = None
        for appointment in self.browse(cr, uid, ids, context=context):
            transition = self._BACKWARD.get(appointment.state)
            if transition:
                new_state, label = transition
                self.write(cr, uid, ids, {'state': new_state},
                           context=context)
        # Only journal when a transition actually happened.
        if label is not None:
            self._log_history(cr, uid, ids[0], label, context=context)
        return True

    def button_confirm(self, cr, uid, ids, context=None):
        """Draft -> Confirm."""
        self.write(cr, uid, ids, {'state': 'confirm'}, context=context)
        self._log_history(cr, uid, ids[0], 'Confirm', context=context)
        return True

    def button_waiting(self, cr, uid, ids, context=None):
        """Confirm -> Waiting."""
        self.write(cr, uid, ids, {'state': 'waiting'}, context=context)
        self._log_history(cr, uid, ids[0], 'Waiting', context=context)
        return True

    def button_in_consultation(self, cr, uid, ids, context=None):
        """Waiting -> In consultation."""
        self.write(cr, uid, ids, {'state': 'in_consultation'},
                   context=context)
        self._log_history(cr, uid, ids[0], 'In Consultation',
                          context=context)
        return True

    def button_done(self, cr, uid, ids, context=None):
        """In consultation -> Done."""
        self.write(cr, uid, ids, {'state': 'done'}, context=context)
        self._log_history(cr, uid, ids[0], 'Done', context=context)
        return True

    def button_cancel(self, cr, uid, ids, context=None):
        """Any state -> Canceled."""
        self.write(cr, uid, ids, {'state': 'canceled'}, context=context)
        self._log_history(cr, uid, ids[0], 'Canceled', context=context)
        return True
class WizardPaperInvoiceSom(osv.osv_memory):
    """Wizard that renders paper invoices (plus receipts and an address CSV)
    for every contract not delivered by email within a date range, and packs
    the result into a base64 zip stored on the wizard itself.

    Flow: search_invoices() -> generate_invoices() (spawns a worker thread)
    -> generate_invoices_threaded() -> generate_inv()/generate_csv()/
    generate_reb() -> get_zip_from_directory().
    """
    _name = 'wizard.paper.invoice.som'
    _columns = {
        'state': fields.selection(STATES,
                                  _(u'Estat del wizard de imprimir report')),
        'date_from': fields.date('Data desde'),
        'date_to': fields.date('Data fins'),
        'info': fields.text('Informació', readonly=True),
        # JSON-serialised list of invoice ids found by search_invoices().
        'invoice_ids': fields.text('Factures'),
        # base64-encoded zip produced by the worker thread.
        'file': fields.binary('Fitxer generat'),
        'file_name': fields.text('Nom del fitxer'),
        'progress': fields.float(u'Progrés general'),
    }
    _defaults = {
        'state': lambda *a: 'init',
        'file_name': lambda *a: 'factures.zip',
        # Yesterday, so only fully closed days are included by default.
        'date_to':
        lambda *a: (datetime.today() - timedelta(days=1)).strftime("%Y-%m-%d")
    }

    def search_invoices(self, cursor, uid, ids, context=None):
        """Find open/paid customer invoices/refunds of postal (non-email)
        contracts in the wizard's date range; store their ids and a summary
        on the wizard and advance it to the 'info' state."""
        if not context:
            context = {}
        pol_obj = self.pool.get('giscedata.polissa')
        fact_obj = self.pool.get('giscedata.facturacio.factura')
        wiz = self.browse(cursor, uid, ids[0], context=context)
        # Include inactive contracts in the search.
        ctxt = context.copy()
        ctxt['active_test'] = False
        pol_ids = pol_obj.search(cursor, uid, [('enviament', '!=', 'email')],
                                 context=ctxt)
        fact_ids = fact_obj.search(cursor, uid, [
            ('polissa_id', 'in', pol_ids),
            ('date_invoice', '>=', wiz.date_from),
            ('date_invoice', '<=', wiz.date_to),
            ('state', 'in', ('open', 'paid')),
            ('type', 'in', ('out_refund', 'out_invoice')),
        ], context=context)
        fact_datas = fact_obj.read(cursor, uid, fact_ids, ['number'])
        fact_names = ', '.join([f['number'] for f in fact_datas])
        wiz.write({
            'state': 'info',
            'invoice_ids': json.dumps(fact_ids),
            'info':
            "Trobades {} polisses amb enviament postal.\nEs generanan {} pdf's de les seguents factures:\n{}"
            .format(len(pol_ids), len(fact_ids), fact_names),
        })

    def generate_invoices(self, cursor, uid, ids, context=None):
        """Kick off the rendering in a background thread and flip the wizard
        to 'working' so the UI can poll the progress field."""
        if context is None:
            context = {}
        gen_thread = threading.Thread(target=self.generate_invoices_threaded,
                                      args=(cursor, uid, ids, context))
        gen_thread.start()
        self.write(cursor, uid, ids, {'state': 'working'})
        return True

    def generate_invoices_threaded(self, cr, uid, ids, context=None):
        """Worker: render invoices, address CSV and receipts into a temp
        dir, zip it and store the result on the wizard.

        Runs in its own thread, so it opens a fresh cursor on the same
        database instead of reusing the request's cursor.
        """
        if not context:
            context = {}
        cursor = pooler.get_db(cr.dbname).cursor()
        wiz = self.browse(cursor, uid, ids[0], context=context)
        fact_ids = json.loads(wiz.invoice_ids)
        tmp_dir = tempfile.mkdtemp()
        failed_invoices, info_inv = self.generate_inv(cursor, uid, wiz,
                                                      fact_ids, tmp_dir,
                                                      context)
        # CSV and receipts are only produced for invoices that rendered OK.
        clean_invoices = list(set(fact_ids) - set(failed_invoices))
        info_csv = self.generate_csv(cursor, uid, wiz, clean_invoices,
                                     tmp_dir, 'Adreces.csv', context)
        info_reb = self.generate_reb(cursor, uid, wiz, clean_invoices,
                                     tmp_dir, context)
        wiz.write({
            'state': 'done',
            'file': self.get_zip_from_directory(tmp_dir, True),
            'info': wiz.info + "\n" + info_inv + "\n" + info_csv + "\n" +
            info_reb,
        })

    def generate_inv(self, cursor, uid, wiz, fact_ids, dirname, context=None):
        """Queue one render job per invoice, wait for all of them and
        report failures.

        :return: (list of failed invoice ids, summary message)
        """
        fact_obj = self.pool.get('giscedata.facturacio.factura')
        report = 'report.giscedata.facturacio.factura'
        j_pool = ProgressJobsPool(wiz)
        for factura_done, fact_id in enumerate(fact_ids):
            fact = fact_obj.browse(cursor, uid, fact_id, context=context)
            file_name = u"{} {} {}.pdf".format(
                fact.polissa_id.name,
                fact.number,
                fact.polissa_id.direccio_notificacio.name,
            ).encode('latin-1')
            j_pool.add_job(
                self.render_to_file(cursor, uid, [fact_id], report, dirname,
                                    file_name, context))
            # Queueing accounts for up to 98% of the progress bar; csv and
            # receipts take the remaining points.
            wiz.write(
                {'progress': (float(factura_done + 1) / len(fact_ids)) * 98})
        j_pool.join()
        failed_invoice = []
        for status, result in j_pool.results.values():
            if not status:
                failed_invoice.extend(result)
        if failed_invoice:
            fact_data = fact_obj.read(cursor, uid, failed_invoice,
                                      ["number"])
            facts = ', '.join([f['number'] for f in fact_data])
            info = u'Les següents {} factures han tingut error: {}'.format(
                len(failed_invoice), facts)
        else:
            info = u"{} factures generades correctament.".format(
                len(fact_ids))
        return failed_invoice, info

    @job(queue=config.get('som_factures_paper_render_queue',
                          'poweremail_render'),
         result_ttl=24 * 3600)
    def render_to_file(self, cursor, uid, fids, report, dirname, file_name,
                       context=None):
        """Return a tuple of status (True: OK, False: Failed) and the
        invoice path.

        Renders `report` for the given invoice ids into
        ``dirname/file_name``.  Executed asynchronously on the render queue
        (see the @job decorator); exceptions are reported to sentry and
        turned into a (False, fids) result instead of propagating.
        """
        if context is None:
            context = {}
        try:
            report = netsvc.service_exist(report)
            values = {
                'model': 'giscedata.facturacio.factura',
                'id': fids,
                'report_type': 'pdf'
            }
            content = report.create(cursor, uid, fids, values, context)[0]
            # Escriure report a "fitxer"
            fitxer_name = os.path.join(dirname, file_name)
            with open(fitxer_name, 'wb') as f:
                f.write(content)
            return True, fids
        except Exception:
            import traceback
            traceback.print_exc()
            sentry = self.pool.get('sentry.setup')
            if sentry is not None:
                sentry.client.captureException()
            return False, fids

    def generate_csv(self, cursor, uid, wiz, fact_ids, dirname, file_name,
                     context=None):
        """Write a ';'-separated CSV of notification addresses (one row per
        contract, sorted by contract name) into `dirname`."""
        def blank(thing):
            # Map False/None relational values to an empty cell.
            return thing if thing else ""

        to_sort = {}
        fact_obj = self.pool.get('giscedata.facturacio.factura')
        for fact_id in fact_ids:
            fact = fact_obj.browse(cursor, uid, fact_id, context=context)
            name = "{}".format(fact.polissa_id.name)
            to_sort[name] = (
                fact.polissa_id.direccio_notificacio.name,
                fact.polissa_id.name,
                fact.polissa_id.direccio_notificacio.street,
                fact.polissa_id.direccio_notificacio.zip,
                fact.polissa_id.direccio_notificacio.city,
                blank(fact.polissa_id.direccio_notificacio.street2),
                blank(fact.polissa_id.direccio_notificacio.apartat_correus),
            )
        output = StringIO()
        writer = csv.writer(
            output,
            delimiter=';',
        )
        writer.writerow([
            u'Persona notificacio',
            u'Polissa',
            u'Carrer',
            u'CP',
            u'Ciutat',
            u'Carrer alt',
            u'Apartat correus',
        ])
        for k in sorted(to_sort.keys()):
            writer.writerow(to_sort[k])
        try:
            fitxer_name = '{}/{}'.format(dirname, file_name)
            with open(fitxer_name, 'wb') as f:
                f.write(output.getvalue())
        except Exception:
            import traceback
            traceback.print_exc()
            sentry = self.pool.get('sentry.setup')
            if sentry is not None:
                sentry.client.captureException()
        wiz.write({'progress': 99})
        return u"Generat csv amb {} files.".format(len(fact_ids))

    def generate_reb(self, cursor, uid, wiz, fact_ids, dirname,
                     context=None):
        """Render the receipt ('rebut') report for invoices whose contract
        requests a postal receipt; same job-pool pattern as generate_inv()."""
        fact_obj = self.pool.get('giscedata.facturacio.factura')
        report = 'report.giscedata.facturacio.factura.rebut'
        facts_with_rebs_ids = []
        for fact_id in fact_ids:
            fact = fact_obj.browse(cursor, uid, fact_id, context=context)
            if fact.polissa_id.postal_rebut:
                facts_with_rebs_ids.append(fact_id)
        if not facts_with_rebs_ids:
            return u"Cap rebut generat."
        j_pool = ProgressJobsPool(wiz)
        for fact_id in facts_with_rebs_ids:
            fact = fact_obj.browse(cursor, uid, fact_id, context=context)
            file_name = "{} {} {} rebut.pdf".format(
                fact.polissa_id.name,
                fact.number,
                fact.polissa_id.direccio_notificacio.name,
            )
            j_pool.add_job(
                self.render_to_file(cursor, uid, [fact_id], report, dirname,
                                    file_name, context))
        wiz.write({'progress': 100})
        j_pool.join()
        failed_invoice = []
        for status, result in j_pool.results.values():
            if not status:
                failed_invoice.extend(result)
        if failed_invoice:
            fact_data = fact_obj.read(cursor, uid, failed_invoice,
                                      ["number"])
            facts = ', '.join([f['number'] for f in fact_data])
            info = u'Els següents {} rebuts han tingut error: {}'.format(
                len(failed_invoice), facts)
        else:
            info = u"{} Rebuts generats correctament.".format(
                len(facts_with_rebs_ids))
        return info

    def get_zip_from_directory(self, directory, b64enc=True):
        """Zip `directory` in memory and return the archive bytes
        (base64-encoded when `b64enc` is True)."""
        def _zippy(archive, path):
            # Add every file under `path`, keyed relative to its basename.
            path = os.path.abspath(path)
            base = os.path.basename(path)
            for f in tools.osutil.listdir(path, True):
                archive.write(os.path.join(path, f), os.path.join(base, f))

        archname = StringIO()
        archive = PyZipFile(archname, "w", ZIP_DEFLATED)
        archive.writepy(directory)
        _zippy(archive, directory)
        archive.close()
        val = archname.getvalue()
        archname.close()
        if b64enc:
            val = base64.encodestring(val)
        return val
data_obj = self.pool.get('ir.model.data') id2 = data_obj._get_id(cr, uid, 'caldav', 'view_calendar_event_subscribe_display') if id2: id2 = data_obj.browse(cr, uid, id2, context=context).res_id value = { 'view_type': 'form', 'view_mode': 'form', 'res_model': 'calendar.event.subscribe', 'views': [(id2,'form'),(False,'tree'),(False,'calendar'),(False,'graph')], 'type': 'ir.actions.act_window', 'target': 'new' } return value _name = "calendar.event.subscribe" _description = "Event subscribe" _columns = { 'url_path': fields.char('Provide path for remote calendar', size=124, required=True), 'msg': fields.text('', readonly=True), } _defaults = { 'msg':lambda *a:'Import Sucessful.' } calendar_event_subscribe() # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
class network_software_logpass(osv.osv):
    """ Couples of login/password """
    _inherit = "network.software.logpass"
    _columns = {
        'name': fields.char('Name', size=100),
        'note': fields.text('Note'),
        # Material of the related software, exposed read-only on this model.
        'material': fields.related('software_id', 'material_id',
                                   type='many2one',
                                   relation='network.material',
                                   string='Material', readonly=True),
        # True once the stored password has been ARC4-encrypted.
        'encrypted': fields.boolean('Encrypted'),
        'superuser': fields.boolean('Super User'),
    }
    _defaults = {
        'encrypted': lambda obj, cursor, user, context: False,
    }

    def onchange_password(self, cr, uid, ids, encrypted, context={}):
        # Any manual edit of the password clears the encrypted flag so the
        # new clear-text value can be re-encrypted.
        return {'value': {'encrypted': False}}

    def _encrypt_password(self, cr, uid, ids, *args):
        """ARC4-encrypt the clear-text password of each record, using the
        current user's key from network.encrypt.password; stores the result
        base64-encoded and sets the 'encrypted' flag."""
        for rec in self.browse(cr, uid, ids):
            try:
                from Crypto.Cipher import ARC4
            except ImportError:
                raise osv.except_osv(
                    _('Error !'), _('Package python-crypto no installed.'))
            if not rec.encrypted:
                obj_encrypt_password = self.pool.get(
                    'network.encrypt.password')
                # Key record created/written by the current user.
                encrypt_password_ids = obj_encrypt_password.search(
                    cr, uid, [('create_uid', '=', uid),
                              ('write_uid', '=', uid)])
                encrypt_password_id = encrypt_password_ids and encrypt_password_ids[
                    0] or False
                if encrypt_password_id:
                    passwordkey = obj_encrypt_password.browse(
                        cr, uid, encrypt_password_id).name
                    enc = ARC4.new(passwordkey)
                    try:
                        encripted = base64.b64encode(enc.encrypt(
                            rec.password))
                    except UnicodeEncodeError:
                        # NOTE(review): `break` aborts the whole loop, so any
                        # remaining records are silently skipped -- confirm
                        # `continue` was not intended.
                        break
                    self.write(cr, uid, [rec.id], {
                        'password': encripted,
                        'encrypted': True
                    })
                else:
                    raise osv.except_osv(
                        _('Error !'),
                        _('Not encrypt/decrypt password has given.'))
        return True

    def _decrypt_password(self, cr, uid, ids, *args):
        """Decrypt each encrypted password with the current user's key and
        report it to the user; validates the result decodes as ASCII."""
        for rec in self.browse(cr, uid, ids):
            try:
                from Crypto.Cipher import ARC4
            except ImportError:
                raise osv.except_osv(
                    _('Error !'), _('Package python-crypto no installed.'))
            if rec.encrypted:
                obj_encrypt_password = self.pool.get(
                    'network.encrypt.password')
                encrypt_password_ids = obj_encrypt_password.search(
                    cr, uid, [('create_uid', '=', uid),
                              ('write_uid', '=', uid)])
                encrypt_password_id = encrypt_password_ids and encrypt_password_ids[
                    0] or False
                if encrypt_password_id:
                    passwordkey = obj_encrypt_password.browse(
                        cr, uid, encrypt_password_id).name
                    dec = ARC4.new(passwordkey)
                    try:
                        desencripted = dec.decrypt(
                            base64.b64decode(rec.password))
                        # ASCII sanity-check of the decrypted value.
                        unicode(desencripted, 'ascii')
                        # NOTE(review): the statement below is corrupted in
                        # this copy of the file -- '******' looks like a
                        # redacted secret and the except-clause matching the
                        # try above appears to be missing. Restore this span
                        # from version control before editing.
                        raise osv.except_osv(rec.login + _(' password:'******'Error !'),
                                             _('Wrong encrypt/decrypt password.'))
                else:
                    raise osv.except_osv(
                        _('Error !'),
                        _('Not encrypt/decrypt password has given.'))
        return True
fp = file(fname,'wb') v = base64.decodestring(value) fp.write(v) filesize = os.stat(fname).st_size cr.execute('update ir_attachment set store_fname=%s,store_method=%s,file_size=%s where id=%s', (os.path.join(flag,filename),'fs',len(v),id)) return True except Exception,e : raise except_orm(_('Error!'), str(e)) _columns = { 'user_id': fields.many2one('res.users', 'Owner', select=1), 'group_ids': fields.many2many('res.groups', 'document_directory_group_rel', 'item_id', 'group_id', 'Groups'), 'parent_id': fields.many2one('document.directory', 'Directory', select=1), 'file_size': fields.integer('File Size', required=True), 'file_type': fields.char('Content Type', size=32), 'index_content': fields.text('Indexed Content'), 'write_date': fields.datetime('Date Modified', readonly=True), 'write_uid': fields.many2one('res.users', 'Last Modification User', readonly=True), 'create_date': fields.datetime('Date Created', readonly=True), 'create_uid': fields.many2one('res.users', 'Creator', readonly=True), 'store_method': fields.selection([('db','Database'),('fs','Filesystem'),('link','Link')], "Storing Method"), 'datas': fields.function(_data_get,method=True,fnct_inv=_data_set,string='File Content',type="binary"), 'store_fname': fields.char('Stored Filename', size=200), 'res_model': fields.char('Attached Model', size=64), #res_model 'res_id': fields.integer('Attached ID'), #res_id 'partner_id':fields.many2one('res.partner', 'Partner', select=1), 'title': fields.char('Resource Title',size=64), } _defaults = { 'user_id': lambda self,cr,uid,ctx:uid,
class stock_return_picking(osv.osv_memory):
    # Wizard: build a reverse (return) picking from an existing picking,
    # pre-filling one line per move with the quantity not yet returned.
    _name = 'stock.return.picking'
    _description = 'Return Picking'
    _columns = {
        # One editable line per returnable stock move of the source picking.
        'product_return_moves' : fields.one2many('stock.return.picking.memory', 'wizard_id', 'Moves'),
        'invoice_state': fields.selection([('2binvoiced', 'To be refunded/invoiced'), ('none', 'No invoicing')], 'Invoicing',required=True),
        'note': fields.text('Notes', readonly=True, states={'draft': [('readonly', False)]}),
    }

    def default_get(self, cr, uid, fields, context=None):
        """ To get default values for the object.
         @param self: The object pointer.
         @param cr: A database cursor
         @param uid: ID of the user currently logged in
         @param fields: List of fields for which we want default values
         @param context: A standard dictionary
         @return: A dictionary with default values for all field in ``fields``
        """
        result1 = []
        if context is None:
            context = {}
        res = super(stock_return_picking, self).default_get(cr, uid, fields, context=context)
        # The picking being returned is taken from the caller's active_id.
        record_id = context and context.get('active_id', False) or False
        pick_obj = self.pool.get('stock.picking')
        # NOTE(review): browse is called even when record_id is False;
        # the `if pick:` guard below relies on browse(False) being falsy.
        pick = pick_obj.browse(cr, uid, record_id, context=context)
        if pick:
            if 'invoice_state' in fields:
                # Propose a refund only when the source picking was (to be)
                # invoiced.
                if pick.invoice_state in ['invoiced','2binvoiced']:
                    res['invoice_state'] = '2binvoiced'
                else:
                    res['invoice_state'] = 'none'
            return_history = self.get_return_history(cr, uid, record_id, context)
            for line in pick.move_lines:
                # Remaining returnable quantity = moved qty minus what was
                # already returned for this move.
                qty = line.product_qty - return_history.get(line.id, 0)
                if qty > 0:
                    result1.append({'product_id': line.product_id.id, 'quantity': qty,'move_id':line.id, 'prodlot_id': line.prodlot_id and line.prodlot_id.id or False})
            if 'product_return_moves' in fields:
                res.update({'product_return_moves': result1})
        return res

    def view_init(self, cr, uid, fields_list, context=None):
        """ Creates view dynamically and adding fields at runtime.
         @param self: The object pointer.
         @param cr: A database cursor
         @param uid: ID of the user currently logged in
         @param context: A standard dictionary
         @return: New arch of view with new columns.
        """
        if context is None:
            context = {}
        res = super(stock_return_picking, self).view_init(cr, uid, fields_list, context=context)
        record_id = context and context.get('active_id', False)
        if record_id:
            pick_obj = self.pool.get('stock.picking')
            pick = pick_obj.browse(cr, uid, record_id, context=context)
            # Only pickings in these states may be returned.
            if pick.state not in ['done','confirmed','assigned']:
                raise osv.except_osv(_('Warning !'), _("You may only return pickings that are Confirmed, Available or Done!"))
            valid_lines = 0
            return_history = self.get_return_history(cr, uid, record_id, context)
            for m in pick.move_lines:
                # A line is returnable if done and not yet fully returned
                # (quantities compared in the move's reference UoM via factor).
                if m.state == 'done' and m.product_qty * m.product_uom.factor > return_history.get(m.id, 0):
                    valid_lines += 1
            if not valid_lines:
                raise osv.except_osv(_('Warning!'), _("No products to return (only lines in Done state and not fully returned yet can be returned)!"))
        return res

    def get_return_history(self, cr, uid, pick_id, context=None):
        """ Get return_history.
         @param self: The object pointer.
         @param cr: A database cursor
         @param uid: ID of the user currently logged in
         @param pick_id: Picking id
         @param context: A standard dictionary
         @return: A dictionary which of values.
        """
        # Maps move id -> quantity already returned (in reference UoM units).
        pick_obj = self.pool.get('stock.picking')
        pick = pick_obj.browse(cr, uid, pick_id, context=context)
        return_history = {}
        for m in pick.move_lines:
            if m.state == 'done':
                return_history[m.id] = 0
                for rec in m.move_history_ids2:
                    # only take into account 'product return' moves, ignoring any other
                    # kind of upstream moves, such as internal procurements, etc.
                    # a valid return move will be the exact opposite of ours:
                    # (src location, dest location) <=> (dest location, src location))
                    if rec.location_dest_id.id == m.location_id.id \
                        and rec.location_id.id == m.location_dest_id.id:
                        return_history[m.id] += (rec.product_qty * rec.product_uom.factor)
        return return_history

    def create_returns(self, cr, uid, ids, context=None):
        """ Creates return picking.
         @param self: The object pointer.
         @param cr: A database cursor
         @param uid: ID of the user currently logged in
         @param ids: List of ids selected
         @param context: A standard dictionary
         @return: A dictionary which of fields with values.
        """
        if context is None:
            context = {}
        record_id = context and context.get('active_id', False) or False
        move_obj = self.pool.get('stock.move')
        pick_obj = self.pool.get('stock.picking')
        uom_obj = self.pool.get('product.uom')
        data_obj = self.pool.get('stock.return.picking.memory')
        wf_service = netsvc.LocalService("workflow")
        pick = pick_obj.browse(cr, uid, record_id, context=context)
        data = self.read(cr, uid, ids[0], context=context)
        new_picking = None
        date_cur = time.strftime('%Y-%m-%d %H:%M:%S')
        # The source picking's invoice_state is reset to 'none' only when
        # every move is returned in full (see flag below).
        set_invoice_state_to_none = True
        returned_lines = 0
        # Create new picking for returned products
        if pick.type =='out':
            new_type = 'in'
        elif pick.type =='in':
            new_type = 'out'
        else:
            new_type = 'internal'
        seq_obj_name = 'stock.picking.' + new_type
        new_pick_name = self.pool.get('ir.sequence').get(cr, uid, seq_obj_name)
        new_picking = pick_obj.copy(cr, uid, pick.id, {
            # Strip any previous '-return' suffix so chained returns do not
            # accumulate suffixes.
            'name': _('%s-%s-return') % (new_pick_name, (pick.name).replace(_('-return'), '')),
            'move_lines': [],
            'state':'draft',
            'type': new_type,
            # NOTE(review): crashes when pick.origin is False (.replace on
            # bool) — confirm whether origin is always set for returnables.
            'origin': _('%s-return') % (pick.origin).replace(_('-return'), ''),
            'date':date_cur,
            'invoice_state': data['invoice_state'],
            'note': data['note'],
        })
        val_id = data['product_return_moves']
        for v in val_id:
            data_get = data_obj.browse(cr, uid, v, context=context)
            mov_id = data_get.move_id.id
            if not mov_id:
                raise osv.except_osv(_('Warning !'), _("You have manually created product lines, please delete them to proceed"))
            new_qty = data_get.quantity
            move = move_obj.browse(cr, uid, mov_id, context=context)
            # The return flows backwards: source of the new move is the
            # destination of the original one, and vice versa.
            new_location = move.location_dest_id.id
            returned_qty = move.product_qty
            for rec in move.move_history_ids2:
                returned_qty -= rec.product_qty
            if returned_qty != new_qty:
                set_invoice_state_to_none = False
            if new_qty:
                returned_lines += 1
                new_move=move_obj.copy(cr, uid, move.id, {
                    'product_qty': new_qty,
                    # NOTE(review): move.product_uos may be False for
                    # products without a secondary UoS — _compute_qty would
                    # then receive a bool id; confirm against product setup.
                    'product_uos_qty': uom_obj._compute_qty(cr, uid, move.product_uom.id, new_qty, move.product_uos.id),
                    'picking_id': new_picking,
                    'state': 'draft',
                    'location_id': new_location,
                    'location_dest_id': move.location_id.id,
                    'date': date_cur,
                    'prodlot_id': data_get.prodlot_id.id,
                })
                # Link the return move to the original so future returns see
                # it in move_history_ids2 (many2many add, command (4, id)).
                move_obj.write(cr, uid, [move.id], {'move_history_ids2':[(4,new_move)]}, context=context)
        if not returned_lines:
            raise osv.except_osv(_('Warning!'), _("Please specify at least one non-zero quantity."))
        if set_invoice_state_to_none:
            pick_obj.write(cr, uid, [pick.id], {'invoice_state':'none'}, context=context)
        # Confirm and reserve the freshly created return picking.
        wf_service.trg_validate(uid, 'stock.picking', new_picking, 'button_confirm', cr)
        pick_obj.force_assign(cr, uid, [new_picking], context)
        # Update view id in context, lp:702939
        view_list = {
            'out': 'action_picking_tree',
            'in': 'action_picking_tree4',
            'internal': 'action_picking_tree6',
        }
        data_pool = self.pool.get('ir.model.data')
        action = {}
        try:
            action_model,action_id = data_pool.get_object_reference(cr, uid, 'stock', view_list.get(new_type, 'action_picking_tree6'))
        except ValueError:
            raise osv.except_osv(_('Error'), _('Object reference %s not found') % view_list.get(new_type, 'action_picking_tree6'))
        if action_model:
            action_pool = self.pool.get(action_model)
            action = action_pool.read(cr, uid, action_id, context=context)
            # Restrict the opened list view to the new return picking only.
            action['domain'] = "[('id','=', "+str(new_picking)+")]"
            action['context'] = context
        return action
'state' : fields.selection([('pending', 'Pending'), ('validated', 'Validated'), ('invalidated', 'Invalidated'), ('updated', 'Updated')], 'State'), 'email':fields.char('Contact Email', size=512), 'user_id': fields.many2one('res.users', 'User', ondelete='restrict', required=True), #just in case, since the many2one exist it has no cost in database 'children_ids' : fields.one2many('sync.server.entity', 'parent_id', 'Children Instances'), 'update_token' : fields.char('Update security token', size=256), 'activity' : fields.function(_get_activity, type='char', string="Activity", method=True, multi="_get_act"), 'last_dateactivity': fields.function(_get_activity, type='datetime', string="Date of last activity", method=True, multi="_get_act"), #'last_activity' : fields.datetime("Date of last activity", readonly=True), 'parent_left' : fields.integer("Left Parent", select=1), 'parent_right' : fields.integer("Right Parent", select=1), 'msg_ids_tmp':fields.text('List of temporary ids of message to be pulled'), 'version': fields.integer('version'), 'last_sequence': fields.integer('Last update sequence pulled', readonly=True), } _defaults = { 'version': lambda *a: 0, 'last_sequence': lambda *a: 0, } def unlink(self, cr, uid, ids, context=None): for rec in self.browse(cr, uid, ids, context=context): if rec.parent_id: raise osv.except_osv(_("Error!"), _("Can not delete an instance that have children!")) return super(entity, self).unlink(cr, uid, ids, context=None) def get_security_token(self):
'email_cc':fields.char( 'CC', size=250), 'email_bcc':fields.char( 'BCC', size=250), 'reply_to':fields.char( 'Reply-To', size=250), 'message_id':fields.char( 'Message-ID', size=250), 'subject':fields.char( 'Subject', size=200,), 'body_text':fields.text( 'Standard Body (Text)'), 'body_html':fields.text( 'Body (Rich Text Clients Only)'), 'attachments_ids':fields.many2many( 'ir.attachment', 'mail_attachments_rel', 'mail_id', 'att_id', 'Attachments'), 'account_id' :fields.many2one( 'email_template.account', 'User account', required=True), 'user':fields.related( 'account_id', 'user',
# NOTE(review): the following are members of a `product_images` class whose
# header lies outside this chunk; `_get_image`, `_image_path` and
# `_save_file` are sibling methods defined elsewhere in that class.
def _set_image(self, cr, uid, id, name, value, arg, context=None):
    # Inverse of the 'file' function field: persist the uploaded image
    # either on the filesystem (when an image path is configured) or in
    # the binary column 'file_db_store'.
    image = self.browse(cr, uid, id, context=context)
    full_path = self._image_path(cr, uid, image, context=context)
    if full_path:
        return self._save_file(full_path, value)
    return self.write(cr, uid, id, {"file_db_store": value}, context=context)

_columns = {
    "name": fields.char("Image Title", translate=True, size=100, required=True),
    # NOTE(review): 'extention' is a misspelling of 'extension', but it is
    # the stored column name — renaming would require a migration.
    "extention": fields.char("file extention", size=6),
    "link": fields.boolean(
        "Link?",
        help="Images can be linked from files on your file system or remote (Preferred)"
    ),
    "file_db_store": fields.binary("Image stored in database"),
    # Computed binary: reads via _get_image, writes via _set_image above.
    "file": fields.function(
        _get_image,
        fnct_inv=_set_image,
        type="binary",
        method=True,
        filters="*.png,*.jpg,*.gif"
    ),
    "url": fields.char("File Location", size=128),
    "comments": fields.text("Comments", translate=True),
    "product_id": fields.many2one("product.product", "Product"),
}
_defaults = {"link": lambda *a: False}
# Image names must be unique per product (DB-level constraint).
_sql_constraints = [
    ("uniq_name_product_id", "UNIQUE(product_id, name)",
     _("A product can have only one image with the same name"))
]

product_images()
class stock_production_lot(osv.osv):
    """Extend production lots with customer/supplier traceability data
    and the list of warranties attached to the lot."""
    _name = "stock.production.lot"
    _inherit = "stock.production.lot"
    _columns = {
        "description": fields.text("Description"),
        "model_no": fields.char("Model No", 255),
        "customer": fields.many2one("res.partner", string="Customer",
                                    domain="[('customer','=', 1)]"),
        "user": fields.char("End User", 255),
        "user_department": fields.char("End User Dept", 255),
        "installation_date": fields.date("Installation Date"),
        #TODO The context would complete the partner search field to
        # match the customer name
        "customer_invoice": fields.many2one(
            "account.invoice",
            string="Customer Invoice",
            domain="[('type','=','out_invoice')]",
        ),
        "attachments": fields.text("Attachments"),
        "manufacturer": fields.char("Manufacturer", 255),
        "manufact_item_no": fields.char("Part. Number", 255),
        "supplier": fields.many2one("res.partner", string="Supplier",
                                    domain="[('supplier','=', 1)]"),
        "supplier_item_no": fields.char("Supplier Item No", 255),
        #TODO The context would complete the partner search field to
        # match the supplier name
        "supplier_invoice": fields.many2one("account.invoice",
                                            string="Supplier Invoice",
                                            domain="[('type','=','in_invoice')]"),
        "delivery_date": fields.date("Delivery Date"),
        "remarks": fields.text("Remarks"),
        # TODO Make sure the warranty's production-lot field is updated
        # immediately when the warranty is created from the production lot.
        'warranties_ids': fields.one2many('prisme.warranty.warranty',
                                          'lot_id', 'Warranties'),
    }

    def onchange_product(self, cr, uid, ids, product_id):
        """On product change, default both descriptions from the product
        template's description.

        FIX: the original browsed a one-element list and looped over it
        while incrementing an unused counter; replaced by a single-record
        browse with identical resulting values.
        """
        value_to_return = {}
        if product_id:
            product = self.pool.get('product.product').browse(
                cr, uid, product_id)
            value_to_return['description'] = \
                product.product_tmpl_id.description
            # NOTE(review): 'warranty_description' is not declared in
            # _columns above — presumably a view-only field; confirm.
            value_to_return['warranty_description'] = \
                product.product_tmpl_id.description
        return {'value': value_to_return}
v = view_obj.browse(cr,uid,data_id.res_id) aa = v.inherit_id and '* INHERIT ' or '' res[mnames[data_id.module]]['views_by_module'] += aa + v.name + ' ('+v.type+')\n' elif key=='ir.actions.report.xml': res[mnames[data_id.module]]['reports_by_module'] += report_obj.browse(cr,uid,data_id.res_id).name + '\n' elif key=='ir.ui.menu': res[mnames[data_id.module]]['menus_by_module'] += menu_obj.browse(cr,uid,data_id.res_id).complete_name + '\n' except KeyError, e: pass return res _columns = { 'name': fields.char("Name", size=128, readonly=True, required=True), 'category_id': fields.many2one('ir.module.category', 'Category', readonly=True), 'shortdesc': fields.char('Short Description', size=256, readonly=True, translate=True), 'description': fields.text("Description", readonly=True, translate=True), 'author': fields.char("Author", size=128, readonly=True), 'website': fields.char("Website", size=256, readonly=True), # attention: Incorrect field names !! # installed_version refer the latest version (the one on disk) # latest_version refer the installed version (the one in database) # published_version refer the version available on the repository 'installed_version': fields.function(_get_latest_version, method=True, string='Latest version', type='char'), 'latest_version': fields.char('Installed version', size=64, readonly=True), 'published_version': fields.char('Published Version', size=64, readonly=True), 'url': fields.char('URL', size=128), 'dependencies_id': fields.one2many('ir.module.module.dependency', 'module_id', 'Dependencies', readonly=True),
class account_tax_code(osv.osv): _name = "account.tax.code" _inherit = "account.tax.code" _columns = { 'python_invoice': fields.text( 'Invoice Python Code', help='Python code to apply or not the tax at invoice level'), 'applicable_invoice': fields.boolean( 'Applicable Invoice', help='Use python code to apply this tax code at invoice'), } _defaults = { 'python_invoice': '''# amount\n# base\n# fiscal_unit\n# invoice: account.invoice object or False# address: res.partner.address object or False\n# partner: res.partner object or None\n# table: base.element object or None\n\n#result = table.get_element_percent(cr,uid,'COD_TABLE','COD_ELEMENT')/100\n#result = base > fiscal_unit * 4\n\nresult = True''', 'applicable_invoice': False, } _order = 'sequence' def _applicable_invoice(self, cr, uid, tax_code_id, invoice_id, amount, base, context=None): localdict = { 'amount': amount, 'base': base, 'cr': cr, 'uid': uid, 'table': self.pool.get('base.element') } code = self.browse(cr, uid, tax_code_id, context=context) if code.applicable_invoice: invoice = self.pool.get('account.invoice').browse( cr, uid, invoice_id) fiscal_unit = 0.0 ctx = context.copy() ctx.update({'company_id': invoice.company_id.id}) fiscalyear_obj = self.pool.get('account.fiscalyear') if invoice.period_id: fiscal_unit = invoice.period_id.fiscalyear_id.fiscal_unit else: fiscalyear_ids = fiscalyear_obj.find( cr, uid, invoice.date_invoice or fields.date.context_today(self, cr, uid, context=ctx), context=ctx) fiscalyear = fiscalyear_obj.browse(cr, uid, fiscalyear_ids, context=context) fiscal_unit = fiscalyear.fiscal_unit localdict['fiscal_unit'] = fiscal_unit localdict['invoice'] = invoice localdict['address'] = invoice.address_invoice_id localdict['partner'] = invoice.partner_id exec code.python_invoice in localdict return localdict.get('result', True)
'zone_str': fields.related('partner_id', 'street', type='char'), 'type_id': fields.many2one('bag.type', 'Tipo'), 'format_id': fields.many2one('bag.format', 'Formato'), 'color_id': fields.many2one('bag.color', 'Color'), 'material_id': fields.many2one('bag.material', 'Material'), 'size_id': fields.many2one('bag.size', 'Tamano'), 'description': fields.char('Descripcion', size=64), 'brand': fields.char('Marca', size=64), 'model': fields.char('Modelo', size=64), 'airline_id': fields.many2one('bag.airline', 'Aerolinea', required=True), 'branch': fields.char('Sucursal', size=32), 'scale_id': fields.many2one('bag.scale', 'Escala'), 'incoming_guide': fields.char('Guia Entrante', size=32), 'case_number': fields.char('Numero Caso', size=32), 'outgoing_guide': fields.char('Guia Saliente', size=32), 'internal_notes': fields.text('Nota'), 'estimated_price': fields.float('Costo Estimado', digits=(12, 2)), 'price_buffer': fields.float('Buffer Importe', digits=(12, 2)), 'base_discount': fields.float('Descuento', digits=(4, 2)), #'prepayment': fields.function(_get_total_sena, string='Sena', store=True, readonly=True), 'prepayment': fields.float(digits=(12,2)), 'shipping_cost': fields.float('Costo Envio', digits=(12, 2)), 'action': fields.selection([('reparar', 'Reparar'), ('reemplazar', 'Reemplazar')], 'Action', required=True), 'user_id': fields.many2one('res.users', 'Usuario'), 'state_id': fields.many2one('bag.state', 'Estado', required=True), 'shelving_id': fields.many2one('bag.shelving', 'Estanteria'), 'urgent': fields.boolean('Urgente'), 'papers': fields.boolean('Papeles'), 'attention': fields.boolean('Atencion'), 'monitoring': fields.boolean('Seguimiento'), 'send': fields.boolean('Enviar'),
class delivery_route_line(osv.osv):
    """One stop (picking) of a delivery route, with its delivery lifecycle
    (draft -> confirm -> delivered -> received/returned/cancel) and
    denormalized data from the picking and the parent route."""
    _name = 'delivery.route.line'

    def _get_drivers(self, cr, uid, ids, fields, args, context=None):
        """Function field: picker/driver names copied from the parent route
        (single space when unset, so stored values are never False)."""
        result = {}
        for route in self.browse(cr, uid, ids):
            res = {}
            if route.route_id:
                res['picker'] = route.route_id.picker_id and route.route_id.picker_id.name or " "
                res['driver'] = route.route_id.driver_id and route.route_id.driver_id.name or " "
            else:
                res['picker'] = " "
                res['driver'] = " "
            result[route.id] = res
        return result

    def _get_origin(self, cr, uid, ids, fields, args, context=None):
        """Function field (multi='origin'): origin document, related SO/PO,
        delivery address, payment method and note from the picking."""
        result = {}
        for route in self.browse(cr, uid, ids):
            res = {}
            res['origin'] = route.picking_id.origin or route.picking_id.name or ""
            res['sale_order_id'] = route.picking_id.sale_id and route.picking_id.sale_id.id or False
            res['purchase_id'] = route.picking_id.purchase_id and route.picking_id.purchase_id.id or False
            res['address_id'] = route.picking_id.partner_id and route.picking_id.partner_id.id or False
            res['so_payment_method'] = route.picking_id.sale_id and route.picking_id.sale_id.so_payment_method or False
            res['picking_note'] = route.picking_id.note or " "
            result[route.id] = res
        return result

    def _get_box_type(self, cr, uid, ids, fields, args, context=None):
        """Function field: comma-separated box categories needed by the
        picking's products — 冷 (iced), 热 (warm), 正常 (normal) — each
        listed at most once.

        NOTE(review): nesting reconstructed from a collapsed source line;
        confirm that a 'warm' pack seen after 'iced' was indeed meant to
        fall through to the 'normal' branch.
        """
        res = {}
        for route in self.browse(cr, uid, ids):
            box_type = ''
            iced = False
            warm = False
            other = False
            pack_set = set([move.product_id.deliver_in
                            for move in route.picking_id.move_lines])
            for pack in pack_set:
                if pack in ['warm', 'iced', 'iced_n_warm'] and not iced:
                    if pack in ['iced', 'iced_n_warm']:
                        box_type += '冷, '
                        iced = True
                    if pack in ['warm', 'iced_n_warm'] and not warm:
                        box_type += '热, '
                        warm = True
                else:
                    if not other:
                        box_type += '正常, '
                        other = True
            if box_type:
                # Drop the trailing ', ' separator.
                box_type = box_type[:-2]
            res[route.id] = box_type
        return res

    def _route_to_update_after_picking_change(self, cr, uid, ids, fields=None, arg=None, context=None):
        """store= trigger: route lines to recompute when a picking changes."""
        if type(ids) != type([]):
            ids = [ids]
        return self.pool.get('delivery.route.line').search(
            cr, uid, [('picking_id', 'in', ids)]) or []

    def _route_to_update_after_parent_change(self, cr, uid, ids, fields=None, arg=None, context=None):
        """store= trigger: route lines to recompute when a route changes."""
        if type(ids) != type([]):
            ids = [ids]
        return self.pool.get('delivery.route.line').search(
            cr, uid, [('route_id', 'in', ids)]) or []

    # Recompute origin-derived fields on picking_id change and on relevant
    # stock.picking field changes.
    _store_origin = {
        'delivery.route.line': (lambda self, cr, uid, ids, context: ids,
                                ['picking_id'], 10),
        'stock.picking': (_route_to_update_after_picking_change,
                          ['sale_id', 'purchase_id', 'origin', 'note',
                           'so_payment_method', 'partner_id'], 10),
    }
    # Recompute picker/driver on route_id change and on route staff changes.
    _store_drivers = {
        'delivery.route.line': (lambda self, cr, uid, ids, context: ids,
                                ['route_id'], 10),
        'delivery.route': (_route_to_update_after_parent_change,
                           ['picker_id', 'driver_id'], 10),
    }

    _columns = {
        'sequence': fields.integer('Sequence'),
        'route_id': fields.many2one('delivery.route', 'Delivery Route',
                                    required=False, readonly=True,
                                    states={'draft': [('readonly', False)]},
                                    ondelete="cascade"),
        'picking_id': fields.many2one('stock.picking', 'Picking',
                                      required=True, select=True,
                                      readonly=True,
                                      states={'draft': [('readonly', False)]}),
        'purchase_id': fields.function(_get_origin, type='many2one',
                                       obj='purchase.order',
                                       store=_store_origin, multi="origin",
                                       string='Purchase Order'),
        'sale_order_id': fields.function(_get_origin, type='many2one',
                                         obj='sale.order',
                                         store=_store_origin, multi="origin",
                                         string='Sale Order'),
        'origin': fields.function(_get_origin, type='char', size=256,
                                  store=_store_origin, multi="origin",
                                  string='Origin'),
        'confirm_cs': fields.related('route_id', 'confirm_cs', type='boolean',
                                     string='Confirmed by CS'),
        'address_id': fields.function(_get_origin, type='many2one',
                                      relation='res.partner', multi="origin",
                                      string='Delivery Address'),
        'street': fields.related('address_id', 'street', type='char',
                                 size=256, string='Street'),
        'partner_phone': fields.related('address_id', 'phone', type='char',
                                        size=128, string='Partner Phone',
                                        readonly=True),
        'picker': fields.function(_get_drivers, type='char', size=128,
                                  store=_store_drivers, multi="drivers",
                                  string='Clerk'),
        'driver': fields.function(_get_drivers, type='char', size=128,
                                  store=_store_drivers, multi="drivers",
                                  string='Driver'),
        'driver_phone': fields.related('route_id', 'driver_id', 'employee_id',
                                       'mobile_phone', type='char', size=128,
                                       string='Driver Phone'),
        'so_payment_method': fields.function(_get_origin, type='char',
                                             size=128, multi="origin",
                                             string='Payment Method'),
        'picking_note': fields.function(_get_origin, type='html',
                                        multi="origin", string='DO Notes'),
        'box_type': fields.function(_get_box_type, type='char', size=32,
                                    store=False, string='Box Type'),
        'state': fields.selection([('draft', 'Draft'),
                                   ('confirm', 'Confirm'),
                                   ('delivered', 'In delivery'),
                                   ('received', 'Delivered'),
                                   ('returned', 'Returned'),
                                   ('cancel', 'Cancel')],
                                  'State', readonly=True),
        'visit_date': fields.datetime('Visit Date', states={
            'delivered': [('required', True)],
            'received': [('readonly', True)],
            'returned': [('readonly', True)],
        }),
        'note': fields.text('Notes'),
        'color': fields.integer('Color Index'),
        'exceptions': fields.boolean('Received with exceptions'),
        'complete_state': fields.selection([("not_planned", _("Not planned")),
                                            ("planned", _("Planned")),
                                            ("in_del", _("In delivery")),
                                            ("del_ok", _("Delivered")),
                                            ("del_ex", _("Exception")),
                                            ("del_rt", _("Returned")),
                                            ("del_rt_exp", _("No redelivery")),
                                            ("cancel", _("Cancel"))],
                                           'Delivery State'),
    }
    _defaults = {
        'state': 'draft',
        'complete_state': 'not_planned',
    }
    _order = 'sequence'

    def _read_group_route_ids(self, cr, uid, ids, domain,
                              read_group_order=None, access_rights_uid=None,
                              context=None):
        """Kanban group_by_full: show all draft routes as columns (optionally
        restricted to the forced DTS in context), none folded."""
        context = context or {}
        route_obj = self.pool.get('delivery.route')
        args = [('state', '=', 'draft')]
        if 'force_dts_id_kanban' in context:
            args.append(('dts_id', '=', context['force_dts_id_kanban']))
        route_ids = route_obj.search(cr, uid, args, order='name',
                                     context=context)
        result = route_obj.name_get(cr, uid, route_ids, context=context)
        fold = {}
        return result, fold

    def unlink(self, cr, uid, ids, context=None):
        """Forbid deletion once a line has progressed past draft/cancel."""
        for o in self.browse(cr, uid, ids, context=context):
            if o.state not in ('draft', 'cancel'):
                raise osv.except_osv(
                    _('Invalid action !'),
                    _('Cannot delete Delivery Route Line(s) which are already received, returned or delivered !'
                      ))
        return super(delivery_route_line, self).unlink(cr, uid, ids,
                                                       context=context)

    def action_draft(self, cr, uid, ids, context=None):
        # NOTE(review): 'delivery_state' is not declared in _columns of this
        # model (it looks like a stock.picking field) — confirm this write
        # is intentional.
        self.write(cr, uid, ids, {
            'state': 'draft',
            'delivery_state': 'not_planned'
        }, context=context)
        return True

    def action_received_do_line(self, cr, uid, line, context=None):
        """Per-line hook: mark the picking delivered and notify the order."""
        self.pool.get('stock.picking').write(cr, uid, [line.picking_id.id], {
            'delivered': True,
            'delivery_state': 'del_ok'
        }, context=context)
        self.notify_related_order(cr, uid, line,
                                  'The Order has been <b>Delivered</b>',
                                  context)
        return True

    def action_received_exp_do_line(self, cr, uid, line, context=None):
        """Per-line hook: delivered with exceptions."""
        self.pool.get('stock.picking').write(cr, uid, [line.picking_id.id], {
            'delivered': True,
            'delivery_state': 'del_ex'
        }, context=context)
        self.notify_related_order(
            cr, uid, line,
            'The Order has been <b>Delivered with exceptions</b>', context)
        return True

    def action_delivered_do_line(self, cr, uid, line, context=None):
        """Per-line hook: picking goes out for delivery (counter bumped)."""
        delivered_cpt = line.picking_id.delivered_cpt + 1
        self.pool.get('stock.picking').write(
            cr, uid, [line.picking_id.id], {
                'delivered_cpt': delivered_cpt,
                'delivery_state': 'in_del'
            }, context=context)
        self.notify_related_order(cr, uid, line,
                                  'The Order is <b>in Delivery</b>', context)
        return True

    def action_returned_do_line(self, cr, uid, line, context=None):
        """Per-line hook: returned with redelivery — flag the picking and
        create a fresh draft line for the redelivery attempt."""
        # FIX: was `contexet = context or {}` (typo) — `context` stayed None
        # when not provided and the update below crashed.
        context = context or {}
        context.update({'set_dts': False})
        self.pool.get('stock.picking').write(cr, uid, [line.picking_id.id],
                                             {'delivery_state': 'del_rt'},
                                             context=context)
        self.create(cr, uid, {
            'dts_id': False,
            'note': 'Re-delivery for ' + str(line.origin),
            'route_id': False,
            'return_reasons': [],
            'exceptions': False,
            'color': 0,
            'picking_id': line.picking_id and line.picking_id.id,
        }, context=context)
        self.notify_related_order(
            cr, uid, line,
            'The Order has been <b>Returned (Redelivery)</b>', context)
        return True

    def action_returned_exp_do_line(self, cr, uid, line, context=None):
        """Per-line hook: returned definitively (no redelivery)."""
        self.pool.get('stock.picking').write(cr, uid, [line.picking_id.id], {
            'delivered': True,
            'delivery_state': 'del_rt_exp'
        }, context=context)
        self.notify_related_order(
            cr, uid, line,
            'The Order has been <b>Returned (No Redelivery)</b>', context)
        return True

    def action_delivered(self, cr, uid, ids, context=None):
        # FIX: removed unused local `picking_obj`.
        for line in self.browse(cr, uid, ids, context=context):
            self.action_delivered_do_line(cr, uid, line, context=context)
        self.write(cr, uid, ids, {
            'complete_state': 'in_del',
            'state': 'delivered'
        }, context=context)
        return True

    def action_received(self, cr, uid, ids, context=None):
        for line in self.browse(cr, uid, ids, context=context):
            self.action_received_do_line(cr, uid, line, context=context)
        self.write(
            cr, uid, ids, {
                'complete_state': 'del_ok',
                'state': 'received',
                'visit_date': datetime.now().strftime('%Y-%m-%d %H:%M:%S')
            }, context=context)
        return True

    def action_received_exp(self, cr, uid, ids, context=None):
        for line in self.browse(cr, uid, ids, context=context):
            self.action_received_exp_do_line(cr, uid, line, context=context)
        self.write(
            cr, uid, ids, {
                'complete_state': 'del_ex',
                'state': 'received',
                'exceptions': True,
                'visit_date': datetime.now().strftime('%Y-%m-%d %H:%M:%S')
            }, context=context)
        return True

    def action_returned(self, cr, uid, ids, context=None):
        for line in self.browse(cr, uid, ids, context=context):
            self.action_returned_do_line(cr, uid, line, context=context)
        self.write(
            cr, uid, ids, {
                'complete_state': 'del_rt',
                'state': 'returned',
                'visit_date': datetime.now().strftime('%Y-%m-%d %H:%M:%S')
            }, context=context)
        return True

    def action_returned_exp(self, cr, uid, ids, context=None):
        for line in self.browse(cr, uid, ids, context=context):
            self.action_returned_exp_do_line(cr, uid, line, context=context)
        self.write(
            cr, uid, ids, {
                'complete_state': 'del_rt_exp',
                'state': 'returned',
                'exceptions': True,
                'visit_date': datetime.now().strftime('%Y-%m-%d %H:%M:%S')
            }, context=context)
        return True

    def action_cancel_do_line(self, cr, uid, line, context=None):
        """Per-line hook: cancel — roll the picking's delivery counter back
        (never below zero) and reset its delivery state."""
        delivered_cpt = line.picking_id.delivered_cpt - 1
        if delivered_cpt < 0:
            delivered_cpt = 0
        self.pool.get('stock.picking').write(
            cr, uid, line.picking_id.id, {
                'delivered': False,
                'delivered_cpt': delivered_cpt,
                'delivery_state': 'not_planned'
            }, context=context)
        self.notify_related_order(cr, uid, line,
                                  'The Delivery has been <b>Canceled</b>',
                                  context)
        return True

    def action_cancel(self, cr, uid, ids, context=None):
        for line in self.browse(cr, uid, ids, context=context):
            self.action_cancel_do_line(cr, uid, line, context=context)
        self.write(cr, uid, ids, {
            'state': 'cancel',
            'complete_state': 'cancel',
            'exceptions': False
        }, context=context)
        return True

    def action_confirm_do_line(self, cr, uid, line, context=None):
        """Per-line hook: mark the picking as planned and notify the order."""
        self.pool.get('stock.picking').write(cr, uid, line.picking_id.id,
                                             {'delivery_state': 'planned'},
                                             context=context)
        self.notify_related_order(cr, uid, line,
                                  'The Delivery has been <b>Planned</b>',
                                  context)
        return True

    def action_confirm(self, cr, uid, ids, context=None):
        for line in self.browse(cr, uid, ids, context=context):
            # A picking already delivered through another route line must not
            # be planned again.
            if line.picking_id.delivered:
                raise osv.except_osv(
                    _('Error'),
                    _('The picking %s (origin:%s) was delivered in other delivery route'
                      % (line.picking_id.name, line.picking_id.origin)))
            self.action_confirm_do_line(cr, uid, line, context=context)
        self.write(cr, uid, ids, {
            'complete_state': 'planned',
            'state': 'confirm'
        }, context=context)
        return True

    def notify_related_order(self, cr, uid, line, delivery_state,
                             context=None):
        """Post a notification message on the related sale or purchase order
        describing the delivery event, route and staff (if any)."""
        res_id = False
        model = False
        if line.sale_order_id:
            res_id = line.sale_order_id.id
            model = 'sale.order'
        elif line.purchase_id:
            res_id = line.purchase_id.id
            model = 'purchase.order'
        if res_id and model:
            drivers = ''
            body = str(delivery_state)
            if line.visit_date:
                body += " at " + str(line.visit_date)
            body += "<br />"
            if line.route_id.name:
                body += "<b>Route</b>: " + str(line.route_id.name) + "<br />"
            if line.route_id.driver_id:
                drivers += str(line.route_id.driver_id.name.encode('utf-8'))
                if line.route_id.driver_id.employee_id and line.route_id.driver_id.employee_id.mobile_phone:
                    drivers += " (" + str(
                        line.route_id.driver_id.employee_id.mobile_phone) + ")"
            if line.route_id.picker_id:
                if drivers:
                    drivers += ' & '
                drivers += str(line.route_id.picker_id.name.encode('utf-8'))
            if drivers:
                # NOTE(review): the trailing ')' has no opening counterpart
                # in the message — possibly a leftover; kept as-is.
                body += "by: " + drivers + ")"
            self.pool.get('mail.message').create(
                cr, uid, {
                    'type': 'notification',
                    'record_name': 'Delivery Route Line',
                    'body': body,
                    'res_id': res_id,
                    'model': model,
                })
        return True

    _group_by_full = {
        'route_id': _read_group_route_ids,
    }
return dict.fromkeys(ids, image) # ok to use .fromkeys() as the image is same for all _columns = { 'host': fields.char('Host', size=64, required=True), 'port': fields.integer('Port', required=True), 'ooo_restart_cmd': fields.char('OOO restart command', size=256, \ help='Enter the shell command that will be executed to restart the LibreOffice/OpenOffice background process.'+ \ 'The command will be executed as the user of the OpenERP server process,'+ \ 'so you may need to prefix it with sudo and configure your sudoers file to have this command executed without password.'), 'state':fields.selection([ ('init','Init'), ('error','Error'), ('done','Done'), ], 'State', select=True, readonly=True), 'msg': fields.text('Message', readonly=True), 'error_details': fields.text('Error Details', readonly=True), 'link':fields.char('Installation Manual', size=128, help='Installation (Dependencies and Base system setup)', readonly=True), 'config_logo': fields.function(_get_image_fn, string='Image', type='binary', method=True), } def default_get(self, cr, uid, fields, context=None): config_obj = self.pool.get('oo.config') data = super(aeroo_config_installer, self).default_get(cr, uid, fields, context=context) ids = config_obj.search(cr, 1, [], context=context) if ids: res = config_obj.read(cr, 1, ids[0], context=context) del res['id'] data.update(res) return data
class crm_lead(crm_case, osv.osv): """ CRM Lead Case """ _name = "crm.lead" _description = "Lead/Opportunity" _order = "priority,date_action,id desc" _inherit = ['mail.thread', 'res.partner.address'] def _read_group_stage_ids(self, cr, uid, ids, domain, read_group_order=None, access_rights_uid=None, context=None): access_rights_uid = access_rights_uid or uid stage_obj = self.pool.get('crm.case.stage') order = stage_obj._order if read_group_order == 'stage_id desc': # lame hack to allow reverting search, should just work in the trivial case order = "%s desc" % order stage_ids = stage_obj._search( cr, uid, ['|', ('id', 'in', ids), ('case_default', '=', 1)], order=order, access_rights_uid=access_rights_uid, context=context) result = stage_obj.name_get(cr, access_rights_uid, stage_ids, context=context) # restore order of the search result.sort( lambda x, y: cmp(stage_ids.index(x[0]), stage_ids.index(y[0]))) return result _group_by_full = {'stage_id': _read_group_stage_ids} # overridden because res.partner.address has an inconvenient name_get, # especially if base_contact is installed. 
def name_get(self, cr, user, ids, context=None): if isinstance(ids, (int, long)): ids = [ids] return [(r['id'], tools.ustr(r[self._rec_name])) for r in self.read(cr, user, ids, [self._rec_name], context)] # overridden because if 'base_contact' is installed - their default_get() will remove # 'default_type' from context making it impossible to record an 'opportunity' def default_get(self, cr, uid, fields_list, context=None): return super(osv.osv, self).default_get(cr, uid, fields_list, context=context) def _compute_day(self, cr, uid, ids, fields, args, context=None): """ @param cr: the current row, from the database cursor, @param uid: the current user’s ID for security checks, @param ids: List of Openday’s IDs @return: difference between current date and log date @param context: A standard dictionary for contextual values """ cal_obj = self.pool.get('resource.calendar') res_obj = self.pool.get('resource.resource') res = {} for lead in self.browse(cr, uid, ids, context=context): for field in fields: res[lead.id] = {} duration = 0 ans = False if field == 'day_open': if lead.date_open: date_create = datetime.strptime( lead.create_date, "%Y-%m-%d %H:%M:%S") date_open = datetime.strptime(lead.date_open, "%Y-%m-%d %H:%M:%S") ans = date_open - date_create date_until = lead.date_open elif field == 'day_close': if lead.date_closed: date_create = datetime.strptime( lead.create_date, "%Y-%m-%d %H:%M:%S") date_close = datetime.strptime(lead.date_closed, "%Y-%m-%d %H:%M:%S") date_until = lead.date_closed ans = date_close - date_create if ans: resource_id = False if lead.user_id: resource_ids = res_obj.search( cr, uid, [('user_id', '=', lead.user_id.id)]) if len(resource_ids): resource_id = resource_ids[0] duration = float(ans.days) if lead.section_id and lead.section_id.resource_calendar_id: duration = float(ans.days) * 24 new_dates = cal_obj.interval_get( cr, uid, lead.section_id.resource_calendar_id and lead.section_id.resource_calendar_id.id or False, 
datetime.strptime(lead.create_date, '%Y-%m-%d %H:%M:%S'), duration, resource=resource_id) no_days = [] date_until = datetime.strptime(date_until, '%Y-%m-%d %H:%M:%S') for in_time, out_time in new_dates: if in_time.date not in no_days: no_days.append(in_time.date) if out_time > date_until: break duration = len(no_days) res[lead.id][field] = abs(int(duration)) return res def _history_search(self, cr, uid, obj, name, args, context=None): res = [] msg_obj = self.pool.get('mail.message') message_ids = msg_obj.search(cr, uid, [('email_from', '!=', False), ('subject', args[0][1], args[0][2])], context=context) lead_ids = self.search(cr, uid, [('message_ids', 'in', message_ids)], context=context) if lead_ids: return [('id', 'in', lead_ids)] else: return [('id', '=', '0')] def _get_email_subject(self, cr, uid, ids, fields, args, context=None): res = {} for obj in self.browse(cr, uid, ids, context=context): res[obj.id] = '' for msg in obj.message_ids: if msg.email_from: res[obj.id] = msg.subject break return res _columns = { # Overridden from res.partner.address: 'partner_id': fields.many2one('res.partner', 'Partner', ondelete='set null', select=True, help="Optional linked partner, usually after conversion of the lead"), 'id': fields.integer('ID', readonly=True), 'name': fields.char('Name', size=64, select=1), 'active': fields.boolean('Active', required=False), 'date_action_last': fields.datetime('Last Action', readonly=1), 'date_action_next': fields.datetime('Next Action', readonly=1), 'email_from': fields.char('Email', size=128, help="E-mail address of the contact", select=1), 'section_id': fields.many2one('crm.case.section', 'Sales Team', \ select=True, help='When sending mails, the default email address is taken from the sales team.'), 'create_date': fields.datetime('Creation Date' , readonly=True), 'email_cc': fields.text('Global CC', size=252 , help="These email addresses will be added to the CC field of all inbound and outbound emails for this record before being 
sent. Separate multiple email addresses with a comma"), 'description': fields.text('Notes'), 'write_date': fields.datetime('Update Date' , readonly=True), 'categ_id': fields.many2one('crm.case.categ', 'Category', \ domain="['|',('section_id','=',section_id),('section_id','=',False), ('object_id.model', '=', 'crm.lead')]"), 'type_id': fields.many2one('crm.case.resource.type', 'Campaign', \ domain="['|',('section_id','=',section_id),('section_id','=',False)]", help="From which campaign (seminar, marketing campaign, mass mailing, ...) did this contact come from?"), 'channel_id': fields.many2one('crm.case.channel', 'Channel', help="Communication channel (mail, direct, phone, ...)"), 'contact_name': fields.char('Contact Name', size=64), 'partner_name': fields.char("Customer Name", size=64,help='The name of the future partner that will be created while converting the lead into opportunity', select=1), 'optin': fields.boolean('Opt-In', help="If opt-in is checked, this contact has accepted to receive emails."), 'optout': fields.boolean('Opt-Out', help="If opt-out is checked, this contact has refused to receive emails or unsubscribed to a campaign."), 'type':fields.selection([ ('lead','Lead'), ('opportunity','Opportunity'), ],'Type', help="Type is used to separate Leads and Opportunities"), 'priority': fields.selection(crm.AVAILABLE_PRIORITIES, 'Priority', select=True), 'date_closed': fields.datetime('Closed', readonly=True), 'stage_id': fields.many2one('crm.case.stage', 'Stage', domain="[('section_ids', '=', section_id)]"), 'user_id': fields.many2one('res.users', 'Salesman', select=1), 'referred': fields.char('Referred By', size=64), 'date_open': fields.datetime('Opened', readonly=True), 'day_open': fields.function(_compute_day, string='Days to Open', \ multi='day_open', type="float", store=True), 'day_close': fields.function(_compute_day, string='Days to Close', \ multi='day_close', type="float", store=True), 'state': fields.selection(crm.AVAILABLE_STATES, 'State', 
size=16, readonly=True, help='The state is set to \'Draft\', when a case is created.\ \nIf the case is in progress the state is set to \'Open\'.\ \nWhen the case is over, the state is set to \'Done\'.\ \nIf the case needs to be reviewed then the state is set to \'Pending\'.' ), 'message_ids': fields.one2many('mail.message', 'res_id', 'Messages', domain=[('model','=',_name)]), 'subjects': fields.function(_get_email_subject, fnct_search=_history_search, string='Subject of Email', type='char', size=64), # Only used for type opportunity 'partner_address_id': fields.many2one('res.partner.address', 'Partner Contact', domain="[('partner_id','=',partner_id)]"), 'probability': fields.float('Probability (%)',group_operator="avg"), 'planned_revenue': fields.float('Expected Revenue'), 'ref': fields.reference('Reference', selection=crm._links_get, size=128), 'ref2': fields.reference('Reference 2', selection=crm._links_get, size=128), 'phone': fields.char("Phone", size=64), 'date_deadline': fields.date('Expected Closing'), 'date_action': fields.date('Next Action Date', select=True), 'title_action': fields.char('Next Action', size=64), 'stage_id': fields.many2one('crm.case.stage', 'Stage', domain="[('section_ids', '=', section_id)]"), 'color': fields.integer('Color Index'), 'partner_address_name': fields.related('partner_address_id', 'name', type='char', string='Partner Contact Name', readonly=True), 'partner_address_email': fields.related('partner_address_id', 'email', type='char', string='Partner Contact Email', readonly=True), 'company_currency': fields.related('company_id', 'currency_id', 'symbol', type='char', string='Company Currency', readonly=True), 'user_email': fields.related('user_id', 'user_email', type='char', string='User Email', readonly=True), 'user_login': fields.related('user_id', 'login', type='char', string='User Login', readonly=True), } _defaults = { 'active': lambda *a: 1, 'user_id': crm_case._get_default_user, 'email_from': crm_case._get_default_email, 
'state': lambda *a: 'draft', 'type': lambda *a: 'lead', 'section_id': crm_case._get_section, 'company_id': lambda s, cr, uid, c: s.pool.get('res.company')._company_default_get( cr, uid, 'crm.lead', context=c), 'priority': lambda *a: crm.AVAILABLE_PRIORITIES[2][0], 'color': 0, } def onchange_partner_address_id(self, cr, uid, ids, add, email=False): """This function returns value of partner email based on Partner Address """ if not add: return {'value': {'email_from': False, 'country_id': False}} address = self.pool.get('res.partner.address').browse(cr, uid, add) return { 'value': { 'email_from': address.email, 'phone': address.phone, 'country_id': address.country_id.id } } def on_change_optin(self, cr, uid, ids, optin): return {'value': {'optin': optin, 'optout': False}} def on_change_optout(self, cr, uid, ids, optout): return {'value': {'optout': optout, 'optin': False}} def onchange_stage_id(self, cr, uid, ids, stage_id, context={}): if not stage_id: return {'value': {}} stage = self.pool.get('crm.case.stage').browse(cr, uid, stage_id, context) if not stage.on_change: return {'value': {}} return {'value': {'probability': stage.probability}} def stage_find_percent(self, cr, uid, percent, section_id): """ Return the first stage with a probability == percent """ stage_pool = self.pool.get('crm.case.stage') if section_id: ids = stage_pool.search(cr, uid, [("probability", '=', percent), ("section_ids", 'in', [section_id])]) else: ids = stage_pool.search(cr, uid, [("probability", '=', percent)]) if ids: return ids[0] return False def stage_find_lost(self, cr, uid, section_id): return self.stage_find_percent(cr, uid, 0.0, section_id) def stage_find_won(self, cr, uid, section_id): return self.stage_find_percent(cr, uid, 100.0, section_id) def case_open(self, cr, uid, ids, *args): for l in self.browse(cr, uid, ids): # When coming from draft override date and stage otherwise just set state if l.state == 'draft': if l.type == 'lead': message = _("The lead '%s' has been 
opened.") % l.name elif l.type == 'opportunity': message = _( "The opportunity '%s' has been opened.") % l.name else: message = _("The case '%s' has been opened.") % l.name self.log(cr, uid, l.id, message) value = {'date_open': time.strftime('%Y-%m-%d %H:%M:%S')} self.write(cr, uid, [l.id], value) if l.type == 'opportunity' and not l.stage_id: stage_id = self.stage_find(cr, uid, l.section_id.id or False, [('sequence', '>', 0)]) if stage_id: self.stage_set(cr, uid, [l.id], stage_id) res = super(crm_lead, self).case_open(cr, uid, ids, *args) return res def case_close(self, cr, uid, ids, *args): res = super(crm_lead, self).case_close(cr, uid, ids, *args) self.write(cr, uid, ids, {'date_closed': time.strftime('%Y-%m-%d %H:%M:%S')}) for case in self.browse(cr, uid, ids): if case.type == 'lead': message = _("The lead '%s' has been closed.") % case.name else: message = _("The case '%s' has been closed.") % case.name self.log(cr, uid, case.id, message) return res
    # Columns of a UPS shipping-register model whose class statement lies
    # above this chunk; the dict's last entry is truncated by the extraction.
    _columns = {
        'name': fields.char(string='Name', select="1", size=150, readonly=True),
        # UPS service code, restricted to entries of type 'service'.
        'service_type': fields.many2one('ups.codes', 'Service Type', domain=[('type', '=', 'service')], select="1"),
        'package_det': fields.one2many('ups.shippingregister.package', 'shipping_register_rel', string='Packages',),
        'to_address': fields.many2one('res.partner.address', 'Shipping Address', required=True),
        'from_address': fields.many2one('res.partner.address', 'From Address', required=True),
        'shipper_address': fields.many2one('res.partner.address', 'Shipper Address', required=True),
        'saturday_delivery': fields.boolean('Saturday Delivery?'),
        'description': fields.text('Description'),
        'state':fields.selection(STATE_SELECTION, 'Status', readonly=True,),
        # The following are UPS filled information
        # NOTE(review): 'weght' typo lives inside the user-visible help string;
        # left untouched here because it is runtime text, not a comment.
        'billed_weight':fields.float('Billed Weight', digits=(10, 4), readonly=True, help=(
            'The billed weght may be different from the actual weight.'
            'This is computed by UPS.')),
        'billed_weight_uom':fields.many2one('product.uom', 'Billed Weight UOM', readonly=True),
        'total_amount':fields.float('Total Amount', digits=(14, 4), select="1", readonly=True),
        'total_amount_currency':fields.many2one('res.currency', 'Total Amount Currency', select="2", readonly=True,),
        'digest': fields.binary('Information digest for DIGEST'),
        # Truncated entry: the remainder of this field definition (and the
        # closing brace) lies outside this chunk.
        'notificationemailaddr': fields.char('Notification eMail Addresses',
class stuffing_memo_line(osv.osv):
    # One product line of a stuffing memo. Most fields are related() columns
    # denormalised (store=True) from the linked delivery order for searching.
    _name = "stuffing.memo.line"
    _columns = {
        'name': fields.char('Description'),
        'product_id': fields.many2one('product.product', 'Product'),
        # Pulled from the product record.
        'manufacturer': fields.related('product_id', 'manufacturer', type='many2one', relation='res.partner', string='Manufacturer'),
        'product_qty': fields.float(
            'Quantity UoM',
            digits_compute=dp.get_precision('Product Unit of Measure'),
            help="This is the quantity of products from an inventory "
                 "point of view. For moves in the state 'done', this is the "
                 "quantity of products that were actually moved. For other "
                 "moves, this is the quantity of product that is planned to "
                 "be moved. Lowering this quantity does not generate a "
                 "backorder. Changing this quantity on assigned moves affects "
                 "the product reservation, and should be done with care."),
        'product_uom': fields.many2one('product.uom', 'Unit of Measure'),
        # Quantity expressed in the packaging unit (UoP).
        'product_uop_qty': fields.float(
            'Quantity UoP',
            digits_compute=dp.get_precision('Product Unit of Measure'),
            help="This is the quantity of products from an inventory "
                 "point of view. For moves in the state 'done', this is the "
                 "quantity of products that were actually moved. For other "
                 "moves, this is the quantity of product that is planned to "
                 "be moved. Lowering this quantity does not generate a "
                 "backorder. Changing this quantity on assigned moves affects "
                 "the product reservation, and should be done with care."),
        'product_uop': fields.many2one('product.uom', 'Unit of Packaging'),
        'stock_move_id': fields.many2one('stock.move', 'Stock Move'),
        # 'picking_id' : fields.related('stock_move_id','picking_id',type='many2one',relation='stock.picking',string='Delivery Order', store=True),
        'picking_id': fields.many2one('stock.picking', string='Delivery Order'),
        # Related chain: picking -> sale order / booking / customer.
        'sale_id': fields.related('picking_id', 'sale_id', type='many2one', relation='sale.order', string='SC', store=True),
        'booking_id': fields.related('picking_id', 'container_book_id', type='many2one', relation='container.booking', string='SI No.', store=True),
        'partner_id': fields.related('picking_id', 'partner_id', type='many2one', relation='res.partner', string='Customer', store=True),
        'dest_port_id': fields.related('booking_id', 'port_to', type='many2one', relation='res.port', string='Destination', store=True),
        'prodlot_id': fields.many2one('stock.production.lot', 'Serial Number'),
        'tracking_id': fields.many2one('stock.tracking', 'Pack'),
        # Parent memo.
        'stuffing_id': fields.many2one('stuffing.memo', 'Stuffing Memo'),
        'priority': fields.selection([('red', 'Priority 1'), ('orange', 'Priority 2')], 'Priority', required=False),
        'priority_reason': fields.text('Priority Reason'),
        'container_size': fields.many2one('container.size', 'Container Size'),
        'remark': fields.text('Remark'),
    }
    # Tail of a STATE_SELECTION list whose opening bracket lies above this
    # chunk: extended multi-level approval chain for purchase orders.
    ('wait', 'Waiting'),
    ('confirmed', 'Waiting Procurement Manager Approve'),
    ('confirmed2', 'Waiting Head of Procurement Division'),
    ('confirmed3', 'Waiting Head of Division Approve'),
    ('confirmed4', 'Waiting CEO Approve'),
    ('approved', 'Approved'),
    ('except_picking', 'Shipping Exception'),
    ('except_invoice', 'Invoice Exception'),
    ('done', 'Done'),
    ('cancel', 'Cancelled')
]

    # Columns of a purchase-order extension whose class statement lies above
    # this chunk; the dict is truncated mid-entry at the end.
    _columns = {
        'state' : fields.selection(STATE_SELECTION, 'State', readonly=True, help="The state of the purchase order or the quotation request. A quotation is a purchase order in a 'Draft' state. Then the order has to be confirmed by the user, the state switch to 'Confirmed'. Then the supplier must confirm the order to change the state to 'Approved'. When the purchase order is paid and received, the state becomes 'Done'. If a cancel action occurs in the invoice or in the reception of goods, the state becomes in exception.", select=True),
        'budget_info_ids_po' : fields.many2many('budget.info.po', 'budget_info_rel_po', 'order_id', 'budget_info_id_po', 'Budget Line', readonly=True),
        'budget_note' : fields.text('Budget Note'),
        'budget_note_line_ids' : fields.one2many('budget.note.po', 'order_id', 'Budget Note History'),
        #######DICOUNT#####################
        # Recomputed when order lines change or 'discount_total' is written.
        'amount_untaxed': fields.function(_amount_all, method=True, digits_compute= dp.get_precision('Purchase Price'), string='Untaxed Amount',
            store={
                'purchase.order.line': (_get_order, None, 10),
                'purchase.order': (lambda self, cr, uid, ids, c={}: ids, ['discount_total'], 20),
            }, multi="sums", help="The amount without tax"),
        'amount_tax': fields.function(_amount_all, method=True, digits_compute= dp.get_precision('Purchase Price'), string='Taxes',
            store={
                'purchase.order.line': (_get_order, None, 10),
                'purchase.order': (lambda self, cr, uid, ids, c={}: ids, ['discount_total'], 20),
            }, multi="sums", help="The tax amount"),
        # Truncated entry: the 'store' dict and the rest of this definition
        # lie outside this chunk.
        'amount_total': fields.function(_amount_all, method=True, digits_compute= dp.get_precision('Purchase Price'), string='Total', store={
class xml_template(osv.osv):
    """XML template storage: generates XML from a stored template,
    validates it against an XSD/RelaxNG schema and attaches or writes
    the result."""
    _name = "xml.template"
    _description = "XML Template"
    _order = "name"

    _columns = {
        "comment": fields.text("Comment"),
        "content": fields.text("Content", required=True, help="Contains XML-template specification"),
        "name": fields.char("Name", size=256, required=True),
        "schema": fields.char("XML schema", size=256, help="Generated XML-file will be checked against this"),
        "reference_ids": fields.one2many("xml.template.ref", "xml_template_id", "References"),
        "regulation_ids": fields.one2many("xml.template.url", "xml_template_id", "Regulations"),
        "valid_from": fields.date("Valid from"),
        "valid_to": fields.date("Valid to"),
    }

    _sql_constraints = [
        ("xml_template_name_index", "unique (name)", "The Name has to be unique!"),
    ]

    def button_generate_template(self, cr, uid, ids, id):
        """Regenerate 'content' from each record's schema via the matching
        XSL stylesheet (xsd2xml for .xsd, rng2xml for .rng).

        :raises osv.except_osv: when the schema is neither .xsd nor .rng
        """
        for obj in self.browse(cr, uid, ids):
            if not obj.schema:
                continue
            # no_network=False deliberately allows fetching remote schemas
            # and the remote XSL stylesheets below.
            parser = etree.XMLParser(no_network=False)
            schema_root = etree.parse(obj.schema, parser)
            if ".xsd" in obj.schema.lower():
                xslt_root = etree.parse(
                    "http://www.swing-system.com/xsl/xsd2xml.xsl",
                    parser)  # can be optimized as function field!
            elif ".rng" in obj.schema.lower():
                xslt_root = etree.parse(
                    "http://www.swing-system.com/xsl/rng2xml.xsl",
                    parser)  # can be optimized as function field!
            else:
                # BUG FIX: was 'obj.schmea' (AttributeError instead of the
                # intended error message); also format outside _() for i18n.
                raise osv.except_osv(
                    _("Data Error !"),
                    _("Unknown schema type: %s") % obj.schema)
            transform = etree.XSLT(xslt_root)
            template = transform(schema_root)
            self.write(cr, uid, [obj.id], {'content': template})
    # end def button_generate_template

    def generate_xml(self, cr, uid, id, nsmap=None, **scope_dict):
        """Generates the XML and returns it
        :param nsmap: (dictionary of) namespaces
        :param scope_dict: (dictionary of) navigation roots
        :return: root of XML-structure
        :raises osv.except_osv: on an invalid template or schema violation
        """
        obj = self.browse(cr, uid, id)
        if obj and obj.content:
            generator = XML_Generator.XML_Generator(
                obj.content, nsmap)  # can be optimized as function field!
            xml = generator.generate(**scope_dict)
            if obj.schema:
                if not self.is_schema_valid(cr, uid, id, xml):
                    raise osv.except_osv(
                        _("Data Error !"),
                        _("The generated XML does not conform to schema '%s'") % obj.schema)
            return xml
        raise osv.except_osv(
            _("Data Error !"),
            _("Invalid Template with ID: %s") % id)
    # end def generate_xml

    def is_schema_valid(self, cr, uid, id, xml):
        """Checks the validity of the provided XML according to the
        specified schema
        :param xml: root of XML-structure to be checked
        :return: Boolean
        :raises osv.except_osv: on an invalid template or unknown schema type
        """
        obj = self.browse(cr, uid, id)
        if not obj:
            raise osv.except_osv(
                _("Data Error !"),
                _("Invalid Template with ID: %s") % id)
        parser = etree.XMLParser(no_network=False)
        schema_root = etree.parse(obj.schema, parser)
        if ".xsd" in obj.schema.lower():
            schema = etree.XMLSchema(schema_root)
        elif ".rng" in obj.schema.lower():
            schema = etree.RelaxNG(schema_root)
        else:
            # BUG FIX: was 'obj.schmea' (AttributeError on the error path).
            raise osv.except_osv(
                _("Data Error !"),
                _("Unknown schema type: %s") % obj.schema)
        # Unreachable trailing 'return False' removed: every branch above
        # either returns or raises.
        return schema.validate(xml)
    # end def is_schema_valid

    def _remove_attachments(self, cr, uid, attach_to, name, fname, description, context=None):
        """Delete previous attachments on attach_to matching name, description
        and filename, so attach_xml() replaces instead of duplicating."""
        attachment_obj = self.pool.get('ir.attachment')
        att_ids = attachment_obj.search(
            cr, uid,
            [('res_model', '=', attach_to._table_name),
             ('res_id', '=', attach_to.id),
             ('name', '=', name),
             ('description', '=', description),
             ('datas_fname', '=', "%s.xml" % fname)])
        if att_ids:
            attachment_obj.unlink(cr, uid, att_ids, context=context)
    # end def _remove_attachments

    def attach_xml(self, cr, uid, id, attach_to, xml, name, fname, description=False, context=None):
        """ Creates an attachment and returns its ID
        Note that attachments with identical name/filename/description will
        be replaced
        :param attach_to: object that receives the attachment
        :param xml: root of XML-structure that will be attached
        :param name: name of the attachment
        :param fname: filename of the attachment (without extension .xml)
        :param description: description of the attachment
        :return: ID of new attachment object
        """
        obj = self.browse(cr, uid, id)
        if not obj:
            raise osv.except_osv(
                _("Data Error !"),
                _("Invalid Template with ID: %s") % id)
        attachment_obj = self.pool.get('ir.attachment')
        attach_ref_obj = self.pool.get('ir.attachment.ref')
        self._remove_attachments(cr, uid, attach_to, name, fname, description, context=context)
        vals = {
            'name': name,
            'datas': base64.encodestring(etree.tostring(xml, pretty_print=False)),
            'datas_fname': "%s.xml" % fname,
            'res_model': attach_to._table_name,
            'res_id': attach_to.id,
            'description': description,
        }
        res = attachment_obj.create(cr, uid, vals, context=context)
        for reference in obj.reference_ids:
            vals = {
                "ir_attachment_id": res,
                "name": reference.name,
            }
            attach_ref_obj.create(cr, uid, vals, context=context)
        return res
    # end def attach_xml

    def write_file(self, cr, uid, id, xml, filename):
        """Writes the XML to the specified filename
        :param xml: root of XML-structure to be written
        :param filename: full file name
        """
        # 'with' guarantees the handle is closed even if tostring()/write()
        # raises (the original leaked the handle on failure).
        with open(filename, "w") as f:
            f.write(etree.tostring(xml, pretty_print=True))
    # end def write_file
_description = "CMS Placeholder" _rec_name = "slot_id" _order = "slot_id" SHORT_BODY_LENGTH = 100 def _get_short_body(self, cr, uid, ids, field_name, arg, context=None): res = {} for r in self.browse(cr, uid, ids, context=context): if not r.body: res[r.id] = r.body try: strip_body = html.fromstring(r.body).text_content().strip() except Exception, exc: strip_body = "NOT VALID HTML_TEXT" short_body = strip_body[: self.SHORT_BODY_LENGTH] if len(strip_body) > self.SHORT_BODY_LENGTH: short_body += "..." res[r.id] = short_body return res _columns = { "slot_id": fields.many2one("cms.slot", "Slot", required=True, select=1), "body": fields.text("Body"), "title_id": fields.many2one("cms.title"), "short_body": fields.function(_get_short_body, method=True, string="Short Body", type="char"), } cms_placeholder()