class user_story_phase(osv.Model): _name = "user.story.phase" _description = "User Story Phase" def _check_recursion(self, cr, uid, ids, context=None): if context is None: context = {} data_phase = self.browse(cr, uid, ids[0], context=context) prev_ids = data_phase.previous_phase_ids next_ids = data_phase.next_phase_ids # it should neither be in prev_ids nor in next_ids if (data_phase in prev_ids) or (data_phase in next_ids): return False ids = [id for id in prev_ids if id in next_ids] # both prev_ids and next_ids must be unique if ids: return False # unrelated user_story prev_ids = [rec.id for rec in prev_ids] next_ids = [rec.id for rec in next_ids] # iter prev_ids while prev_ids: cr.execute( 'SELECT distinct prv_phase_id FROM user_story_phase_rel WHERE next_phase_id IN %s', (tuple(prev_ids), )) prv_phase_ids = filter(None, map(lambda x: x[0], cr.fetchall())) if data_phase.id in prv_phase_ids: return False ids = [id for id in prv_phase_ids if id in next_ids] if ids: return False prev_ids = prv_phase_ids # iter next_ids while next_ids: cr.execute( 'SELECT distinct next_phase_id FROM user_story_phase_rel WHERE prv_phase_id IN %s', (tuple(next_ids), )) next_phase_ids = filter(None, map(lambda x: x[0], cr.fetchall())) if data_phase.id in next_phase_ids: return False ids = [id for id in next_phase_ids if id in prev_ids] if ids: return False next_ids = next_phase_ids return True def _check_dates(self, cr, uid, ids, context=None): for phase in self.read(cr, uid, ids, ['date_start', 'date_end'], context=context): if phase['date_start'] and phase[ 'date_end'] and phase['date_start'] > phase['date_end']: return False return True # # def _compute_progress(self, cr, uid, ids, field_name, arg, context=None): # res = {} # if not ids: # return res # for phase in self.browse(cr, uid, ids, context=context): # if phase.state=='done': # res[phase.id] = 100.0 # continue # elif phase.state=="cancelled": # res[phase.id] = 0.0 # continue # elif not phase.task_ids: # res[phase.id] = 0.0 # continue # # tot = done = 0.0 # for task in phase.task_ids: # tot += task.total_hours # done += min(task.effective_hours, task.total_hours) # # if not tot: # res[phase.id] = 0.0 # else: # res[phase.id] = round(100.0 * done / tot, 2) # return res _columns = { 'name': fields.char("Name", size=64, required=True), 'user_story_id': fields.many2one('user.story', 'User Story', required=True, select=True), 'state': fields.selection( [('draft', 'New'), ('cancelled', 'Cancelled'), ('open', 'In Progress'), ('pending', 'Pending'), ('done', 'Done')], 'Status', readonly=True, required=True, help= 'If the phase is created the status \'Draft\'.\n If the phase is started, the status becomes \'In Progress\'.\n If review is needed the phase is in \'Pending\' status.\ \n If the phase is over, the status is set to \'Done\'.' 
), 'date_start': fields.datetime( 'Start Date', select=True, help= "It's computed by the scheduler according the project date or the end date of the previous phase.", states={ 'done': [('readonly', True)], 'cancelled': [('readonly', True)] }), 'date_end': fields.datetime( 'End Date', help= " It's computed by the scheduler according to the start date and the duration.", states={ 'done': [('readonly', True)], 'cancelled': [('readonly', True)] }), 'sequence': fields.integer( 'Sequence', select=True, help="Gives the sequence order when displaying a list of phases."), 'duration': fields.float('Duration', required=True, help="By default in days", states={ 'done': [('readonly', True)], 'cancelled': [('readonly', True)] }), 'next_phase_ids': fields.many2many('user.story.phase', 'user_story_phase_rel', 'prv_phase_id', 'next_phase_id', 'Next Phases', states={'cancelled': [('readonly', True)]}), 'previous_phase_ids': fields.many2many('user.story.phase', 'user_story_phase_rel', 'next_phase_id', 'prv_phase_id', 'Previous Phases', states={'cancelled': [('readonly', True)]}), 'product_uom': fields.many2one( 'product.uom', 'Duration Unit of Measure', required=True, help= "Unit of Measure (Unit of Measure) is the unit of measurement for Duration", states={ 'done': [('readonly', True)], 'cancelled': [('readonly', True)] }), 'constraint_date_start': fields.datetime('Minimum Start Date', help='force the phase to start after this date', states={ 'done': [('readonly', True)], 'cancelled': [('readonly', True)] }), 'constraint_date_end': fields.datetime('Deadline', help='force the phase to finish before this date', states={ 'done': [('readonly', True)], 'cancelled': [('readonly', True)] }), 'user_force_ids': fields.many2many('res.users', string='Force Assigned Users'), 'user_ids': fields.one2many( 'user.story.user.allocation', 'phase_id', "Assigned Users", states={ 'done': [('readonly', True) ], 'cancelled': [('readonly', True)] }, help= "The resources on the project can be computed automatically by the scheduler." ), } # 'task_ids': fields.one2many('project.task', 'phase_id', "Project Tasks", states={'done':[('readonly',True)], 'cancelled':[('readonly',True)]}), # 'progress': fields.function(_compute_progress, string='Progress', help="Computed based on related tasks"), _defaults = { 'state': 'draft', 'sequence': 10, } _order = "user_story_id, date_start, sequence" _constraints = [ (_check_recursion, 'Loops in phases not allowed', ['next_phase_ids', 'previous_phase_ids']), (_check_dates, 'Phase start-date must be lower than phase end-date.', ['date_start', 'date_end']), ] def onchange_user_story(self, cr, uid, ids, user_story, context=None): return {} def copy(self, cr, uid, id, default=None, context=None): if default is None: default = {} if not default.get('name', False): default.update(name=_('%s (copy)') % (self.browse(cr, uid, id, context=context).name)) return super(user_story_phase, self).copy(cr, uid, id, default, context) def set_draft(self, cr, uid, ids, *args): self.write(cr, uid, ids, {'state': 'draft'}) return True def set_open(self, cr, uid, ids, *args): self.write(cr, uid, ids, {'state': 'open'}) return True def set_pending(self, cr, uid, ids, *args): self.write(cr, uid, ids, {'state': 'pending'}) return True def set_cancel(self, cr, uid, ids, *args): self.write(cr, uid, ids, {'state': 'cancelled'}) return True def set_done(self, cr, uid, ids, *args): self.write(cr, uid, ids, {'state': 'done'}) return True
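# A minimal, framework-independent sketch of the loop detection performed by
# _check_recursion above: walk the previous/next phase relations and make sure
# the phase never reaches itself and that the two closures never intersect.
# prev_map/next_map are illustrative plain dicts; the real constraint reads
# user_story_phase_rel through SQL.
def has_phase_loop(phase_id, prev_map, next_map):
    def closure(start, graph):
        seen, frontier = set(), set(graph.get(start, ()))
        while frontier:
            seen |= frontier
            frontier = {n for p in frontier for n in graph.get(p, ())} - seen
        return seen

    predecessors = closure(phase_id, prev_map)
    successors = closure(phase_id, next_map)
    return (phase_id in predecessors or phase_id in successors
            or bool(predecessors & successors))

# Example: 1 -> 2 -> 3 -> 1 closes a loop, so the constraint would reject it.
assert has_phase_loop(1, prev_map={1: [3], 2: [1], 3: [2]},
                      next_map={1: [2], 2: [3], 3: [1]})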
            'context': context,
        }

    def onchange_shipping_service(self, cr, uid, ids, ups_shipper_id=False, context=None):
        vals = {}
        service_type_ids = []
        if ups_shipper_id:
            shipper_obj = self.pool.get('ups.account.shipping').browse(cr, uid, ups_shipper_id)
            for shipper in shipper_obj.ups_shipping_service_ids:
                service_type_ids.append(shipper.id)
        domain = [('id', 'in', service_type_ids)]
        return {'domain': {'ups_service_id': domain}}

    _columns = {
        'ship_company_code': fields.selection(_get_company_code, 'Ship Company', method=True, size=64),
        'ups_shipper_id': fields.many2one('ups.account.shipping', 'Shipper'),
        'ups_service_id': fields.many2one('ups.shipping.service.type', 'Shipping Service'),
        'ups_pickup_type': fields.selection([
            ('01', 'Daily Pickup'),
            ('03', 'Customer Counter'),
            ('06', 'One Time Pickup'),
            ('07', 'On Call Air'),
            ('11', 'Suggested Retail Rates'),
            ('19', 'Letter Center'),
            ('20', 'Air Service Center'),
        ], 'Pickup Type'),
        'ups_packaging_type': fields.many2one('shipping.package.type', 'Packaging Type'),
        'partner_id': fields.many2one('res.partner', 'Customer'),
        'partner_shipping_id': fields.many2one('res.partner', 'Shipping Address'),
    }
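# Hedged sketch of the onchange contract used above: the client applies the
# returned domain to the ups_service_id drop-down, so an unset shipper leaves
# an empty id list and therefore no selectable services.
def build_service_domain(service_ids):
    return {'domain': {'ups_service_id': [('id', 'in', list(service_ids))]}}

assert build_service_domain([4, 7]) == {'domain': {'ups_service_id': [('id', 'in', [4, 7])]}}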
        # published_version refers the version available on the repository
        'installed_version': fields.function(_get_latest_version,
            string='Latest Version', type='char'),
        'latest_version': fields.char('Installed Version', size=64, readonly=True),
        'published_version': fields.char('Published Version', size=64, readonly=True),
        'url': fields.char('URL', size=128, readonly=True),
        'sequence': fields.integer('Sequence'),
        'dependencies_id': fields.one2many('ir.module.module.dependency', 'module_id',
            'Dependencies', readonly=True),
        'auto_install': fields.boolean('Automatic Installation',
            help='An auto-installable module is automatically installed by the '
                 'system when all its dependencies are satisfied. '
                 'If the module has no dependency, it is always installed.'),
        'state': fields.selection([
            ('uninstallable', 'Not Installable'),
            ('uninstalled', 'Not Installed'),
            ('installed', 'Installed'),
            ('to upgrade', 'To be upgraded'),
            ('to remove', 'To be removed'),
            ('to install', 'To be installed')
        ], string='Status', readonly=True, select=True),
        'demo': fields.boolean('Demo Data', readonly=True),
        'license': fields.selection([
            ('GPL-2', 'GPL Version 2'),
            ('GPL-2 or any later version', 'GPL-2 or later version'),
            ('GPL-3', 'GPL Version 3'),
            ('GPL-3 or any later version', 'GPL-3 or later version'),
            ('AGPL-3', 'Affero GPL-3'),
            ('Other OSI approved licence', 'Other OSI Approved Licence'),
            ('Other proprietary', 'Other Proprietary')
        ], string='License', readonly=True),
        'menus_by_module': fields.function(_get_views, string='Menus', type='text',
            multi="meta", store=True),
        'reports_by_module': fields.function(_get_views, string='Reports', type='text',
            multi="meta", store=True),
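# Hedged sketch of the auto_install rule described in the help text above: an
# auto-installable module is installed once every dependency is satisfied and
# unconditionally when it has no dependency. Treating 'to install'/'to upgrade'
# as satisfied is an assumption made for illustration.
SATISFIED_STATES = {'installed', 'to install', 'to upgrade'}

def should_auto_install(auto_install, dependency_states):
    if not auto_install:
        return False
    return all(state in SATISFIED_STATES for state in dependency_states)

assert should_auto_install(True, [])                            # no dependency
assert should_auto_install(True, ['installed', 'to install'])
assert not should_auto_install(True, ['uninstalled'])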
return [i[0] for i in cr.fetchall()] _name = 'sale.order.line' _description = 'Sales Order Line' _columns = { 'order_id': fields.many2one('sale.order', 'Order Reference', required=True, ondelete='cascade', select=True, readonly=True, states={'draft':[('readonly',False)]}), 'name': fields.text('Description', required=True, readonly=True, states={'draft': [('readonly', False)]}), 'sequence': fields.integer('Sequence', help="Gives the sequence order when displaying a list of sales order lines."), 'product_id': fields.many2one('product.product', 'Product', domain=[('sale_ok', '=', True)], change_default=True), 'invoice_lines': fields.many2many('account.invoice.line', 'sale_order_line_invoice_rel', 'order_line_id', 'invoice_id', 'Invoice Lines', readonly=True), 'invoiced': fields.function(_fnct_line_invoiced, string='Invoiced', type='boolean', store={ 'account.invoice': (_order_lines_from_invoice, ['state'], 10), 'sale.order.line': (lambda self,cr,uid,ids,ctx=None: ids, ['invoice_lines'], 10)}), 'price_unit': fields.float('Unit Price', required=True, digits_compute= dp.get_precision('Product Price'), readonly=True, states={'draft': [('readonly', False)]}), 'type': fields.selection([('make_to_stock', 'from stock'), ('make_to_order', 'on order')], 'Procurement Method', required=True, readonly=True, states={'draft': [('readonly', False)]}, help="From stock: When needed, the product is taken from the stock or we wait for replenishment.\nOn order: When needed, the product is purchased or produced."), 'price_subtotal': fields.function(_amount_line, string='Subtotal', digits_compute= dp.get_precision('Account')), 'tax_id': fields.many2many('account.tax', 'sale_order_tax', 'order_line_id', 'tax_id', 'Taxes', readonly=True, states={'draft': [('readonly', False)]}), 'address_allotment_id': fields.many2one('res.partner', 'Allotment Partner',help="A partner to whom the particular product needs to be allotted."), 'product_uom_qty': fields.float('Quantity', digits_compute= dp.get_precision('Product UoS'), required=True, readonly=True, states={'draft': [('readonly', False)]}), 'product_uom': fields.many2one('product.uom', 'Unit of Measure ', required=True, readonly=True, states={'draft': [('readonly', False)]}), 'product_uos_qty': fields.float('Quantity (UoS)' ,digits_compute= dp.get_precision('Product UoS'), readonly=True, states={'draft': [('readonly', False)]}), 'product_uos': fields.many2one('product.uom', 'Product UoS'), 'discount': fields.float('Discount (%)', digits_compute= dp.get_precision('Discount'), readonly=True, states={'draft': [('readonly', False)]}), 'th_weight': fields.float('Weight', readonly=True, states={'draft': [('readonly', False)]}), 'state': fields.selection([('cancel', 'Cancelled'),('draft', 'Draft'),('confirmed', 'Confirmed'),('exception', 'Exception'),('done', 'Done')], 'Status', required=True, readonly=True, help='* The \'Draft\' status is set when the related sales order in draft status. \ \n* The \'Confirmed\' status is set when the related sales order is confirmed. \ \n* The \'Exception\' status is set when the related sales order is set as exception. \ \n* The \'Done\' status is set when the sales order line has been picked. \ \n* The \'Cancelled\' status is set when a user cancel the sales order related.'),
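# The _amount_line compute referenced by price_subtotal is not shown in this
# excerpt; a common formulation (an assumption, for illustration only) applies
# the line discount to the unit price before multiplying by the quantity.
def line_subtotal(price_unit, product_uom_qty, discount=0.0):
    return round(price_unit * product_uom_qty * (1 - discount / 100.0), 2)

assert line_subtotal(100.0, 3, discount=10.0) == 270.0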
    def _payment_type_name_get(self, cr, uid, ids, field_name, arg, context=None):
        result = {}
        for rec in self.browse(cr, uid, ids, context):
            result[rec.id] = rec.mode and rec.mode.type.name or ""
        return result

    def _name_get(self, cr, uid, ids, field_name, arg, context=None):
        result = {}
        for rec in self.browse(cr, uid, ids, context):
            result[rec.id] = rec.reference
        return result

    _columns = {
        'type': fields.selection([
            ('payable', 'Payable'),
            ('receivable', 'Receivable'),
        ], 'Type', readonly=True, select=True),
        # invisible field to filter payment order lines by payment type
        'payment_type_name': fields.function(_payment_type_name_get, method=True,
            type="char", size=64, string="Payment type name"),
        # The field name is necessary to add attachment documents to payment orders
        'name': fields.function(_name_get, method=True, type="char", size=64,
            string="Name"),
        'create_account_moves': fields.selection(
            [('bank-statement', 'Bank Statement'), ('direct-payment', 'Direct Payment')],
            'Create Account Moves', required=True, states={'done': [('readonly', True)]},
            help='Indicates when account moves should be created for order payment '
                 'lines. "Bank Statement" will wait until the user enters those '
                 'payments in a bank statement. "Direct Payment" will mark all '
                 'payment lines as paid once the order is done.'),
        'period_id': fields.many2one('account.period', 'Period',
            states={'done': [('readonly', True)]}),
    }

    _defaults = {
    _columns = {
        'cc_number': fields.char('Credit Card Number', size=32, required=True),
        'cc_ed_month': fields.char('Expiration Date MM', size=2, required=True),
        'cc_ed_year': fields.char('Expiration Date YYYY', size=4, required=True),
        # size=4 also accommodates AMEX card codes; other card types use 3 digits.
        'cc_verify_code': fields.char('Card Code Verification', size=4),
        'partner_id': fields.many2one('res.partner', 'Customer'),
        'address_id': fields.many2one('res.partner', 'Address'),
        'description': fields.char('Description', size=128),
        'cc_card_type': fields.selection([
            ('Discovery', 'Discovery'),
            ('AMEX', 'AMEX'),
            ('MasterCard', 'MasterCard'),
            ('Visa', 'Visa'),
        ], 'Credit Card Type'),
    }

    _defaults = {
        'partner_id': _get_partner,
        'cc_card_type': 'Visa',
    }

    def request_to_server(self, Request_string, url, url_path):
        '''Sends a POST request to url and returns the response from the server.'''
        # url = 'https://apitest.authorize.net:80/xml/v1/request.api'
        conn = httplib.HTTPSConnection(url)
        # conn = urllib2.urlopen(url)
        conn.putrequest('POST', url_path)
        conn.putheader('content-type', 'text/xml')
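# Hedged sketch of the full round trip that request_to_server starts above:
# plain Python 2 httplib usage with explicit headers and body. The host and
# path are placeholders, not the real payment gateway endpoint.
import httplib

def post_xml(host, path, body):
    conn = httplib.HTTPSConnection(host)
    conn.putrequest('POST', path)
    conn.putheader('content-type', 'text/xml')
    conn.putheader('content-length', str(len(body)))
    conn.endheaders()
    conn.send(body)
    try:
        response = conn.getresponse()
        return response.status, response.read()
    finally:
        conn.close()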
            ('SM Flat Rate Box', 'SM Flat Rate Box'),
            ('MD Flat Rate Box', 'MD Flat Rate Box'),
            ('LG Flat Rate Box', 'LG Flat Rate Box'),
            ('RegionalRateBoxA', 'RegionalRateBoxA'),
            ('RegionalRateBoxB', 'RegionalRateBoxB'),
            ('Rectangular', 'Rectangular'),
            ('Non-Rectangular', 'Non-Rectangular'),
        ]

    def _get_size_usps(self, cr, uid, context=None):
        return [
            ('REGULAR', 'Regular'),
            ('LARGE', 'Large'),
        ]

    _columns = {
        'ship_company_code': fields.selection(_get_company_code, 'Ship Company', method=True, size=64),
        'usps_service_type': fields.selection(_get_service_type_usps, 'Service Type', size=100),
        'usps_package_location': fields.selection([
            ('Front Door', 'Front Door'),
            ('Back Door', 'Back Door'),
            ('Side Door', 'Side Door'),
            ('Knock on Door/Ring Bell', 'Knock on Door/Ring Bell'),
            ('Mail Room', 'Mail Room'),
            ('Office', 'Office'),
            ('Reception', 'Reception'),
            ('In/At Mailbox', 'In/At Mailbox'),
            ('Other', 'Other'),
        ], 'Package Location'),
        'usps_first_class_mail_type': fields.selection(_get_first_class_mail_type_usps, 'First Class Mail Type', size=50),
        'usps_container': fields.selection(_get_container_usps, 'Container', size=100),
        'usps_size': fields.selection(_get_size_usps, 'Size'),
class accounting_report(osv.osv_memory): _name = "accounting.report" _inherit = "account.common.report" _description = "Accounting Report" _columns = { 'enable_filter': fields.boolean('Enable Comparison'), 'account_report_id': fields.many2one('account.financial.report', 'Account Reports', required=True), 'label_filter': fields.char( 'Column Label', help= "This label will be displayed on report to show the balance computed for the given comparison filter." ), 'fiscalyear_id_cmp': fields.many2one('account.fiscalyear', 'Fiscal Year', help='Keep empty for all open fiscal year'), 'filter_cmp': fields.selection([('filter_no', 'No Filters'), ('filter_date', 'Date'), ('filter_period', 'Periods')], "Filter by", required=True), 'period_from_cmp': fields.many2one('account.period', 'Start Period'), 'period_to_cmp': fields.many2one('account.period', 'End Period'), 'date_from_cmp': fields.date("Start Date"), 'date_to_cmp': fields.date("End Date"), 'debit_credit': fields.boolean( 'Display Debit/Credit Columns', help= "This option allows you to get more details about the way your balances are computed. Because it is space consuming, we do not allow to use it while doing a comparison." ), } def _get_account_report(self, cr, uid, context=None): # TODO deprecate this it doesnt work in web menu_obj = self.pool.get('ir.ui.menu') report_obj = self.pool.get('account.financial.report') report_ids = [] if context.get('active_id'): menu = menu_obj.browse(cr, uid, context.get('active_id')).name report_ids = report_obj.search(cr, uid, [('name', 'ilike', menu)]) return report_ids and report_ids[0] or False _defaults = { 'filter_cmp': 'filter_no', 'target_move': 'posted', 'account_report_id': _get_account_report, } def _build_comparison_context(self, cr, uid, ids, data, context=None): if context is None: context = {} result = {} result['fiscalyear'] = 'fiscalyear_id_cmp' in data['form'] and data[ 'form']['fiscalyear_id_cmp'] or False result['journal_ids'] = 'journal_ids' in data['form'] and data['form'][ 'journal_ids'] or False result['chart_account_id'] = 'chart_account_id' in data[ 'form'] and data['form']['chart_account_id'] or False result['state'] = 'target_move' in data['form'] and data['form'][ 'target_move'] or '' if data['form']['filter_cmp'] == 'filter_date': result['date_from'] = data['form']['date_from_cmp'] result['date_to'] = data['form']['date_to_cmp'] elif data['form']['filter_cmp'] == 'filter_period': if not data['form']['period_from_cmp'] or not data['form'][ 'period_to_cmp']: raise osv.except_osv( _('Error!'), _('Select a starting and an ending period')) result['period_from'] = data['form']['period_from_cmp'] result['period_to'] = data['form']['period_to_cmp'] return result def check_report(self, cr, uid, ids, context=None): if context is None: context = {} res = super(accounting_report, self).check_report(cr, uid, ids, context=context) data = {} data['form'] = self.read( cr, uid, ids, [ 'account_report_id', 'date_from_cmp', 'date_to_cmp', 'fiscalyear_id_cmp', 'journal_ids', 'period_from_cmp', 'period_to_cmp', 'filter_cmp', 'chart_account_id', 'target_move' ], context=context)[0] for field in [ 'fiscalyear_id_cmp', 'chart_account_id', 'period_from_cmp', 'period_to_cmp', 'account_report_id' ]: if isinstance(data['form'][field], tuple): data['form'][field] = data['form'][field][0] comparison_context = self._build_comparison_context(cr, uid, ids, data, context=context) res['data']['form']['comparison_context'] = comparison_context return res def _print_report(self, cr, uid, ids, data, context=None): 
        data['form'].update(self.read(cr, uid, ids, [
            'date_from_cmp', 'debit_credit', 'date_to_cmp', 'fiscalyear_id_cmp',
            'period_from_cmp', 'period_to_cmp', 'filter_cmp', 'account_report_id',
            'enable_filter', 'label_filter', 'target_move'
        ], context=context)[0])
        return self.pool['report'].get_action(cr, uid, [], 'account.report_financial',
                                              data=data, context=context)
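# Framework-free sketch of the comparison-context logic implemented by
# _build_comparison_context above: depending on the chosen filter the context
# carries either a date range or a period range, and a missing period raises
# early, mirroring the "Select a starting and an ending period" error.
def comparison_context(form):
    ctx = {
        'fiscalyear': form.get('fiscalyear_id_cmp') or False,
        'journal_ids': form.get('journal_ids') or False,
        'chart_account_id': form.get('chart_account_id') or False,
        'state': form.get('target_move') or '',
    }
    if form['filter_cmp'] == 'filter_date':
        ctx['date_from'] = form['date_from_cmp']
        ctx['date_to'] = form['date_to_cmp']
    elif form['filter_cmp'] == 'filter_period':
        if not form.get('period_from_cmp') or not form.get('period_to_cmp'):
            raise ValueError('Select a starting and an ending period')
        ctx['period_from'] = form['period_from_cmp']
        ctx['period_to'] = form['period_to_cmp']
    return ctx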
class wkf_activity(osv.osv):
    _name = "workflow.activity"
    _table = "wkf_activity"
    _order = "name"
    _columns = {
        'name': fields.char('Name', required=True),
        'wkf_id': fields.many2one('workflow', 'Workflow', required=True, select=True, ondelete='cascade'),
        'split_mode': fields.selection([('XOR', 'Xor'), ('OR', 'Or'), ('AND', 'And')], 'Split Mode', size=3, required=True),
        'join_mode': fields.selection([('XOR', 'Xor'), ('AND', 'And')], 'Join Mode', size=3, required=True),
        'kind': fields.selection([('dummy', 'Dummy'), ('function', 'Function'), ('subflow', 'Subflow'), ('stopall', 'Stop All')], 'Kind', required=True),
        'action': fields.text('Python Action'),
        'action_id': fields.many2one('ir.actions.server', 'Server Action', ondelete='set null'),
        'flow_start': fields.boolean('Flow Start'),
        'flow_stop': fields.boolean('Flow Stop'),
        'subflow_id': fields.many2one('workflow', 'Subflow'),
        'signal_send': fields.char('Signal (subflow.*)'),
        'out_transitions': fields.one2many('workflow.transition', 'act_from', 'Outgoing Transitions'),
        'in_transitions': fields.one2many('workflow.transition', 'act_to', 'Incoming Transitions'),
    }
    _defaults = {
        'kind': lambda *a: 'dummy',
        'join_mode': lambda *a: 'XOR',
        'split_mode': lambda *a: 'XOR',
    }

    def unlink(self, cr, uid, ids, context=None):
        if context is None:
            context = {}
        if not context.get('_force_unlink') and self.pool.get('workflow.workitem').search(cr, uid, [('act_id', 'in', ids)]):
            raise osv.except_osv(
                _('Operation Forbidden'),
                _('Please make sure no workitems refer to an activity before deleting it!'))
        # Propagate the result of the base implementation.
        return super(wkf_activity, self).unlink(cr, uid, ids, context=context)
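# Framework-free sketch of the unlink guard above: activities that still have
# workitems cannot be deleted unless the caller explicitly forces it through
# the '_force_unlink' context key.
def can_unlink_activities(activity_ids, workitem_act_ids, context=None):
    context = context or {}
    if context.get('_force_unlink'):
        return True
    return not any(act_id in workitem_act_ids for act_id in activity_ids)

assert can_unlink_activities([1], set())
assert not can_unlink_activities([1], {1})
assert can_unlink_activities([1], {1}, context={'_force_unlink': True})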
class survey(osv.osv): _name = 'survey' _description = 'Survey' _rec_name = 'title' def default_get(self, cr, uid, fields, context=None): data = super(survey, self).default_get(cr, uid, fields, context) return data _columns = { 'id': fields.integer('ID'), 'title': fields.char('Survey Title', size=128, required=1), 'page_ids': fields.one2many('survey.page', 'survey_id', 'Page'), 'date_open': fields.datetime('Survey Open Date', readonly=1), 'date_close': fields.datetime('Survey Close Date', readonly=1), 'max_response_limit': fields.integer('Maximum Answer Limit', help="Set to one if survey is answerable only once"), 'response_user': fields.integer( 'Maximum Answer per User', help="Set to one if you require only one Answer per user"), 'state': fields.selection([('open', 'Open'), ('cancel', 'Cancelled'), ('close', 'Closed')], 'Status', readonly=True), 'responsible_id': fields.many2one('res.users', 'Responsible', help="User responsible for survey"), 'tot_start_survey': fields.integer("Total Started Survey", readonly=1), 'tot_comp_survey': fields.integer("Total Completed Survey", readonly=1), 'note': fields.text('Description', size=128), 'history': fields.one2many('survey.history', 'survey_id', 'History Lines', readonly=True), 'users': fields.many2many('res.users', 'survey_users_rel', 'sid', 'uid', 'Users'), 'send_response': fields.boolean('Email Notification on Answer'), 'type': fields.many2one('survey.type', 'Type'), 'color': fields.integer('Color Index'), 'invited_user_ids': fields.many2many('res.users', 'survey_invited_user_rel', 'sid', 'uid', 'Invited User'), } _defaults = { 'state': lambda *a: "open", 'tot_start_survey': lambda *a: 0, 'tot_comp_survey': lambda *a: 0, 'send_response': lambda *a: 1, 'response_user': lambda *a: 1, 'date_open': fields.datetime.now, } def survey_open(self, cr, uid, ids, arg): self.write(cr, uid, ids, { 'state': 'open', 'date_open': strftime("%Y-%m-%d %H:%M:%S") }) return True def survey_close(self, cr, uid, ids, arg): self.write(cr, uid, ids, { 'state': 'close', 'date_close': strftime("%Y-%m-%d %H:%M:%S") }) return True def survey_cancel(self, cr, uid, ids, arg): self.write(cr, uid, ids, {'state': 'cancel'}) return True def copy(self, cr, uid, ids, default=None, context=None): vals = {} current_rec = self.read(cr, uid, ids, context=context) title = _("%s (copy)") % (current_rec.get('title')) vals.update({'title': title}) vals.update({ 'history': [], 'tot_start_survey': 0, 'tot_comp_survey': 0 }) return super(survey, self).copy(cr, uid, ids, vals, context=context) def action_print_survey(self, cr, uid, ids, context=None): """ If response is available then print this response otherwise print survey form(print template of the survey). @param self: The object pointer @param cr: the current row, from the database cursor, @param uid: the current user’s ID for security checks, @param ids: List of Survey IDs @param context: A standard dictionary for contextual values @return : Dictionary value for print survey form. 
""" if context is None: context = {} datas = {} if 'response_id' in context: response_id = context.get('response_id', 0) datas['ids'] = [context.get('survey_id', 0)] else: response_id = self.pool.get('survey.response').search( cr, uid, [('survey_id', '=', ids)], context=context) datas['ids'] = ids page_setting = { 'orientation': 'vertical', 'without_pagebreak': 0, 'paper_size': 'letter', 'page_number': 1, 'survey_title': 1 } report = {} if response_id and response_id[0]: context.update({'survey_id': datas['ids']}) datas['form'] = page_setting datas['model'] = 'survey.print.answer' report = { 'type': 'ir.actions.report.xml', 'report_name': 'survey.browse.response', 'datas': datas, 'context': context, 'nodestroy': True, } else: datas['form'] = page_setting datas['model'] = 'survey.print' report = { 'type': 'ir.actions.report.xml', 'report_name': 'survey.form', 'datas': datas, 'context': context, 'nodestroy': True, } return report def fill_survey(self, cr, uid, ids, context=None): sur_obj = self.read(cr, uid, ids, ['title', 'page_ids'], context=context) for sur in sur_obj: name = sur['title'] pages = sur['page_ids'] if not pages: raise osv.except_osv( _('Warning!'), _('This survey has no question defined. Please define the questions and answers first.' )) context.update({'active': False, 'survey_id': ids[0]}) return { 'view_type': 'form', 'view_mode': 'form', 'res_model': 'survey.question.wiz', 'type': 'ir.actions.act_window', 'target': 'new', 'name': name, 'context': context } def test_survey(self, cr, uid, ids, context=None): sur_obj = self.read(cr, uid, ids, ['title', 'page_ids'], context=context) for sur in sur_obj: name = sur['title'] pages = sur['page_ids'] if not pages: raise osv.except_osv( _('Warning!'), _('This survey has no pages defined. Please define pages first.' )) context.update({'active': False, 'survey_id': ids[0]}) return { 'view_type': 'form', 'view_mode': 'form', 'res_model': 'survey.question.wiz', 'type': 'ir.actions.act_window', 'target': 'new', 'name': name, 'context': context } def edit_survey(self, cr, uid, ids, context=None): sur_obj = self.read(cr, uid, ids, ['title', 'page_ids'], context=context) for sur in sur_obj: name = sur['title'] pages = sur['page_ids'] if not pages: raise osv.except_osv( _('Warning!'), _('This survey has no question defined. Please define the questions and answers first.' )) context.update({'survey_id': ids[0]}) return { 'view_type': 'form', 'view_mode': 'form', 'res_model': 'survey.question.wiz', 'type': 'ir.actions.act_window', 'target': 'new', 'name': name, 'context': context }
class account_vat_period_end_statement(orm.Model): def _compute_authority_vat_amount(self, cr, uid, ids, field_name, arg, context): res = {} for i in ids: statement = self.browse(cr, uid, i) debit_vat_amount = 0.0 credit_vat_amount = 0.0 generic_vat_amount = 0.0 for debit_line in statement.debit_vat_account_line_ids: debit_vat_amount += debit_line.amount for credit_line in statement.credit_vat_account_line_ids: credit_vat_amount += credit_line.amount for generic_line in statement.generic_vat_account_line_ids: generic_vat_amount += generic_line.amount authority_amount = (debit_vat_amount - credit_vat_amount - generic_vat_amount - statement.previous_credit_vat_amount + statement.previous_debit_vat_amount) res[i] = authority_amount return res def _compute_payable_vat_amount(self, cr, uid, ids, field_name, arg, context): res = {} for i in ids: statement = self.browse(cr, uid, i) debit_vat_amount = 0.0 for debit_line in statement.debit_vat_account_line_ids: debit_vat_amount += debit_line.amount res[i] = debit_vat_amount return res def _compute_deductible_vat_amount(self, cr, uid, ids, field_name, arg, context): res = {} for i in ids: statement = self.browse(cr, uid, i) credit_vat_amount = 0.0 for credit_line in statement.credit_vat_account_line_ids: credit_vat_amount += credit_line.amount res[i] = credit_vat_amount return res # Workflow stuff ################# def _reconciled(self, cr, uid, ids, name, args, context=None): res = {} for id in ids: res[id] = self.test_paid(cr, uid, [id]) return res def move_line_id_payment_gets(self, cr, uid, ids, *args): res = {} if not ids: return res cr.execute('SELECT statement.id, l.id '\ 'FROM account_move_line l '\ 'LEFT JOIN account_vat_period_end_statement statement ON (statement.move_id=l.move_id) '\ 'WHERE statement.id IN %s '\ 'AND l.account_id=statement.authority_vat_account_id', (tuple(ids),)) for r in cr.fetchall(): res.setdefault(r[0], []) res[r[0]].append(r[1]) return res # return the ids of the move lines which has the same account than the statement # whose id is in ids def move_line_id_payment_get(self, cr, uid, ids, *args): if not ids: return [] result = self.move_line_id_payment_gets(cr, uid, ids, *args) return result.get(ids[0], []) def test_paid(self, cr, uid, ids, *args): res = self.move_line_id_payment_get(cr, uid, ids) if not res: return False ok = True for id in res: cr.execute( 'select reconcile_id from account_move_line where id=%s', (id, )) ok = ok and bool(cr.fetchone()[0]) return ok def _get_statement_from_line(self, cr, uid, ids, context=None): move = {} for line in self.pool.get('account.move.line').browse(cr, uid, ids, context=context): if line.reconcile_partial_id: for line2 in line.reconcile_partial_id.line_partial_ids: move[line2.move_id.id] = True if line.reconcile_id: for line2 in line.reconcile_id.line_id: move[line2.move_id.id] = True statement_ids = [] if move: statement_ids = self.pool.get( 'account.vat.period.end.statement').search( cr, uid, [('move_id', 'in', move.keys())], context=context) return statement_ids def _get_statement_from_move(self, cr, uid, ids, context=None): move = {} statement_ids = [] for move in self.pool.get('account.move').browse(cr, uid, ids, context=context): found_ids = self.pool.get( 'account.vat.period.end.statement').search( cr, uid, [('move_id', '=', move.id)], context=context) for found_id in found_ids: if found_id not in statement_ids: statement_ids.append(found_id) return statement_ids def _get_statement_from_reconcile(self, cr, uid, ids, context=None): move = {} for r in 
self.pool.get('account.move.reconcile').browse( cr, uid, ids, context=context): for line in r.line_partial_ids: move[line.move_id.id] = True for line in r.line_id: move[line.move_id.id] = True statement_ids = [] if move: statement_ids = self.pool.get( 'account.vat.period.end.statement').search( cr, uid, [('move_id', 'in', move.keys())], context=context) return statement_ids def _get_credit_line(self, cr, uid, ids, context=None): result = {} for line in self.pool.get('statement.credit.account.line').browse( cr, uid, ids, context=context): result[line.statement_id.id] = True return result.keys() def _get_debit_line(self, cr, uid, ids, context=None): result = {} for line in self.pool.get('statement.debit.account.line').browse( cr, uid, ids, context=context): result[line.statement_id.id] = True return result.keys() def _get_generic_line(self, cr, uid, ids, context=None): result = {} for line in self.pool.get('statement.generic.account.line').browse( cr, uid, ids, context=context): result[line.statement_id.id] = True return result.keys() def _amount_residual(self, cr, uid, ids, name, args, context=None): result = {} for statement in self.browse(cr, uid, ids, context=context): result[statement.id] = 0.0 if statement.move_id: for m in statement.move_id.line_id: if m.account_id.type in ('receivable', 'payable'): result[statement.id] += m.amount_residual_currency return result def _compute_lines(self, cr, uid, ids, name, args, context=None): result = {} for statement in self.browse(cr, uid, ids, context=context): src = [] lines = [] if statement.move_id: for m in statement.move_id.line_id: temp_lines = [] if m.reconcile_id: temp_lines = map(lambda x: x.id, m.reconcile_id.line_id) elif m.reconcile_partial_id: temp_lines = map( lambda x: x.id, m.reconcile_partial_id.line_partial_ids) lines += [x for x in temp_lines if x not in lines] src.append(m.id) lines = filter(lambda x: x not in src, lines) result[statement.id] = lines return result _name = "account.vat.period.end.statement" _rec_name = 'date' _columns = { 'debit_vat_account_line_ids': fields.one2many( 'statement.debit.account.line', 'statement_id', 'Debit VAT', help='The accounts containing the debit VAT amount to write-off', states={ 'confirmed': [('readonly', True)], 'paid': [('readonly', True)], 'draft': [('readonly', False)] }), 'credit_vat_account_line_ids': fields.one2many( 'statement.credit.account.line', 'statement_id', 'Credit VAT', help='The accounts containing the credit VAT amount to write-off', states={ 'confirmed': [('readonly', True)], 'paid': [('readonly', True)], 'draft': [('readonly', False)] }), 'previous_credit_vat_account_id': fields.many2one('account.account', 'Previous Credits VAT', help='Credit VAT from previous periods', states={ 'confirmed': [('readonly', True)], 'paid': [('readonly', True)], 'draft': [('readonly', False)] }), 'previous_credit_vat_amount': fields.float('Previous Credits VAT Amount', states={ 'confirmed': [('readonly', True)], 'paid': [('readonly', True)], 'draft': [('readonly', False)] }, digits_compute=dp.get_precision('Account')), 'previous_debit_vat_account_id': fields.many2one('account.account', 'Previous Debits VAT', help='Debit VAT from previous periods', states={ 'confirmed': [('readonly', True)], 'paid': [('readonly', True)], 'draft': [('readonly', False)] }), 'previous_debit_vat_amount': fields.float('Previous Debits VAT Amount', states={ 'confirmed': [('readonly', True)], 'paid': [('readonly', True)], 'draft': [('readonly', False)] }, digits_compute=dp.get_precision('Account')), 
'generic_vat_account_line_ids': fields.one2many('statement.generic.account.line', 'statement_id', 'Other VAT Credits / Debits or Tax Compensations', states={ 'confirmed': [('readonly', True)], 'paid': [('readonly', True)], 'draft': [('readonly', False)] }), 'authority_partner_id': fields.many2one('res.partner', 'Tax Authority Partner', states={ 'confirmed': [('readonly', True)], 'paid': [('readonly', True)], 'draft': [('readonly', False)] }), 'authority_vat_account_id': fields.many2one('account.account', 'Tax Authority VAT Account', required=True, states={ 'confirmed': [('readonly', True)], 'paid': [('readonly', True)], 'draft': [('readonly', False)] }), 'authority_vat_amount': fields.function(_compute_authority_vat_amount, method=True, string='Authority VAT Amount'), 'payable_vat_amount': fields.function(_compute_payable_vat_amount, method=True, string='Payable VAT Amount'), 'deductible_vat_amount': fields.function(_compute_deductible_vat_amount, method=True, string='Deductible VAT Amount'), 'journal_id': fields.many2one('account.journal', 'Journal', required=True, states={ 'confirmed': [('readonly', True)], 'paid': [('readonly', True)], 'draft': [('readonly', False)] }), 'date': fields.date('Date', required=True, states={ 'confirmed': [('readonly', True)], 'paid': [('readonly', True)], 'draft': [('readonly', False)] }), 'move_id': fields.many2one('account.move', 'VAT statement move', readonly=True), #'voucher_id': fields.many2one('account.voucher', 'VAT payment', readonly=True), 'state': fields.selection([ ('draft', 'Draft'), ('confirmed', 'Confirmed'), ('paid', 'Paid'), ], 'State', readonly=True), 'payment_term_id': fields.many2one('account.payment.term', 'Payment Term', states={ 'confirmed': [('readonly', True)], 'paid': [('readonly', True)], 'draft': [('readonly', False)] }), 'reconciled': fields.function( _reconciled, string='Paid/Reconciled', type='boolean', store={ 'account.vat.period.end.statement': (lambda self, cr, uid, ids, c={}: ids, None, 50), 'account.move.line': (_get_statement_from_line, None, 50), 'account.move.reconcile': (_get_statement_from_reconcile, None, 50), }, help= "It indicates that the statement has been paid and the journal entry of the statement has been reconciled with one or several journal entries of payment." 
), 'residual': fields.function( _amount_residual, digits_compute=dp.get_precision('Account'), string='Balance', store={ 'account.vat.period.end.statement': (lambda self, cr, uid, ids, c={}: ids, [ 'debit_vat_account_line_ids', 'credit_vat_account_line_ids', 'generic_vat_account_line_ids', 'move_id', 'state' ], 50), 'statement.credit.account.line': (_get_credit_line, ['amount', 'statement_id'], 50), 'statement.debit.account.line': (_get_debit_line, ['amount', 'statement_id'], 50), 'statement.generic.account.line': (_get_generic_line, ['amount', 'statement_id'], 50), 'account.move': (_get_statement_from_move, None, 50), 'account.move.line': (_get_statement_from_line, None, 50), 'account.move.reconcile': (_get_statement_from_reconcile, None, 50), }, help="Remaining amount due."), 'payment_ids': fields.function(_compute_lines, relation='account.move.line', type="many2many", string='Payments'), 'period_ids': fields.one2many('account.period', 'vat_statement_id', 'Periods'), } _defaults = { 'date': fields.date.context_today, } def _get_tax_code_amount(self, cr, uid, tax_code_id, period_id, context): if not context: context = {} context['period_id'] = period_id return self.pool.get('account.tax.code').browse( cr, uid, tax_code_id, context)._sum_period(None, None, context)[tax_code_id] def unlink(self, cr, uid, ids, context=None): if isinstance(ids, (long, int)): ids = [ids] for statement in self.browse(cr, uid, ids, context): if statement.state == 'confirmed' or statement.state == 'paid': raise orm.except_orm( _('Error!'), _('You cannot delete a confirmed or paid statement')) res = super(account_vat_period_end_statement, self).unlink(cr, uid, ids, context) return res def statement_draft(self, cr, uid, ids, context=None): for statement in self.browse(cr, uid, ids, context): if statement.move_id: statement.move_id.unlink() ''' if statement.voucher_id: statement.voucher_id.unlink() ''' self.write(cr, uid, ids, {'state': 'draft'}) def statement_paid(self, cr, uid, ids, context=None): self.write(cr, uid, ids, {'state': 'paid'}) def create_move(self, cr, uid, ids, context=None): move_obj = self.pool.get('account.move') term_pool = self.pool.get('account.payment.term') line_obj = self.pool.get('account.move.line') period_obj = self.pool.get('account.period') for statement in self.browse(cr, uid, ids, context): period_ids = period_obj.find(cr, uid, dt=statement.date, context=context) if len(period_ids) != 1: raise orm.except_orm( _('Encoding error'), _('No period found or more than one period found for the given date.' 
)) move_data = { 'name': _('VAT statement') + ' - ' + statement.date, 'date': statement.date, 'journal_id': statement.journal_id.id, 'period_id': period_ids[0], } move_id = move_obj.create(cr, uid, move_data) statement.write({'move_id': move_id}) for debit_line in statement.debit_vat_account_line_ids: debit_vat_data = { 'name': _('Debit VAT'), 'account_id': debit_line.account_id.id, 'move_id': move_id, 'journal_id': statement.journal_id.id, 'debit': 0.0, 'credit': 0.0, 'date': statement.date, 'period_id': period_ids[0], } if debit_line.amount > 0: debit_vat_data['debit'] = math.fabs(debit_line.amount) else: debit_vat_data['credit'] = math.fabs(debit_line.amount) line_obj.create(cr, uid, debit_vat_data) for credit_line in statement.credit_vat_account_line_ids: credit_vat_data = { 'name': _('Credit VAT'), 'account_id': credit_line.account_id.id, 'move_id': move_id, 'journal_id': statement.journal_id.id, 'debit': 0.0, 'credit': 0.0, 'date': statement.date, 'period_id': period_ids[0], } if credit_line.amount < 0: credit_vat_data['debit'] = math.fabs(credit_line.amount) else: credit_vat_data['credit'] = math.fabs(credit_line.amount) line_obj.create(cr, uid, credit_vat_data) if statement.previous_credit_vat_amount: previous_credit_vat_data = { 'name': _('Previous Credits VAT'), 'account_id': statement.previous_credit_vat_account_id.id, 'move_id': move_id, 'journal_id': statement.journal_id.id, 'debit': 0.0, 'credit': 0.0, 'date': statement.date, 'period_id': period_ids[0], } if statement.previous_credit_vat_amount < 0: previous_credit_vat_data['debit'] = math.fabs( statement.previous_credit_vat_amount) else: previous_credit_vat_data['credit'] = math.fabs( statement.previous_credit_vat_amount) line_obj.create(cr, uid, previous_credit_vat_data) if statement.previous_debit_vat_amount: previous_debit_vat_data = { 'name': _('Previous Debits VAT'), 'account_id': statement.previous_debit_vat_account_id.id, 'move_id': move_id, 'journal_id': statement.journal_id.id, 'debit': 0.0, 'credit': 0.0, 'date': statement.date, 'period_id': period_ids[0], } if statement.previous_debit_vat_amount > 0: previous_debit_vat_data['debit'] = math.fabs( statement.previous_debit_vat_amount) else: previous_debit_vat_data['credit'] = math.fabs( statement.previous_debit_vat_amount) line_obj.create(cr, uid, previous_debit_vat_data) for generic_line in statement.generic_vat_account_line_ids: generic_vat_data = { 'name': _('Other VAT Credits / Debits'), 'account_id': generic_line.account_id.id, 'move_id': move_id, 'journal_id': statement.journal_id.id, 'debit': 0.0, 'credit': 0.0, 'date': statement.date, 'period_id': period_ids[0], } if generic_line.amount < 0: generic_vat_data['debit'] = math.fabs(generic_line.amount) else: generic_vat_data['credit'] = math.fabs(generic_line.amount) line_obj.create(cr, uid, generic_vat_data) end_debit_vat_data = { 'name': _('Tax Authority VAT'), 'account_id': statement.authority_vat_account_id.id, 'partner_id': statement.authority_partner_id.id, 'move_id': move_id, 'journal_id': statement.journal_id.id, 'date': statement.date, 'period_id': period_ids[0], } if statement.authority_vat_amount > 0: end_debit_vat_data['debit'] = 0.0 end_debit_vat_data['credit'] = math.fabs( statement.authority_vat_amount) if statement.payment_term_id: due_list = term_pool.compute( cr, uid, statement.payment_term_id.id, math.fabs(statement.authority_vat_amount), date_ref=statement.date, context=context) if len(due_list) == 0: raise orm.except_orm( _('Error'), _('The payment term %s does not have due dates') % 
statement.payment_term_id.name) for term in due_list: current_line = end_debit_vat_data current_line['credit'] = term[1] current_line['date_maturity'] = term[0] line_obj.create(cr, uid, current_line) else: line_obj.create(cr, uid, end_debit_vat_data) elif statement.authority_vat_amount < 0: end_debit_vat_data['debit'] = math.fabs( statement.authority_vat_amount) end_debit_vat_data['credit'] = 0.0 line_obj.create(cr, uid, end_debit_vat_data) self.write(cr, uid, statement.id, {'state': 'confirmed'}) return True """ def open_chart_of_taxes(self, cr, uid, ids, context=None): result = {} if context is None: context = {} for statement in self.browse(cr, uid, ids, context): mod_obj = self.pool.get('ir.model.data') act_obj = self.pool.get('ir.actions.act_window') period_obj = self.pool.get('account.period') period_ids = period_obj.find(cr, uid, dt=statement.date, context=context) if len(period_ids)> 1: raise orm.except_orm(_('Error'), _('Too many periods for date %s') % str(statement.date)) period = period_obj.browse(cr, uid, period_ids[0], context) result = mod_obj.get_object_reference(cr, uid, 'account', 'action_tax_code_tree') id = result and result[1] or False result = act_obj.read(cr, uid, [id], context=context)[0] fiscalyear_id = period.fiscalyear_id.id result['context'] = str({'period_id': period.id, \ 'fiscalyear_id': fiscalyear_id, \ 'state': 'posted'}) period_code = period.code result['name'] += period_code and (':' + period_code) or '' result['nodestroy'] = True return result """ def compute_amounts(self, cr, uid, ids, context=None): debit_line_pool = self.pool.get('statement.debit.account.line') credit_line_pool = self.pool.get('statement.credit.account.line') for statement in self.browse(cr, uid, ids, context): statement.write({'previous_debit_vat_amount': 0.0}) prev_statement_ids = self.search(cr, uid, [('date', '<', statement.date)], order='date') if prev_statement_ids: prev_statement = self.browse( cr, uid, prev_statement_ids[len(prev_statement_ids) - 1], context) if prev_statement.residual > 0 and prev_statement.authority_vat_amount > 0: statement.write( {'previous_debit_vat_amount': prev_statement.residual}) elif prev_statement.authority_vat_amount < 0: statement.write({ 'previous_credit_vat_amount': -prev_statement.authority_vat_amount }) credit_line_ids = [] debit_line_ids = [] tax_code_pool = self.pool.get('account.tax.code') debit_tax_code_ids = tax_code_pool.search( cr, uid, [ ('vat_statement_account_id', '!=', False), ('vat_statement_type', '=', 'debit'), ], context=context) for debit_tax_code_id in debit_tax_code_ids: debit_tax_code = tax_code_pool.browse(cr, uid, debit_tax_code_id, context) total = 0.0 for period in statement.period_ids: context['period_id'] = period.id total += tax_code_pool.browse(cr, uid, debit_tax_code_id, context).sum_period debit_line_ids.append({ 'account_id': debit_tax_code.vat_statement_account_id.id, 'tax_code_id': debit_tax_code.id, 'amount': total * debit_tax_code.vat_statement_sign, }) credit_tax_code_ids = tax_code_pool.search( cr, uid, [ ('vat_statement_account_id', '!=', False), ('vat_statement_type', '=', 'credit'), ], context=context) for credit_tax_code_id in credit_tax_code_ids: credit_tax_code = tax_code_pool.browse(cr, uid, credit_tax_code_id, context) total = 0.0 for period in statement.period_ids: context['period_id'] = period.id total += tax_code_pool.browse(cr, uid, credit_tax_code_id, context).sum_period credit_line_ids.append({ 'account_id': credit_tax_code.vat_statement_account_id.id, 'tax_code_id': credit_tax_code.id, 
'amount': total * credit_tax_code.vat_statement_sign, }) for debit_line in statement.debit_vat_account_line_ids: debit_line.unlink() for credit_line in statement.credit_vat_account_line_ids: credit_line.unlink() for debit_vals in debit_line_ids: debit_vals.update({'statement_id': statement.id}) debit_line_pool.create(cr, uid, debit_vals, context=context) for credit_vals in credit_line_ids: credit_vals.update({'statement_id': statement.id}) credit_line_pool.create(cr, uid, credit_vals, context=context) return True def on_change_partner_id(self, cr, uid, ids, partner_id, context=None): partner = self.pool.get('res.partner').browse(cr, uid, partner_id, context) return { 'value': { 'authority_vat_account_id': partner.property_account_payable.id } }
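# Framework-free restatement of _compute_authority_vat_amount above: the amount
# due to (or from) the tax authority is debit VAT minus credit VAT, minus other
# credits/debits, adjusted by the amounts carried over from previous periods.
def authority_vat_amount(debit_amounts, credit_amounts, generic_amounts,
                         previous_credit=0.0, previous_debit=0.0):
    return (sum(debit_amounts) - sum(credit_amounts) - sum(generic_amounts)
            - previous_credit + previous_debit)

# Example: 1000 of debit VAT, 600 of credit VAT, a 50 compensation and a 100
# credit carried over leave 250 payable to the authority.
assert authority_vat_amount([1000.0], [600.0], [50.0], previous_credit=100.0) == 250.0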
class survey_question(osv.osv): _name = 'survey.question' _description = 'Survey Question' _rec_name = 'question' _order = 'sequence' def _calc_response(self, cr, uid, ids, field_name, arg, context=None): if len(ids) == 0: return {} val = {} cr.execute( "select question_id, count(id) as Total_response from \ survey_response_line where state='done' and question_id IN %s\ group by question_id", (tuple(ids), )) ids1 = copy.deepcopy(ids) for rec in cr.fetchall(): ids1.remove(rec[0]) val[rec[0]] = int(rec[1]) for id in ids1: val[id] = 0 return val _columns = { 'page_id': fields.many2one('survey.page', 'Survey Page', ondelete='cascade', required=1), 'question': fields.char('Question', size=128, required=1), 'answer_choice_ids': fields.one2many('survey.answer', 'question_id', 'Answer'), 'is_require_answer': fields.boolean('Require Answer to Question'), 'required_type': fields.selection([('all','All'), ('at least','At Least'), ('at most','At Most'), ('exactly','Exactly'), ('a range','A Range')], 'Respondent must answer'), 'req_ans': fields.integer('#Required Answer'), 'maximum_req_ans': fields.integer('Maximum Required Answer'), 'minimum_req_ans': fields.integer('Minimum Required Answer'), 'req_error_msg': fields.text('Error Message'), 'allow_comment': fields.boolean('Allow Comment Field'), 'sequence': fields.integer('Sequence'), 'tot_resp': fields.function(_calc_response, string="Total Answer"), 'survey': fields.related('page_id', 'survey_id', type='many2one', relation='survey', string='Survey'), 'descriptive_text': fields.text('Descriptive Text', size=255), 'column_heading_ids': fields.one2many('survey.question.column.heading', 'question_id',' Column heading'), 'type': fields.selection([('multiple_choice_only_one_ans','Multiple Choice (Only One Answer)'), ('multiple_choice_multiple_ans','Multiple Choice (Multiple Answer)'), ('matrix_of_choices_only_one_ans','Matrix of Choices (Only One Answers Per Row)'), ('matrix_of_choices_only_multi_ans','Matrix of Choices (Multiple Answers Per Row)'), ('matrix_of_drop_down_menus','Matrix of Drop-down Menus'), ('rating_scale','Rating Scale'),('single_textbox','Single Textbox'), ('multiple_textboxes','Multiple Textboxes'), ('multiple_textboxes_diff_type','Multiple Textboxes With Different Type'), ('comment','Comment/Essay Box'), ('numerical_textboxes','Numerical Textboxes'),('date','Date'), ('date_and_time','Date and Time'),('descriptive_text','Descriptive Text'), ('table','Table'), ], 'Question Type', required=1,), 'is_comment_require': fields.boolean('Add Comment Field'), 'comment_label': fields.char('Field Label', size = 255), 'comment_field_type': fields.selection([('char', 'Single Line Of Text'), ('text', 'Paragraph of Text')], 'Comment Field Type'), 'comment_valid_type': fields.selection([('do_not_validate', '''Don't Validate Comment Text.'''), ('must_be_specific_length', 'Must Be Specific Length'), ('must_be_whole_number', 'Must Be A Whole Number'), ('must_be_decimal_number', 'Must Be A Decimal Number'), ('must_be_date', 'Must Be A Date'), ('must_be_email_address', 'Must Be An Email Address'), ], 'Text Validation'), 'comment_minimum_no': fields.integer('Minimum number'), 'comment_maximum_no': fields.integer('Maximum number'), 'comment_minimum_float': fields.float('Minimum decimal number'), 'comment_maximum_float': fields.float('Maximum decimal number'), 'comment_minimum_date': fields.date('Minimum date'), 'comment_maximum_date': fields.date('Maximum date'), 'comment_valid_err_msg': fields.text('Error message'), 'make_comment_field': fields.boolean('Make 
Comment Field an Answer Choice'), 'make_comment_field_err_msg': fields.text('Error message'), 'is_validation_require': fields.boolean('Validate Text'), 'validation_type': fields.selection([('do_not_validate', '''Don't Validate Comment Text.'''),\ ('must_be_specific_length', 'Must Be Specific Length'),\ ('must_be_whole_number', 'Must Be A Whole Number'),\ ('must_be_decimal_number', 'Must Be A Decimal Number'),\ ('must_be_date', 'Must Be A Date'),\ ('must_be_email_address', 'Must Be An Email Address')\ ], 'Text Validation'), 'validation_minimum_no': fields.integer('Minimum number'), 'validation_maximum_no': fields.integer('Maximum number'), 'validation_minimum_float': fields.float('Minimum decimal number'), 'validation_maximum_float': fields.float('Maximum decimal number'), 'validation_minimum_date': fields.date('Minimum date'), 'validation_maximum_date': fields.date('Maximum date'), 'validation_valid_err_msg': fields.text('Error message'), 'numeric_required_sum': fields.integer('Sum of all choices'), 'numeric_required_sum_err_msg': fields.text('Error message'), 'rating_allow_one_column_require': fields.boolean('Allow Only One Answer per Column (Forced Ranking)'), 'in_visible_rating_weight': fields.boolean('Is Rating Scale Invisible?'), 'in_visible_menu_choice': fields.boolean('Is Menu Choice Invisible?'), 'in_visible_answer_type': fields.boolean('Is Answer Type Invisible?'), 'comment_column': fields.boolean('Add comment column in matrix'), 'column_name': fields.char('Column Name',size=256), 'no_of_rows': fields.integer('No of Rows'), } _defaults = { 'sequence': lambda *a: 1, 'type': lambda *a: 'multiple_choice_multiple_ans', 'req_error_msg': lambda *a: 'This question requires an answer.', 'required_type': lambda *a: 'at least', 'req_ans': lambda *a: 1, 'comment_field_type': lambda *a: 'char', 'comment_label': lambda *a: 'Other (please specify)', 'comment_valid_type': lambda *a: 'do_not_validate', 'comment_valid_err_msg': lambda *a: 'The comment you entered is in an invalid format.', 'validation_type': lambda *a: 'do_not_validate', 'validation_valid_err_msg': lambda *a: 'The comment you entered is in an invalid format.', 'numeric_required_sum_err_msg': lambda *a: 'The choices need to add up to [enter sum here].', 'make_comment_field_err_msg': lambda *a: 'Please enter a comment.', 'in_visible_answer_type': lambda *a: 1 } def on_change_type(self, cr, uid, ids, type, context=None): val = {} val['is_require_answer'] = False val['is_comment_require'] = False val['is_validation_require'] = False val['comment_column'] = False if type in ['multiple_textboxes_diff_type']: val['in_visible_answer_type'] = False return {'value': val} if type in ['rating_scale']: val.update({ 'in_visible_rating_weight': False, 'in_visible_menu_choice': True }) return {'value': val} elif type in ['matrix_of_drop_down_menus']: val.update({ 'in_visible_rating_weight': True, 'in_visible_menu_choice': False }) return {'value': val} elif type in ['single_textbox']: val.update({ 'in_visible_rating_weight': True, 'in_visible_menu_choice': True }) return {'value': val} else: val.update({'in_visible_rating_weight':True, 'in_visible_menu_choice':True,\ 'in_visible_answer_type':True}) return {'value': val} def write(self, cr, uid, ids, vals, context=None): questions = self.read(cr,uid, ids, ['answer_choice_ids', 'type', 'required_type',\ 'req_ans', 'minimum_req_ans', 'maximum_req_ans', 'column_heading_ids', 'page_id', 'question']) for question in questions: col_len = len(question['column_heading_ids']) if 
vals.has_key('column_heading_ids'): for col in vals['column_heading_ids']: if type(col[2]) == type({}): col_len += 1 else: col_len -= 1 if vals.has_key('type'): que_type = vals['type'] else: que_type = question['type'] if que_type in ['matrix_of_choices_only_one_ans', 'matrix_of_choices_only_multi_ans',\ 'matrix_of_drop_down_menus', 'rating_scale']: if not col_len: raise osv.except_osv( _('Warning!'), _('You must enter one or more column headings for question "%s" of page %s.' ) % (question['question'], question['page_id'][1])) ans_len = len(question['answer_choice_ids']) if vals.has_key('answer_choice_ids'): for ans in vals['answer_choice_ids']: if type(ans[2]) == type({}): ans_len += 1 else: ans_len -= 1 if que_type not in [ 'descriptive_text', 'single_textbox', 'comment', 'table' ]: if not ans_len: raise osv.except_osv( _('Warning!'), _('You must enter one or more Answers for question "%s" of page %s.' ) % (question['question'], question['page_id'][1])) req_type = "" if vals.has_key('required_type'): req_type = vals['required_type'] else: req_type = question['required_type'] if que_type in ['multiple_choice_multiple_ans','matrix_of_choices_only_one_ans', \ 'matrix_of_choices_only_multi_ans', 'matrix_of_drop_down_menus',\ 'rating_scale','multiple_textboxes','numerical_textboxes','date','date_and_time']: if req_type in ['at least', 'at most', 'exactly']: if vals.has_key('req_ans'): if not vals['req_ans'] or vals['req_ans'] > ans_len: raise osv.except_osv( _('Warning!'), _("#Required Answer you entered \ is greater than the number of answer. \ Please use a number that is smaller than %d." ) % (ans_len + 1)) else: if not question[ 'req_ans'] or question['req_ans'] > ans_len: raise osv.except_osv( _('Warning!'), _("#Required Answer you entered is \ greater than the number of answer.\ Please use a number that is smaller than %d." ) % (ans_len + 1)) if req_type == 'a range': minimum_ans = 0 maximum_ans = 0 if vals.has_key('minimum_req_ans'): minimum_ans = vals['minimum_req_ans'] if not vals['minimum_req_ans'] or vals[ 'minimum_req_ans'] > ans_len: raise osv.except_osv( _('Warning!'), _("Minimum Required Answer\ you entered is greater than the number of answer.\ Please use a number that is smaller than %d." ) % (ans_len + 1)) else: minimum_ans = question['minimum_req_ans'] if not question['minimum_req_ans'] or question[ 'minimum_req_ans'] > ans_len: raise osv.except_osv( _('Warning!'), _("Minimum Required Answer you\ entered is greater than the number of answer. \ Please use a number that is smaller than %d." ) % (ans_len + 1)) if vals.has_key('maximum_req_ans'): maximum_ans = vals['maximum_req_ans'] if not vals['maximum_req_ans'] or vals[ 'maximum_req_ans'] > ans_len: raise osv.except_osv( _('Warning!'), _("Maximum Required Answer you \ entered for your maximum is greater than the number of answer.\ Please use a number that is smaller than %d." ) % (ans_len + 1)) else: maximum_ans = question['maximum_req_ans'] if not question['maximum_req_ans'] or question[ 'maximum_req_ans'] > ans_len: raise osv.except_osv( _('Warning!'), _("Maximum Required Answer you\ entered for your maximum is greater than the number of answer.\ Please use a number that is smaller than %d." 
) % (ans_len + 1)) if maximum_ans <= minimum_ans: raise osv.except_osv( _('Warning!'), _("Maximum Required Answer is greater \ than Minimum Required Answer")) if question['type'] == 'matrix_of_drop_down_menus' and vals.has_key( 'column_heading_ids'): for col in vals['column_heading_ids']: if not col[2] or not col[2].has_key( 'menu_choice') or not col[2]['menu_choice']: raise osv.except_osv( _('Warning!'), _("You must enter one or more menu choices\ in column heading.")) elif not col[2] or not col[2].has_key('menu_choice') or\ col[2]['menu_choice'].strip() == '': raise osv.except_osv( _('Warning!'), _("You must enter one or more menu \ choices in column heading (white spaces not allowed)." )) return super(survey_question, self).write(cr, uid, ids, vals, context=context) def create(self, cr, uid, vals, context=None): minimum_ans = 0 maximum_ans = 0 page = self.pool.get('survey.page').browse(cr, uid, vals['page_id'], context=context).title if vals.has_key('answer_choice_ids') and not len( vals['answer_choice_ids']): if vals.has_key('type') and vals['type'] not in [ 'descriptive_text', 'single_textbox', 'comment', 'table' ]: raise osv.except_osv( _('Warning!'), _('You must enter one or more answers for question "%s" of page %s .' ) % (vals['question'], page)) if vals.has_key('column_heading_ids') and not len( vals['column_heading_ids']): if vals.has_key('type') and vals['type'] in [ 'matrix_of_choices_only_one_ans', 'matrix_of_choices_only_multi_ans', 'matrix_of_drop_down_menus', 'rating_scale' ]: raise osv.except_osv( _('Warning!'), _('You must enter one or more column headings for question "%s" of page %s.' ) % (vals['question'], page)) if vals['type'] in [ 'multiple_choice_multiple_ans', 'matrix_of_choices_only_one_ans', 'matrix_of_choices_only_multi_ans', 'matrix_of_drop_down_menus', 'rating_scale', 'multiple_textboxes', 'numerical_textboxes', 'date', 'date_and_time' ]: if vals.has_key('is_require_answer') and vals.has_key( 'required_type') and vals['required_type'] in [ 'at least', 'at most', 'exactly' ]: if vals.has_key('answer_choice_ids') and vals['req_ans'] > len( vals['answer_choice_ids']) or not vals['req_ans']: raise osv.except_osv( _('Warning!'), _("#Required Answer you entered is greater than the number of answer. Please use a number that is smaller than %d." ) % (len(vals['answer_choice_ids']) + 1)) if vals.has_key('is_require_answer') and vals.has_key( 'required_type') and vals['required_type'] == 'a range': minimum_ans = vals['minimum_req_ans'] maximum_ans = vals['maximum_req_ans'] if vals.has_key( 'answer_choice_ids') or vals['minimum_req_ans'] > len( vals['answer_choice_ids'] ) or not vals['minimum_req_ans']: raise osv.except_osv( _('Warning!'), _("Minimum Required Answer you entered is greater than the number of answer. Please use a number that is smaller than %d." ) % (len(vals['answer_choice_ids']) + 1)) if vals.has_key( 'answer_choice_ids') or vals['maximum_req_ans'] > len( vals['answer_choice_ids'] ) or not vals['maximum_req_ans']: raise osv.except_osv( _('Warning!'), _("Maximum Required Answer you entered for your maximum is greater than the number of answer. Please use a number that is smaller than %d." ) % (len(vals['answer_choice_ids']) + 1)) if maximum_ans <= minimum_ans: raise osv.except_osv( _('Warning!'), _("Maximum Required Answer is greater than Minimum Required Answer." 
)) if vals['type'] == 'matrix_of_drop_down_menus': for col in vals['column_heading_ids']: if not col[2] or not col[2].has_key( 'menu_choice') or not col[2]['menu_choice']: raise osv.except_osv( _('Warning!'), _("You must enter one or more menu choices in column heading." )) elif not col[2] or not col[2].has_key( 'menu_choice') or col[2]['menu_choice'].strip() == '': raise osv.except_osv( _('Warning!'), _("You must enter one or more menu choices in column heading (white spaces not allowed)." )) res = super(survey_question, self).create(cr, uid, vals, context) return res def survey_save(self, cr, uid, ids, context=None): if context is None: context = {} search_obj = self.pool.get('ir.ui.view') search_id = search_obj.search(cr, uid, [('model', '=', 'survey.question.wiz'), ('name', '=', 'Survey Search')]) surv_name_wiz = self.pool.get('survey.name.wiz') surv_name_wiz.write(cr, uid, [context.get('sur_name_id', False)], { 'transfer': True, 'page_no': context.get('page_number', False) }) return { 'view_type': 'form', 'view_mode': 'form', 'res_model': 'survey.question.wiz', 'type': 'ir.actions.act_window', 'target': 'new', 'search_view_id': search_id[0], 'context': context } def default_get(self, cr, uid, fields, context=None): if context is None: context = {} data = super(survey_question, self).default_get(cr, uid, fields, context) if context.has_key('page_id'): data['page_id'] = context.get('page_id', False) return data
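# A minimal standalone sketch of the required-answer consistency checks that
# write()/create() above enforce against the number of answer choices.  The
# helper name `validate_required_answers` and the plain ValueError are
# illustrative only; the original code runs inside the ORM and raises
# osv.except_osv with translated messages.
def validate_required_answers(required_type, answer_count,
                              req_ans=None, minimum=None, maximum=None):
    if required_type in ('at least', 'at most', 'exactly'):
        if not req_ans or req_ans > answer_count:
            raise ValueError(
                "Required answers must be a number smaller than %d"
                % (answer_count + 1))
    elif required_type == 'a range':
        if not minimum or minimum > answer_count:
            raise ValueError(
                "Minimum required answers must be smaller than %d"
                % (answer_count + 1))
        if not maximum or maximum > answer_count:
            raise ValueError(
                "Maximum required answers must be smaller than %d"
                % (answer_count + 1))
        if maximum <= minimum:
            raise ValueError(
                "Maximum required answers must be greater than the minimum")

# Example: a question with 4 answer choices requiring between 1 and 3 answers
# passes, while requiring exactly 6 answers raises.
# validate_required_answers('a range', 4, minimum=1, maximum=3)
# validate_required_answers('exactly', 4, req_ans=6)   -> ValueError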
class payment_order(osv.osv): _name = 'payment.order' _description = 'Payment Order' _rec_name = 'reference' _order = 'id desc' #dead code def get_wizard(self, type): _logger.warning("No wizard found for the payment type '%s'.", type) return None def _total(self, cursor, user, ids, name, args, context=None): if not ids: return {} res = {} for order in self.browse(cursor, user, ids, context=context): if order.line_ids: res[order.id] = reduce(lambda x, y: x + y.amount, order.line_ids, 0.0) else: res[order.id] = 0.0 return res _columns = { 'date_scheduled': fields.date( 'Scheduled Date', states={'done': [('readonly', True)]}, help='Select a date if you have chosen Preferred Date to be fixed.' ), 'reference': fields.char('Reference', required=1, states={'done': [('readonly', True)]}, copy=False), 'mode': fields.many2one('payment.mode', 'Payment Mode', select=True, required=1, states={'done': [('readonly', True)]}, help='Select the Payment Mode to be applied.'), 'state': fields.selection( [('draft', 'Draft'), ('cancel', 'Cancelled'), ('open', 'Confirmed'), ('done', 'Done')], 'Status', select=True, copy=False, help= 'When an order is placed the status is \'Draft\'.\n Once the bank is confirmed the status is set to \'Confirmed\'.\n Then the order is paid the status is \'Done\'.' ), 'line_ids': fields.one2many('payment.line', 'order_id', 'Payment lines', states={'done': [('readonly', True)]}), 'total': fields.function(_total, string="Total", type='float'), 'user_id': fields.many2one('res.users', 'Responsible', required=True, states={'done': [('readonly', True)]}), 'date_prefered': fields.selection( [('now', 'Directly'), ('due', 'Due date'), ('fixed', 'Fixed date')], "Preferred Date", change_default=True, required=True, states={'done': [('readonly', True)]}, help= "Choose an option for the Payment Order:'Fixed' stands for a date specified by you.'Directly' stands for the direct execution.'Due date' stands for the scheduled date of execution." 
), 'date_created': fields.date('Creation Date', readonly=True), 'date_done': fields.date('Execution Date', readonly=True), 'company_id': fields.related('mode', 'company_id', type='many2one', relation='res.company', string='Company', store=True, readonly=True), } _defaults = { 'user_id': lambda self, cr, uid, context: uid, 'state': 'draft', 'date_prefered': 'due', 'date_created': lambda *a: time.strftime('%Y-%m-%d'), 'reference': lambda self, cr, uid, context: self.pool.get('ir.sequence').get( cr, uid, 'payment.order'), } def set_to_draft(self, cr, uid, ids, *args): self.write(cr, uid, ids, {'state': 'draft'}) self.create_workflow(cr, uid, ids) return True def action_open(self, cr, uid, ids, *args): ir_seq_obj = self.pool.get('ir.sequence') for order in self.read(cr, uid, ids, ['reference']): if not order['reference']: reference = ir_seq_obj.get(cr, uid, 'payment.order') self.write(cr, uid, order['id'], {'reference': reference}) return True def set_done(self, cr, uid, ids, *args): self.write(cr, uid, ids, {'date_done': time.strftime('%Y-%m-%d')}) self.signal_workflow(cr, uid, ids, 'done') return True def write(self, cr, uid, ids, vals, context=None): if context is None: context = {} payment_line_obj = self.pool.get('payment.line') payment_line_ids = [] if (vals.get('date_prefered', False) == 'fixed' and not vals.get('date_scheduled', False)) or vals.get( 'date_scheduled', False): for order in self.browse(cr, uid, ids, context=context): for line in order.line_ids: payment_line_ids.append(line.id) payment_line_obj.write(cr, uid, payment_line_ids, {'date': vals.get('date_scheduled', False)}, context=context) elif vals.get('date_prefered', False) == 'due': vals.update({'date_scheduled': False}) for order in self.browse(cr, uid, ids, context=context): for line in order.line_ids: payment_line_obj.write(cr, uid, [line.id], {'date': line.ml_maturity_date}, context=context) elif vals.get('date_prefered', False) == 'now': vals.update({'date_scheduled': False}) for order in self.browse(cr, uid, ids, context=context): for line in order.line_ids: payment_line_ids.append(line.id) payment_line_obj.write(cr, uid, payment_line_ids, {'date': False}, context=context) return super(payment_order, self).write(cr, uid, ids, vals, context=context)
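# A small restatement of the date propagation rule implemented by
# payment_order.write() above: the preferred-date option decides which date is
# pushed onto every payment line.  `line_date_for` is an illustrative helper,
# not part of the original module, and it ignores the ORM bookkeeping.
def line_date_for(date_prefered, date_scheduled=None, maturity_date=None):
    if date_prefered == 'fixed':
        return date_scheduled      # the fixed date chosen on the order
    if date_prefered == 'due':
        return maturity_date       # the move line's maturity date
    return False                   # 'now': direct execution, no payment date

# Example: line_date_for('due', maturity_date='2024-05-31') -> '2024-05-31'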
class payment_line(osv.osv): _name = 'payment.line' _description = 'Payment Line' def translate(self, orig): return { "due_date": "date_maturity", "reference": "ref" }.get(orig, orig) def _info_owner(self, cr, uid, ids, name=None, args=None, context=None): result = {} for line in self.browse(cr, uid, ids, context=context): owner = line.order_id.mode.bank_id.partner_id result[line.id] = self._get_info_partner(cr, uid, owner, context=context) return result def _get_info_partner(self, cr, uid, partner_record, context=None): if not partner_record: return False st = partner_record.street or '' st1 = partner_record.street2 or '' zip = partner_record.zip or '' city = partner_record.city or '' zip_city = zip + ' ' + city cntry = partner_record.country_id and partner_record.country_id.name or '' return partner_record.name + "\n" + st + " " + st1 + "\n" + zip_city + "\n" + cntry def _info_partner(self, cr, uid, ids, name=None, args=None, context=None): result = {} for line in self.browse(cr, uid, ids, context=context): result[line.id] = False if not line.partner_id: break result[line.id] = self._get_info_partner(cr, uid, line.partner_id, context=context) return result #dead code def select_by_name(self, cr, uid, ids, name, args, context=None): if not ids: return {} partner_obj = self.pool.get('res.partner') cr.execute( """SELECT pl.id, ml.%s FROM account_move_line ml INNER JOIN payment_line pl ON (ml.id = pl.move_line_id) WHERE pl.id IN %%s""" % self.translate(name), (tuple(ids), )) res = dict(cr.fetchall()) if name == 'partner_id': partner_name = {} for p_id, p_name in partner_obj.name_get( cr, uid, filter(lambda x: x and x != 0, res.values()), context=context): partner_name[p_id] = p_name for id in ids: if id in res and partner_name: res[id] = (res[id], partner_name[res[id]]) else: res[id] = (False, False) else: for id in ids: res.setdefault(id, (False, "")) return res def _amount(self, cursor, user, ids, name, args, context=None): if not ids: return {} currency_obj = self.pool.get('res.currency') if context is None: context = {} res = {} for line in self.browse(cursor, user, ids, context=context): ctx = context.copy() ctx['date'] = line.order_id.date_done or time.strftime('%Y-%m-%d') res[line.id] = currency_obj.compute(cursor, user, line.currency.id, line.company_currency.id, line.amount_currency, context=ctx) return res def _get_currency(self, cr, uid, context=None): user_obj = self.pool.get('res.users') currency_obj = self.pool.get('res.currency') user = user_obj.browse(cr, uid, uid, context=context) if user.company_id: return user.company_id.currency_id.id else: return currency_obj.search(cr, uid, [('rate', '=', 1.0)])[0] def _get_date(self, cr, uid, context=None): if context is None: context = {} payment_order_obj = self.pool.get('payment.order') date = False if context.get('order_id') and context['order_id']: order = payment_order_obj.browse(cr, uid, context['order_id'], context=context) if order.date_prefered == 'fixed': date = order.date_scheduled else: date = time.strftime('%Y-%m-%d') return date def _get_ml_inv_ref(self, cr, uid, ids, *a): res = {} for id in self.browse(cr, uid, ids): res[id.id] = False if id.move_line_id: if id.move_line_id.invoice: res[id.id] = id.move_line_id.invoice.id return res def _get_ml_maturity_date(self, cr, uid, ids, *a): res = {} for id in self.browse(cr, uid, ids): if id.move_line_id: res[id.id] = id.move_line_id.date_maturity else: res[id.id] = False return res def _get_ml_created_date(self, cr, uid, ids, *a): res = {} for id in self.browse(cr, uid, ids): 
if id.move_line_id: res[id.id] = id.move_line_id.date_created else: res[id.id] = False return res _columns = { 'name': fields.char('Your Reference', required=True), 'communication': fields.char( 'Communication', required=True, help= "Used as the message between ordering customer and current company. Depicts 'What do you want to say to the recipient about this order ?'" ), 'communication2': fields.char('Communication 2', help='The successor message of Communication.'), 'move_line_id': fields.many2one( 'account.move.line', 'Entry line', domain=[('reconcile_id', '=', False), ('account_id.type', '=', 'payable')], select=True, help= 'This Entry Line will be referred for the information of the ordering customer.' ), 'amount_currency': fields.float('Amount in Partner Currency', digits=(16, 2), required=True, help='Payment amount in the partner currency'), 'currency': fields.many2one('res.currency', 'Partner Currency', required=True), 'company_currency': fields.many2one('res.currency', 'Company Currency', readonly=True), 'bank_id': fields.many2one('res.partner.bank', 'Destination Bank Account'), 'order_id': fields.many2one('payment.order', 'Order', required=True, ondelete='cascade', select=True), 'partner_id': fields.many2one('res.partner', string="Partner", required=True, help='The Ordering Customer'), 'amount': fields.function(_amount, string='Amount in Company Currency', type='float', help='Payment amount in the company currency'), 'ml_date_created': fields.function(_get_ml_created_date, string="Effective Date", type='date', help="Invoice Effective Date"), 'ml_maturity_date': fields.function(_get_ml_maturity_date, type='date', string='Due Date'), 'ml_inv_ref': fields.function(_get_ml_inv_ref, type='many2one', relation='account.invoice', string='Invoice Ref.'), 'info_owner': fields.function(_info_owner, string="Owner Account", type="text", help='Address of the Main Partner'), 'info_partner': fields.function(_info_partner, string="Destination Account", type="text", help='Address of the Ordering Customer.'), 'date': fields.date( 'Payment Date', help= "If no payment date is specified, the bank will treat this payment line directly" ), 'create_date': fields.datetime('Created', readonly=True), 'state': fields.selection([('normal', 'Free'), ('structured', 'Structured')], 'Communication Type', required=True), 'bank_statement_line_id': fields.many2one('account.bank.statement.line', 'Bank statement line'), 'company_id': fields.related('order_id', 'company_id', type='many2one', relation='res.company', string='Company', store=True, readonly=True), } _defaults = { 'name': lambda obj, cursor, user, context: obj.pool.get('ir.sequence').get( cursor, user, 'payment.line'), 'state': 'normal', 'currency': _get_currency, 'company_currency': _get_currency, 'date': _get_date, } _sql_constraints = [ ('name_uniq', 'UNIQUE(name, company_id)', 'The payment line name must be unique per company!'), ] def onchange_move_line(self, cr, uid, ids, move_line_id, payment_type, date_prefered, date_scheduled, currency=False, company_currency=False, context=None): data = {} move_line_obj = self.pool.get('account.move.line') data['amount_currency'] = data['communication'] = data[ 'partner_id'] = data['bank_id'] = data['amount'] = False if move_line_id: line = move_line_obj.browse(cr, uid, move_line_id, context=context) data['amount_currency'] = line.amount_residual_currency res = self.onchange_amount(cr, uid, ids, data['amount_currency'], currency, company_currency, context) if res: data['amount'] = res['value']['amount'] 
data['partner_id'] = line.partner_id.id temp = line.currency_id and line.currency_id.id or False if not temp: if line.invoice: data['currency'] = line.invoice.currency_id.id else: data['currency'] = temp # calling onchange of partner and updating data dictionary temp_dict = self.onchange_partner(cr, uid, ids, line.partner_id.id, payment_type) data.update(temp_dict['value']) data['communication'] = line.ref if date_prefered == 'now': #no payment date => immediate payment data['date'] = False elif date_prefered == 'due': data['date'] = line.date_maturity elif date_prefered == 'fixed': data['date'] = date_scheduled return {'value': data} def onchange_amount(self, cr, uid, ids, amount, currency, cmpny_currency, context=None): if (not amount) or (not cmpny_currency): return {'value': {'amount': False}} res = {} currency_obj = self.pool.get('res.currency') company_amount = currency_obj.compute(cr, uid, currency, cmpny_currency, amount) res['amount'] = company_amount return {'value': res} def onchange_partner(self, cr, uid, ids, partner_id, payment_type, context=None): data = {} partner_obj = self.pool.get('res.partner') payment_mode_obj = self.pool.get('payment.mode') data['info_partner'] = data['bank_id'] = False if partner_id: part_obj = partner_obj.browse(cr, uid, partner_id, context=context) partner = part_obj.name or '' data['info_partner'] = self._get_info_partner(cr, uid, part_obj, context=context) if part_obj.bank_ids and payment_type: bank_type = payment_mode_obj.suitable_bank_types( cr, uid, payment_type, context=context) for bank in part_obj.bank_ids: if bank.state in bank_type: data['bank_id'] = bank.id break return {'value': data} def fields_get(self, cr, uid, fields=None, context=None, write_access=True, attributes=None): res = super(payment_line, self).fields_get(cr, uid, fields, context, write_access, attributes) if 'communication2' in res: res['communication2'].setdefault('states', {}) res['communication2']['states']['structured'] = [('readonly', True) ] res['communication2']['states']['normal'] = [('readonly', False)] return res
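# _amount() and onchange_amount() above delegate the conversion of the
# partner-currency amount into the company currency to res.currency.compute()
# at the order's execution date.  The sketch below restates the arithmetic
# only, with rates expressed against a common base currency; it deliberately
# skips the rate-date lookup and rounding that the real method performs, and
# the function name is illustrative.
def convert_amount(amount_currency, rate_from, rate_to):
    if not rate_from:
        raise ValueError("missing rate for the source currency")
    return amount_currency / rate_from * rate_to

# Example: 100.0 in a currency at rate 1.0 converted to a currency at
# rate 1.08 gives 108.0.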
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################

import time

from openerp.osv import orm, fields
from openerp.addons import decimal_precision as dp

from .res_company import COMPANY_FISCAL_TYPE, COMPANY_FISCAL_TYPE_DEFAULT

FISCAL_RULE_COLUMNS = {
    'partner_fiscal_type_id': fields.many2one(
        'l10n_br_account.partner.fiscal.type', 'Tipo Fiscal do Parceiro'),
    'fiscal_category_id': fields.many2one(
        'l10n_br_account.fiscal.category', 'Categoria'),
    'fiscal_type': fields.selection(
        COMPANY_FISCAL_TYPE, u'Regime Tributário', required=True),
    'revenue_start': fields.float(
        'Faturamento Inicial',
        digits_compute=dp.get_precision('Account'),
        help="Faixa inicial de faturamento bruto"),
    'revenue_end': fields.float(
        'Faturamento Final',
        digits_compute=dp.get_precision('Account'),
        help="Faixa final de faturamento bruto"),
}

OTHERS_FISCAL_RULE_COLUMNS_TEMPLATE = {
    'parent_id': fields.many2one(
        'account.fiscal.position.rule.template', 'Regra Pai'),
    'child_ids': fields.one2many(
        'account.fiscal.position.rule.template', 'parent_id',
        'Regras Filhas'),
}
class travel_journey(orm.Model): """Journey of travel""" _name = 'travel.journey' _description = _(__doc__) _journey_type_classes = {} @staticmethod def _check_dep_arr_dates(departure, arrival): return not departure or not arrival or departure <= arrival def _estimate_datetime(self, cr, uid, ids, field_name, context=None): """If there is no start date from journey, get it from travel""" if type(ids) in (int, long): ids = [ids] res = {} for journey in self.browse(cr, uid, ids, context=context): date = False if journey.type: try: journey_class = self._journey_type_classes[journey.type] date = journey_class._estimate_typed_date( self, journey, field_name) except KeyError: _logger.error( _('Transportation type "%s" has not registered its ' 'class in _journey_types, skipping its dates') % journey.type) except AttributeError: _logger.error( _('Transportation type "%s" has not registered a ' '_estimate_typed_date() function, skipping its dates' ) % journey.type) if field_name == 'date_start': date = (date or journey.departure or journey.passenger_id.travel_id.date_start) elif field_name == 'date_stop': date = (date or journey.arrival or journey.passenger_id.travel_id.date_stop) # Make sure every date is in datetime format and not simply date try: date = datetime.strptime(date, DEFAULT_SERVER_DATE_FORMAT) except ValueError: date = datetime.strptime(date, DEFAULT_SERVER_DATETIME_FORMAT) res[journey.id] = date return res def _estimate_date(self, cr, uid, ids, field_name, arg=None, context=None): datetimes = self._estimate_datetime(cr, uid, ids, field_name, context=context) return { i: datetimes[i].strftime(DEFAULT_SERVER_DATE_FORMAT) for i in datetimes } def _estimate_time(self, cr, uid, ids, field_name, arg=None, context=None): datetimes = self._estimate_datetime(cr, uid, ids, field_name, context=context) return { i: datetimes[i].strftime(DEFAULT_SERVER_TIME_FORMAT) for i in datetimes } def _inv_estimate_date(self, cr, uid, ids, field_name, val, arg, context=None): """If there is no start date in journey, set it in travel""" if type(ids) in (int, long): ids = [ids] for journey in self.browse(cr, uid, ids, context=context): if journey.type: try: journey_class = self._journey_type_classes[journey.type] if (journey_class._inv_estimate_typed_date( self, journey, field_name, val)): continue except KeyError: _logger.error( _('Transportation type "%s" has not registered its ' 'class in _journey_types, skipping its dates') % journey.type) except AttributeError: _logger.error( _('Transportation type "%s" has not registered a ' '_inv_estimate_typed_date() function, skipping its ' 'dates') % journey.type) if field_name == 'date_start': if journey.departure: journey.write({'departure': val}) elif journey.passenger_id.travel_id.date_start: journey.passenger_id.travel_id.write({'date_start': val}) elif field_name == 'date_stop': if journey.arrival: journey.write({'arrival': val}) elif journey.passenger_id.travel_id.date_stop: journey.passenger_id.travel_id.write({'date_stop': val}) def _default_class(self, cr, uid, context=None): ir_model_data = self.pool.get('ir.model.data') return ir_model_data.get_object_reference( cr, uid, 'travel_journey', 'travel_journey_class_directive', )[1] def _get_type(self, cr, uid, context=None): acc_type_obj = self.pool.get('travel.journey.type') ids = acc_type_obj.search(cr, uid, []) res = acc_type_obj.read(cr, uid, ids, ['code', 'name'], context) return [(r['code'], r['name']) for r in res] def create(self, cr, uid, vals, context=None): """If is_return is checked, create a return 
trip after.""" def clear_return_vals(mVals): mVals = mVals.copy() if mVals.get('is_return'): mVals['is_return'] = False mVals['return_origin'] = False mVals['return_destination'] = False mVals['return_departure'] = False mVals['return_arrival'] = False return mVals return_vals = None if vals.get('is_return'): return_vals = clear_return_vals(vals.copy()) return_vals['is_return'] = False return_vals['origin'] = vals.get('destination', False) return_vals['destination'] = vals.get('origin', False) return_vals['departure'] = vals.get('return_departure', False) return_vals['arrival'] = vals.get('return_arrival', False) vals = clear_return_vals(vals) res = super(travel_journey, self).create(cr, uid, vals, context=context) if return_vals: super(travel_journey, self).create(cr, uid, return_vals, context=context) return res @staticmethod def on_change_return(cr, uid, ids, key, location, context=None): return {'value': {key: location}} def on_change_times(self, cr, uid, ids, departure, arrival, return_trip=False, context=None): if self._check_dep_arr_dates(departure, arrival): return {} return { 'value': { 'return_arrival' if return_trip else 'arrival': False, }, 'warning': { 'title': 'Arrival after Departure', 'message': ('Departure (%s) cannot be before Arrival (%s).' % (departure, arrival)), }, } def check_date_exists(self, cr, uid, ids, context=None): if type(ids) is not list: ids = [ids] if not ids: # pragma: no cover return False journey = self.browse(cr, uid, ids[0], context=context) return journey.departure or journey.arrival def check_date_exists_return(self, cr, uid, ids, context=None): if type(ids) is not list: ids = [ids] if not ids: # pragma: no cover return False journey = self.browse(cr, uid, ids[0], context=context) return (not journey.is_return or journey.return_departure or journey.return_arrival) def check_date(self, cr, uid, ids, context=None): if type(ids) is not list: ids = [ids] if not ids: # pragma: no cover return False journey = self.browse(cr, uid, ids[0], context=context) return self._check_dep_arr_dates(journey.departure, journey.arrival) def check_date_return(self, cr, uid, ids, context=None): if type(ids) is not list: ids = [ids] if not ids: # pragma: no cover return False journey = self.browse(cr, uid, ids[0], context=context) return self._check_dep_arr_dates(journey.return_departure, journey.return_arrival) def check_uom(self, cr, uid, ids, context=None): if type(ids) is not list: ids = [ids] if not ids: # pragma: no cover return False journey = self.browse(cr, uid, ids[0], context=context) return not (bool(journey.baggage_weight) ^ bool(journey.baggage_weight_uom)) def name_get(self, cr, uid, ids, context=None): return [(journey.id, "%s (%s -> %s)" % (journey.passenger_id.partner_id.name, journey.origin.name_get()[0][1], journey.destination.name_get()[0][1])) for journey in self.browse(cr, uid, ids, context=context)] def company_get(self, cr, uid, ids, context=None): res = _("N/A") if type(ids) not in (int, long) and ids: ids = ids[0] journey = self.browse(cr, uid, ids, context=context) try: if journey.type: journey_class = self._journey_type_classes[journey.type] res = journey_class._company_typed_get(self, journey) except KeyError: _logger.error( _('Transportation type "%s" has not registered its ' 'class in _journey_types, skipping its company') % journey.type) except AttributeError: _logger.error( _('Transportation type "%s" has not registered a ' '_estimate_typed_date() function, skipping its company') % journey.type) finally: return res def origin_get(self, cr, 
uid, ids, context=None): if type(ids) is not list: ids = [ids] if ids: return self.browse(cr, uid, ids[0], context=context).origin def destination_get(self, cr, uid, ids, context=None): if type(ids) is not list: ids = [ids] if ids: return self.browse(cr, uid, ids[0], context=context).destination def departure_date_get(self, cr, uid, ids, context=None): if type(ids) is not list: ids = [ids] if ids: return self._estimate_date(cr, uid, ids, 'date_start', context=context)[ids[0]] def arrival_date_get(self, cr, uid, ids, context=None): if type(ids) is not list: ids = [ids] if ids: return self._estimate_date(cr, uid, ids, 'date_stop', context=context)[ids[0]] def departure_time_get(self, cr, uid, ids, context=None): if type(ids) is not list: ids = [ids] if ids: return self._estimate_time(cr, uid, ids, 'date_start', context=context)[ids[0]] def arrival_time_get(self, cr, uid, ids, context=None): if type(ids) is not list: ids = [ids] if ids: return self._estimate_time(cr, uid, ids, 'date_stop', context=context)[ids[0]] _columns = { 'origin': fields.many2one('res.better.zip', 'Origin', required='True', help='Source city of travel.'), 'destination': fields.many2one('res.better.zip', 'Destination', required='True', help='Destination city of travel.'), 'return_origin': fields.many2one('res.better.zip', 'Origin (return)'), 'return_destination': fields.many2one('res.better.zip', 'Destination (return)'), 'is_return': fields.boolean('Return Trip', help='Generate a return trip'), 'departure': fields.datetime('Desired Departure', help='Desired date and time of departure.'), 'arrival': fields.datetime('Desired Arrival', help='Desired date and time of Arrival.'), 'return_departure': fields.datetime('Desired Departure (return)'), 'return_arrival': fields.datetime('Desired Arrival (return)'), 'class_id': fields.many2one('travel.journey.class', 'Class', required=True, help='Desired class of voyage.'), 'baggage_qty': fields.integer('Baggage Quantity', help='Number of articles in baggage.'), 'baggage_weight': fields.float('Baggage Weight', help='Weight of baggage.'), 'baggage_weight_uom': fields.many2one('product.uom', 'Baggage Weight Unit of Measure', help='Unit of Measure for Baggage Weight'), 'comment': fields.text('Comments'), 'passenger_id': fields.many2one('travel.passenger', 'Passenger', required=True, help='Passenger on this journey.'), 'travel': fields.related('passenger_id', 'travel_name', type='char', string='Travel', store=True), 'state': fields.related('passenger_id', 'travel_state', type='selection', string='State', store=True), 'type': fields.selection(_get_type, 'Travel journey type', help='Travel journey type.'), 'reservation': fields.char('Reservation Number', size=256, help="Number of the ticket reservation."), 'cancellation': fields.text('Cancellation', help='Notes on cancellation.'), 'date_start': fields.function( _estimate_date, fnct_inv=_inv_estimate_date, type="date", help="Best estimate of start date calculated from filled fields."), 'date_stop': fields.function( _estimate_date, fnct_inv=_inv_estimate_date, type="date", help="Best estimate of end date calculated from filled fields."), } _defaults = {'class_id': _default_class} _constraints = [ (check_date_exists, _('A desired date of arrival or departure must be set on journey.'), ['departure', 'arrival']), (check_date_exists_return, _('A desired date of arrival or departure must be set on journey for ' 'return.'), ['return_departure', 'return_arrival']), (check_date, _('Departure date cannot be after arrival date on journey.'), 
['departure', 'arrival']),
        (check_date_return,
         _('Departure date cannot be after arrival date on journey for '
           'return.'),
         ['return_departure', 'return_arrival']),
        (check_uom,
         _('Unit of Measure not specified for Baggage Weight.'),
         ['baggage_weight', 'baggage_weight_uom']),
    ]
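# Standalone restatements of two checks used by the constraints above: baggage
# weight and its unit of measure must be set together or not at all (an XOR
# test), and a departure/arrival pair is only rejected when both dates exist
# and are out of order.  Function names are illustrative, not from the module.
def baggage_weight_consistent(weight, weight_uom):
    return not (bool(weight) ^ bool(weight_uom))

def dep_arr_dates_ok(departure, arrival):
    return not departure or not arrival or departure <= arrival

# Examples:
# baggage_weight_consistent(23.0, None)                     -> False
# dep_arr_dates_ok('2024-06-01 10:00', '2024-06-01 08:00')  -> False
# dep_arr_dates_ok('2024-06-01 10:00', False)               -> True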
        # subtract the shift hours
        sign_in_date = datetime.strptime(v["name"], '%Y-%m-%d %H:%M:%S') - timedelta(hours=8)
        date = sign_in_date.strftime("%Y-%m-%d")
        sign_in_date = datetime.strptime(date, '%Y-%m-%d') + timedelta(hours=hour_from - 7)
        v['action'] = 'sign_in'
        v['binary_action'] = '1'
        v['name'] = sign_in_date.strftime('%Y-%m-%d %H:%M:%S')
        v['name_date'] = sign_in_date.strftime('%Y-%m-%d')
        new_id = super(hr_attendance, self).create(cr, uid, v, context=context)
        return new_id

    _columns = {
        'keterlambatan': fields.char("Lateness"),
        'employee_id': fields.many2one('hr.employee', "Employee"),
        "binary_action": fields.selection(
            [('1', 'Sign In'), ('0', 'Sign Out'), ('2', 'Other')], 'Attendance'),
    }
    _constraints = [
        (_altern_si_so,
         'Error! Sign in (resp. Sign out) must follow Sign out (resp. Sign in)',
         ['action']),
    ]


class tampung_eror_finger(osv.osv):
    _name = 'hr.tampung.error'
    _description = "Holds fingerprint error records"
    _columns = {
        "name": fields.integer('Fingerprint ID'),
        'employee_id': fields.many2one('hr.employee', 'Employee'),
        'department_id': fields.many2one('hr.department', 'Department'),
        'lokasi': fields.char('Work Location'),
        'no_mesin': fields.integer('Machine No'),
class account_print_journal(osv.osv_memory): _inherit = "account.common.journal.report" _name = 'account.print.journal' _description = 'Account Print Journal' _columns = { 'sort_selection': fields.selection([ ('l.date', 'Date'), ('am.name', 'Journal Entry Number'), ], 'Entries Sorted by', required=True), 'journal_ids': fields.many2many('account.journal', 'account_print_journal_journal_rel', 'account_id', 'journal_id', 'Journals', required=True), } _defaults = { 'sort_selection': 'am.name', 'filter': 'filter_period', 'journal_ids': False, } def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False, submenu=False): ''' used to set the domain on 'journal_ids' field: we exclude or only propose the journals of type sale/purchase (+refund) accordingly to the presence of the key 'sale_purchase_only' in the context. ''' if context is None: context = {} res = super(account_print_journal, self).fields_view_get(cr, uid, view_id=view_id, view_type=view_type, context=context, toolbar=toolbar, submenu=submenu) doc = etree.XML(res['arch']) if context.get('sale_purchase_only'): domain = "[('type', 'in', ('sale','purchase','sale_refund','purchase_refund'))]" else: domain = "[('type', 'not in', ('sale','purchase','sale_refund','purchase_refund'))]" nodes = doc.xpath("//field[@name='journal_ids']") for node in nodes: node.set('domain', domain) res['arch'] = etree.tostring(doc) return res def _print_report(self, cr, uid, ids, data, context=None): if context is None: context = {} data = self.pre_print_report(cr, uid, ids, data, context=context) data['form'].update( self.read(cr, uid, ids, ['sort_selection'], context=context)[0]) if context.get('sale_purchase_only'): report_name = 'account.journal.period.print.sale.purchase' else: report_name = 'account.journal.period.print' return { 'type': 'ir.actions.report.xml', 'report_name': report_name, 'datas': data }
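# fields_view_get() above injects a journal-type domain into the form view by
# rewriting its XML architecture with lxml.  The same manipulation in
# isolation, on a made-up arch string:
from lxml import etree

arch = '<form><field name="journal_ids"/></form>'
doc = etree.XML(arch)
domain = "[('type', 'in', ('sale','purchase','sale_refund','purchase_refund'))]"
for node in doc.xpath("//field[@name='journal_ids']"):
    node.set('domain', domain)
arch = etree.tostring(doc)  # the field element now carries the domain attribute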
'employment_start':fields.date('Employment Started'), 'date_request':fields.date('Request Date', required=True), 'date_done':fields.date('Done Date', required=False, readonly=True), 'approve_ids': fields.one2many('hr.dimission.item', 'dimission_id', 'Approvals', domain=[('type','=','approve')]), 'transfer_ids': fields.one2many('hr.dimission.item', 'dimission_id', 'Transfers', domain=[('type','=','transfer')]), 'payslip_id': fields.many2many('hr.emppay', string='Payslip'), 'attrpt_ids': fields.many2many('hr.rpt.attend.month', string='Attendance Reports'), 'hr_clock_ids': fields.function(_emp_clocks, string='HR Clocks', type='many2many', relation='hr.clock', readonly=True), 'attachment_lines': fields.one2many('ir.attachment', 'hr_admission_id','Attachment'), 'company_id':fields.many2one('res.company', 'Company', required=True), 'state': fields.selection([ ('draft', 'Draft'), ('in_progress', 'In Progress'), ('done', 'Done'), ('cancel', 'Cancel'), ], 'Status', select=True, readonly=True), } _defaults = { 'state': 'draft', } def default_get(self, cr, uid, fields_list, context=None): defaults = super(hr_dimission, self).default_get(cr, uid, fields_list, context=context) if not defaults: defaults = {} company_id = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id item_tmpl_obj = self.pool.get('hr.dimission.item.template')
class reglement_piece(osv.osv): _name = "reglement.piece" _description = "Reglement Piece" _rec_name = "code_piece" def create(self, cr, uid, vals, context=None): if context.get('type') == 'out': vals['code_piece'] = self.pool.get('ir.sequence').get( cr, uid, 'reglement.piece.client') if context.get('type') == 'in': vals['code_piece'] = self.pool.get('ir.sequence').get( cr, uid, 'reglement.piece.fournisseur') vals['type'] = context.get('type') res = super(reglement_piece, self).create(cr, uid, vals, context) return res @api.depends('montant_piece', 'taux', 'currency_id') def _amount_line(self): for piece in self: print "*****************", piece.currency_id.id if piece.taux: if piece.currency_id.id == 137: piece.montant_monnaie_local = piece.montant_piece piece.taux = 0 #currency_obj = self.env['res.currency'].browse(self.currency_id.id) else: piece.montant_monnaie_local = piece.montant_piece * piece.taux else: piece.montant_monnaie_local = piece.montant_piece def _currency_get(self, cr, uid, context=None): ids = self.pool.get('res.currency').search(cr, uid, [('name', '=', 'TND')], context=context) return ids[0] # This function automatically sets the mode to Cheque by salwa. # def _get_default_mode(self, cr, uid, ids, context=None): # res = self.pool.get('reglement.mode').search(cr, uid, [('designation', '=', 'Chèque')], context=context) # return res and res[0] or False _columns = { 'code_piece': fields.char('Reference', size=64, select=True, readonly=True), 'num_cheque_traite': fields.char('N Cheque/Traite', states={'cashed': [('readonly', True)]}), 'num_compte': fields.char('Account Number', states={'cashed': [('readonly', True)]}), 'agence': fields.char('Bank Agency', size=254, states={ 'draft': [('readonly', False)], 'free': [('readonly', True)], 'cashed': [('readonly', True)], 'impaid': [('readonly', True)], }), 'titulaire': fields.char('Holder', size=254, required=True, readonly=True, states={'draft': [('readonly', False)]}), 'date_echance': fields.date('Maturity Date', required=True, readonly=False, states={'draft': [('readonly', False)]}), 'montant_piece': fields.float('Amount', digits_compute=dp.get_precision('Account'), readonly=True, states={'draft': [('readonly', False)]}), 'partner_id': fields.many2one('res.partner', 'Partner', required=True, readonly=True, states={'draft': [('readonly', False)]}), 'mode_reglement': fields.many2one('reglement.mode', 'Payment Mode', ondelete='set null', readonly=True, states={'draft': [('readonly', False)]}), 'banque_id': fields.many2one('reglement.banque', 'Bank', ondelete='set null', readonly=True, states={'draft': [('readonly', False)]}), 'state': fields.selection([('draft', 'Draft'), ('free', 'Free'), ('integrated', 'Integrated'), ('pimpaid', 'Partially Impaid'), ('impaid', 'Impaid'), ('cashed', 'Cashed')], 'State', readonly=True), 'montant_paye': fields.float('Montant Restant', digits_compute=dp.get_precision('Account'), readonly=True, states={'draft': [('readonly', False)]}), 'currency_id': fields.many2one('res.currency', 'Devise', required=True), 'taux': fields.float('Taux de Change', digits_compute=dp.get_precision('Account'), readonly=True, states={'draft': [('readonly', False)]}), 'montant_monnaie_local': fields.float('Montant Monnaie Locale', readonly=True, digits_compute=dp.get_precision('Account'), store=True, compute='_amount_line'), 'nature_piece': fields.selection([('piece_client', 'Piece Client'), ('notre_piece', 'Notre Propre Piece')], 'Nature Piece', readonly=True, states={'draft': [('readonly', False)]}), 'type': 
fields.selection([('in', 'in'), ('out', 'out'), ('dep', 'dep')], 'Type'), 'date_encaissement': fields.date('Date Encaissement', readonly=False, states={ 'draft': [('readonly', False)], 'integrated': [('readonly', False)] }), } _defaults = { 'state': lambda *a: 'draft', 'montant_paye': 0.0, 'type': 'out', 'code_piece': '/', 'currency_id': _currency_get, # 'mode_reglement' :_get_default_mode, } def reg_draft(self, cr, uid, ids, context=None): print 'print draft .........................' return self.write(cr, uid, ids, {'state': 'draft'}) def reg_impaid(self, cr, uid, ids, context=None): print "impaid ....." return self.write(cr, uid, ids, {'state': 'impaid'}) def reg_free(self, cr, uid, ids, context=None): print 'freee ...................' for id in ids: montant = self.browse( cr, uid, id, ).montant_piece if montant == 0: raise osv.except_osv( _('le montant du cheque ne peut pas etre nul !'), _("Veuillez verifier la piece")) return self.write(cr, uid, ids, {'state': 'free'}) def reg_cashed(self, cr, uid, ids, context=None): for id in ids: date_encaiss = self.browse(cr, uid, id, context).date_encaissement if not date_encaiss: raise osv.except_osv( _('Date encaissement ne peut pas etre nul !'), _("Veuillez saisir la date d'encaissement")) self.write(cr, uid, ids, {'state': 'cashed'}) return True def reg_integrated(self, cr, uid, ids): self.write(cr, uid, ids, {'state': 'integrated'}) return True
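# reglement_piece.create() above picks the ir.sequence used for 'code_piece'
# from the context type: 'out' uses the customer sequence and 'in' the
# supplier one.  A plain-Python restatement of that dispatch; the helper name
# is illustrative, and any other type simply keeps the '/' default.
def sequence_code_for(piece_type):
    return {
        'out': 'reglement.piece.client',
        'in': 'reglement.piece.fournisseur',
    }.get(piece_type)

# Example: sequence_code_for('out') -> 'reglement.piece.client'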
    ### Function that will update the cron
    ### frequency
        self.pool.get("currency.rate.update").save_cron(
            cr, uid, {"interval_type": interval})
        companies = self.search(cr, uid, [])
        for comp in companies:
            self.write(cr, uid, comp, {"interval_type": interval})
        return {}

    _inherit = "res.company"

    _columns = {
        ### activate the currency update
        "auto_currency_up": fields.boolean(
            "Automatic update of the currency for this company"),
        "services_to_use": fields.one2many(
            "currency.rate.update.service", "company_id",
            "Currency update services"),
        ### predefined cron frequency
        "interval_type": fields.selection(
            [("days", "Day(s)"), ("weeks", "Week(s)"), ("months", "Month(s)")],
            "Currency update frequency",
            help="Changing this value will also affect other companies",
        ),
        ### function field that tells whether the multi-company
        ### currency implementation is enabled
        "multi_company_currency_enable": fields.function(
            _multi_curr_enable, method=True, type="boolean",
            string="Multi company currency",
            help="If this box is not checked you cannot mark a currency "
                 "as active in two companies",
        ),
    }
class wizard(osv.TransientModel): """ A wizard to manage the modification of protocol object """ _name = 'protocollo.fascicola.wizard' _description = 'Fascicola Protocollo' def set_before(self, before, label, value): if not value: value = '' before += value + '\n' return before def set_after(self, after, label, value): after += value + '\n' return after _columns = { 'complete_name': fields.char('Numero Protocollo', size=256, required=True, readonly=True), 'registration_date': fields.datetime('Data Registrazione', readonly=True), 'type': fields.selection([('out', 'Uscita'), ('in', 'Ingresso'), ('internal', 'Interno')], 'Tipo', size=32, required=True, readonly=True), 'cause': fields.text('Motivo della Modifica', required=True), 'dossier_ids': fields.many2many('protocollo.dossier', 'protocollo_fascicola_wizard_dossier_rel', 'wizard_id', 'dossier_id', 'Fascicoli'), } def _default_complete_name(self, cr, uid, context): protocollo = self.pool.get('protocollo.protocollo').browse( cr, uid, context['active_id']) return protocollo.complete_name def _default_registration_date(self, cr, uid, context): protocollo = self.pool.get('protocollo.protocollo').browse( cr, uid, context['active_id']) return protocollo.registration_date def _default_type(self, cr, uid, context): protocollo = self.pool.get('protocollo.protocollo').browse( cr, uid, context['active_id']) return protocollo.type def _default_dossier_ids(self, cr, uid, context): protocollo = self.pool.get('protocollo.protocollo').browse( cr, uid, context['active_id']) dossier_ids = [] for dossier_id in protocollo.dossier_ids: dossier_ids.append(dossier_id.id) return [(6, 0, dossier_ids)] _defaults = { 'complete_name': _default_complete_name, 'registration_date': _default_registration_date, 'type': _default_type, 'dossier_ids': _default_dossier_ids, } def action_save(self, cr, uid, ids, context=None): vals = {} before = {} after = {} wizard = self.browse(cr, uid, ids[0], context) protocollo_obj = self.pool.get('protocollo.protocollo') protocollo = protocollo_obj.browse(cr, uid, context['active_id'], context=context) before['Fascicolo'] = "" after['Fascicolo'] = "" vals['dossier_ids'] = [[6, 0, [d.id for d in wizard.dossier_ids]]] before['Fascicolo'] = self.set_before( before['Fascicolo'], 'Fascicolo', ', '.join([d.name for d in protocollo.dossier_ids])) after['Fascicolo'] = self.set_after( after['Fascicolo'], 'Fascicolo', ', '.join([dw.name for dw in wizard.dossier_ids])) protocollo_obj.write(cr, uid, [context['active_id']], vals) action_class = "history_icon update" body = "<div class='%s'><ul>" % action_class for key, before_item in before.items(): if before[key] != after[key]: body = body + "<li>%s: <span style='color:#990000'> %s</span> -> <span style='color:#009900'> %s </span></li>" \ % (str(key), before_item.encode("utf-8"), after[key].encode("utf-8")) else: body = body + "<li>%s: <span style='color:#999'> %s</span> -> <span style='color:#999'> %s </span></li>" \ % (str(key), before_item.encode("utf-8"), after[key].encode("utf-8")) post_vars = { 'subject': "Modifica Fascicolazione: \'%s\'" % wizard.cause, 'body': body, 'model': "protocollo.protocollo", 'res_id': context['active_id'], } body += "</ul></div>" new_context = dict(context).copy() # if protocollo.typology.name == 'PEC': new_context.update({'pec_messages': True}) thread_pool = self.pool.get('protocollo.protocollo') thread_pool.message_post(cr, uid, context['active_id'], type="notification", context=new_context, **post_vars) return {'type': 'ir.actions.act_window_close'}
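# action_save() above builds an HTML change log by comparing the 'before' and
# 'after' values per key and colouring the changed ones.  A compact pure
# function with the same idea (markup simplified, name illustrative); note
# that here the closing </ul></div> is appended before the string is used.
def build_change_log(before, after):
    items = []
    for key in before:
        changed = before[key] != after.get(key)
        old_color = '#990000' if changed else '#999'
        new_color = '#009900' if changed else '#999'
        items.append(
            "<li>%s: <span style='color:%s'>%s</span> -> "
            "<span style='color:%s'>%s</span></li>"
            % (key, old_color, before[key], new_color, after.get(key, '')))
    return "<div class='history_icon update'><ul>%s</ul></div>" % ''.join(items)

# Example: build_change_log({'Fascicolo': 'A'}, {'Fascicolo': 'A, B'})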
logger.debug('Trying to get connection to SERVER %s, PORT %s, USER %s, PWD %s, DRIVER %s' % (tk_pyodbc.server, tk_pyodbc.port, tk_pyodbc.user, tk_pyodbc.pwd, tk_pyodbc.driver)) logger.debug("DRIVER=%s;SERVER=%s;UID=%s;PWD=%s;PORT=%s;%s" % (tk_pyodbc.driver, tk_pyodbc.server, tk_pyodbc.user, tk_pyodbc.pwd, tk_pyodbc.port, tk_pyodbc.optional_params)) conn = pyodbc.connect("DRIVER=%s;SERVER=%s;UID=%s;PWD=%s;PORT=%s;%s" % (tk_pyodbc.driver, tk_pyodbc.server, tk_pyodbc.user, tk_pyodbc.pwd, tk_pyodbc.port, tk_pyodbc.optional_params), timeout=timeout or 0) logger.debug("Connection acquired") return conn except Exception, e: raise orm.except_orm(_('Error !'), _('Could not get a valid connection. %s') % e) def check_download_connection(self, cr, uid, ids, context=None): """ checks the connection to the sql server """ conn = self.get_connection(cr, uid, ids, context) conn.close() raise orm.except_orm(_('Success'), _('Connection to Oscar was successful!')) _columns = { 'name': fields.char('Name', size=64, required=True), 'database_type': fields.selection(__database_types, 'Database Type'), 'server': fields.char('Server address', size=256, required=False), 'database': fields.char('Database name', size=256, required=False), 'port': fields.integer('Database port'), 'user': fields.char('Username', size=256, required=False), 'pwd': fields.char('Password', size=256, required=False), 'driver': fields.char('Driver location', size=256), 'optional_params': fields.char('Optional parameters', size=256), } tk_pyodbc()
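# get_connection() above assembles an ODBC connection string from the record's
# fields and hands it to pyodbc.connect() with a login timeout, as the code
# itself shows.  The sketch below isolates the string building; every value in
# the example is a placeholder, and logging the password the way the debug
# statements above do is best avoided.
def build_connection_string(driver, server, user, pwd, port, optional_params):
    return ("DRIVER=%s;SERVER=%s;UID=%s;PWD=%s;PORT=%s;%s"
            % (driver, server, user, pwd, port, optional_params))

# Example (placeholder values):
# conn = pyodbc.connect(
#     build_connection_string('FreeTDS', 'db.example.com', 'sa', 'secret',
#                             1433, 'TDS_Version=8.0'),
#     timeout=5)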
class ir_property(osv.osv): _name = 'ir.property' _columns = { 'name': fields.char('Name', select=1), 'res_id': fields.char( 'Resource', help="If not set, acts as a default value for new resources", select=1), 'company_id': fields.many2one('res.company', 'Company', select=1), 'fields_id': fields.many2one('ir.model.fields', 'Field', ondelete='cascade', required=True, select=1), 'value_float': fields.float('Value'), 'value_integer': fields.integer('Value'), 'value_text': fields.text('Value'), # will contain (char, text) 'value_binary': fields.binary('Value'), 'value_reference': fields.char('Value'), 'value_datetime': fields.datetime('Value'), 'type': fields.selection([ ('char', 'Char'), ('float', 'Float'), ('boolean', 'Boolean'), ('integer', 'Integer'), ('text', 'Text'), ('binary', 'Binary'), ('many2one', 'Many2One'), ('date', 'Date'), ('datetime', 'DateTime'), ('selection', 'Selection'), ], 'Type', required=True, select=1), } _defaults = { 'type': 'many2one', } def _update_values(self, cr, uid, ids, values): value = values.pop('value', None) if not value: return values prop = None type_ = values.get('type') if not type_: if ids: prop = self.browse(cr, uid, ids[0]) type_ = prop.type else: type_ = self._defaults['type'] field = TYPE2FIELD.get(type_) if not field: raise osv.except_osv('Error', 'Invalid type') if field == 'value_reference': if isinstance(value, orm.BaseModel): value = '%s,%d' % (value._name, value.id) elif isinstance(value, (int, long)): field_id = values.get('fields_id') if not field_id: if not prop: raise ValueError() field_id = prop.fields_id else: field_id = self.pool.get('ir.model.fields').browse( cr, uid, field_id) value = '%s,%d' % (field_id.relation, value) values[field] = value return values def write(self, cr, uid, ids, values, context=None): return super(ir_property, self).write(cr, uid, ids, self._update_values(cr, uid, ids, values), context=context) def create(self, cr, uid, values, context=None): return super(ir_property, self).create(cr, uid, self._update_values(cr, uid, None, values), context=context) def get_by_record(self, cr, uid, record, context=None): if record.type in ('char', 'text', 'selection'): return record.value_text elif record.type == 'float': return record.value_float elif record.type == 'boolean': return bool(record.value_integer) elif record.type == 'integer': return record.value_integer elif record.type == 'binary': return record.value_binary elif record.type == 'many2one': if not record.value_reference: return False model, resource_id = record.value_reference.split(',') value = self.pool[model].browse(cr, uid, int(resource_id), context=context) return value.exists() elif record.type == 'datetime': return record.value_datetime elif record.type == 'date': if not record.value_datetime: return False return time.strftime( '%Y-%m-%d', time.strptime(record.value_datetime, '%Y-%m-%d %H:%M:%S')) return False def get(self, cr, uid, name, model, res_id=False, context=None): domain = self._get_domain(cr, uid, name, model, context=context) if domain is not None: domain = [('res_id', '=', res_id)] + domain #make the search with company_id asc to make sure that properties specific to a company are given first nid = self.search(cr, uid, domain, limit=1, order='company_id asc', context=context) if not nid: return False record = self.browse(cr, uid, nid[0], context=context) return self.get_by_record(cr, uid, record, context=context) return False def _get_domain(self, cr, uid, prop_name, model, context=None): context = context or {} cr.execute('select id from 
ir_model_fields where name=%s and model=%s', (prop_name, model)) res = cr.fetchone() if not res: return None cid = context.get('force_company') if not cid: company = self.pool.get('res.company') cid = company._company_default_get(cr, uid, model, res[0], context=context) return [('fields_id', '=', res[0]), ('company_id', 'in', [cid, False])] @api.model def get_multi(self, name, model, ids): """ Read the property field `name` for the records of model `model` with the given `ids`, and return a dictionary mapping `ids` to their corresponding value. """ if not ids: return {} domain = self._get_domain(name, model) if domain is None: return dict.fromkeys(ids, False) # retrieve the values for the given ids and the default value, too refs = {('%s,%s' % (model, id)): id for id in ids} refs[False] = False domain += [('res_id', 'in', list(refs))] # note: order by 'company_id asc' will return non-null values first props = self.search(domain, order='company_id asc') result = {} for prop in props: # for a given res_id, take the first property only id = refs.pop(prop.res_id, None) if id is not None: result[id] = self.get_by_record(prop) # set the default value to the ids that are not in result default_value = result.pop(False, False) for id in ids: result.setdefault(id, default_value) return result @api.model def set_multi(self, name, model, values): """ Assign the property field `name` for the records of model `model` with `values` (dictionary mapping record ids to their value). """ def clean(value): return value.id if isinstance(value, models.BaseModel) else value if not values: return domain = self._get_domain(name, model) if domain is None: raise Exception() # retrieve the default value for the field default_value = clean(self.get(name, model)) # retrieve the properties corresponding to the given record ids self._cr.execute( "SELECT id FROM ir_model_fields WHERE name=%s AND model=%s", (name, model)) field_id = self._cr.fetchone()[0] company_id = self.env.context.get( 'force_company') or self.env['res.company']._company_default_get( model, field_id) refs = {('%s,%s' % (model, id)): id for id in values} props = self.search([ ('fields_id', '=', field_id), ('company_id', '=', company_id), ('res_id', 'in', list(refs)), ]) # modify existing properties for prop in props: id = refs.pop(prop.res_id) value = clean(values[id]) if value == default_value: prop.unlink() elif value != clean(prop.get_by_record(prop)): prop.write({'value': value}) # create new properties for records that do not have one yet for ref, id in refs.iteritems(): value = clean(values[id]) if value != default_value: self.create({ 'fields_id': field_id, 'company_id': company_id, 'res_id': ref, 'name': name, 'value': value, 'type': self.env[model]._fields[name].type, }) @api.model def search_multi(self, name, model, operator, value): """ Return a domain for the records that match the given condition. """ default_matches = False include_zero = False field = self.env[model]._fields[name] if field.type == 'many2one': comodel = field.comodel_name def makeref(value): return value and '%s,%s' % (comodel, value) if operator == "=": value = makeref(value) # if searching properties not set, search those not in those set if value is False: default_matches = True elif operator in ('!=', '<=', '<', '>', '>='): value = makeref(value) elif operator in ('in', 'not in'): value = map(makeref, value) elif operator in ('=like', '=ilike', 'like', 'not like', 'ilike', 'not ilike'): # most probably inefficient... 
but correct target = self.env[comodel] target_names = target.name_search(value, operator=operator, limit=None) target_ids = map(itemgetter(0), target_names) operator, value = 'in', map(makeref, target_ids) elif field.type in ('integer', 'float'): # No record is created in ir.property if the field's type is float or integer with a value # equal to 0. Then to match with the records that are linked to a property field equal to 0, # the negation of the operator must be taken to compute the goods and the domain returned # to match the searched records is just the opposite. if value == 0 and operator == '=': operator = '!=' include_zero = True elif value <= 0 and operator == '>=': operator = '<' include_zero = True elif value <= 0 and operator == '>': operator = '<=' include_zero = True elif value >= 0 and operator == '<=': operator = '>' include_zero = True elif value >= 0 and operator == '<': operator = '>=' include_zero = True # retrieve the properties that match the condition domain = self._get_domain(name, model) if domain is None: raise Exception() props = self.search(domain + [(TYPE2FIELD[field.type], operator, value)]) # retrieve the records corresponding to the properties that match good_ids = [] for prop in props: if prop.res_id: res_model, res_id = prop.res_id.split(',') good_ids.append(int(res_id)) else: default_matches = True if include_zero: return [('id', 'not in', good_ids)] elif default_matches: # exclude all records with a property that does not match all_ids = [] props = self.search(domain + [('res_id', '!=', False)]) for prop in props: res_model, res_id = prop.res_id.split(',') all_ids.append(int(res_id)) bad_ids = list(set(all_ids) - set(good_ids)) return [('id', 'not in', bad_ids)] else: return [('id', 'in', good_ids)]
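# _update_values() above stores a many2one property value in 'value_reference'
# as the string '<model>,<id>', and get_by_record() splits it back to locate
# the record.  A standalone restatement of that round trip; function names are
# illustrative.
def serialize_reference(model, res_id):
    return '%s,%d' % (model, res_id)

def deserialize_reference(value_reference):
    model, res_id = value_reference.split(',')
    return model, int(res_id)

# Example: serialize_reference('res.partner', 42)   -> 'res.partner,42'
#          deserialize_reference('res.partner,42')  -> ('res.partner', 42)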
# # #You should have received a copy of the GNU Affero General Public License # #along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################### from openerp.osv import orm, fields FISCAL_POSITION_COLUMNS = { 'name': fields.char('Fiscal Position', size=128, required=True), 'fiscal_category_id': fields.many2one( 'l10n_br_account.fiscal.category', 'Categoria Fiscal'), 'fiscal_category_fiscal_type': fields.related( 'fiscal_category_id', 'fiscal_type', type='char', readonly=True, relation='l10n_br_account.fiscal.category', store=True, string='Fiscal Type'), 'type': fields.selection([('input', 'Entrada'), ('output', 'Saida')], 'Tipo'), 'type_tax_use': fields.selection( [('sale', 'Sale'), ('purchase', 'Purchase'), ('all', 'All')], 'Tax Application'), 'inv_copy_note': fields.boolean(u'Copiar Observação na Nota Fiscal'), 'asset_operation': fields.boolean(u'Operação de Aquisição de Ativo', help=u"""Caso seja marcada essa opção, será incluido o IPI na base de calculo do ICMS."""), 'state': fields.selection([('draft', u'Rascunho'), ('review', u'Revisão'), ('approved', u'Aprovada'), ('unapproved', u'Não Aprovada')], 'Status', readonly=True, track_visibility='onchange', select=True), } FISCAL_POSITION_DEFAULTS = { 'state': 'draft',
class kg_partner(osv.osv): _name = "res.partner" _inherit = "res.partner" _description = "Partner Managment" def _get_modify(self, cr, uid, ids, field_name, arg, context=None): res={} if field_name == 'modify': for h in self.browse(cr, uid, ids, context=None): res[h.id] = 'no' cr.execute(""" select * from (SELECT tc.table_schema, tc.constraint_name, tc.table_name, kcu.column_name, ccu.table_name AS foreign_table_name, ccu.column_name AS foreign_column_name FROM information_schema.table_constraints tc JOIN information_schema.key_column_usage kcu ON tc.constraint_name = kcu.constraint_name JOIN information_schema.constraint_column_usage ccu ON ccu.constraint_name = tc.constraint_name WHERE constraint_type = 'FOREIGN KEY' AND ccu.table_name='%s') as sam """ %('res_partner')) data = cr.dictfetchall() if data: for var in data: data = var chk_sql = 'Select COALESCE(count(*),0) as cnt from '+str(data['table_name'])+' where '+data['column_name']+' = '+str(ids[0]) cr.execute(chk_sql) out_data = cr.dictfetchone() if out_data: if out_data['cnt'] > 0: res[h.id] = 'yes' return res _columns = { 'city_id' : fields.many2one('res.city', 'City'), 'tin_no' : fields.char('TIN'), 'vat_no' : fields.char('VAT'), 'pan_no' : fields.char('PAN'), 'tan_no' : fields.char('TAN'), 'cst_no' : fields.char('CST'), 'gst_no' : fields.char('GST'), 'supply_type': fields.selection([('material','Material'),('service','Service'),('contractor','Contractor'),('labour','Labour'),('all','All')],'Supply Type'), 'company_type': fields.selection([('individual','Individual'),('company','Company'),('trust','Trust')],'Type'), 'tds': fields.selection([('yes','Yes'),('no','No')],'TDS Applicable'), 'grade': fields.selection([('a','A'),('b','B'),('c','C')],'Grade'), 'payment_id': fields.many2one('kg.payment.master','Payment Terms'), 'language': fields.selection([('tamil', 'Tamil'),('english', 'English'),('hindi', 'Hindi'),('malayalam', 'Malayalam'),('others','Others')],'Preferred Language'), 'cheque_in_favour': fields.char('Cheque in Favor Of'), 'advance_limit': fields.float('Credit Limit'), 'transport_id': fields.many2one('kg.transport','Transport'), 'contact_person': fields.char('Contact Person', size=128), 'landmark': fields.char('Landmark', size=128), #~ 'partner_state': fields.selection([('draft','Draft'),('confirm','WFA'),('approve','Approved'),('reject','Rejected'),('cancel','Cancelled')],'Status'), 'group_flag': fields.boolean('Is Group Company'), 'delivery_id': fields.many2one('kg.delivery.master','Delivery Type'), #'child_ids': fields.one2many('res.partner', 'parent_id', 'Contacts', domain=[('active','=',True)]), 'con_designation': fields.char('Designation'), 'con_whatsapp': fields.char('Whatsapp No'), #~ 'acc_number': fields.char('Whatsapp No'), #~ 'bank_name': fields.char('Whatsapp No'), #~ 'bank_bic': fields.char('Whatsapp No'), 'delivery_ids':fields.one2many('kg.delivery.address', 'src_id', 'Delivery Address'), 'billing_ids':fields.one2many('kg.billing.address', 'bill_id', 'Billing Address'), 'consult_ids':fields.one2many('kg.consultant.fee', 'consult_id', 'Consultant Fees'), 'dealer': fields.boolean('Dealer'), 'economic_category': fields.selection([('budget','Budget'),('loyalty','Loyalty')],'Economic Category'), 'sector': fields.selection([('cp','CP'),('ip','IP'),('both','Both')],'Marketing Division'), 'industry_id': fields.many2one('kg.industry.master','Sector'), 'dealer_id': fields.many2one('res.partner','Dealer Name',domain=[('dealer','=',True)]), 'remark': fields.text('Approve/Reject'), 'cancel_remark': fields.text('Cancel 
Remarks'), 'modify': fields.function(_get_modify, string='Modify', method=True, type='char', size=3), 'user_ref_id': fields.many2one('res.users','User Name'), 'adhar_id': fields.char('Adhar ID'), 'contractor': fields.boolean('Contractor'), 'tin_flag': fields.boolean('TIN Flag'), 'mobile_2': fields.char('Mobile2',size=12), 'email_applicable': fields.selection([('yes','Yes'),('no','No')],'Email Applicable'), 'sms_applicable': fields.selection([('yes','Yes'),('no','No')],'SMS Applicable'), ## Entry Info 'creation_date': fields.datetime('Created Date',readonly=True), 'created_by': fields.many2one('res.users', 'Created by',readonly=True), 'confirmed_date': fields.datetime('Confirmed Date',readonly=True), 'confirmed_by': fields.many2one('res.users','Confirmed By',readonly=True), 'rej_user_id': fields.many2one('res.users', 'Rejected By', readonly=True), 'reject_date': fields.datetime('Reject Date', readonly=True), 'approved_date': fields.datetime('Approved Date',readonly=True), 'approved_by': fields.many2one('res.users','Approved By',readonly=True), 'cancel_date': fields.datetime('Cancelled Date', readonly=True), 'cancel_user_id': fields.many2one('res.users', 'Cancelled By', readonly=True), 'updated_date': fields.datetime('Last Updated Date',readonly=True), 'updated_by': fields.many2one('res.users','Last Updated By',readonly=True), } _defaults = { 'is_company': True, 'creation_date': lambda * a: time.strftime('%Y-%m-%d %H:%M:%S'), 'created_by': lambda obj, cr, uid, context: uid, 'partner_state': 'draft', 'modify': 'no', 'tin_flag': False, 'company_type': 'company', } def onchange_city(self, cr, uid, ids, city_id, context=None): if city_id: state_id = self.pool.get('res.city').browse(cr, uid, city_id, context).state_id.id return {'value':{'state_id':state_id}} return {} def onchange_zip(self,cr,uid,ids,zip,context=None): if len(str(zip)) in (6,7,8): value = {'zip':zip} else: raise osv.except_osv(_('Check zip number !!'), _('zip should contain 6-8 digit numerics. Else system not allow to save. 
!!')) if zip.isdigit() == False: raise osv.except_osv(_('Check zip number !!'), _('Please enter numeric values !!')) return {'value': value} #~ def onchange_tin_cst(self,cr,uid,ids,tin_no,cst_no,context=None): #~ if tin_no: #~ if len(str(tin_no)) == 11: #~ value = {'tin_no':tin_no} #~ else: #~ raise osv.except_osv(_('Check TIN number !!'), #~ _('Please enter 11 digit number !!')) #~ if cst_no: #~ if len(str(cst_no)) == 11: #~ value = {'cst_no':cst_no} #~ else: #~ raise osv.except_osv(_('Check CST number !!'), #~ _('Please enter 11 digit number !!')) #~ return {'value': value} def confirm_partner(self, cr, uid, ids, context=None): rec = self.browse(cr, uid, ids[0]) if rec.partner_state == 'draft': self.write(cr, uid, ids, {'partner_state': 'confirm','confirmed_by':uid,'confirmed_date': time.strftime('%Y-%m-%d %H:%M:%S')}) return True def reject_partner(self, cr, uid, ids, context=None): rec = self.browse(cr, uid, ids[0]) if rec.partner_state == 'confirm': if rec.remark: self.write(cr, uid, ids, {'partner_state': 'reject','update_user_id':uid,'reject_date': time.strftime('%Y-%m-%d %H:%M:%S')}) else: raise osv.except_osv(_('Rejection remark is must !!'), _('Enter rejection remark in remark field !!')) return True def approve_partner(self, cr, uid, ids, context=None): rec = self.browse(cr, uid, ids[0]) if rec.partner_state == 'confirm': self.write(cr, uid, ids, {'partner_state': 'approve','approved_by':uid,'approved_date': time.strftime('%Y-%m-%d %H:%M:%S')}) return True def entry_draft(self,cr,uid,ids,context=None): rec = self.browse(cr, uid, ids[0]) if rec.partner_state == 'approve': self.write(cr, uid, ids, {'partner_state': 'draft'}) return True def entry_cancel(self,cr,uid,ids,context=None): rec = self.browse(cr,uid,ids[0]) if rec.partner_state == 'approve': if rec.cancel_remark: self.write(cr, uid, ids, {'partner_state': 'cancel','cancel_user_id': uid, 'cancel_date': time.strftime('%Y-%m-%d %H:%M:%S')}) else: raise osv.except_osv(_('Cancel remark is must !!'), _('Enter the remarks in Cancel remarks field !!')) return True def unlink(self,cr,uid,ids,context=None): unlink_ids = [] for rec in self.browse(cr,uid,ids): if rec.partner_state != 'draft': raise osv.except_osv(_('Warning!'), _('You can not delete this entry !!')) else: unlink_ids.append(rec.id) return osv.osv.unlink(self, cr, uid, unlink_ids, context=context) def write(self, cr, uid, ids, vals, context=None): print"valsssssS",vals #if len(str(vals['zip'])) == 6: # pass #else: # raise osv.except_osv(_('Check zip number !!'), # _('Please enter six digit number !!')) vals.update({'updated_date': time.strftime('%Y-%m-%d %H:%M:%S'),'updated_by':uid}) return super(kg_partner, self).write(cr, uid, ids, vals, context) def _check_zip(self, cr, uid, ids, context=None): rec = self.browse(cr, uid, ids[0]) if rec.zip: if len(str(rec.zip)) in (6,7,8) and rec.zip.isdigit() == True: return True else: return True return False def _check_tin(self, cr, uid, ids, context=None): rec = self.browse(cr, uid, ids[0]) if rec.tin_no: if len(str(rec.tin_no)) == 11 and rec.tin_no.isdigit() == True: return True else: return True return False def _check_cst(self, cr, uid, ids, context=None): rec = self.browse(cr, uid, ids[0]) if rec.cst_no: if len(str(rec.cst_no)) == 11 and rec.cst_no.isdigit() == True: return True else: return True return False def _check_vat(self, cr, uid, ids, context=None): rec = self.browse(cr, uid, ids[0]) if rec.vat_no: if len(str(rec.vat_no)) == 15: return True else: return True return False def _check_phone(self, cr, uid, ids, 
context=None): rec = self.browse(cr, uid, ids[0]) if rec.phone: if len(str(rec.phone)) in (8,9,10,11,12,13,14,15) and rec.phone.isdigit() == True: return True else: return True return False def _validate_email(self, cr, uid, ids, context=None): rec = self.browse(cr,uid,ids[0]) if rec.email==False: return True else: if re.match("^.+\\@(\\[?)[a-zA-Z0-9\\-\\.]+\\.([a-zA-Z]{2,3}|[0-9]{1,3})(\\]?)$", rec.email) != None: return True else: raise osv.except_osv('Invalid Email', 'Please enter a valid email address') def _check_website(self, cr, uid, ids, context=None): rec = self.browse(cr, uid, ids[0]) if rec.website != False: #~ if re.match('www?.(?:www)?(?:[\w-]{2,255}(?:\.\w{2,6}){1,2})(?:/[\w&%?#-]{1,300})?',rec.website): if re.match('www.(?:www)?(?:[\w-]{2,255}(?:\.\w{2,6}){1,2})(?:/[\w&%?#-]{1,300})?',rec.website): return True else: return False return True def _check_ifsc(self, cr, uid, ids, context=None): rec = self.browse(cr, uid, ids[0]) if rec.bank_ids: for item in rec.bank_ids: if item.bank_bic: if len(str(item.bank_bic)) == 11: return True else: return True else: return True return False def _check_acc_no(self, cr, uid, ids, context=None): rec = self.browse(cr, uid, ids[0]) if rec.bank_ids: for item in rec.bank_ids: if item.acc_number: if len(str(item.acc_number)) in (6,7,8,9,10,11,12,13,14,15,16,17,18) and item.acc_number.isdigit() == True: return True else: return True else: return True return False def _check_mobile_no(self, cr, uid, ids, context=None): rec = self.browse(cr, uid, ids[0]) if rec.mobile: if len(str(rec.mobile)) in (10,11,12) and rec.mobile.isdigit() == True: return True else: return True return False def _name_validate(self, cr, uid,ids, context=None): rec = self.browse(cr,uid,ids[0]) res = True data='' if rec.name: partner_name = rec.name name=partner_name.upper() if rec.customer == True: cr.execute(""" select upper(name) from res_partner where upper(name) = '%s' and customer = True """ %(name)) data = cr.dictfetchall() elif rec.supplier == True: cr.execute(""" select upper(name) from res_partner where upper(name) = '%s' and supplier = True """ %(name)) data = cr.dictfetchall() elif rec.dealer == True: cr.execute(""" select upper(name) from res_partner where upper(name) = '%s' and dealer = True """ %(name)) data = cr.dictfetchall() if len(data) > 1: res = False else: res = True return res def _unique_tin(self, cr, uid,ids, context=None): rec = self.browse(cr,uid,ids[0]) res = True if rec.tin_no: tin_no = rec.tin_no name = tin_no.upper() cr.execute(""" select tin_no from res_partner where tin_no = '%s' """ %(name)) data = cr.dictfetchall() if len(data) > 1: res = False else: res = True return res def _spl_name(self, cr, uid, ids, context=None): rec = self.browse(cr, uid, ids[0]) if rec.name: name_special_char = ''.join(c for c in rec.name if c in '!@#$%^~*{}?+/=') if name_special_char: raise osv.except_osv(_('Warning!'), _('Special Character Not Allowed in Name!')) if rec.adhar_id: adhar_special_char = ''.join(c for c in rec.adhar_id if c in '!@#$%^~*{}?+/=') if adhar_special_char: raise osv.except_osv(_('Warning!'), _('Special Character Not Allowed in Adhar ID!')) if rec.pan_no: pan_special_char = ''.join(c for c in rec.pan_no if c in '!@#$%^~*{}?+/=') if pan_special_char: raise osv.except_osv(_('Warning!'), _('Special Character Not Allowed in PAN!')) if rec.gst_no: gst_special_char = ''.join(c for c in rec.gst_no if c in '!@#$%^~*{}?+/=') if gst_special_char: raise osv.except_osv(_('Warning!'), _('Special Character Not Allowed in GST!')) if rec.tan_no: 
tan_special_char = ''.join(c for c in rec.tan_no if c in '!@#$%^~*{}?+/=') if tan_special_char: raise osv.except_osv(_('Warning!'), _('Special Character Not Allowed in TAN!')) if rec.cst_no: cst_special_char = ''.join(c for c in rec.cst_no if c in '!@#$%^~*{}?+/=') if cst_special_char: raise osv.except_osv(_('Warning!'), _('Special Character Not Allowed in CST!')) if rec.vat_no: vat_special_char = ''.join(c for c in rec.vat_no if c in '!@#$%^~*{}?+/=') if vat_special_char: raise osv.except_osv(_('Warning!'), _('Special Character Not Allowed in VAT!')) if rec.cheque_in_favour: cheque_special_char = ''.join(c for c in rec.cheque_in_favour if c in '!@#$%^~*{}?+/=') if cheque_special_char: raise osv.except_osv(_('Warning!'), _('Special Character Not Allowed in Cheque in Favour Of!')) if rec.contact_person: contact_special_char = ''.join(c for c in rec.contact_person if c in '!@#$%^~*{}?+/=') if contact_special_char: raise osv.except_osv(_('Warning!'), _('Special Character Not Allowed in Contact Person!')) return True else: return True return False _constraints = [ (_check_zip,'ZIP should contain 6-8 numeric digits. Otherwise the record cannot be saved.',['ZIP']), (_check_tin,'TIN No. should contain 11 numeric digits. Otherwise the record cannot be saved.',['TIN']), (_check_cst,'CST No. should contain 11 numeric digits. Otherwise the record cannot be saved.',['CST']), (_check_vat,'VAT No. should contain 15 characters. Otherwise the record cannot be saved.',['VAT']), (_validate_email,'Check Email !',['']), (_check_website,'Check Website !',['Website']), (_check_phone,'Phone No. should contain 8-15 numeric digits. Otherwise the record cannot be saved.',['Phone']), (_check_ifsc,'IFSC should contain 11 characters. Otherwise the record cannot be saved.',['IFSC']), (_check_acc_no,'A/C No. should contain 6-18 numeric digits. Otherwise the record cannot be saved.',['A/C No.']), (_check_mobile_no,'Mobile No. should contain 10-12 numeric digits. Otherwise the record cannot be saved.',['Mobile']), (_name_validate, 'Name must be unique !!', ['Name']), (_unique_tin, 'TIN must be unique !!', ['TIN']), (_spl_name, 'Special Character Not Allowed!', ['']), ]
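# A minimal sketch (hypothetical helper name, assuming strict enforcement is what
# the messages in the _constraints list above intend) of how a validation helper
# could be written so that a malformed value actually blocks the save; as written,
# helpers such as _check_zip return True whether or not the value matches the
# expected format, so the corresponding constraint message is never raised.
#
# def _check_zip_strict(self, cr, uid, ids, context=None):
#     for rec in self.browse(cr, uid, ids, context=context):
#         if rec.zip and not (rec.zip.isdigit() and len(rec.zip) in (6, 7, 8)):
#             return False
#     return True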
res = {} if logistic_company_id: logistic_company = self.pool.get('logistic.company').browse(cr, uid, logistic_company_id, context=context) res = {'value': {'ship_company_code': logistic_company.ship_company_code,'sale_account_id':logistic_company.ship_account_id.id}} return res def _get_logis_company(self, cr, uid, context=None): if context is None: context = {} user_rec = self.pool.get('res.users').browse(cr ,uid, uid, context) logis_company = self.pool.get('logistic.company').search(cr, uid, []) return logis_company and logis_company[0] or False _columns= { 'logis_company': fields.many2one('logistic.company', 'Logistic Company', help='Name of the Logistics company providing the shipper services.'), 'ship_company_code': fields.selection(_get_company_code, 'Ship Company', method=True, size=64), 'rate_selection': fields.selection([('rate_card', 'Rate Card'), ('rate_request', 'Rate Request')], 'Ship Rate Method'), 'partner_order_id': fields.many2one('res.partner', 'Ordering Contact', readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, help="The name and address of the contact who requested the order or quotation."), } _defaults = { 'partner_order_id': lambda self, cr, uid, context: context.get('partner_id', False) and self.pool.get('res.partner').address_get(cr, uid, [context['partner_id']], ['order_contact'])['order_contact'], 'logis_company': _get_logis_company, } def onchange_partner_id(self, cr, uid, ids, part, context=None): addr = {} if part: addr = super(sale_order, self).onchange_partner_id(cr, uid, ids, part, context) addr['value'].update({'partner_order_id': part}) return addr
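# A minimal usage sketch (domain and warning values are illustrative only) of the
# dictionary shape that OpenERP 7 on_change handlers such as onchange_partner_id
# above are expected to return: 'value' updates form fields, 'domain' restricts
# relational fields, and 'warning' pops up a dialog on the client side.
#
# return {
#     'value': {'partner_order_id': part},
#     'domain': {'partner_order_id': [('parent_id', '=', part)]},
#     'warning': {'title': 'Check the ordering contact',
#                 'message': 'The selected partner has no ordering contact.'},
# }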
class openupgrade_analysis_wizard(TransientModel): _name = 'openupgrade.analysis.wizard' _description = 'OpenUpgrade Analysis Wizard' _columns = { 'server_config': fields.many2one('openupgrade.comparison.config', 'Configuration', required=True), 'state': fields.selection([('init', 'Init'), ('ready', 'Ready')], 'State', readonly=True), 'log': fields.text('Log'), 'write_files': fields.boolean('Write files', help='Write analysis files to the module directories'), } _defaults = { 'state': 'init', 'write_files': True, } def get_communication(self, cr, uid, ids, context=None): """ Retrieve both sets of database representations, perform the comparison and register the resulting change set """ if context is None: context = {} def write_file(module, version, contents, filename='openupgrade_analysis.txt'): module_path = get_module_path(module) if not module_path: return "ERROR: could not find module path:\n" full_path = os.path.join(module_path, 'migrations', version) if not os.path.exists(full_path): try: os.makedirs(full_path) except os.error: return "ERROR: could not create migrations directory:\n" logfile = os.path.join(full_path, filename) try: f = open(logfile, 'w') except Exception: return "ERROR: could not open file %s for writing:\n" % logfile f.write(contents) f.close() return None wizard = self.browse(cr, uid, ids[0], context=context) # Retrieve connection and access methods conf_obj = self.pool.get('openupgrade.comparison.config') connection = conf_obj.get_connection(cr, uid, [wizard.server_config.id], context=context) remote_record_obj = connection.get_model('openupgrade.record') local_record_obj = self.pool.get('openupgrade.record') # Retrieve field representations and compare remote_records = remote_record_obj.field_dump(context) local_records = local_record_obj.field_dump(cr, uid, context) res = compare.compare_sets(remote_records, local_records) # Retrieve xml id representations and compare fields = ['module', 'model', 'name'] local_xml_record_ids = local_record_obj.search( cr, uid, [('type', '=', 'xmlid')]) remote_xml_record_ids = remote_record_obj.search([('type', '=', 'xmlid')]) local_xml_records = [ dict([(field, x[field]) for field in fields]) for x in local_record_obj.read( cr, uid, local_xml_record_ids, fields) ] remote_xml_records = [ dict([(field, x[field]) for field in fields]) for x in remote_record_obj.read(remote_xml_record_ids, fields) ] res_xml = compare.compare_xml_sets(remote_xml_records, local_xml_records) affected_modules = list( set(record['module'] for record in remote_records + local_records + remote_xml_records + local_xml_records)) # reorder and output the result keys = ['general'] + affected_modules module_obj = self.pool.get('ir.module.module') module_ids = module_obj.search(cr, uid, [('state', '=', 'installed')]) modules = dict([(x['name'], x) for x in module_obj.read(cr, uid, module_ids)]) general = '' for key in keys: contents = "---Fields in module '%s'---\n" % key if key in res: contents += '\n'.join( [unicode(line) for line in sorted(res[key])]) if res[key]: contents += '\n' contents += "---XML records in module '%s'---\n" % key if key in res_xml: contents += '\n'.join([unicode(line) for line in res_xml[key]]) if res_xml[key]: contents += '\n' if key not in res and key not in res_xml: contents += '-- nothing has changed in this module' if key == 'general': general += contents continue if key not in modules: general += ( "ERROR: module not in list of installed modules:\n" + contents) continue if wizard.write_files: error = write_file(key, 
modules[key]['installed_version'], contents) if error: general += error general += contents else: general += contents # Store the general log in as many places as possible ;-) if wizard.write_files and 'base' in modules: write_file('base', modules['base']['installed_version'], general, 'openupgrade_general_log.txt') self.pool.get('openupgrade.comparison.config').write( cr, uid, wizard.server_config.id, {'last_log': general}) self.write(cr, uid, ids, {'state': 'ready', 'log': general}) result = { 'name': self._description, 'view_type': 'form', 'view_mode': 'form', 'res_model': 'openupgrade.analysis.wizard', 'domain': [], 'context': context, 'type': 'ir.actions.act_window', # 'target': 'new', 'res_id': ids[0], } return result
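# For reference, a short sketch (module name and version are hypothetical) of the
# file layout produced by write_file() in get_communication() above: one analysis
# file per affected module, plus the aggregated log stored under the 'base' module.
#
#   <addons>/<module>/migrations/<installed_version>/openupgrade_analysis.txt
#   <addons>/base/migrations/<base_version>/openupgrade_general_log.txt
#
# e.g. <addons>/sale/migrations/7.0.1.0/openupgrade_analysis.txt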
finally: image_file.close() else: self._logo_image = base64.encodestring(im.read()) return self._logo_image def _get_image_fn(self, cr, uid, ids, name, args, context=None): image = self._get_image(cr, uid, context) return dict.fromkeys(ids, image) # ok to use .fromkeys() as the image is same for all _columns = { 'printer_ids':fields.one2many('aeroo.printers.temp', 'install_id', 'Printers'), 'config_logo': fields.function(_get_image_fn, string='Image', type='binary', method=True), 'state':fields.selection([ ('init','Init'), ('done','Done'), ],'State', select=True, readonly=True), } def default_get(self, cr, uid, fields, context=None): printers_obj = self.pool.get('aeroo.printers') data = super(aeroo_printer_installer, self).default_get(cr, uid, fields, context=context) conn = cups.Connection() printers = conn.getPrinters() installed_ids = printers_obj.search(cr, 1, ['|',('active','=',False),('active','=',True)], context=context) printers_installed = printers_obj.read(cr, uid, installed_ids, context=context) new_printers = list(set(printers.keys()).difference(set(map(lambda p: p['code'], printers_installed)))) data['printer_ids'] = [] for p in printers_installed:
class hr_expense(osv.osv): _inherit = 'hr.expense.expense' _columns = { 'period_id': fields.many2one('account.period', 'Force Period'), 'state': fields.selection([ ('draft', 'New'), ('cancelled', 'Refused'), ('confirm', 'Waiting Approval'), ('accepted', 'Approved'), ('done', 'Waiting Payment'), ('paid', 'Paid'), ], 'Status', readonly=True, track_visibility='onchange', help='When the expense request is created the status is \'Draft\'.\n It is confirmed by the user and request is sent to admin, the status is \'Waiting Confirmation\'.\ \nIf the admin accepts it, the status is \'Accepted\'.\n If the accounting entries are made for the expense request, the status is \'Waiting Payment\'.'), } def action_register_payment(self, cr, uid, ids, context=None): expense_id = ids and ids[0] or False return { 'name': _('Expense Payment'), 'view_type': 'form', 'view_mode': 'form', 'res_model': 'hr.expense.payment', 'type': 'ir.actions.act_window', 'nodestroy': True, 'target': 'new', 'context': { 'expense_id': expense_id, }, } def action_receipt_create(self, cr, uid, ids, context=None): ''' main function that is called when trying to create the accounting entries related to an expense ''' move_obj = self.pool.get('account.move') for exp in self.browse(cr, uid, ids, context=context): if not exp.employee_id.account_payable_id.id: raise osv.except_osv(_('Error!'), _('The employee must have a payable account set on his profile.')) company_currency = exp.company_id.currency_id.id diff_currency_p = exp.currency_id.id <> company_currency #create the move that will contain the accounting entries cooked_data = self.account_move_get(cr, uid, exp.id, context=context) cooked_data['period_id'] = exp.period_id.id move_id = move_obj.create(cr, uid, cooked_data, context=context) #one account.move.line per expense line (+taxes..) eml = self.move_line_get(cr, uid, exp.id, context=context) #create one more move line, a counterline for the total on payable account total, total_currency, eml = self.compute_expense_totals(cr, uid, exp, company_currency, exp.name, eml, context=context) acc = exp.employee_id.account_payable_id.id eml.append({ 'type': 'dest', 'name': '/', 'price': total, 'account_id': acc, 'date_maturity': exp.date_confirm, 'amount_currency': diff_currency_p and total_currency or False, 'currency_id': diff_currency_p and exp.currency_id.id or False, 'ref': exp.name }) #convert eml into an osv-valid format lines = map(lambda x:(0,0,self.line_get_convert(cr, uid, x, exp.employee_id.address_home_id, exp.date_confirm, context=context)), eml) move_obj.write(cr, uid, [move_id], {'line_id': lines}, context=context) self.write(cr, uid, ids, {'account_move_id': move_id, 'state': 'done'}, context=context) return True def move_line_get_item(self, cr, uid, line, context=None): company = line.expense_id.company_id property_obj = self.pool.get('ir.property') acc = line.expense_account_id employee_id = line.expense_id.employee_id return { 'type':'src', 'name': line.name.split('\n')[0][:64], 'price_unit':line.unit_amount, 'quantity':line.unit_quantity, 'price':line.total_amount, 'account_id':acc.id, 'product_id':line.product_id.id, 'uos_id':line.uom_id.id, 'account_analytic_id':line.analytic_account.id, }
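# A minimal sketch (amounts are illustrative) of the account.move that
# action_receipt_create() above assembles for an expense sheet: one 'src' line per
# expense line on the expense account, plus a single 'dest' counterpart for the
# grand total on the employee's payable account, so the entry balances.
#
#   journal entry (period forced from exp.period_id)
#     src  line   expense account   debit  100.00
#     src  line   expense account   debit   50.00
#     dest line   payable account   credit 150.00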
kip_ids=",".join(map(str,ids)) cr.execute("Select payment_import_id id from kderp_import_payment_line where payment_import_id in (%s) and state='draft'" % (kip_ids)) list_error=[] for new_id in cr.fetchall(): list_error.append(new_id[0]) for id in ids: if id not in list_error: self.write(cr, uid, [id], {'state':'done'}) return True _columns={ 'date':fields.date('Date', required=True, states={'done':[('readonly',True)]}, help="Date of Accounting Import Payment to Supplier to ERP"), 'name':fields.char('Code Import',size=32,required=True,select=True,states={'done':[('readonly',True)]}), 'description':fields.char('Desc.',size=128,states={'done':[('readonly',True)]}), 'import_line':fields.one2many('kderp.import.payment.line','payment_import_id','Details',states={'done':[('readonly',True)]}), 'state':fields.selection([('draft','Draft'),('done','Done')],'State',readonly=True,select=True) } _sql_constraints = [ ('supplier_payment_import_unique',"unique(name)","KDERP Error: The Code Import must be unique !") ] _defaults = { 'state': 'draft', 'date': lambda *a: time.strftime('%Y-%m-%d'), 'name':lambda *a: time.strftime('AISP-%Y%b%d.%H%M') } def load(self, cr, uid, fields, data, context=None): #def import_data(self, cr, uid, fields, datas, mode='init', current_module='', noupdate=False, context=None, filename=None): try: payment_id_pos = fields.index('import_line/payment_number') except:
class purchase_report(osv.osv): _name = "purchase.report" _description = "Purchases Orders" _auto = False _columns = { 'date': fields.datetime('Order Date', readonly=True, help="Date on which this document has been created" ), # TDE FIXME master: rename into date_order 'state': fields.selection([('draft', 'Request for Quotation'), ('confirmed', 'Waiting Vendor Ack'), ('approved', 'Approved'), ('except_picking', 'Shipping Exception'), ('except_invoice', 'Invoice Exception'), ('done', 'Done'), ('cancel', 'Cancelled')], 'Order Status', readonly=True), 'product_id': fields.many2one('product.product', 'Product', readonly=True), 'picking_type_id': fields.many2one('stock.warehouse', 'Warehouse', readonly=True), 'location_id': fields.many2one('stock.location', 'Destination', readonly=True), 'partner_id': fields.many2one('res.partner', 'Vendor', readonly=True), 'pricelist_id': fields.many2one('product.pricelist', 'Pricelist', readonly=True), 'date_approve': fields.date('Date Approved', readonly=True), 'expected_date': fields.date('Expected Date', readonly=True), 'validator': fields.many2one('res.users', 'Validated By', readonly=True), 'product_uom': fields.many2one('product.uom', 'Reference Unit of Measure', required=True), 'company_id': fields.many2one('res.company', 'Company', readonly=True), 'user_id': fields.many2one('res.users', 'Responsible', readonly=True), 'delay': fields.float('Days to Validate', digits=(16, 2), readonly=True), 'delay_pass': fields.float('Days to Deliver', digits=(16, 2), readonly=True), 'quantity': fields.float( 'Product Quantity', readonly=True), # TDE FIXME master: rename into unit_quantity 'price_total': fields.float('Total Price', readonly=True), 'price_average': fields.float('Average Price', readonly=True, group_operator="avg"), 'negociation': fields.float('Purchase-Standard Price', readonly=True, group_operator="avg"), 'price_standard': fields.float('Products Value', readonly=True, group_operator="sum"), 'nbr': fields.integer( '# of Lines', readonly=True), # TDE FIXME master: rename into nbr_lines 'category_id': fields.many2one('product.category', 'Product Category', readonly=True), 'product_tmpl_id': fields.many2one('product.template', 'Product Template', readonly=True), 'country_id': fields.many2one('res.country', 'Partner Country', readonly=True), 'fiscal_position_id': fields.many2one('account.fiscal.position', string='Fiscal Position', oldname='fiscal_position', readonly=True), 'account_analytic_id': fields.many2one('account.analytic.account', 'Analytic Account', readonly=True), 'commercial_partner_id': fields.many2one('res.partner', 'Commercial Entity', readonly=True), } _order = 'date desc, price_total desc' def init(self, cr): tools.sql.drop_view_if_exists(cr, 'purchase_report') cr.execute(""" create or replace view purchase_report as ( select min(l.id) as id, s.date_order as date, l.state, s.date_approve, s.minimum_planned_date as expected_date, s.dest_address_id, s.pricelist_id, s.validator, spt.warehouse_id as picking_type_id, s.partner_id as partner_id, s.create_uid as user_id, s.company_id as company_id, s.fiscal_position_id as fiscal_position_id, l.product_id, p.product_tmpl_id, t.categ_id as category_id, t.uom_id as product_uom, s.location_id as location_id, sum(l.product_qty/u.factor*u2.factor) as quantity, extract(epoch from age(s.date_approve,s.date_order))/(24*60*60)::decimal(16,2) as delay, extract(epoch from age(l.date_planned,s.date_order))/(24*60*60)::decimal(16,2) as delay_pass, count(*) as nbr, sum(l.price_unit*l.product_qty)::decimal(16,2) 
as price_total, avg(100.0 * (l.price_unit*l.product_qty) / NULLIF(ip.value_float*l.product_qty/u.factor*u2.factor, 0.0))::decimal(16,2) as negociation, sum(ip.value_float*l.product_qty/u.factor*u2.factor)::decimal(16,2) as price_standard, (sum(l.product_qty*l.price_unit)/NULLIF(sum(l.product_qty/u.factor*u2.factor),0.0))::decimal(16,2) as price_average, partner.country_id as country_id, partner.commercial_partner_id as commercial_partner_id, analytic_account.id as account_analytic_id from purchase_order_line l join purchase_order s on (l.order_id=s.id) join res_partner partner on s.partner_id = partner.id left join product_product p on (l.product_id=p.id) left join product_template t on (p.product_tmpl_id=t.id) LEFT JOIN ir_property ip ON (ip.name='standard_price' AND ip.res_id=CONCAT('product.template,',t.id) AND ip.company_id=s.company_id) left join product_uom u on (u.id=l.product_uom) left join product_uom u2 on (u2.id=t.uom_id) left join stock_picking_type spt on (spt.id=s.picking_type_id) left join account_analytic_account analytic_account on (l.account_analytic_id = analytic_account.id) group by s.company_id, s.create_uid, s.partner_id, u.factor, s.location_id, l.price_unit, s.date_approve, l.date_planned, l.product_uom, s.minimum_planned_date, s.pricelist_id, s.validator, s.dest_address_id, s.fiscal_position_id, l.product_id, p.product_tmpl_id, t.categ_id, s.date_order, l.state, spt.warehouse_id, u.uom_type, u.category_id, t.uom_id, u.id, u2.factor, partner.country_id, partner.commercial_partner_id, analytic_account.id ) """)
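# A minimal usage sketch (domain and grouping are illustrative) of how the SQL view
# defined in init() above is typically consumed through the ORM, e.g. total
# purchases per vendor for approved orders:
#
# report_obj = self.pool.get('purchase.report')
# totals = report_obj.read_group(
#     cr, uid,
#     [('state', '=', 'approved')],      # domain evaluated against the view
#     ['partner_id', 'price_total'],     # fields to read/aggregate
#     ['partner_id'],                    # group by vendor
#     context=context)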
# GNU Affero General Public License for more details. # # # # You should have received a copy of the GNU Affero General Public License # # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################### import time from openerp.osv import orm, fields from openerp.addons import decimal_precision as dp from .res_company import COMPANY_FISCAL_TYPE, COMPANY_FISCAL_TYPE_DEFAULT FISCAL_RULE_COLUMNS = { "partner_fiscal_type_id": fields.many2one("l10n_br_account.partner.fiscal.type", "Tipo Fiscal do Parceiro"), "fiscal_category_id": fields.many2one("l10n_br_account.fiscal.category", "Categoria"), "fiscal_type": fields.selection(COMPANY_FISCAL_TYPE, u"Regime Tributário", required=True), "revenue_start": fields.float( "Faturamento Inicial", digits_compute=dp.get_precision("Account"), help="Faixa inicial de faturamento bruto" ), "revenue_end": fields.float( "Faturamento Final", digits_compute=dp.get_precision("Account"), help="Faixa final de faturamento bruto" ), } OTHERS_FISCAL_RULE_COLUMNS_TEMPLATE = { "parent_id": fields.many2one("account.fiscal.position.rule.template", "Regra Pai"), "child_ids": fields.one2many("account.fiscal.position.rule.template", "parent_id", "Regras Filhas"), } OTHERS_FISCAL_RULE_COLUMNS = { "parent_id": fields.many2one("account.fiscal.position.rule", "Regra Pai"),
class product_product(osv.osv): _inherit = "product.product" def _stock_move_count(self, cr, uid, ids, field_name, arg, context=None): res = dict([(id, { 'reception_count': 0, 'delivery_count': 0 }) for id in ids]) move_pool = self.pool.get('stock.move') moves = move_pool.read_group(cr, uid, [('product_id', 'in', ids), ('picking_id.type', '=', 'in'), ('state', 'in', ('confirmed', 'assigned', 'pending'))], ['product_id'], ['product_id']) for move in moves: product_id = move['product_id'][0] res[product_id]['reception_count'] = move['product_id_count'] moves = move_pool.read_group(cr, uid, [('product_id', 'in', ids), ('picking_id.type', '=', 'out'), ('state', 'in', ('confirmed', 'assigned', 'pending'))], ['product_id'], ['product_id']) for move in moves: product_id = move['product_id'][0] res[product_id]['delivery_count'] = move['product_id_count'] return res def get_product_accounts(self, cr, uid, product_id, context=None): """ To get the stock input account, stock output account and stock journal related to product. @param product_id: product id @return: dictionary which contains information regarding stock input account, stock output account and stock journal """ if context is None: context = {} product_obj = self.pool.get('product.product').browse(cr, uid, product_id, context=context) stock_input_acc = product_obj.property_stock_account_input and product_obj.property_stock_account_input.id or False if not stock_input_acc: stock_input_acc = product_obj.categ_id.property_stock_account_input_categ and product_obj.categ_id.property_stock_account_input_categ.id or False stock_output_acc = product_obj.property_stock_account_output and product_obj.property_stock_account_output.id or False if not stock_output_acc: stock_output_acc = product_obj.categ_id.property_stock_account_output_categ and product_obj.categ_id.property_stock_account_output_categ.id or False journal_id = product_obj.categ_id.property_stock_journal and product_obj.categ_id.property_stock_journal.id or False account_valuation = product_obj.categ_id.property_stock_valuation_account_id and product_obj.categ_id.property_stock_valuation_account_id.id or False return { 'stock_account_input': stock_input_acc, 'stock_account_output': stock_output_acc, 'stock_journal': journal_id, 'property_stock_valuation_account_id': account_valuation } def do_change_standard_price(self, cr, uid, ids, datas, context=None): """ Changes the Standard Price of Product and creates an account move accordingly. @param datas : dict. 
contain default datas like new_price, stock_output_account, stock_input_account, stock_journal @param context: A standard dictionary @return: """ location_obj = self.pool.get('stock.location') move_obj = self.pool.get('account.move') move_line_obj = self.pool.get('account.move.line') if context is None: context = {} new_price = datas.get('new_price', 0.0) stock_output_acc = datas.get('stock_output_account', False) stock_input_acc = datas.get('stock_input_account', False) journal_id = datas.get('stock_journal', False) move_ids = [] loc_ids = location_obj.search(cr, uid, [('usage', '=', 'internal')]) for product in self.browse(cr, uid, ids, context=context): if product.valuation != 'real_time': continue account_valuation = product.categ_id.property_stock_valuation_account_id account_valuation_id = account_valuation and account_valuation.id or False if not account_valuation_id: raise osv.except_osv( _('Error!'), _('Specify valuation Account for Product Category: %s.') % (product.categ_id.name)) for location in location_obj.browse(cr, uid, loc_ids, context=context): c = context.copy() c.update({'location': location.id, 'compute_child': False}) qty = product.qty_available diff = product.standard_price - new_price if not diff: raise osv.except_osv( _('Error!'), _("No difference between standard price and new price!" )) if qty: company_id = location.company_id and location.company_id.id or False if not company_id: raise osv.except_osv( _('Error!'), _('Please specify company in Location.')) # # Accounting Entries # if not journal_id: journal_id = product.categ_id.property_stock_journal and product.categ_id.property_stock_journal.id or False if not journal_id: raise osv.except_osv(_('Error!'), _('Please define journal '\ 'on the product category: "%s" (id: %d).') % \ (product.categ_id.name, product.categ_id.id,)) move_id = move_obj.create(cr, uid, { 'journal_id': journal_id, 'company_id': company_id }) move_ids.append(move_id) if diff > 0: if not stock_input_acc: stock_input_acc = product.\ property_stock_account_input.id if not stock_input_acc: stock_input_acc = product.categ_id.\ property_stock_account_input_categ.id if not stock_input_acc: raise osv.except_osv(_('Error!'), _('Please define stock input account ' \ 'for this product: "%s" (id: %d).') % \ (product.name, product.id,)) amount_diff = qty * diff move_line_obj.create( cr, uid, { 'name': product.name, 'account_id': stock_input_acc, 'debit': amount_diff, 'move_id': move_id, }) move_line_obj.create( cr, uid, { 'name': product.categ_id.name, 'account_id': account_valuation_id, 'credit': amount_diff, 'move_id': move_id }) elif diff < 0: if not stock_output_acc: stock_output_acc = product.\ property_stock_account_output.id if not stock_output_acc: stock_output_acc = product.categ_id.\ property_stock_account_output_categ.id if not stock_output_acc: raise osv.except_osv(_('Error!'), _('Please define stock output account ' \ 'for this product: "%s" (id: %d).') % \ (product.name, product.id,)) amount_diff = qty * -diff move_line_obj.create( cr, uid, { 'name': product.name, 'account_id': stock_output_acc, 'credit': amount_diff, 'move_id': move_id }) move_line_obj.create( cr, uid, { 'name': product.categ_id.name, 'account_id': account_valuation_id, 'debit': amount_diff, 'move_id': move_id }) self.write(cr, uid, ids, {'standard_price': new_price}) return move_ids def view_header_get(self, cr, user, view_id, view_type, context=None): if context is None: context = {} res = super(product_product, self).view_header_get(cr, user, view_id, view_type, 
context) if res: return res if (context.get('active_id', False)) and (context.get('active_model') == 'stock.location'): return _('Products: ') + self.pool.get('stock.location').browse( cr, user, context['active_id'], context).name return res def get_product_available(self, cr, uid, ids, context=None): """ Finds whether product is available or not in particular warehouse. @return: Dictionary of values """ if context is None: context = {} location_obj = self.pool.get('stock.location') warehouse_obj = self.pool.get('stock.warehouse') shop_obj = self.pool.get('sale.shop') states = context.get('states', []) what = context.get('what', ()) if not ids: ids = self.search(cr, uid, []) res = {}.fromkeys(ids, 0.0) if not ids: return res if context.get('shop', False): warehouse_id = shop_obj.read(cr, uid, int(context['shop']), ['warehouse_id'])['warehouse_id'][0] if warehouse_id: context['warehouse'] = warehouse_id if context.get('warehouse', False): lot_id = warehouse_obj.read(cr, uid, int(context['warehouse']), ['lot_stock_id'])['lot_stock_id'][0] if lot_id: context['location'] = lot_id if context.get('location', False): if type(context['location']) == type(1): location_ids = [context['location']] elif type(context['location']) in (type(''), type(u'')): location_ids = location_obj.search( cr, uid, [('name', 'ilike', context['location'])], context=context) else: location_ids = context['location'] else: location_ids = [] wids = warehouse_obj.search(cr, uid, [], context=context) if not wids: return res for w in warehouse_obj.browse(cr, uid, wids, context=context): location_ids.append(w.lot_stock_id.id) # build the list of ids of children of the location given by id if context.get('compute_child', True): child_location_ids = location_obj.search( cr, uid, [('location_id', 'child_of', location_ids)]) location_ids = child_location_ids or location_ids # this will be a dictionary of the product UoM by product id product2uom = {} uom_ids = [] for product in self.read(cr, uid, ids, ['uom_id'], context=context): product2uom[product['id']] = product['uom_id'][0] uom_ids.append(product['uom_id'][0]) # this will be a dictionary of the UoM resources we need for conversion purposes, by UoM id uoms_o = {} for uom in self.pool.get('product.uom').browse(cr, uid, uom_ids, context=context): uoms_o[uom.id] = uom results = [] results2 = [] from_date = context.get('from_date', False) to_date = context.get('to_date', False) date_str = False date_values = False where = [ tuple(location_ids), tuple(location_ids), tuple(ids), tuple(states) ] if from_date and to_date: date_str = "date>=%s and date<=%s" where.append(tuple([from_date])) where.append(tuple([to_date])) elif from_date: date_str = "date>=%s" date_values = [from_date] elif to_date: date_str = "date<=%s" date_values = [to_date] if date_values: where.append(tuple(date_values)) prodlot_id = context.get('prodlot_id', False) prodlot_clause = '' if prodlot_id: prodlot_clause = ' and prodlot_id = %s ' where += [prodlot_id] # TODO: perhaps merge in one query. 
if 'in' in what: # all moves from a location out of the set to a location in the set cr.execute( 'select sum(product_qty), product_id, product_uom '\ 'from stock_move '\ 'where location_id NOT IN %s '\ 'and location_dest_id IN %s '\ 'and product_id IN %s '\ 'and state IN %s ' + (date_str and 'and '+date_str+' ' or '') +' '\ + prodlot_clause + 'group by product_id,product_uom',tuple(where)) results = cr.fetchall() if 'out' in what: # all moves from a location in the set to a location out of the set cr.execute( 'select sum(product_qty), product_id, product_uom '\ 'from stock_move '\ 'where location_id IN %s '\ 'and location_dest_id NOT IN %s '\ 'and product_id IN %s '\ 'and state in %s ' + (date_str and 'and '+date_str+' ' or '') + ' '\ + prodlot_clause + 'group by product_id,product_uom',tuple(where)) results2 = cr.fetchall() # Get the missing UoM resources uom_obj = self.pool.get('product.uom') uoms = map(lambda x: x[2], results) + map(lambda x: x[2], results2) if context.get('uom', False): uoms += [context['uom']] uoms = filter(lambda x: x not in uoms_o.keys(), uoms) if uoms: uoms = uom_obj.browse(cr, uid, list(set(uoms)), context=context) for o in uoms: uoms_o[o.id] = o #TOCHECK: before change uom of product, stock move line are in old uom. context.update({'raise-exception': False}) # Count the incoming quantities for amount, prod_id, prod_uom in results: amount = uom_obj._compute_qty_obj(cr, uid, uoms_o[prod_uom], amount, uoms_o[context.get('uom', False) or product2uom[prod_id]], context=context) res[prod_id] += amount # Count the outgoing quantities for amount, prod_id, prod_uom in results2: amount = uom_obj._compute_qty_obj(cr, uid, uoms_o[prod_uom], amount, uoms_o[context.get('uom', False) or product2uom[prod_id]], context=context) res[prod_id] -= amount return res def _product_available(self, cr, uid, ids, field_names=None, arg=False, context=None): """ Finds the incoming and outgoing quantity of product. 
@return: Dictionary of values """ if not field_names: field_names = [] if context is None: context = {} res = {} for id in ids: res[id] = {}.fromkeys(field_names, 0.0) for f in field_names: c = context.copy() if f == 'qty_available': c.update({'states': ('done', ), 'what': ('in', 'out')}) if f == 'virtual_available': c.update({ 'states': ('confirmed', 'waiting', 'assigned', 'done'), 'what': ('in', 'out') }) if f == 'incoming_qty': c.update({ 'states': ('confirmed', 'waiting', 'assigned'), 'what': ('in', ) }) if f == 'outgoing_qty': c.update({ 'states': ('confirmed', 'waiting', 'assigned'), 'what': ('out', ) }) stock = self.get_product_available(cr, uid, ids, context=c) for id in ids: res[id][f] = stock.get(id, 0.0) return res _columns = { 'reception_count': fields.function(_stock_move_count, string="Reception", type='integer', multi='pickings'), 'delivery_count': fields.function(_stock_move_count, string="Delivery", type='integer', multi='pickings'), 'qty_available': fields.function(_product_available, multi='qty_available', type='float', digits_compute=dp.get_precision('Product Unit of Measure'), string='Quantity On Hand', help="Current quantity of products.\n" "In a context with a single Stock Location, this includes " "goods stored at this Location, or any of its children.\n" "In a context with a single Warehouse, this includes " "goods stored in the Stock Location of this Warehouse, or any " "of its children.\n" "In a context with a single Shop, this includes goods " "stored in the Stock Location of the Warehouse of this Shop, " "or any of its children.\n" "Otherwise, this includes goods stored in any Stock Location " "with 'internal' type."), 'virtual_available': fields.function(_product_available, multi='qty_available', type='float', digits_compute=dp.get_precision('Product Unit of Measure'), string='Forecasted Quantity', help="Forecast quantity (computed as Quantity On Hand " "- Outgoing + Incoming)\n" "In a context with a single Stock Location, this includes " "goods stored in this location, or any of its children.\n" "In a context with a single Warehouse, this includes " "goods stored in the Stock Location of this Warehouse, or any " "of its children.\n" "In a context with a single Shop, this includes goods " "stored in the Stock Location of the Warehouse of this Shop, " "or any of its children.\n" "Otherwise, this includes goods stored in any Stock Location " "with 'internal' type."), 'incoming_qty': fields.function(_product_available, multi='qty_available', type='float', digits_compute=dp.get_precision('Product Unit of Measure'), string='Incoming', help="Quantity of products that are planned to arrive.\n" "In a context with a single Stock Location, this includes " "goods arriving to this Location, or any of its children.\n" "In a context with a single Warehouse, this includes " "goods arriving to the Stock Location of this Warehouse, or " "any of its children.\n" "In a context with a single Shop, this includes goods " "arriving to the Stock Location of the Warehouse of this " "Shop, or any of its children.\n" "Otherwise, this includes goods arriving to any Stock " "Location with 'internal' type."), 'outgoing_qty': fields.function(_product_available, multi='qty_available', type='float', digits_compute=dp.get_precision('Product Unit of Measure'), string='Outgoing', help="Quantity of products that are planned to leave.\n" "In a context with a single Stock Location, this includes " "goods leaving this Location, or any of its children.\n" "In a context with a single Warehouse, this 
includes " "goods leaving the Stock Location of this Warehouse, or " "any of its children.\n" "In a context with a single Shop, this includes goods " "leaving the Stock Location of the Warehouse of this " "Shop, or any of its children.\n" "Otherwise, this includes goods leaving any Stock " "Location with 'internal' type."), 'track_production': fields.boolean('Track Manufacturing Lots', help="Forces to specify a Serial Number for all moves containing this product and generated by a Manufacturing Order"), 'track_incoming': fields.boolean('Track Incoming Lots', help="Forces to specify a Serial Number for all moves containing this product and coming from a Supplier Location"), 'track_outgoing': fields.boolean('Track Outgoing Lots', help="Forces to specify a Serial Number for all moves containing this product and going to a Customer Location"), 'location_id': fields.dummy(string='Location', relation='stock.location', type='many2one'), 'warehouse_id': fields.dummy(string='Warehouse', relation='stock.warehouse', type='many2one'), 'valuation':fields.selection([('manual_periodic', 'Periodical (manual)'), ('real_time','Real Time (automated)'),], 'Inventory Valuation', help="If real-time valuation is enabled for a product, the system will automatically write journal entries corresponding to stock moves." \ "The inventory variation account set on the product category will represent the current inventory value, and the stock input and stock output account will hold the counterpart moves for incoming and outgoing products." , required=True), } _defaults = { 'valuation': 'manual_periodic', } def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False, submenu=False): res = super(product_product, self).fields_view_get(cr, uid, view_id, view_type, context, toolbar=toolbar, submenu=submenu) if context is None: context = {} if ('location' in context) and context['location']: location_info = self.pool.get('stock.location').browse( cr, uid, context['location']) fields = res.get('fields', {}) if fields: if location_info.usage == 'supplier': if fields.get('virtual_available'): res['fields']['virtual_available']['string'] = _( 'Future Receptions') if fields.get('qty_available'): res['fields']['qty_available']['string'] = _( 'Received Qty') if location_info.usage == 'internal': if fields.get('virtual_available'): res['fields']['virtual_available']['string'] = _( 'Future Stock') if location_info.usage == 'customer': if fields.get('virtual_available'): res['fields']['virtual_available']['string'] = _( 'Future Deliveries') if fields.get('qty_available'): res['fields']['qty_available']['string'] = _( 'Delivered Qty') if location_info.usage == 'inventory': if fields.get('virtual_available'): res['fields']['virtual_available']['string'] = _( 'Future P&L') if fields.get('qty_available'): res['fields']['qty_available']['string'] = _('P&L Qty') if location_info.usage == 'procurement': if fields.get('virtual_available'): res['fields']['virtual_available']['string'] = _( 'Future Qty') if fields.get('qty_available'): res['fields']['qty_available']['string'] = _( 'Unplanned Qty') if location_info.usage == 'production': if fields.get('virtual_available'): res['fields']['virtual_available']['string'] = _( 'Future Productions') if fields.get('qty_available'): res['fields']['qty_available']['string'] = _( 'Produced Qty') return res
class account_voucher(osv.Model): _inherit = 'account.voucher' _columns = { 'name':fields.char('Memo', size=256), 'log_ref': fields.char('Check-log Ref', size=128), # 'origin' : fields.char('Origin', size=128), 'check_status' :fields.selection([('void','Voided'),('print','Printed'),('re_print','Re-Printed'),('clear','Cleared')]), 'chk_seq': fields.char("Check Number", size=64, readonly=True), 'invoice_ids': fields.one2many('account.invoice', 'voucher_id', 'Invoices', ondelete='cascade'), 'reference': fields.char('Origin', size=64, readonly=True, states={'draft':[('readonly',False)]}, help="Source document which generated the payment."), 'jtype':fields.related('journal_id','type', string="Journal Type", type='selection', selection=[('sale', 'Sale'),('sale_refund','Sale Refund'), ('purchase', 'Purchase'), ('purchase_refund','Purchase Refund'), ('cash', 'Cash'), ('bank', 'Bank and Checks'), ('general', 'General'), ('situation', 'Opening/Closing Situation')],), } def print_checks(self, cr, uid, ids, context=None): if context is None: context = {} check_state = self.browse(cr, uid, ids[0], context=context).check_status view_ref = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'account_check_writing', 'view_account_check_write') view_id = view_ref and view_ref[1] or False context.update({'active_ids':ids, 'check_state': check_state}) return { 'type': 'ir.actions.act_window', 'name': 'Print Checks',
class int_candidature(orm.Model): _name = 'int.candidature' _description = "Internal Candidature" _inherit = ['abstract.candidature'] _mandate_model = 'int.mandate' _selection_committee_model = 'int.selection.committee' _init_mandate_columns = list(abstract_candidature._init_mandate_columns) _init_mandate_columns.extend(['int_assembly_id', 'months_before_end_of_mandate']) _allowed_inactive_link_models = [_selection_committee_model] _mandate_form_view = 'int_mandate_form_view' _unique_id_sequence = 0 _mandate_category_store_trigger = { 'int.candidature': (lambda self, cr, uid, ids, context=None: ids, ['selection_committee_id'], 20), _selection_committee_model: (lambda self, cr, uid, ids, context=None: self.pool.get('int.candidature').search( cr, uid, [('selection_committee_id', 'in', ids)], context=context), ['mandate_category_id'], 20), } _int_assembly_store_trigger = { 'int.candidature': (lambda self, cr, uid, ids, context=None: ids, ['selection_committee_id'], 20), _selection_committee_model: (lambda self, cr, uid, ids, context=None: self.pool.get('int.candidature').search( cr, uid, [('selection_committee_id', 'in', ids)], context=context), ['int_assembly_id'], 20), } _designation_assembly_store_trigger = { 'int.candidature': (lambda self, cr, uid, ids, context=None: ids, ['selection_committee_id'], 20), _selection_committee_model: (lambda self, cr, uid, ids, context=None: self.pool.get('int.candidature').search( cr, uid, [('selection_committee_id', 'in', ids)], context=context), ['designation_int_assembly_id'], 20), } _mandate_start_date_store_trigger = { 'int.candidature': (lambda self, cr, uid, ids, context=None: ids, ['selection_committee_id'], 20), _selection_committee_model: (lambda self, cr, uid, ids, context=None: self.pool.get('int.candidature').search( cr, uid, [('selection_committee_id', 'in', ids)], context=context), ['mandate_start_date'], 20), } _columns = { 'state': fields.selection(CANDIDATURE_AVAILABLE_STATES, 'Status', readonly=True, track_visibility='onchange',), 'selection_committee_id': fields.many2one(_selection_committee_model, string='Selection Committee', required=True, select=True, track_visibility='onchange'), 'mandate_category_id': fields.related( 'selection_committee_id', 'mandate_category_id', string='Mandate Category', type='many2one', relation="mandate.category", store=_mandate_category_store_trigger, domain=[('type', '=', 'int')]), 'mandate_start_date': fields.related( 'selection_committee_id', 'mandate_start_date', string='Mandate Start Date', type='date', store=_mandate_start_date_store_trigger ), 'int_assembly_id': fields.related('selection_committee_id', 'assembly_id', string='Internal Assembly', type='many2one', relation="int.assembly", store=_int_assembly_store_trigger), 'designation_int_assembly_id': fields.related( 'selection_committee_id', 'designation_int_assembly_id', string='Designation Assembly', type='many2one', relation="int.assembly", store=_designation_assembly_store_trigger), 'months_before_end_of_mandate': fields.related( 'int_assembly_id', 'months_before_end_of_mandate', string='Alert Delay (#Months)', type='integer', relation="int.assembly", store=False), 'mandate_ids': fields.one2many(_mandate_model, 'candidature_id', 'Internal Mandates', domain=[('active', '<=', True)]), } _order = 'int_assembly_id, mandate_start_date, mandate_category_id, \ partner_name' # view methods: onchange, button def onchange_selection_committee_id(self, cr, uid, ids, selection_committee_id, context=None): res = {} selection_committee = self.pool.get( 
self._selection_committee_model).browse( cr, uid, selection_committee_id, context) assembly = selection_committee.designation_int_assembly_id.id res['value'] = dict( int_assembly_id=selection_committee.assembly_id.id, designation_int_assembly_id=assembly, mandate_category_id=selection_committee.mandate_category_id.id) return res def button_create_mandate(self, cr, uid, ids, context=None): return super(int_candidature, self).button_create_mandate( cr, uid, ids, context=context)
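# For reference, a short sketch (trigger name is hypothetical) of the store-trigger
# mapping used by the related fields above: each entry maps a model name to a tuple
# of (function returning the ids to recompute, list of fields whose change triggers
# the recomputation, priority).
#
# _example_store_trigger = {
#     'int.candidature': (lambda self, cr, uid, ids, context=None: ids,
#                         ['selection_committee_id'], 20),
# }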
# # #This program is distributed in the hope that it will be useful, # #but WITHOUT ANY WARRANTY; without even the implied warranty of # #MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # #GNU Affero General Public License for more details. # # # #You should have received a copy of the GNU Affero General Public License # #along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################### from openerp.osv import orm, fields FISCAL_POSITION_COLUMNS = { 'fiscal_category_id': fields.many2one('l10n_br_account.fiscal.category', 'Categoria Fiscal'), 'type': fields.selection([('input', 'Entrada'), ('output', 'Saida')], 'Tipo'), 'type_tax_use': fields.selection([('sale', 'Sale'), ('purchase', 'Purchase'), ('all', 'All')], 'Tax Application'), 'inv_copy_note': fields.boolean('Copiar Observação na Nota Fiscal')} class AccountFiscalPositionTemplate(orm.Model): _inherit = 'account.fiscal.position.template' _columns = FISCAL_POSITION_COLUMNS def onchange_type(self, cr, uid, ids, type=False, context=None): type_tax = {'input': 'purchase', 'output': 'sale'} return {'value': {'type_tax_use': type_tax.get(type, 'all'), 'tax_ids': False}}
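# A minimal usage sketch of onchange_type() above: the fiscal position type drives
# the default tax application and resets the configured taxes.
#
#   onchange_type(cr, uid, [], 'input')   # -> {'value': {'type_tax_use': 'purchase', 'tax_ids': False}}
#   onchange_type(cr, uid, [], 'output')  # -> {'value': {'type_tax_use': 'sale', 'tax_ids': False}}
#   onchange_type(cr, uid, [], False)     # -> {'value': {'type_tax_use': 'all', 'tax_ids': False}}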
class marketing_campaign_segment(osv.osv): _name = "marketing.campaign.segment" _description = "Campaign Segment" _order = "name" def _get_next_sync(self, cr, uid, ids, fn, args, context=None): # next auto sync date is same for all segments sync_job = self.pool.get('ir.model.data').get_object( cr, uid, 'marketing_campaign', 'ir_cron_marketing_campaign_every_day', context=context) next_sync = sync_job and sync_job.nextcall or False return dict.fromkeys(ids, next_sync) _columns = { 'name': fields.char('Name', size=64,required=True), 'campaign_id': fields.many2one('marketing.campaign', 'Campaign', required=True, select=1, ondelete="cascade"), 'object_id': fields.related('campaign_id','object_id', type='many2one', relation='ir.model', string='Resource'), 'ir_filter_id': fields.many2one('ir.filters', 'Filter', ondelete="restrict", help="Filter to select the matching resource records that belong to this segment. "\ "New filters can be created and saved using the advanced search on the list view of the Resource. "\ "If no filter is set, all records are selected without filtering. "\ "The synchronization mode may also add a criterion to the filter."), 'sync_last_date': fields.datetime('Last Synchronization', help="Date on which this segment was synchronized last time (automatically or manually)"), 'sync_mode': fields.selection([('create_date', 'Only records created after last sync'), ('write_date', 'Only records modified after last sync (no duplicates)'), ('all', 'All records (no duplicates)')], 'Synchronization mode', help="Determines an additional criterion to add to the filter when selecting new records to inject in the campaign. "\ '"No duplicates" prevents selecting records which have already entered the campaign previously.'\ 'If the campaign has a "unique field" set, "no duplicates" will also prevent selecting records which have '\ 'the same value for the unique field as other records that already entered the campaign.'), 'state': fields.selection([('draft', 'New'), ('cancelled', 'Cancelled'), ('running', 'Running'), ('done', 'Done')], 'Status',), 'date_run': fields.datetime('Launch Date', help="Initial start date of this segment."), 'date_done': fields.datetime('End Date', help="Date this segment was last closed or cancelled."), 'date_next_sync': fields.function(_get_next_sync, string='Next Synchronization', type='datetime', help="Next time the synchronization job is scheduled to run automatically"), } _defaults = { 'state': lambda *a: 'draft', 'sync_mode': lambda *a: 'create_date', } def _check_model(self, cr, uid, ids, context=None): for obj in self.browse(cr, uid, ids, context=context): if not obj.ir_filter_id: return True if obj.campaign_id.object_id.model != obj.ir_filter_id.model_id: return False return True _constraints = [ (_check_model, 'Model of filter must be same as resource model of Campaign ', ['ir_filter_id,campaign_id']), ] def onchange_campaign_id(self, cr, uid, ids, campaign_id): res = {'domain': {'ir_filter_id': []}} campaign_pool = self.pool.get('marketing.campaign') if campaign_id: campaign = campaign_pool.browse(cr, uid, campaign_id) model_name = self.pool.get('ir.model').read( cr, uid, [campaign.object_id.id], ['model']) if model_name: mod_name = model_name[0]['model'] res['domain'] = {'ir_filter_id': [('model_id', '=', mod_name)]} else: res['value'] = {'ir_filter_id': False} return res def state_running_set(self, cr, uid, ids, *args): segment = self.browse(cr, uid, ids[0]) vals = {'state': 'running'} if not segment.date_run: vals['date_run'] = 
time.strftime('%Y-%m-%d %H:%M:%S') self.write(cr, uid, ids, vals) return True def state_done_set(self, cr, uid, ids, *args): wi_ids = self.pool.get("marketing.campaign.workitem").search( cr, uid, [('state', '=', 'todo'), ('segment_id', 'in', ids)]) self.pool.get("marketing.campaign.workitem").write( cr, uid, wi_ids, {'state': 'cancelled'}) self.write(cr, uid, ids, { 'state': 'done', 'date_done': time.strftime('%Y-%m-%d %H:%M:%S') }) return True def state_cancel_set(self, cr, uid, ids, *args): wi_ids = self.pool.get("marketing.campaign.workitem").search( cr, uid, [('state', '=', 'todo'), ('segment_id', 'in', ids)]) self.pool.get("marketing.campaign.workitem").write( cr, uid, wi_ids, {'state': 'cancelled'}) self.write(cr, uid, ids, { 'state': 'cancelled', 'date_done': time.strftime('%Y-%m-%d %H:%M:%S') }) return True def synchroniz(self, cr, uid, ids, *args): self.process_segment(cr, uid, ids) return True def process_segment(self, cr, uid, segment_ids=None, context=None): Workitems = self.pool.get('marketing.campaign.workitem') Campaigns = self.pool.get('marketing.campaign') if not segment_ids: segment_ids = self.search(cr, uid, [('state', '=', 'running')], context=context) action_date = time.strftime('%Y-%m-%d %H:%M:%S') campaigns = set() for segment in self.browse(cr, uid, segment_ids, context=context): if segment.campaign_id.state != 'running': continue campaigns.add(segment.campaign_id.id) act_ids = self.pool.get('marketing.campaign.activity').search( cr, uid, [('start', '=', True), ('campaign_id', '=', segment.campaign_id.id)], context=context) model_obj = self.pool[segment.object_id.model] criteria = [] if segment.sync_last_date and segment.sync_mode != 'all': criteria += [(segment.sync_mode, '>', segment.sync_last_date)] if segment.ir_filter_id: criteria += eval(segment.ir_filter_id.domain) object_ids = model_obj.search(cr, uid, criteria, context=context) # XXX TODO: rewrite this loop more efficiently without doing 1 search per record! for record in model_obj.browse(cr, uid, object_ids, context=context): # avoid duplicate workitem for the same resource if segment.sync_mode in ('write_date', 'all'): if Campaigns._find_duplicate_workitems(cr, uid, record, segment.campaign_id, context=context): continue wi_vals = { 'segment_id': segment.id, 'date': action_date, 'state': 'todo', 'res_id': record.id } partner = self.pool.get('marketing.campaign')._get_partner_for( segment.campaign_id, record) if partner: wi_vals['partner_id'] = partner.id for act_id in act_ids: wi_vals['activity_id'] = act_id Workitems.create(cr, uid, wi_vals, context=context) self.write(cr, uid, segment.id, {'sync_last_date': action_date}, context=context) Workitems.process_all(cr, uid, list(campaigns), context=context) return True
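A minimal standalone sketch of how process_segment above assembles its record-selection criteria from the synchronization mode and the saved filter; the helper name and the example domain are hypothetical, not part of the module.

# Illustrative sketch only (not part of the module).
def build_segment_criteria(sync_mode, sync_last_date, filter_domain):
    """Combine the incremental-sync clause with the optional saved filter domain."""
    criteria = []
    if sync_last_date and sync_mode != 'all':
        # 'create_date' / 'write_date': only records newer than the last sync
        criteria.append((sync_mode, '>', sync_last_date))
    if filter_domain:
        criteria += filter_domain
    return criteria

# build_segment_criteria('create_date', '2014-01-01 00:00:00',
#                        [('partner_id.customer', '=', True)])
# -> [('create_date', '>', '2014-01-01 00:00:00'), ('partner_id.customer', '=', True)]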
# Get rid of (None, attrs) for searchResultReference replies results = [i for i in results if i[0]] if results and len(results) >= 1: dn = results[0][0] conn = self.connect(conf) conn.simple_bind_s(dn, password) conn.unbind() entry = results[0] except ldap.INVALID_CREDENTIALS: return False except ldap.LDAPError, e: _logger.error('An LDAP exception occurred: %s', e) return entry _columns = { 'ldap_protocol':fields.selection([('ldap','ldap'),('ldaps','ldaps')], string='Protocol', help=("Select regular or secure ldap (ldaps)")), } _defaults = { 'ldap_protocol': 'ldaps', 'ldap_server_port': 636, } class res_company(osv.osv): _inherit = "res.company" _columns = { 'ldaps': fields.one2many( 'res.company.ldap', 'company', 'LDAP Parameters', copy=True, groups="base.group_system"), }
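The columns above only add the protocol choice and switch the default port to 636; below is a hedged sketch of how a connection URI could be derived from those settings. The helper name is hypothetical, and 'ldap_server' is assumed to be the standard auth_ldap column.

# Illustrative sketch only (not part of the module); 'ldap_server' and
# 'ldap_server_port' are assumed to be the standard auth_ldap columns.
def ldap_uri(ldap_protocol, ldap_server, ldap_server_port):
    # plain 'ldap' normally listens on 389, 'ldaps' on the 636 default set above
    return '%s://%s:%d' % (ldap_protocol, ldap_server, ldap_server_port)

# ldap_uri('ldaps', 'ldap.example.com', 636) -> 'ldaps://ldap.example.com:636'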
class marketing_campaign_activity(osv.osv): _name = "marketing.campaign.activity" _order = "name" _description = "Campaign Activity" _action_types = [ ('email', 'Email'), ('report', 'Report'), ('action', 'Custom Action'), # TODO implement the subcampaigns. # TODO implement the subcampaign out. disallow out transitions from # subcampaign activities ? #('subcampaign', 'Sub-Campaign'), ] _columns = { 'name': fields.char('Name', size=128, required=True), 'campaign_id': fields.many2one('marketing.campaign', 'Campaign', required=True, ondelete='cascade', select=1), 'object_id': fields.related('campaign_id', 'object_id', type='many2one', relation='ir.model', string='Object', readonly=True), 'start': fields.boolean( 'Start', help="This activity is launched when the campaign starts.", select=True), 'condition': fields.text( 'Condition', size=256, required=True, help= "Python expression to decide whether the activity can be executed, otherwise it will be deleted or cancelled." "The expression may use the following [browsable] variables:\n" " - activity: the campaign activity\n" " - workitem: the campaign workitem\n" " - resource: the resource object this campaign item represents\n" " - transitions: list of campaign transitions outgoing from this activity\n" "...- re: Python regular expression module"), 'type': fields.selection( _action_types, 'Type', required=True, help= """The type of action to execute when an item enters this activity, such as: - Email: send an email using a predefined email template - Report: print an existing Report defined on the resource item and save it into a specific directory - Custom Action: execute a predefined action, e.g. to modify the fields of the resource record """), 'email_template_id': fields.many2one( 'email.template', "Email Template", help='The email to send when this activity is activated'), 'report_id': fields.many2one( 'ir.actions.report.xml', "Report", help='The report to generate when this activity is activated', ), 'report_directory_id': fields.many2one( 'document.directory', 'Directory', help="This folder is used to store the generated reports"), 'server_action_id': fields.many2one( 'ir.actions.server', string='Action', help="The action to perform when this activity is activated"), 'to_ids': fields.one2many('marketing.campaign.transition', 'activity_from_id', 'Next Activities'), 'from_ids': fields.one2many('marketing.campaign.transition', 'activity_to_id', 'Previous Activities'), 'variable_cost': fields.float( 'Variable Cost', help= "Set a variable cost if you consider that every campaign item that has reached this point has entailed a certain cost. You can get cost statistics in the Reporting section", digits_compute=dp.get_precision('Product Price')), 'revenue': fields.float( 'Revenue', help= "Set an expected revenue if you consider that every campaign item that has reached this point has generated a certain revenue. You can get revenue statistics in the Reporting section", digits_compute=dp.get_precision('Account')), 'signal': fields.char( 'Signal', size=128, help= 'An activity with a signal can be called programmatically. Be careful, the workitem is always created when a signal is sent' ), 'keep_if_condition_not_met': fields.boolean( "Don't Delete Workitems", help= "By activating this option, workitems that aren't executed because the condition is not met are marked as cancelled instead of being deleted." 
) } _defaults = { 'type': lambda *a: 'email', 'condition': lambda *a: 'True', } def search(self, cr, uid, args, offset=0, limit=None, order=None, context=None, count=False): if context == None: context = {} if 'segment_id' in context and context['segment_id']: segment_obj = self.pool.get('marketing.campaign.segment').browse( cr, uid, context['segment_id']) act_ids = [] for activity in segment_obj.campaign_id.activity_ids: act_ids.append(activity.id) return act_ids return super(marketing_campaign_activity, self).search(cr, uid, args, offset, limit, order, context, count) #dead code def _process_wi_report(self, cr, uid, activity, workitem, context=None): report_data, format = render_report(cr, uid, [], activity.report_id.report_name, {}, context=context) attach_vals = { 'name': '%s_%s_%s' % (activity.report_id.report_name, activity.name, workitem.partner_id.name), 'datas_fname': '%s.%s' % (activity.report_id.report_name, activity.report_id.report_type), 'parent_id': activity.report_directory_id.id, 'datas': base64.encodestring(report_data), 'file_type': format } self.pool.get('ir.attachment').create(cr, uid, attach_vals) return True def _process_wi_email(self, cr, uid, activity, workitem, context=None): return self.pool.get('email.template').send_mail( cr, uid, activity.email_template_id.id, workitem.res_id, context=context) #dead code def _process_wi_action(self, cr, uid, activity, workitem, context=None): if context is None: context = {} server_obj = self.pool.get('ir.actions.server') action_context = dict(context, active_id=workitem.res_id, active_ids=[workitem.res_id], active_model=workitem.object_id.model, workitem=workitem) server_obj.run(cr, uid, [activity.server_action_id.id], context=action_context) return True def process(self, cr, uid, act_id, wi_id, context=None): activity = self.browse(cr, uid, act_id, context=context) method = '_process_wi_%s' % (activity.type, ) action = getattr(self, method, None) if not action: raise NotImplementedError( 'Method %r is not implemented on %r object.' % (method, self)) workitem_obj = self.pool.get('marketing.campaign.workitem') workitem = workitem_obj.browse(cr, uid, wi_id, context=context) return action(cr, uid, activity, workitem, context=context)
# -*- coding: utf-8 -*-
from openerp.osv import fields, orm
import datetime


class manuscrito(orm.Model):
    _name = 'res.manuscrito'
    _description = 'Manuscrito'
    _columns = {
        'autor': fields.many2one(
            'res.partner', 'Autor', track_visibility='onchange',
            required=True, select=True, domain="[('author','=',True)]"),
        'partner_id': fields.many2one(
            'res.partner', 'Partner', ondelete='set null',
            track_visibility='onchange', select=True,
            help="Linked partner (optional). Usually created when converting the lead."),
        'titulo': fields.char('Título', size=50, required=True),
        'isbn': fields.char('ISBN', size=30, required=True),
        'formato': fields.char('Formato', size=30),
        'genero': fields.selection(
            [('ciencia-ficcion', 'Ciencia-Ficcion'), ('novela', 'Novela'),
             ('poesia', 'Poesía'), ('cuento', 'Cuento'),
             ('historia', 'Historia'), ('miedo', 'Miedo'), ('otro', 'Otros')],
            'Género', required=True),
        'email': fields.char('E-MAIL', size=20),
        'comment': fields.text('Descripción'),
        'image': fields.binary("Image", help="Select image here"),
        'date': fields.date('Date', select=1),
        'idioma': fields.selection(
            [('cas', 'Castellano'), ('en', 'Inglés'), ('fr', 'Francés')],
            'Idioma'),
        'state': fields.selection(
            [('recibo', 'Acuse recibo'), ('eval', 'Evaluación'),
             ('confirmacion', 'Pendiente confirmación'),
             ('cancelled', 'Cancelado'), ('firma', 'Firma Contrato'),
             ('corregir', 'Corrección'), ('reenvio', 'Visto bueno autor'),
             ('envio imprenta', 'Enviado a imprenta'), ('done', 'Hecho')]),
    }

    def set_recibo(self, cr, uid, ids, context=None):
        return self.write(cr, uid, ids, {'state': 'eval'}, context=context)

    def set_evaluar(self, cr, uid, ids, context=None):
        return self.write(cr, uid, ids, {'state': 'confirmacion'}, context=context)

    def set_aceptar(self, cr, uid, ids, context=None):
        return self.write(cr, uid, ids, {'state': 'firma'}, context=context)

    def set_firmar(self, cr, uid, ids, context=None):
        return self.write(cr, uid, ids, {'state': 'corregir'}, context=context)
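A small sketch summarizing the state transitions triggered by the set_* buttons above; the mapping name is hypothetical.

# Illustrative sketch only (not part of the module).
MANUSCRITO_TRANSITIONS = {
    'set_recibo': 'eval',           # acknowledged receipt -> evaluation
    'set_evaluar': 'confirmacion',  # evaluated -> pending confirmation
    'set_aceptar': 'firma',         # accepted -> contract signature
    'set_firmar': 'corregir',       # signed -> correction
}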
class marketing_campaign_transition(osv.osv): _name = "marketing.campaign.transition" _description = "Campaign Transition" _interval_units = [('hours', 'Hour(s)'), ('days', 'Day(s)'), ('months', 'Month(s)'), ('years', 'Year(s)')] def _get_name(self, cr, uid, ids, fn, args, context=None): result = dict.fromkeys(ids, False) formatters = { 'auto': _('Automatic transition'), 'time': _('After %(interval_nbr)d %(interval_type)s'), 'cosmetic': _('Cosmetic'), } for tr in self.browse(cr, uid, ids, context=context, fields_process=translate_selections): result[tr.id] = formatters[tr.trigger.value] % tr return result def _delta(self, cr, uid, ids, context=None): assert len(ids) == 1 transition = self.browse(cr, uid, ids[0], context=context) if transition.trigger != 'time': raise ValueError('Delta is only relevant for timed transition.') return relativedelta( **{str(transition.interval_type): transition.interval_nbr}) _columns = { 'name': fields.function(_get_name, string='Name', type='char', size=128), 'activity_from_id': fields.many2one('marketing.campaign.activity', 'Previous Activity', select=1, required=True, ondelete="cascade"), 'activity_to_id': fields.many2one('marketing.campaign.activity', 'Next Activity', required=True, ondelete="cascade"), 'interval_nbr': fields.integer('Interval Value', required=True), 'interval_type': fields.selection(_interval_units, 'Interval Unit', required=True), 'trigger': fields.selection( [ ('auto', 'Automatic'), ('time', 'Time'), ('cosmetic', 'Cosmetic'), # fake plastic transition ], 'Trigger', required=True, help="How is the destination workitem triggered"), } _defaults = { 'interval_nbr': 1, 'interval_type': 'days', 'trigger': 'time', } def _check_campaign(self, cr, uid, ids, context=None): for obj in self.browse(cr, uid, ids, context=context): if obj.activity_from_id.campaign_id != obj.activity_to_id.campaign_id: return False return True _constraints = [ (_check_campaign, 'The To/From Activity of transition must be of the same Campaign ', ['activity_from_id,activity_to_id']), ] _sql_constraints = [('interval_positive', 'CHECK(interval_nbr >= 0)', 'The interval must be positive or zero')]
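A standalone sketch of what _delta above returns for a timed transition, using dateutil's relativedelta directly; the helper name is hypothetical.

# Illustrative sketch only (not part of the module).
from datetime import datetime
from dateutil.relativedelta import relativedelta

def transition_delta(interval_type, interval_nbr):
    # e.g. interval_type='days', interval_nbr=3 -> relativedelta(days=+3)
    return relativedelta(**{str(interval_type): interval_nbr})

# launch date of the next workitem = workitem date + delta:
# datetime(2014, 1, 1) + transition_delta('days', 3) -> datetime(2014, 1, 4, 0, 0)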
        res['value']['ups_shipper_id'] = ups_shipper_id
        return res

    def _method_get(self, cr, uid, context=None):
        res = super(sale_order, self)._method_get(cr, uid, context=context)
        res.append(('ups.account', 'UPS'))
        return res

    _columns = {
        'payment_method': fields.selection([
            ('cc_pre_auth', 'Credit Card – PreAuthorized'),
            ('invoice', 'Invoice'),
            ('cod', 'COD'),
            ('p_i_a', 'Pay In Advance'),
            ('pay_pal', 'Paypal'),
            ('no_charge', 'No Charge')], 'Payment Method'),
        'ship_company_code': fields.selection(_get_company_code, 'Logistic Company', method=True, size=64),
        'ups_shipper_id': fields.many2one('ups.account.shipping', 'Shipping Account'),
        'ups_service_id': fields.many2one('ups.shipping.service.type', 'Service Type'),
        'ups_pickup_type': fields.selection([
            ('01', 'Daily Pickup'),
            ('03', 'Customer Counter'),
            ('06', 'One Time Pickup'),
            ('07', 'On Call Air'),
class marketing_campaign_workitem(osv.osv): _name = "marketing.campaign.workitem" _description = "Campaign Workitem" def _res_name_get(self, cr, uid, ids, field_name, arg, context=None): res = dict.fromkeys(ids, '/') for wi in self.browse(cr, uid, ids, context=context): if not wi.res_id: continue proxy = self.pool[wi.object_id.model] if not proxy.exists(cr, uid, [wi.res_id]): continue ng = proxy.name_get(cr, uid, [wi.res_id], context=context) if ng: res[wi.id] = ng[0][1] return res def _resource_search(self, cr, uid, obj, name, args, domain=None, context=None): """Returns id of workitem whose resource_name matches with the given name""" if not len(args): return [] condition_name = None for domain_item in args: # we only use the first domain criterion and ignore all the rest including operators if isinstance(domain_item, (list, tuple)) and len( domain_item) == 3 and domain_item[0] == 'res_name': condition_name = [None, domain_item[1], domain_item[2]] break assert condition_name, "Invalid search domain for marketing_campaign_workitem.res_name. It should use 'res_name'" cr.execute("""select w.id, w.res_id, m.model \ from marketing_campaign_workitem w \ left join marketing_campaign_activity a on (a.id=w.activity_id)\ left join marketing_campaign c on (c.id=a.campaign_id)\ left join ir_model m on (m.id=c.object_id) """) res = cr.fetchall() workitem_map = {} matching_workitems = [] for id, res_id, model in res: workitem_map.setdefault(model, {}).setdefault(res_id, set()).add(id) for model, id_map in workitem_map.iteritems(): model_pool = self.pool[model] condition_name[0] = model_pool._rec_name condition = [('id', 'in', id_map.keys()), condition_name] for res_id in model_pool.search(cr, uid, condition, context=context): matching_workitems.extend(id_map[res_id]) return [('id', 'in', list(set(matching_workitems)))] _columns = { 'segment_id': fields.many2one('marketing.campaign.segment', 'Segment', readonly=True), 'activity_id': fields.many2one('marketing.campaign.activity', 'Activity', required=True, readonly=True), 'campaign_id': fields.related('activity_id', 'campaign_id', type='many2one', relation='marketing.campaign', string='Campaign', readonly=True, store=True), 'object_id': fields.related('activity_id', 'campaign_id', 'object_id', type='many2one', relation='ir.model', string='Resource', select=1, readonly=True, store=True), 'res_id': fields.integer('Resource ID', select=1, readonly=True), 'res_name': fields.function(_res_name_get, string='Resource Name', fnct_search=_resource_search, type="char", size=64), 'date': fields.datetime( 'Execution Date', help='If date is not set, this workitem has to be run manually', readonly=True), 'partner_id': fields.many2one('res.partner', 'Partner', select=1, readonly=True), 'state': fields.selection([ ('todo', 'To Do'), ('cancelled', 'Cancelled'), ('exception', 'Exception'), ('done', 'Done'), ], 'Status', readonly=True), 'error_msg': fields.text('Error Message', readonly=True) } _defaults = { 'state': lambda *a: 'todo', 'date': False, } def button_draft(self, cr, uid, workitem_ids, context=None): for wi in self.browse(cr, uid, workitem_ids, context=context): if wi.state in ('exception', 'cancelled'): self.write(cr, uid, [wi.id], {'state': 'todo'}, context=context) return True def button_cancel(self, cr, uid, workitem_ids, context=None): for wi in self.browse(cr, uid, workitem_ids, context=context): if wi.state in ('todo', 'exception'): self.write(cr, uid, [wi.id], {'state': 'cancelled'}, context=context) return True def _process_one(self, cr, uid, 
workitem, context=None): if workitem.state != 'todo': return False activity = workitem.activity_id proxy = self.pool[workitem.object_id.model] object_id = proxy.browse(cr, uid, workitem.res_id, context=context) eval_context = { 'activity': activity, 'workitem': workitem, 'object': object_id, 'resource': object_id, 'transitions': activity.to_ids, 're': re, } try: condition = activity.condition campaign_mode = workitem.campaign_id.mode if condition: if not eval(condition, eval_context): if activity.keep_if_condition_not_met: workitem.write({'state': 'cancelled'}, context=context) else: workitem.unlink(context=context) return result = True if campaign_mode in ('manual', 'active'): Activities = self.pool.get('marketing.campaign.activity') result = Activities.process(cr, uid, activity.id, workitem.id, context=context) values = dict(state='done') if not workitem.date: values['date'] = datetime.now().strftime(DT_FMT) workitem.write(values, context=context) if result: # process _chain workitem = workitem.browse(context=context)[0] # reload date = datetime.strptime(workitem.date, DT_FMT) for transition in activity.to_ids: if transition.trigger == 'cosmetic': continue launch_date = False if transition.trigger == 'auto': launch_date = date elif transition.trigger == 'time': launch_date = date + transition._delta() if launch_date: launch_date = launch_date.strftime(DT_FMT) values = { 'date': launch_date, 'segment_id': workitem.segment_id.id, 'activity_id': transition.activity_to_id.id, 'partner_id': workitem.partner_id.id, 'res_id': workitem.res_id, 'state': 'todo', } wi_id = self.create(cr, uid, values, context=context) # Now, depending on the trigger and the campaign mode # we know whether we must run the newly created workitem. # # rows = transition trigger \ colums = campaign mode # # test test_realtime manual normal (active) # time Y N N N # cosmetic N N N N # auto Y Y N Y # run = (transition.trigger == 'auto' \ and campaign_mode != 'manual') \ or (transition.trigger == 'time' \ and campaign_mode == 'test') if run: new_wi = self.browse(cr, uid, wi_id, context) self._process_one(cr, uid, new_wi, context) except Exception: tb = "".join(format_exception(*exc_info())) workitem.write({ 'state': 'exception', 'error_msg': tb }, context=context) def process(self, cr, uid, workitem_ids, context=None): for wi in self.browse(cr, uid, workitem_ids, context=context): self._process_one(cr, uid, wi, context=context) return True def process_all(self, cr, uid, camp_ids=None, context=None): camp_obj = self.pool.get('marketing.campaign') if camp_ids is None: camp_ids = camp_obj.search(cr, uid, [('state', '=', 'running')], context=context) for camp in camp_obj.browse(cr, uid, camp_ids, context=context): if camp.mode == 'manual': # manual states are not processed automatically continue while True: domain = [('campaign_id', '=', camp.id), ('state', '=', 'todo'), ('date', '!=', False)] if camp.mode in ('test_realtime', 'active'): domain += [('date', '<=', time.strftime('%Y-%m-%d %H:%M:%S'))] workitem_ids = self.search(cr, uid, domain, context=context) if not workitem_ids: break self.process(cr, uid, workitem_ids, context=context) return True def preview(self, cr, uid, ids, context=None): res = {} wi_obj = self.browse(cr, uid, ids[0], context=context) if wi_obj.activity_id.type == 'email': view_id = self.pool.get('ir.model.data').get_object_reference( cr, uid, 'email_template', 'email_template_preview_form') res = { 'name': _('Email Preview'), 'view_type': 'form', 'view_mode': 'form,tree', 'res_model': 
'email_template.preview',
                'view_id': False,
                'views': [(view_id and view_id[1] or 0, 'form')],
                'type': 'ir.actions.act_window',
                'target': 'new',
                'nodestroy': True,
                'context': "{'template_id':%d,'default_res_id':%d}" %
                           (wi_obj.activity_id.email_template_id.id, wi_obj.res_id)
            }
        elif wi_obj.activity_id.type == 'report':
            datas = {'ids': [wi_obj.res_id], 'model': wi_obj.object_id.model}
            res = {
                'type': 'ir.actions.report.xml',
                'report_name': wi_obj.activity_id.report_id.report_name,
                'datas': datas,
            }
        else:
            raise osv.except_osv(
                _('No preview'),
                _('The current step for this item has no email or report to preview.'))
        return res
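A plain-Python sketch of the trigger/mode decision table used in _process_one above when deciding whether a newly created workitem must be processed immediately; the helper name is hypothetical.

# Illustrative sketch only (not part of the module).
def must_run_now(trigger, campaign_mode):
    # 'auto' transitions run at once except in manual campaigns;
    # 'time' transitions only run at once in 'test' mode;
    # 'cosmetic' transitions never produce runnable workitems.
    return (trigger == 'auto' and campaign_mode != 'manual') or \
           (trigger == 'time' and campaign_mode == 'test')

# must_run_now('auto', 'test_realtime') -> True
# must_run_now('time', 'test_realtime') -> False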
return res def _get_invoice(self, cr, uid, ids, context=None): return self.pool.get('sale.order').search(cr, uid, [('invoice_ids', 'in', ids)], context=context) def _get_voucher(self, cr, uid, ids, context=None): result = {} for line in self.pool.get('account.voucher').browse(cr, uid, ids, context=context): if line.rel_sale_order_id: result[line.rel_sale_order_id.id] = True return result.keys() _columns = { 'payment_method':fields.selection([('cc_pre_auth', 'Credit Card – PreAuthorized'), ('invoice', 'Invoice'), ('cod', 'COD'), ('p_i_a', 'Pay In Advance'), ], 'Payment Method'), 'order_policy': fields.selection([ ('prepaid', 'Payment Before Delivery'), ('manual', 'On Demand'), ('postpaid', 'Invoice On Order After Delivery'), ('picking', 'Invoice From The Picking'), ('credit_card', 'CC Pre-Auth Pick Charge Ship'), ], 'Order Policy', required=True, readonly=True, states={'draft': [('readonly', False)]}, help="""The Order Policy is used to synchronise invoice and delivery operations. - The 'Pay Before delivery' choice will first generate the invoice and then generate the picking order after the payment of this invoice. - The 'Shipping & Manual Invoice' will create the picking order directly and wait for the user to manually click on the 'Invoice' button to generate the draft invoice. - The 'Invoice On Order After Delivery' choice will generate the draft invoice based on sales order after all picking lists have been finished. - The 'Invoice From The Picking' choice is used to create an invoice during the picking process."""), 'state': fields.selection([ ('draft', 'Draft Quotation'),
class marketing_campaign(osv.osv): _name = "marketing.campaign" _description = "Marketing Campaign" def _count_segments(self, cr, uid, ids, field_name, arg, context=None): res = {} try: for segments in self.browse(cr, uid, ids, context=context): res[segments.id] = len(segments.segment_ids) except: pass return res _columns = { 'name': fields.char('Name', size=64, required=True), 'object_id': fields.many2one('ir.model', 'Resource', required=True, help="Choose the resource on which you want \ this campaign to be run" ), 'partner_field_id': fields.many2one('ir.model.fields', 'Partner Field', domain="[('model_id', '=', object_id), ('ttype', '=', 'many2one'), ('relation', '=', 'res.partner')]", help="The generated workitems will be linked to the partner related to the record. "\ "If the record is the partner itself leave this field empty. "\ "This is useful for reporting purposes, via the Campaign Analysis or Campaign Follow-up views."), 'unique_field_id': fields.many2one('ir.model.fields', 'Unique Field', domain="[('model_id', '=', object_id), ('ttype', 'in', ['char','int','many2one','text','selection'])]", help='If set, this field will help segments that work in "no duplicates" mode to avoid '\ 'selecting similar records twice. Similar records are records that have the same value for '\ 'this unique field. For example by choosing the "email_from" field for CRM Leads you would prevent '\ 'sending the same campaign to the same email address again. If not set, the "no duplicates" segments '\ "will only avoid selecting the same record again if it entered the campaign previously. "\ "Only easily comparable fields like textfields, integers, selections or single relationships may be used."), 'mode': fields.selection([('test', 'Test Directly'), ('test_realtime', 'Test in Realtime'), ('manual', 'With Manual Confirmation'), ('active', 'Normal')], 'Mode', required=True, help= \ """Test - It creates and process all the activities directly (without waiting for the delay on transitions) but does not send emails or produce reports. Test in Realtime - It creates and processes all the activities directly but does not send emails or produce reports. With Manual Confirmation - the campaigns runs normally, but the user has to validate all workitem manually. Normal - the campaign runs normally and automatically sends all emails and reports (be very careful with this mode, you're live!)"""), 'state': fields.selection([('draft', 'New'), ('running', 'Running'), ('cancelled', 'Cancelled'), ('done', 'Done')], 'Status',), 'activity_ids': fields.one2many('marketing.campaign.activity', 'campaign_id', 'Activities'), 'fixed_cost': fields.float('Fixed Cost', help="Fixed cost for running this campaign. You may also specify variable cost and revenue on each campaign activity. Cost and Revenue statistics are included in Campaign Reporting.", digits_compute=dp.get_precision('Product Price')), 'segment_ids': fields.one2many('marketing.campaign.segment', 'campaign_id', 'Segments', readonly=False), 'segments_count': fields.function(_count_segments, type='integer', string='Segments') } _defaults = { 'state': lambda *a: 'draft', 'mode': lambda *a: 'test', } def state_running_set(self, cr, uid, ids, *args): # TODO check that all subcampaigns are running campaign = self.browse(cr, uid, ids[0]) if not campaign.activity_ids: raise osv.except_osv( _("Error"), _("The campaign cannot be started. There are no activities in it." 
)) has_start = False has_signal_without_from = False for activity in campaign.activity_ids: if activity.start: has_start = True if activity.signal and len(activity.from_ids) == 0: has_signal_without_from = True if not has_start and not has_signal_without_from: raise osv.except_osv( _("Error"), _("The campaign cannot be started. It does not have any starting activity. Modify campaign's activities to mark one as the starting point." )) return self.write(cr, uid, ids, {'state': 'running'}) def state_done_set(self, cr, uid, ids, *args): # TODO check that this campaign is not a subcampaign in running mode. segment_ids = self.pool.get('marketing.campaign.segment').search( cr, uid, [('campaign_id', 'in', ids), ('state', '=', 'running')]) if segment_ids: raise osv.except_osv( _("Error"), _("The campaign cannot be marked as done before all segments are closed." )) self.write(cr, uid, ids, {'state': 'done'}) return True def state_cancel_set(self, cr, uid, ids, *args): # TODO check that this campaign is not a subcampaign in running mode. self.write(cr, uid, ids, {'state': 'cancelled'}) return True # dead code def signal(self, cr, uid, model, res_id, signal, run_existing=True, context=None): record = self.pool[model].browse(cr, uid, res_id, context) return self._signal(cr, uid, record, signal, run_existing, context) #dead code def _signal(self, cr, uid, record, signal, run_existing=True, context=None): if not signal: raise ValueError('Signal cannot be False.') Workitems = self.pool.get('marketing.campaign.workitem') domain = [('object_id.model', '=', record._table._name), ('state', '=', 'running')] campaign_ids = self.search(cr, uid, domain, context=context) for campaign in self.browse(cr, uid, campaign_ids, context=context): for activity in campaign.activity_ids: if activity.signal != signal: continue data = dict(activity_id=activity.id, res_id=record.id, state='todo') wi_domain = [(k, '=', v) for k, v in data.items()] wi_ids = Workitems.search(cr, uid, wi_domain, context=context) if wi_ids: if not run_existing: continue else: partner = self._get_partner_for(campaign, record) if partner: data['partner_id'] = partner.id wi_id = Workitems.create(cr, uid, data, context=context) wi_ids = [wi_id] Workitems.process(cr, uid, wi_ids, context=context) return True def _get_partner_for(self, campaign, record): partner_field = campaign.partner_field_id.name if partner_field: return getattr(record, partner_field) elif campaign.object_id.model == 'res.partner': return record return None # prevent duplication until the server properly duplicates several levels of nested o2m def copy(self, cr, uid, id, default=None, context=None): raise osv.except_osv( _("Operation not supported"), _("You cannot duplicate a campaign, Not supported yet.")) def _find_duplicate_workitems(self, cr, uid, record, campaign_rec, context=None): """Finds possible duplicates workitems for a record in this campaign, based on a uniqueness field. :param record: browse_record to find duplicates workitems for. 
:param campaign_rec: browse_record of campaign """ Workitems = self.pool.get('marketing.campaign.workitem') duplicate_workitem_domain = [('res_id', '=', record.id), ('campaign_id', '=', campaign_rec.id)] unique_field = campaign_rec.unique_field_id if unique_field: unique_value = getattr(record, unique_field.name, None) if unique_value: if unique_field.ttype == 'many2one': unique_value = unique_value.id similar_res_ids = self.pool[ campaign_rec.object_id.model].search( cr, uid, [(unique_field.name, '=', unique_value)], context=context) if similar_res_ids: duplicate_workitem_domain = [ ('res_id', 'in', similar_res_ids), ('campaign_id', '=', campaign_rec.id) ] return Workitems.search(cr, uid, duplicate_workitem_domain, context=context)
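A sketch of the domain produced by _find_duplicate_workitems above; the helper name and the example unique-field scenario are hypothetical.

# Illustrative sketch only (not part of the module).
def duplicate_workitem_domain(campaign_id, res_id, similar_res_ids=None):
    if similar_res_ids:
        # records sharing the campaign's unique-field value count as duplicates
        return [('res_id', 'in', similar_res_ids), ('campaign_id', '=', campaign_id)]
    # otherwise only the very same record counts as a duplicate
    return [('res_id', '=', res_id), ('campaign_id', '=', campaign_id)]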
readonly=True, states={"draft": [("readonly", False)]}, ), "product_uos": fields.many2one("product.uom", "Product UoS"), "product_rent_qty": fields.function( _product_rent_qty, string="Rent Quantity", digits_compute=dp.get_precision("Product UoS") ), "th_weight": fields.float("Weight", readonly=True, states={"draft": [("readonly", False)]}), "move_ids": fields.one2many("stock.move", "rent_line_id", "Inventory Moves", readonly=True), "state": fields.selection( [ ("draft", "Draft"), ("cancel", "Cancelled"), ("confirmed", "Waiting Approval"), ("accepted", "Approved"), ("returned", "Returned"), ("done", "Done"), ], "Status", required=True, readonly=True, ), "order_partner_id": fields.related( "order_id", "partner_id", type="many2one", relation="res.partner", store=True, string="Customer" ), "salesman_id": fields.related( "order_id", "user_id", type="many2one", relation="res.users", store=True, string="Salesperson" ), "company_id": fields.related( "order_id", "company_id",
class hr_employment_termination(osv.Model):
    """Inherits hr.employment.termination and adds fields used when calculating
    and transferring the employee's end of service allowance.
    """
    _inherit = "hr.employment.termination"
    _columns = {
        'date': fields.date("Allowance Calculation Date", readonly=True),
        'line_ids': fields.one2many('hr.employment.termination.lines', 'termination_id', "Allowances"),
        'acc_number': fields.many2one("account.voucher", 'Voucher', readonly=True),
        'state': fields.selection([
            ('draft', 'Draft'),
            ('refuse', 'Out Of Service'),
            ('calculate', 'Calculated'),
            ('transfer', 'Transferred'),
        ], 'State', readonly=True),
    }

    def calculation(self, cr, uid, ids, transfer, context=None):
        """Calculate the employee's end of service allowance and create the
        corresponding hr.employment.termination.lines records.

        @return: list of created hr.employment.termination.lines ids
        """
        transfer = transfer == True and transfer or False
        payroll = self.pool.get('payroll')
        allow_list = []
        for rec in self.browse(cr, uid, ids, context=context):
            self.pool.get('hr.employment.termination.lines').unlink(
                cr, uid, [l.id for l in rec.line_ids], context)
            exception_allow_deduct_obj = self.pool.get('hr.allowance.deduction.exception')
            allow_ids = exception_allow_deduct_obj.search(
                cr, uid, [('employee_id', '=', rec.employee_id.id),
                          ('action', '=', 'special'), ('types', '=', 'allow')])
            allow = exception_allow_deduct_obj.browse(cr, uid, allow_ids)
            deduct_ids = exception_allow_deduct_obj.search(
                cr, uid, [('employee_id', '=', rec.employee_id.id),
                          ('action', '=', 'special'), ('types', '=', 'deduct')])
            deduct = exception_allow_deduct_obj.browse(cr, uid, deduct_ids)
            total_allow = 0
            for a in allow:
                # prorate the exceptional allowance per day up to its end date
                current_date = mx.DateTime.Parser.DateTimeFromString(rec.dismissal_date)
                end_date = mx.DateTime.Parser.DateTimeFromString(a.end_date)
                emp_end_date_days = (end_date - current_date).days
                day = a.amount / 30
                allownce = emp_end_date_days * day
                allownce_id = self.pool.get('hr.employment.termination.lines').create(
                    cr, uid, {
                        'allow_deduct_id': a.allow_deduct_id.id,
                        'account_id': a.allow_deduct_id.account_id.id,
                        'termination_id': rec.id,
                        'amount': allownce,
                        'name': a.allow_deduct_id.name
                    })
                allow_list.append(allownce_id)
            for d in deduct:
                # prorate the exceptional deduction per day up to its end date
                current_date = mx.DateTime.Parser.DateTimeFromString(rec.dismissal_date)
                end_date = mx.DateTime.Parser.DateTimeFromString(d.end_date)
                emp_end_date_days = (end_date - current_date).days
                day = d.amount / 30
                deduction = emp_end_date_days * day
                deduct_id = self.pool.get('hr.employment.termination.lines').create(
                    cr, uid, {
                        'allow_deduct_id': d.allow_deduct_id.id,
                        'account_id': d.allow_deduct_id.account_id.id,
                        'termination_id': rec.id,
                        'amount': -deduction,
                        'name': d.allow_deduct_id.name
                    })
                allow_list.append(deduct_id)
            allowance_ids = rec.dismissal_type.allowance_ids
            if not allowance_ids:
                raise orm.except_orm(_('Sorry'), _('No Allowances to be calculated'))
            for allow in allowance_ids:
                amount = payroll.compute_allowance_deduction(cr, uid, rec.employee_id, allow.id)
                line_id = self.pool.get('hr.employment.termination.lines').create(
                    cr, uid, {
                        'allow_deduct_id': allow.id,
                        'account_id': allow.account_id.id,
                        'termination_id': rec.id,
                        'amount': amount['amount'],
                        'name': allow.name
                    })
                allow_list.append(line_id)
        self.write(cr, uid, ids, {
            'state': 'calculate',
            'date': time.strftime('%Y-%m-%d')
        }, context=context)
        return allow_list

    def transfer(self, cr, uid, ids, context=None):
        """Transfer the employee's end of service allowance to a voucher.

        @return: Boolean True
        """
        lines = []
        for rec in self.browse(cr, uid, ids):
            transfer = True
            allow_rec = self.pool.get('hr.employment.termination.lines').browse(
                cr, uid, self.calculation(cr, uid, [rec.id], transfer, context=context))
            if not allow_rec:
                raise orm.except_orm(_('Sorry'), _('No Allowances to be transferred'))
            reference = 'HR/Allowances/End_Service/ ' + rec.employee_id.name + " / " + str(rec.date)
            for line in allow_rec:
                line_vals = {
                    'name': line.name,
                    #'allow_deduct_id': line.allow_deduct_id.id,
                    'amount': line.amount,
                    'account_id': line.account_id.id,
                }
                lines.append(line_vals)
            voucher = self.pool.get('payroll').create_payment(
                cr, uid, ids, {
                    'reference': reference,
                    'lines': lines
                }, context=context)
            self.write(cr, uid, ids, {
                'state': 'transfer',
                'acc_number': voucher
            }, context=context)
        return True
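A minimal sketch of the per-day proration applied in calculation() above; it uses datetime.date instead of mx.DateTime, and the figures are made up.

# Illustrative sketch only (not part of the module).
from datetime import date

def prorated_amount(monthly_amount, dismissal_date, end_date):
    """Daily rate = monthly amount / 30, applied to the days left until end_date."""
    days = (end_date - dismissal_date).days
    return (monthly_amount / 30.0) * days

# prorated_amount(900.0, date(2014, 1, 1), date(2014, 1, 16)) -> 450.0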
kail.write({'state':'done'}) except Exception, e: kail.write({'reason':e}) done = False if done: kai.write({'state':'done'}) pass IMPORT_TYPE = (('lq','Liquidation'),('usage','Usage'),('spec','Specification')) _columns={ 'name':fields.date("Import Date", states={'done':[('readonly',True)]}, required=True), 'remarks':fields.char('Remarks',size=256, states={'done':[('readonly',True)]}), 'state':fields.selection((('draft','Draft'),('done','Completed')),'State',readonly=True), 'detail_ids':fields.one2many('kderp.import.asset.detail','import_id','Details',states={'done':[('readonly',True)]}), 'detail_spec_ids':fields.one2many('kderp.import.asset.detail','import_id','Details',states={'done':[('readonly',True)]}), 'detail_usage_ids':fields.one2many('kderp.import.asset.detail','import_id','Details',states={'done':[('readonly',True)]}), 'import_type':fields.selection(IMPORT_TYPE,'Import type',states={'done':[('readonly',True)]}), } _defaults={ 'name':lambda *a: time.strftime('%Y-%m-%d'), 'state':lambda *a: 'draft' } kderp_asset_import() class kderp_import_asset_detail(Model): _name = 'kderp.import.asset.detail'
class mro_order(osv.osv): """ Maintenance Orders """ _name = 'mro.order' _description = 'Maintenance Order' _inherit = ['mail.thread', 'ir.needaction_mixin'] STATE_SELECTION = [ ('draft', 'DRAFT'), ('released', 'WAITING PARTS'), ('ready', 'READY TO MAINTENANCE'), ('done', 'DONE'), ('cancel', 'CANCELED') ] MAINTENANCE_TYPE_SELECTION = [ ('bm', 'Breakdown'), ('cm', 'Corrective') ] _track = { 'state': { 'mro.mt_order_confirmed': lambda self, cr, uid, obj, ctx=None: obj['state'] == 'ready', }, } def _get_available_parts(self, cr, uid, ids, name, arg, context=None): res = {} for order in self.browse(cr, uid, ids, context=context): res[order.id] = {} line_ids = [] available_line_ids = [] done_line_ids = [] if order.procurement_group_id: for procurement in order.procurement_group_id.procurement_ids: line_ids += [move.id for move in procurement.move_ids if move.location_dest_id.id == order.asset_id.property_stock_asset.id] available_line_ids += [move.id for move in procurement.move_ids if move.location_dest_id.id == order.asset_id.property_stock_asset.id and move.state == 'assigned'] done_line_ids += [move.id for move in procurement.move_ids if move.location_dest_id.id == order.asset_id.property_stock_asset.id and move.state == 'done'] res[order.id]['parts_ready_lines'] = line_ids res[order.id]['parts_move_lines'] = available_line_ids res[order.id]['parts_moved_lines'] = done_line_ids return res _columns = { 'name': fields.char('Reference', size=64), 'origin': fields.char('Source Document', size=64, readonly=True, states={'draft': [('readonly', False)]}, help="Reference of the document that generated this maintenance order."), 'state': fields.selection(STATE_SELECTION, 'Status', readonly=True, help="When the maintenance order is created the status is set to 'Draft'.\n\ If the order is confirmed the status is set to 'Waiting Parts'.\n\ If the stock is available then the status is set to 'Ready to Maintenance'.\n\ When the maintenance is over, the status is set to 'Done'."), 'maintenance_type': fields.selection(MAINTENANCE_TYPE_SELECTION, 'Maintenance Type', required=True, readonly=True, states={'draft': [('readonly', False)]}), 'task_id': fields.many2one('mro.task', 'Task', readonly=True, states={'draft': [('readonly', False)]}), 'description': fields.char('Description', size=64, translate=True, required=True, readonly=True, states={'draft': [('readonly', False)]}), 'asset_id': fields.many2one('asset.asset', 'Asset', required=True, readonly=True, states={'draft': [('readonly', False)]}), 'date_planned': fields.datetime('Planned Date', required=True, select=1, readonly=True, states={'draft':[('readonly',False)]}), 'date_scheduled': fields.datetime('Scheduled Date', required=True, select=1, readonly=True, states={'draft':[('readonly',False)],'released':[('readonly',False)],'ready':[('readonly',False)]}), 'date_execution': fields.datetime('Execution Date', required=True, states={'done':[('readonly',True)],'cancel':[('readonly',True)]}), 'parts_lines': fields.one2many('mro.order.parts.line', 'maintenance_id', 'Planned parts', readonly=True, states={'draft':[('readonly',False)]}), 'parts_ready_lines': fields.function(_get_available_parts, relation="stock.move", method=True, type="one2many", multi='parts'), 'parts_move_lines': fields.function(_get_available_parts, relation="stock.move", method=True, type="one2many", multi='parts'), 'parts_moved_lines': fields.function(_get_available_parts, relation="stock.move", method=True, type="one2many", multi='parts'), 'tools_description': fields.text('Tools 
Description',translate=True), 'labor_description': fields.text('Labor Description',translate=True), 'operations_description': fields.text('Operations Description',translate=True), 'documentation_description': fields.text('Documentation Description',translate=True), 'problem_description': fields.text('Problem Description'), 'company_id': fields.many2one('res.company','Company',required=True, readonly=True, states={'draft':[('readonly',False)]}), 'procurement_group_id': fields.many2one('procurement.group', 'Procurement group', copy=False), 'category_ids': fields.related('asset_id', 'category_ids', type='many2many', relation='asset.category', string='Asset Category', readonly=True), } _defaults = { 'state': lambda *a: 'draft', 'maintenance_type': lambda *a: 'bm', 'date_planned': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'), 'date_scheduled': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'), 'date_execution': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'), 'company_id': lambda self, cr, uid, c: self.pool.get('res.company')._company_default_get(cr, uid, 'mro.order', context=c), } _order = 'date_execution' def onchange_asset(self, cr, uid, ids, asset): value = {} if asset: value['category_ids'] = self.pool.get('asset.asset').browse(cr, uid, asset).category_ids return {'value': value} def onchange_planned_date(self, cr, uid, ids, date): """ onchange handler of date_planned. """ return {'value': { 'date_scheduled': date, }} def onchange_scheduled_date(self, cr, uid, ids, date): """ onchange handler of date_scheduled. """ return {'value': { 'date_execution': date, }} def onchange_execution_date(self, cr, uid, ids, date, state): """ onchange handler of date_execution. """ value = {} if state == 'draft': value['value'] = {'date_planned': date} else: value['value'] = {'date_scheduled': date} return value def onchange_task(self, cr, uid, ids, task_id, parts_lines): task = self.pool.get('mro.task').browse(cr, uid, task_id) new_parts_lines = [] for line in task.parts_lines: new_parts_lines.append([0,0,{ 'name': line.name, 'parts_id': line.parts_id.id, 'parts_qty': line.parts_qty, 'parts_uom': line.parts_uom.id, }]) return {'value': { 'parts_lines': new_parts_lines, 'description': task.name, 'tools_description': task.tools_description, 'labor_description': task.labor_description, 'operations_description': task.operations_description, 'documentation_description': task.documentation_description }} def test_ready(self, cr, uid, ids): res = True for order in self.browse(cr, uid, ids): if order.parts_lines and order.procurement_group_id: states = [] for procurement in order.procurement_group_id.procurement_ids: states += [move.state != 'assigned' for move in procurement.move_ids if move.location_dest_id.id == order.asset_id.property_stock_asset.id] if any(states) or len(states) == 0: res = False return res def action_confirm(self, cr, uid, ids, context=None): """ Confirms maintenance order. 
@return: True """ procurement_obj = self.pool.get('procurement.order') for order in self.browse(cr, uid, ids, context=context): proc_ids = [] group_id = self.pool.get("procurement.group").create(cr, uid, {'name': order.name}, context=context) for line in order.parts_lines: vals = { 'name': order.name, 'origin': order.name, 'company_id': order.company_id.id, 'group_id': group_id, 'date_planned': order.date_planned, 'product_id': line.parts_id.id, 'product_qty': line.parts_qty, 'product_uom': line.parts_uom.id, 'location_id': order.asset_id.property_stock_asset.id } proc_id = procurement_obj.create(cr, uid, vals, context=context) proc_ids.append(proc_id) procurement_obj.run(cr, uid, proc_ids, context=context) order.write({'state':'released','procurement_group_id':group_id}, context=context) return 0 def action_ready(self, cr, uid, ids): self.write(cr, uid, ids, {'state': 'ready'}) return True def action_done(self, cr, uid, ids, context=None): for order in self.browse(cr, uid, ids, context=context): self.pool.get('stock.move').action_done(cr, uid, [x.id for x in order.parts_move_lines]) self.write(cr, uid, ids, {'state': 'done', 'date_execution': time.strftime('%Y-%m-%d %H:%M:%S')}) return True def action_cancel(self, cr, uid, ids, context=None): for order in self.browse(cr, uid, ids, context=context): self.pool.get('stock.move').action_cancel(cr, uid, [x.id for x in order.parts_ready_lines]) self.write(cr, uid, ids, {'state': 'cancel'}) return True def test_if_parts(self, cr, uid, ids): """ @return: True or False """ res = True for order in self.browse(cr, uid, ids): if not order.parts_lines: res = False return res def force_done(self, cr, uid, ids, context=None): """ Assign and consume parts. @return: True """ self.force_parts_reservation(cr, uid, ids) wf_service = netsvc.LocalService("workflow") for order in self.browse(cr, uid, ids, context=context): wf_service.trg_validate(uid, 'mro.order', order.id, 'button_done', cr) return True def force_parts_reservation(self, cr, uid, ids, context=None): """ Assign parts. @return: True """ for order in self.browse(cr, uid, ids, context=context): self.pool.get('stock.move').force_assign(cr, uid, [x.id for x in order.parts_ready_lines]) return True def create(self, cr, uid, vals, context=None): if vals.get('name','/')=='/': vals['name'] = self.pool.get('ir.sequence').get(cr, uid, 'mro.order') or '/' return super(mro_order, self).create(cr, uid, vals, context=context) def write(self, cr, uid, ids, vals, context=None): if vals.get('date_execution') and not vals.get('state'): # constraint for calendar view for order in self.browse(cr, uid, ids): if order.state == 'draft': vals['date_planned'] = vals['date_execution'] vals['date_scheduled'] = vals['date_execution'] elif order.state in ('released','ready'): vals['date_scheduled'] = vals['date_execution'] else: del vals['date_execution'] return super(mro_order, self).write(cr, uid, ids, vals, context=context)
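A plain-Python reduction of the availability check performed by test_ready above; the helper name and the (state, destination) tuple shape are assumptions for illustration.

# Illustrative sketch only (not part of the module).
def parts_ready(moves, asset_location_id):
    """moves: (state, location_dest_id) tuples of the order's procurement moves."""
    pending = [state != 'assigned'
               for state, dest in moves if dest == asset_location_id]
    # ready only if at least one relevant move exists and none is unassigned
    return bool(pending) and not any(pending)

# parts_ready([('assigned', 7), ('assigned', 7)], 7) -> True
# parts_ready([('confirmed', 7)], 7)                 -> False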
return result def _get_extras(self, cr, uid, ids, *args, **kwargs): result = [] if aeroo_ooo_test(cr): result.append('aeroo_ooo') ##### Check deferred_processing module ##### cr.execute("SELECT id, state FROM ir_module_module WHERE name='deferred_processing'") deferred_proc_module = cr.dictfetchone() if deferred_proc_module and deferred_proc_module['state'] in ('installed', 'to upgrade'): result.append('deferred_processing') ############################################ return dict.fromkeys(ids, ','.join(result)) _columns = { 'charset':fields.selection(_get_encodings, string='Charset', required=True), 'content_fname': fields.char('Override Extension',size=64, help='Here you can override output file extension'), 'styles_mode': fields.selection([ ('default','Not used'), ('global', 'Global'), ('specified', 'Specified'), ], string='Stylesheet'), 'stylesheet_id':fields.many2one('report.stylesheets', 'Template Stylesheet'), 'preload_mode':fields.selection([ ('static',_('Static')), ('preload',_('Preload')), ],'Preload Mode'), 'tml_source':fields.selection([ ('database','Database'), ('file','File'), ('parser','Parser'),
class mro_request(osv.osv): """ Maintenance Requests """ _name = 'mro.request' _description = 'Maintenance Request' _inherit = ['mail.thread', 'ir.needaction_mixin'] STATE_SELECTION = [ ('draft', 'Draft'), ('claim', 'Claim'), ('run', 'Execution'), ('done', 'Done'), ('reject', 'Rejected'), ('cancel', 'Canceled') ] _track = { 'state': { 'mro.mt_request_sent': lambda self, cr, uid, obj, ctx=None: obj['state'] == 'claim', 'mro.mt_request_confirmed': lambda self, cr, uid, obj, ctx=None: obj['state'] == 'run', 'mro.mt_request_rejected': lambda self, cr, uid, obj, ctx=None: obj['state'] == 'reject', }, } _columns = { 'name': fields.char('Reference', size=64), 'state': fields.selection(STATE_SELECTION, 'Status', readonly=True, help="When the maintenance request is created the status is set to 'Draft'.\n\ If the request is sent the status is set to 'Claim'.\n\ If the request is confirmed the status is set to 'Execution'.\n\ If the request is rejected the status is set to 'Rejected'.\n\ When the maintenance is over, the status is set to 'Done'."), 'asset_id': fields.many2one('asset.asset', 'Asset', required=True, readonly=True, states={'draft': [('readonly', False)]}), 'cause': fields.char('Cause', size=64, translate=True, required=True, readonly=True, states={'draft': [('readonly', False)]}), 'description': fields.text('Description', readonly=True, states={'draft': [('readonly', False)]}), 'reject_reason': fields.text('Reject Reason', readonly=True), 'requested_date': fields.datetime('Requested Date', required=True, select=1, readonly=True, states={'draft': [('readonly', False)]}, help="Date requested by the customer for maintenance."), 'execution_date': fields.datetime('Execution Date', required=True, select=1, readonly=True, states={'draft':[('readonly',False)],'claim':[('readonly',False)]}), 'breakdown': fields.boolean('Breakdown', readonly=True, states={'draft': [('readonly', False)]}), 'create_uid': fields.many2one('res.users', 'Responsible'), } _defaults = { 'state': 'draft', 'requested_date': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'), 'execution_date': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'), 'breakdown': False, } def onchange_requested_date(self, cr, uid, ids, date): """ onchange handler of requested_date. """ return {'value': { 'execution_date': date, }} def onchange_execution_date(self, cr, uid, ids, date, state, breakdown): """ onchange handler of execution_date. """ value = {} if state == 'draft' and not breakdown: value['value'] = {'requested_date': date} return value def action_send(self, cr, uid, ids, context=None): value = {'state': 'claim'} for request in self.browse(cr, uid, ids, context=context): if request.breakdown: value['requested_date'] = time.strftime('%Y-%m-%d %H:%M:%S') self.write(cr, uid, ids, value) return True def action_confirm(self, cr, uid, ids, context=None): """ Confirms maintenance request. @return: Newly generated Maintenance Order Id. 
""" order = self.pool.get('mro.order') order_id = False for request in self.browse(cr, uid, ids, context=context): order_id = order.create(cr, uid, { 'date_planned':request.requested_date, 'date_scheduled':request.requested_date, 'date_execution':request.requested_date, 'origin': request.name, 'state': 'draft', 'maintenance_type': 'bm', 'asset_id': request.asset_id.id, 'description': request.cause, 'problem_description': request.description, }) self.write(cr, uid, ids, {'state': 'run'}) return order_id def action_done(self, cr, uid, ids, context=None): self.write(cr, uid, ids, {'state': 'done', 'execution_date': time.strftime('%Y-%m-%d %H:%M:%S')}) return True def action_reject(self, cr, uid, ids, context=None): self.write(cr, uid, ids, {'state': 'reject', 'execution_date': time.strftime('%Y-%m-%d %H:%M:%S')}) return True def action_cancel(self, cr, uid, ids, context=None): self.write(cr, uid, ids, {'state': 'cancel', 'execution_date': time.strftime('%Y-%m-%d %H:%M:%S')}) return True def create(self, cr, uid, vals, context=None): if vals.get('name','/')=='/': vals['name'] = self.pool.get('ir.sequence').get(cr, uid, 'mro.request') or '/' return super(mro_request, self).create(cr, uid, vals, context=context) # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
""" just so CreatorCase.export can be used """ pass models = [ ('boolean', fields.boolean()), ('integer', fields.integer()), ('float', fields.float()), ('decimal', fields.float(digits=(16, 3))), ('string.bounded', fields.char('unknown', size=16)), ('string.required', fields.char('unknown', size=None, required=True)), ('string', fields.char('unknown', size=None)), ('date', fields.date()), ('datetime', fields.datetime()), ('text', fields.text()), ('selection', fields.selection([(1, "Foo"), (2, "Bar"), (3, "Qux"), (4, '')])), # here use size=-1 to store the values as integers instead of strings ('selection.function', fields.selection(selection_fn, size=-1)), # just relate to an integer ('many2one', fields.many2one('export.integer')), ('one2many', fields.one2many('export.one2many.child', 'parent_id')), ('many2many', fields.many2many('export.many2many.other')), ('function', fields.function(function_fn, fnct_inv=function_fn_write, type="integer")), # related: specialization of fields.function, should work the same way # TODO: reference ] for name, field in models: class NewModel(orm.Model): _name = 'export.%s' % name _columns = {
class ir_translation(osv.osv): _name = "ir.translation" _log_access = False def _get_language(self, cr, uid, context): lang_model = self.pool.get('res.lang') lang_ids = lang_model.search(cr, uid, [('translatable', '=', True)], context=context) lang_data = lang_model.read(cr, uid, lang_ids, ['code', 'name'], context=context) return [(d['code'], d['name']) for d in lang_data] def _get_src(self, cr, uid, ids, name, arg, context=None): ''' Get source name for the translation. If object type is model then return the value store in db. Otherwise return value store in src field ''' if context is None: context = {} res = dict.fromkeys(ids, False) for record in self.browse(cr, uid, ids, context=context): if record.type != 'model': res[record.id] = record.src else: model_name, field = record.name.split(',') model = self.pool.get(model_name) if model and model.exists( cr, uid, record.res_id, context=context): #We need to take the context without the language information, because we want to read the #value store in db and not on the one associate with current language. context_wo_lang = context.copy() context_wo_lang.pop('lang', None) result = model.read(cr, uid, record.res_id, [field], context=context_wo_lang) res[record.id] = result and result[field] or False return res def _set_src(self, cr, uid, id, name, value, args, context=None): ''' When changing source term of a translation, change its value in db for the associated object, and the src field ''' if context is None: context = {} record = self.browse(cr, uid, id, context=context) if record.type == 'model': model_name, field = record.name.split(',') model = self.pool.get(model_name) #We need to take the context without the language information, because we want to write on the #value store in db and not on the one associate with current language. #Also not removing lang from context trigger an error when lang is different context_wo_lang = context.copy() context_wo_lang.pop('lang', None) model.write(cr, uid, record.res_id, {field: value}, context=context_wo_lang) return self.write(cr, uid, id, {'src': value}, context=context) _columns = { 'name': fields.char('Translated field', required=True), 'res_id': fields.integer('Record ID', select=True), 'lang': fields.selection(_get_language, string='Language'), 'type': fields.selection(TRANSLATION_TYPE, string='Type', select=True), 'src': fields.text('Old source'), 'source': fields.function(_get_src, fnct_inv=_set_src, type='text', string='Source'), 'value': fields.text('Translation Value'), 'module': fields.char('Module', help="Module this term belongs to", select=True), 'state': fields.selection( [('to_translate', 'To Translate'), ('inprogress', 'Translation in Progress'), ('translated', 'Translated')], string="Status", help= "Automatically set to let administators find new terms that might need to be translated" ), # aka gettext extracted-comments - we use them to flag openerp-web translation # cfr: http://www.gnu.org/savannah-checkouts/gnu/gettext/manual/html_node/PO-Files.html 'comments': fields.text('Translation comments', select=True), } _defaults = { 'state': 'to_translate', } _sql_constraints = [ ('lang_fkey_res_lang', 'FOREIGN KEY(lang) REFERENCES res_lang(code)', 'Language code of translation item must be among known languages'), ] def _auto_init(self, cr, context=None): super(ir_translation, self)._auto_init(cr, context) # FIXME: there is a size limit on btree indexed values so we can't index src column with normal btree. 
cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = %s', ('ir_translation_ltns', )) if cr.fetchone(): #temporarily removed: cr.execute('CREATE INDEX ir_translation_ltns ON ir_translation (name, lang, type, src)') cr.execute('DROP INDEX ir_translation_ltns') cr.commit() cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = %s', ('ir_translation_lts', )) if cr.fetchone(): #temporarily removed: cr.execute('CREATE INDEX ir_translation_lts ON ir_translation (lang, type, src)') cr.execute('DROP INDEX ir_translation_lts') cr.commit() # add separate hash index on src (no size limit on values), as postgres 8.1+ is able to combine separate indexes cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = %s', ('ir_translation_src_hash_idx', )) if not cr.fetchone(): cr.execute( 'CREATE INDEX ir_translation_src_hash_idx ON ir_translation using hash (src)' ) cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = %s', ('ir_translation_ltn', )) if not cr.fetchone(): cr.execute( 'CREATE INDEX ir_translation_ltn ON ir_translation (name, lang, type)' ) cr.commit() def _check_selection_field_value(self, cr, uid, field, value, context=None): if field == 'lang': return return super(ir_translation, self)._check_selection_field_value(cr, uid, field, value, context=context) @tools.ormcache_multi(skiparg=3, multi=6) def _get_ids(self, cr, uid, name, tt, lang, ids): translations = dict.fromkeys(ids, False) if ids: cr.execute( 'select res_id,value ' 'from ir_translation ' 'where lang=%s ' 'and type=%s ' 'and name=%s ' 'and res_id IN %s', (lang, tt, name, tuple(ids))) for res_id, value in cr.fetchall(): translations[res_id] = value return translations def _set_ids(self, cr, uid, name, tt, lang, ids, value, src=None): # clear the caches tr = self._get_ids(cr, uid, name, tt, lang, ids) for res_id in tr: if tr[res_id]: self._get_source.clear_cache(self, uid, name, tt, lang, tr[res_id]) self._get_ids.clear_cache(self, uid, name, tt, lang, res_id) self._get_source.clear_cache(self, uid, name, tt, lang) cr.execute( 'delete from ir_translation ' 'where lang=%s ' 'and type=%s ' 'and name=%s ' 'and res_id IN %s', ( lang, tt, name, tuple(ids), )) for id in ids: self.create( cr, uid, { 'lang': lang, 'type': tt, 'name': name, 'res_id': id, 'value': value, 'src': src, }) return len(ids) @tools.ormcache(skiparg=3) def _get_source(self, cr, uid, name, types, lang, source=None): """ Returns the translation for the given combination of name, type, language and source. All values passed to this method should be unicode (not byte strings), especially ``source``. :param name: identification of the term to translate, such as field name (optional if source is passed) :param types: single string defining type of term to translate (see ``type`` field on ir.translation), or sequence of allowed types (strings) :param lang: language code of the desired translation :param source: optional source term to translate (should be unicode) :rtype: unicode :return: the request translation, or an empty unicode string if no translation was found and `source` was not passed """ # FIXME: should assert that `source` is unicode and fix all callers to always pass unicode # so we can remove the string encoding/decoding. 
if not lang: return tools.ustr(source or '') if isinstance(types, basestring): types = (types, ) if source: query = """SELECT value FROM ir_translation WHERE lang=%s AND type in %s AND src=%s""" params = (lang or '', types, tools.ustr(source)) if name: query += " AND name=%s" params += (tools.ustr(name), ) cr.execute(query, params) else: cr.execute( """SELECT value FROM ir_translation WHERE lang=%s AND type in %s AND name=%s""", (lang or '', types, tools.ustr(name))) res = cr.fetchone() trad = res and res[0] or u'' if source and not trad: return tools.ustr(source) # Remove control characters return filter(lambda c: unicodedata.category(c) != 'Cc', tools.ustr(trad)) def create(self, cr, uid, vals, context=None): if context is None: context = {} ids = super(ir_translation, self).create(cr, uid, vals, context=context) self._get_source.clear_cache(self, uid, vals.get('name', 0), vals.get('type', 0), vals.get('lang', 0), vals.get('src', 0)) self._get_ids.clear_cache(self, uid, vals.get('name', 0), vals.get('type', 0), vals.get('lang', 0), vals.get('res_id', 0)) return ids def write(self, cursor, user, ids, vals, context=None): if context is None: context = {} if isinstance(ids, (int, long)): ids = [ids] if vals.get('src') or ('value' in vals and not (vals.get('value'))): vals.update({'state': 'to_translate'}) if vals.get('value'): vals.update({'state': 'translated'}) result = super(ir_translation, self).write(cursor, user, ids, vals, context=context) for trans_obj in self.read(cursor, user, ids, ['name', 'type', 'res_id', 'src', 'lang'], context=context): self._get_source.clear_cache(self, user, trans_obj['name'], trans_obj['type'], trans_obj['lang'], trans_obj['src']) self._get_ids.clear_cache(self, user, trans_obj['name'], trans_obj['type'], trans_obj['lang'], trans_obj['res_id']) return result def unlink(self, cursor, user, ids, context=None): if context is None: context = {} if isinstance(ids, (int, long)): ids = [ids] for trans_obj in self.read(cursor, user, ids, ['name', 'type', 'res_id', 'src', 'lang'], context=context): self._get_source.clear_cache(self, user, trans_obj['name'], trans_obj['type'], trans_obj['lang'], trans_obj['src']) self._get_ids.clear_cache(self, user, trans_obj['name'], trans_obj['type'], trans_obj['lang'], trans_obj['res_id']) result = super(ir_translation, self).unlink(cursor, user, ids, context=context) return result def translate_fields(self, cr, uid, model, id, field=None, context=None): trans_model = self.pool.get(model) domain = ['&', ('res_id', '=', id), ('name', '=like', model + ',%')] langs_ids = self.pool.get('res.lang').search(cr, uid, [('code', '!=', 'en_US')], context=context) if not langs_ids: raise osv.except_osv( _('Error'), _("Translation features are unavailable until you install an extra OpenERP translation." 
)) langs = [ lg.code for lg in self.pool.get('res.lang').browse( cr, uid, langs_ids, context=context) ] main_lang = 'en_US' translatable_fields = [] for f, info in trans_model._all_columns.items(): if info.column.translate: if info.parent_model: parent_id = trans_model.read( cr, uid, [id], [info.parent_column], context=context)[0][info.parent_column][0] translatable_fields.append({ 'name': f, 'id': parent_id, 'model': info.parent_model }) domain.insert(0, '|') domain.extend([ '&', ('res_id', '=', parent_id), ('name', '=', "%s,%s" % (info.parent_model, f)) ]) else: translatable_fields.append({ 'name': f, 'id': id, 'model': model }) if len(langs): fields = [f.get('name') for f in translatable_fields] record = trans_model.read(cr, uid, [id], fields, context={'lang': main_lang})[0] for lg in langs: for f in translatable_fields: # Check if record exists, else create it (at once) sql = """INSERT INTO ir_translation (lang, src, name, type, res_id, value) SELECT %s, %s, %s, 'model', %s, %s WHERE NOT EXISTS (SELECT 1 FROM ir_translation WHERE lang=%s AND name=%s AND res_id=%s AND type='model'); UPDATE ir_translation SET src = %s WHERE lang=%s AND name=%s AND res_id=%s AND type='model'; """ src = record[f['name']] or None name = "%s,%s" % (f['model'], f['name']) cr.execute(sql, (lg, src, name, f['id'], src, lg, name, f['id'], src, lg, name, id)) action = { 'name': 'Translate', 'res_model': 'ir.translation', 'type': 'ir.actions.act_window', 'view_type': 'form', 'view_mode': 'tree,form', 'domain': domain, } if field: info = trans_model._all_columns[field] action['context'] = { 'search_default_name': "%s,%s" % (info.parent_model or model, field) } return action def _get_import_cursor(self, cr, uid, context=None): """ Return a cursor-like object for fast inserting translations """ return ir_translation_import_cursor(cr, uid, self, context=context) def load(self, cr, modules, langs, context=None): context = dict(context or {}) # local copy for module_name in modules: modpath = openerp.modules.get_module_path(module_name) if not modpath: continue for lang in langs: lang_code = tools.get_iso_codes(lang) base_lang_code = None if '_' in lang_code: base_lang_code = lang_code.split('_')[0] # Step 1: for sub-languages, load base language first (e.g. 
es_CL.po is loaded over es.po) if base_lang_code: base_trans_file = openerp.modules.get_module_resource( module_name, 'i18n', base_lang_code + '.po') if base_trans_file: _logger.info( 'module %s: loading base translation file %s for language %s', module_name, base_lang_code, lang) tools.trans_load(cr, base_trans_file, lang, verbose=False, module_name=module_name, context=context) context[ 'overwrite'] = True # make sure the requested translation will override the base terms later # i18n_extra folder is for additional translations handle manually (eg: for l10n_be) base_trans_extra_file = openerp.modules.get_module_resource( module_name, 'i18n_extra', base_lang_code + '.po') if base_trans_extra_file: _logger.info( 'module %s: loading extra base translation file %s for language %s', module_name, base_lang_code, lang) tools.trans_load(cr, base_trans_extra_file, lang, verbose=False, module_name=module_name, context=context) context[ 'overwrite'] = True # make sure the requested translation will override the base terms later # Step 2: then load the main translation file, possibly overriding the terms coming from the base language trans_file = openerp.modules.get_module_resource( module_name, 'i18n', lang_code + '.po') if trans_file: _logger.info( 'module %s: loading translation file (%s) for language %s', module_name, lang_code, lang) tools.trans_load(cr, trans_file, lang, verbose=False, module_name=module_name, context=context) elif lang_code != 'en_US': _logger.warning( 'module %s: no translation for language %s', module_name, lang_code) trans_extra_file = openerp.modules.get_module_resource( module_name, 'i18n_extra', lang_code + '.po') if trans_extra_file: _logger.info( 'module %s: loading extra translation file (%s) for language %s', module_name, lang_code, lang) tools.trans_load(cr, trans_extra_file, lang, verbose=False, module_name=module_name, context=context) return True
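# Illustrative sketch (standalone, not part of the model above): the lookup order that
# load() walks for one module and one language. The helper name and return format are
# assumptions for illustration only; the actual loading is done by tools.trans_load()
# against the module's i18n/ and i18n_extra/ directories, with later files overriding
# earlier terms because context['overwrite'] is set after the base language is loaded.
def candidate_po_files(lang_code):
    """Return the .po files load() would try for one module, in override order (later wins)."""
    candidates = []
    if '_' in lang_code:
        base = lang_code.split('_')[0]
        candidates += ['i18n/%s.po' % base, 'i18n_extra/%s.po' % base]
    candidates += ['i18n/%s.po' % lang_code, 'i18n_extra/%s.po' % lang_code]
    return candidates

# e.g. candidate_po_files('es_CL') ->
#   ['i18n/es.po', 'i18n_extra/es.po', 'i18n/es_CL.po', 'i18n_extra/es_CL.po']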
return result def _get_extras(self, cr, uid, ids, *args, **kwargs): result = [] if _aeroo_ooo_test(cr): result.append('aeroo_ooo') ##### Check deferred_processing module ##### cr.execute("SELECT id, state FROM ir_module_module WHERE name='deferred_processing'") deferred_proc_module = cr.dictfetchone() if deferred_proc_module and deferred_proc_module['state'] in ('installed', 'to upgrade'): result.append('deferred_processing') ############################################ return dict.fromkeys(ids, ','.join(result)) _columns = { 'charset':fields.selection(_get_encodings, string='Charset', required=True), 'content_fname': fields.char('Override Extension',size=64, help='Here you can override output file extension'), 'styles_mode': fields.selection([ ('default','Not used'), ('global', 'Global'), ('specified', 'Specified'), ], string='Stylesheet'), 'stylesheet_id':fields.many2one('report.stylesheets', 'Template Stylesheet'), 'preload_mode':fields.selection([ ('static',_('Static')), ('preload',_('Preload')), ],'Preload Mode'), 'tml_source':fields.selection([ ('database','Database'), ('file','File'), ('parser','Parser'),
class ir_attachment(osv.osv): """Attachments are used to link binary files or url to any openerp document. External attachment storage --------------------------- The 'data' function field (_data_get,data_set) is implemented using _file_read, _file_write and _file_delete which can be overridden to implement other storage engines, such methods should check for other location pseudo uri (example: hdfs://hadoppserver) The default implementation is the file:dirname location that stores files on the local filesystem using name based on their sha1 hash """ _order = 'id desc' def _name_get_resname(self, cr, uid, ids, object, method, context): data = {} for attachment in self.browse(cr, uid, ids, context=context): model_object = attachment.res_model res_id = attachment.res_id if model_object and res_id: model_pool = self.pool[model_object] res = model_pool.name_get(cr, uid, [res_id], context) res_name = res and res[0][1] or None if res_name: field = self._columns.get('res_name', False) if field and len(res_name) > field.size: res_name = res_name[:30] + '...' data[attachment.id] = res_name or False else: data[attachment.id] = False return data def _storage(self, cr, uid, context=None): return self.pool['ir.config_parameter'].get_param( cr, SUPERUSER_ID, 'ir_attachment.location', 'file') def _filestore(self, cr, uid, context=None): return tools.config.filestore(cr.dbname) def force_storage(self, cr, uid, context=None): """Force all attachments to be stored in the currently configured storage""" if not self.pool['res.users']._is_admin(cr, uid, [uid]): raise AccessError( _('Only administrators can execute this action.')) location = self._storage(cr, uid, context) domain = { 'db': [('store_fname', '!=', False)], 'file': [('db_datas', '!=', False)], }[location] ids = self.search(cr, uid, domain, context=context) for attach in self.browse(cr, uid, ids, context=context): attach.write({'datas': attach.datas}) return True # 'data' field implementation def _full_path(self, cr, uid, path): # sanitize ath path = re.sub('[.]', '', path) path = path.strip('/\\') return os.path.join(self._filestore(cr, uid), path) def _get_path(self, cr, uid, bin_data, sha): # retro compatibility fname = sha[:3] + '/' + sha full_path = self._full_path(cr, uid, fname) if os.path.isfile(full_path): return fname, full_path # keep existing path # scatter files across 256 dirs # we use '/' in the db (even on windows) fname = sha[:2] + '/' + sha full_path = self._full_path(cr, uid, fname) dirname = os.path.dirname(full_path) if not os.path.isdir(dirname): os.makedirs(dirname) return fname, full_path def _file_read(self, cr, uid, fname, bin_size=False): full_path = self._full_path(cr, uid, fname) r = '' try: if bin_size: r = os.path.getsize(full_path) else: r = open(full_path, 'rb').read().encode('base64') except IOError: _logger.info("_read_file reading %s", full_path, exc_info=True) return r def _file_write(self, cr, uid, value, checksum): bin_value = value.decode('base64') fname, full_path = self._get_path(cr, uid, bin_value, checksum) if not os.path.exists(full_path): try: with open(full_path, 'wb') as fp: fp.write(bin_value) except IOError: _logger.info("_file_write writing %s", full_path, exc_info=True) return fname def _file_delete(self, cr, uid, fname): # using SQL to include files hidden through unlink or due to record rules cr.execute("SELECT COUNT(*) FROM ir_attachment WHERE store_fname = %s", (fname, )) count = cr.fetchone()[0] full_path = self._full_path(cr, uid, fname) if not count and os.path.exists(full_path): try: 
                os.unlink(full_path)
            except OSError:
                _logger.info("_file_delete could not unlink %s", full_path, exc_info=True)
            except IOError:
                # Harmless and needed for race conditions
                _logger.info("_file_delete could not unlink %s", full_path, exc_info=True)

    def _data_get(self, cr, uid, ids, name, arg, context=None):
        if context is None:
            context = {}
        result = {}
        bin_size = context.get('bin_size')
        for attach in self.browse(cr, uid, ids, context=context):
            if attach.store_fname:
                result[attach.id] = self._file_read(cr, uid, attach.store_fname, bin_size)
            else:
                result[attach.id] = attach.db_datas
        return result

    def _data_set(self, cr, uid, id, name, value, arg, context=None):
        # compute the fields depending on datas, supporting the case of an empty/None datas
        bin_data = value and value.decode('base64') or ''  # empty string to compute its hash
        checksum = self._compute_checksum(bin_data)
        vals = {
            'file_size': len(bin_data),
            'checksum': checksum,
        }
        # We don't handle setting data to null:
        # datas is false, but file_size and checksum are not (computed as datas is an empty string)
        if not value:
            # reset computed fields
            super(ir_attachment, self).write(cr, SUPERUSER_ID, [id], vals, context=context)
            return True
        if context is None:
            context = {}
        # browse the attachment and get the file to delete
        attach = self.browse(cr, uid, id, context=context)
        fname_to_delete = attach.store_fname
        location = self._storage(cr, uid, context)
        # compute the index_content field
        vals['index_content'] = self._index(cr, SUPERUSER_ID, bin_data, attach.datas_fname, attach.mimetype)
        if location != 'db':
            # create the file
            fname = self._file_write(cr, uid, value, checksum)
            vals.update({'store_fname': fname, 'db_datas': False})
        else:
            vals.update({'store_fname': False, 'db_datas': value})
        # SUPERUSER_ID as we probably don't have write access, e.g. when triggered during create
        super(ir_attachment, self).write(cr, SUPERUSER_ID, [id], vals, context=context)
        # After de-referencing the file in the database, check whether we need
        # to garbage-collect it on the filesystem
        if fname_to_delete:
            self._file_delete(cr, uid, fname_to_delete)
        return True

    def _compute_checksum(self, bin_data):
        """ compute the checksum for the given datas
            :param bin_data : datas in its binary form
        """
        if bin_data:
            return hashlib.sha1(bin_data).hexdigest()
        return False

    def _compute_mimetype(self, values):
        """ compute the mimetype of the given values
            :param values : dict of values to create or write an ir_attachment
            :return mime : string indicating the mimetype, or application/octet-stream by default
        """
        mimetype = 'application/octet-stream'
        if values.get('datas_fname'):
            mimetype = mimetypes.guess_type(values['datas_fname'])[0]
        if values.get('datas'):
            mimetype = guess_mimetype(values['datas'].decode('base64'))
        return mimetype

    def _index(self, cr, uid, bin_data, datas_fname, file_type):
        """ compute the index content of the given filename, or binary data.
            This is a python implementation of the unix command 'strings'.
:param bin_data : datas in binary form :return index_content : string containing all the printable character of the binary data """ index_content = False if file_type: index_content = file_type.split('/')[0] if index_content == 'text': # compute index_content only for text type words = re.findall("[^\x00-\x1F\x7F-\xFF]{4,}", bin_data) index_content = ustr("\n".join(words)) return index_content _name = 'ir.attachment' _columns = { 'name': fields.char('Attachment Name', required=True), 'datas_fname': fields.char('File Name'), 'description': fields.text('Description'), 'res_name': fields.function(_name_get_resname, type='char', string='Resource Name', store=True), 'res_model': fields.char( 'Resource Model', readonly=True, help="The database object this attachment will be attached to"), 'res_id': fields.integer('Resource ID', readonly=True, help="The record id this is attached to"), 'create_date': fields.datetime('Date Created', readonly=True), 'create_uid': fields.many2one('res.users', 'Owner', readonly=True), 'company_id': fields.many2one('res.company', 'Company', change_default=True), 'type': fields.selection( [ ('url', 'URL'), ('binary', 'File'), ], 'Type', help= "You can either upload a file from your computer or copy/paste an internet link to your file", required=True, change_default=True), 'url': fields.char('Url', size=1024), # al: We keep shitty field names for backward compatibility with document 'datas': fields.function(_data_get, fnct_inv=_data_set, string='File Content', type="binary", nodrop=True), 'store_fname': fields.char('Stored Filename'), 'db_datas': fields.binary('Database Data'), # computed fields depending on datas 'file_size': fields.integer('File Size', readonly=True), 'checksum': fields.char("Checksum/SHA1", size=40, select=True, readonly=True), 'mimetype': fields.char('Mime Type', readonly=True), 'index_content': fields.text('Indexed Content', readonly=True), } _defaults = { 'type': 'binary', 'file_size': 0, 'mimetype': False, 'company_id': lambda s, cr, uid, c: s.pool.get('res.company')._company_default_get( cr, uid, 'ir.attachment', context=c), } def _auto_init(self, cr, context=None): super(ir_attachment, self)._auto_init(cr, context) cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = %s', ('ir_attachment_res_idx', )) if not cr.fetchone(): cr.execute( 'CREATE INDEX ir_attachment_res_idx ON ir_attachment (res_model, res_id)' ) cr.commit() def check(self, cr, uid, ids, mode, context=None, values=None): """Restricts the access to an ir.attachment, according to referred model In the 'document' module, it is overriden to relax this hard rule, since more complex ones apply there. 
""" res_ids = {} require_employee = False if ids: if isinstance(ids, (int, long)): ids = [ids] cr.execute( 'SELECT DISTINCT res_model, res_id, create_uid FROM ir_attachment WHERE id = ANY (%s)', (ids, )) for rmod, rid, create_uid in cr.fetchall(): if not (rmod and rid): if create_uid != uid: require_employee = True continue res_ids.setdefault(rmod, set()).add(rid) if values: if values.get('res_model') and values.get('res_id'): res_ids.setdefault(values['res_model'], set()).add(values['res_id']) ima = self.pool.get('ir.model.access') for model, mids in res_ids.items(): # ignore attachments that are not attached to a resource anymore when checking access rights # (resource was deleted but attachment was not) if not self.pool.get(model): require_employee = True continue existing_ids = self.pool[model].exists(cr, uid, mids) if len(existing_ids) != len(mids): require_employee = True # For related models, check if we can write to the model, as unlinking # and creating attachments can be seen as an update to the model if (mode in ['unlink', 'create']): ima.check(cr, uid, model, 'write') else: ima.check(cr, uid, model, mode) self.pool[model].check_access_rule(cr, uid, existing_ids, mode, context=context) if require_employee: if not uid == SUPERUSER_ID and not self.pool[ 'res.users'].has_group(cr, uid, 'base.group_user'): raise AccessError( _("Sorry, you are not allowed to access this document.")) def _search(self, cr, uid, args, offset=0, limit=None, order=None, context=None, count=False, access_rights_uid=None): ids = super(ir_attachment, self)._search(cr, uid, args, offset=offset, limit=limit, order=order, context=context, count=False, access_rights_uid=access_rights_uid) if not ids: if count: return 0 return [] # Work with a set, as list.remove() is prohibitive for large lists of documents # (takes 20+ seconds on a db with 100k docs during search_count()!) orig_ids = ids ids = set(ids) # For attachments, the permissions of the document they are attached to # apply, so we must remove attachments for which the user cannot access # the linked document. # Use pure SQL rather than read() as it is about 50% faster for large dbs (100k+ docs), # and the permissions are checked in super() and below anyway. cr.execute( """SELECT id, res_model, res_id FROM ir_attachment WHERE id = ANY(%s)""", (list(ids), )) targets = cr.dictfetchall() model_attachments = {} for target_dict in targets: if not target_dict['res_model']: continue # model_attachments = { 'model': { 'res_id': [id1,id2] } } model_attachments.setdefault(target_dict['res_model'], {}).setdefault( target_dict['res_id'] or 0, set()).add(target_dict['id']) # To avoid multiple queries for each attachment found, checks are # performed in batch as much as possible. 
ima = self.pool.get('ir.model.access') for model, targets in model_attachments.iteritems(): if model not in self.pool: continue if not ima.check(cr, uid, model, 'read', False): # remove all corresponding attachment ids for attach_id in itertools.chain(*targets.values()): ids.remove(attach_id) continue # skip ir.rule processing, these ones are out already # filter ids according to what access rules permit target_ids = targets.keys() allowed_ids = [0] + self.pool[model].search( cr, uid, [('id', 'in', target_ids)], context=context) disallowed_ids = set(target_ids).difference(allowed_ids) for res_id in disallowed_ids: for attach_id in targets[res_id]: ids.remove(attach_id) # sort result according to the original sort ordering result = [id for id in orig_ids if id in ids] return len(result) if count else list(result) def read(self, cr, uid, ids, fields_to_read=None, context=None, load='_classic_read'): if isinstance(ids, (int, long)): ids = [ids] self.check(cr, uid, ids, 'read', context=context) return super(ir_attachment, self).read(cr, uid, ids, fields_to_read, context=context, load=load) def write(self, cr, uid, ids, vals, context=None): if isinstance(ids, (int, long)): ids = [ids] self.check(cr, uid, ids, 'write', context=context, values=vals) # remove computed field depending of datas for field in ['file_size', 'checksum']: vals.pop(field, False) return super(ir_attachment, self).write(cr, uid, ids, vals, context) def copy(self, cr, uid, id, default=None, context=None): self.check(cr, uid, [id], 'write', context=context) return super(ir_attachment, self).copy(cr, uid, id, default, context) def unlink(self, cr, uid, ids, context=None): if isinstance(ids, (int, long)): ids = [ids] self.check(cr, uid, ids, 'unlink', context=context) # First delete in the database, *then* in the filesystem if the # database allowed it. Helps avoid errors when concurrent transactions # are deleting the same file, and some of the transactions are # rolled back by PostgreSQL (due to concurrent updates detection). to_delete = [ a.store_fname for a in self.browse(cr, uid, ids, context=context) if a.store_fname ] res = super(ir_attachment, self).unlink(cr, uid, ids, context) for file_path in to_delete: self._file_delete(cr, uid, file_path) return res def create(self, cr, uid, values, context=None): # remove computed field depending of datas for field in ['file_size', 'checksum']: values.pop(field, False) # if mimetype not given, compute it ! if 'mimetype' not in values: values['mimetype'] = self._compute_mimetype(values) self.check(cr, uid, [], mode='write', context=context, values=values) return super(ir_attachment, self).create(cr, uid, values, context) def action_get(self, cr, uid, context=None): return self.pool.get('ir.actions.act_window').for_xml_id( cr, uid, 'base', 'action_attachment', context=context) def invalidate_bundle(self, cr, uid, type='%', xmlid=None, context=None): assert type in ('%', 'css', 'js'), "Unhandled bundle type" xmlid = '%' if xmlid is None else xmlid + '%' domain = [('url', '=like', '/web/%s/%s/%%' % (type, xmlid))] ids = self.search(cr, uid, domain, context=context) if ids: self.unlink(cr, uid, ids, context=context)
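# Illustrative sketch (standalone, not part of the model above): how a file's content maps
# to its location in the filestore, mirroring _compute_checksum() and _get_path(). The
# filestore root below is a placeholder; the real root comes from tools.config.filestore(cr.dbname).
import hashlib
import os

def filestore_path(bin_data, filestore_root='/tmp/filestore/mydb'):
    """Return (store_fname, full_path) for the given raw bytes."""
    sha = hashlib.sha1(bin_data).hexdigest()
    # files are scattered across 256 directories keyed by the first two hex characters
    fname = sha[:2] + '/' + sha            # value stored in store_fname
    return fname, os.path.join(filestore_root, fname)

# e.g. filestore_path(b'hello') ->
#   ('aa/aaf4c61d...', '/tmp/filestore/mydb/aa/aaf4c61d...')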
_inherit = "res.company" _columns = { ### activate the currency update 'auto_currency_up': fields.boolean('Automatical update of the currency this company'), 'services_to_use' : fields.one2many( 'currency.rate.update.service', 'company_id', 'Currency update services' ), ###predifine cron frequence 'interval_type': fields.selection( [ ('days','Day(s)'), ('weeks', 'Week(s)'), ('months', 'Month(s)') ], 'Currency update frequence', help="""changing this value will also affect other compagnies""" ), ###function field that allows to know the ###mutli company currency implementation 'multi_company_currency_enable' : fields.function( _multi_curr_enable, method=True, type='boolean', string="Multi company currency", help='if this case is not check you can'+\ ' not set currency is active on two company' ), }
#You should have received a copy of the GNU Affero General Public License
#
#along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################

import time

from openerp.osv import orm, fields
from openerp.addons import decimal_precision as dp

FISCAL_RULE_COLUMNS = {
    'partner_fiscal_type_id': fields.many2one(
        'l10n_br_account.partner.fiscal.type', 'Tipo Fiscal do Parceiro'),
    'fiscal_category_id': fields.many2one(
        'l10n_br_account.fiscal.category', 'Categoria', required=True),
    'fiscal_type': fields.selection(
        [('1', 'Simples Nacional'),
         ('2', 'Simples Nacional – excesso de sublimite de receita bruta'),
         ('3', 'Regime Normal')],
        'Regime Tributário', required=True),
    'revenue_start': fields.float(
        'Faturamento Inicial', digits_compute=dp.get_precision('Account'),
        help="Faixa inicial de faturamento bruto"),
    'revenue_end': fields.float(
        'Faturamento Final', digits_compute=dp.get_precision('Account'),
        help="Faixa final de faturamento bruto"),
}

FISCAL_RULE_DEFAULTS = {
    'fiscal_type': '3',
    'revenue_start': 0.00,
    'revenue_end': 0.00,
}
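# Illustrative sketch (assumption, the matching code is not in this excerpt): how the
# revenue_start/revenue_end pair defined above would typically bracket a company's gross
# revenue when selecting an applicable fiscal rule. Treating a revenue_end of 0.00 (the
# default) as "no upper bound" is an assumption for this sketch.
def revenue_matches(rule_vals, company_revenue):
    """Return True if company_revenue falls within the rule's revenue band."""
    start = rule_vals.get('revenue_start') or 0.0
    end = rule_vals.get('revenue_end') or 0.0
    if company_revenue < start:
        return False
    return end == 0.0 or company_revenue <= end

# e.g. revenue_matches({'revenue_start': 0.0, 'revenue_end': 360000.0}, 180000.0) -> True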
    'account.fiscal.attribute', 'fiscal_attribute_rel', 'rule_id', 'attr_id',
    'Fiscal Attributes',
    # TODO this may cause problems, as templates do not have a company_id field
    domain="[('company_id','=',company_id),('fiscal_domain_id','=',fiscal_domain_id)]"),
'use_sale': fields.boolean('Sales Order'),
'use_invoice': fields.boolean('Invoices'),
'use_purchase': fields.boolean('Purchases'),
'use_picking': fields.boolean('Picking'),
'date_start': fields.date(
    'from', help="Starting date for this rule to be valid."),
'date_end': fields.date(
    'until', help="Ending date for this rule to be valid."),
'vat_rule': fields.selection(
    [('with', 'With VAT number'),
     ('both', 'With or Without VAT number'),
     ('without', 'Without VAT number')],
    "VAT Rule",
    help=("Choose whether the customer needs to have the"
          " VAT field filled in to use this fiscal position")),
'sequence': fields.integer(
    'Priority',
    help='Unused, unless you use account replacement. Within a sequence, the rule with '
         'the highest sequence AND an account replacement defined will apply '
         'across all fiscal domains.'),
# --- APPLICATION SECTION ---
# These are "Tax Clusters" applied with some extra magic. They can contain one or more (many2many) clusters
# and typically need to be clustered around one or several Fiscal Domains.
'fiscal_allocation_id': fields.many2many(
    'account.fiscal.allocation', 'fiscal_allocation_rel',
class hr_holidays_status(osv.osv): _name = "hr.holidays.status" _description = "Leave Type" def get_days(self, cr, uid, ids, employee_id, context=None): result = dict((id, dict(max_leaves=0, leaves_taken=0, remaining_leaves=0, virtual_remaining_leaves=0)) for id in ids) holiday_ids = self.pool['hr.holidays'].search(cr, uid, [('employee_id', '=', employee_id), ('state', 'in', ['confirm', 'validate1', 'validate']), ('holiday_status_id', 'in', ids) ], context=context) for holiday in self.pool['hr.holidays'].browse(cr, uid, holiday_ids, context=context): status_dict = result[holiday.holiday_status_id.id] if holiday.type == 'add': status_dict['virtual_remaining_leaves'] += holiday.number_of_days_temp if holiday.state == 'validate': status_dict['max_leaves'] += holiday.number_of_days_temp status_dict['remaining_leaves'] += holiday.number_of_days_temp elif holiday.type == 'remove': # number of days is negative status_dict['virtual_remaining_leaves'] -= holiday.number_of_days_temp if holiday.state == 'validate': status_dict['leaves_taken'] += holiday.number_of_days_temp status_dict['remaining_leaves'] -= holiday.number_of_days_temp return result def _user_left_days(self, cr, uid, ids, name, args, context=None): employee_id = False if context and 'employee_id' in context: employee_id = context['employee_id'] else: employee_ids = self.pool.get('hr.employee').search(cr, uid, [('user_id', '=', uid)], context=context) if employee_ids: employee_id = employee_ids[0] if employee_id: res = self.get_days(cr, uid, ids, employee_id, context=context) else: res = dict((res_id, {'leaves_taken': 0, 'remaining_leaves': 0, 'max_leaves': 0}) for res_id in ids) return res _columns = { 'name': fields.char('Leave Type', size=64, required=True, translate=True), 'categ_id': fields.many2one('calendar.event.type', 'Meeting Type', help='Once a leave is validated, Odoo will create a corresponding meeting of this type in the calendar.'), 'color_name': fields.selection([('red', 'Red'),('blue','Blue'), ('lightgreen', 'Light Green'), ('lightblue','Light Blue'), ('lightyellow', 'Light Yellow'), ('magenta', 'Magenta'),('lightcyan', 'Light Cyan'),('black', 'Black'),('lightpink', 'Light Pink'),('brown', 'Brown'),('violet', 'Violet'),('lightcoral', 'Light Coral'),('lightsalmon', 'Light Salmon'),('lavender', 'Lavender'),('wheat', 'Wheat'),('ivory', 'Ivory')],'Color in Report', required=True, help='This color will be used in the leaves summary located in Reporting\Leaves by Department.'), 'limit': fields.boolean('Allow to Override Limit', help='If you select this check box, the system allows the employees to take more leaves than the available ones for this type and will not take them into account for the "Remaining Legal Leaves" defined on the employee form.'), 'active': fields.boolean('Active', help="If the active field is set to false, it will allow you to hide the leave type without removing it."), 'max_leaves': fields.function(_user_left_days, string='Maximum Allowed', help='This value is given by the sum of all holidays requests with a positive value.', multi='user_left_days'), 'leaves_taken': fields.function(_user_left_days, string='Leaves Already Taken', help='This value is given by the sum of all holidays requests with a negative value.', multi='user_left_days'), 'remaining_leaves': fields.function(_user_left_days, string='Remaining Leaves', help='Maximum Leaves Allowed - Leaves Already Taken', multi='user_left_days'), 'virtual_remaining_leaves': fields.function(_user_left_days, string='Virtual Remaining Leaves', help='Maximum 
Leaves Allowed - Leaves Already Taken - Leaves Waiting Approval', multi='user_left_days'), 'double_validation': fields.boolean('Apply Double Validation', help="When selected, the Allocation/Leave Requests for this type require a second validation to be approved."), } _defaults = { 'color_name': 'red', 'active': True, } def name_get(self, cr, uid, ids, context=None): if context is None: context = {} if not context.get('employee_id',False): # leave counts is based on employee_id, would be inaccurate if not based on correct employee return super(hr_holidays_status, self).name_get(cr, uid, ids, context=context) res = [] for record in self.browse(cr, uid, ids, context=context): name = record.name if not record.limit: name = name + (' (%g/%g)' % (record.leaves_taken or 0.0, record.max_leaves or 0.0)) res.append((record.id, name)) return res
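# Illustrative sketch (standalone, not part of the model above): the balance arithmetic
# performed by get_days(), over plain (type, state, days) tuples instead of hr.holidays
# records. Allocation requests ('add') raise the balances, leave requests ('remove') lower
# them; only validated records touch the confirmed counters, while confirmed-but-unvalidated
# ones only affect the virtual balance.
def leave_balance(requests):
    res = dict(max_leaves=0.0, leaves_taken=0.0,
               remaining_leaves=0.0, virtual_remaining_leaves=0.0)
    for req_type, state, days in requests:
        if req_type == 'add':
            res['virtual_remaining_leaves'] += days
            if state == 'validate':
                res['max_leaves'] += days
                res['remaining_leaves'] += days
        elif req_type == 'remove':
            res['virtual_remaining_leaves'] -= days
            if state == 'validate':
                res['leaves_taken'] += days
                res['remaining_leaves'] -= days
    return res

# e.g. leave_balance([('add', 'validate', 10), ('remove', 'validate', 3),
#                     ('remove', 'confirm', 2)])
#   -> max 10, taken 3, remaining 7, virtual remaining 5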