def run(self, binding_id):
    """ Export the tracking number of a picking to Magento.

    :param binding_id: ID of the Magento picking binding
    :returns: a translated message when there is nothing to export
    :raises FailedJobError: when the carrier or the linked sales order
        is missing (configuration/data error, the job must fail)
    :raises NoExternalId: when the Magento picking ID is not known yet;
        the job will be retried later
    """
    # verify the picking is done + magento id exists
    picking = self.session.browse(self.model._name, binding_id)
    carrier = picking.carrier_id
    if not carrier:
        # Bug fix: the exception instance was *returned*, which marked
        # the job as done with the exception as result; it must be raised.
        raise FailedJobError('The carrier is missing on the picking %s.' %
                             picking.name)
    if not carrier.magento_export_tracking:
        # Graceful no-op: this carrier is configured not to export.
        return _('The carrier %s does not export '
                 'tracking numbers.') % carrier.name
    if not picking.carrier_tracking_ref:
        return _('No tracking number to send.')
    sale_binding_id = picking.magento_order_id
    if not sale_binding_id:
        # Bug fix: raise instead of returning the exception instance.
        raise FailedJobError("No sales order is linked with the picking "
                             "%s, can't export the tracking number." %
                             picking.name)
    magento_picking_id = picking.magento_id
    if magento_picking_id is None:
        raise NoExternalId("No value found for the picking ID on "
                           "Magento side, the job will be retried later.")
    self._validate(picking)
    self._check_allowed_carrier(picking, sale_binding_id.magento_id)
    tracking_args = self._get_tracking_args(picking)
    self.backend_adapter.add_tracking_number(magento_picking_id,
                                             *tracking_args)
def _try_perform_job_custom(self, session_hdl, job):
    """Run *job* inside fresh sessions, storing its state transitions.

    The job is first marked as started (own transaction), then performed
    and marked done (second transaction).  If storing the final state
    hits an ``OperationalError`` (e.g. serialization failure) and the
    job has exhausted its retries, the error is converted to a
    ``FailedJobError`` while keeping the original traceback.
    """
    # Only enqueued jobs may be executed; anything else is a race or a
    # double-dispatch, so just log and bail out.
    if job.state != ENQUEUED:
        _logger.warning(
            'job %s is in state %s '
            'instead of enqueued in /runjob', job.uuid, job.state)
        return
    # First transaction: persist the 'started' state so other workers
    # see the job as taken even if the perform() below is long.
    with session_hdl.session() as session:
        job.set_started()
        self.job_storage_class(session).store(job)
    _logger.debug('%s started', job)
    # Second transaction: actually run the job and store the result.
    with session_hdl.session() as session:
        job.perform(session)
        job.set_done()
        # capture the retry count before store() may fail
        retry = job.retry
        try:
            self.job_storage_class(session).store(job)
        except OperationalError as err:
            if retry >= job.max_retries:
                # NOTE: `traceback` here shadows the stdlib module name
                # inside this function; it is the traceback object.
                type_, value, traceback = sys.exc_info()
                new_exc = FailedJobError(
                    "Max. retries (%d) reached: %s" %
                    (job.max_retries, value or type_))
                # Python 2 three-expression raise: re-raise as
                # FailedJobError but keep the original traceback.
                raise new_exc.__class__, new_exc, traceback
            # retries remain: propagate so the job is retried
            raise err
    _logger.debug('%s done', job)
def get_import_job(session, ctx, model_name, res_id, att_id):
    """Import an xlsx template attachment and archive it in the spool dir.

    :param session: connector session
    :param ctx: context dict passed to the import template model
    :param model_name: model of the import wizard holding the template
    :param res_id: ID of the wizard record
    :param att_id: ID of the ``ir.attachment`` holding the xlsx data
    :returns: translated success message
    :raises FailedJobError: wrapping any error raised during the import
    """
    try:
        # Get attachment
        wizard = session.env[model_name].browse(res_id)
        attachment = session.env['ir.attachment'].browse(att_id)
        Import = session.env['import.xlsx.template'].with_context(ctx)
        xml_ids = Import.import_template(attachment.datas,
                                         wizard.template_id,
                                         wizard.res_model)
        # Link attachment to job queue
        job_uuid = session.context.get('job_uuid')
        job = session.env['queue.job'].search([('uuid', '=', job_uuid)],
                                              limit=1)
        # Get init time (in the job's timezone) for the description
        date_created = fields.Datetime.from_string(job.date_created)
        ts = fields.Datetime.context_timestamp(job, date_created)
        init_time = ts.strftime('%d/%m/%Y %H:%M:%S')
        # Description: display names of all imported records
        names = []
        for xml_id in xml_ids:
            name = session.env.ref(xml_id).display_name
            names.append(name)
        desc = 'INIT: %s\n> UUID: %s' % (init_time, job_uuid)
        attachment.write({
            'name': '%s__%s' % ('_'.join(names), attachment.name),
            'parent_id': session.env.ref('pabi_utils.dir_spool_import').id,
            'description': desc,
        })
        return _('File imported successfully')
    # Fix: 'except Exception, e' is Python-2-only syntax (removed in
    # Python 3); the 'as' form (PEP 3110) works on both.
    except Exception as e:
        raise FailedJobError(e)
def import_one_chunk(session, res_model, att_id, options):
    """Load one CSV chunk from an attachment into *res_model*.

    When the session context carries no language, the current user's
    language is injected so translated fields load correctly.  Any error
    message reported by ``load()`` aborts the job with a
    ``FailedJobError``.  Invoices created this way are immediately
    validated through the 'invoice_open' workflow signal.
    """
    model = session.pool[res_model]
    ctx = session.context.copy()
    if not session.context.get('lang', False):
        ctx['lang'] = session.env.user.lang
    header, rows = _read_csv_attachment(session, att_id, options)
    result = model.load(session.cr, session.uid, header, rows, context=ctx)
    errors = []
    for msg in result['messages']:
        if msg['type'] == 'error':
            errors.append(msg['message'])
    if errors:
        raise FailedJobError('\n'.join(errors))
    if res_model == 'account.invoice' and result['ids']:
        from openerp import workflow
        for rec_id in result['ids']:
            workflow.trg_validate(session.uid, res_model, rec_id,
                                  'invoice_open', session.cr)
    return result
def check(self, record):
    """ Check whether the current sale order should be imported or not.
    It will actually use the payment mode configuration and see if the
    chosen rule is fulfilled.

    :param record: PrestaShop sale order record (dict); the 'payment'
        key holds the PrestaShop payment method name
    :raises FailedJobError: when no ``account.payment.mode`` is bound to
        the PrestaShop payment method
    """
    ps_payment_method = record['payment']
    mode_binder = self.binder_for('account.payment.mode')
    # resolve the PS payment method name to an Odoo payment mode
    payment_mode = mode_binder.to_odoo(ps_payment_method)
    if not payment_mode:
        raise FailedJobError(
            _("The configuration is missing for the Payment Mode '%s'.\n\n"
              "Resolution:\n"
              " - Use the automatic import in 'Connectors > PrestaShop "
              "Backends', button 'Import payment modes', or:\n"
              "\n"
              "- Go to 'Invoicing > Configuration > Management "
              "> Payment Modes'\n"
              "- Create a new Payment Mode with name '%s'\n"
              "-Eventually link the Payment Method to an existing Workflow "
              "Process or create a new one.") % (ps_payment_method,
                                                 ps_payment_method))
    # apply the global rule, then the mode-specific rule; the rules are
    # stored unbound in self._rules, hence the explicit `self` argument
    self._rule_global(record, payment_mode)
    self._rules[payment_mode.import_rule](self, record, payment_mode)
def check(self, record):
    """ Check whether the current sale order should be imported or not.
    It will actually use the payment method configuration and see if the
    chosen rule is fulfilled.

    :param record: sale order record (dict); record['payment']['method']
        holds the payment method name
    :raises FailedJobError: when no ``payment.method`` with that name
        exists
    """
    session = self.session
    payment_method = record['payment']['method']
    # look up the payment method by exact name (old-style ORM API)
    method_ids = session.search('payment.method',
                                [('name', '=', payment_method)])
    if not method_ids:
        raise FailedJobError(
            "The configuration is missing for the Payment Method '%s'.\n\n"
            "Resolution:\n"
            "- Go to 'Sales > Configuration > Sales > Customer Payment Method\n"
            "- Create a new Payment Method with name '%s'\n"
            "-Eventually link the Payment Method to an existing Workflow "
            "Process or create a new one." % (payment_method,
                                              payment_method))
    # first match wins; then apply the global and method-specific rules
    method = session.browse('payment.method', method_ids[0])
    self._rule_global(record, method)
    self._rules[method.import_rule](self, record, method)
def get_report_job(session, model_name, res_id):
    """Render the model's excel report and attach it to the queue job.

    :param session: connector session
    :param model_name: model implementing ``get_report()``
    :param res_id: ID of the record to report on
    :returns: translated success message including the file name
    :raises FailedJobError: wrapping any error raised while rendering
    """
    try:
        out_file, out_name = session.pool[model_name].get_report(
            session.cr, session.uid, [res_id], session.context)
        # Make attachment and link ot job queue
        job_uuid = session.context.get('job_uuid')
        job = session.env['queue.job'].search([('uuid', '=', job_uuid)],
                                              limit=1)
        # Get init time (job timezone) for the description
        date_created = fields.Datetime.from_string(job.date_created)
        ts = fields.Datetime.context_timestamp(job, date_created)
        init_time = ts.strftime('%d/%m/%Y %H:%M:%S')
        # Create output report place holder
        desc = 'INIT: %s\n> UUID: %s' % (init_time, job_uuid)
        session.env['ir.attachment'].create({
            'name': out_name,
            'datas': out_file,
            'datas_fname': out_name,
            'res_model': 'queue.job',
            'res_id': job.id,
            'type': 'binary',
            'parent_id': session.env.ref('pabi_utils.dir_spool_report').id,
            'description': desc,
            'user_id': job.user_id.id,
        })
        # Result Description
        result = _('Successfully created excel report : %s') % out_name
        return result
    # Fix: Python-2-only 'except Exception, e' → PEP 3110 'as' form
    except Exception as e:
        raise FailedJobError(e)
def _validate(self, picking): if picking.state != 'done': # should not happen raise ValueError("Wrong value for picking state, " "it must be 'done', found: %s" % picking.state) if not picking.carrier_id.magento_carrier_code: raise FailedJobError("Wrong value for the Magento carrier code " "defined in the picking.")
def action_generate_budget_plan(session, model_name, res_id):
    """Trigger budget-plan generation on one record as a queue job.

    Any failure is wrapped in a ``FailedJobError`` so the queue marks
    the job as failed with the original error as message.
    """
    try:
        model = session.pool[model_name]
        model.action_generate_budget_plan(session.cr, session.uid,
                                          [res_id], session.context)
        return _("Budget Plan created successfully")
    except Exception as e:
        raise FailedJobError(e)
def perform(self, session):
    """ Execute the job.

    The job is executed with the user which has initiated it.
    The retry counter is incremented on each attempt; when a
    ``RetryableJobError`` is raised and the maximum number of retries is
    reached, it is converted to a ``FailedJobError`` keeping the
    original traceback.

    :param session: session to execute the job
    :type session: ConnectorSession
    """
    assert not self.canceled, "Canceled job"
    # run as the user who created the job
    with session.change_user(self.user_id):
        self.retry += 1
        try:
            # expose the job uuid to the executed function via context
            with session.change_context({'job_uuid': self._uuid}):
                self.result = self.func(session, *self.args, **self.kwargs)
        except RetryableJobError as err:
            if err.ignore_retry:
                # this attempt must not count against max_retries
                self.retry -= 1
                raise
            elif not self.max_retries:  # infinite retries
                raise
            elif self.retry >= self.max_retries:
                # NOTE: `traceback` shadows the stdlib module name here;
                # it is the traceback object from sys.exc_info().
                type_, value, traceback = sys.exc_info()
                # change the exception type but keep the original
                # traceback and message:
                # http://blog.ianbicking.org/2007/09/12/re-raising-exceptions/
                new_exc = FailedJobError(
                    "Max. retries (%d) reached: %s"
                    % (self.max_retries, value or type_))
                # Python 2 three-expression raise form
                raise new_exc.__class__, new_exc, traceback
            # retries remain: re-raise so the job is scheduled again
            raise
        return self.result
def _split_per_language(self, record, fields=None):
    """Split record values by language.

    @param record: a record from PS
    @param fields: fields whitelist
    @return a dictionary with the following structure:

        'en_US': {
            'field1': value_en,
            'field2': value_en,
        },
        'it_IT': {
            'field1': value_it,
            'field2': value_it,
        }

    :raises FailedJobError: when no language mapping exists at all, or
        when a PrestaShop language id has no Odoo counterpart
    """
    split_record = {}
    # mapping of PS language id -> Odoo language code (e.g. 'en_US')
    languages = self.find_each_language(record)
    if not languages:
        raise FailedJobError(
            _('No language mapping defined. '
              'Run "Synchronize base data".')
        )
    model_name = self.connector_env.model_name
    # start each per-language dict as a full copy of the record;
    # translated fields are overwritten below (Py2 iteritems)
    for language_id, language_code in languages.iteritems():
        split_record[language_code] = record.copy()
    _fields = self._translatable_fields[model_name]
    if fields:
        # restrict to the whitelist, preserving declaration order
        _fields = [x for x in _fields if x in fields]
    for field in _fields:
        # each translatable PS field carries one value per language
        for language in record[field]['language']:
            current_id = language['attrs']['id']
            code = languages.get(current_id)
            if not code:
                # TODO: be nicer here.
                # Currently if you have a language in PS
                # that is not present in odoo
                # the basic metadata sync is broken.
                # We should simply skip the language
                # and maybe show a message to users.
                raise FailedJobError(
                    _('No language could be found for the Prestashop lang '
                      'with id "%s". Run "Synchronize base data" again.') %
                    (current_id,)
                )
            split_record[code][field] = language['value']
    return split_record
def action_open_hr_salary(session, model_name, res_id):
    """Open (validate) one HR salary record as a queue job.

    :returns: dict with the processed salary id, for the job result
    :raises FailedJobError: wrapping any error raised by the action
    """
    try:
        session.pool[model_name].action_open(session.cr, session.uid,
                                             [res_id], session.context)
        salary = session.pool[model_name].browse(session.cr, session.uid,
                                                 res_id)
        return {'salary_id': salary.id}
    # Fix: Python-2-only 'except Exception, e' → PEP 3110 'as' form
    except Exception as e:
        raise FailedJobError(e)
def action_generate_recurring_entries(session, model_name, res_id):
    """Generate recurring journal entries for one record as a queue job.

    :returns: dict with the ids of the generated moves, for the job
        result
    :raises FailedJobError: wrapping any error raised by the generation
    """
    try:
        session.pool[model_name].action_generate(session.cr, session.uid,
                                                 [res_id], session.context)
        entry = session.pool[model_name].browse(session.cr, session.uid,
                                                res_id)
        return {'move_ids': entry.move_ids.ids}
    # Fix: Python-2-only 'except Exception, e' → PEP 3110 'as' form
    except Exception as e:
        raise FailedJobError(e)
def send_odoo_message(self, model, function, code, message):
    """Invoke *function* on *model* of the remote Odoo via JSON-RPC.

    Logs in first through ``try_connexion`` (raising on failure) and
    wraps protocol errors in ``FailedJobError``.
    """
    server, result, login = self.try_connexion(raise_error=True)
    call_args = [
        self.database,
        login,
        self.password,
        model,
        function,
        code,
        message,
    ]
    try:
        return server.call(service='object', method='execute',
                           args=call_args)
    except jsonrpclib.ProtocolError:
        raise FailedJobError(self._return_last_jsonrpclib_error())
def action_purchase_create_invoice(session, model_name, res_id):
    """Create the invoice(s) for one purchase order as a queue job.

    :returns: dict with the ids of the invoices linked to the purchase,
        for the job result
    :raises FailedJobError: wrapping any error raised by the creation
    """
    try:
        session.pool[model_name].\
            action_invoice_create(session.cr, session.uid,
                                  [res_id], session.context)
        purchase = session.pool[model_name].browse(session.cr, session.uid,
                                                   res_id)
        invoice_ids = [x.id for x in purchase.invoice_ids]
        return {'invoice_ids': invoice_ids}
    # Fix: Python-2-only 'except Exception, e' → PEP 3110 'as' form
    except Exception as e:
        raise FailedJobError(e)
def _run(self, website_id=None): """ Export the product inventory to Magento :param website_id: if None, export on all websites, or OpenERP ID for the website to update """ # export of products is not implemented so we just raise # if the export was existing, we would export it assert self.magento_id, "Record has been deleted in Magento" pricelist = self.backend_record.pricelist_id if not pricelist: name = self.backend_record.name raise FailedJobError( 'Configuration Error:\n' 'No pricelist configured on the backend %s.\n\n' 'Resolution:\n' 'Go to Connectors > Backends > %s.\n' 'Choose a pricelist.' % (name, name)) pricelist_id = pricelist.id # export the price for websites if they have a different # pricelist storeview_binder = self.get_binder_for_model('magento.storeview') for website in self.backend_record.website_ids: if website_id is not None and website.id != website_id: continue # 0 is the admin website, the update on this website # set the default values in Magento, we use the default # pricelist site_pricelist_id = None if website.magento_id == '0': site_pricelist_id = pricelist_id elif website.pricelist_id: site_pricelist_id = website.pricelist_id.id # The update of the prices in Magento is very weird: # - The price is different per website (if the option # is active in the config), but is shared between # the store views of a website. # - BUT the Magento API expects a storeview id to modify # a price on a website (and not a website id...) # So we take the first storeview of the website to update. storeview_ids = self.session.search( 'magento.storeview', [('store_id.website_id', '=', website.id)]) if not storeview_ids: continue magento_storeview = storeview_binder.to_backend(storeview_ids[0]) price = self._get_price(site_pricelist_id) self._update({'price': price}, storeview_id=magento_storeview) self.binder.bind(self.magento_id, self.binding_id) return _('Prices have been updated.')
def pabi_action_job(session, model_name, func_name, kwargs, return_action):
    """Run an arbitrary PabiAction method as a queue job.

    The called method must return ``(records, result_msg)``; the job
    record is updated with the processed model/ids so users can open
    the results from the job form.

    :returns: the method's result message (or a default success text)
    :raises FailedJobError: wrapping any error raised by the method
    """
    try:
        PabiAction = session.env[model_name]
        (records, result_msg) = getattr(PabiAction, func_name)(**kwargs)
        # Write result back to job
        job_uuid = session.context.get('job_uuid')
        job = session.env['queue.job'].search([('uuid', '=', job_uuid)])
        job.write({'res_model': records._name, 'res_ids': str(records.ids)})
        # Result Description
        result = result_msg or _('Successfully execute process')
        return result
    # Fix: Python-2-only 'except Exception, e' → PEP 3110 'as' form
    except Exception as e:
        raise FailedJobError(e)
def _import_dependencies(self): record = self.prestashop_record try: self._import_dependency(record['id_supplier'], 'prestashop.supplier') self._import_dependency(record['id_product'], 'prestashop.product.template') if record['id_product_attribute'] != '0': self._import_dependency(record['id_product_attribute'], 'prestashop.product.combination') except PrestaShopWebServiceError: raise FailedJobError('Error fetching a dependency')
def import_one_chunk(session, res_model, att_id, options):
    """Load one CSV chunk from an attachment into *res_model*.

    Raises ``FailedJobError`` with all error messages joined when the
    ORM ``load()`` reports any error; otherwise returns the raw load
    result (ids + messages).
    """
    model = session.pool[res_model]
    header, rows = _read_csv_attachment(session, att_id, options)
    result = model.load(session.cr, session.uid, header, rows,
                        context=session.context)
    errors = [m['message'] for m in result['messages']
              if m['type'] == 'error']
    if errors:
        raise FailedJobError('\n'.join(errors))
    return result
def run(self, binding_id):
    """ Export the tracking number of a picking to Magento.

    If the picking itself has not been exported yet, exports it first,
    then sends the tracking number.

    :param binding_id: ID of the Magento picking binding
    :returns: a translated message when there is nothing to export
    :raises FailedJobError: on configuration/data errors (missing
        carrier, missing sales order, or no Magento ID after export)
    """
    # verify the picking is done + magento id exists
    picking = self.session.browse(self.model._name, binding_id)
    carrier = picking.carrier_id
    if not carrier:
        # Bug fix: the exception instance was *returned*, marking the
        # job done with the exception as result; it must be raised.
        raise FailedJobError('The carrier is missing on the picking %s.' %
                             picking.name)
    if not carrier.magento_export_tracking:
        # Graceful no-op: this carrier is configured not to export.
        return _('The carrier %s does not export '
                 'tracking numbers.') % carrier.name
    if not picking.carrier_tracking_ref:
        return _('No tracking number to send.')
    sale_binding_id = picking.magento_order_id
    if not sale_binding_id:
        # Bug fix: raise instead of returning the exception instance.
        raise FailedJobError("No sales order is linked with the picking "
                             "%s, can't export the tracking number." %
                             picking.name)
    binder = self.get_binder_for_model()
    magento_id = binder.to_backend(binding_id)
    if not magento_id:
        # the picking has not yet been exported: export it now and
        # look up the Magento ID again
        # avoid circular reference
        from .stock_picking import MagentoPickingExport
        picking_exporter = self.get_connector_unit_for_model(
            MagentoPickingExport)
        picking_exporter.run(binding_id)
        magento_id = binder.to_backend(binding_id)
    if not magento_id:
        # Bug fix: raise instead of returning the exception instance.
        raise FailedJobError("The delivery order %s has no Magento ID, "
                             "can't export the tracking number." %
                             picking.name)
    self._validate(picking)
    self._check_allowed_carrier(picking, sale_binding_id.magento_id)
    tracking_args = self._get_tracking_args(picking)
    self.backend_adapter.add_tracking_number(magento_id, *tracking_args)
def run(self, binding_id):
    """ Export the tracking number of a picking to Shopware.

    If the picking itself has not been exported yet, exports it first,
    then sends the tracking number.

    :param binding_id: ID of the Shopware picking binding
    :returns: a translated message when there is nothing to export
    :raises FailedJobError: on configuration/data errors (missing
        carrier, missing sales order, or no Shopware ID after export)
    """
    # verify the picking is done + shopware id exists
    picking = self.model.browse(binding_id)
    carrier = picking.carrier_id
    if not carrier:
        # Bug fix: the exception instance was *returned*, marking the
        # job done with the exception as result; it must be raised.
        raise FailedJobError('The carrier is missing on the picking %s.' %
                             picking.name)
    if not carrier.shopware_export_tracking:
        # Graceful no-op: this carrier is configured not to export.
        return _('The carrier %s does not export '
                 'tracking numbers.') % carrier.name
    if not picking.carrier_tracking_ref:
        return _('No tracking number to send.')
    sale_binding_id = picking.shopware_order_id
    if not sale_binding_id:
        # Bug fix: raise instead of returning the exception instance.
        raise FailedJobError("No sales order is linked with the picking "
                             "%s, can't export the tracking number." %
                             picking.name)
    binder = self.binder_for()
    shopware_id = binder.to_backend(binding_id)
    if not shopware_id:
        # the picking has not yet been exported: export it now and
        # look up the Shopware ID again
        # avoid circular reference
        from .stock_picking import ShopwarePickingExport
        picking_exporter = self.unit_for(ShopwarePickingExport)
        picking_exporter.run(binding_id)
        shopware_id = binder.to_backend(binding_id)
    if not shopware_id:
        # Bug fix: raise instead of returning the exception instance.
        raise FailedJobError("The delivery order %s has no Shopware ID, "
                             "can't export the tracking number." %
                             picking.name)
    self._validate(picking)
    self._check_allowed_carrier(picking, sale_binding_id.shopware_id)
    tracking_args = self._get_tracking_args(picking)
    self.backend_adapter.add_tracking_number(shopware_id, *tracking_args)
def skeletonSetId(self, record):
    """Return the Magento ``skeletonSetId`` mapping value.

    Resolves the backend's attribute-set template to its Magento ID.

    :returns: dict with the Magento attribute set ID
    :raises FailedJobError: when no attribute set template is configured
        on the backend
    """
    tmpl_set_id = self.backend_record.attribute_set_tpl_id.id
    if not tmpl_set_id:
        # Fix: corrected typos in the user-facing message
        # ("must be define", "Attribte set Tempalte")
        raise FailedJobError((
            "'Attribute set template' field must be defined on "
            "the backend.\n\n"
            "Resolution: \n"
            "- Go to Connectors > Magento > Backends > '%s'\n"
            "- Fill the field 'Attribute set template'\n"
        ) % self.backend_record.name)
    binder = self.get_binder_for_model('magento.attribute.set')
    magento_tpl_set_id = binder.to_backend(tmpl_set_id)
    return {'skeletonSetId': magento_tpl_set_id}
def import_one_chunk(session, res_model, att_id, options):
    """Load one CSV chunk from an attachment into *res_model*.

    When the session context has no language set, the current user's
    language is injected so translated fields load correctly.  All
    error messages from ``load()`` are collected and raised together as
    a ``FailedJobError``.
    """
    model = session.pool[res_model]
    ctx = session.context.copy()
    if not session.context.get('lang', False):
        ctx['lang'] = session.env.user.lang
    header, rows = _read_csv_attachment(session, att_id, options)
    result = model.load(session.cr, session.uid, header, rows, context=ctx)
    errors = []
    for msg in result['messages']:
        if msg['type'] == 'error':
            errors.append(msg['message'])
    if errors:
        raise FailedJobError('\n'.join(errors))
    return result
def _check_allowed_carrier(self, picking, magento_id):
    """Verify Magento accepts the picking's carrier code.

    :param picking: the picking whose carrier must be checked
    :param magento_id: Magento ID of the sales order, used to fetch the
        carriers Magento accepts for it
    :raises FailedJobError: when the carrier code is not in the list of
        carriers allowed by Magento for this order
    """
    allowed_carriers = self.backend_adapter.get_carriers(magento_id)
    carrier = picking.carrier_id
    if carrier.magento_carrier_code not in allowed_carriers:
        raise FailedJobError("The carrier %(name)s does not accept "
                             "tracking numbers on Magento.\n\n"
                             "Tracking codes accepted by Magento:\n"
                             "%(allowed)s.\n\n"
                             "Actual tracking code:\n%(code)s\n\n"
                             "Resolution:\n"
                             "* Add support of %(code)s in Magento\n"
                             "* Or deactivate the export of tracking "
                             "numbers in the setup of the carrier "
                             "%(name)s." %
                             {'name': carrier.name,
                              'allowed': allowed_carriers,
                              'code': carrier.magento_carrier_code})
def action_run_tax_report(session, data, format):
    """Render the Thai VAT report and attach it to the queue job.

    :param session: connector session
    :param data: dict with 'report_name' and the report 'datas'
    :param format: 'pdf' or anything else (treated as xls); note the
        parameter name shadows the builtin but is kept for caller
        compatibility
    :returns: translated success message
    :raises FailedJobError: wrapping any error raised while rendering
    """
    try:
        # Render Report
        rpt_name = 'pabi_th_tax_report.%s' % data['report_name']
        report = session.env.ref(rpt_name)
        data = data['datas']
        data['model'] = 'account.tax.report'  # model is required, even sql
        result, _x = openerp.report.render_report(session.cr, session.uid,
                                                  [], report.report_name,
                                                  data)
        # Make attachment and link ot job queue
        job_uuid = session.context.get('job_uuid')
        job = session.env['queue.job'].search([('uuid', '=', job_uuid)],
                                              limit=1)
        result = base64.b64encode(result)
        file_name = 'VAT Report'
        # sanitize the file name for the filesystem/attachment
        file_name = re.sub(r'[^a-zA-Z0-9_-]', '_', file_name)
        file_name += format == 'pdf' and '.pdf' or '.xls'
        # Get init time (job timezone) for the description
        date_created = fields.Datetime.from_string(job.date_created)
        ts = fields.Datetime.context_timestamp(job, date_created)
        init_time = ts.strftime('%d/%m/%Y %H:%M:%S')
        # Description
        desc = 'INIT: %s\n> UUID: %s' % (init_time, job_uuid)
        session.env['ir.attachment'].create({
            'name': file_name,
            'datas': result,
            'datas_fname': file_name,
            'res_model': 'queue.job',
            'res_id': job.id,
            'type': 'binary',
            'parent_id': session.env.ref('pabi_utils.dir_spool_report').id,
            'description': desc,
            'user_id': job.user_id.id,
        })
        return _('Tax Report created successfully')
    # Fix: Python-2-only 'except Exception, e' → PEP 3110 'as' form
    except Exception as e:
        raise FailedJobError(e)  # Queue Error
def get_import_job(session, model_name, ctx, res_id, att_id):
    """Import an xlsx template attachment and record the result on the job.

    :param session: connector session
    :param model_name: model of the import wizard holding the template
    :param ctx: context dict passed to the import template model
    :param res_id: ID of the wizard record
    :param att_id: ID of the ``ir.attachment`` holding the xlsx data
    :returns: translated success message with file and record names
    :raises FailedJobError: wrapping any error raised during the import
    """
    try:
        # Process Import File
        wizard = session.env[model_name].browse(res_id)
        attachment = session.env['ir.attachment'].browse(att_id)
        Import = session.env['import.xlsx.template'].with_context(ctx)
        record = Import.import_template(attachment.datas,
                                        wizard.template_id,
                                        wizard.res_model)
        # Write result back to job
        job_uuid = session.context.get('job_uuid')
        job = session.env['queue.job'].search([('uuid', '=', job_uuid)])
        job.write({'res_model': record._name, 'res_ids': [record.id]})
        # Result Description
        result = _('Successfully imported excel file : %s for %s') % \
            (attachment.name, record.display_name)
        return result
    # Fix: Python-2-only 'except Exception, e' → PEP 3110 'as' form
    except Exception as e:
        raise FailedJobError(e)
def try_connexion(self, raise_error=False):
    """Attempt a JSON-RPC login on the configured remote Odoo server.

    :param raise_error: when True, raise ``FailedJobError`` if the
        login does not succeed
    :returns: tuple ``(server, result, connection)`` where *result* is
        u"OK" on success, an error text otherwise, and *connection* is
        the uid returned by the login (0/False on failure)
    """
    self.ensure_one()
    if self.port:
        url = "%s:%s/jsonrpc" % (self.url, self.port)
    else:
        url = "%s/jsonrpc" % (self.url)
    server = jsonrpclib.Server(url)
    connection = False
    try:
        credentials = [self.database, self.login, self.password]
        connection = server.call(service='common', method='login',
                                 args=credentials)
        # a uid of 0 means the credentials were refused
        result = u"OK" if connection != 0 else u"Error Login/Password"
    except socket.error as err:
        result = err.strerror
    except jsonrpclib.ProtocolError:
        result = self._return_last_jsonrpclib_error()
    if raise_error and result != "OK":
        raise FailedJobError(result)
    return (server, result, connection)
def _rule_state(self, record, mode):
    """Check if order is importable by its state.

    If `backend_record.importable_order_state_ids` is valued
    we check if current order is in the list.
    If not, the job fails gracefully.

    :param record: PrestaShop sale order record (dict)
    :param mode: payment mode (unused here, part of the rule signature)
    :raises FailedJobError: when the PS state has no Odoo mapping
    :raises NothingToDoJob: when the state is not in the importable list
    """
    # empty importable_order_state_ids means "import everything"
    if self.backend_record.importable_order_state_ids:
        ps_state_id = record['current_state']
        state = self.binder_for('prestashop.sale.order.state').to_odoo(
            ps_state_id, unwrap=1)
        if not state:
            raise FailedJobError(
                _("The configuration is missing "
                  "for sale order state with PS ID=%s.\n\n"
                  "Resolution:\n"
                  " - Use the automatic import in 'Connectors > PrestaShop "
                  "Backends', button 'Synchronize base data'.") %
                (ps_state_id, ))
        if state not in self.backend_record.importable_order_state_ids:
            # graceful skip: the job ends without importing anything
            raise NothingToDoJob(
                _('Import of the order with PS ID=%s canceled '
                  'because its state is not importable') % record['id'])
def check(self, record):
    """ Check whether the current sale order should be imported or not.
    It will actually use the payment method configuration.

    Returns None when the payment method exists; raises otherwise.

    NOTE(review): unlike the sibling ``check`` implementations, this one
    does not call ``_rule_global``/``_rules`` after the lookup — confirm
    whether the rule dispatch was intentionally dropped.

    :param record: sale order record (dict); record['payment'] holds the
        payment method name
    :raises FailedJobError: when no ``account.payment.method`` with that
        name exists
    """
    session = self.session
    payment_method = record['payment']
    # new-API search on the payment method name
    methods = session.env['account.payment.method'].search([
        ('name', '=', payment_method)
    ])
    if not methods:
        raise FailedJobError(
            "The configuration is missing for the Payment Method '%s'.\n\n"
            "Resolution:\n"
            "- Go to 'Accounting > Configuration > Management > Payment Methods "
            "Method'\n"
            "- Create a new Payment Method with name '%s'\n"
            "-Eventually link the Payment Method to an existing Workflow "
            "Process or create a new one." % (payment_method,
                                              payment_method))
def import_document(session, model_name, args):
    """Import one chunk of a Google Spreadsheet into an ERP model.

    Reads the cell range described by *args* from the spreadsheet,
    matches the header row against the model's importable fields, loads
    the data through the ORM ``load()``, writes any error messages back
    into the spreadsheet's error column, and returns a textual summary.

    :param session: connector session
    :param model_name: job model name (unused; the target model comes
        from args['erp_model'])
    :param args: dict with backend/document/sheet references, the
        chunk's row/column bounds, the header 'fields' and 'error_col'
    :returns: newline-joined log of messages and imported ids
    :raises FailedJobError: when ``load()`` reported at least one error
    """
    import_obj = session.pool['base_import.import']
    model_obj = session.pool[args['erp_model']]
    backend_id = args['backend_id']
    document_url = args['document_url']
    document_sheet = args['document_sheet']
    fields = args['fields']
    row_start = args['chunk_row_start']
    row_end = args['chunk_row_end']
    col_start = args['sheet_col_start']
    col_end = args['sheet_col_end']
    error_col = args['error_col']
    backend = session.browse('google.spreadsheet.backend', backend_id)
    document = open_document(backend, document_url)
    sheet = document.worksheet(document_sheet)
    # fetch the whole chunk in one range request (A1-style addresses)
    start = sheet.get_addr_int(row_start, col_start)
    stop = sheet.get_addr_int(row_end, col_end)
    chunk = sheet.range(start + ':' + stop)
    cols = col_end - col_start + 1
    rows = row_end - row_start + 1
    # materialize the sparse cell list into a dense 2D matrix
    data = [['' for c in range(cols)] for r in range(rows)]
    for cell in chunk:
        i = cell.row - row_start
        j = cell.col - col_start
        data[i][j] = cell.value
    available_fields = import_obj.get_fields(session.cr, session.uid,
                                             model_obj._name,
                                             context=session.context,
                                             depth=FIELDS_RECURSION_LIMIT)
    # _match_headers expects an iterator of header rows
    headers_raw = iter([fields])
    headers_raw, headers_match = import_obj._match_headers(
        headers_raw, available_fields, options={'headers': True},
    )
    # rebuild the field list in column order; unmatched columns are False
    fields = [False] * len(headers_match)
    for indice, header in headers_match.items():
        if isinstance(header, list) and len(header):
            fields[indice] = '/'.join(header)
        else:
            fields[indice] = False
    data, import_fields = convert_import_data(data, fields)
    # import the chunk of clean data
    result = model_obj.load(session.cr, session.uid, import_fields, data,
                            context=session.context)
    # clear previous errors
    error_cells = None
    if error_col is not None:
        start = sheet.get_addr_int(row_start, error_col)
        stop = sheet.get_addr_int(row_end, error_col)
        error_cells = sheet.range(start + ':' + stop)
        for cell in error_cells:
            cell.value = ''
    # log errors: one line per affected spreadsheet row
    errors = False
    messages = []
    for m in result['messages']:
        # message row indexes are relative to the chunk
        row_from = row_start + m['rows']['from']
        row_to = row_start + m['rows']['to']
        for row in range(row_from, row_to + 1):
            message = m['message']
            message_type = m['type']
            messages.append('%s:line %i: %s' % (message_type, row, message))
            if message_type == 'error':
                errors = True
                if error_cells:
                    # write the error next to the offending row
                    for cell in error_cells:
                        if cell.row == row:
                            cell.value = backend.format_spreadsheet_error(
                                message)
                            break
    if error_cells:
        # push the (cleared and/or filled) error column back in one call
        sheet.update_cells(error_cells)
    if errors:
        # NOTE(review): a list is passed to FailedJobError here, unlike
        # the joined-string convention elsewhere — confirm intended.
        raise FailedJobError(messages)
    else:
        imported_ids = ', '.join([str(id_) for id_ in result['ids']])
        messages.append('Imported/Updated ids: %s' % imported_ids)
    return '\n'.join(messages)