def receive_message(self, code, message_json, jobify=True):
    try:
        backend = self.env['bus.configuration'].search([('code', '=', code)])
        if not backend:
            raise exceptions.ValidationError(u"No backend found: %s" % code)
        dict_message = json.loads(message_json, encoding='utf-8')
        # search for a parent message id
        # 1. look for a sent message with the same cross id origin (ex: master:1>master:3)
        # 2. look for a sent message in the upper level of cross id origin (ex: master:1)
        parent_message = self.env['bus.message'].get_same_cross_id_messages(
            dict_message)._get_first_sent_message()
        if not parent_message:
            parent_message = self.env['bus.message'].get_parent_cross_id_messages(
                dict_message)._get_first_sent_message()
        parent_message_id = parent_message.id if parent_message else False
        message = self.env['bus.message'].create_message(
            dict_message, 'received', backend, parent_message_id)
        if jobify:
            job_uuid = job_receive_message.delay(
                ConnectorSession.from_env(self.env), self._name, message.id)
        else:
            job_uuid = job_receive_message(
                ConnectorSession.from_env(self.env), self._name, message.id)
        result = u"Received message %s, processed by job %s" % (message.id, job_uuid)
        to_raise = False
    except (exceptions.ValidationError, AttributeError, TypeError) as error:
        result = error
        to_raise = True
    if to_raise:
        raise exceptions.except_orm(u"Reception Error", result)
    return result
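# --- Sketch, not from the original module: the snippets in this collection
# delay functions registered with the OCA connector's @job decorator
# (Odoo 8/9 API). A minimal declaration for job_receive_message could look
# like this; the channel name and the `process_received_message` helper are
# illustrative assumptions.
from openerp.addons.connector.queue.job import job

@job(default_channel='root')  # assumed channel, not from the source
def job_receive_message(session, model_name, message_id):
    """Process one received bus.message in a background worker (sketch)."""
    # session.env is a regular Odoo Environment bound to the job's cursor
    return session.env[model_name].process_received_message(message_id)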
def update_remaining_qty(self, jobify=True):
    list_lines_to_synchronize = []
    for rec in self:
        prec = rec.product_uom.rounding
        delivered_qty, remaining_qty, returned_qty, qty_running_pol_uom = \
            rec.compute_remaining_qty()
        if float_compare(rec.sent_qty, delivered_qty, precision_rounding=prec) != 0 or \
                float_compare(rec.remaining_qty, remaining_qty, precision_rounding=prec) != 0:
            rec.write({
                'sent_qty': delivered_qty,
                'remaining_qty': remaining_qty,
            })
        if rec.order_id.state in ['progress', 'manual'] and \
                float_compare(qty_running_pol_uom, remaining_qty, precision_rounding=prec) != 0:
            list_lines_to_synchronize += [(rec.id, returned_qty)]
    while list_lines_to_synchronize:
        chunk_list_lines_to_synchronize = list_lines_to_synchronize[:100]
        if jobify:
            job_synchronize_lines.delay(
                ConnectorSession.from_env(self.env), 'sale.order.line',
                chunk_list_lines_to_synchronize, dict(self.env.context))
        else:
            job_synchronize_lines(
                ConnectorSession.from_env(self.env), 'sale.order.line',
                chunk_list_lines_to_synchronize, dict(self.env.context))
        list_lines_to_synchronize = list_lines_to_synchronize[100:]
def run_confirm_moves(self):
    group_draft_moves = {}
    all_draft_moves = self.env['stock.move'].search(
        [('state', '=', 'draft')], limit=None,
        order='priority desc, date_expected asc')
    all_draft_moves_ids = all_draft_moves.read(
        ['id', 'group_id', 'location_id', 'location_dest_id'], load=False)
    for move in all_draft_moves_ids:
        key = (move['group_id'], move['location_id'], move['location_dest_id'])
        group_draft_moves.setdefault(key, []).append(move['id'])
    for draft_move_ids in group_draft_moves.values():
        if self.env.context.get('jobify'):
            confirm_moves.delay(ConnectorSession.from_env(self.env),
                                'stock.move', draft_move_ids,
                                dict(self.env.context))
        else:
            confirm_moves(ConnectorSession.from_env(self.env), 'stock.move',
                          draft_move_ids, dict(self.env.context))
def cron_update_active_line(self, jobify=True):
    if jobify:
        job_update_active_line.delay(ConnectorSession.from_env(self.env),
                                     'pricelist.partnerinfo')
    else:
        job_update_active_line(ConnectorSession.from_env(self.env),
                               'pricelist.partnerinfo')
def update_scheduler_controller(self, jobify=True, run_procurements=True):
    max_sequence = self.search(
        [('done', '=', False)],
        order='location_sequence desc, route_sequence desc', limit=1)
    if max_sequence:
        max_location_sequence = max_sequence.location_sequence
        max_route_sequence = max_sequence.route_sequence
        is_procs_confirmation_ok = self.env[
            'procurement.order'].is_procs_confirmation_ok()
        is_moves_confirmation_ok = self.env[
            'procurement.order'].is_moves_confirmation_ok()
        if is_procs_confirmation_ok and is_moves_confirmation_ok:
            controller_lines_no_run = self.search([
                ('done', '=', False),
                ('job_uuid', '=', False),
                ('location_sequence', '=', max_location_sequence),
                ('route_sequence', '=', max_route_sequence),
                ('run_procs', '=', False),
            ])
            for line in controller_lines_no_run:
                if jobify:
                    job_uuid = process_orderpoints.delay(
                        ConnectorSession.from_env(self.env),
                        'stock.warehouse.orderpoint',
                        line.orderpoint_id.ids, dict(self.env.context),
                        description="Computing orderpoints for product %s and location %s" %
                                    (line.product_id.display_name,
                                     line.location_id.display_name))
                    line.write({
                        'job_uuid': job_uuid,
                        'job_creation_date': fields.Datetime.now(),
                    })
                else:
                    line.job_uuid = str(line.orderpoint_id.id)
                    self.env.context = dict(self.env.context,
                                            job_uuid=line.job_uuid)
                    process_orderpoints(
                        ConnectorSession.from_env(self.env),
                        'stock.warehouse.orderpoint',
                        line.orderpoint_id.ids, dict(self.env.context))
            if not controller_lines_no_run:
                controller_lines_run_procs = self.search([
                    ('done', '=', False),
                    ('location_sequence', '=', max_location_sequence),
                    ('route_sequence', '=', max_route_sequence),
                    ('run_procs', '=', True),
                ])
                if controller_lines_run_procs:
                    if run_procurements:
                        self.env['procurement.order'].with_context(
                            jobify=jobify).run_confirm_procurements()
                    else:
                        _logger.info(u"No procurement confirmation required")
                    controller_lines_run_procs.set_to_done()
def cron_compute_remaining_qties(self):
    orders = self.env['sale.order'].search(
        [('state', 'not in', ('done', 'cancel'))])
    for order_id in orders.ids:
        if self.env.context.get('jobify'):
            job_update_remaining_qty.delay(
                ConnectorSession.from_env(self.env), 'sale.order', order_id)
        else:
            job_update_remaining_qty(
                ConnectorSession.from_env(self.env), 'sale.order', order_id)
def cron_get_new_refresh_token(self, jobify=True):
    """ Cron to refresh the refresh_token every month. """
    if jobify:
        job_get_refresh_token.delay(ConnectorSession.from_env(self.env),
                                    'knowledge.config.settings',
                                    dict(self.env.context))
    else:
        job_get_refresh_token(ConnectorSession.from_env(self.env),
                              'knowledge.config.settings',
                              dict(self.env.context))
def create_needed_orderpoints(self, domain=None, jobify=True):
    domain = domain or []
    products = self.search(domain)
    if jobify:
        while products:
            chunk_products = products[:100]
            products = products[100:]
            create_needed_orderpoint_for_product.delay(
                ConnectorSession.from_env(self.env), 'product.product',
                chunk_products.ids, dict(self.env.context))
    else:
        create_needed_orderpoint_for_product(
            ConnectorSession.from_env(self.env), 'product.product',
            products.ids, dict(self.env.context))
def run_assign_moves(self):
    confirmed_moves = self.env['stock.move'].search(
        [('state', '=', 'confirmed')], limit=None,
        order='priority desc, date_expected asc')
    while confirmed_moves:
        chunk_moves = confirmed_moves[:100]
        if self.env.context.get('jobify'):
            assign_moves.delay(ConnectorSession.from_env(self.env),
                               'stock.move', chunk_moves.ids,
                               dict(self.env.context))
        else:
            assign_moves(ConnectorSession.from_env(self.env), 'stock.move',
                         chunk_moves.ids, dict(self.env.context))
        confirmed_moves = confirmed_moves[100:]
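# --- Sketch, not part of the original code: several methods above repeat
# the same slice-by-100 loop. A hypothetical helper factoring out that
# chunking (works for recordsets and lists alike, since both support
# slicing) could be shared by run_assign_moves and the other batch methods.
def iter_chunks(records, size=100):
    """Yield successive slices of at most `size` elements."""
    while records:
        yield records[:size]
        records = records[size:]

# usage sketch for run_assign_moves:
# for chunk_moves in iter_chunks(confirmed_moves):
#     assign_moves.delay(ConnectorSession.from_env(self.env), 'stock.move',
#                        chunk_moves.ids, dict(self.env.context))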
def run_schedule_button_update(self):
    self.env.cr.execute("""WITH mrp_moves_details AS (
            SELECT mrp.id AS mrp_id,
                   sm.state AS raw_move_state,
                   sm_orig.state AS service_move_state
            FROM mrp_production mrp
            LEFT JOIN stock_move sm ON sm.raw_material_production_id = mrp.id
            LEFT JOIN stock_move sm_orig ON sm_orig.move_dest_id = sm.id
            WHERE mrp.state IN ('ready', 'confirmed'))
        SELECT mrp_id
        FROM mrp_moves_details
        GROUP BY mrp_id
        HAVING sum(CASE WHEN raw_move_state = 'done' OR service_move_state = 'done'
                        THEN 1 ELSE 0 END) <= 0""")
    fetchall = self.env.cr.fetchall()
    mrp_to_update_ids = [item[0] for item in fetchall]
    chunk_number = 0
    while mrp_to_update_ids:
        chunk_number += 1
        mrp_chunk_ids = mrp_to_update_ids[:100]
        run_mrp_production_update.delay(
            ConnectorSession.from_env(self.env), 'mrp.production',
            mrp_chunk_ids, dict(self.env.context),
            description=u"MRP Production Update (chunk %s)" % chunk_number)
        mrp_to_update_ids = mrp_to_update_ids[100:]
def button_import(self):
    """ Analyze the imports in order to create the letters' lines. """
    if not self.manual_import:
        # when letters are in a folder on the NAS, redefine the method
        for letters_import in self:
            letters_import.state = 'pending'
            if self.env.context.get('async_mode', True):
                session = ConnectorSession.from_env(self.env)
                ilh.import_letters_job.delay(
                    session, self._name, letters_import.id)
            else:
                letters_import._run_analyze()
        return True
    else:
        # when letters are selected by the user, save them on the NAS and
        # call the super method
        for letters_import in self:
            if letters_import.data and self.env.context.get(
                    'async_mode', True):
                for attachment in letters_import.data:
                    self._save_imported_letter(attachment)
        return super(ImportLettersHistory, self).button_import()
def run(self):
    self.ensure_one()
    if self.use_job:
        job_run_memory_test.delay(ConnectorSession.from_env(self.env),
                                  self._name, self.uom,
                                  self.total_memory_to_reach,
                                  self.increase_step_memory)
    else:
        self.test_load(self.uom, self.total_memory_to_reach,
                       self.increase_step_memory)
def test_from_env(self):
    """ ConnectorSession.from_env(env) """
    session = ConnectorSession.from_env(self.env)
    self.assertEqual(session.cr, self.env.cr)
    self.assertEqual(session.uid, self.env.uid)
    self.assertEqual(session.context, self.env.context)
    self.assertEqual(session.pool, self.env.registry)
@contextmanager
def do_in_new_connector_env(self, model_name=None):
    """ Context manager that yields a new connector environment,
    using a new Odoo Environment and thus a new PG transaction.

    This can be used to make a preemptive check in a new transaction,
    for instance to see if another transaction already made the work.
    """
    with openerp.api.Environment.manage():
        registry = openerp.modules.registry.RegistryManager.get(
            self.env.cr.dbname
        )
        with closing(registry.cursor()) as cr:
            try:
                new_env = openerp.api.Environment(cr, self.env.uid,
                                                  self.env.context)
                new_connector_session = ConnectorSession.from_env(new_env)
                connector_env = self.connector_env.create_environment(
                    self.backend_record.with_env(new_env),
                    new_connector_session,
                    model_name or self.model._name,
                    connector_env=self.connector_env
                )
                yield connector_env
            except:
                cr.rollback()
                raise
            else:
                # Despite what pylint says, this is a perfectly valid
                # commit (in a new cursor). Disable the warning.
                cr.commit()  # pylint: disable=invalid-commit
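# --- Hedged usage sketch for do_in_new_connector_env: run a binder lookup
# in its own transaction to see whether another worker already imported the
# record. The method name `_is_already_imported` is a placeholder, not from
# the source; Binder.to_openerp is part of the old connector API.
from openerp.addons.connector.connector import Binder

def _is_already_imported(self, external_id):
    with self.do_in_new_connector_env() as new_connector_env:
        binder = new_connector_env.get_connector_unit(Binder)
        return bool(binder.to_openerp(external_id))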
def _price_changed(self, vals):
    """ Fire the ``on_product_price_changed`` event on all the variants of
    the template if the price of the product could have changed.

    If one of the fields used in a sale pricelist item has been modified,
    we consider that the price could have changed. There is no guarantee
    that the price actually changed, because it depends on the pricelists.
    """
    price_fields = self._price_changed_fields()
    if any(field in vals for field in price_fields):
        product_model = self.env['product.product']
        session = ConnectorSession.from_env(self.env)
        products = product_model.search(
            [('product_tmpl_id', 'in', self.ids)])
        # when the write is done on the product.product, avoid
        # firing the event twice
        if self.env.context.get('from_product_ids'):
            from_product_ids = self.env.context['from_product_ids']
            remove_products = product_model.browse(from_product_ids)
            products -= remove_products
        for product in products:
            on_product_price_changed.fire(session, product_model._name,
                                          product.id)
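# --- Sketch, not from the original module: a consumer can subscribe to the
# event fired above with the connector's event decorator. The handler and
# the `export_price` job are illustrative assumptions.
@on_product_price_changed
def delay_export_price(session, model_name, record_id):
    """Queue a price export whenever a product price may have changed."""
    export_price.delay(session, model_name, record_id)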
def write(self, vals):
    res = super(StockPicking, self).write(vals)
    if vals.get('carrier_tracking_ref'):
        session = ConnectorSession.from_env(self.env)
        for record_id in self.ids:
            on_tracking_number_added.fire(session, self._name, record_id)
    return res
def handler_web_children_hold(self):
    headers = request.httprequest.headers
    self._validate_headers(headers)
    # load children via a search on the childpool
    child_research = request.env['compassion.childpool.search'].sudo()
    research = child_research.create({'take': 5})
    research.rich_mix()
    # create a hold for all children found
    session = ConnectorSession.from_env(request.env)
    hold_children_job.delay(session, research.id)
    # return principal children info
    data = ""
    for child in research.global_child_ids:
        if child.image_url:
            data += '<img src="' + child.image_url + '"/> <br>'
        data += child.name + ' ' + child.birthdate + '<br>'
    headers = Headers()
    response = Response(data, content_type='text/html', headers=headers)
    return response
def import_suppliers(self):
    session = ConnectorSession.from_env(self.env)
    for backend_record in self:
        since_date = self._date_as_user_tz(
            backend_record.import_suppliers_since)
        import_suppliers.delay(session, backend_record.id, since_date)
    return True
def delete_products(self):
    self.ensure_one()
    model_name = 'product.template'
    session = ConnectorSession.from_env(self.env)
    products = session.env[model_name].search([])
    for product in products:
        delete_product_job.delay(session, model_name, self.id, product.id)
def check_picking_one_by_one(self, jobify=True):
    """ Apply 'check_pickings_filled' one picking at a time.
    Thus, in case of problem, we know which picking to look at.
    """
    if self.env['queue.job'].search(
            [('job_function_id.name', '=',
              'openerp.addons.stock_quant_packages_moving_wizard.models.'
              'stock.job_check_picking_one_by_one'),
             ('state', 'not in', ('done', 'failed'))], limit=1):
        return
    pickings_to_check = self.env['stock.picking'].search([
        ('filled_by_jobs', '=', True),
        ('picking_correctly_filled', '=', False),
        ('state', 'not in', ['done', 'cancel']),
    ])
    if not jobify:
        return pickings_to_check.check_pickings_filled()
    while pickings_to_check:
        chunk_picking = pickings_to_check[:1]
        job_check_picking_one_by_one.delay(
            ConnectorSession.from_env(self.env), 'stock.picking',
            chunk_picking.ids, dict(self.env.context))
        pickings_to_check = pickings_to_check[1:]
def confirm(self):
    self.ensure_one()
    self.env['shopinvader.partner'].write({'date_initialisation': False})
    session = ConnectorSession.from_env(self.env)
    for partner_id in self._context['active_ids']:
        reset_password.delay(session, 'shopinvader.partner', partner_id,
                             self.template_id.id, self.date_validity)
def setUp(self):
    super(TestJobSubscribe, self).setUp()
    grp_connector_manager = self.ref("connector.group_connector_manager")
    self.other_partner_a = self.env['res.partner'].create(
        {"name": "My Company a",
         "is_company": True,
         "email": "*****@*****.**",
         })
    self.other_user_a = self.env['res.users'].create(
        {"partner_id": self.other_partner_a.id,
         "login": "******",
         "name": "my user",
         "groups_id": [(4, grp_connector_manager)],
         })
    self.other_partner_b = self.env['res.partner'].create(
        {"name": "My Company b",
         "is_company": True,
         "email": "*****@*****.**",
         })
    self.other_user_b = self.env['res.users'].create(
        {"partner_id": self.other_partner_b.id,
         "login": "******",
         "name": "my user 1",
         "groups_id": [(4, grp_connector_manager)],
         })
    self.session = ConnectorSession.from_env(self.env)
def process_reconciliations(self, mv_line_dicts):
    """ Launch reconciliation in a job. """
    if self.env.context.get('async_mode', True):
        session = ConnectorSession.from_env(self.env)
        process_reconciliations_job.delay(session, self._name,
                                          mv_line_dicts)
    else:
        self._process_reconciliations(mv_line_dicts)
def send_mail(self):
    result = super(MassMailing, self).send_mail()
    if self.email_template_id:
        # Used for Sendgrid -> send e-mails in a job
        session = ConnectorSession.from_env(self.env)
        send_emails_job.delay(session, result._name, result.ids)
    return result
def _export_all_content(self, model):
    session = ConnectorSession.from_env(self.env)
    for record in self:
        bindings = self.env[model].search(
            [('backend_id', '=', record.id)])
        for binding in bindings:
            delay_export(session, model, binding.id, {})
    return True
def _reset_open_invoices(self):
    """ Launch the task in an asynchronous job by default. """
    if self.env.context.get('async_mode', True):
        session = ConnectorSession.from_env(self.env)
        reset_open_invoices_job.delay(session, self._name, self.ids)
    else:
        self._reset_open_invoices_job()
    return True
def get_map_file(self):
    self.mapping_file = False
    session = ConnectorSession.from_env(self.env)
    env = ConnectorEnvironment(self, session, 'prime.catalog.service')
    service = env.get_connector_unit(PrimeCatalogService)
    service.url = self.url
    service.token = self.token
    self.mapping_file = service.get_map_file()
def _get_base_adapter(self):
    """ Get an adapter to test the backend connection. """
    self.ensure_one()
    session = ConnectorSession.from_env(self.env)
    environment = ConnectorEnvironment(self, session, None)
    return CmisAdapter(environment)
def run_schedule_button_update(self, jobify=True):
    self.env.cr.execute("""WITH mrp_moves_details AS (
            SELECT mrp.id AS mrp_id,
                   sm.state AS raw_move_state,
                   sm_orig.state AS service_move_state
            FROM mrp_production mrp
            LEFT JOIN stock_move sm ON sm.raw_material_production_id = mrp.id
            LEFT JOIN stock_move sm_orig ON sm_orig.move_dest_id = sm.id
            WHERE mrp.state IN ('ready', 'confirmed'))
        SELECT mrp_id
        FROM mrp_moves_details
        GROUP BY mrp_id
        HAVING sum(CASE WHEN raw_move_state = 'done' OR service_move_state = 'done'
                        THEN 1 ELSE 0 END) <= 0""")
    fetchall = self.env.cr.fetchall()
    mrp_to_check_ids = [item[0] for item in fetchall]
    mrp_to_update_ids = []
    for mrp_id in mrp_to_check_ids:
        mrp = self.env['mrp.production'].browse(mrp_id)
        bom = mrp.bom_id
        if not bom:
            bom = bom._bom_find(product_id=mrp.product_id.id)
        if bom:
            mrp_to_update_ids += [mrp_id]
    chunk_number = 0
    while mrp_to_update_ids:
        chunk_number += 1
        mrp_chunk_ids = mrp_to_update_ids[:100]
        if jobify:
            run_mrp_production_update.delay(
                ConnectorSession.from_env(self.env), 'mrp.production',
                mrp_chunk_ids, dict(self.env.context),
                description=u"MRP Production Update (chunk %s)" % chunk_number)
        else:
            run_mrp_production_update(
                ConnectorSession.from_env(self.env), 'mrp.production',
                mrp_chunk_ids, dict(self.env.context))
        mrp_to_update_ids = mrp_to_update_ids[100:]
def process_messages(self):
    new_messages = self.filtered(lambda m: m.state in ('new', 'failure'))
    new_messages.write({'state': 'pending', 'failure_reason': False})
    if self.env.context.get('async_mode', True):
        session = ConnectorSession.from_env(self.env)
        process_messages_job.delay(session, self._name, self.ids)
    else:
        self._process_messages()
    return True
def procure_calculation(self):
    for company in (self.env.user.company_id +
                    self.env.user.company_id.child_ids):
        # Hack to get tests working correctly
        context = dict(self.env.context)
        context['jobify'] = True
        run_procure_orderpoint_async.delay(
            ConnectorSession.from_env(self.env),
            'procurement.orderpoint.compute', company.id, context)
    return {'type': 'ir.actions.act_window_close'}
def import_sale_orders(self):
    session = ConnectorSession.from_env(self.env)
    for backend in self:
        sale_order_import_batch.delay(
            session, 'cdiscount.sale.order', backend.id,
            priority=1)  # executed as soon as possible
    return True
def clean_invoices(self):
    """ By default, launch an asynchronous job to perform the task.
    Setting the context value async_mode to False forces the task to be
    performed immediately.
    """
    if self.env.context.get('async_mode', True):
        session = ConnectorSession.from_env(self.env)
        clean_generate_job.delay(session, self._name, self.ids)
    else:
        self._clean_generate_invoices()
    return True
def clean_invoices(self, since_date=None, to_date=None, keep_lines=None):
    """ By default, launch an asynchronous job to perform the task.
    Setting the context value async_mode to False forces the task to be
    performed immediately.
    """
    if self.env.context.get('async_mode', True):
        session = ConnectorSession.from_env(self.env)
        clean_invoices_job.delay(
            session, self._name, self.ids, since_date, to_date, keep_lines)
    else:
        self._clean_invoices(since_date, to_date, keep_lines)
def button_import(self):
    """ Analyze the attachment in order to create the letters' lines. """
    for letters_import in self:
        if letters_import.data:
            letters_import.state = 'pending'
            if self.env.context.get('async_mode', True):
                session = ConnectorSession.from_env(self.env)
                import_letters_job.delay(
                    session, self._name, letters_import.id)
            else:
                letters_import._run_analyze()
    return True
def setUp(self):
    super(TestImportMagentoConcurrentSync, self).setUp()
    self.registry2 = RegistryManager.get(get_db_name())
    self.cr2 = self.registry2.cursor()
    self.env2 = api.Environment(self.cr2, self.env.uid, {})
    backend2 = mock.Mock(name='Backend Record')
    backend2._name = 'magento.backend'
    backend2.id = self.backend_id
    self.backend2 = backend2
    self.connector_session2 = ConnectorSession.from_env(self.env2)

    @self.addCleanup
    def reset_cr2():
        # rollback and close the cursor, and reset the environments
        self.env2.reset()
        self.cr2.rollback()
        self.cr2.close()
def process_commkit_notifications(self, commkit_updates, headers, eta=None):
    """ Create jobs which will process all incoming CommKit Notification
    messages. """
    session = ConnectorSession.from_env(self.env)
    action_id = self.env.ref("onramp_compassion.update_commkit").id
    for update_data in commkit_updates:
        # Create a GMC message to keep track of the updates
        gmc_message = self.env["gmc.message.pool"].create({
            "action_id": action_id,
            "content": json.dumps(update_data),
            "headers": json.dumps(dict(headers.items())),
        })
        job_uuid = update_commkit_job.delay(
            session, self._name, update_data, gmc_message.id, eta=eta)
        gmc_message.request_id = job_uuid
    return True
def generate_invoices(self, invoicer=None):
    """ By default, launch an asynchronous job to perform the task.
    Setting the context value async_mode to False forces the task to be
    performed immediately.
    """
    if invoicer is None:
        invoicer = self.env['recurring.invoicer'].create(
            {'source': self._name})
    if self.env.context.get('async_mode', True):
        session = ConnectorSession.from_env(self.env)
        generate_invoices_job.delay(
            session, self._name, self.ids, invoicer.id)
    else:
        # Prevent two generations at the same time
        jobs = self.env['queue.job'].search([
            ('channel', '=', 'root.recurring_invoicer'),
            ('state', '=', 'started')])
        if jobs:
            raise exceptions.Warning(
                _("Generation already running"),
                _("A generation has already started in background. "
                  "Please wait for it to finish."))
        self._generate_invoices(invoicer)
    return invoicer
def _postpone_deletion(self):
    postpone = datetime.now() + timedelta(seconds=10)
    session = ConnectorSession.from_env(self.env)
    unlink_children_job.delay(session, self._name, self.ids, eta=postpone)
def generate_invoices(self, invoicer=None):
    session = ConnectorSession.from_env(self.env)
    if invoicer is None:
        invoicer = self.env["recurring.invoicer"].create(
            {"source": self._name})
    job_uuid = generate_invoices_job.delay(
        session, self._name, self.ids, invoicer.id)
    return invoicer