 def test_change_context_uninitialized(self):
     """ Change the context on a session not initialized with a context """
     session = ConnectorSession(self.cr, self.uid)
     test_key = 'test_key'
     with session.change_context({test_key: 'value'}):
         self.assertEqual(session.context.get('test_key'), 'value')
     self.assertNotIn(test_key, session.context)
    def import_product_product(self):
        try:
            print ":::::::::::"
            new_cr = sql_db.db_connect(self.env.cr.dbname).cursor()
            uid, context = self.env.uid, self.env.context
            with api.Environment.manage():
                self.env = api.Environment(new_cr, uid, context)
                self.test_connection()
                session = ConnectorSession(self.env.cr, self.env.uid,
                                           context=self.env.context)
                self.import_products_from_date = datetime.now()
                products = shopify.Product.find()
                for product in products:
                    vals_product_tmpl = {}
                    dict_attr = product.__dict__['attributes']
                    if not session.search('product.template',
                                          [('shopify_product_id',
                                            '=', dict_attr['id'])]):
                        image_urls = [getattr(i, 'src') for i in product.images]
                        if image_urls:
                            photo = base64.encodestring(urllib2.urlopen(image_urls[0]).read())
                            vals_product_tmpl.update({'image_medium': photo})

                        custom_collection = shopify.CustomCollection.find(product_id=dict_attr['id'])
                        if custom_collection:
                            for categ in custom_collection:
                                product_cate_obj = session.search(
                                    'product.category',
                                    [('shopify_product_cate_id', '=',
                                      categ.__dict__['attributes']['id'])])
                                if product_cate_obj:
                                    vals_product_tmpl.update({'categ_id': product_cate_obj[0]})
                        vals_product_tmpl.update({'name': dict_attr['title'],
                                                  'type': 'consu',
                                                  'shopify_product_id': dict_attr['id'],
                                                  'description': dict_attr['body_html'],
                                                  'state': 'add'})
                        product_tid = session.create('product.template', vals_product_tmpl)
                        new_cr.commit()
                        variants = dict_attr['variants']
                        for variant in variants:
                            dict_variant = variant.__dict__['attributes']
                            session.create('product.product',
                                           {'product_tmpl_id': product_tid,
                                            'product_sfy_variant_id': dict_variant['id']})
                            new_cr.commit()
        except Exception:
            raise Warning(_('Facing a problem while importing products!'))
        finally:
            self.env.cr.close()
Example 4
class test_connector_session(common.TransactionCase):
    """ Test ConnectorSession """

    def setUp(self):
        super(test_connector_session, self).setUp()
        self.context = {'lang': 'fr_FR'}
        self.session = ConnectorSession(self.cr,
                                        self.uid,
                                        context=self.context)

    def test_change_user(self):
        """
        Change the user and check if it is reverted correctly at the end
        """
        original_uid = self.session.uid
        new_uid = 2
        with self.session.change_user(new_uid):
            self.assertEqual(self.session.uid, new_uid)
        self.assertEqual(self.session.uid, original_uid)

    def test_model_with_transaction(self):
        """
        Create a session with a model name; we should be able to access
        the model from a transaction
        """
        res_users = self.registry('res.users').search_count(self.cr,
                                                            self.uid,
                                                            [])
        sess_res_users_obj = self.session.pool.get('res.users')
        sess_res_users = sess_res_users_obj.search_count(self.cr,
                                                         self.uid,
                                                         [])
        self.assertEqual(sess_res_users, res_users)

    def test_change_context(self):
        """
        Change the context and check if it is reverted correctly at the end
        """
        test_key = 'test_key'
        self.assertNotIn(test_key, self.session.context)
        with self.session.change_context({test_key: 'value'}):
            self.assertIn(test_key, self.session.context)
        self.assertNotIn(test_key, self.session.context)

        # change the context on a session not initialized with a context
        session = ConnectorSession(self.cr, self.uid)
        with session.change_context({test_key: 'value'}):
            self.assertIn(test_key, session.context)
        self.assertNotIn(test_key, session.context)
Example 5
    def button_import(self):
        """
        Analyze the imports in order to create the letter's lines
        """
        if not self.manual_import:
            # when letters are in a folder on NAS redefine method
            for letters_import in self:
                letters_import.state = 'pending'
                if self.env.context.get('async_mode', True):
                    session = ConnectorSession.from_env(self.env)
                    ilh.import_letters_job.delay(
                        session, self._name, letters_import.id)
                else:
                    letters_import._run_analyze()
            return True
        else:
            # when letters selected by user, save them on NAS and call
            # super method
            for letters_import in self:
                if letters_import.data and self.env.context.get(
                        'async_mode', True):
                    for attachment in letters_import.data:
                        self._save_imported_letter(attachment)

            return super(ImportLettersHistory, self).button_import()
class test_connector_session(common.TransactionCase):
    """ Test ConnectorSession """

    def setUp(self):
        super(test_connector_session, self).setUp()
        self.context = {'lang': 'fr_FR'}
        self.session = ConnectorSession(self.cr,
                                        self.uid,
                                        context=self.context)

    def test_change_user(self):
        """
        Change the user and check if it is reverted correctly at the end
        """
        original_uid = self.session.uid
        new_uid = 2
        with self.session.change_user(new_uid):
            self.assertEqual(self.session.uid, new_uid)
        self.assertEqual(self.session.uid, original_uid)

    def test_model_with_transaction(self):
        """
        Create a session with a model name; we should be able to access
        the model from a transaction
        """
        res_users = self.registry('res.users')

        self.assertEqual(self.session.pool.get('res.users'), res_users)
 def test_from_env(self):
     """ ConnectorSession.from_env(env) """
     session = ConnectorSession.from_env(self.env)
     self.assertEqual(session.cr, self.env.cr)
     self.assertEqual(session.uid, self.env.uid)
     self.assertEqual(session.context, self.env.context)
     self.assertEqual(session.pool, self.env.registry)
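
The test above asserts that from_env mirrors the explicit constructor; a
minimal sketch of the two construction styles used throughout these examples,
assuming an Odoo environment `env` is in scope:

    session_a = ConnectorSession(env.cr, env.uid, context=env.context)
    session_b = ConnectorSession.from_env(env)
    # both sessions share the same cursor, user id, context and registry
    assert session_a.cr is session_b.cr
    assert session_a.uid == session_b.uid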
 def process_reconciliations(self, mv_line_dicts):
     """ Launch reconciliation in a job. """
     if self.env.context.get('async_mode', True):
         session = ConnectorSession.from_env(self.env)
         process_reconciliations_job.delay(
             session, self._name, mv_line_dicts)
     else:
         self._process_reconciliations(mv_line_dicts)
Example 10
 def import_sale_orders(self):
     session = ConnectorSession.from_env(self.env)
     for backend in self:
         sale_order_import_batch.delay(
             session,
             'cdiscount.sale.order',
             backend.id,
             priority=1)  # executed as soon as possible
     return True
Example 11
 def process_messages(self):
     new_messages = self.filtered(lambda m: m.state in ('new', 'failure'))
     new_messages.write({'state': 'pending', 'failure_reason': False})
     if self.env.context.get('async_mode', True):
         session = ConnectorSession.from_env(self.env)
         process_messages_job.delay(session, self._name, self.ids)
     else:
         self._process_messages()
     return True
Example 12
 def _reset_open_invoices(self):
     """ Launch the task in asynchrnous job by default. """
     if self.env.context.get('async_mode', True):
         session = ConnectorSession.from_env(self.env)
         reset_open_invoices_job.delay(
             session, self._name, self.ids)
     else:
         self._reset_open_invoices_job()
     return True
Example 13
    def write(self, cr, uid, ids, vals, context=None):
        if not hasattr(ids, '__iter__'):
            ids = [ids]

        # magento_qty may be 0 and still needs to be updated
        if "magento_qty" in vals:
            session = ConnectorSession(cr, uid, context=context)
            inventory_fields = list(set(vals).intersection(INVENTORY_FIELDS))
            for record_id in ids:
                if session.context.get('connector_no_export'):
                    continue
                if session.browse('magento.product.product',
                                  record_id).no_stock_sync:
                    continue
                if inventory_fields:
                    export_product_inventory.delay(
                        session, 'magento.product.product',
                        record_id, fields=inventory_fields,
                        priority=20)

        return super(magento_product_product, self).write(cr, uid, ids, vals,
                                                          context=context)
 def clean_invoices(self):
     """ By default, launch asynchronous job to perform the task.
         Context value async_mode set to False can force to perform
         the task immediately.
     """
     if self.env.context.get('async_mode', True):
         session = ConnectorSession.from_env(self.env)
         clean_generate_job.delay(session, self._name, self.ids)
     else:
         self._clean_generate_invoices()
     return True
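
A note on the async_mode toggle used above and in many of the following
examples: the caller's context decides whether the work is enqueued as a
connector job or executed inline. A minimal usage sketch, assuming a
`contracts` recordset of a model defining clean_invoices as above:

    # default: enqueue a background job via ConnectorSession
    contracts.clean_invoices()
    # force the task to run synchronously in the current transaction
    contracts.with_context(async_mode=False).clean_invoices()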
Example 15
 def setUp(self):
     super(test_job_storage_multi_company, self).setUp()
     self.pool = openerp.modules.registry.RegistryManager.get(common.DB)
     self.session = ConnectorSession(self.cr, self.uid, context={})
     self.queue_job = self.registry('queue.job')
     grp_connector_manager = self.ref("connector.group_connector_manager")
     User = self.registry('res.users')
     Company = self.registry('res.company')
     Partner = self.registry('res.partner')
     self.other_partner_id_a = Partner.create(
         self.cr, self.uid,
         {"name": "My Company a",
          "is_company": True,
          "email": "*****@*****.**",
          })
     self.other_company_id_a = Company.create(
         self.cr, self.uid,
         {"name": "My Company a",
          "partner_id": self.other_partner_id_a,
          "rml_header1": "My Company Tagline",
          "currency_id": self.ref("base.EUR")
          })
     self.other_user_id_a = User.create(
         self.cr, self.uid,
         {"partner_id": self.other_partner_id_a,
          "company_id": self.other_company_id_a,
          "company_ids": [(4, self.other_company_id_a)],
          "login": "******",
          "name": "my user",
          "groups_id": [(4, grp_connector_manager)]
          })
     self.other_partner_id_b = Partner.create(
         self.cr, self.uid,
         {"name": "My Company b",
          "is_company": True,
          "email": "*****@*****.**",
          })
     self.other_company_id_b = Company.create(
         self.cr, self.uid,
         {"name": "My Company b",
          "partner_id": self.other_partner_id_b,
          "rml_header1": "My Company Tagline",
          "currency_id": self.ref("base.EUR")
          })
     self.other_user_id_b = User.create(
         self.cr, self.uid,
         {"partner_id": self.other_partner_id_b,
          "company_id": self.other_company_id_b,
          "company_ids": [(4, self.other_company_id_b)],
          "login": "******",
          "name": "my user 1",
          "groups_id": [(4, grp_connector_manager)]
          })
 def clean_invoices(self, since_date=None, to_date=None, keep_lines=None):
     """ By default, launch asynchronous job to perform the task.
         Context value async_mode set to False can force to perform
         the task immediately.
     """
     if self.env.context.get('async_mode', True):
         session = ConnectorSession.from_env(self.env)
         clean_invoices_job.delay(
             session, self._name, self.ids, since_date, to_date,
             keep_lines)
     else:
         self._clean_invoices(since_date, to_date, keep_lines)
 def button_import(self):
     """
     Analyze the attachment in order to create the letter's lines
     """
     for letters_import in self:
         if letters_import.data:
             letters_import.state = 'pending'
             if self.env.context.get('async_mode', True):
                 session = ConnectorSession.from_env(self.env)
                 import_letters_job.delay(
                     session, self._name, letters_import.id)
             else:
                 letters_import._run_analyze()
     return True
Example 18
    def write(self, cr, uid, ids, vals, context=None):
        if not hasattr(ids, "__iter__"):
            ids = [ids]

        # cancel sales order on Magento (do not export the other
        # state changes, Magento handles them itself)
        if vals.get("state") == "cancel":
            session = ConnectorSession(cr, uid, context=context)
            for order in session.browse("sale.order", ids):
                old_state = order.state
                if old_state == "cancel":
                    continue  # skip if already canceled
                for binding in order.magento_bind_ids:
                    export_state_change.delay(
                        session,
                        "magento.sale.order",
                        binding.id,
                        # so if the state changes afterwards,
                        # it won't be exported
                        allowed_states=["cancel"],
                        description="Cancel sales order %s" % binding.magento_id,
                    )
        return super(sale_order, self).write(cr, uid, ids, vals, context=context)
Example 19
 def setUp(self):
     super(TestJobStorageMultiCompany, self).setUp()
     self.session = ConnectorSession(self.cr, self.uid, context={})
     self.queue_job = self.env['queue.job']
     grp_connector_manager = self.ref("connector.group_connector_manager")
     User = self.env['res.users']
     Company = self.env['res.company']
     Partner = self.env['res.partner']
     self.other_partner_a = Partner.create(
         {"name": "My Company a",
          "is_company": True,
          "email": "*****@*****.**",
          })
     self.other_company_a = Company.create(
         {"name": "My Company a",
          "partner_id": self.other_partner_a.id,
          "rml_header1": "My Company Tagline",
          "currency_id": self.ref("base.EUR")
          })
     self.other_user_a = User.create(
         {"partner_id": self.other_partner_a.id,
          "company_id": self.other_company_a.id,
          "company_ids": [(4, self.other_company_a.id)],
          "login": "******",
          "name": "my user",
          "groups_id": [(4, grp_connector_manager)]
          })
     self.other_partner_b = Partner.create(
         {"name": "My Company b",
          "is_company": True,
          "email": "*****@*****.**",
          })
     self.other_company_b = Company.create(
         {"name": "My Company b",
          "partner_id": self.other_partner_b.id,
          "rml_header1": "My Company Tagline",
          "currency_id": self.ref("base.EUR")
          })
     self.other_user_b = User.create(
         {"partner_id": self.other_partner_b.id,
          "company_id": self.other_company_b.id,
          "company_ids": [(4, self.other_company_b.id)],
          "login": "******",
          "name": "my user 1",
          "groups_id": [(4, grp_connector_manager)]
          })
    def setUp(self):
        super(TestImportMagentoConcurrentSync, self).setUp()
        self.registry2 = RegistryManager.get(get_db_name())
        self.cr2 = self.registry2.cursor()
        self.env2 = api.Environment(self.cr2, self.env.uid, {})
        backend2 = mock.Mock(name='Backend Record')
        backend2._name = 'magento.backend'
        backend2.id = self.backend_id
        self.backend2 = backend2
        self.connector_session2 = ConnectorSession.from_env(self.env2)

        @self.addCleanup
        def reset_cr2():
            # rollback and close the cursor, and reset the environments
            self.env2.reset()
            self.cr2.rollback()
            self.cr2.close()
    def process_commkit_notifications(self, commkit_updates, headers, eta=None):
        """ Create jobs which will process all incoming CommKit Notification
        messages. """
        session = ConnectorSession.from_env(self.env)
        action_id = self.env.ref("onramp_compassion.update_commkit").id
        for update_data in commkit_updates:
            # Create a GMC message to keep track of the updates
            gmc_message = self.env["gmc.message.pool"].create(
                {
                    "action_id": action_id,
                    "content": json.dumps(update_data),
                    "headers": json.dumps(dict(headers.items())),
                }
            )
            job_uuid = update_commkit_job.delay(session, self._name, update_data, gmc_message.id, eta=eta)
            gmc_message.request_id = job_uuid

        return True
 def import_product_categories(self):
     try:
         new_cr = sql_db.db_connect(self.env.cr.dbname).cursor()
         uid, context = self.env.uid, self.env.context
         with api.Environment.manage():
             self.env = api.Environment(new_cr, uid, context)
             self.import_categories_from_date = datetime.now()
             self.test_connection()
             session = ConnectorSession(self.env.cr, self.env.uid,
                                        context=self.env.context)
             product_category_ids = session.search(
                 'product.category', [('name', '=', 'Shopify Products')])
             if not product_category_ids:
                 category_id = session.create('product.category',
                                              {'name': 'Shopify Products'})
                 new_cr.commit()
             shopify_collection = shopify.CustomCollection.find()
             if shopify_collection:
                 for category in shopify_collection:
                     vals = {}
                     dict_category = category.__dict__['attributes']
                     if product_category_ids:
                         vals.update({'parent_id': product_category_ids[0]})
                     else:
                         vals.update({'parent_id': category_id})
                     vals.update({'name': dict_category['title'],
                                  'description': dict_category['body_html'],
                                  'write_uid': self.env.uid,
                                  'shopify_product_cate_id': dict_category['id']})
                     product_cate_id = session.search(
                         'product.category',
                         [('shopify_product_cate_id', '=',
                           dict_category['id'])])
                     if not product_cate_id:
                         session.create('product.category', vals)
                         new_cr.commit()
                     else:
                         session.write('product.category', product_cate_id[0], vals)
                         new_cr.commit()
     except Exception:
         raise Warning(_('Facing a problem while importing product categories!'))
     finally:
         self.env.cr.close()
Example 23
    def setUp(self):
        super(TestIntLoad, self).setUp()
        self.backend_record = Mock()
        self.session = ConnectorSession(self.cr, self.uid)
        self.model_name = 'ir.attachment.binding'
        self.backend_id = self.session.create(
            'file_import.backend',
            {
                'name': 'Test File Import',
                'ftp_host': 'localhost',
                'ftp_user': '******',
                'ftp_password': '******',
                'ftp_input_folder': 'to_openerp',
                'ftp_failed_folder': 'from_openerp',
            })

        self.env = Environment(
            self.backend_record,
            self.session,
            self.model_name
        )

        self.policy = MoveLoadPolicy(self.env)

        self.parsed_header = '["ref", "date", "period_id", "journal_id", "line_id/account_id", "line_id/partner_id", "line_id/name", "line_id/analytic_account_id", "line_id/debit", "line_id/credit", "line_id/tax_code_id"]'  # noqa

        self.parsed_good_chunk = '[["1728274", "2014-02-02", "02/2014", "Sales Journal - (test)", "X11001", "Bank Wealthy and sons", "Camptocamp", "", "37.8", "", ""], ["", "", "", "", "X1111", "Bank Wealthy and sons", "Camptocamp", "AA009", "", "31.5", ""], ["", "", "", "", "X2001", "Bank Wealthy and sons", "Camptocamp", "AA001", "", "3.83", ""], ["", "", "", "", "X2110", "Bank Wealthy and sons", "Camptocamp", "AA001", "3.83", "", ""], ["", "", "", "", "X1000", "Bank Wealthy and sons", "Camptocamp", "", "", "6.3", ""], ["", "", "", "", "X1000", "Bank Wealthy and sons", "Camptocamp", "", "", "-0", ""]]'  # noqa

        self.parsed_chunk_missing_journal = '[["1728274", "2014-02-02", "02/2014", "Sales Journal - (test)", "X11001", "Bank Wealthy and sons", "Camptocamp", "", "37.8", "", ""], ["", "", "", "", "X1111", "Bank Wealthy and sons", "Camptocamp", "AA009", "", "31.5", ""], ["", "", "", "", "X2001", "Bank Wealthy and sons", "Camptocamp", "AA001", "", "3.83", ""], ["", "", "", "", "X2110", "Bank Wealthy and sons", "Camptocamp", "AA001", "3.83", "", ""], ["", "", "", "", "X1000", "Bank Wealthy and sons", "Camptocamp", "", "", "-6.3", ""], ["", "", "", "", "X1000", "Bank Wealthy and sons", "Camptocamp", "", "", "-0", ""]]'  # noqa

        with open(expand_path('two_chunks.csv')) as input_file:
            file_content = input_file.read()

        self.document_id = self.session.create(
            'ir.attachment.binding', {
                'datas': base64.b64encode(file_content),
                'datas_fname': 'two_chunks.csv',
                'name': 'two_chunks.csv',
                'backend_id': self.backend_id,
                'prepared_header': self.parsed_header,
            })
    def child_released(self):
        """ Is called when a child is released to the global childpool. """
        self.write({
            'sponsor_id': False,
            'state': 'R'
        })

        sponsored_children = self.filtered('has_been_sponsored')
        other_children = self - sponsored_children
        other_children.get_lifecycle_event()

        # the children will be deleted when we reach their expiration date
        default_expiration = datetime.now() + timedelta(weeks=1)
        session = ConnectorSession.from_env(other_children.env)
        for child in other_children:
            postpone = fields.Datetime.from_string(child.hold_expiration) or \
                default_expiration
            unlink_children_job.delay(session, self._name, child.ids,
                                      eta=postpone)

        return True
Example 25
 def action_open(self):
     self.ensure_one()
     if self._context.get('job_uuid', False):  # Called from @job
         return super(HRSalaryExpense, self).action_open()
     # Enqueue
     if self.async_process and self.state != 'open':
         session = ConnectorSession(self._cr, self._uid, self._context)
         description = 'Generate Entries - Salary Expense %s' % self.number
         uuid = action_open_hr_salary.delay(session,
                                            self._name,
                                            self.id,
                                            description=description)
         # Checking for running task, use the same signature as delay()
         task_name = "%s('%s', %s)" % \
             ('action_open_hr_salary', self._name, self.id)
         self._check_queue(task_name,
                           desc=description,
                           type='always',
                           uuid=uuid)
     else:
         return super(HRSalaryExpense, self).action_open()
Example 26
 def split_entries(self):
     """ Inherit existing function, and test if it async_process is True """
     self.ensure_one()
     if self._context.get('job_uuid', False):  # Called from @job
         return super(PabiImportJournalEntries, self).split_entries()
     # Enqueue
     if self.async_process:
         if self.job_id:  # Job already started, check at My Jobs menu
             message = _('Import JE - Split Entries')
             action = self.env.ref('pabi_utils.action_my_queue_job')
             raise RedirectWarning(message, action.id, _('Go to My Jobs'))
         session = ConnectorSession(self._cr, self._uid, self._context)
         description = '%s - Import JE - Split Entries(s)' % self.name
         uuid = action_import_je_split_entries.delay(
             session, self._name, self.id, description=description)
         job = self.env['queue.job'].search([('uuid', '=', uuid)], limit=1)
         # Process Name
         job.process_id = self.env.ref('sample_pabi_async_process.'
                                       'import_je_split_entries')
     else:
         return super(PabiImportJournalEntries, self).split_entries()
Example 27
 def action_button_confirm(self):
     if self._context.get('pos_async_process', False):
         self.ensure_one()
         if self._context.get('job_uuid', False):  # Called from @job
             return super(SaleOrder, self).action_button_confirm()
         if self.pos_job_id:
             message = _('Confirm POS Order')
             action = self.env.ref('pabi_utils.action_my_queue_job')
             raise RedirectWarning(message, action.id, _('Go to My Jobs'))
         session = ConnectorSession(self._cr, self._uid, self._context)
         description = '%s - Confirm POS Order' % self.name
         uuid = action_confirm_pos_order.delay(session,
                                               self._name,
                                               self.id,
                                               description=description)
         job = self.env['queue.job'].search([('uuid', '=', uuid)], limit=1)
         # Process Name
         job.process_id = self.env.ref('pabi_async_process.'
                                       'confirm_pos_order')
     else:
         return super(SaleOrder, self).action_button_confirm()
Example 28
 def manual_invoice(self):
     self.ensure_one()
     if self._context.get('job_uuid', False):  # Called from @job
         return super(SaleOrder, self).manual_invoice()
     # Enqueue
     if self.async_process and (self.order_policy == 'order'
                                or self.use_invoice_plan):
         if self.job_id:
             message = _('Creating Invoice(s)')
             action = self.env.ref('pabi_utils.action_my_queue_job')
             raise RedirectWarning(message, action.id, _('Go to My Jobs'))
         session = ConnectorSession(self._cr, self._uid, self._context)
         description = '%s - Creating Invoice(s)' % self.name
         uuid = action_sale_manual_invoice.delay(
             session, self._name, self.id, description=description)
         job = self.env['queue.job'].search([('uuid', '=', uuid)], limit=1)
         # Process Name
         job.process_id = self.env.ref('cmo_async_process.'
                                       'sale_invoice_plan')
     else:
         return super(SaleOrder, self).manual_invoice()
Example 29
 def write(self, vals):
     # cancel sales order on Magento (do not export the other
     # state changes, Magento handles them itself)
     if vals.get('state') == 'cancel':
         session = ConnectorSession(self.env.cr, self.env.uid,
                                    context=self.env.context)
         for order in self:
             old_state = order.state
             if old_state == 'cancel':
                 continue  # skip if already canceled
             for binding in order.magento_bind_ids:
                 export_state_change.delay(
                     session,
                     'magento.sale.order',
                     binding.id,
                     # so if the state changes afterwards,
                     # it won't be exported
                     allowed_states=['cancel'],
                     description="Cancel sales order %s" %
                                 binding.magento_id)
     return super(SaleOrder, self).write(vals)
Example 30
 def action_done_background(self):
     if self._context.get('button_validate_async_process', False):
         self.ensure_one()
         if self._context.get('job_uuid', False):  # Called from @job
             return self.action_done()
         if self.button_validate_job_id:
             message = _('Confirm Post')
             action = self.env.ref('pabi_utils.action_my_queue_job')
             raise RedirectWarning(message, action.id, _('Go to My Jobs'))
         session = ConnectorSession(self._cr, self._uid, self._context)
         description = '%s - Confirm Post' % (self.ref or self.name)
         uuid = action_done_async_process.delay(session,
                                                self._name,
                                                self.id,
                                                description=description)
         job = self.env['queue.job'].search([('uuid', '=', uuid)], limit=1)
         # Process Name
         # job.process_id = self.env.ref('pabi_async_process.'
         #                              'confirm_pos_order')
     else:
         return self.action_done()
Example 31
 def action_generate(self):
     if self._context.get('job_uuid', False):  # Called from @job
         return super(AccountSubscriptionGenerate, self).action_generate()
     # Enqueue
     if self.async_process:
         session = ConnectorSession(self._cr, self._uid, self._context)
         period = self.calendar_period_id.calendar_name
         model_types = ', '.join([x.name for x in self.model_type_ids])
         description = 'Generate Entries - %s - %s' % (period, model_types)
         uuid = action_generate_recurring_entries.delay(
             session, self._name, self.id, description=description)
         job = self.env['queue.job'].search([('uuid', '=', uuid)], limit=1)
         # Process Name
         job.process_id = self.env.ref('pabi_async_process.'
                                       'action_generate_entries')
         # Checking for running task, use the same signature as delay()
         task_name = "%s('%s', %s)" % ('action_generate_recurring_entries',
                                       self._name, self.id)
         self._check_queue(task_name, desc=description, type='always')
     else:
         return super(AccountSubscriptionGenerate, self).action_generate()
Example 32
 def action_save_carry_over_background(self):
     # if self._context.get('button_carry_over_async_process', False):
     self.ensure_one()
     self.name = '{:03d}'.format(self.id)
     # if self._context.get('job_uuid', False):  # Called from @job
     #     return self.action_done_save()
     if self.button_carry_over_job_id:
         message = _('Save Carry Over')
         action = self.env.ref('pabi_utils.action_my_queue_job')
         raise RedirectWarning(message, action.id, _('Go to My Jobs'))
     session = ConnectorSession(self._cr, self._uid, self._context)
     description = '%s - Commitment Save Carry Over' % (self.doctype)
     uuid = action_done_save_async_process.delay(session,
                                                 self._name,
                                                 self.id,
                                                 description=description)
     job = self.env['queue.job'].search([('uuid', '=', uuid)], limit=1)
Example 33
 def copy_quotation(self):
     self_copy = self.with_context(__copy_from_quotation=True)
     result = super(SaleOrder, self_copy).copy_quotation()
     # link binding of the canceled order to the new order, so the
     # operations done on the new order will be sync'ed with Magento
     new_id = result['res_id']
     binding_model = self.env['magento.sale.order']
     bindings = binding_model.search([('openerp_id', '=', self.id)])
     bindings.write({'openerp_id': new_id})
     session = ConnectorSession(self.env.cr,
                                self.env.uid,
                                context=self.env.context)
     for binding in bindings:
         # the sales' status on Magento is likely 'canceled'
         # so we will export the new status (pending, processing, ...)
         export_state_change.delay(session,
                                   'magento.sale.order',
                                   binding.id,
                                   description="Reopen sales order %s" %
                                   binding.magento_id)
     return result
Example 34
 def compute_depreciation_board(self):
     self.ensure_one()
     if self._context.get('job_uuid', False):  # Called from @job
         return super(AccountAsset, self).compute_depreciation_board()
     # Enqueue
     if self.async_process:
         if self.job_id:
             message = _('Compute asset depreciation job is still running!')
             action = self.env.ref('pabi_utils.action_my_queue_job')
             raise RedirectWarning(message, action.id, _('Go to My Jobs'))
         session = ConnectorSession(self._cr, self._uid, self._context)
         description = '%s - Asset Depreciation Job' % self.name
         uuid = action_compute_depreciation_board.delay(
             session, self._name, self.id, description=description)
         job = self.env['queue.job'].search([('uuid', '=', uuid)], limit=1)
         # Process Name
         job.process_id = self.env.ref('pabi_async_process.'
                                       'asset_compute_depreciation_board')
         return True
     else:
         return super(AccountAsset, self).compute_depreciation_board()
Example 35
 def action_invoice_create(self):
     self.ensure_one()
     if self._context.get('job_uuid', False):  # Called from @job
         return super(PurchaseOrder, self).action_invoice_create()
     # Enqueue
     if self._context.get('async_process', False):
         session = ConnectorSession(self._cr, self._uid, self._context)
         description = '%s - Create Supplier Invoice(s)' % self.name
         uuid = action_purchase_create_invoice.delay(
             session, self._name, self.id, description=description)
         job = self.env['queue.job'].search([('uuid', '=', uuid)], limit=1)
         # Process Name
         job.process_id = self.env.ref('pabi_async_process.'
                                       'purchase_invoice_plan')
         # Checking for running task, use the same signature as delay()
         task_name = "%s('%s', %s)" % \
             ('action_purchase_create_invoice', self._name, self.id)
         self._check_queue(task_name, desc=self.name,
                           type='always', uuid=uuid)
     else:
         return super(PurchaseOrder, self).action_invoice_create()
 def create(self, vals):
     res = super(GeneralFeedEventOrder, self).create(vals)
     if len(res.move) == 0:
         quants_obj = self.env['stock.quant']
         moves_obj = self.env['stock.move']
         target_quant = quants_obj.search([
             ('lot_id', '=', res.feed_lot.id),
             ('location_id', '=', res.feed_location.id)])
         new_move = moves_obj.create({
             'name': res.name+'-'+res.feed_lot.name+'-mov',
             'create_date': fields.Date.today(),
             'date': res.start_date,
             'product_id': res.feed_product.id,
             'product_uom_qty': res.feed_quantity,
             'product_uom': res.uom.id,
             'location_id': res.feed_location.id,
             'location_dest_id': res.location_dest.id,
             'company_id': res.feed_location.company_id.id,
             'origin': res.name,
             })
         for q in target_quant:
             q.reservation_id = new_move.id
         res.move = new_move
     else:
         new_move = res.move
     # disabled feed dispatch kept for reference:
     # if prod_tmpl.feed_lactating:
     #     self.specific_feed(True, res, new_move)
     # elif prod_tmpl.feed_transit:
     #     self.specific_feed(False, res, new_move)
     # else:
     #     self.standard_feed(res, new_move)
     self.standard_feed(res, new_move)
     if new_move.state != 'done':
         res.move.action_done()
     session = ConnectorSession.from_env(self.env)
     confirm_feed_event.delay(
         session, 'farm.general.feed.event', res.id)
     return res
Example 37
    def _cancel_jobs(self, cr, uid, context=None):
        """Find payment.orders where the mark has been removed and cancel the jobs.
        """

        if context is None:
            context = {}

        session = ConnectorSession(cr, uid, context=context)
        storage = OpenERPJobStorage(session)

        paymentorder_ids = self.search(cr, uid, [
            ('to_process', '=', False),
            ('post_job_uuid', '!=', False)
        ], context=context)

        for paymentorder in self.browse(cr, uid, paymentorder_ids, context=context):
            job_rec = storage.load(paymentorder.post_job_uuid)
            if job_rec.state in (u'pending', u'enqueued'):
                job_rec.set_done(result=_(
                    u'Task set to Done because the user unmarked the payment order'
                ))
                storage.store(job_rec)
Example 38
 def catalog_api_load_mapping(self):
     self.ensure_one()
     session = ConnectorSession.from_env(self.env)
     env = ConnectorEnvironment(self, session, 'prime.catalog.service')
     service = env.get_connector_unit(PrimeCatalogService)
     service.url = self.url
     service.token = self.token
     esa = self.env['external.service.attribute']
     mapping = service.get_mapping()
     clear_fields = {
         key: value
         for key, value in mapping.items() if isinstance(value, dict)
     }
     for code, data in clear_fields.items():
         field = {
             'backend_id': self.id,
             'parent_id': False,
             'code': code,
             'type_id': data['type'],
             'additional_info': json_dumps(data)
         }
         esa.get_or_create(field)
Example 39
 def action_assign(self, cr, uid, ids, *args, **kwargs):
     res = super(stock_picking, self).action_assign(cr, uid, ids, *args,
                                                    **kwargs)
     if res:
         session = ConnectorSession(cr, uid, context=None)
         picking_records = self.read(cr,
                                     uid,
                                     ids,
                                     ['id', 'type', 'wms_disable_events'],
                                     context=None)
         for picking_vals in picking_records:
             if picking_vals['wms_disable_events']:
                 continue
             if picking_vals['type'] == 'out':
                 on_picking_out_available.fire(session, self._name,
                                               picking_vals['id'])
             elif picking_vals['type'] == 'in':
                 on_picking_in_available.fire(session, self._name,
                                              picking_vals['id'])
             else:
                 continue
     return res
 def import_sale_orders(self, cr, uid, ids, context=None):
     session = ConnectorSession(cr, uid, context=context)
     import_start_time = datetime.now()
     for storeview in self.browse(cr, uid, ids, context=context):
         if storeview.no_sales_order_sync:
             _logger.debug("The storeview '%s' is active in Magento "
                           "but its sales orders should not be imported." %
                           storeview.name)
             continue
         backend_id = storeview.backend_id.id
         if storeview.import_orders_from_date:
             from_date = datetime.strptime(
                 storeview.import_orders_from_date,
                 DEFAULT_SERVER_DATETIME_FORMAT)
         else:
             from_date = None
         sale_order_import_batch.delay(
             session,
             'magento.sale.order',
             backend_id,
             {'magento_storeview_id': storeview.magento_id,
              'from_date': from_date,
              'to_date': import_start_time},
             priority=1)  # executed as soon as possible
     # Records from Magento are imported based on their `created_at`
     # date.  This date is set on Magento at the beginning of a
     # transaction, so if the import is run between the beginning and
     # the end of a transaction, the import of a record may be
     # missed.  That's why we add a small buffer back in time where
     # the eventually missed records will be retrieved.  This also
     # means that we'll have jobs that import twice the same records,
     # but this is not a big deal because the sales orders will be
     # imported the first time and the jobs will be skipped on the
     # subsequent imports
     next_time = import_start_time - timedelta(seconds=IMPORT_DELTA_BUFFER)
     next_time = next_time.strftime(DEFAULT_SERVER_DATETIME_FORMAT)
     self.write(cr, uid, ids, {'import_orders_from_date': next_time},
                context=context)
     return True
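
The buffer described in the closing comment can be illustrated in isolation;
a minimal sketch, assuming IMPORT_DELTA_BUFFER is configured as 30 seconds:

    from datetime import datetime, timedelta

    IMPORT_DELTA_BUFFER = 30  # seconds; assumed value for illustration
    import_start_time = datetime(2017, 1, 1, 12, 0, 0)
    next_time = import_start_time - timedelta(seconds=IMPORT_DELTA_BUFFER)
    # next_time is 11:59:30, so the next run re-fetches the last 30 seconds;
    # records already imported are simply skipped on the second pass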
Example 41
 def clean_old_messages_async(self):
     keep_messages_for = self.env.ref(
         'bus_integration.backend').keep_messages_for
     if not keep_messages_for:
         return
     limit_date = datetime.now() - relativedelta(days=keep_messages_for)
     old_msgs = self.search([('create_date', '<',
                              fields.Datetime.to_string(limit_date))])
     chunk_size = 100
     cpt = 0
     nb_chunks = int(math.ceil(len(old_msgs) / float(chunk_size)))
     session = ConnectorSession(self.env.cr, self.env.uid,
                                self.env.context)
     while old_msgs:
         cpt += 1
         chunk = old_msgs[:chunk_size]
         old_msgs = old_msgs[chunk_size:]
         job_bus_message_cleaner_chunk.delay(
             session,
             'bus.message',
             chunk.ids,
             description="bus message cleaner (chunk %s/%s)" % (cpt,
                                                                nb_chunks))
Example 42
 def do_transfer(self):
     self.ensure_one()
     if self._context.get('job_uuid', False):  # Called from @job
         return super(StockPicking, self).do_transfer()
     # Enqueue
     if self.async_process:
         if self.job_id:
             message = _('Do Transfer(s)')
             action = self.env.ref('pabi_utils.action_my_queue_job')
             raise RedirectWarning(message, action.id, _('Go to My Jobs'))
         session = ConnectorSession(self._cr, self._uid, self._context)
         description = '%s - Do Transfer(s)' % self.name
         uuid = action_do_transfer.delay(session,
                                         self._name,
                                         self.id,
                                         description=description)
         job = self.env['queue.job'].search([('uuid', '=', uuid)], limit=1)
         # Process Name
         job.process_id = self.env.ref('pabi_async_process.'
                                       'stock_transfer')
     else:
         return super(StockPicking, self).do_transfer()
Example 43
 def run_report(self):
     # Enqueue
     if self.async_process:
         data = super(AccountTaxReportWizard, self).run_report()
         session = ConnectorSession(self._cr, self._uid, self._context)
         description = '%s - Print Tax Report' % self.tax_id.name
         uuid = action_run_tax_report.delay(session,
                                            data,
                                            self.print_format,
                                            description=description)
         job = self.env['queue.job'].search([('uuid', '=', uuid)], limit=1)
         # Process Name
         job.process_id = self.env.ref('pabi_async_process.tax_report')
         # Checking for running task, use the same signature as delay()
         task_name = "%s(%s, u'%s')" % \
             ('action_run_tax_report', data, self.print_format)
         self._check_queue(task_name,
                           desc=description,
                           type='never',
                           uuid=uuid)
     else:
         return super(AccountTaxReportWizard, self).run_report()
Example 44
 def action_done(self, cr, uid, ids, context=None):
     res = super(stock_picking, self).action_done(cr,
                                                  uid,
                                                  ids,
                                                  context=context)
     session = ConnectorSession(cr, uid, context=context)
     # Look if it exists a backorder, in that case call for partial
     picking_records = self.read(cr,
                                 uid,
                                 ids,
                                 ['id', 'related_backorder_ids', 'type'],
                                 context=context)
     for picking_vals in picking_records:
         if picking_vals['type'] != 'out':
             continue
         if picking_vals['related_backorder_ids']:
             picking_method = 'partial'
         else:
             picking_method = 'complete'
         on_picking_out_done.fire(session, self._name, picking_vals['id'],
                                  picking_method)
     return res
    def button_mark(self, cr, uid, ids, context=None):
        """Create a single job that will create one job per payment order.

        Return action.

        """
        session = ConnectorSession(cr, uid, context=context)
        for wizard_id in ids:
            # to find out what _classic_write does, read the documentation.
            wizard_data = self.read(cr,
                                    uid,
                                    wizard_id,
                                    context=context,
                                    load='_classic_write')
            wizard_data.pop('id')
            active_ids = context['active_ids']
            if context.get('automated_test_execute_now'):
                process_wizard(session, self._name, wizard_data)
            else:
                process_wizard.delay(session, self._name, wizard_data)

        return {'type': 'ir.actions.act_window_close'}
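
The direct call versus .delay() branch above relies on the connector job
contract: a function decorated with @job remains an ordinary callable, so
tests can run it inline while production code enqueues it. A minimal sketch,
assuming the OCA connector's @job decorator:

    from openerp.addons.connector.queue.job import job

    @job
    def process_wizard(session, model_name, wizard_data):
        # runs inline when called directly (as in the automated-test branch
        # above), or in a queue worker when enqueued via .delay()
        pass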
Example 46
 def import_sale_orders(self):
     session = ConnectorSession(self.env.cr, self.env.uid,
                                context=self.env.context)
     import_start_time = datetime.now()
     for storeview in self:
         if storeview.no_sales_order_sync:
             _logger.debug("The storeview '%s' is active in Magento "
                           "but is configured not to import the "
                           "sales orders", storeview.name)
             continue
         backend_id = storeview.backend_id.id
         if storeview.import_orders_from_date:
             from_string = fields.Datetime.from_string
             from_date = from_string(storeview.import_orders_from_date)
         else:
             from_date = None
         sale_order_import_batch.delay(
             session,
             'magento.sale.order',
             backend_id,
             {'magento_storeview_id': storeview.magento_id,
              'from_date': from_date,
              'to_date': import_start_time},
             priority=1)  # executed as soon as possible
     # Records from Magento are imported based on their `created_at`
     # date.  This date is set on Magento at the beginning of a
     # transaction, so if the import is run between the beginning and
     # the end of a transaction, the import of a record may be
     # missed.  That's why we add a small buffer back in time where
     # the eventually missed records will be retrieved.  This also
     # means that we'll have jobs that import twice the same records,
     # but this is not a big deal because the sales orders will be
     # imported the first time and the jobs will be skipped on the
     # subsequent imports
     next_time = import_start_time - timedelta(seconds=IMPORT_DELTA_BUFFER)
     next_time = fields.Datetime.to_string(next_time)
     self.write({'import_orders_from_date': next_time})
     return True
Example 47
    def _procure_orderpoint_confirm(self, use_new_cursor=False, company_id=False):
        """
        Create procurement based on Orderpoint

        :param bool use_new_cursor: if set, use a dedicated cursor and auto-commit after processing each procurement.
            This is appropriate for batch jobs only.
        """
        orderpoint_env = self.env['stock.warehouse.orderpoint']
        dom = company_id and [('company_id', '=', company_id)] or []
        if self.env.context.get('compute_product_ids') and not self.env.context.get('compute_all_products'):
            dom += [('product_id', 'in', self.env.context.get('compute_product_ids'))]
        if self.env.context.get('compute_supplier_ids') and not self.env.context.get('compute_all_products'):
            supplierinfo_ids = self.env['product.supplierinfo']. \
                search([('name', 'in', self.env.context['compute_supplier_ids'])])
            read_supplierinfos = supplierinfo_ids.read(['id', 'product_tmpl_id'], load=False)
            dom += [('product_id.product_tmpl_id', 'in', [item['product_tmpl_id'] for item in read_supplierinfos])]
        orderpoint_ids = orderpoint_env.search(dom)
        op_ids = orderpoint_ids.read(['id', 'product_id'], load=False)

        result = dict()
        for row in op_ids:
            if row['product_id'] not in result:
                result[row['product_id']] = list()
            result[row['product_id']].append(row['id'])
        product_ids = result.values()

        while product_ids:
            products = product_ids[:ORDERPOINT_CHUNK]
            product_ids = product_ids[ORDERPOINT_CHUNK:]
            orderpoints = flatten(products)
            if self.env.context.get('without_job'):
                for op in self.env['stock.warehouse.orderpoint'].browse(orderpoints):
                    op.process()
            else:
                process_orderpoints.delay(ConnectorSession.from_env(self.env), 'stock.warehouse.orderpoint',
                                          orderpoints, self.env.context,
                                          description="Computing orderpoints %s" % orderpoints)
        return {}
    def setUp(self):
        super(TestIntCSVParse, self).setUp()
        self.backend_record = Mock()
        self.session = ConnectorSession(self.cr, self.uid)
        self.model_name = 'ir.attachment.binding'
        self.backend_id = self.session.create(
            'file_import.backend',
            {
                'name': 'Test File Import',
                'ftp_host': 'localhost',
                'ftp_user': '******',
                'ftp_password': '******',
                'ftp_input_folder': 'to_openerp',
                'ftp_failed_folder': 'from_openerp',
            })

        self.env = Environment(
            self.backend_record,
            self.session,
            self.model_name
        )

        self.policy = CSVParsePolicy(self.env)

        with open(expand_path('two_chunks.csv')) as input_file:
            file_content = input_file.read()

        self.document_id = self.session.create(
            'ir.attachment.binding', {
                'datas': base64.b64encode(file_content),
                'datas_fname': 'two_chunks.csv',
                'name': 'two_chunks.csv',
                'backend_id': self.backend_id,
            })

        self.document = self.session.browse(
            'ir.attachment.binding',
            self.document_id)
Example 49
 def _import_from_date(self,
                       cr,
                       uid,
                       ids,
                       model,
                       from_date_field,
                       context=None):
     if not hasattr(ids, '__iter__'):
         ids = [ids]
     self.check_magento_structure(cr, uid, ids, context=context)
     session = ConnectorSession(cr, uid, context=context)
     import_start_time = datetime.now()
     for backend in self.browse(cr, uid, ids, context=context):
         from_date = getattr(backend, from_date_field)
         if from_date:
             from_date = datetime.strptime(from_date,
                                           DEFAULT_SERVER_DATETIME_FORMAT)
         else:
             from_date = None
         import_batch.delay(session,
                            model,
                            backend.id,
                            filters={
                                'from_date': from_date,
                                'to_date': import_start_time
                            })
     # Records from Magento are imported based on their `created_at`
     # date.  This date is set on Magento at the beginning of a
     # transaction, so if the import is run between the beginning and
     # the end of a transaction, the import of a record may be
     # missed.  That's why we add a small buffer back in time where
     # the eventually missed records will be retrieved.  This also
     # means that we'll have jobs that import twice the same records,
     # but this is not a big deal because they will be skipped when
     # the last `sync_date` is the same.
     next_time = import_start_time - timedelta(seconds=IMPORT_DELTA_BUFFER)
     next_time = next_time.strftime(DEFAULT_SERVER_DATETIME_FORMAT)
     self.write(cr, uid, ids, {from_date_field: next_time}, context=context)
Example 50
 def action_get_report(self):
     self.ensure_one()
     # Enqueue
     if self.async_process:
         session = ConnectorSession(self._cr, self._uid, self._context)
         description = 'XLSX Report - %s' % (self._name, )
         uuid = get_report_job.delay(session,
                                     self._name,
                                     self.id,
                                     description=description)
         self.write({'state': 'get', 'uuid': uuid})
     else:
         out_file, out_name = self.get_report()
         self.write({'state': 'get', 'data': out_file, 'name': out_name})
     return {
         'type': 'ir.actions.act_window',
         'res_model': self._name,
         'view_mode': 'form',
         'view_type': 'form',
         'res_id': self.id,
         'views': [(False, 'form')],
         'target': 'new',
     }
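For context, a hedged sketch of what the `get_report_job` enqueued above could look like, using the connector's `job` decorator; the body is an assumption for illustration, not the module's actual implementation. Note that `description` is consumed by `delay()` itself, so the job function only receives the positional arguments:

from openerp.addons.connector.queue.job import job

@job
def get_report_job(session, model_name, res_id):
    # hypothetical body: render the report inside the job and store the
    # result back on the wizard record that enqueued it
    record = session.env[model_name].browse(res_id)
    out_file, out_name = record.get_report()
    record.write({'state': 'get', 'data': out_file, 'name': out_name})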
Example 51
 def setUp(self):
     super(test_related_action_storage, self).setUp()
     cr, uid = self.cr, self.uid
     backend_model = self.registry('magento.backend')
     self.session = ConnectorSession(cr, uid)
     warehouse_id = self.ref('stock.warehouse0')
     backend_id = backend_model.create(
         cr,
         uid,
         {'name': 'Test Magento',
             'version': '1.7',
             'location': 'http://anyurl',
             'username': '******',
             'warehouse_id': warehouse_id,
             'password': '******'})
     self.backend = backend_model.browse(cr, uid, backend_id)
     # import the base information
     with mock_api(magento_base_responses):
         import_batch(self.session, 'magento.website', backend_id)
         import_batch(self.session, 'magento.store', backend_id)
         import_batch(self.session, 'magento.storeview', backend_id)
     self.MagentoProduct = self.registry('magento.product.product')
     self.QueueJob = self.registry('queue.job')
 def setUp(self):
     super(test_import_address_book, self).setUp()
     self.backend_model = self.registry('magento.backend')
     self.session = ConnectorSession(self.cr, self.uid)
     self.model = self.registry('magento.res.partner')
     self.address_model = self.registry('magento.address')
     backend_ids = self.backend_model.search(
         self.cr, self.uid, [('name', '=', 'Test Magento Address book')])
     if backend_ids:
         self.backend_id = backend_ids[0]
     else:
         data_obj = self.registry('ir.model.data')
         warehouse_id = data_obj.get_object_reference(
             self.cr, self.uid, 'stock', 'warehouse0')[1]
         self.backend_id = self.backend_model.create(
             self.cr, self.uid, {
                 'name': 'Test Magento Address book',
                 'version': '1.7',
                 'location': 'http://anyurl',
                 'username': '******',
                 'warehouse_id': warehouse_id,
                 'password': '******'
             })
    def handler_web_children_hold(self):

        headers = request.httprequest.headers
        self._validate_headers(headers)

        # load children via a search on the child pool
        child_research = request.env['compassion.childpool.search'].sudo()
        research = child_research.create({'take': 5})
        research.rich_mix()

        # create a hold for all children found
        session = ConnectorSession.from_env(request.env)
        hold_children_job.delay(session, research.id)

        data = ""
        # return the basic info of each child found
        for child in research.global_child_ids:
            data += child.name + ' ' + child.birthdate + '<br>'

        headers = Headers()
        response = Response(data, content_type='text/html', headers=headers)

        return response
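A heavily hedged sketch of what the `hold_children_job` delayed above might look like; both the decorator usage and the body are assumptions (the actual hold logic lives in the compassion modules):

from openerp.addons.connector.queue.job import job

@job
def hold_children_job(session, research_id):
    # hypothetical body: reserve every child returned by the pool search
    research = session.env['compassion.childpool.search'].browse(research_id)
    for child in research.global_child_ids:
        child.hold()  # assumed helper that creates the hold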
Example 54
 def cron_compute_coverage_state(self):
     pol_coverage_to_recompute = self.search([
         ('order_id.state', 'not in', ['draft', 'done', 'cancel']),
         ('order_id.partner_id.active', '=', True),
         ('remaining_qty', '>', 0), ('product_id', '!=', False)
     ])
     products_to_process_ids = list(
         set([line.product_id.id for line in pol_coverage_to_recompute]))
     products = self.env['product.product'].search([
         ('id', 'in', products_to_process_ids)
     ])
     nb_products = len(products)
     index = 0
     for product in products:
         index += 1
         description = u"Update coverage states for product %s (%s/%s)" % (
             product.default_code, index, nb_products)
         job_compute_coverage_state.delay(
             ConnectorSession.from_env(self.env),
             'purchase.order.line',
             pol_coverage_to_recompute.ids, [product.id],
             context=dict(self.env.context),
             description=description)
Example 55
    def webhook_issue(self, issue_id=None, **kw):
        ensure_db()
        request.uid = openerp.SUPERUSER_ID
        env = request.env
        backend = env['jira.backend'].search([('use_webhooks', '=', True)],
                                             limit=1)
        if not backend:
            _logger.warning('Received a webhook from Jira but cannot find a '
                            'Jira backend with webhooks activated')
            return

        action = request.jsonrequest['webhookEvent']

        issue = request.jsonrequest['issue']
        issue_id = issue['id']

        session = ConnectorSession.from_env(env)
        if action == 'jira:issue_deleted':
            delete_record.delay(session, 'jira.project.task', backend.id,
                                issue_id)
        else:
            import_record.delay(session, 'jira.project.task', backend.id,
                                issue_id)
 def generate_invoices(self, invoicer=None):
     """ By default, launch asynchronous job to perform the task.
         Context value async_mode set to False can force to perform
         the task immediately.
     """
     if invoicer is None:
         invoicer = self.env['recurring.invoicer'].create(
             {'source': self._name})
     if self.env.context.get('async_mode', True):
         session = ConnectorSession.from_env(self.env)
         generate_invoices_job.delay(
             session, self._name, self.ids, invoicer.id)
     else:
         # Prevent two generations at the same time
         jobs = self.env['queue.job'].search([
             ('channel', '=', 'root.recurring_invoicer'),
             ('state', '=', 'started')])
         if jobs:
             raise exceptions.Warning(
                 _("Generation already running"),
                 _("A generation has already started in background. "
                   "Please wait for it to finish."))
         self._generate_invoices(invoicer)
     return invoicer
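A short usage sketch of the `async_mode` switch described in the docstring; `contracts` stands in for a hypothetical recordset of the model exposing `generate_invoices`:

# default behaviour: enqueue a job and return the invoicer immediately
invoicer = contracts.generate_invoices()

# force immediate, synchronous generation instead
invoicer = contracts.with_context(async_mode=False).generate_invoices()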
 def setUp(self):
     super(TestFTPGetPolicyWithOE, self).setUp()
     self.backend_record = Mock()
     self.session = ConnectorSession(self.cr, self.uid)
     self.model_name = 'ir.attachment.binding'
     self.backend_model = self.registry('file_import.backend')
     self.backend_id = self.backend_model.create(
         self.cr,
         self.uid,
         {
             'name': 'Test File Import',
             'ftp_host': 'localhost',
             'ftp_user': '******',
             'ftp_password': '******',
             'ftp_input_folder': 'to_openerp',
             'ftp_failed_folder': 'from_openerp',
         },
         self.session.context)
     self.env = Environment(
         self.backend_record,
         self.session,
         self.model_name
     )
     self.policy = FTPFileGetterPolicy(self.env)
class test_connector_session(common.TransactionCase):
    """ Test ConnectorSession """

    def setUp(self):
        super(test_connector_session, self).setUp()
        self.context = {"lang": "fr_FR"}
        self.session = ConnectorSession(self.cr, self.uid, context=self.context)

    def test_env(self):
        """ Check the session properties """
        session = self.session
        self.assertEqual(session.cr, session.env.cr)
        self.assertEqual(session.uid, session.env.uid)
        self.assertEqual(session.context, session.env.context)
        self.assertEqual(session.pool, session.env.registry)

    def test_from_env(self):
        """ ConnectorSession.from_env(env) """
        session = ConnectorSession.from_env(self.env)
        self.assertEqual(session.cr, self.env.cr)
        self.assertEqual(session.uid, self.env.uid)
        self.assertEqual(session.context, self.env.context)
        self.assertEqual(session.pool, self.env.registry)

    def test_change_user(self):
        """
        Change the user and check if it is reverted correctly at the end
        """
        original_uid = self.session.uid
        original_env = self.session.env
        new_uid = self.env.ref("base.user_demo").id
        with self.session.change_user(new_uid):
            # a new openerp.api.Environment is generated with the user
            self.assertNotEqual(self.session.env, original_env)
            self.assertEqual(self.session.uid, new_uid)
        self.assertEqual(self.session.env, original_env)
        self.assertEqual(self.session.uid, original_uid)

    def test_model_with_transaction(self):
        """ Use a method on a model from the pool """
        res_users = self.registry("res.users").search_count(self.cr, self.uid, [])
        sess_res_users_obj = self.session.pool.get("res.users")
        sess_res_users = sess_res_users_obj.search_count(self.cr, self.uid, [])
        self.assertEqual(sess_res_users, res_users)

    def test_new_model_with_transaction(self):
        """ Use a method on a model from the new api """
        res_users = self.env["res.users"].search_count([])
        sess_res_users_model = self.session.env["res.users"]
        sess_res_users = sess_res_users_model.search_count([])
        self.assertEqual(sess_res_users, res_users)

    def test_change_context(self):
        """ Change the context, it is reverted at the end """
        test_key = "test_key"
        self.assertNotIn(test_key, self.session.context)
        with self.session.change_context({test_key: "value"}):
            self.assertEqual(self.session.context.get("test_key"), "value")
        self.assertNotIn(test_key, self.session.context)

    def test_change_context_keyword(self):
        """ Change the context by keyword, it is reverted at the end """
        test_key = "test_key"
        self.assertNotIn(test_key, self.session.context)
        with self.session.change_context(test_key="value"):
            self.assertEqual(self.session.context.get("test_key"), "value")
        self.assertNotIn(test_key, self.session.context)

    def test_change_context_uninitialized(self):
        """ Change the context on a session not initialized with a context """
        session = ConnectorSession(self.cr, self.uid)
        test_key = "test_key"
        with session.change_context({test_key: "value"}):
            self.assertEqual(session.context.get("test_key"), "value")
        self.assertNotIn(test_key, session.context)

    def test_is_module_installed(self):
        """ Test on an installed module """
        self.assertTrue(self.session.is_module_installed("connector"))

    def test_is_module_uninstalled(self):
        """ Test on an installed module """
        self.assertFalse(self.session.is_module_installed("lambda"))

    def test_is_module_installed_cache_not_propagated(self):
        """ Test if the cache is well different for the different modules """
        self.assertTrue(self.session.is_module_installed("connector"))
        self.assertFalse(self.session.is_module_installed("#dummy#"))

    def test_is_module_installed_cache_invalidation(self):
        """ Test on an invalidation of cache about installed modules """
        module = self.env["ir.module.module"]
        domain = [("name", "=", "base")]
        self.assertTrue(self.session.is_module_installed("base"))
        # only to check that the cache works; the invalidation happens only
        # when the `state` field is modified through the `write` method,
        # UGLY but no other solution
        self.env.cr.execute(
            "UPDATE ir_module_module SET state='uninstalled' "
            "WHERE name='base'")
        self.assertTrue(self.session.is_module_installed("base"))
        module.search(domain).state = "uninstalled"
        self.assertFalse(self.session.is_module_installed("base"))
        module.search(domain).state = "installed"
        self.assertTrue(self.session.is_module_installed("base"))
Example 59
 def _postpone_deletion(self):
     postpone = datetime.now() + timedelta(seconds=10)
     session = ConnectorSession.from_env(self.env)
     unlink_children_job.delay(session, self._name, self.ids, eta=postpone)