# Example 1
 def test_async_import(self):
     """Asynchronous import with the default (large) chunk size.

     The import call itself creates no moves; it only enqueues a split
     job, which in turn enqueues a single load job covering the file.
     """
     result = self._do_import("account.move.csv", use_queue=True)
     self.assertFalse(result, repr(result))
     # nothing has been imported yet
     moves = self.move_obj.search(
         [("name", "in", ("TEST-1", "TEST-2", "TEST-3"))]
     )
     self.assertEqual(len(moves), 0)
     # exactly one job exists: the one splitting the file
     split_job = self.job_obj.search([])
     self.assertEqual(len(split_job), 1)
     # job names are part of the contract
     self.assertEqual(
         split_job.name,
         "Import Account Entry from file account.move.csv",
     )
     Job.load(self.env, split_job.uuid).perform()
     # splitting produced a single load job (one chunk)
     load_job = self.job_obj.search([("id", "!=", split_job.id)])
     self.assertEqual(len(load_job), 1)
     self.assertEqual(
         load_job.name,
         "Import Account Entry from file account.move.csv - #0 - lines 2 to 10",
     )
     Job.load(self.env, load_job.uuid).perform()
     self._check_import_result()
# Example 2
 def test_30_export_all_categories(self):
     """Exporting all categories chains two jobs ending in one API call."""
     queue_job = self.env['queue.job']
     jobs_before = queue_job.search([])
     # a job is created to export all products
     self.shopinvader_backend.export_all_category()
     jobs_after = queue_job.search([])
     created = jobs_after - jobs_before
     self.assertEqual(1, len(created))
     job = Job.load(self.env, created.uuid)
     self.assertEqual(_('Prepare a batch export of indexes'),
                      job.description)
     jobs_before = jobs_after
     # performing the batch job spawns the actual export job
     job.perform()
     jobs_after = queue_job.search([])
     created = jobs_after - jobs_before
     self.assertEqual(1, len(created))
     job = Job.load(self.env, created.uuid)
     count = self.env['shopinvader.category'].search_count([])
     self.assertEqual(
         _("Export %d records of %d for index 'algolia-category'") %
         (count, count), job.description)
     # the last job is the one performing the export
     job = Job.load(self.env, created.uuid)
     with mock_api(self.env) as mocked_api:
         job.perform()
     self.assertIn('algolia-category', mocked_api.index)
     index = mocked_api.index['algolia-category']
     self.assertEqual(1, len(index._calls),
                      "All categories must be exported in 1 call")
     method, values = index._calls[0]
     self.assertEqual('add_objects', method)
     self.assertEqual(count, len(values),
                      "All categories should be exported")
# Example 3
 def test_batch(self):
     """End-to-end lifecycle of a job batch: draft -> enqueued ->
     progress -> finished, with completeness tracked along the way.
     """
     self.cr.execute('delete from queue_job')
     batch = self.env['queue.job.batch'].get_new_batch('TEST')
     self.assertFalse(batch.job_ids)
     model = self.env['test.queue.job'].with_context(job_batch=batch)
     job_1 = model.with_delay().testing_method()
     self.assertEqual(job_1.db_record().state, 'pending')
     job_2 = model.with_delay().testing_method()
     self.assertEqual(job_2.db_record().state, 'pending')
     batch.refresh()
     jobs = job_1.db_record()
     jobs |= job_2.db_record()
     self.assertEqual(jobs, batch.job_ids)
     batch.set_read()
     self.assertTrue(batch.is_read)
     self.assertEqual(batch.state, 'draft')
     # both jobs stay pending until the batch is enqueued
     # (the original asserted job_2 twice; check both jobs)
     self.assertEqual(job_1.state, 'pending')
     self.assertEqual(job_2.state, 'pending')
     batch.enqueue()
     self.assertEqual(batch.state, 'enqueued')
     self.assertEqual(job_1.state, 'pending')
     self.assertEqual(job_2.state, 'pending')
     # domain matching any job other than the two batch members
     job_domain = [
         ('uuid', '!=', job_1.uuid),
         ('uuid', '!=', job_2.uuid),
     ]
     update = self.env['queue.job'].search(job_domain)
     self.assertFalse(update)
     job = Job.load(self.env, job_1.uuid)
     job.perform()
     job.set_done()
     job.store()
     # completing a member job spawns one batch-update job
     update = self.env['queue.job'].search(job_domain)
     self.assertTrue(update)
     self.assertEqual(1, len(update))
     job = Job.load(self.env, update.uuid)
     job.perform()
     job.set_done()
     job.store()
     batch.refresh()
     self.assertEqual(batch.state, 'progress')
     self.assertEqual(batch.completeness, 0.5)
     job_domain.append(('uuid', '!=', update.uuid))
     update = self.env['queue.job'].search(job_domain)
     self.assertFalse(update)
     job = Job.load(self.env, job_2.uuid)
     job.perform()
     job.set_done()
     job.store()
     update = self.env['queue.job'].search(job_domain)
     self.assertTrue(update)
     self.assertEqual(1, len(update))
     job = Job.load(self.env, update.uuid)
     job.perform()
     job.set_done()
     job.store()
     batch.refresh()
     self.assertEqual(batch.state, 'finished')
     self.assertEqual(batch.completeness, 1)
     # finishing the batch resets the "read" flag
     self.assertFalse(batch.is_read)
 def test_async_import(self):
     """Asynchronous import using the default (large) chunk size."""
     result = self._do_import('account.move.csv', use_queue=True)
     self.assertFalse(result, repr(result))
     # nothing imported yet
     moves = self.move_obj.search(
         [('name', 'in', ('TEST-1', 'TEST-2', 'TEST-3'))])
     self.assertEqual(len(moves), 0)
     # a single job must exist: the one splitting the file
     split_job = self.job_obj.search([])
     self.assertEqual(len(split_job), 1)
     # job names are part of the contract
     self.assertEqual(
         split_job.name,
         "Import Journal Entries from file account.move.csv")
     Job.load(self.env, split_job.uuid).perform()
     # the split generated exactly one load job (one chunk)
     load_job = self.job_obj.search([('id', '!=', split_job.id)])
     self.assertEqual(len(load_job), 1)
     self.assertEqual(
         load_job.name,
         "Import Journal Entries from file account.move.csv - "
         "#0 - lines 2 to 10")
     Job.load(self.env, load_job.uuid).perform()
     self._check_import_result()
 def test_batch_processing(self):
     """Batch asset computation: a dispatcher job enqueues one job per
     asset, and only performing both creates the depreciation move.
     """
     wiz = self.wiz_obj.create(
         {"batch_processing": True, "date_end": self.nextmonth}
     )
     # asset starts as draft; confirming it opens it
     self.assertEqual(self.asset01.state, "draft")
     self.asset01.validate()
     self.assertEqual(self.asset01.state, "open")
     self.asset01.compute_depreciation_board()

     def posted_lines():
         # depreciation lines that already carry an account move
         return self.asset01.depreciation_line_ids.filtered(
             lambda line: line.type == "depreciate" and line.move_id
         )

     self.assertEqual(len(posted_lines()), 0)
     wiz.with_context(test_queue_job_no_delay=False).asset_compute()
     # still nothing posted: only a job has been enqueued
     self.assertEqual(len(posted_lines()), 0)
     job_name = "Creating jobs to create moves for assets to %s" % (self.nextmonth)
     jobs = self.env["queue.job"].search(
         [("name", "=", job_name)], order="date_created desc", limit=1
     )
     self.assertEqual(
         jobs.job_function_id,
         self.env.ref(
             "account_asset_batch_compute."
             "job_function_account_asset_compute_asset_compute"
         ),
     )
     self.assertEqual(len(jobs), 1)
     # perform the dispatcher job; it only enqueues the per-asset job
     Job.load(self.env, jobs.uuid).perform()
     self.assertEqual(len(posted_lines()), 0)
     job_name = "Creating move for asset with id {} to {}".format(
         self.asset01.id,
         self.nextmonth,
     )
     jobs = self.env["queue.job"].search(
         [("name", "=", job_name)], order="date_created desc", limit=1
     )
     self.assertEqual(len(jobs), 1)
     self.assertEqual(
         jobs.job_function_id,
         self.env.ref(
             "account_asset_batch_compute.job_function_account_asset_compute_entries"
         ),
     )
     # performing the per-asset job finally posts one move
     Job.load(self.env, jobs.uuid).perform()
     self.assertEqual(len(posted_lines()), 1)
# Example 6
 def test_job_unlinked(self):
     """Loading a job whose DB record was deleted raises NoSuchJobError."""
     stored_job = Job(self.method, args=("o", "k"), kwargs={"c": "!"})
     stored_job.store()
     record = self.queue_job.search([("uuid", "=", stored_job.uuid)])
     record.unlink()
     with self.assertRaises(NoSuchJobError):
         Job.load(self.env, stored_job.uuid)
 def test_export_jobs(self):
     """export_all_index(delay=True) chains a batch job and an export job."""
     queue_job_model = self.env['queue.job']
     jobs_before = queue_job_model.search([])
     with mock_api(self.env) as mocked_api:
         self.assertNotIn(self.se_index.name, mocked_api.index)
         self.se_index_model.export_all_index(delay=True)
     # by default the export method create 2 jobs:
     # the first one splits the bindings to export into batches,
     # the second one exports each batch
     jobs_after = queue_job_model.search([])
     created = jobs_after - jobs_before
     self.assertEqual(1, len(created))
     job = Job.load(self.env, created.uuid)
     self.assertEqual(_('Prepare a batch export of indexes'),
                      job.description)
     # at this stage the mocked API has not been called yet
     self.assertNotIn(self.se_index.name, mocked_api.index)
     jobs_before = jobs_after
     # performing the batch job spawns the export job
     job.perform()
     jobs_after = queue_job_model.search([])
     created = jobs_after - jobs_before
     self.assertEqual(1, len(created))
     job = Job.load(self.env, created.uuid)
     count = self.env['res.partner'].search_count([])
     self.assertEqual(
         _("Export %d records of %d for index 'partner index'") %
         (count, count), job.description)
     self.assertNotIn(self.se_index.name, mocked_api.index)
     # the last job is the one performing the export
     job = Job.load(self.env, created.uuid)
     with mock_api(self.env) as mocked_api:
         job.perform()
     self.assertIn(self.se_index.name, mocked_api.index)
# Example 8
 def test_job_unlinked(self):
     """A job whose database record was removed can no longer be loaded."""
     stored_job = Job(self.method, args=('o', 'k'), kwargs={'c': '!'})
     stored_job.store()
     record = self.queue_job.search([('uuid', '=', stored_job.uuid)])
     record.unlink()
     with self.assertRaises(NoSuchJobError):
         Job.load(self.env, stored_job.uuid)
 def test_batch(self):
     """End-to-end lifecycle of a job batch: draft -> enqueued ->
     progress -> finished, with completeness tracked along the way.
     """
     self.cr.execute("delete from queue_job")
     batch = self.env["queue.job.batch"].get_new_batch("TEST")
     self.assertFalse(batch.job_ids)
     model = self.env["test.queue.job"].with_context(job_batch=batch)
     job_1 = model.with_delay().testing_method()
     self.assertEqual(job_1.db_record().state, "pending")
     job_2 = model.with_delay().testing_method()
     self.assertEqual(job_2.db_record().state, "pending")
     batch.refresh()
     jobs = job_1.db_record()
     jobs |= job_2.db_record()
     self.assertEqual(jobs, batch.job_ids)
     batch.set_read()
     self.assertTrue(batch.is_read)
     self.assertEqual(batch.state, "draft")
     # both jobs stay pending until the batch is enqueued
     # (the original asserted job_2 twice; check both jobs)
     self.assertEqual(job_1.state, "pending")
     self.assertEqual(job_2.state, "pending")
     batch.enqueue()
     self.assertEqual(batch.state, "enqueued")
     self.assertEqual(job_1.state, "pending")
     self.assertEqual(job_2.state, "pending")
     # domain matching any job other than the two batch members
     job_domain = [
         ("uuid", "!=", job_1.uuid),
         ("uuid", "!=", job_2.uuid),
     ]
     update = self.env["queue.job"].search(job_domain)
     self.assertFalse(update)
     job = Job.load(self.env, job_1.uuid)
     job.perform()
     job.set_done()
     job.store()
     # completing a member job spawns one batch-update job
     update = self.env["queue.job"].search(job_domain)
     self.assertTrue(update)
     self.assertEqual(1, len(update))
     job = Job.load(self.env, update.uuid)
     job.perform()
     job.set_done()
     job.store()
     batch.refresh()
     self.assertEqual(batch.state, "progress")
     self.assertEqual(batch.completeness, 0.5)
     job_domain.append(("uuid", "!=", update.uuid))
     update = self.env["queue.job"].search(job_domain)
     self.assertFalse(update)
     job = Job.load(self.env, job_2.uuid)
     job.perform()
     job.set_done()
     job.store()
     update = self.env["queue.job"].search(job_domain)
     self.assertTrue(update)
     self.assertEqual(1, len(update))
     job = Job.load(self.env, update.uuid)
     job.perform()
     job.set_done()
     job.store()
     batch.refresh()
     self.assertEqual(batch.state, "finished")
     self.assertEqual(batch.completeness, 1)
     # finishing the batch resets the "read" flag
     self.assertFalse(batch.is_read)
# Example 10
    def test_read(self):
        """A stored job can be reloaded with all its fields intact."""
        eta = datetime.now() + timedelta(hours=5)
        test_job = Job(
            self.method,
            args=("o", "k"),
            kwargs={"c": "!"},
            priority=15,
            eta=eta,
            description="My description",
        )
        test_job.user_id = 1
        test_job.worker_pid = 99999  # normally set on "set_start"
        test_job.company_id = self.env.ref("base.main_company").id
        test_job.store()
        job_read = Job.load(self.env, test_job.uuid)
        # every plain attribute round-trips unchanged
        for attr in (
            "uuid", "model_name", "func", "args", "kwargs", "method_name",
            "description", "state", "priority", "exc_info", "result",
            "user_id", "company_id",
        ):
            self.assertEqual(getattr(test_job, attr), getattr(job_read, attr))
        self.assertEqual(test_job.worker_pid, 99999)
        delta = timedelta(seconds=1)  # DB does not keep milliseconds
        for attr in ("date_created", "date_started", "date_enqueued",
                     "date_done", "eta"):
            self.assertAlmostEqual(getattr(test_job, attr),
                                   getattr(job_read, attr),
                                   delta=delta)

        # dates set on the loaded job survive a store/load round-trip
        test_date = datetime(2015, 3, 15, 21, 7, 0)
        job_read.date_enqueued = test_date
        job_read.date_started = test_date
        job_read.date_done = test_date
        job_read.store()

        job_read = Job.load(self.env, test_job.uuid)
        for attr in ("date_started", "date_enqueued", "date_done"):
            self.assertAlmostEqual(getattr(job_read, attr), test_date,
                                   delta=delta)
 def test_reset_expired_password(self):
     """Simulate the cron call.

     One job is created per backend with an activated validity delay;
     running it performs the reset and enqueues follow-up work.
     """
     self._init_job_counter()
     self.backend._launch_reset_expired_password()
     self._check_nbr_job_created(1)
     reset_job = self.created_jobs
     self._init_job_counter()
     Job.load(self.env, reset_job.uuid).perform()
     # The reset password job + the export to locomotive
     self._check_nbr_job_created(2)
 def test_job(self):
     """Performing the five queued jobs confirms every draft sale."""
     self.assertEqual(
         self.sales.mapped("state"),
         ["draft", "draft", "draft", "draft", "draft", "sale"],
     )
     jobs = self.env["queue.job"].search([])
     self.assertEqual(len(jobs), 5)
     for queued in jobs:
         Job.load(self.env, queued.uuid).perform()
     # all sales are confirmed once every job has run
     self.assertEqual(set(self.sales.mapped("state")), {"sale"})
     self._close_session()
     self._check_closing_session()
 def test_async_import_smaller_aligned_chunks(self):
     """Chunks aligned on record boundaries.

     The last chunk ends exactly at the end of the file.
     """
     res = self._do_import('account.move.csv', use_queue=True, chunk_size=3)
     self.assertFalse(res, repr(res))
     # a single job must exist: the one splitting the file
     split_job = self.job_obj.search([])
     self.assertEqual(len(split_job), 1)
     Job.load(self.env, split_job.uuid).perform()
     # the split produced three load jobs (three chunks)
     load_jobs = self.job_obj.search([('id', '!=', split_job.id)],
                                     order='name')
     self.assertEqual(len(load_jobs), 3)
     expected_names = [
         "Import Journal Entries from file account.move.csv - "
         "#0 - lines 2 to 4",
         "Import Journal Entries from file account.move.csv - "
         "#1 - lines 5 to 7",
         "Import Journal Entries from file account.move.csv - "
         "#2 - lines 8 to 10",
     ]
     self.assertEqual([job.name for job in load_jobs], expected_names)
     # perform each chunk's load job
     for load_job in load_jobs:
         Job.load(self.env, load_job.uuid).perform()
     self._check_import_result()
# Example 14
 def test_async_import_smaller_misaligned_chunks(self):
     """Chunk size smaller than a record.

     The expected job names show that chunks still cover whole records.
     """
     res = self._do_import("account.move.csv", use_queue=True, chunk_size=2)
     self.assertFalse(res, repr(res))
     # a single job must exist: the one splitting the file
     split_job = self.job_obj.search([])
     self.assertEqual(len(split_job), 1)
     Job.load(self.env, split_job.uuid).perform()
     # the split produced three load jobs (three chunks)
     load_jobs = self.job_obj.search([("id", "!=", split_job.id)],
                                     order="name")
     self.assertEqual(len(load_jobs), 3)
     expected_names = [
         "Import Account Entry from file account.move.csv - #0 - lines 2 to 4",
         "Import Account Entry from file account.move.csv - #1 - lines 5 to 7",
         "Import Account Entry from file account.move.csv - #2 - lines 8 to 10",
     ]
     self.assertEqual([job.name for job in load_jobs], expected_names)
     # perform each chunk's load job
     for load_job in load_jobs:
         Job.load(self.env, load_job.uuid).perform()
     self._check_import_result()
# Example 15
    def test_02_delete_move_before_job_run(self):
        """
        Create a move, schedule its posting through the validate wizard,
        then delete the move before the job runs: the job must complete
        with a "nothing to do" result instead of failing.
        """
        move = self.create_account_move(3000)

        # eta=1000 delays execution so the job is still pending
        wizard = self.create_move_validate_wizard('mark', eta=1000)
        wizard.with_context({
            'active_ids': [move.id],
            'automated_test_execute_now': True,
        }).validate_move()

        job_uuid = move.post_job_uuid

        self.assertTrue(
            bool(job_uuid), msg="The job has not been created.")

        move.unlink()

        post_job = Job.load(self.env, job_uuid)
        post_job.perform()

        # assertEqual: assertEquals is a deprecated alias
        self.assertEqual(
            post_job.result,
            u'Nothing to do because the record has been deleted')
# Example 16
    def test_channel_on_job(self):
        """A job's channel follows its job function's channel, and can be
        overridden per job.
        """
        self.env['queue.job.function'].search([]).unlink()
        self.env['queue.job.channel'].search([('name', '!=', 'root')]).unlink()

        method = self.env['test.queue.channel'].job_a
        self.env['queue.job.function']._register_job(method)
        # NOTE: im_class exists on Python 2 bound methods only
        path_a = '<%s>.%s' % (method.im_class._name, method.__name__)
        job_func = self.function_model.search([('name', '=', path_a)])
        # assertEqual everywhere: assertEquals is a deprecated alias
        self.assertEqual(job_func.channel, 'root')

        # default channel comes from the job function
        test_job = Job(method)
        test_job.store()
        stored = self.env['queue.job'].search([('uuid', '=', test_job.uuid)])
        self.assertEqual(stored.channel, 'root')
        job_read = Job.load(self.env, test_job.uuid)
        self.assertEqual(job_read.channel, 'root')

        channel = self.channel_model.create({
            'name': 'sub',
            'parent_id': self.root_channel.id
        })
        job_func.channel_id = channel

        # new jobs pick up the function's new channel
        test_job = Job(method)
        test_job.store()
        stored = self.env['queue.job'].search([('uuid', '=', test_job.uuid)])
        self.assertEqual(stored.channel, 'root.sub')

        # it's also possible to override the channel
        test_job = Job(method, channel='root.sub.sub.sub')
        test_job.store()
        stored = self.env['queue.job'].search([('uuid', '=', test_job.uuid)])
        self.assertEqual(stored.channel, test_job.channel)
# Example 17
    def test_channel_on_job(self):
        """A job's channel follows its job function's channel, and can be
        overridden per job."""
        method = self.env["test.queue.channel"].job_a
        path_a = self.env["queue.job.function"].job_function_name(
            "test.queue.channel", "job_a")
        job_func = self.function_model.search([("name", "=", path_a)])

        self.assertEqual(job_func.channel, "root")

        # default channel comes from the job function
        job = Job(method)
        job.store()
        self.assertEqual(job.db_record().channel, "root")
        reloaded = Job.load(self.env, job.uuid)
        self.assertEqual(reloaded.channel, "root")

        sub_channel = self.env.ref("test_queue_job.channel_sub")
        job_func.channel_id = sub_channel

        # new jobs pick up the function's new channel
        job = Job(method)
        job.store()
        self.assertEqual(job.db_record().channel, "root.sub")

        # it's also possible to override the channel per job
        job = Job(method, channel="root.sub")
        job.store()
        self.assertEqual(job.db_record().channel, job.channel)
# Example 18
    def test_channel_on_job(self):
        """A job's channel follows its job function's channel, and can be
        overridden per job.
        """
        self.env["queue.job.function"].search([]).unlink()
        self.env["queue.job.channel"].search([("name", "!=", "root")]).unlink()

        method = self.env["test.queue.channel"].job_a
        self.env["queue.job.function"]._register_job(
            self.env["test.queue.channel"], method
        )
        path_a = "<{}>.{}".format(method.__self__.__class__._name, method.__name__)
        job_func = self.function_model.search([("name", "=", path_a)])
        # assertEqual everywhere: assertEquals is a deprecated alias
        self.assertEqual(job_func.channel, "root")

        # default channel comes from the job function
        test_job = Job(method)
        test_job.store()
        stored = self.env["queue.job"].search([("uuid", "=", test_job.uuid)])
        self.assertEqual(stored.channel, "root")
        job_read = Job.load(self.env, test_job.uuid)
        self.assertEqual(job_read.channel, "root")

        channel = self.channel_model.create(
            {"name": "sub", "parent_id": self.root_channel.id}
        )
        job_func.channel_id = channel

        # new jobs pick up the function's new channel
        test_job = Job(method)
        test_job.store()
        stored = self.env["queue.job"].search([("uuid", "=", test_job.uuid)])
        self.assertEqual(stored.channel, "root.sub")

        # it's also possible to override the channel
        test_job = Job(method, channel="root.sub.sub.sub")
        test_job.store()
        stored = self.env["queue.job"].search([("uuid", "=", test_job.uuid)])
        self.assertEqual(stored.channel, test_job.channel)
# Example 19
    def test_01_wizard_asynchronous_post(self):
        """
        Posting through the validate wizard marks the move "to post",
        attaches a job, and performing that job posts the move.
        """
        move = self.create_account_move(1000)
        self.assertEqual(move.state, 'draft')

        wizard = self.create_move_validate_wizard('mark')
        wizard.with_context({
            'active_ids': [move.id],
            'automated_test_execute_now': True,
        }).validate_move()
        move.invalidate_cache()
        job_uuid = move.post_job_uuid

        self.assertTrue(move.to_post,
                        msg="Move should be marked as 'to post'.")
        self.assertTrue(bool(job_uuid),
                        msg="A job should have been assigned to the move.")

        # running the job actually posts the move
        Job.load(self.env, job_uuid).perform()

        self.assertEqual(move.state, 'posted', msg="Move should be posted.")
    def test_job_execute_after_closing(self):
        """Perform all but one confirmation job, close the session, then
        run a job again after the session is closed."""
        self.assertEqual(
            self.sales.mapped("state"),
            ["draft", "draft", "draft", "draft", "draft", "sale"],
        )
        jobs = self.env["queue.job"].search([])
        self.assertEqual(len(jobs), 5)
        # perform only the first four of the five queued jobs
        for job in jobs[:4]:
            Job.load(self.env, job.uuid).perform()
        # one sale is still draft: its job has not been performed yet
        self.assertEqual(
            self.sales.mapped("state"),
            ["draft", "sale", "sale", "sale", "sale", "sale"],
        )
        self._close_session()
        self._check_closing_session()

        # NOTE(review): this re-performs jobs[0], which already ran in the
        # loop above -- jobs[4] is the one left unperformed. Confirm whether
        # re-running an already-performed job after closing is the intent,
        # or whether this should be jobs[4].
        Job.load(self.env, jobs[0].uuid).perform()
# Example 21
 def test_store_env_su_sudo(self):
     """A job delayed from a sudoed recordset keeps superuser mode while
     recording the original (demo) user as the job's user.
     """
     demo_user = self.env.ref("base.user_demo")
     self.env = self.env(user=demo_user)
     delayable = self.env["test.queue.job"].sudo().with_delay()
     test_job = delayable.testing_method()
     stored = test_job.db_record()
     job_instance = Job.load(self.env, stored.uuid)
     self.assertTrue(job_instance.recordset.env.su)
     # was assertTrue(job_instance.user_id, demo_user): assertTrue's
     # second positional argument is the failure *message*, so the user
     # was never actually compared -- use assertEqual instead
     self.assertEqual(job_instance.user_id, demo_user.id)
# Example 22
 def test_job_identity_key_str(self):
     """An identity key given as a string round-trips through store/load."""
     identity = 'e294e8444453b09d59bdb6efbfec1323'
     stored_job = Job(self.method,
                      priority=15,
                      description="Test I am the first one",
                      identity_key=identity)
     stored_job.store()
     loaded = Job.load(self.env, stored_job.uuid)
     self.assertEqual(loaded.identity_key, identity)
 def test_2(self):
     """Batch asset computation: the dispatcher job enqueues a per-asset
     job, and only performing both posts a depreciation move.
     """
     wiz = self.wiz_obj.create({
         'batch_processing': True,
         'date_end': self.nextmonth
     })
     # the asset starts as draft; confirming it opens it
     self.assertEqual(self.asset01.state, 'draft')
     self.asset01.validate()
     self.assertEqual(self.asset01.state, 'open')
     self.asset01.compute_depreciation_board()
     # no depreciation line carries a move yet
     depreciation_line = self.asset01.depreciation_line_ids\
         .filtered(lambda r: r.type == 'depreciate' and r.move_id)
     self.assertTrue(len(depreciation_line) == 0)
     wiz.with_context(test_queue_job_no_delay=False).asset_compute()
     # still nothing posted: only a job has been enqueued
     depreciation_line = self.asset01.depreciation_line_ids \
         .filtered(lambda r: r.type == 'depreciate' and r.move_id)
     self.assertTrue(len(depreciation_line) == 0)
     job_name = "Creating jobs to create moves for assets to %s" % (
         self.nextmonth)
     jobs = self.env['queue.job'].search([('name', '=', job_name)],
                                         order='date_created desc',
                                         limit=1)
     self.assertTrue(len(jobs) == 1)
     job = Job.load(self.env, jobs.uuid)
     # perform the dispatcher job; it only enqueues the per-asset job
     job.perform()
     depreciation_line = self.asset01.depreciation_line_ids \
         .filtered(lambda r: r.type == 'depreciate' and r.move_id)
     self.assertTrue(len(depreciation_line) == 0)
     job_name = "Creating move for asset with id %s to %s" % (
         self.asset01.id, self.nextmonth)
     jobs = self.env['queue.job'].search([('name', '=', job_name)],
                                         order='date_created desc',
                                         limit=1)
     self.assertTrue(len(jobs) == 1)
     job = Job.load(self.env, jobs.uuid)
     # performing the per-asset job finally posts one move
     job.perform()
     depreciation_line = self.asset01.depreciation_line_ids \
         .filtered(lambda r: r.type == 'depreciate' and r.move_id)
     # assertEqual: assertEquals is a deprecated alias
     self.assertEqual(len(depreciation_line), 1)
# Example 24
 def test_accented_bytestring(self):
     """Non-ASCII args, kwargs and description survive a store/load."""
     stored_job = Job(self.method,
                      args=('öô¿‽', 'ñě'),
                      kwargs={'c': 'ßø'},
                      priority=15,
                      description="My dé^Wdescription")
     stored_job.store()
     loaded = Job.load(self.env, stored_job.uuid)
     self.assertEqual(loaded.args, ('öô¿‽', 'ñě'))
     self.assertEqual(loaded.kwargs, {'c': 'ßø'})
     self.assertEqual(loaded.description, "My dé^Wdescription")
 def test_async_import_small_misaligned_chunks(self):
     """Chunk size larger than a record, not aligned on record
     boundaries."""
     res = self._do_import('account.move.csv', use_queue=True, chunk_size=4)
     self.assertFalse(res, repr(res))
     # a single job must exist: the one splitting the file
     split_job = self.job_obj.search([])
     self.assertEqual(len(split_job), 1)
     Job.load(self.env, split_job.uuid).perform()
     # the split produced two load jobs (two chunks)
     load_jobs = self.job_obj.search([('id', '!=', split_job.id)],
                                     order='name')
     self.assertEqual(len(load_jobs), 2)
     expected_names = [
         "Import Account Entry from file account.move.csv - "
         "#0 - lines 2 to 7",
         "Import Account Entry from file account.move.csv - "
         "#1 - lines 8 to 10",
     ]
     self.assertEqual([job.name for job in load_jobs], expected_names)
     # perform each chunk's load job
     for load_job in load_jobs:
         Job.load(self.env, load_job.uuid).perform()
     self._check_import_result()
# Example 26
 def test_accented_bytestring(self):
     """Non-ASCII args, kwargs and description survive a store/load."""
     stored_job = Job(
         self.method,
         args=("öô¿‽", "ñě"),
         kwargs={"c": "ßø"},
         priority=15,
         description="My dé^Wdescription",
     )
     stored_job.store()
     loaded = Job.load(self.env, stored_job.uuid)
     self.assertEqual(loaded.args, ("öô¿‽", "ñě"))
     self.assertEqual(loaded.kwargs, {"c": "ßø"})
     self.assertEqual(loaded.description, "My dé^Wdescription")
# Example 27
 def test_unicode(self):
     """Unicode args, kwargs and description round-trip unchanged."""
     stored_job = Job(self.method,
                      args=(u'öô¿‽', u'ñě'),
                      kwargs={'c': u'ßø'},
                      priority=15,
                      description=u"My dé^Wdescription")
     stored_job.store()
     loaded = Job.load(self.env, stored_job.uuid)
     # the loaded job matches the stored one field by field
     for attr in ('args', 'kwargs', 'description'):
         self.assertEqual(getattr(stored_job, attr), getattr(loaded, attr))
     # and the values are exactly what was passed in
     self.assertEqual(loaded.args, (u'öô¿‽', u'ñě'))
     self.assertEqual(loaded.kwargs, {'c': u'ßø'})
     self.assertEqual(loaded.description, u"My dé^Wdescription")
# Example 28
 def test_accented_bytestring(self):
     """Accented bytestrings in args, kwargs and description survive a
     store/load round-trip.

     Python 2 only: the ``str`` literals here are bytestrings, and
     ``.decode('utf8')`` turns them into ``unicode`` for comparison.
     """
     test_job = Job(self.method,
                    args=('öô¿‽', 'ñě'),
                    kwargs={'c': 'ßø'},
                    priority=15,
                    description="My dé^Wdescription")
     test_job.user_id = 1
     test_job.store()
     job_read = Job.load(self.env, test_job.uuid)
     # the job's args and description have been created as bytestring but
     # are decoded to utf8 by the ORM so make them comparable
     self.assertEqual(job_read.args,
                      ('öô¿‽'.decode('utf8'), 'ñě'.decode('utf8')))
     self.assertEqual(job_read.kwargs, {'c': 'ßø'.decode('utf8')})
     self.assertEqual(job_read.description,
                      "My dé^Wdescription".decode('utf8'))
# Example 29
 def test_unicode(self):
     """Unicode args, kwargs and description round-trip unchanged."""
     stored_job = Job(
         self.method,
         args=(u"öô¿‽", u"ñě"),
         kwargs={"c": u"ßø"},
         priority=15,
         description=u"My dé^Wdescription",
     )
     stored_job.store()
     loaded = Job.load(self.env, stored_job.uuid)
     # the loaded job matches the stored one field by field
     for attr in ("args", "kwargs", "description"):
         self.assertEqual(getattr(stored_job, attr), getattr(loaded, attr))
     # and the values are exactly what was passed in
     self.assertEqual(loaded.args, (u"öô¿‽", u"ñě"))
     self.assertEqual(loaded.kwargs, {"c": u"ßø"})
     self.assertEqual(loaded.description, u"My dé^Wdescription")
# Example 30
    def _cancel_post_jobs(self):
        """
        Find moves where the mark has been removed and cancel the jobs.
        For the moves that are posted already it's too late: we skip them.
        """
        unmarked_moves = self.search([
            ('to_post', '=', False),
            ('post_job_uuid', '!=', False),
            ('state', '=', 'draft'),
        ])
        for move in unmarked_moves:
            job_rec = Job.load(self.env, move.post_job_uuid)
            # only jobs that have not started yet can be cancelled
            if job_rec.state not in ('pending', 'enqueued'):
                continue
            job_rec.set_done(result=_("Task set to Done because the "
                                      "user unmarked the move."))
            job_rec.store()