def test_job_unlinked(self):
    test_job = Job(func=dummy_task_args,
                   model_name="res.users",
                   args=("o", "k"),
                   kwargs={"c": "!"})
    storage = OpenERPJobStorage(self.session)
    storage.store(test_job)
    stored = self.queue_job.search([("uuid", "=", test_job.uuid)])
    stored.unlink()
    with self.assertRaises(NoSuchJobError):
        storage.load(test_job.uuid)
def test_read(self):
    eta = datetime.now() + timedelta(hours=5)
    test_job = Job(func=dummy_task_args,
                   model_name='res.users',
                   args=('o', 'k'),
                   kwargs={'c': '!'},
                   priority=15,
                   eta=eta,
                   description="My description")
    test_job.user_id = 1
    test_job.company_id = self.env.ref("base.main_company").id
    storage = OpenERPJobStorage(self.session)
    storage.store(test_job)
    job_read = storage.load(test_job.uuid)
    self.assertEqual(test_job.uuid, job_read.uuid)
    self.assertEqual(test_job.model_name, job_read.model_name)
    self.assertEqual(test_job.func, job_read.func)
    self.assertEqual(test_job.args, job_read.args)
    self.assertEqual(test_job.kwargs, job_read.kwargs)
    self.assertEqual(test_job.func_name, job_read.func_name)
    self.assertEqual(test_job.func_string, job_read.func_string)
    self.assertEqual(test_job.description, job_read.description)
    self.assertEqual(test_job.state, job_read.state)
    self.assertEqual(test_job.priority, job_read.priority)
    self.assertEqual(test_job.exc_info, job_read.exc_info)
    self.assertEqual(test_job.result, job_read.result)
    self.assertEqual(test_job.user_id, job_read.user_id)
    self.assertEqual(test_job.company_id, job_read.company_id)
    delta = timedelta(seconds=1)  # DB does not keep milliseconds
    self.assertAlmostEqual(test_job.date_created, job_read.date_created,
                           delta=delta)
    self.assertAlmostEqual(test_job.date_started, job_read.date_started,
                           delta=delta)
    self.assertAlmostEqual(test_job.date_enqueued, job_read.date_enqueued,
                           delta=delta)
    self.assertAlmostEqual(test_job.date_done, job_read.date_done,
                           delta=delta)
    self.assertAlmostEqual(test_job.eta, job_read.eta, delta=delta)

    test_date = datetime(2015, 3, 15, 21, 7, 0)
    job_read.date_enqueued = test_date
    job_read.date_started = test_date
    job_read.date_done = test_date
    job_read.canceled = True
    storage.store(job_read)
    job_read = storage.load(test_job.uuid)
    self.assertAlmostEqual(job_read.date_started, test_date, delta=delta)
    self.assertAlmostEqual(job_read.date_enqueued, test_date, delta=delta)
    self.assertAlmostEqual(job_read.date_done, test_date, delta=delta)
    self.assertEqual(job_read.canceled, True)
def _cancel_jobs(self, cr, uid, context=None): """Find moves where the mark has been removed and cancel the jobs. For the moves that are posted already it's too late: we skip them. """ if context is None: context = {} session = ConnectorSession(cr, uid, context=context) storage = OpenERPJobStorage(session) move_ids = self.search(cr, uid, [ ('to_post', '=', False), ('post_job_uuid', '!=', False), ('state', '=', 'draft'), ], context=context) for move in self.browse(cr, uid, move_ids, context=context): job_rec = storage.load(move.post_job_uuid) if job_rec.state in (u'pending', u'enqueued'): job_rec.set_done(result=_( u'Task set to Done because the user unmarked the move')) storage.store(job_rec)
def test_job_worker(self): worker = self.env["queue.worker"].create({"uuid": "57569b99-c2c1-47b6-aad1-72f953c92c87"}) test_job = Job(func=dummy_task_args, model_name="res.users", args=("o", "k"), kwargs={"c": "!"}) test_job.worker_uuid = worker.uuid storage = OpenERPJobStorage(self.session) self.assertEqual(storage._worker_id(worker.uuid), worker.id) storage.store(test_job) job_read = storage.load(test_job.uuid) self.assertEqual(job_read.worker_uuid, worker.uuid)
def open_related_action(self):
    """ Open the related action associated to the job """
    self.ensure_one()
    session = ConnectorSession(self.env.cr,
                               self.env.uid,
                               context=self.env.context)
    storage = OpenERPJobStorage(session)
    job = storage.load(self.uuid)
    action = job.related_action(session)
    if action is None:
        raise exceptions.Warning(_('No action available for this job'))
    return action
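# --- Illustrative sketch (assumption, not this module's code) --------------
# open_related_action() above resolves whatever the job function declared as
# its related action.  A minimal example of that wiring, assuming the
# connector's `related_action` decorator; `open_record` and `refresh_record`
# are made-up names.  The first job argument is the model name and the second
# the record id, matching the (model_name, ...) argument layout seen in the
# tests above.
from openerp.addons.connector.queue.job import job, related_action


def open_record(session, thejob):
    # Build an act_window pointing at the record the job works on.
    model_name, record_id = thejob.args[0], thejob.args[1]
    return {
        'type': 'ir.actions.act_window',
        'res_model': model_name,
        'res_id': record_id,
        'view_type': 'form',
        'view_mode': 'form',
    }


@job
@related_action(action=open_record)
def refresh_record(session, model_name, record_id):
    # Placeholder body: what matters here is the related_action wiring.
    pass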
def _change_job_state(self, state, result=None):
    """ Change the state of the `Job` object itself so it will change
    the other fields (date, result, ...)
    """
    session = ConnectorSession(self.env.cr,
                               self.env.uid,
                               context=self.env.context)
    storage = OpenERPJobStorage(session)
    for job in self:
        job = storage.load(job.uuid)
        if state == DONE:
            job.set_done(result=result)
        elif state == PENDING:
            job.set_pending(result=result)
        else:
            raise ValueError('State not supported: %s' % state)
        storage.store(job)
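# --- Typical callers (assumed wrappers, shown for context) -----------------
# _change_job_state() above is normally driven from the queue.job record
# actions.  A minimal sketch of such wrappers, assuming methods named
# `requeue` and `button_done`; DONE and PENDING are the state constants from
# the connector job module.
from openerp import api, models, _
from openerp.addons.connector.queue.job import DONE, PENDING


class QueueJob(models.Model):
    _inherit = 'queue.job'

    @api.multi
    def requeue(self):
        # Put the selected jobs back into the pending state.
        self._change_job_state(PENDING)
        return True

    @api.multi
    def button_done(self):
        result = _('Manually set to done by %s') % self.env.user.name
        self._change_job_state(DONE, result=result)
        return True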
def test_unicode(self):
    test_job = Job(func=dummy_task_args,
                   model_name='res.users',
                   args=(u'öô¿‽', u'ñě'),
                   kwargs={'c': u'ßø'},
                   priority=15,
                   description=u"My dé^Wdescription")
    test_job.user_id = 1
    storage = OpenERPJobStorage(self.session)
    storage.store(test_job)
    job_read = storage.load(test_job.uuid)
    self.assertEqual(test_job.args, job_read.args)
    self.assertEqual(job_read.args, ('res.users', u'öô¿‽', u'ñě'))
    self.assertEqual(test_job.kwargs, job_read.kwargs)
    self.assertEqual(job_read.kwargs, {'c': u'ßø'})
    self.assertEqual(test_job.description, job_read.description)
    self.assertEqual(job_read.description, u"My dé^Wdescription")
def test_accented_bytestring(self):
    job = Job(func=dummy_task_args,
              model_name='res.users',
              args=('öô¿‽', 'ñě'),
              kwargs={'c': 'ßø'},
              priority=15,
              description="My dé^Wdescription")
    job.user_id = 1
    storage = OpenERPJobStorage(self.session)
    storage.store(job)
    job_read = storage.load(job.uuid)
    self.assertEqual(job.args, job_read.args)
    self.assertEqual(job_read.args, ('res.users', 'öô¿‽', 'ñě'))
    self.assertEqual(job.kwargs, job_read.kwargs)
    self.assertEqual(job_read.kwargs, {'c': 'ßø'})
    # the job's description has been created as bytestring but is
    # decoded to utf8 by the ORM so make them comparable
    self.assertEqual(job.description,
                     job_read.description.encode('utf8'))
    self.assertEqual(job_read.description,
                     "My dé^Wdescription".decode('utf8'))
def test_job_identity_key_no_duplicate(self):
    """ If a job with the same identity key is already in the queue,
    do not add a new one.
    """
    storage = OpenERPJobStorage(self.session)
    test_job_uuid = storage.enqueue(func=task_a,
                                    args=(1, "a"),
                                    identity_key=identity_exact)
    test_job = storage.load(test_job_uuid)
    stored = self.queue_job.search(
        [('identity_key', '=', test_job.identity_key)])
    self.assertTrue(stored)
    self.assertEqual(1, len(stored))
    self.assertEqual(stored.uuid, test_job.uuid)

    # enqueueing the same job again must return the existing job's uuid
    test_job_uuid2 = storage.enqueue(func=task_a,
                                     args=(1, "a"),
                                     identity_key=identity_exact)
    self.assertEqual(test_job_uuid2, test_job.uuid)
    stored = self.queue_job.search(
        [('identity_key', '=', test_job.identity_key)])
    self.assertTrue(stored)
    self.assertEqual(1, len(stored))
def _cancel_jobs(self, cr, uid, context=None): """Find payment.orders where the mark has been removed and cancel the jobs. """ if context is None: context = {} session = ConnectorSession(cr, uid, context=context) storage = OpenERPJobStorage(session) paymentorder_ids = self.search(cr, uid, [ ('to_process', '=', False), ('post_job_uuid', '!=', False) ], context=context) for paymentorder in self.browse(cr, uid, paymentorder_ids, context=context): job_rec = storage.load(paymentorder.post_job_uuid) if job_rec.state in (u'pending', u'enqueued'): job_rec.set_done(result=_( u'Task set to Done because the user unmarked the payment order' )) storage.store(job_rec)
def test_read(self):
    eta = datetime.now() + timedelta(hours=5)
    job = Job(func=dummy_task_args,
              model_name='res.users',
              args=('o', 'k'),
              kwargs={'c': '!'},
              priority=15,
              eta=eta,
              description="My description")
    job.user_id = 1
    storage = OpenERPJobStorage(self.session)
    storage.store(job)
    job_read = storage.load(job.uuid)
    self.assertEqual(job.uuid, job_read.uuid)
    self.assertEqual(job.model_name, job_read.model_name)
    self.assertEqual(job.func, job_read.func)
    self.assertEqual(job.args, job_read.args)
    self.assertEqual(job.kwargs, job_read.kwargs)
    self.assertEqual(job.func_name, job_read.func_name)
    self.assertEqual(job.func_string, job_read.func_string)
    self.assertEqual(job.description, job_read.description)
    self.assertEqual(job.state, job_read.state)
    self.assertEqual(job.priority, job_read.priority)
    self.assertEqual(job.exc_info, job_read.exc_info)
    self.assertEqual(job.result, job_read.result)
    self.assertEqual(job.user_id, job_read.user_id)
    delta = timedelta(seconds=1)  # DB does not keep milliseconds
    self.assertAlmostEqual(job.date_created, job_read.date_created,
                           delta=delta)
    self.assertAlmostEqual(job.date_started, job_read.date_started,
                           delta=delta)
    self.assertAlmostEqual(job.date_enqueued, job_read.date_enqueued,
                           delta=delta)
    self.assertAlmostEqual(job.date_done, job_read.date_done,
                           delta=delta)
    self.assertAlmostEqual(job.eta, job_read.eta, delta=delta)
class TestBaseImportConnector(common.TransactionCase):

    FIELDS = [
        'date',
        'journal_id/id',
        'name',
        'ref',
        'line_ids/account_id/id',
        'line_ids/name',
        'line_ids/debit',
        'line_ids/credit',
        'line_ids/partner_id/id',
    ]
    OPTIONS = {
        OPT_SEPARATOR: ',',
        OPT_QUOTING: '"',
        OPT_HAS_HEADER: True,
    }

    def setUp(self):
        super(TestBaseImportConnector, self).setUp()
        self.import_obj = self.registry['base_import.import']
        self.move_obj = self.registry['account.move']
        self.job_obj = self.registry['queue.job']
        self.session = ConnectorSession(self.cr, self.uid)
        self.storage = OpenERPJobStorage(self.session)

    def _read_test_file(self, file_name):
        file_name = os.path.join(os.path.dirname(__file__), file_name)
        return open(file_name).read()

    def _do_import(self, file_name, use_connector, chunk_size=None):
        data = self._read_test_file(file_name)
        import_id = self.import_obj.create(self.cr, self.uid, {
            'res_model': 'account.move',
            'file': data,
            'file_name': file_name,
        })
        options = dict(self.OPTIONS)
        options[OPT_USE_CONNECTOR] = use_connector
        options[OPT_CHUNK_SIZE] = chunk_size
        return self.import_obj.do(
            self.cr, self.uid, import_id, self.FIELDS, options)

    def _check_import_result(self):
        move_ids = self.move_obj.search(
            self.cr, self.uid,
            [('name', 'in', ('TEST-1', 'TEST-2', 'TEST-3'))])
        self.assertEqual(len(move_ids), 3)

    def test_normal_import(self):
        """ Test the standard import still works. """
        res = self._do_import('account.move.csv', use_connector=False)
        self.assertFalse(res, repr(res))
        self._check_import_result()

    def test_async_import(self):
        """ Basic asynchronous import test with default large chunk size. """
        res = self._do_import('account.move.csv', use_connector=True)
        self.assertFalse(res, repr(res))
        # no moves should be created yet
        move_ids = self.move_obj.search(
            self.cr, self.uid,
            [('name', 'in', ('TEST-1', 'TEST-2', 'TEST-3'))])
        self.assertEqual(len(move_ids), 0)
        # but we must have one job to split the file
        split_job_ids = self.job_obj.search(self.cr, self.uid, [])
        self.assertEqual(len(split_job_ids), 1)
        split_job = self.job_obj.browse(self.cr, self.uid, split_job_ids[0])
        # job names are important
        self.assertEqual(split_job.name,
                         "Import Account Entry from file account.move.csv")
        # perform the split job
        self.storage.load(split_job.uuid).perform(self.session)
        # check that one job has been generated to load the file (one chunk)
        load_job_ids = self.job_obj.search(self.cr, self.uid,
                                           [('id', '!=', split_job.id)])
        self.assertEqual(len(load_job_ids), 1)
        load_job = self.job_obj.browse(self.cr, self.uid, load_job_ids[0])
        self.assertEqual(
            load_job.name,
            "Import Account Entry from file account.move.csv - "
            "#0 - lines 2 to 10")
        # perform the load job
        self.storage.load(load_job.uuid).perform(self.session)
        self._check_import_result()

    def test_async_import_small_misaligned_chunks(self):
        """ Chunk size larger than a record. """
        res = self._do_import('account.move.csv',
                              use_connector=True,
                              chunk_size=4)
        self.assertFalse(res, repr(res))
        # we must have one job to split the file
        split_job_ids = self.job_obj.search(self.cr, self.uid, [])
        self.assertEqual(len(split_job_ids), 1)
        split_job = self.job_obj.browse(self.cr, self.uid, split_job_ids[0])
        # perform the split job
        self.storage.load(split_job.uuid).perform(self.session)
        # check that two jobs have been generated to load the file
        # (two chunks)
        load_job_ids = self.job_obj.search(self.cr, self.uid,
                                           [('id', '!=', split_job.id)],
                                           order='name')
        self.assertEqual(len(load_job_ids), 2)
        load_jobs = self.job_obj.browse(self.cr, self.uid, load_job_ids)
        self.assertEqual(
            load_jobs[0].name,
            "Import Account Entry from file account.move.csv - "
            "#0 - lines 2 to 7")
        self.assertEqual(
            load_jobs[1].name,
            "Import Account Entry from file account.move.csv - "
            "#1 - lines 8 to 10")
        # perform the load jobs
        self.storage.load(load_jobs[0].uuid).perform(self.session)
        self.storage.load(load_jobs[1].uuid).perform(self.session)
        self._check_import_result()

    def test_async_import_smaller_misaligned_chunks(self):
        """ Chunk size smaller than a record. """
        res = self._do_import('account.move.csv',
                              use_connector=True,
                              chunk_size=2)
        self.assertFalse(res, repr(res))
        # we must have one job to split the file
        split_job_ids = self.job_obj.search(self.cr, self.uid, [])
        self.assertEqual(len(split_job_ids), 1)
        split_job = self.job_obj.browse(self.cr, self.uid, split_job_ids[0])
        # perform the split job
        self.storage.load(split_job.uuid).perform(self.session)
        # check that three jobs have been generated to load the file
        # (three chunks)
        load_job_ids = self.job_obj.search(self.cr, self.uid,
                                           [('id', '!=', split_job.id)],
                                           order='name')
        self.assertEqual(len(load_job_ids), 3)
        load_jobs = self.job_obj.browse(self.cr, self.uid, load_job_ids)
        self.assertEqual(
            load_jobs[0].name,
            "Import Account Entry from file account.move.csv - "
            "#0 - lines 2 to 4")
        self.assertEqual(
            load_jobs[1].name,
            "Import Account Entry from file account.move.csv - "
            "#1 - lines 5 to 7")
        self.assertEqual(
            load_jobs[2].name,
            "Import Account Entry from file account.move.csv - "
            "#2 - lines 8 to 10")
        # perform the load jobs
        self.storage.load(load_jobs[0].uuid).perform(self.session)
        self.storage.load(load_jobs[1].uuid).perform(self.session)
        self.storage.load(load_jobs[2].uuid).perform(self.session)
        self._check_import_result()

    def test_async_import_smaller_aligned_chunks(self):
        """ Chunks aligned on record boundaries.
        The last chunk ends exactly at the end of the file.
        """
        res = self._do_import('account.move.csv',
                              use_connector=True,
                              chunk_size=3)
        self.assertFalse(res, repr(res))
        # we must have one job to split the file
        split_job_ids = self.job_obj.search(self.cr, self.uid, [])
        self.assertEqual(len(split_job_ids), 1)
        split_job = self.job_obj.browse(self.cr, self.uid, split_job_ids[0])
        # perform the split job
        self.storage.load(split_job.uuid).perform(self.session)
        # check that three jobs have been generated to load the file
        # (three chunks)
        load_job_ids = self.job_obj.search(self.cr, self.uid,
                                           [('id', '!=', split_job.id)],
                                           order='name')
        self.assertEqual(len(load_job_ids), 3)
        load_jobs = self.job_obj.browse(self.cr, self.uid, load_job_ids)
        self.assertEqual(
            load_jobs[0].name,
            "Import Account Entry from file account.move.csv - "
            "#0 - lines 2 to 4")
        self.assertEqual(
            load_jobs[1].name,
            "Import Account Entry from file account.move.csv - "
            "#1 - lines 5 to 7")
        self.assertEqual(
            load_jobs[2].name,
            "Import Account Entry from file account.move.csv - "
            "#2 - lines 8 to 10")
        # perform the load jobs
        self.storage.load(load_jobs[0].uuid).perform(self.session)
        self.storage.load(load_jobs[1].uuid).perform(self.session)
        self.storage.load(load_jobs[2].uuid).perform(self.session)
        self._check_import_result()