def test_worker_database_access(self):
    u'''
    Test database access from within the worker.
    '''
    # See https://github.com/ckan/ckan/issues/3243
    pkg_name = u'test-worker-database-access'
    # Get the test package, creating it on the first run (EAFP).
    try:
        dataset = call_action(u'package_show', id=pkg_name)
    except NotFound:
        dataset = call_action(u'package_create', name=pkg_name)
    # Reset the title so the expected value below is deterministic
    # even when the package already existed.
    dataset[u'title'] = u'foo'
    dataset = call_action(u'package_update', **dataset)
    suffixes = u'1 2 3'.split()
    for suffix in suffixes:
        self.enqueue(database_job, args=[dataset[u'id'], suffix])
    jobs.Worker().work(burst=True)
    # Aside from ensuring that the jobs succeeded, this also checks
    # that database access still works in the main process.
    dataset = call_action(u'package_show', id=pkg_name)
    assert_equal(dataset[u'title'], u'foo' + u''.join(suffixes))
def test_worker_database_access(self):
    u"""
    Test database access from within the worker.
    """
    # See https://github.com/ckan/ckan/issues/3243
    pkg_name = u"test-worker-database-access"
    # Get the test package, creating it on the first run (EAFP).
    try:
        dataset = call_action(u"package_show", id=pkg_name)
    except NotFound:
        dataset = call_action(u"package_create", name=pkg_name)
    # Reset the title so the expected value below is deterministic
    # even when the package already existed.
    dataset[u"title"] = u"foo"
    dataset = call_action(u"package_update", **dataset)
    suffixes = u"1 2 3".split()
    for suffix in suffixes:
        self.enqueue(database_job, args=[dataset[u"id"], suffix])
    jobs.Worker().work(burst=True)
    # Aside from ensuring that the jobs succeeded, this also checks
    # that database access still works in the main process.
    dataset = call_action(u"package_show", id=pkg_name)
    assert dataset[u"title"] == u"foo" + u"".join(suffixes)
def test_worker_logging_lifecycle(self):
    u'''
    Test that a worker's lifecycle is logged.
    '''
    queue = u'my_queue'
    job = self.enqueue(queue=queue)
    with recorded_logs(u'ckan.lib.jobs') as logs:
        worker = jobs.Worker([queue])
        worker.work(burst=True)
    messages = logs.messages[u'info']
    # We expect 4 log messages: Worker start, job start, job end,
    # worker end.  Each tuple lists the fragments that must appear
    # in the corresponding message.
    expected_fragments = [
        (worker.key, queue),    # worker start
        (worker.key, job.id),   # job start
        (worker.key, job.id),   # job end
        (worker.key,),          # worker end
    ]
    assert_equal(len(messages), len(expected_fragments))
    for message, fragments in zip(messages, expected_fragments):
        for fragment in fragments:
            ok_(fragment in message)
def test_worker_logging_lifecycle(self):
    u"""
    Test that a worker's lifecycle is logged.
    """
    queue = u"my_queue"
    job = self.enqueue(queue=queue)
    with recorded_logs(u"ckan.lib.jobs") as logs:
        worker = jobs.Worker([queue])
        worker.work(burst=True)
    messages = logs.messages[u"info"]
    # We expect 4 log messages: Worker start, job start, job end,
    # worker end.  Each tuple lists the fragments that must appear
    # in the corresponding message.
    expected_fragments = [
        (worker.key, queue),    # worker start
        (worker.key, job.id),   # job start
        (worker.key, job.id),   # job end
        (worker.key,),          # worker end
    ]
    assert len(messages) == len(expected_fragments)
    for message, fragments in zip(messages, expected_fragments):
        for fragment in fragments:
            assert fragment in message
def test_worker_datastore_access(self, app):
    """
    Test DataStore access from within a worker.
    """
    pkg = factories.Dataset()
    # A minimal DataStore table with a single integer column.
    create_params = {
        "resource": {"package_id": pkg["id"]},
        "fields": [{"id": "value", "type": "int"}],
    }
    # datastore_create needs an active request context.
    with app.flask_app.test_request_context():
        table = helpers.call_action("datastore_create", **create_params)
    res_id = table["resource_id"]
    for i in range(3):
        self.enqueue(datastore_job, args=[res_id, i])
    jobs.Worker().work(burst=True)
    # Aside from ensuring that the job succeeded, this also checks
    # that accessing the Datastore still works in the main process.
    result = helpers.call_action("datastore_search", resource_id=res_id)
    assert [0, 1, 2] == [r["value"] for r in result["records"]]
def worker(burst, queues):
    """Start a worker that fetches jobs from queues and executes them.

    If no queue names are given then the worker listens to the default
    queue, this is equivalent to

        paster jobs worker default

    If queue names are given then the worker listens to those queues
    and only those:

        paster jobs worker my-custom-queue

    Hence, if you want the worker to listen to the default queue and
    some others then you must list the default queue explicitly:

        paster jobs worker default my-custom-queue

    If the `--burst` option is given then the worker will exit as soon
    as all its queues are empty.
    """
    # NOTE: the docstring above doubles as the command's help text, so
    # it is kept verbatim.
    background_worker = bg_jobs.Worker(queues)
    background_worker.work(burst=burst)
def test_fork_within_a_transaction(self):
    u'''
    Test forking a worker horse within a database transaction.

    The horse should get a new SQLAlchemy session but leave the
    original session alone.
    '''
    # NOTE(review): everything below this line is currently
    # unreachable -- the test is skipped unconditionally because it
    # fails intermittently.
    raise SkipTest(u'Failing intermittently')  # FIXME
    pkg_name = u'test-fork-within-a-transaction'
    model.repo.new_revision()
    # Get or create the test package.
    pkg = model.Package.get(pkg_name)
    if not pkg:
        pkg = model.Package(name=pkg_name)
    pkg.title = u'foo'
    pkg.save()
    # Leave an uncommitted change in the main session; the worker
    # should only ever see the committed u'foo' title.
    pkg.title = u'bar'
    self.enqueue(database_job, [pkg.id, u'foo'])
    jobs.Worker().work(burst=True)
    assert_equal(pkg.title, u'bar')  # Original session is unchanged
    pkg.Session.refresh(pkg)
    assert_equal(pkg.title, u'foofoo')  # Worker only saw committed changes
def test_fork_within_a_transaction(self):
    u"""
    Test forking a worker horse within a database transaction.

    The original instances should be unchanged but their session must
    be closed.
    """
    pkg_name = u"test-fork-within-a-transaction"
    # Get the test package, creating it if necessary.
    package = model.Package.get(pkg_name) or model.Package(name=pkg_name)
    package.title = u"foo"
    package.save()
    # Leave an uncommitted change in the original session; the worker
    # should only ever see the committed u"foo" title.
    package.title = u"bar"
    self.enqueue(database_job, [package.id, u"foo"])
    jobs.Worker().work(burst=True)
    assert package.title == u"bar"  # Original instance is unchanged
    # The original session has been closed, `package.Session` uses the
    # new session in which `package` is not registered.
    assert package not in package.Session
    package = model.Package.get(package.id)  # Get instance from new session
    assert package.title == u"foofoo"  # Worker only saw committed changes
def test_fork_within_a_transaction(self):
    u'''
    Test forking a worker horse within a database transaction.

    The original instances should be unchanged but their session must
    be closed.
    '''
    pkg_name = u'test-fork-within-a-transaction'
    model.repo.new_revision()
    # Get the test package, creating it if necessary.
    package = model.Package.get(pkg_name) or model.Package(name=pkg_name)
    package.title = u'foo'
    package.save()
    # Leave an uncommitted change in the original session; the worker
    # should only ever see the committed u'foo' title.
    package.title = u'bar'
    self.enqueue(database_job, [package.id, u'foo'])
    jobs.Worker().work(burst=True)
    assert_equal(package.title, u'bar')  # Original instance is unchanged
    # The original session has been closed, `package.Session` uses the
    # new session in which `package` is not registered.
    assert_false(package in package.Session)
    package = model.Package.get(package.id)  # Get instance from new session
    assert_equal(package.title, u'foofoo')  # Worker only saw committed changes
def test_worker_datastore_access(self):
    '''
    Test DataStore access from within a worker.
    '''
    pkg = factories.Dataset()
    # A minimal DataStore table with a single integer column.
    create_params = {
        'resource': {'package_id': pkg['id']},
        'fields': [{'id': 'value', 'type': 'int'}],
    }
    table = helpers.call_action('datastore_create', **create_params)
    res_id = table['resource_id']
    for i in range(3):
        self.enqueue(datastore_job, args=[res_id, i])
    jobs.Worker().work(burst=True)
    # Aside from ensuring that the job succeeded, this also checks
    # that accessing the Datastore still works in the main process.
    result = helpers.call_action('datastore_search', resource_id=res_id)
    assert_equal([0, 1, 2], [r['value'] for r in result['records']])