def main():
    """Run a reviewer and a conductor against shared in-memory state."""
    # Both entities must share the same persistence backend so that the
    # data one writes is visible to the other.
    saver = persistence.fetch({'connection': 'memory'})
    with contextlib.closing(saver.get_connection()) as conn:
        # Ensure backend setup/data directories/schema upgrades exist
        # before anything tries to use them.
        conn.upgrade()
    fc1 = fake_client.FakeClient()
    # Share the first client's storage location so the expected zookeeper
    # features work correctly across both clients.
    fc2 = fake_client.FakeClient(storage=fc1.storage)
    entities = [
        generate_reviewer(fc1, saver),
        generate_conductor(fc2, saver),
    ]
    for thread, _stopper in entities:
        thread.start()
    try:
        watch = timeutils.StopWatch(duration=RUN_TIME)
        watch.start()
        while not watch.expired():
            time.sleep(0.1)
    finally:
        # Shut entities down in reverse start order.
        for thread, stopper in reversed(entities):
            stopper()
            thread.join()
def make_clients(self, count, shared_storage=True):
    """Create ``count`` fake clients, each registered for cleanup.

    :param count: number of clients to create.
    :param shared_storage: when true (the default) every client shares
        the first client's storage so they all see the same simulated
        zookeeper state; when false each client gets its own storage.
    :returns: list of created clients.

    Bug fix: previously ``shared_storage`` was accepted but ignored, so
    clients always shared storage even when ``shared_storage=False``.
    """
    clients = []
    storage = None
    for _i in range(count):
        if shared_storage and storage is not None:
            client = fake_client.FakeClient(storage=storage)
        else:
            client = fake_client.FakeClient()
            if shared_storage:
                # Remember the first client's storage for the rest.
                storage = client.storage
        self.addCleanup(client.close)
        clients.append(client)
    return clients
def setUp(self):
    super(TestClaimListener, self).setUp()
    # Fake in-memory zookeeper client; stopped automatically at teardown.
    self.client = fake_client.FakeClient()
    self.addCleanup(self.client.stop)
    # Jobboard backed by the fake client; closed automatically at teardown.
    self.board = jobs.fetch('test', 'zookeeper', client=self.client)
    self.addCleanup(self.board.close)
    self.board.connect()
def main():
    """Spawn producer/worker threads against a fake client and verify results.

    Returns 0 when the board drains completely and every expected unit was
    consumed, 1 otherwise.
    """
    if six.PY3:
        # TODO(harlowja): Hack to make eventlet work right, remove when the
        # following is fixed: https://github.com/eventlet/eventlet/issues/230
        from taskflow.utils import eventlet_utils as _eu  # noqa
        try:
            import eventlet as _eventlet  # noqa
        except ImportError:
            pass
    with contextlib.closing(fake_client.FakeClient()) as c:
        threads = []
        for idx in compat_range(0, PRODUCERS):
            t = threading_utils.daemon_thread(producer, idx + 1, c)
            threads.append(t)
            t.start()
        consumed = collections.deque()
        for idx in compat_range(0, WORKERS):
            t = threading_utils.daemon_thread(worker, idx + 1, c, consumed)
            threads.append(t)
            t.start()
        while threads:
            threads.pop().join()
        # At the end there should be nothing leftover, let's verify that.
        board = backends.fetch('verifier', SHARED_CONF.copy(), client=c)
        board.connect()
        with contextlib.closing(board):
            if board.job_count != 0 or len(consumed) != EXPECTED_UNITS:
                return 1
            return 0
def _get_jobboard_backend(conf, persistence=None):
    """Fetch the configured jobboard backend.

    Uses a fake (in-memory) client when the taskflow connection is
    configured as 'memory'; otherwise lets the board create its own.
    """
    if conf.taskflow.connection == 'memory':
        client = fake_client.FakeClient()
    else:
        client = None
    board_conf = {'board': conf.taskflow.job_board_url}
    return boards.fetch(conf.job_board_name, board_conf,
                        client=client, persistence=persistence)
def _create_board(self, persistence=None):
    """Create a zookeeper jobboard backed by a fresh fake client.

    Both the board and the client are registered for cleanup
    (cleanups run in reverse registration order).
    """
    client = fake_client.FakeClient()
    board = impl_zookeeper.ZookeeperJobBoard(
        'test-board', {}, client=client, persistence=persistence)
    self.addCleanup(board.close)
    self.addCleanup(kazoo_utils.finalize_client, client)
    return client, board
def make_components(self, name='testing', wait_timeout=0.1):
    """Build a component bundle (board, client, persistence, conductor)."""
    client = fake_client.FakeClient()
    backend = impl_memory.MemoryBackend()
    board = impl_zookeeper.ZookeeperJobBoard(
        name, {}, client=client, persistence=backend)
    conductor = stc.SingleThreadedConductor(
        name, board, backend, wait_timeout=wait_timeout)
    return self.ComponentBundle(board, client, backend, conductor)
def create_board(self, persistence=None):
    """Create a test jobboard, defaulting to in-memory persistence.

    The board and its client are registered for cleanup.
    """
    backend = persistence if persistence is not None else impl_memory.MemoryBackend()
    client = fake_client.FakeClient()
    board = impl_zookeeper.ZookeeperJobBoard(
        'test-board', {}, client=client, persistence=backend)
    self.addCleanup(board.close)
    self.addCleanup(self.close_client, client)
    return client, board
def setUp(self):
    super(ZakePersistenceTest, self).setUp()
    # Root path the backend stores its data under.
    conf = {
        "path": "/taskflow",
    }
    # Fake in-memory zookeeper client (started, never a real server).
    self.client = fake_client.FakeClient()
    self.client.start()
    self._backend = impl_zookeeper.ZkBackend(conf, client=self.client)
    conn = self._backend.get_connection()
    # Ensure any needed paths/schema exist before tests use the backend.
    conn.upgrade()
def setUp(self):
    super(TestClaimListener, self).setUp()
    # Fake in-memory zookeeper client; stopped automatically at teardown.
    self.client = fake_client.FakeClient()
    self.addCleanup(self.client.stop)
    # Jobboard with an in-memory persistence backend; closed at teardown.
    self.board = jobs.fetch('test', 'zookeeper', client=self.client,
                            persistence=impl_memory.MemoryBackend())
    self.addCleanup(self.board.close)
    self.board.connect()
def test_zk_entry_point_existing_client(self):
    """Fetching a zookeeper board with a supplied client must reuse it."""
    existing_client = fake_client.FakeClient()
    conf = {'board': 'zookeeper'}
    fetched = backends.fetch('test', conf, client=existing_client)
    with contextlib.closing(fetched) as be:
        self.assertIsInstance(be, impl_zookeeper.ZookeeperJobBoard)
        self.assertIs(existing_client, be._client)
def make_components(self):
    """Assemble board/client/persistence/conductor for a test run."""
    client = fake_client.FakeClient()
    backend = impl_memory.MemoryBackend()
    board = impl_zookeeper.ZookeeperJobBoard(
        'testing', {}, client=client, persistence=backend)
    # Copy the configured conductor kwargs, forcing our persistence in.
    kwargs = dict(self.conductor_kwargs, persistence=backend)
    conductor = backends.fetch(self.kind, 'testing', board, **kwargs)
    return ComponentBundle(board, client, backend, conductor)
def test_bad_factory(self):
    """A bogus executor_factory must be rejected with ValueError."""
    backend = impl_memory.MemoryBackend()
    zk = fake_client.FakeClient()
    board = impl_zookeeper.ZookeeperJobBoard(
        'testing', {}, client=zk, persistence=backend)
    self.assertRaises(ValueError, backends.fetch,
                      'nonblocking', 'testing', board,
                      persistence=backend,
                      executor_factory='testing')
def make_components(self, listener_factories):
    """Assemble a blocking-conductor bundle using the given listeners."""
    client = fake_client.FakeClient()
    backend = impl_memory.MemoryBackend()
    board = impl_zookeeper.ZookeeperJobBoard(
        'testing', {}, client=client, persistence=backend)
    conductor = backends.fetch('blocking', 'testing', board,
                               wait_timeout=0.01,
                               listener_factories=listener_factories,
                               persistence=backend)
    return ComponentBundle(board, client, backend, conductor)
def setUp(self):
    super(TaskflowServiceTest, self).setUp()
    # One fake zookeeper client shared by persistence and jobboard.
    _zk_client = zake_client.FakeClient()
    self.persistence = tf_client.create_persistence(client=_zk_client)
    self.jobboard = tf_client.create_jobboard("service_test",
                                              persistence=self.persistence,
                                              client=_zk_client)
    self.tf_client = tf_client.Client("service_test",
                                      persistence=self.persistence,
                                      jobboard=self.jobboard)
    # Conductor service wired to the same jobboard/persistence pair.
    self.tf_service = tf_service.ConductorService.create(
        host="service_test",
        jobboard=self.tf_client.jobboard,
        persistence=self.tf_client.persistence,
    )
def test_zookeeper():
    """Test zookeeper backend for generate id"""
    zk = fake_client.FakeClient()
    zk.start()
    test_zk = ZkIDGenerator(None, path="toto")
    # Construction leaves the client unset and the path as given.
    assert test_zk.zk is None
    assert test_zk.path == "toto"
    test_zk.zk = zk
    test_zk.path = "/DirectoryManager/increment"
    assert test_zk.zk == zk
    assert test_zk.path == "/DirectoryManager/increment"
    # Successive increments yield consecutive ids starting at 1.
    for expected in (1, 2, 3, 4):
        assert test_zk.increment() == expected
    zk.stop()
def main():
    """Spawn producer/worker threads against a fake client and verify results.

    Returns 0 when the board drains completely and every expected unit was
    consumed, 1 otherwise.
    """
    with contextlib.closing(fake_client.FakeClient()) as c:
        threads = []
        for idx in compat_range(0, PRODUCERS):
            t = threading_utils.daemon_thread(producer, idx + 1, c)
            threads.append(t)
            t.start()
        consumed = collections.deque()
        for idx in compat_range(0, WORKERS):
            t = threading_utils.daemon_thread(worker, idx + 1, c, consumed)
            threads.append(t)
            t.start()
        while threads:
            threads.pop().join()
        # At the end there should be nothing leftover, let's verify that.
        board = backends.fetch('verifier', SHARED_CONF.copy(), client=c)
        board.connect()
        with contextlib.closing(board):
            if board.job_count != 0 or len(consumed) != EXPECTED_UNITS:
                return 1
            return 0
def create_board(**kwargs):
    """Create a test zookeeper jobboard (and its fake client).

    Any keyword arguments become the board's configuration.
    """
    client = fake_client.FakeClient()
    conf = dict(kwargs)
    board = impl_zookeeper.ZookeeperJobBoard('test-board', conf=conf,
                                             client=client)
    return client, board
def test_command_custom_version(self):
    """The stat command output should reflect a custom server version."""
    client = fake_client.FakeClient(server_version=(1, 1, 1))
    with start_close(client) as c:
        output = c.command(b'stat')
        self.assertIn("standalone", output)
        self.assertIn('1.1.1', output)
# Copyright 2015 Hewlett-Packard Development Company, L.P. # # Authors: Davide Agnello <*****@*****.**> # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Copyright [2014] Hewlett-Packard Development Company, L.P. # limitations under the License. import zake.fake_client as fake_client import cue.taskflow.client as tf_client _zk_client = fake_client.FakeClient() persistence = tf_client.create_persistence(client=_zk_client) jobboard = tf_client.create_jobboard("test_board", persistence=persistence, client=_zk_client) tf_client = tf_client.get_client_instance(persistence=persistence, jobboard=jobboard)
def setUp(self):
    super(TestClient, self).setUp()
    # Fresh fake client per test; closed automatically at teardown.
    self.client = fake_client.FakeClient()
    self.addCleanup(self.client.close)
def __init__(self, member_id, parsed_url, options):
    super(ZakeDriver, self).__init__(member_id, parsed_url, options)
    # Back the coordinator with fake_storage (presumably shared at the
    # class level so instances see the same simulated state — confirm
    # where fake_storage is defined).
    self._coord = fake_client.FakeClient(storage=self.fake_storage)
def _make_client(cls, parsed_url, options):
    """Build a fake client, honoring an explicit 'storage' option.

    Falls back to the class-level fake storage when the option is absent.
    """
    try:
        storage = options['storage']
    except KeyError:
        storage = cls.fake_storage
    return fake_client.FakeClient(storage=storage)
def setUp(self):
    super(ZkCopyTest, self).setUp()
    # Started fake client; cleanups run LIFO, so close() runs before stop().
    self.client = fake_client.FakeClient()
    self.client.start()
    self.addCleanup(self.client.stop)
    self.addCleanup(self.client.close)