def multiprocessing_engine(self, pool_size=None):
  storage = Storage.create(debug=True, in_memory=False)
  cache = Cache.create(storage=storage)
  with closing(LocalMultiprocessEngine(self.scheduler, storage, cache,
                                       pool_size=pool_size, debug=True)) as e:
    e.start()
    yield e
def __init__(self, scheduler, storage=None, cache=None, use_cache=True,
             include_trace_on_error=True):
  """
  :param scheduler: The local scheduler for creating execution graphs.
  :type scheduler: :class:`pants.engine.scheduler.LocalScheduler`
  :param storage: The storage instance for serializables keyed by their hashes.
  :type storage: :class:`pants.engine.storage.Storage`
  :param cache: The cache instance for storing execution results; by default it uses the same
    Storage instance if not specified.
  :type cache: :class:`pants.engine.storage.Cache`
  :param use_cache: True to enable usage of the cache. The cache incurs a large amount of
    overhead for small tasks and needs further improvement (TODO).
  :type use_cache: bool
  :param include_trace_on_error: Include the trace through the graph upon encountering errors.
  :type include_trace_on_error: bool
  """
  self._include_trace_on_error = include_trace_on_error
  self._scheduler = scheduler
  self._storage = storage or Storage.create()
  # Default the cache onto the same Storage instance, as documented above.
  self._cache = cache or Cache.create(storage=self._storage)
  self._use_cache = use_cache
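# Usage sketch (an illustration, not code from the original source): constructing an
# engine so that the results cache shares one Storage instance with the serializable
# store. LocalSerialEngine and its module path are assumptions here; any Engine
# subclass accepting the constructor documented above would behave the same way.
from pants.engine.engine import LocalSerialEngine  # assumed location
from pants.engine.storage import Cache, Storage


def make_serial_engine(scheduler):
  storage = Storage.create()
  cache = Cache.create(storage=storage)
  return LocalSerialEngine(scheduler, storage=storage, cache=cache, use_cache=True)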
def hybrid_engine(self, pool_size=None):
  async_nodes = (FilesystemNode,)
  storage = Storage.create(in_memory=True)
  cache = Cache.create(storage=storage)
  with closing(ThreadHybridEngine(self.scheduler, storage,
                                  threaded_node_types=async_nodes, cache=cache,
                                  pool_size=pool_size, debug=True)) as e:
    yield e
def _process_initializer(storage):
  """A picklable top-level function that provides each worker process's initial state.

  The states are returned as a tuple. Each state is `Closable` so it can be cleaned up once
  the processes are done.
  """
  storage = Storage.clone(storage)
  return (storage, Cache.create(storage=storage))
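# Illustrative wiring only (an assumption, not the engine's actual pool implementation):
# a picklable, top-level initializer like _process_initializer can seed per-process
# state through multiprocessing.Pool's initializer/initargs hooks.
import multiprocessing

from pants.engine.storage import Storage

_PROCESS_STATE = None  # (storage, cache) tuple owned by this worker process


def _initialize_worker(storage):
  global _PROCESS_STATE
  _PROCESS_STATE = _process_initializer(storage)


def _example_worker_task(key):
  storage, cache = _PROCESS_STATE
  # ... deserialize the work item for `key` from storage, run it, consult the cache ...
  return key


if __name__ == '__main__':
  pool = multiprocessing.Pool(processes=2,
                              initializer=_initialize_worker,
                              initargs=(Storage.create(in_memory=False),))
  try:
    pool.map(_example_worker_task, [])
  finally:
    pool.close()
    pool.join()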
def setUp(self):
  build_root = os.path.join(os.path.dirname(__file__), 'examples', 'scheduler_inputs')
  self.scheduler, self.storage = setup_json_scheduler(build_root, debug=True)
  self.cache = Cache.create(Storage.create())
  self.java = Address.parse('src/java/codegen/simple')
def setUp(self): """Setup cache as well as request and result.""" self.storage = Storage.create() self.cache = Cache.create(storage=self.storage) self.request = Runnable(func=_runnable, args=('this is an arg', ), cacheable=True) self.result = 'something'
def hybrid_engine(self, pool_size=None):
  async_nodes = (FilesystemNode,)
  storage = Storage.create(debug=True, in_memory=False)
  cache = Cache.create(storage=storage)
  with closing(ThreadHybridEngine(self.scheduler, storage,
                                  threaded_node_types=async_nodes, cache=cache,
                                  pool_size=pool_size, debug=True)) as e:
    e.start()
    yield e
def setUp(self): """Setup cache as well as request and result.""" self.storage = Storage.create() self.cache = Cache.create(storage=self.storage) request = StepRequest(step_id=123, node='some node', dependencies={'some dep': 'some state', 'another dep': 'another state'}, inline_nodes=False, project_tree='some project tree') self.result = StepResult(state='something') self.keyed_request = self.storage.key_for_request(request)
def setUp(self): """Setup cache as well as request and result.""" self.storage = Storage.create(in_memory=True) self.cache = Cache.create(storage=self.storage) request = StepRequest( step_id=123, node="some node", dependencies={"some dep": "some state", "another dep": "another state"}, project_tree="some project tree", ) self.result = StepResult(state="something") self.keyed_request = self.storage.key_for_request(request)
def __init__(self, scheduler, storage=None, cache=None):
  """
  :param scheduler: The local scheduler for creating execution graphs.
  :type scheduler: :class:`pants.engine.scheduler.LocalScheduler`
  :param storage: The storage instance for serializables keyed by their hashes.
  :type storage: :class:`pants.engine.storage.Storage`
  :param cache: The cache instance for storing execution results; by default it uses the same
    Storage instance if not specified.
  :type cache: :class:`pants.engine.storage.Cache`
  """
  self._scheduler = scheduler
  self._storage = storage or Storage.create()
  # Default the cache onto the same Storage instance, as documented above.
  self._cache = cache or Cache.create(storage=self._storage)
def __init__(self, scheduler, storage=None, cache=None, use_cache=True):
  """
  :param scheduler: The local scheduler for creating execution graphs.
  :type scheduler: :class:`pants.engine.scheduler.LocalScheduler`
  :param storage: The storage instance for serializables keyed by their hashes.
  :type storage: :class:`pants.engine.storage.Storage`
  :param cache: The cache instance for storing execution results; by default it uses the same
    Storage instance if not specified.
  :type cache: :class:`pants.engine.storage.Cache`
  :param use_cache: True to enable usage of the cache. The cache incurs a large amount of
    overhead for small tasks and needs further improvement (TODO).
  :type use_cache: bool
  """
  self._scheduler = scheduler
  self._storage = storage or Storage.create()
  # Default the cache onto the same Storage instance, as documented above.
  self._cache = cache or Cache.create(storage=self._storage)
  self._use_cache = use_cache
def setUp(self): """Setup cache as well as request and result.""" self.storage = Storage.create() self.cache = Cache.create(storage=self.storage) self.request = Runnable(func=_runnable, args=("this is an arg",), cacheable=True) self.result = "something"