def test_persistent_dict_cache_collisions():
    try:
        tmpdir = tempfile.mkdtemp()
        pdict = PersistentDict("pytools-test", container_dir=tmpdir)

        key1 = PDictTestingKeyOrValue(1, hash_key=0)
        key2 = PDictTestingKeyOrValue(2, hash_key=0)

        pdict[key1] = 1

        # check lookup
        with pytest.warns(CollisionWarning):
            with pytest.raises(NoSuchEntryError):
                pdict.fetch(key2)

        # check deletion
        with pytest.warns(CollisionWarning):
            with pytest.raises(NoSuchEntryError):
                del pdict[key2]

        # check presence after deletion
        assert pdict[key1] == 1

        # check store_if_not_present
        pdict.store_if_not_present(key2, 2)
        assert pdict[key1] == 1

    finally:
        shutil.rmtree(tmpdir)
def test_persistent_dict_deletion():
    try:
        tmpdir = tempfile.mkdtemp()
        pdict = PersistentDict("pytools-test", container_dir=tmpdir)

        pdict[0] = 0
        del pdict[0]

        with pytest.raises(NoSuchEntryError):
            pdict.fetch(0)

        with pytest.raises(NoSuchEntryError):
            del pdict[1]

    finally:
        shutil.rmtree(tmpdir)
def test_persistent_dict():
    from pytools.persistent_dict import PersistentDict
    pdict = PersistentDict("pytools-test")
    pdict.clear()

    from random import randrange

    def rand_str(n=20):
        return "".join(
                chr(65+randrange(26)) for i in range(n))

    keys = [(randrange(2000), rand_str(), None) for i in range(20)]
    values = [randrange(2000) for i in range(20)]

    d = dict(zip(keys, values))

    for k, v in zip(keys, values):
        pdict[k] = v
        pdict.store(k, v, info_files={"hey": str(v)})

    for k, v in d.items():
        assert d[k] == pdict[k]

    for k, v in zip(keys, values):
        pdict.store(k, v+1, info_files={"hey": str(v)})

    for k, v in d.items():
        assert d[k] + 1 == pdict[k]
def test_persistent_dict_storage_and_lookup():
    try:
        tmpdir = tempfile.mkdtemp()
        pdict = PersistentDict("pytools-test", container_dir=tmpdir)

        from random import randrange

        def rand_str(n=20):
            return "".join(
                    chr(65+randrange(26)) for i in range(n))

        keys = [(randrange(2000), rand_str(), None) for i in range(20)]
        values = [randrange(2000) for i in range(20)]

        d = dict(list(zip(keys, values)))

        # {{{ check lookup

        for k, v in zip(keys, values):
            pdict[k] = v

        for k, v in d.items():
            assert d[k] == pdict[k]

        # }}}

        # {{{ check updating

        for k, v in zip(keys, values):
            pdict[k] = v + 1

        for k, v in d.items():
            assert d[k] + 1 == pdict[k]

        # }}}

        # {{{ check store_if_not_present

        for k, v in zip(keys, values):
            pdict.store_if_not_present(k, d[k] + 2)

        for k, v in d.items():
            assert d[k] + 1 == pdict[k]

        pdict.store_if_not_present(2001, 2001)
        assert pdict[2001] == 2001

        # }}}

        # check not found
        with pytest.raises(NoSuchEntryError):
            pdict.fetch(3000)

    finally:
        shutil.rmtree(tmpdir)
def test_persistent_dict_synchronization():
    try:
        tmpdir = tempfile.mkdtemp()
        pdict1 = PersistentDict("pytools-test", container_dir=tmpdir)
        pdict2 = PersistentDict("pytools-test", container_dir=tmpdir)

        # check lookup
        pdict1[0] = 1
        assert pdict2[0] == 1

        # check updating
        pdict1[0] = 2
        assert pdict2[0] == 2

        # check deletion
        del pdict1[0]
        with pytest.raises(NoSuchEntryError):
            pdict2.fetch(0)

    finally:
        shutil.rmtree(tmpdir)
def test_persistent_dict_clear():
    try:
        tmpdir = tempfile.mkdtemp()
        pdict = PersistentDict("pytools-test", container_dir=tmpdir)

        pdict[0] = 1
        pdict.fetch(0)
        pdict.clear()

        with pytest.raises(NoSuchEntryError):
            pdict.fetch(0)

    finally:
        shutil.rmtree(tmpdir)
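# The tests above exercise the core PersistentDict API (item assignment,
# fetch(), store_if_not_present(), deletion, clear(), NoSuchEntryError on
# missing keys). The minimal sketch below summarizes that usage in one place;
# it assumes only the behavior demonstrated by the tests, and the dictionary
# name and key/value contents are purely illustrative.

import shutil
import tempfile

from pytools.persistent_dict import NoSuchEntryError, PersistentDict

tmpdir = tempfile.mkdtemp()
try:
    pdict = PersistentDict("example-dict", container_dir=tmpdir)

    pdict["config"] = {"tol": 1e-6}                  # store via item assignment
    assert pdict.fetch("config") == {"tol": 1e-6}    # fetch() mirrors [] lookup

    pdict.store_if_not_present("config", {"tol": 1e-3})  # no-op: key already present
    assert pdict["config"] == {"tol": 1e-6}

    del pdict["config"]                              # deletion
    try:
        pdict.fetch("config")
    except NoSuchEntryError:
        pass                                         # missing keys raise NoSuchEntryError

    pdict.clear()                                    # drop all entries
finally:
    shutil.rmtree(tmpdir)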
            result.append(generated)

        from loopy.codegen.result import merge_codegen_results
        return merge_codegen_results(self, result)

    @property
    def ast_builder(self):
        if self.is_generating_device_code:
            return self.kernel.target.get_device_ast_builder()
        else:
            return self.kernel.target.get_host_ast_builder()

# }}}


code_gen_cache = PersistentDict("loopy-code-gen-cache-v3-"+DATA_MODEL_VERSION,
        key_builder=LoopyKeyBuilder())


class PreambleInfo(ImmutableRecord):
    """
    .. attribute:: kernel
    .. attribute:: seen_dtypes
    .. attribute:: seen_functions
    .. attribute:: seen_atomic_dtypes
    """


# {{{ main code generation entrypoint

def generate_code_v2(kernel):
    """
    # {{{ generate set_args

    gen("")
    gen("def set_args(%s):" % (", ".join(["self"] + arg_names)))

    with Indentation(gen):
        add_local_imports(gen)
        gen.extend(err_handler)

    # }}}

    return gen.get_picklable_module(), enqueue_name


invoker_cache = PersistentDict("pyopencl-invoker-cache-v1",
        key_builder=_NumpyTypesKeyBuilder())


def generate_enqueue_and_set_args(function_name,
        num_passed_args, num_cl_args,
        scalar_arg_dtypes,
        work_around_arg_count_bug, warn_about_arg_count_bug):

    cache_key = (function_name, num_passed_args, num_cl_args,
            scalar_arg_dtypes,
            work_around_arg_count_bug, warn_about_arg_count_bug)

    from_cache = False

    try:
        result = invoker_cache[cache_key]
        from_cache = True
            # Even if boostable_into is empty, leave boostable flag on--it is used
            # for boosting into unused hw axes.

            insn = insn.copy(boostable_into=boostable_into)
        else:
            insn = insn.copy(boostable_into=set())

        new_insns.append(insn)

    return kernel.copy(instructions=new_insns)

# }}}


preprocess_cache = PersistentDict("loopy-preprocess-cache-v2-"+DATA_MODEL_VERSION,
        key_builder=LoopyKeyBuilder())


def preprocess_kernel(kernel, device=None):
    if device is not None:
        from warnings import warn
        warn("passing 'device' to preprocess_kernel() is deprecated",
                DeprecationWarning, stacklevel=2)

    from loopy.kernel import kernel_state
    if kernel.state != kernel_state.INITIAL:
        raise LoopyError("cannot re-preprocess an already preprocessed "
                "kernel")

    # {{{ cache retrieval
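# The "cache retrieval" section above is truncated. The idiom that these
# PersistentDict-backed caches follow (probe, fall back to the real work on a
# miss, then record the result) is sketched below. This is an illustrative
# reconstruction, not loopy's verbatim implementation: _do_preprocess is a
# hypothetical stand-in for the actual preprocessing work.

from pytools.persistent_dict import NoSuchEntryError


def _cached_preprocess(kernel):
    try:
        # Probe the on-disk cache; a hit skips preprocessing entirely.
        return preprocess_cache[kernel]
    except NoSuchEntryError:
        pass

    result = _do_preprocess(kernel)  # hypothetical stand-in for the real work

    # Record the result for future processes that share the cache directory.
    preprocess_cache.store_if_not_present(kernel, result)
    return result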
def test_persistent_dict_storage_and_lookup():
    try:
        tmpdir = tempfile.mkdtemp()
        pdict = PersistentDict("pytools-test", container_dir=tmpdir)

        from random import randrange

        def rand_str(n=20):
            return "".join(chr(65 + randrange(26)) for i in range(n))

        keys = [
            (randrange(2000) - 1000, rand_str(), None, SomeTag(rand_str()),
                frozenset({"abc", 123}))
            for i in range(20)]
        values = [randrange(2000) for i in range(20)]

        d = dict(zip(keys, values))

        # {{{ check lookup

        for k, v in zip(keys, values):
            pdict[k] = v

        for k, v in d.items():
            assert d[k] == pdict[k]
            assert v == pdict[k]

        # }}}

        # {{{ check updating

        for k, v in zip(keys, values):
            pdict[k] = v + 1

        for k, v in d.items():
            assert d[k] + 1 == pdict[k]
            assert v + 1 == pdict[k]

        # }}}

        # {{{ check store_if_not_present

        for k, _ in zip(keys, values):
            pdict.store_if_not_present(k, d[k] + 2)

        for k, v in d.items():
            assert d[k] + 1 == pdict[k]
            assert v + 1 == pdict[k]

        pdict.store_if_not_present(2001, 2001)
        assert pdict[2001] == 2001

        # }}}

        # {{{ check dataclasses

        for v in [17, 18]:
            key = MyStruct("hi", v)
            pdict[key] = v

            # reuse same key, with stored hash
            assert pdict[key] == v

        with pytest.raises(NoSuchEntryError):
            pdict[MyStruct("hi", 19)]

        for v in [17, 18]:
            # make new key instances
            assert pdict[MyStruct("hi", v)] == v

        # }}}

        # {{{ check enums

        pdict[MyEnum.YES] = 1
        with pytest.raises(NoSuchEntryError):
            pdict[MyEnum.NO]
        assert pdict[MyEnum.YES] == 1

        pdict[MyIntEnum.YES] = 12
        with pytest.raises(NoSuchEntryError):
            pdict[MyIntEnum.NO]
        assert pdict[MyIntEnum.YES] == 12

        # }}}

        # check not found
        with pytest.raises(NoSuchEntryError):
            pdict.fetch(3000)

    finally:
        shutil.rmtree(tmpdir)
            result = self.buf_var

            if access_subscript:
                result = result.index(tuple(access_subscript))

            # Can't possibly be nested, but recurse anyway to
            # make sure substitution rules referenced below here
            # do not get thrown away.
            self.rec(result, expn_state.copy(arg_context={}))

            return result

# }}}


buffer_array_cache = PersistentDict("loopy-buffer-array-cache-"
        + DATA_MODEL_VERSION, key_builder=LoopyKeyBuilder())


# Adding an argument? also add something to the cache_key below.

def buffer_array(kernel, var_name, buffer_inames, init_expression=None,
        store_expression=None, within=None, default_tag="l.auto",
        temporary_scope=None, temporary_is_local=None,
        fetch_bounding_box=False):
    """
            result = outputs[packing_info.name] = \
                    np.zeros(packing_info.sep_shape, dtype=np.object)

            for index, unpacked_name in packing_info.subscripts_and_names:
                result[index] = outputs.pop(unpacked_name)

        return outputs

# }}}


# {{{ KernelExecutorBase

typed_and_scheduled_cache = PersistentDict(
        "loopy-typed-and-scheduled-cache-v1-"+DATA_MODEL_VERSION,
        key_builder=LoopyKeyBuilder())


class KernelExecutorBase(object):
    """An object connecting a kernel to a :class:`pyopencl.Context`
    for execution.

    .. automethod:: __init__
    .. automethod:: __call__
    """

    def __init__(self, kernel):
        """
        :arg kernel: a loopy.LoopKernel
        """
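# All of the caches above hand PersistentDict a key_builder (LoopyKeyBuilder,
# _NumpyTypesKeyBuilder). For an object to serve as (part of) a persistent key,
# the builder must know how to hash it. The sketch below illustrates the two
# customization points offered by pytools.persistent_dict as I understand them:
# an update_persistent_hash() method on the key type itself, or an
# update_for_<type>() method on a KeyBuilder subclass. The class names here
# (KernelSignature, MyKeyBuilder) are illustrative, not part of any library.

from pytools.persistent_dict import KeyBuilder


class KernelSignature:
    """Illustrative key type: contributes its fields to the persistent hash."""

    def __init__(self, name, arg_dtype_names):
        self.name = name
        self.arg_dtype_names = tuple(arg_dtype_names)

    def update_persistent_hash(self, key_hash, key_builder):
        key_builder.rec(key_hash, self.name)
        key_builder.rec(key_hash, self.arg_dtype_names)


class MyKeyBuilder(KeyBuilder):
    """Illustrative builder subclass: teaches the builder about plain sets."""

    def update_for_set(self, key_hash, key):
        # Hash sets order-independently by sorting their reprs first.
        for item in sorted(repr(entry) for entry in key):
            key_hash.update(item.encode("utf-8"))


# Usage (illustrative): PersistentDict("my-cache", key_builder=MyKeyBuilder())
# can then be indexed by KernelSignature instances or by sets.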