def test_simple_split():
    """Split a [40, 10] array onto 4 banks along dim 0 with the default split."""
    graph = dace.SDFG("hbm_bank_split_first_dim")
    _, banked, joined = mkc(graph, None, "b", "a", StorageType.CPU_Heap,
                            StorageType.CPU_Heap, [4, 10, 10], [40, 10], "b")
    matches = optimizer.Optimizer(graph).get_pattern_matches(patterns=BankSplit)
    for match in matches:
        match.apply(graph.node(match.state_id), graph)
    graph(a=joined, b=banked)
    # Each bank holds 10 consecutive rows of the joined array.
    for bank in (1, 3):
        assert np.allclose(banked[bank], joined[10 * bank:10 * (bank + 1), :])
def test_second_dim_split_2d():
    """Split a [10, 100] array into 10 banks along its second dimension."""
    graph = dace.SDFG("hbm_bank_split_sec_dim_split2d")
    _, src, dst = mkc(graph, None, "a", "b", StorageType.CPU_Heap,
                      StorageType.CPU_Heap, [10, 100], [10, 10, 10], "b")
    for match in optimizer.Optimizer(graph).get_pattern_matches(patterns=BankSplit):
        # 1 part along dim 0, 10 parts along dim 1.
        match.split_array_info = [1, 10]
        match.apply(graph.node(match.state_id), graph)
    src = np.random.uniform(0, 10, [10, 100]).astype(np.int32)
    graph(a=src, b=dst)
    # Bank i carries columns [10*i, 10*i + 10).
    for bank in range(10):
        lo = 10 * bank
        assert np.allclose(src[0:10, lo:lo + 10], dst[bank])
def test_explicit_split_3d():
    """Split a 3d array 3x2x4 ways onto 24 banks with an explicit split spec."""
    graph = dace.SDFG("hbm_bank_split_explicit_3d")
    _, src, dst = mkc(graph, None, "a", "b", StorageType.CPU_Heap,
                      StorageType.CPU_Heap, [120, 100, 100], [24, 40, 50, 25])
    for match in optimizer.Optimizer(graph).get_pattern_matches(patterns=BankSplit):
        # 3 parts along dim 0, 2 along dim 1, 4 along dim 2 -> 24 banks.
        match.split_array_info = [3, 2, 4]
        match.apply(graph.node(match.state_id), graph)
    src = np.random.uniform(0, 100, [120, 100, 100]).astype(np.int32)
    graph(a=src, b=dst)
    # Spot-check banks at the last, middle, and an interior grid position.
    assert np.allclose(src[80:120, 50:100, 75:100], dst[23])
    assert np.allclose(src[0:40, 50:100, 75:100], dst[7])
    assert np.allclose(src[40:80, 0:50, 25:50], dst[9])
def test_even_split_3d():
    """Join 8 banks of [50, 50, 50] back into one [100, 100, 100] array (2x2x2)."""
    graph = dace.SDFG("hbm_bank_split_even_split_3d")
    _, banked, joined = mkc(graph, None, "b", "a", StorageType.CPU_Heap,
                            StorageType.CPU_Heap, [8, 50, 50, 50],
                            [100, 100, 100], "b")
    for match in optimizer.Optimizer(graph).get_pattern_matches(patterns=BankSplit):
        match.split_array_info = [2, 2, 2]
        match.apply(graph.node(match.state_id), graph)
    # Overwrite the banked input with fresh random data before running.
    banked = np.random.uniform(0, 100, [8, 50, 50, 50]).astype(np.int32)
    graph(a=joined, b=banked)
    # Banks are laid out in row-major order over the 2x2x2 split grid.
    assert np.allclose(joined[0:50, 0:50, 0:50], banked[0, :, :, :])
    assert np.allclose(joined[50:100, 50:100, 50:100], banked[7, :, :, :])
    assert np.allclose(joined[0:50, 50:100, 0:50], banked[2, :, :, :])
def get_property_metdata():
    # NOTE(review): the name is misspelled ("metdata") but is kept as-is since
    # external callers may reference it.
    """ Generate a dictionary of class properties and their metadata.

        This iterates over all classes registered as serializable in DaCe's
        serialization module, checks whether there are properties present
        (true for any class registered via the @make.properties decorator),
        and then assembles their metadata to a dictionary.
    """
    # Lazy import to cut down on module load time.
    from dace.sdfg.nodes import full_class_path

    # In order to get all transformation metadata the @make.properties
    # annotation for each transformation needs to have run, so the
    # transformations are registered in `dace.serialize._DACE_SERIALIZE_TYPES`.
    # The simplest way to achieve this is by simply getting all pattern matches
    # of a dummy SDFG. Since this code should only be run once per SDFG editor,
    # this doesn't add any continuous overhead like it would if we were to
    # send transformation metadata along with `get_transformations`.
    from dace.transformation import optimizer
    _ = optimizer.Optimizer(dace.SDFG('dummy')).get_pattern_matches()

    meta_dict = {}
    # Reverse lookup: property metatype name -> one representative metadata
    # entry for that metatype.
    meta_dict['__reverse_type_lookup__'] = {}
    # Library-node lookup: registered typename -> the key its metadata is
    # stored under in `meta_dict`.
    meta_dict['__libs__'] = {}
    for typename in dace.serialize._DACE_SERIALIZE_TYPES:
        t = dace.serialize._DACE_SERIALIZE_TYPES[typename]
        # Only classes decorated with @make.properties carry __properties__.
        if hasattr(t, '__properties__'):
            meta_key = typename
            # Library-node subclasses are keyed by their full class path so
            # that identically-named nodes from different libraries don't
            # collide.
            if (issubclass(t, dace.sdfg.nodes.LibraryNode)
                    and not t == dace.sdfg.nodes.LibraryNode):
                meta_key = full_class_path(t)
            meta_dict[meta_key] = {}

            # Collect the available implementations of library nodes, used
            # below as the choices for their 'implementation' property.
            libnode_implementations = None
            if hasattr(t, 'implementations'):
                libnode_implementations = list(t.implementations.keys())

            for propname, prop in t.__properties__.items():
                meta_dict[meta_key][propname] = prop.meta_to_json(prop)
                if hasattr(prop, 'key_type') and hasattr(prop, 'value_type'):
                    # For dictionary properties, add their key and value types.
                    meta_dict[meta_key][propname][
                        'key_type'] = prop.key_type.__name__
                    meta_dict[meta_key][propname][
                        'value_type'] = prop.value_type.__name__
                elif hasattr(prop, 'element_type'):
                    # For collection properties, add their element type.
                    meta_dict[meta_key][propname][
                        'element_type'] = prop.element_type.__name__

                if prop.choices is not None:
                    # If there are specific choices for this property (i.e. this
                    # property is an enum), list those as metadata as well.
                    if inspect.isclass(prop.choices):
                        if issubclass(prop.choices, aenum.Enum):
                            # 'Undefined' enum members are filtered out as they
                            # are not meaningful choices for an editor.
                            choices = []
                            for choice in prop.choices:
                                choice_short = str(choice).split('.')[-1]
                                if choice_short != 'Undefined':
                                    choices.append(choice_short)
                            meta_dict[meta_key][propname]['choices'] = choices
                elif (propname == 'implementation'
                      and libnode_implementations is not None):
                    # For implementation properties, add all library
                    # implementations as choices.
                    meta_dict[meta_key][propname][
                        'choices'] = libnode_implementations

                # Create a reverse lookup method for each meta type. This allows
                # us to get meta information about things other than properties
                # contained in some SDFG properties (types, CodeBlocks, etc.).
                if meta_dict[meta_key][propname]['metatype']:
                    meta_type = meta_dict[meta_key][propname]['metatype']
                    if not meta_type in meta_dict['__reverse_type_lookup__']:
                        # NOTE(review): first property seen with this metatype
                        # wins as the representative entry.
                        meta_dict['__reverse_type_lookup__'][
                            meta_type] = meta_dict[meta_key][propname]

            # For library nodes we want to make sure they are all easily
            # accessible under '__libs__', to be able to list them all out.
            if (issubclass(t, dace.sdfg.nodes.LibraryNode)
                    and not t == dace.sdfg.nodes.LibraryNode):
                meta_dict['__libs__'][typename] = meta_key

    # Save a lookup for enum values not present yet.
    enum_list = [
        typename
        for typename, dtype in inspect.getmembers(dace.dtypes, inspect.isclass)
        if issubclass(dtype, aenum.Enum)
    ]
    for enum_name in enum_list:
        if not enum_name in meta_dict['__reverse_type_lookup__']:
            # Again, filter out 'Undefined' members.
            choices = []
            for choice in getattr(dace.dtypes, enum_name):
                choice_short = str(choice).split('.')[-1]
                if choice_short != 'Undefined':
                    choices.append(choice_short)
            meta_dict['__reverse_type_lookup__'][enum_name] = {
                'category': 'General',
                'metatype': enum_name,
                'choices': choices,
            }

    return {
        'meta_dict': meta_dict,
    }