def calculate_COM(offsets):
    """
    Determine the centre of mass (COM) in a collection of offsets.

    The COM is a basis to span the vectors in ``offsets``. Also return the
    distance of each element E in ``offsets`` from the COM (i.e., the
    coefficients that when multiplied by the COM give exactly E).
    """
    # Derive one COM entry per group of aligned offsets
    COM = []
    for ofs in zip(*offsets):
        entry = []
        for values in zip(*ofs):
            strides = sorted(set(values))
            npoints = len(strides)
            # Heuristic:
            # - middle point if odd number of values, or
            # - strides average otherwise
            if npoints % 2 == 1:
                entry.append(strides[(npoints - 1) // 2])
            else:
                entry.append(int(np.mean(strides, dtype=int)))
        COM.append(tuple(entry))

    # Each offset must be expressible as COM plus a single coefficient
    distances = []
    for ofs in offsets:
        found = distance(COM, ofs)
        if len(found) != 1:
            raise DSEException("%s cannot be represented by the COM %s"
                               % (str(ofs), str(COM)))
        distances.append(found.pop())

    return COM, distances
def __init__(self, passes, template=None, profile=True):
    """
    Initialize a rewriter driven by a user-specified sequence of passes.

    :param passes: either a comma-separated string of pass names
                   (e.g. ``"p1,p2"``) or an iterable of pass names; each
                   name must be a key in ``CustomRewriter.passes_mapper``.
    :param template: forwarded to the superclass constructor.
    :param profile: forwarded to the superclass constructor.
    :raises DSEException: if any requested pass is unknown.
    """
    try:
        passes = passes.split(',')
    except AttributeError:
        # Already in tuple format
        pass
    # Validate on both input paths. The original code only checked the
    # pre-split (tuple) form, so unknown passes supplied as a
    # comma-separated string slipped through silently.
    if not all(i in CustomRewriter.passes_mapper for i in passes):
        raise DSEException("Unknown passes `%s`" % str(passes))
    self.passes = passes
    super(CustomRewriter, self).__init__(profile, template)
def __init__(self, exprs, **kwargs):
    """
    Build a TemporariesGraph out of a list of expressions.

    Tracks, for each expression, the set of expressions it reads from
    (``reads``) and the set of expressions reading it (``readby``), and
    orders scalar temporaries so that read-after-write dependences are
    honored.

    :param exprs: iterable of expressions, each with ``lhs``/``rhs``.
    :raises DSEException: if two expressions share the same left-hand side.
    """
    # Check input legality. Duplicate LHSs collapse silently while
    # building the mapper (dict keys are unique), so the check must
    # compare against the number of input expressions -- the original
    # ``len(set(mapper)) != len(mapper)`` was a tautology and could
    # never fire.
    mapper = OrderedDict([(i.lhs, i) for i in exprs])
    if len(mapper) != len(exprs):
        raise DSEException(
            "Found redundant node, cannot build TemporariesGraph.")

    # Construct Temporaries, tracking reads and readby
    tensor_map = DefaultOrderedDict(list)
    for i in mapper:
        tensor_map[as_symbol(i)].append(i)
    reads = DefaultOrderedDict(set)
    readby = DefaultOrderedDict(set)
    for k, v in mapper.items():
        handle = retrieve_terminals(v.rhs)
        # Also inspect the index expressions of Indexed terminals
        for i in list(handle):
            if i.is_Indexed:
                for idx in i.indices:
                    handle |= retrieve_terminals(idx)
        reads[k].update(
            set(flatten([tensor_map.get(as_symbol(i), []) for i in handle])))
        for i in reads[k]:
            readby[i].add(k)

    # Make sure read-after-writes are honored for scalar temporaries:
    # tensors keep their input order; each scalar is inserted before its
    # first reader (or at the front if nothing reads it)
    processed = [i for i in mapper if i.is_Indexed]
    queue = [i for i in mapper if i not in processed]
    while queue:
        k = queue.pop(0)
        if not readby[k]:
            processed.insert(0, k)
        elif all(i in processed for i in readby[k]):
            index = min(processed.index(i) for i in readby[k])
            processed.insert(index, k)
        else:
            # Some readers not placed yet; retry later
            queue.append(k)

    # Build up the TemporariesGraph
    temporaries = [(i, Temporary(*mapper[i].args, inc=q_inc(mapper[i]),
                                 reads=reads[i], readby=readby[i]))
                   for i in processed]
    super(TemporariesGraph, self).__init__(temporaries, **kwargs)

    # Determine indices along the space and time dimensions
    terms = [v for k, v in self.items() if v.is_tensor and not q_indirect(k)]
    indices = filter_ordered(flatten([i.function.indices for i in terms]))
    self.space_indices = tuple(i for i in indices if i.is_Space)
    self.time_indices = tuple(i for i in indices if i.is_Time)
def temporaries_graph(temporaries):
    """
    Create a dependency graph given a list of :class:`sympy.Eq`.
    """
    # Legality check, then initialize the graph with one node per LHS
    temporaries = [Temporary(*i.args) for i in temporaries]
    nodes = [i.lhs for i in temporaries]
    if len(set(nodes)) != len(nodes):
        raise DSEException("Found redundant node in the TemporariesGraph.")
    graph = TemporariesGraph(zip(nodes, temporaries))

    # Group the nodes by the symbol they are keyed on
    by_symbol = OrderedDict()
    for node in nodes:
        by_symbol.setdefault(as_symbol(node), []).append(node)

    # Attach the reads and readby edges
    for target, temporary in graph.items():
        # Scalars appearing on the right-hand side
        found = terminals(temporary.rhs)
        # Tensors too (indirections such as A[B[i]] are not inspected)
        for term in list(found):
            if not q_indexed(term):
                continue
            for subscript in term.indices:
                found |= terminals(subscript)
        # Derive actual reads and propagate the information
        temporary.reads.update(
            set(flatten([by_symbol.get(as_symbol(term), [])
                         for term in found])))
        for read in temporary.reads:
            graph[read].readby.add(target)

    return graph