def fast_destroy(self, fgraph, app, reason):
    """
    Do the check for only one level.

    For now:
    - Destroyed variables can have only one client.
    - Views are allowed to have multiple clients.
    - Chains of views are allowed.
    - Destroying a view is not allowed.

    """
    dm = app.op.destroy_map
    if not dm:
        return
    inputs = set(
        itertools.chain.from_iterable(dm.values())
    )  # list of app's destroyed inputs
    for inp_idx in inputs:
        inp = app.inputs[inp_idx]
        if getattr(inp.tag, "indestructible", False) or isinstance(inp, Constant):
            self.fail_validate[app] = InconsistencyError(
                f"Attempting to destroy indestructible variables: {inp}"
            )
        elif len(fgraph.clients[inp]) > 1:
            self.fail_validate[app] = InconsistencyError(
                "Destroyed variable has more than one client. " + str(reason)
            )
        elif inp.owner:
            # The destroyed input is itself the output of another Apply:
            # reject it if that output is a view or an in-place result.
            app2 = inp.owner
            inp_idx2 = app2.outputs.index(inp)
            v = app2.op.view_map
            d = app2.op.destroy_map
            if v:
                v = v.get(inp_idx2, [])
                if len(v) > 0:
                    self.fail_validate[app] = InconsistencyError(
                        "Destroyed variable has view_map. " + str(reason)
                    )
            elif d:
                d = d.get(inp_idx2, [])
                if len(d) > 0:
                    self.fail_validate[app] = InconsistencyError(
                        "Destroyed variable has destroy_map. " + str(reason)
                    )
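
# Illustrative note (not part of the handler): ``destroy_map`` and
# ``view_map`` both follow the ``{output_index: [input_indices]}``
# convention.  For example, a hypothetical in-place Op whose only output
# overwrites its first input would declare
#
#     destroy_map = {0: [0]}
#
# and a hypothetical view Op whose only output aliases its first input
# would declare
#
#     view_map = {0: [0]}
#
# fast_destroy() records a failure whenever such a destroyed input is a
# Constant, is tagged indestructible, has more than one client, or is
# itself produced as a view or in-place output.
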
def _build_droot_impact(destroy_handler):
    droot = {}  # destroyed view + nonview variables -> foundation
    impact = {}  # destroyed nonview variable -> it + all views of it
    root_destroyer = {}  # root -> destroyer apply

    for app in destroy_handler.destroyers:
        for output_idx, input_idx_list in app.op.destroy_map.items():
            if len(input_idx_list) != 1:
                raise NotImplementedError()
            input_idx = input_idx_list[0]
            input = app.inputs[input_idx]

            # Find the non-view variable which is ultimately viewed by input.
            view_i = destroy_handler.view_i
            _r = input
            while _r is not None:
                r = _r
                _r = view_i.get(r)
            input_root = r

            if input_root in droot:
                raise InconsistencyError(f"Multiple destroyers of {input_root}")
            droot[input_root] = input_root
            root_destroyer[input_root] = app

            # Add all the variables that are views of input_root into the
            # OrderedSet input_impact.
            input_impact = OrderedSet()

            q = deque()
            q.append(input_root)
            while len(q) > 0:
                v = q.popleft()
                for n in destroy_handler.view_o.get(v, []):
                    input_impact.add(n)
                    q.append(n)

            for v in input_impact:
                assert v not in droot
                droot[v] = input_root

            impact[input_root] = input_impact
            impact[input_root].add(input_root)

    return droot, impact, root_destroyer
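
# Worked example (hypothetical graph, for illustration only): with
# ``v1 = view(x)``, ``v2 = view(v1)`` and an Apply node ``app`` that
# destroys ``v2``, following ``view_i`` from v2 reaches the root x, and
# the function returns
#
#     droot          == {x: x, v1: x, v2: x}
#     impact         == {x: OrderedSet([v1, v2, x])}
#     root_destroyer == {x: app}
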
def validate(self, fgraph):
    """
    Return True when the graph is consistent.

    Raise InconsistencyError when
    a) orderings() raises an error, or
    b) the orderings cannot be topologically sorted.

    """
    if self.destroyers:
        if self.algo == "fast":
            if self.fail_validate:
                app_err_pairs = self.fail_validate
                self.fail_validate = OrderedDict()
                # self.fail_validate can only be a hint that there is
                # probably a cycle.  Inside replace() we may record many
                # reasons to reject a change, but we don't know which one
                # will fail first inside validate().  Thus, the graph might
                # already have changed when we raise the self.fail_validate
                # error, so before raising it we double-check here.
                for app in app_err_pairs:
                    if app in fgraph.apply_nodes:
                        self.fast_destroy(fgraph, app, "validate")
                if self.fail_validate:
                    self.fail_validate = app_err_pairs
                    raise app_err_pairs[app]
        else:
            ords = self.orderings(fgraph, ordered=False)
            if _contains_cycle(fgraph, ords):
                raise InconsistencyError("Dependency graph contains cycles")
    else:
        # James's Conjecture:
        # If there are no destructive ops, then there can be no cycles.

        # FB: This isn't always true.  An optimization can introduce a node
        # that depends on itself.  This is very rare and should not happen
        # in general; it will be caught later, although the error will be
        # far from its source.  Relying on this conjecture still speeds up
        # compilation most of the time.  Users should not be able to create
        # such a dependency unless they mess with the internals too much.
        pass
    return True
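
# Sketch of the "fast" flow (hypothetical names, for illustration only):
# while the graph is being rewritten, fast_destroy() only records hints in
# self.fail_validate, e.g. after a change that makes an in-place Op destroy
# a variable that still has another client:
#
#     fgraph.replace(out, add_inplace(x, y))  # hint recorded for that Apply
#     fgraph.validate()                       # re-checks, may raise
#
# validate() re-runs fast_destroy() on the Apply nodes that still exist
# before deciding whether to raise the recorded InconsistencyError.
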
def orderings(self, fgraph, ordered=True):
    """
    Return orderings induced by destructive operations.

    Raise InconsistencyError when
    a) attempting to destroy an indestructible variable, or
    b) attempting to destroy a value multiple times, or
    c) an Apply destroys (illegally) one of its own inputs by aliasing.

    """
    if ordered:
        set_type = OrderedSet
        rval = OrderedDict()
    else:
        set_type = set
        rval = dict()

    if self.destroyers:
        # BUILD DATA STRUCTURES
        # CHECK for multiple destructions during construction of variables
        droot, impact, __ignore = self.refresh_droot_impact()

        # check for destruction of constants
        illegal_destroy = [
            r
            for r in droot
            if getattr(r.tag, "indestructible", False) or isinstance(r, Constant)
        ]
        if illegal_destroy:
            raise InconsistencyError(
                f"Attempting to destroy indestructible variables: {illegal_destroy}"
            )

        # add destroyed variable clients as computational dependencies
        for app in self.destroyers:
            # keep track of clients that should run before the current Apply
            root_clients = set_type()
            # for each destroyed input...
            for output_idx, input_idx_list in app.op.destroy_map.items():
                destroyed_idx = input_idx_list[0]
                destroyed_variable = app.inputs[destroyed_idx]
                root = droot[destroyed_variable]
                root_impact = impact[root]
                # We generally want to make all clients of anything that
                # depends on root prerequisites of app.  But app is itself
                # one such client!  app will always be a client of the node
                # we're destroying (destroyed_variable); the tricky case is
                # when it is also a client of *another variable* viewing the
                # root.  Generally this is illegal, e.g. add_inplace(x, x.T).
                # In some special cases, though, the in-place op can still
                # work properly with multiple destroyed inputs, e.g.
                # add_inplace(x, x).  An Op that can still work in this case
                # should declare so via the 'destroyhandler_tolerate_same'
                # attribute or the 'destroyhandler_tolerate_aliased'
                # attribute.
                #
                # destroyhandler_tolerate_same should be a list of pairs of
                # the form
                #     [(idx0, idx1), (idx0, idx2), ...]
                # The first element of each pair is the input index of a
                # destroyed variable.
                # The second element of each pair is the index of a
                # different input where we will permit exactly the same
                # variable to appear.
                # For example, add_inplace.tolerate_same might be [(0, 1)]
                # if the destroyed input is also allowed to appear as the
                # second argument.
                #
                # destroyhandler_tolerate_aliased is the same sort of list
                # of pairs.
                # op.destroyhandler_tolerate_aliased = [(idx0, idx1)] tells
                # the destroy handler to IGNORE an aliasing between a
                # destroyed input idx0 and another input idx1.
                # This is generally a bad idea, but it is safe in some
                # cases, such as
                # - the op reads from the aliased idx1 before modifying idx0
                # - idx0 and idx1 are guaranteed not to overlap (e.g. they
                #   point at different rows of a matrix).
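                #
                # Illustration (hypothetical Op attributes, following the
                # convention described above): an in-place addition that
                # destroys input 0 (add_inplace(x, y)), also works when
                # given the very same variable twice (add_inplace(x, x)),
                # and deliberately ignores aliasing between inputs 0 and 1
                # could declare
                #
                #     destroy_map = {0: [0]}
                #     destroyhandler_tolerate_same = [(0, 1)]
                #     destroyhandler_tolerate_aliased = [(0, 1)]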
                #
                # CHECK FOR INPUT ALIASING
                # OPT: pre-compute this on import
                tolerate_same = getattr(app.op, "destroyhandler_tolerate_same", [])
                assert isinstance(tolerate_same, list)
                tolerated = {
                    idx1 for idx0, idx1 in tolerate_same if idx0 == destroyed_idx
                }
                tolerated.add(destroyed_idx)
                tolerate_aliased = getattr(
                    app.op, "destroyhandler_tolerate_aliased", []
                )
                assert isinstance(tolerate_aliased, list)
                ignored = {
                    idx1 for idx0, idx1 in tolerate_aliased if idx0 == destroyed_idx
                }
                for i, input in enumerate(app.inputs):
                    if i in ignored:
                        continue
                    if input in root_impact and (
                        i not in tolerated or input is not destroyed_variable
                    ):
                        raise InconsistencyError(
                            f"Input aliasing: {app} ({destroyed_idx}, {i})"
                        )

                # add the rule: app must be preceded by all other Apply
                # instances that depend on destroyed_variable
                for r in root_impact:
                    assert not [a for a, c in self.clients[r].items() if not c]
                    root_clients.update(
                        [a for a, c in self.clients[r].items() if c]
                    )

            # app itself is a client of the destroyed inputs,
            # but should not run before itself
            root_clients.remove(app)
            if root_clients:
                rval[app] = root_clients

    return rval
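
# Shape of the result (hypothetical node names, for illustration only):
# ``orderings()`` maps each destructive Apply node to the set of other
# Apply nodes that must be computed before it, e.g.
#
#     {add_inplace_node: {transpose_node, sum_node}}
#
# i.e. every other client of the destroyed variable, or of any view of its
# root, has to run before the in-place Apply overwrites the storage.
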