def __init__(self, include, require=None, exclude=None,
             subquery=None, position_cutoff=None):
    self.include = OrderedSet(include)
    self.require = require or OrderedSet()
    self.exclude = exclude or OrderedSet()
    self.subquery = subquery or {}
    self.position_cutoff = position_cutoff
    if isinstance(self.require, (list, tuple)):
        self.require = OrderedSet(self.require)
    if isinstance(self.exclude, (list, tuple)):
        self.exclude = OrderedSet(self.exclude)

def on_attach(self, fgraph):
    """
    When attaching to a new fgraph, check that

    1) This DestroyHandler wasn't already attached to some fgraph
       (its data structures are only set up to serve one)
    2) The FunctionGraph doesn't already have a DestroyHandler.
       This would result in it validating everything twice, causing
       compilation to be slower.

    Give the FunctionGraph instance:

    1) A new method "destroyers(var)"
       TODO: what does this do exactly?
    2) A new attribute, "destroy_handler"
       TODO: WRITEME: what does this do besides the checks?

    """
    ####### Do the checking ###########
    already_there = False
    if self.fgraph is fgraph:
        already_there = True
    if self.fgraph is not None:
        raise Exception("A DestroyHandler instance can only serve one"
                        " FunctionGraph. (Matthew 6:24)")
    for attr in ("destroyers", "destroy_handler"):
        if hasattr(fgraph, attr):
            already_there = True

    if already_there:
        # FunctionGraph.attach_feature catches AlreadyThere and cancels
        # the attachment.
        raise toolbox.AlreadyThere(
            "DestroyHandler feature is already present"
            " or in conflict with another plugin.")

    ####### Annotate the FunctionGraph ############
    def get_destroyers_of(r):
        droot, impact, root_destroyer = self.refresh_droot_impact()
        try:
            return [root_destroyer[droot[r]]]
        except Exception:
            return []

    fgraph.destroyers = get_destroyers_of
    fgraph.destroy_handler = self

    self.fgraph = fgraph
    # set of Apply instances with non-null destroy_map
    self.destroyers = OrderedSet()
    # variable -> variable used in calculation
    self.view_i = OrderedDict()
    # variable -> set of variables that use this one as a direct input
    self.view_o = OrderedDict()
    # clients: how many times does an apply use a given variable
    self.clients = OrderedDict()  # variable -> apply -> ninputs
    self.stale_droot = True

    self.debug_all_apps = OrderedSet()
    if self.do_imports_on_attach:
        toolbox.Bookkeeper.on_attach(self, fgraph)

class Query(object):
    """
    Parameters
    ----------
    position_cutoff : float
        Used by SequenceDB to keep only optimizers that are positioned
        before the cut-off point.

    """

    def __init__(self, include, require=None, exclude=None,
                 subquery=None, position_cutoff=None):
        self.include = OrderedSet(include)
        self.require = require or OrderedSet()
        self.exclude = exclude or OrderedSet()
        self.subquery = subquery or {}
        self.position_cutoff = position_cutoff
        if isinstance(self.require, (list, tuple)):
            self.require = OrderedSet(self.require)
        if isinstance(self.exclude, (list, tuple)):
            self.exclude = OrderedSet(self.exclude)

    def __str__(self):
        # %s (not %d) for position_cutoff, which may be None.
        return ("Query{inc=%s,ex=%s,require=%s,subquery=%s,"
                "position_cutoff=%s}" %
                (self.include, self.exclude, self.require, self.subquery,
                 self.position_cutoff))

    # Add all optimizations with this tag.
    def including(self, *tags):
        return Query(self.include.union(tags), self.require, self.exclude,
                     self.subquery, self.position_cutoff)

    # Remove all optimizations with this tag.
    def excluding(self, *tags):
        return Query(self.include, self.require, self.exclude.union(tags),
                     self.subquery, self.position_cutoff)

    # Keep only optimizations with this tag.
    def requiring(self, *tags):
        return Query(self.include, self.require.union(tags), self.exclude,
                     self.subquery, self.position_cutoff)

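# --- Illustrative usage sketch (not part of the original module) ---
# A minimal example of composing a Query through its chaining methods,
# assuming the Query class above and OrderedSet are in scope. The tag
# names ('fast_run', 'inplace', 'cudnn') are hypothetical placeholders.
def _demo_query_chaining():
    q = Query(include=["fast_run"])
    # Each refinement returns a *new* Query; the original is unchanged.
    q2 = q.including("inplace").excluding("cudnn")
    assert "inplace" in q2.include
    assert "cudnn" in q2.exclude
    assert "cudnn" not in q.exclude
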
def __init__(
    self,
    include,
    require=None,
    exclude=None,
    subquery=None,
    position_cutoff=float("inf"),
    extra_optimizations=None,
):
    self.include = OrderedSet(include)
    self.require = require or OrderedSet()
    self.exclude = exclude or OrderedSet()
    self.subquery = subquery or {}
    self.position_cutoff = position_cutoff
    if extra_optimizations is None:
        extra_optimizations = []
    self.extra_optimizations = extra_optimizations
    if isinstance(self.require, (list, tuple)):
        self.require = OrderedSet(self.require)
    if isinstance(self.exclude, (list, tuple)):
        self.exclude = OrderedSet(self.exclude)

def on_prune(self, fgraph, app, reason):
    """Remove Apply instance from set which must be computed"""
    if app not in self.debug_all_apps:
        raise ProtocolError("prune without import")
    self.debug_all_apps.remove(app)

    # UPDATE self.clients
    for i, input in enumerate(OrderedSet(app.inputs)):
        del self.clients[input][app]

    if getattr(app.op, 'destroy_map', OrderedDict()):
        self.destroyers.remove(app)

    # Note: leaving empty client dictionaries in the struct.
    # Why? It's a pain to remove them. I think they aren't doing any
    # harm; they will be deleted in on_detach().

    # UPDATE self.view_i, self.view_o
    for o_idx, i_idx_list in iteritems(
            getattr(app.op, 'view_map', OrderedDict())):
        if len(i_idx_list) > 1:
            # destroying this output invalidates multiple inputs
            raise NotImplementedError()
        o = app.outputs[o_idx]
        i = app.inputs[i_idx_list[0]]
        del self.view_i[o]
        self.view_o[i].remove(o)
        if not self.view_o[i]:
            del self.view_o[i]
    self.stale_droot = True

def on_import(self, fgraph, app, reason):
    """Add Apply instance to set which must be computed"""
    # if app in self.debug_all_apps:
    #     raise ProtocolError("double import")
    # self.debug_all_apps.add(app)
    # print 'DH IMPORT', app, id(app), id(self), len(self.debug_all_apps)

    # If it's a destructive op, add it to our watch list
    if getattr(app.op, 'destroy_map', {}):
        self.destroyers.add(app)

    # add this symbol to the forward and backward maps
    for o_idx, i_idx_list in getattr(app.op, 'view_map', {}).items():
        if len(i_idx_list) > 1:
            raise NotImplementedError(
                'destroying this output invalidates multiple inputs',
                (app.op))
        o = app.outputs[o_idx]
        i = app.inputs[i_idx_list[0]]
        self.view_i[o] = i
        self.view_o.setdefault(i, OrderedSet()).add(o)

    # update self.clients
    for i, input in enumerate(app.inputs):
        self.clients.setdefault(input, {}).setdefault(app, 0)
        self.clients[input][app] += 1

    for i, output in enumerate(app.outputs):
        self.clients.setdefault(output, {})
    self.stale_droot = True

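# --- Illustrative sketch (not part of the original module) ---
# The client bookkeeping in on_import above is a nested reference count:
# clients[var][app] records how many of app's input slots are occupied
# by var. A standalone demonstration of that invariant, using plain
# strings as stand-ins for variables and apply nodes:
def _demo_client_refcount():
    from collections import OrderedDict
    clients = OrderedDict()  # variable -> apply -> number of input slots

    def import_apply(app, inputs):
        # Mirrors the loop in on_import: one increment per input slot,
        # so a variable feeding two slots of the same apply counts twice.
        for var in inputs:
            clients.setdefault(var, OrderedDict()).setdefault(app, 0)
            clients[var][app] += 1

    import_apply("add_inplace", ["x", "x"])
    assert clients["x"]["add_inplace"] == 2
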
def orderings(self):
    """
    Return dict d s.t. d[node] is a list of nodes that must be evaluated
    before node itself can be evaluated.

    This is used primarily by the destroy_handler feature to ensure that
    all clients of any destroyed inputs have already computed their
    outputs.

    Notes
    -----
    This only calls the orderings() function on all features. It does
    not take care of computing the dependencies by itself.

    """
    ords = OrderedDict()
    assert isinstance(self._features, list)
    for feature in self._features:
        if hasattr(feature, 'orderings'):
            orderings = feature.orderings(self)
            if not isinstance(orderings, OrderedDict):
                raise TypeError("Non-deterministic return value from " +
                                str(feature.orderings) +
                                ". Nondeterministic object is " +
                                str(orderings))
            for node, prereqs in iteritems(orderings):
                if not isinstance(prereqs, (list, OrderedSet)):
                    raise TypeError(
                        "prereqs must be a type with a "
                        "deterministic iteration order, or toposort "
                        "will be non-deterministic.")
                ords.setdefault(node, []).extend(prereqs)
    # eliminate duplicate prereqs
    for (node, prereqs) in iteritems(ords):
        ords[node] = list(OrderedSet(prereqs))
    return ords

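# --- Illustrative sketch (not part of the original module) ---
# Any feature that wants to constrain evaluation order only needs to
# expose an orderings(fgraph) method returning a deterministic mapping
# of node -> prerequisites; the aggregator above rejects anything else
# with a TypeError. A minimal conforming feature might look like this
# (the node arguments are placeholders):
from collections import OrderedDict


class SimpleOrderingFeature(object):
    """Toy feature: forces node_b to be evaluated after node_a."""

    def __init__(self, node_a, node_b):
        self.node_a = node_a
        self.node_b = node_b

    def orderings(self, fgraph):
        # Must be an OrderedDict whose values iterate deterministically
        # (a list or an OrderedSet), per the checks above.
        return OrderedDict([(self.node_b, [self.node_a])])
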
def on_attach(self, fgraph):
    """
    When attaching to a new fgraph, check that

    1) This DestroyHandler wasn't already attached to some fgraph
       (its data structures are only set up to serve one)
    2) The FunctionGraph doesn't already have a DestroyHandler.
       This would result in it validating everything twice, causing
       compilation to be slower.

    TODO: WRITEME: what does this do besides the checks?

    """
    ####### Do the checking ###########
    already_there = False
    if self.fgraph is fgraph:
        already_there = True
    if self.fgraph not in [None, fgraph]:
        raise Exception("A DestroyHandler instance can only serve"
                        " one FunctionGraph. (Matthew 6:24)")
    for attr in ('destroyers', 'destroy_handler'):
        if hasattr(fgraph, attr):
            already_there = True

    if already_there:
        # FunctionGraph.attach_feature catches AlreadyThere
        # and cancels the attachment
        raise toolbox.AlreadyThere(
            "DestroyHandler feature is already present or in"
            " conflict with another plugin.")

    ####### end of checking ############

    def get_destroyers_of(r):
        droot, impact, root_destroyer = self.refresh_droot_impact()
        try:
            return [root_destroyer[droot[r]]]
        except Exception:
            return []

    fgraph.destroyers = get_destroyers_of
    fgraph.destroy_handler = self

    self.fgraph = fgraph
    # set of Apply instances with non-null destroy_map
    self.destroyers = OrderedSet()
    self.view_i = {}  # variable -> variable used in calculation
    # variable -> set of variables that use this one as a direct input
    self.view_o = {}
    # clients: how many times does an apply use a given variable
    self.clients = {}  # variable -> apply -> ninputs
    self.stale_droot = True

    # IG: It's unclear if this is meant to be included in deployed code.
    # It looks like it is unnecessary if FunctionGraph is working
    # correctly, so I am commenting uses of it (for speed) but leaving
    # the commented code in place so it is easy to restore for debugging
    # purposes.
    # Note: is there anything like the C preprocessor for python? It
    # would be useful to just ifdef these things out.
    # self.debug_all_apps = set()
    if self.do_imports_on_attach:
        toolbox.Bookkeeper.on_attach(self, fgraph)

def _build_droot_impact(destroy_handler):
    droot = {}           # destroyed view + nonview variables -> foundation
    impact = {}          # destroyed nonview variable -> it + all views of it
    root_destroyer = {}  # root -> destroyer apply

    for app in destroy_handler.destroyers:
        for output_idx, input_idx_list in app.op.destroy_map.items():
            if len(input_idx_list) != 1:
                raise NotImplementedError()
            input_idx = input_idx_list[0]
            input = app.inputs[input_idx]

            # Find the non-view variable which is ultimately viewed by
            # input.
            view_i = destroy_handler.view_i
            _r = input
            while _r is not None:
                r = _r
                _r = view_i.get(r)
            input_root = r

            if input_root in droot:
                raise InconsistencyError(
                    "Multiple destroyers of %s" % input_root)
            droot[input_root] = input_root
            root_destroyer[input_root] = app

            # The code below adds all the variables that are views of r
            # into the OrderedSet input_impact.
            input_impact = OrderedSet()

            q = deque()
            q.append(input_root)
            while len(q) > 0:
                v = q.popleft()
                for n in destroy_handler.view_o.get(v, []):
                    input_impact.add(n)
                    q.append(n)

            for v in input_impact:
                assert v not in droot
                droot[v] = input_root

            impact[input_root] = input_impact
            impact[input_root].add(input_root)

    return droot, impact, root_destroyer

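# --- Illustrative sketch (not part of the original module) ---
# The two traversals in _build_droot_impact are easiest to see on a toy
# view chain. The same logic on plain strings, where v1 is a view of x
# and v2 is a view of v1 (an invented example):
def _demo_droot_impact():
    from collections import deque
    view_i = {"v1": "x", "v2": "v1"}      # view -> variable it views
    view_o = {"x": {"v1"}, "v1": {"v2"}}  # variable -> its direct views
    # Walk view_i backwards to the foundation: the droot of v2 is x.
    r = "v2"
    while r in view_i:
        r = view_i[r]
    assert r == "x"
    # Walk view_o forwards to collect everything aliased to x ("impact").
    impact, q = set(), deque(["x"])
    while q:
        v = q.popleft()
        for n in view_o.get(v, ()):
            impact.add(n)
            q.append(n)
    assert impact == {"v1", "v2"}
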
def on_attach(self, fgraph):
    """
    When attaching to a new fgraph, check that

    1) This DestroyHandler wasn't already attached to some fgraph
       (its data structures are only set up to serve one).
    2) The FunctionGraph doesn't already have a DestroyHandler.
       This would result in it validating everything twice, causing
       compilation to be slower.

    Give the FunctionGraph instance:

    1) A new method "destroyers(var)"
       TODO: what does this do exactly?
    2) A new attribute, "destroy_handler"
       TODO: WRITEME: what does this do besides the checks?

    """
    # Do the checking
    already_there = False
    if self.fgraph is fgraph:
        already_there = True
    if self.fgraph is not None:
        raise Exception(
            "A DestroyHandler instance can only serve one"
            " FunctionGraph. (Matthew 6:24)"
        )
    for attr in ("destroyers", "destroy_handler"):
        if hasattr(fgraph, attr):
            already_there = True

    if already_there:
        # FunctionGraph.attach_feature catches AlreadyThere and cancels
        # the attachment.
        raise toolbox.AlreadyThere(
            "DestroyHandler feature is already present"
            " or in conflict with another plugin."
        )

    # Annotate the FunctionGraph
    self.unpickle(fgraph)
    fgraph.destroy_handler = self

    self.fgraph = fgraph
    # set of Apply instances with non-null destroy_map
    self.destroyers = OrderedSet()
    self.view_i = {}  # variable -> variable used in calculation
    # variable -> set of variables that use this one as a direct input
    self.view_o = {}
    # clients: how many times does an apply use a given variable
    self.clients = OrderedDict()  # variable -> apply -> ninputs
    self.stale_droot = True

    self.debug_all_apps = set()
    if self.do_imports_on_attach:
        toolbox.Bookkeeper.on_attach(self, fgraph)

def register(self, name, obj, *tags, **kwargs):
    """
    Parameters
    ----------
    name : str
        Name of the optimizer.
    obj
        The optimizer to register.
    tags
        Tag names that allow selecting the optimizer.
    kwargs
        If non-empty, should contain only use_db_name_as_tag=False.
        By default, all optimizations registered in EquilibriumDB are
        selected when the EquilibriumDB name is used as a tag. We do not
        want this behavior for some optimizers, like
        local_remove_all_assert. Passing use_db_name_as_tag=False removes
        that behavior: only the optimizer name and the tags specified
        will enable that optimization.

    """
    # N.B. obj need not be an instance of class `GlobalOptimizer`; it can
    # also be an instance of a DB. In the tests, for example, this is not
    # always the case.
    if not isinstance(obj, (DB, opt.GlobalOptimizer, opt.LocalOptimizer)):
        raise TypeError("Object cannot be registered in OptDB", obj)
    if name in self.__db__:
        raise ValueError(
            "The name of the object cannot be an existing"
            " tag or the name of another existing object.",
            obj,
            name,
        )
    if kwargs:
        assert "use_db_name_as_tag" in kwargs
        assert kwargs["use_db_name_as_tag"] is False
    else:
        if self.name is not None:
            tags = tags + (self.name,)
    obj.name = name
    # This restriction exists because in many places we assume that
    # something is in the DB only once.
    if obj.name in self.__db__:
        raise ValueError(
            f'You can\'t register the same optimization multiple times'
            f' in a DB. Tried to register "{obj.name}" again under the'
            f' new name "{name}". Use theano.gof.ProxyDB to work around'
            f' that.'
        )
    self.__db__[name] = OrderedSet([obj])
    self._names.add(name)
    self.__db__[obj.__class__.__name__].add(obj)
    self.add_tags(name, *tags)

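# --- Illustrative usage sketch (not part of the original module) ---
# How register is typically called; `optdb` and `MyOptimizer` are
# hypothetical stand-ins, not names defined in this module.
#
#     optdb.register(
#         "my_opt",        # unique name, also usable as a tag
#         MyOptimizer(),
#         "fast_run",      # extra tags under which it can be selected
#     )
#
#     # Opting out of the implicit DB-name tag:
#     optdb.register("remove_asserts", MyOptimizer(),
#                    use_db_name_as_tag=False)
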
def __init__(self, include, require=None, exclude=None,
             subquery=None, position_cutoff=float('inf'),
             extra_optimizations=None):
    self.include = OrderedSet(include)
    self.require = require or OrderedSet()
    self.exclude = exclude or OrderedSet()
    self.subquery = subquery or {}
    self.position_cutoff = position_cutoff
    if extra_optimizations is None:
        extra_optimizations = []
    self.extra_optimizations = extra_optimizations
    if isinstance(self.require, (list, tuple)):
        self.require = OrderedSet(self.require)
    if isinstance(self.exclude, (list, tuple)):
        self.exclude = OrderedSet(self.exclude)

def __init__(self, include, require=None, exclude=None,
             subquery=None, position_cutoff=None):
    """
    :type position_cutoff: float

    :param position_cutoff: Used by SequenceDB to keep only optimizers
        that are positioned before the cut-off point.

    """
    self.include = OrderedSet(include)
    self.require = require or OrderedSet()
    self.exclude = exclude or OrderedSet()
    self.subquery = subquery or {}
    self.position_cutoff = position_cutoff
    if isinstance(self.require, (list, tuple)):
        self.require = OrderedSet(self.require)
    if isinstance(self.exclude, (list, tuple)):
        self.exclude = OrderedSet(self.exclude)

def on_change_input(self, fgraph, app, i, old_r, new_r, reason):
    """
    app.inputs[i] changed from old_r to new_r.

    """
    if app == 'output':
        # app == 'output' is a special key that means FunctionGraph is
        # redefining which nodes are being considered 'outputs' of the
        # graph.
        pass
    else:
        if app not in self.debug_all_apps:
            raise ProtocolError("change without import")

        # UPDATE self.clients
        self.clients[old_r][app] -= 1
        if self.clients[old_r][app] == 0:
            del self.clients[old_r][app]

        self.clients.setdefault(new_r, OrderedDict()).setdefault(app, 0)
        self.clients[new_r][app] += 1

        # UPDATE self.view_i, self.view_o
        for o_idx, i_idx_list in iteritems(
                getattr(app.op, 'view_map', OrderedDict())):
            if len(i_idx_list) > 1:
                # destroying this output invalidates multiple inputs
                raise NotImplementedError()
            i_idx = i_idx_list[0]
            output = app.outputs[o_idx]
            if i_idx == i:
                if app.inputs[i_idx] is not new_r:
                    raise ProtocolError("wrong new_r on change")
                self.view_i[output] = new_r
                self.view_o[old_r].remove(output)
                if not self.view_o[old_r]:
                    del self.view_o[old_r]
                self.view_o.setdefault(new_r, OrderedSet()).add(output)

    if self.algo == 'fast':
        if app in self.fail_validate:
            del self.fail_validate[app]
        self.fast_destroy(app, reason)
    self.stale_droot = True

def register(self, name, obj, *tags):
    # N.B. obj need not be an instance of class Optimizer; it can also
    # be an instance of a DB. In the tests, for example, this is not
    # always the case.
    if not isinstance(obj, (DB, opt.Optimizer, opt.LocalOptimizer)):
        raise TypeError('Object cannot be registered in OptDB', obj)
    if name in self.__db__:
        raise ValueError('The name of the object cannot be an existing'
                         ' tag or the name of another existing object.',
                         obj, name)
    if self.name is not None:
        tags = tags + (self.name, )
    obj.name = name
    # This restriction exists because in many places we assume that
    # something is in the DB only once.
    if obj.name in self.__db__:
        raise ValueError('You can\'t register the same optimization'
                         ' multiple times in a DB. Tried to register'
                         ' "%s" again under the new name "%s". Use'
                         ' theano.gof.ProxyDB to work around that.' %
                         (obj.name, name))
    self.__db__[name] = OrderedSet([obj])
    self._names.add(name)
    self.__db__[obj.__class__.__name__].add(obj)
    self.add_tags(name, *tags)

def orderings(self, fgraph):
    """
    Return orderings induced by destructive operations.

    Raise InconsistencyError when
    a) attempting to destroy an indestructible variable, or
    b) attempting to destroy a value multiple times, or
    c) an Apply destroys (illegally) one of its own inputs by aliasing

    """
    rval = OrderedDict()

    if self.destroyers:
        # BUILD DATA STRUCTURES
        # CHECK for multiple destructions during construction of variables
        droot, impact, __ignore = self.refresh_droot_impact()

        # check for destruction of constants
        illegal_destroy = [r for r in droot if
                           getattr(r.tag, 'indestructible', False) or
                           isinstance(r, graph.Constant)]
        if illegal_destroy:
            raise InconsistencyError(
                "Attempting to destroy indestructible variables: %s" %
                illegal_destroy)

        # add destroyed variable clients as computational dependencies
        for app in self.destroyers:
            # for each destroyed input...
            for output_idx, input_idx_list in iteritems(app.op.destroy_map):
                destroyed_idx = input_idx_list[0]
                destroyed_variable = app.inputs[destroyed_idx]
                root = droot[destroyed_variable]
                root_impact = impact[root]
                # We generally want to put all clients of things which
                # depend on root as pre-requisites of app.
                # But, app is itself one such client!
                # App will always be a client of the node we're destroying
                # (destroyed_variable), but the tricky thing is when it is
                # also a client of *another variable* viewing on the root.
                # Generally this is illegal (e.g., add_inplace(x, x.T)).
                # In some special cases, though, the in-place op will
                # actually be able to work properly with multiple destroyed
                # inputs (e.g., add_inplace(x, x)). An Op that can still
                # work in this case should declare so via the
                # 'destroyhandler_tolerate_same' attribute or
                # 'destroyhandler_tolerate_aliased' attribute.
                #
                # destroyhandler_tolerate_same should be a list of pairs of
                # the form [(idx0, idx1), (idx0, idx2), ...]
                # The first element of each pair is the input index of a
                # destroyed variable.
                # The second element of each pair is the index of a
                # different input where we will permit exactly the same
                # variable to appear.
                # For example, add_inplace.tolerate_same might be [(0, 1)]
                # if the destroyed input is also allowed to appear as the
                # second argument.
                #
                # destroyhandler_tolerate_aliased is the same sort of list
                # of pairs.
                # op.destroyhandler_tolerate_aliased = [(idx0, idx1)] tells
                # the destroyhandler to IGNORE an aliasing between a
                # destroyed input idx0 and another input idx1.
                # This is generally a bad idea, but it is safe in some
                # cases, such as
                # - the op reads from the aliased idx1 before modifying
                #   idx0
                # - the idx0 and idx1 are guaranteed not to overlap (e.g.
                #   they are pointed at different rows of a matrix).

                # CHECK FOR INPUT ALIASING
                # OPT: pre-compute this on import
                tolerate_same = getattr(app.op,
                                        'destroyhandler_tolerate_same', [])
                assert isinstance(tolerate_same, list)
                tolerated = OrderedSet(idx1 for idx0, idx1 in tolerate_same
                                       if idx0 == destroyed_idx)
                tolerated.add(destroyed_idx)
                tolerate_aliased = getattr(
                    app.op, 'destroyhandler_tolerate_aliased', [])
                assert isinstance(tolerate_aliased, list)
                ignored = OrderedSet(idx1 for idx0, idx1 in tolerate_aliased
                                     if idx0 == destroyed_idx)
                # print 'tolerated', tolerated
                # print 'ignored', ignored
                for i, input in enumerate(app.inputs):
                    if i in ignored:
                        continue
                    if input in root_impact \
                            and (i not in tolerated or
                                 input is not destroyed_variable):
                        raise InconsistencyError(
                            "Input aliasing: %s (%i, %i)"
                            % (app, destroyed_idx, i))

                # add the rule: app must be preceded by all other Apply
                # instances that depend on destroyed_input
                root_clients = OrderedSet()
                for r in root_impact:
                    assert not [a for a, c in self.clients[r].items()
                                if not c]
                    root_clients.update([a for a, c in
                                         self.clients[r].items() if c])
                root_clients.remove(app)
                if root_clients:
                    rval[app] = root_clients

    return rval

class DestroyHandler(toolbox.Bookkeeper):  # noqa
    """
    The DestroyHandler class detects when a graph is impossible to
    evaluate because of aliasing and destructive operations.

    Several data structures are used to do this.

    An Op can use its view_map property to declare that an output may be
    aliased to an input. If that output is destroyed, the input is also
    considered to be destroyed. The view_maps of several Ops can feed into
    one another and form a directed graph. The consequence of destroying
    any variable in such a graph is that all variables in the graph must
    be considered to be destroyed, because they could all be referring to
    the same underlying storage.

    In the current implementation, that graph is a tree, and the root of
    that tree is called the foundation.

    TODO: why "in the current implementation"? is there another
    implementation planned?
    TODO: why is the graph a tree? isn't it possible that one variable
    could be aliased to many variables? for example, don't switch and
    ifelse have to do this?

    The original DestroyHandler (if 0'ed out above) computed several data
    structures from scratch each time it was asked to validate the graph.
    Because this happens potentially thousands of times and each graph to
    validate is extremely similar to the previous one, computing the data
    structures from scratch repeatedly was wasteful and resulted in high
    compile times for large graphs.

    This implementation computes the data structures once at
    initialization and then incrementally updates them.

    It is a work in progress. The following data structures have been
    converted to use the incremental strategy:
        <none>

    The following data structures remain to be converted:
        <unknown>

    """

    pickle_rm_attr = ["destroyers"]

    def __init__(self, do_imports_on_attach=True):
        self.fgraph = None
        self.do_imports_on_attach = do_imports_on_attach

        """
        Maps every variable in the graph to its "foundation" (deepest
        ancestor in view chain).
        TODO: change name to var_to_vroot.

        """
        self.droot = OrderedDict()

        """
        Maps a variable to all variables that are indirect or direct views
        of it (including itself), essentially the inverse of droot.
        TODO: do all variables appear in this dict, or only those that are
        foundations?
        TODO: do only destroyed variables go in here? one old docstring
        said so.
        TODO: rename to x_to_views after reverse engineering what x is.

        """
        self.impact = OrderedDict()

        """
        If a var is destroyed, then this dict will map droot[var] to the
        apply node that destroyed var.
        TODO: rename to vroot_to_destroyer.

        """
        self.root_destroyer = OrderedDict()

    def on_attach(self, fgraph):
        """
        When attaching to a new fgraph, check that

        1) This DestroyHandler wasn't already attached to some fgraph
           (its data structures are only set up to serve one).
        2) The FunctionGraph doesn't already have a DestroyHandler.
           This would result in it validating everything twice, causing
           compilation to be slower.

        Give the FunctionGraph instance:

        1) A new method "destroyers(var)"
           TODO: what does this do exactly?
        2) A new attribute, "destroy_handler"
           TODO: WRITEME: what does this do besides the checks?

        """
        # Do the checking
        already_there = False
        if self.fgraph is fgraph:
            already_there = True
        if self.fgraph is not None:
            raise Exception(
                "A DestroyHandler instance can only serve one"
                " FunctionGraph. (Matthew 6:24)")
        for attr in ('destroyers', 'destroy_handler'):
            if hasattr(fgraph, attr):
                already_there = True

        if already_there:
            # FunctionGraph.attach_feature catches AlreadyThere and
            # cancels the attachment.
            raise toolbox.AlreadyThere(
                "DestroyHandler feature is already present"
                " or in conflict with another plugin.")

        # Annotate the FunctionGraph
        self.unpickle(fgraph)
        fgraph.destroy_handler = self

        self.fgraph = fgraph
        # set of Apply instances with non-null destroy_map
        self.destroyers = OrderedSet()
        # variable -> variable used in calculation
        self.view_i = OrderedDict()
        # variable -> set of variables that use this one as a direct input
        self.view_o = OrderedDict()
        # clients: how many times does an apply use a given variable
        self.clients = OrderedDict()  # variable -> apply -> ninputs
        self.stale_droot = True

        self.debug_all_apps = OrderedSet()
        if self.do_imports_on_attach:
            toolbox.Bookkeeper.on_attach(self, fgraph)

    def unpickle(self, fgraph):
        def get_destroyers_of(r):
            droot, impact, root_destroyer = self.refresh_droot_impact()
            try:
                return [root_destroyer[droot[r]]]
            except Exception:
                return []
        fgraph.destroyers = get_destroyers_of

    def refresh_droot_impact(self):
        """
        Makes sure self.droot, self.impact, and self.root_destroyer are up
        to date, and returns them (see docstrings for these properties
        above).

        """
        if self.stale_droot:
            self.droot, self.impact, self.root_destroyer = \
                _build_droot_impact(self)
            self.stale_droot = False
        return self.droot, self.impact, self.root_destroyer

    def on_detach(self, fgraph):
        if fgraph is not self.fgraph:
            raise Exception("detaching wrong fgraph", fgraph)
        del self.destroyers
        del self.view_i
        del self.view_o
        del self.clients
        del self.stale_droot
        assert self.fgraph.destroy_handler is self
        delattr(self.fgraph, 'destroyers')
        delattr(self.fgraph, 'destroy_handler')
        self.fgraph = None

    def on_import(self, fgraph, app, reason):
        """
        Add Apply instance to set which must be computed.

        """
        if app in self.debug_all_apps:
            raise ProtocolError("double import")
        self.debug_all_apps.add(app)
        # print 'DH IMPORT', app, id(app), id(self), len(self.debug_all_apps)

        # If it's a destructive op, add it to our watch list
        if getattr(app.op, 'destroy_map', {}):
            self.destroyers.add(app)

        # add this symbol to the forward and backward maps
        for o_idx, i_idx_list in iteritems(getattr(app.op, 'view_map', {})):
            if len(i_idx_list) > 1:
                raise NotImplementedError(
                    'destroying this output invalidates multiple inputs',
                    (app.op))
            o = app.outputs[o_idx]
            i = app.inputs[i_idx_list[0]]
            self.view_i[o] = i
            self.view_o.setdefault(i, OrderedSet()).add(o)

        # update self.clients
        for i, input in enumerate(app.inputs):
            self.clients.setdefault(input, OrderedDict()).setdefault(app, 0)
            self.clients[input][app] += 1

        for i, output in enumerate(app.outputs):
            self.clients.setdefault(output, OrderedDict())
        self.stale_droot = True

    def on_prune(self, fgraph, app, reason):
        """
        Remove Apply instance from set which must be computed.

        """
        if app not in self.debug_all_apps:
            raise ProtocolError("prune without import")
        self.debug_all_apps.remove(app)

        # UPDATE self.clients
        for i, input in enumerate(OrderedSet(app.inputs)):
            del self.clients[input][app]

        if getattr(app.op, 'destroy_map', OrderedDict()):
            self.destroyers.remove(app)

        # Note: leaving empty client dictionaries in the struct.
        # Why? It's a pain to remove them. I think they aren't doing any
        # harm; they will be deleted in on_detach().

        # UPDATE self.view_i, self.view_o
        for o_idx, i_idx_list in iteritems(getattr(app.op, 'view_map',
                                                   OrderedDict())):
            if len(i_idx_list) > 1:
                # destroying this output invalidates multiple inputs
                raise NotImplementedError()
            o = app.outputs[o_idx]
            i = app.inputs[i_idx_list[0]]
            del self.view_i[o]
            self.view_o[i].remove(o)
            if not self.view_o[i]:
                del self.view_o[i]
        self.stale_droot = True

    def on_change_input(self, fgraph, app, i, old_r, new_r, reason):
        """
        app.inputs[i] changed from old_r to new_r.

        """
        if app == 'output':
            # app == 'output' is a special key that means FunctionGraph is
            # redefining which nodes are being considered 'outputs' of the
            # graph.
            pass
        else:
            if app not in self.debug_all_apps:
                raise ProtocolError("change without import")

            # UPDATE self.clients
            self.clients[old_r][app] -= 1
            if self.clients[old_r][app] == 0:
                del self.clients[old_r][app]

            self.clients.setdefault(new_r, OrderedDict()).setdefault(app, 0)
            self.clients[new_r][app] += 1

            # UPDATE self.view_i, self.view_o
            for o_idx, i_idx_list in iteritems(getattr(app.op, 'view_map',
                                                       OrderedDict())):
                if len(i_idx_list) > 1:
                    # destroying this output invalidates multiple inputs
                    raise NotImplementedError()
                i_idx = i_idx_list[0]
                output = app.outputs[o_idx]
                if i_idx == i:
                    if app.inputs[i_idx] is not new_r:
                        raise ProtocolError("wrong new_r on change")
                    self.view_i[output] = new_r
                    self.view_o[old_r].remove(output)
                    if not self.view_o[old_r]:
                        del self.view_o[old_r]
                    self.view_o.setdefault(new_r, OrderedSet()).add(output)

        self.stale_droot = True

    def validate(self, fgraph):
        """
        Return None.

        Raise InconsistencyError when
        a) orderings() raises an error
        b) orderings cannot be topologically sorted.

        """
        if self.destroyers:
            ords = self.orderings(fgraph)
            if _contains_cycle(fgraph, ords):
                raise InconsistencyError("Dependency graph contains cycles")
        else:
            # James's Conjecture:
            # If there are no destructive ops, then there can be no cycles.

            # FB: This isn't always true. It can happen that an
            # optimization introduces a node that depends on itself. This
            # is very rare and should not happen in general. It will be
            # caught later, though the error will be far from the source.
            # Still, relying on this conjecture speeds up compilation most
            # of the time. Users should not create such dependencies
            # unless they mess too much with the internals.
            pass
        return True

    def orderings(self, fgraph):
        """
        Return orderings induced by destructive operations.

        Raise InconsistencyError when
        a) attempting to destroy an indestructible variable, or
        b) attempting to destroy a value multiple times, or
        c) an Apply destroys (illegally) one of its own inputs by aliasing

        """
        rval = OrderedDict()

        if self.destroyers:
            # BUILD DATA STRUCTURES
            # CHECK for multiple destructions during construction of
            # variables
            droot, impact, __ignore = self.refresh_droot_impact()

            # check for destruction of constants
            illegal_destroy = [r for r in droot if
                               getattr(r.tag, 'indestructible', False) or
                               isinstance(r, graph.Constant)]
            if illegal_destroy:
                raise InconsistencyError(
                    "Attempting to destroy indestructible variables: %s" %
                    illegal_destroy)

            # add destroyed variable clients as computational dependencies
            for app in self.destroyers:
                # for each destroyed input...
                for output_idx, input_idx_list in iteritems(
                        app.op.destroy_map):
                    destroyed_idx = input_idx_list[0]
                    destroyed_variable = app.inputs[destroyed_idx]
                    root = droot[destroyed_variable]
                    root_impact = impact[root]
                    # We generally want to put all clients of things which
                    # depend on root as pre-requisites of app.
                    # But, app is itself one such client!
                    # App will always be a client of the node we're
                    # destroying (destroyed_variable), but the tricky thing
                    # is when it is also a client of *another variable*
                    # viewing on the root. Generally this is illegal (e.g.,
                    # add_inplace(x, x.T)). In some special cases, though,
                    # the in-place op will actually be able to work
                    # properly with multiple destroyed inputs (e.g.,
                    # add_inplace(x, x)). An Op that can still work in this
                    # case should declare so via the
                    # 'destroyhandler_tolerate_same' attribute or
                    # 'destroyhandler_tolerate_aliased' attribute.
                    #
                    # destroyhandler_tolerate_same should be a list of
                    # pairs of the form [(idx0, idx1), (idx0, idx2), ...]
                    # The first element of each pair is the input index of
                    # a destroyed variable.
                    # The second element of each pair is the index of a
                    # different input where we will permit exactly the same
                    # variable to appear.
                    # For example, add_inplace.tolerate_same might be
                    # [(0, 1)] if the destroyed input is also allowed to
                    # appear as the second argument.
                    #
                    # destroyhandler_tolerate_aliased is the same sort of
                    # list of pairs.
                    # op.destroyhandler_tolerate_aliased = [(idx0, idx1)]
                    # tells the destroyhandler to IGNORE an aliasing
                    # between a destroyed input idx0 and another input
                    # idx1.
                    # This is generally a bad idea, but it is safe in some
                    # cases, such as
                    # - the op reads from the aliased idx1 before modifying
                    #   idx0
                    # - the idx0 and idx1 are guaranteed not to overlap
                    #   (e.g. they are pointed at different rows of a
                    #   matrix).

                    # CHECK FOR INPUT ALIASING
                    # OPT: pre-compute this on import
                    tolerate_same = getattr(
                        app.op, 'destroyhandler_tolerate_same', [])
                    assert isinstance(tolerate_same, list)
                    tolerated = OrderedSet(
                        idx1 for idx0, idx1 in tolerate_same
                        if idx0 == destroyed_idx)
                    tolerated.add(destroyed_idx)
                    tolerate_aliased = getattr(
                        app.op, 'destroyhandler_tolerate_aliased', [])
                    assert isinstance(tolerate_aliased, list)
                    ignored = OrderedSet(
                        idx1 for idx0, idx1 in tolerate_aliased
                        if idx0 == destroyed_idx)
                    # print 'tolerated', tolerated
                    # print 'ignored', ignored
                    for i, input in enumerate(app.inputs):
                        if i in ignored:
                            continue
                        if input in root_impact \
                                and (i not in tolerated or
                                     input is not destroyed_variable):
                            raise InconsistencyError(
                                "Input aliasing: %s (%i, %i)"
                                % (app, destroyed_idx, i))

                    # add the rule: app must be preceded by all other Apply
                    # instances that depend on destroyed_input
                    root_clients = OrderedSet()
                    for r in root_impact:
                        assert not [a for a, c in self.clients[r].items()
                                    if not c]
                        root_clients.update(
                            [a for a, c in self.clients[r].items() if c])
                    root_clients.remove(app)
                    if root_clients:
                        rval[app] = root_clients

        return rval

class DestroyHandler(toolbox.Bookkeeper):
    """
    The DestroyHandler class detects when a graph is impossible to
    evaluate because of aliasing and destructive operations.

    Several data structures are used to do this.

    When an Op uses its view_map property to declare that an output may
    be aliased to an input, then if that output is destroyed, the input
    is also considered to be destroyed. The view_maps of several Ops can
    feed into one another and form a directed graph. The consequence of
    destroying any variable in such a graph is that all variables in the
    graph must be considered to be destroyed, because they could all be
    referring to the same underlying storage.

    In the current implementation, that graph is a tree, and the root of
    that tree is called the foundation. The `droot` property of this
    class maps from every graph variable to its foundation. The `impact`
    property maps backward from the foundation to all of the variables
    that depend on it. When any variable is destroyed, this class marks
    the foundation of that variable as being destroyed, with the
    `root_destroyer` property.

    """

    droot = {}
    """
    destroyed view + nonview variables -> foundation.

    """

    impact = {}
    """
    destroyed nonview variable -> it + all views of it.

    """

    root_destroyer = {}
    """
    root -> destroyer apply.

    """

    def __init__(self, do_imports_on_attach=True):
        self.fgraph = None
        self.do_imports_on_attach = do_imports_on_attach

    def on_attach(self, fgraph):
        """
        When attaching to a new fgraph, check that

        1) This DestroyHandler wasn't already attached to some fgraph
           (its data structures are only set up to serve one)
        2) The FunctionGraph doesn't already have a DestroyHandler.
           This would result in it validating everything twice, causing
           compilation to be slower.

        TODO: WRITEME: what does this do besides the checks?

        """
        # Do the checking
        already_there = False
        if self.fgraph not in [None, fgraph]:
            raise Exception("A DestroyHandler instance can only serve"
                            " one FunctionGraph. (Matthew 6:24)")
        for attr in ('destroyers', 'destroy_handler'):
            if hasattr(fgraph, attr):
                already_there = True
        if already_there:
            # FunctionGraph.attach_feature catches AlreadyThere
            # and cancels the attachment
            raise toolbox.AlreadyThere(
                "DestroyHandler feature is already present or in"
                " conflict with another plugin.")
        # end of checking

        def get_destroyers_of(r):
            droot, impact, root_destroyer = self.refresh_droot_impact()
            try:
                return [root_destroyer[droot[r]]]
            except Exception:
                return []

        fgraph.destroyers = get_destroyers_of
        fgraph.destroy_handler = self

        self.fgraph = fgraph
        # set of Apply instances with non-null destroy_map
        self.destroyers = OrderedSet()
        self.view_i = {}  # variable -> variable used in calculation
        # variable -> set of variables that use this one as a direct input
        self.view_o = {}
        # clients: how many times does an apply use a given variable
        self.clients = {}  # variable -> apply -> ninputs
        self.stale_droot = True

        # IG: It's unclear if this is meant to be included in deployed
        # code. It looks like it is unnecessary if FunctionGraph is
        # working correctly, so I am commenting uses of it (for speed)
        # but leaving the commented code in place so it is easy to
        # restore for debugging purposes.
        # Note: is there anything like the C preprocessor for python? It
        # would be useful to just ifdef these things out.
        # self.debug_all_apps = set()
        if self.do_imports_on_attach:
            toolbox.Bookkeeper.on_attach(self, fgraph)

    def refresh_droot_impact(self):
        if self.stale_droot:
            self.droot, self.impact, self.root_destroyer = \
                _build_droot_impact(self)
            self.stale_droot = False
        return self.droot, self.impact, self.root_destroyer

    def on_detach(self, fgraph):
        if fgraph is not self.fgraph:
            raise Exception("detaching wrong fgraph", fgraph)
        del self.destroyers
        del self.view_i
        del self.view_o
        del self.clients
        del self.stale_droot
        assert self.fgraph.destroy_handler is self
        delattr(self.fgraph, 'destroyers')
        delattr(self.fgraph, 'destroy_handler')
        self.fgraph = None

    def on_import(self, fgraph, app, reason):
        """
        Add Apply instance to set which must be computed.

        """
        # if app in self.debug_all_apps:
        #     raise ProtocolError("double import")
        # self.debug_all_apps.add(app)
        # print 'DH IMPORT', app, id(app), id(self), len(self.debug_all_apps)

        # If it's a destructive op, add it to our watch list
        if getattr(app.op, 'destroy_map', {}):
            self.destroyers.add(app)

        # add this symbol to the forward and backward maps
        for o_idx, i_idx_list in iteritems(getattr(app.op, 'view_map', {})):
            if len(i_idx_list) > 1:
                raise NotImplementedError(
                    'destroying this output invalidates multiple inputs',
                    (app.op))
            o = app.outputs[o_idx]
            i = app.inputs[i_idx_list[0]]
            self.view_i[o] = i
            self.view_o.setdefault(i, OrderedSet()).add(o)

        # update self.clients
        for i, input in enumerate(app.inputs):
            self.clients.setdefault(input, {}).setdefault(app, 0)
            self.clients[input][app] += 1

        for i, output in enumerate(app.outputs):
            self.clients.setdefault(output, {})
        self.stale_droot = True

    def on_prune(self, fgraph, app, reason):
        """
        Remove Apply instance from set which must be computed.

        """
        # if app not in self.debug_all_apps:
        #     raise ProtocolError("prune without import")
        # self.debug_all_apps.remove(app)

        # UPDATE self.clients
        for i, input in enumerate(OrderedSet(app.inputs)):
            del self.clients[input][app]

        if getattr(app.op, 'destroy_map', {}):
            self.destroyers.remove(app)

        # Note: leaving empty client dictionaries in the struct.
        # Why? It's a pain to remove them. I think they aren't doing any
        # harm; they will be deleted in on_detach().

        # UPDATE self.view_i, self.view_o
        for o_idx, i_idx_list in iteritems(getattr(app.op, 'view_map', {})):
            if len(i_idx_list) > 1:
                # destroying this output invalidates multiple inputs
                raise NotImplementedError()
            o = app.outputs[o_idx]
            i = app.inputs[i_idx_list[0]]
            del self.view_i[o]
            self.view_o[i].remove(o)
            if not self.view_o[i]:
                del self.view_o[i]
        self.stale_droot = True

    def on_change_input(self, fgraph, app, i, old_r, new_r, reason):
        """
        app.inputs[i] changed from old_r to new_r.

        """
        if app == 'output':
            # app == 'output' is a special key that means FunctionGraph is
            # redefining which nodes are being considered 'outputs' of the
            # graph.
            pass
        else:
            # if app not in self.debug_all_apps:
            #     raise ProtocolError("change without import")

            # UPDATE self.clients
            self.clients[old_r][app] -= 1
            if self.clients[old_r][app] == 0:
                del self.clients[old_r][app]

            self.clients.setdefault(new_r, {}).setdefault(app, 0)
            self.clients[new_r][app] += 1

            # UPDATE self.view_i, self.view_o
            for o_idx, i_idx_list in iteritems(
                    getattr(app.op, 'view_map', {})):
                if len(i_idx_list) > 1:
                    # destroying this output invalidates multiple inputs
                    raise NotImplementedError()
                i_idx = i_idx_list[0]
                output = app.outputs[o_idx]
                if i_idx == i:
                    if app.inputs[i_idx] is not new_r:
                        raise ProtocolError("wrong new_r on change")
                    self.view_i[output] = new_r
                    self.view_o[old_r].remove(output)
                    if not self.view_o[old_r]:
                        del self.view_o[old_r]
                    self.view_o.setdefault(new_r, OrderedSet()).add(output)

        self.stale_droot = True

    def validate(self, fgraph):
        """
        Return None.

        Raise InconsistencyError when
        a) orderings() raises an error
        b) orderings cannot be topologically sorted.

        """
        if self.destroyers:
            ords = self.orderings(fgraph)
            if _contains_cycle(fgraph, ords):
                raise InconsistencyError(
                    "Dependency graph contains cycles")
        else:
            # James's Conjecture:
            # If there are no destructive ops, then there can be no
            # cycles.
            pass
        return True

    def orderings(self, fgraph):
        """
        Return orderings induced by destructive operations.

        Raise InconsistencyError when
        a) attempting to destroy an indestructible variable, or
        b) attempting to destroy a value multiple times, or
        c) an Apply destroys (illegally) one of its own inputs by aliasing

        """
        rval = OrderedDict()

        if self.destroyers:
            # BUILD DATA STRUCTURES
            # CHECK for multiple destructions during construction of
            # variables
            droot, impact, __ignore = self.refresh_droot_impact()

            # check for destruction of constants
            illegal_destroy = [
                r for r in droot if
                getattr(r.tag, 'indestructible', False) or
                isinstance(r, graph.Constant)]
            if illegal_destroy:
                # print 'destroying illegally'
                raise InconsistencyError(
                    "Attempting to destroy indestructible variables: %s" %
                    illegal_destroy)

            # add destroyed variable clients as computational dependencies
            for app in self.destroyers:
                # for each destroyed input...
                for output_idx, input_idx_list in iteritems(
                        app.op.destroy_map):
                    destroyed_idx = input_idx_list[0]
                    destroyed_variable = app.inputs[destroyed_idx]
                    root = droot[destroyed_variable]
                    root_impact = impact[root]
                    # We generally want to put all clients of things which
                    # depend on root as pre-requisites of app.
                    # But, app is itself one such client!
                    # App will always be a client of the node we're
                    # destroying (destroyed_variable), but the tricky thing
                    # is when it is also a client of *another variable*
                    # viewing on the root. Generally this is illegal (e.g.,
                    # add_inplace(x, x.T)). In some special cases, though,
                    # the in-place op will actually be able to work
                    # properly with multiple destroyed inputs (e.g.,
                    # add_inplace(x, x)). An Op that can still work in this
                    # case should declare so via the
                    # 'destroyhandler_tolerate_same' attribute or
                    # 'destroyhandler_tolerate_aliased' attribute.
                    #
                    # destroyhandler_tolerate_same should be a list of
                    # pairs of the form [(idx0, idx1), (idx0, idx2), ...]
                    # The first element of each pair is the input index of
                    # a destroyed variable.
                    # The second element of each pair is the index of a
                    # different input where we will permit exactly the same
                    # variable to appear.
                    # For example, add_inplace.tolerate_same might be
                    # [(0, 1)] if the destroyed input is also allowed to
                    # appear as the second argument.
                    #
                    # destroyhandler_tolerate_aliased is the same sort of
                    # list of pairs.
                    # op.destroyhandler_tolerate_aliased = [(idx0, idx1)]
                    # tells the destroyhandler to IGNORE an aliasing
                    # between a destroyed input idx0 and another input
                    # idx1.
                    # This is generally a bad idea, but it is safe in some
                    # cases, such as
                    # - the op reads from the aliased idx1 before modifying
                    #   idx0
                    # - the idx0 and idx1 are guaranteed not to overlap
                    #   (e.g. they are pointed at different rows of a
                    #   matrix).

                    # CHECK FOR INPUT ALIASING
                    # OPT: pre-compute this on import
                    tolerate_same = getattr(
                        app.op, 'destroyhandler_tolerate_same', [])
                    assert isinstance(tolerate_same, list)
                    tolerated = OrderedSet(
                        idx1 for idx0, idx1 in tolerate_same
                        if idx0 == destroyed_idx)
                    tolerated.add(destroyed_idx)
                    tolerate_aliased = getattr(
                        app.op, 'destroyhandler_tolerate_aliased', [])
                    assert isinstance(tolerate_aliased, list)
                    ignored = OrderedSet(
                        idx1 for idx0, idx1 in tolerate_aliased
                        if idx0 == destroyed_idx)
                    # print 'tolerated', tolerated
                    # print 'ignored', ignored
                    for i, input in enumerate(app.inputs):
                        if i in ignored:
                            continue
                        if input in root_impact \
                                and (i not in tolerated or
                                     input is not destroyed_variable):
                            raise InconsistencyError(
                                "Input aliasing: %s (%i, %i)"
                                % (app, destroyed_idx, i))

                    # add the rule: app must be preceded by all other Apply
                    # instances that depend on destroyed_input
                    root_clients = OrderedSet()
                    for r in root_impact:
                        assert not [a for a, c in
                                    iteritems(self.clients[r]) if not c]
                        root_clients.update(
                            [a for a, c in iteritems(self.clients[r])
                             if c])
                    root_clients.remove(app)
                    if root_clients:
                        rval[app] = root_clients

        return rval

def get_impact(root, view_o):
    impact = OrderedSet()
    add_impact(root, view_o, impact)
    return impact

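# --- Assumed helper (not shown in this section) ---
# get_impact delegates the traversal to add_impact, which does not appear
# above. A plausible recursive implementation, consistent with how view_o
# is used elsewhere in this file; treat it as a sketch, not the verbatim
# original:
def add_impact(r, view_o, impact):
    # Recursively add every direct and indirect view of r.
    for v in view_o.get(r, []):
        impact.add(v)
        add_impact(v, view_o, impact)
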
def __query__(self, q):
    if not isinstance(q, Query):
        raise TypeError('Expected a Query.', q)
    # The ordered set is needed for deterministic optimization.
    variables = OrderedSet()
    for tag in q.include:
        variables.update(self.__db__[tag])
    for tag in q.require:
        variables.intersection_update(self.__db__[tag])
    for tag in q.exclude:
        variables.difference_update(self.__db__[tag])
    remove = OrderedSet()
    add = OrderedSet()
    for obj in variables:
        if isinstance(obj, DB):
            def_sub_query = q
            if q.extra_optimizations:
                def_sub_query = copy.copy(q)
                def_sub_query.extra_optimizations = []
            sq = q.subquery.get(obj.name, def_sub_query)
            replacement = obj.query(sq)
            replacement.name = obj.name
            remove.add(obj)
            add.add(replacement)
    variables.difference_update(remove)
    variables.update(add)
    return variables

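# --- Illustrative sketch (not part of the original module) ---
# The set algebra performed by __query__, shown independently of the DB
# machinery. The tag-to-optimizer mapping below is invented:
def _demo_query_algebra():
    db = {
        "fast_run": {"opt_a", "opt_b", "opt_c"},
        "inplace": {"opt_b", "opt_c"},
        "unsafe": {"opt_c"},
    }
    # include gathers, require intersects, exclude subtracts -- the same
    # order as the three loops in __query__ above.
    selected = set()
    for tag in ["fast_run"]:
        selected |= db[tag]
    for tag in ["inplace"]:
        selected &= db[tag]
    for tag in ["unsafe"]:
        selected -= db[tag]
    assert selected == {"opt_b"}
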
class Query(object):
    """
    Parameters
    ----------
    position_cutoff : float
        Used by SequenceDB to keep only optimizers that are positioned
        before the cut-off point.

    """

    def __init__(self, include, require=None, exclude=None,
                 subquery=None, position_cutoff=None,
                 extra_optimizations=None):
        self.include = OrderedSet(include)
        self.require = require or OrderedSet()
        self.exclude = exclude or OrderedSet()
        self.subquery = subquery or {}
        self.position_cutoff = position_cutoff
        if extra_optimizations is None:
            extra_optimizations = []
        self.extra_optimizations = extra_optimizations
        if isinstance(self.require, (list, tuple)):
            self.require = OrderedSet(self.require)
        if isinstance(self.exclude, (list, tuple)):
            self.exclude = OrderedSet(self.exclude)

    def __str__(self):
        # %s (not %d) for position_cutoff, which may be None.
        return ("Query{inc=%s,ex=%s,require=%s,subquery=%s,"
                "position_cutoff=%s,extra_opts=%s}" %
                (self.include, self.exclude, self.require, self.subquery,
                 self.position_cutoff, self.extra_optimizations))

    def __setstate__(self, state):
        self.__dict__.update(state)
        if not hasattr(self, 'extra_optimizations'):
            self.extra_optimizations = []

    # Add all optimizations with this tag.
    def including(self, *tags):
        return Query(self.include.union(tags), self.require, self.exclude,
                     self.subquery, self.position_cutoff,
                     self.extra_optimizations)

    # Remove all optimizations with this tag.
    def excluding(self, *tags):
        return Query(self.include, self.require, self.exclude.union(tags),
                     self.subquery, self.position_cutoff,
                     self.extra_optimizations)

    # Keep only optimizations with this tag.
    def requiring(self, *tags):
        return Query(self.include, self.require.union(tags), self.exclude,
                     self.subquery, self.position_cutoff,
                     self.extra_optimizations)

    def register(self, *optimizations):
        return Query(self.include, self.require, self.exclude,
                     self.subquery, self.position_cutoff,
                     self.extra_optimizations + list(optimizations))

class Query:
    """
    Parameters
    ----------
    position_cutoff : float
        Used by SequenceDB to keep only optimizers that are positioned
        before the cut-off point.

    """

    def __init__(
        self,
        include,
        require=None,
        exclude=None,
        subquery=None,
        position_cutoff=math.inf,
        extra_optimizations=None,
    ):
        self.include = OrderedSet(include)
        self.require = require or OrderedSet()
        self.exclude = exclude or OrderedSet()
        self.subquery = subquery or {}
        self.position_cutoff = position_cutoff
        if extra_optimizations is None:
            extra_optimizations = []
        self.extra_optimizations = extra_optimizations
        if isinstance(self.require, (list, tuple)):
            self.require = OrderedSet(self.require)
        if isinstance(self.exclude, (list, tuple)):
            self.exclude = OrderedSet(self.exclude)

    def __str__(self):
        return ("Query{inc=%s,ex=%s,require=%s,subquery=%s,"
                "position_cutoff=%f,extra_opts=%s}" % (
                    self.include,
                    self.exclude,
                    self.require,
                    self.subquery,
                    self.position_cutoff,
                    self.extra_optimizations,
                ))

    def __setstate__(self, state):
        self.__dict__.update(state)
        if not hasattr(self, "extra_optimizations"):
            self.extra_optimizations = []

    # Add all optimizations with this tag.
    def including(self, *tags):
        return Query(
            self.include.union(tags),
            self.require,
            self.exclude,
            self.subquery,
            self.position_cutoff,
            self.extra_optimizations,
        )

    # Remove all optimizations with this tag.
    def excluding(self, *tags):
        return Query(
            self.include,
            self.require,
            self.exclude.union(tags),
            self.subquery,
            self.position_cutoff,
            self.extra_optimizations,
        )

    # Keep only optimizations with this tag.
    def requiring(self, *tags):
        return Query(
            self.include,
            self.require.union(tags),
            self.exclude,
            self.subquery,
            self.position_cutoff,
            self.extra_optimizations,
        )

    def register(self, *optimizations):
        return Query(
            self.include,
            self.require,
            self.exclude,
            self.subquery,
            self.position_cutoff,
            self.extra_optimizations + list(optimizations),
        )

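# --- Illustrative sketch (not part of the original module) ---
# Query.register threads user-supplied optimizations through an otherwise
# immutable query: it returns a new Query with a longer
# extra_optimizations list and leaves the receiver untouched. The
# "my_local_opt" string is a placeholder for a real optimizer object.
def _demo_query_register():
    base = Query(include=["fast_run"])
    extended = base.register("my_local_opt")
    assert base.extra_optimizations == []
    assert extended.extra_optimizations == ["my_local_opt"]
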
class DestroyHandler(toolbox.Bookkeeper):  # noqa
    """
    The DestroyHandler class detects when a graph is impossible to evaluate
    because of aliasing and destructive operations.

    Several data structures are used to do this.

    An Op can use its view_map property to declare that an output may be
    aliased to an input. If that output is destroyed, the input is also
    considered to be destroyed. The view_maps of several Ops can feed into
    one another and form a directed graph. The consequence of destroying any
    variable in such a graph is that all variables in the graph must be
    considered to be destroyed, because they could all be referring to the
    same underlying storage.

    In the current implementation, that graph is a tree, and the root of
    that tree is called the foundation.

    TODO: why "in the current implementation"? Is there another
          implementation planned?
    TODO: why is the graph a tree? Isn't it possible that one variable could
          be aliased to many variables? For example, don't switch and ifelse
          have to do this?

    The original DestroyHandler (if-0'ed out above) computed several data
    structures from scratch each time it was asked to validate the graph.
    Because this happens potentially thousands of times and each graph to
    validate is extremely similar to the previous one, computing the data
    structures from scratch repeatedly was wasteful and resulted in high
    compile times for large graphs.

    This implementation computes the data structures once at initialization
    and then incrementally updates them.

    It is a work in progress. The following data structures have been
    converted to use the incremental strategy:
        <none>

    The following data structures remain to be converted:
        <unknown>
    """

    pickle_rm_attr = ["destroyers"]

    def __init__(self, do_imports_on_attach=True):
        self.fgraph = None
        self.do_imports_on_attach = do_imports_on_attach

        """
        Maps every variable in the graph to its "foundation" (deepest
        ancestor in view chain).
        TODO: change name to var_to_vroot.
        """
        self.droot = OrderedDict()

        """
        Maps a variable to all variables that are indirect or direct views
        of it (including itself), essentially the inverse of droot.
        TODO: do all variables appear in this dict, or only those that are
              foundations?
        TODO: do only destroyed variables go in here? One old docstring said
              so.
        TODO: rename to x_to_views after reverse engineering what x is.
        """
        self.impact = OrderedDict()

        """
        If a var is destroyed, then this dict will map droot[var] to the
        apply node that destroyed var.
        TODO: rename to vroot_to_destroyer.
        """
        self.root_destroyer = OrderedDict()

    def on_attach(self, fgraph):
        """
        When attaching to a new fgraph, check that
            1) This DestroyHandler wasn't already attached to some fgraph
               (its data structures are only set up to serve one).
            2) The FunctionGraph doesn't already have a DestroyHandler.
               This would result in it validating everything twice, causing
               compilation to be slower.

        Give the FunctionGraph instance:
            1) A new method "destroyers(var)"
               TODO: what does this do exactly?
            2) A new attribute, "destroy_handler"
        TODO: WRITEME: what does this do besides the checks?
        """
        # Do the checking #
        already_there = False
        if self.fgraph is fgraph:
            already_there = True
        if self.fgraph is not None:
            raise Exception(
                "A DestroyHandler instance can only serve one"
                " FunctionGraph. (Matthew 6:24)")
        for attr in ('destroyers', 'destroy_handler'):
            if hasattr(fgraph, attr):
                already_there = True

        if already_there:
            # FunctionGraph.attach_feature catches AlreadyThere
            # and cancels the attachment.
            raise toolbox.AlreadyThere(
                "DestroyHandler feature is already present"
                " or in conflict with another plugin.")

        # Annotate the FunctionGraph #
        self.unpickle(fgraph)
        fgraph.destroy_handler = self

        self.fgraph = fgraph
        self.destroyers = OrderedSet()  # set of Apply instances with non-null destroy_map
        self.view_i = OrderedDict()  # variable -> variable used in calculation
        self.view_o = OrderedDict()  # variable -> set of variables that use this one as a direct input
        # clients: how many times does an apply use a given variable
        self.clients = OrderedDict()  # variable -> apply -> ninputs
        self.stale_droot = True

        self.debug_all_apps = OrderedSet()
        if self.do_imports_on_attach:
            toolbox.Bookkeeper.on_attach(self, fgraph)

    def unpickle(self, fgraph):
        def get_destroyers_of(r):
            droot, impact, root_destroyer = self.refresh_droot_impact()
            try:
                return [root_destroyer[droot[r]]]
            except Exception:
                return []
        fgraph.destroyers = get_destroyers_of

    def refresh_droot_impact(self):
        """
        Makes sure self.droot, self.impact, and self.root_destroyer are up
        to date, and returns them (see docstrings for these properties
        above).
        """
        if self.stale_droot:
            droot = OrderedDict()   # destroyed view + nonview variables -> foundation
            impact = OrderedDict()  # destroyed nonview variable -> it + all views of it
            root_destroyer = OrderedDict()  # root -> destroyer apply

            for app in self.destroyers:
                for output_idx, input_idx_list in iteritems(app.op.destroy_map):
                    if len(input_idx_list) != 1:
                        raise NotImplementedError()
                    input_idx = input_idx_list[0]
                    input = app.inputs[input_idx]
                    input_root = getroot(input, self.view_i)
                    if input_root in droot:
                        raise InconsistencyError(
                            "Multiple destroyers of %s" % input_root)
                    droot[input_root] = input_root
                    root_destroyer[input_root] = app
                    input_impact = get_impact(input_root, self.view_o)
                    for v in input_impact:
                        assert v not in droot
                        droot[v] = input_root
                    impact[input_root] = input_impact
                    impact[input_root].add(input_root)
            self.droot, self.impact, self.root_destroyer = \
                droot, impact, root_destroyer
            self.stale_droot = False
        return self.droot, self.impact, self.root_destroyer

    def on_detach(self, fgraph):
        if fgraph is not self.fgraph:
            raise Exception("detaching wrong fgraph", fgraph)
        del self.destroyers
        del self.view_i
        del self.view_o
        del self.clients
        del self.stale_droot
        assert self.fgraph.destroy_handler is self
        delattr(self.fgraph, 'destroyers')
        delattr(self.fgraph, 'destroy_handler')
        self.fgraph = None

    def on_import(self, fgraph, app, reason):
        """Add Apply instance to set which must be computed."""
        if app in self.debug_all_apps:
            raise ProtocolError("double import")
        self.debug_all_apps.add(app)
        # print 'DH IMPORT', app, id(app), id(self), len(self.debug_all_apps)

        # If it's a destructive op, add it to our watch list.
        if getattr(app.op, 'destroy_map', {}):
            self.destroyers.add(app)

        # Add this symbol to the forward and backward maps.
        for o_idx, i_idx_list in iteritems(getattr(app.op, 'view_map', {})):
            if len(i_idx_list) > 1:
                raise NotImplementedError(
                    'destroying this output invalidates multiple inputs',
                    (app.op))
            o = app.outputs[o_idx]
            i = app.inputs[i_idx_list[0]]
            self.view_i[o] = i
            self.view_o.setdefault(i, OrderedSet()).add(o)

        # Update self.clients.
        for i, input in enumerate(app.inputs):
            self.clients.setdefault(input, OrderedDict()).setdefault(app, 0)
            self.clients[input][app] += 1

        for i, output in enumerate(app.outputs):
            self.clients.setdefault(output, OrderedDict())
        self.stale_droot = True

    def on_prune(self, fgraph, app, reason):
        """Remove Apply instance from set which must be computed."""
        if app not in self.debug_all_apps:
            raise ProtocolError("prune without import")
        self.debug_all_apps.remove(app)

        # UPDATE self.clients
        for i, input in enumerate(OrderedSet(app.inputs)):
            del self.clients[input][app]

        if getattr(app.op, 'destroy_map', OrderedDict()):
            self.destroyers.remove(app)

        # Note: leaving empty client dictionaries in the struct.
        # Why? It's a pain to remove them. I think they aren't doing any
        # harm; they will be deleted on_detach().

        # UPDATE self.view_i, self.view_o
        for o_idx, i_idx_list in iteritems(getattr(app.op, 'view_map',
                                                   OrderedDict())):
            if len(i_idx_list) > 1:
                # destroying this output invalidates multiple inputs
                raise NotImplementedError()
            o = app.outputs[o_idx]
            i = app.inputs[i_idx_list[0]]

            del self.view_i[o]
            self.view_o[i].remove(o)
            if not self.view_o[i]:
                del self.view_o[i]
        self.stale_droot = True

    def on_change_input(self, fgraph, app, i, old_r, new_r, reason):
        """app.inputs[i] changed from old_r to new_r."""
        if app == 'output':
            # app == 'output' is a special key that means FunctionGraph is
            # redefining which nodes are being considered 'outputs' of the
            # graph.
            pass
        else:
            if app not in self.debug_all_apps:
                raise ProtocolError("change without import")

            # UPDATE self.clients
            self.clients[old_r][app] -= 1
            if self.clients[old_r][app] == 0:
                del self.clients[old_r][app]
            self.clients.setdefault(new_r, OrderedDict()).setdefault(app, 0)
            self.clients[new_r][app] += 1

            # UPDATE self.view_i, self.view_o
            for o_idx, i_idx_list in iteritems(getattr(app.op, 'view_map',
                                                       OrderedDict())):
                if len(i_idx_list) > 1:
                    # destroying this output invalidates multiple inputs
                    raise NotImplementedError()
                i_idx = i_idx_list[0]
                output = app.outputs[o_idx]
                if i_idx == i:
                    if app.inputs[i_idx] is not new_r:
                        raise ProtocolError("wrong new_r on change")

                    self.view_i[output] = new_r

                    self.view_o[old_r].remove(output)
                    if not self.view_o[old_r]:
                        del self.view_o[old_r]

                    self.view_o.setdefault(new_r, OrderedSet()).add(output)

        self.stale_droot = True

    def validate(self, fgraph):
        """
        Return True.

        Raise InconsistencyError when
            a) orderings() raises an error
            b) orderings cannot be topologically sorted.
        """
        if self.destroyers:
            ords = self.orderings(fgraph)
            if _contains_cycle(fgraph, ords):
                raise InconsistencyError("Dependency graph contains cycles")
        else:
            # James's Conjecture:
            # If there are no destructive ops, then there can be no cycles.

            # FB: This isn't always true. An optimization can introduce a
            # node that depends on itself. This is very rare and should not
            # happen in general. It will be caught later, although the error
            # will be far from its source. But relying on this conjecture
            # should speed up compilation most of the time. Users should not
            # be able to create such a dependency unless they mess too much
            # with the internals.
            pass
        return True

    def orderings(self, fgraph):
        """
        Return orderings induced by destructive operations.

        Raise InconsistencyError when
            a) attempting to destroy an indestructible variable, or
            b) attempting to destroy a value multiple times, or
            c) an Apply destroys (illegally) one of its own inputs by
               aliasing.
        """
        rval = OrderedDict()

        if self.destroyers:
            # BUILD DATA STRUCTURES
            # CHECK for multiple destructions during construction of
            # variables
            droot, impact, __ignore = self.refresh_droot_impact()

            # check for destruction of constants
            illegal_destroy = [r for r in droot if
                               getattr(r.tag, 'indestructible', False) or
                               isinstance(r, graph.Constant)]
            if illegal_destroy:
                raise InconsistencyError(
                    "Attempting to destroy indestructible variables: %s" %
                    illegal_destroy)

            # add destroyed variable clients as computational dependencies
            for app in self.destroyers:
                # for each destroyed input...
                for output_idx, input_idx_list in iteritems(app.op.destroy_map):
                    destroyed_idx = input_idx_list[0]
                    destroyed_variable = app.inputs[destroyed_idx]
                    root = droot[destroyed_variable]
                    root_impact = impact[root]
                    # We generally want to put all clients of things which
                    # depend on root as pre-requisites of app.
                    # But app is itself one such client!
                    # App will always be a client of the node we're
                    # destroying (destroyed_variable); the tricky case is
                    # when it is also a client of *another variable* that is
                    # a view of the root. Generally this is illegal (e.g.,
                    # add_inplace(x, x.T)). In some special cases, though,
                    # the in-place op will actually be able to work properly
                    # with multiple destroyed inputs (e.g.,
                    # add_inplace(x, x)). An Op that can still work in this
                    # case should declare so via the
                    # 'destroyhandler_tolerate_same' attribute or
                    # 'destroyhandler_tolerate_aliased' attribute.
                    #
                    # destroyhandler_tolerate_same should be a list of pairs
                    # of the form [(idx0, idx1), (idx0, idx2), ...]
                    # The first element of each pair is the input index of a
                    # destroyed variable.
                    # The second element of each pair is the index of a
                    # different input where we will permit exactly the same
                    # variable to appear.
                    # For example, add_inplace.tolerate_same might be
                    # [(0, 1)] if the destroyed input is also allowed to
                    # appear as the second argument.
                    #
                    # destroyhandler_tolerate_aliased is the same sort of
                    # list of pairs.
                    # op.destroyhandler_tolerate_aliased = [(idx0, idx1)]
                    # tells the destroyhandler to IGNORE an aliasing between
                    # a destroyed input idx0 and another input idx1.
                    # This is generally a bad idea, but it is safe in some
                    # cases, such as
                    # - the op reads from the aliased idx1 before modifying
                    #   idx0
                    # - the idx0 and idx1 are guaranteed not to overlap
                    #   (e.g. they are pointed at different rows of a
                    #   matrix).

                    # CHECK FOR INPUT ALIASING
                    # OPT: pre-compute this on import
                    tolerate_same = getattr(app.op,
                                            'destroyhandler_tolerate_same',
                                            [])
                    assert isinstance(tolerate_same, list)
                    tolerated = OrderedSet(idx1 for idx0, idx1
                                           in tolerate_same
                                           if idx0 == destroyed_idx)
                    tolerated.add(destroyed_idx)
                    tolerate_aliased = getattr(
                        app.op, 'destroyhandler_tolerate_aliased', [])
                    assert isinstance(tolerate_aliased, list)
                    ignored = OrderedSet(idx1 for idx0, idx1
                                         in tolerate_aliased
                                         if idx0 == destroyed_idx)
                    # print 'tolerated', tolerated
                    # print 'ignored', ignored
                    for i, input in enumerate(app.inputs):
                        if i in ignored:
                            continue
                        if input in root_impact \
                                and (i not in tolerated or
                                     input is not destroyed_variable):
                            raise InconsistencyError(
                                "Input aliasing: %s (%i, %i)"
                                % (app, destroyed_idx, i))

                    # add the rule: app must be preceded by all other Apply
                    # instances that depend on destroyed_input
                    root_clients = OrderedSet()
                    for r in root_impact:
                        assert not [a for a, c in self.clients[r].items()
                                    if not c]
                        root_clients.update([a for a, c in
                                             self.clients[r].items() if c])
                    root_clients.remove(app)
                    if root_clients:
                        rval[app] = root_clients

        return rval
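# The sketch below (not part of the original source) shows how the
# 'destroyhandler_tolerate_same' / 'destroyhandler_tolerate_aliased'
# attributes documented above feed the aliasing check in orderings().
# `FakeOp` and `aliasing_check_indices` are hypothetical names, and plain
# sets stand in for OrderedSet.


class FakeOp(object):
    destroy_map = {0: [0]}                   # output 0 overwrites input 0
    destroyhandler_tolerate_same = [(0, 1)]  # the destroyed variable may
                                             # also appear, identically, at
                                             # input 1: add_inplace(x, x)
    destroyhandler_tolerate_aliased = []     # no aliasing is ignored


def aliasing_check_indices(op, destroyed_idx):
    # Mirrors the body of orderings(): 'tolerated' collects input positions
    # where the exact same variable is allowed; 'ignored' collects positions
    # where aliasing is not checked at all.
    tolerate_same = getattr(op, 'destroyhandler_tolerate_same', [])
    tolerated = set(idx1 for idx0, idx1 in tolerate_same
                    if idx0 == destroyed_idx)
    tolerated.add(destroyed_idx)
    tolerate_aliased = getattr(op, 'destroyhandler_tolerate_aliased', [])
    ignored = set(idx1 for idx0, idx1 in tolerate_aliased
                  if idx0 == destroyed_idx)
    return tolerated, ignored


assert aliasing_check_indices(FakeOp(), 0) == ({0, 1}, set())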
def __query__(self, q):
    if not isinstance(q, Query):
        raise TypeError("Expected a Query.", q)
    # The ordered set is needed for deterministic optimization.
    variables = OrderedSet()
    for tag in q.include:
        variables.update(self.__db__[tag])
    for tag in q.require:
        variables.intersection_update(self.__db__[tag])
    for tag in q.exclude:
        variables.difference_update(self.__db__[tag])
    remove = OrderedSet()
    add = OrderedSet()
    for obj in variables:
        if isinstance(obj, DB):
            def_sub_query = q
            if q.extra_optimizations:
                def_sub_query = copy.copy(q)
                def_sub_query.extra_optimizations = []
            sq = q.subquery.get(obj.name, def_sub_query)

            replacement = obj.query(sq)
            replacement.name = obj.name
            remove.add(obj)
            add.add(replacement)
    variables.difference_update(remove)
    variables.update(add)
    return variables
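# A toy illustration (not from the original source) of the set algebra that
# __query__ performs over the DB's tag -> optimizations mapping: include
# tags are unioned, require tags intersected, exclude tags subtracted. The
# `db` contents and the helper name `query_tags` are made up; sub-DB
# recursion and extra_optimizations are omitted.
from collections import defaultdict


def query_tags(db, include, require=(), exclude=()):
    result = set()
    for tag in include:
        result.update(db[tag])                # union of all included tags
    for tag in require:
        result.intersection_update(db[tag])   # keep only required tags
    for tag in exclude:
        result.difference_update(db[tag])     # drop excluded tags
    return result


db = defaultdict(set)
db['fast_run'].update(['opt_a', 'opt_b', 'opt_c'])
db['inplace'].add('opt_b')
db['unsafe'].add('opt_c')
assert query_tags(db, include=['fast_run'],
                  exclude=['unsafe']) == set(['opt_a', 'opt_b'])
assert query_tags(db, include=['fast_run'],
                  require=['inplace']) == set(['opt_b'])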
class DestroyHandler(toolbox.Bookkeeper):
    """
    The DestroyHandler class detects when a graph is impossible to evaluate
    because of aliasing and destructive operations.

    Several data structures are used to do this.

    When an Op uses its view_map property to declare that an output may be
    aliased to an input, then if that output is destroyed, the input is also
    considered to be destroyed. The view_maps of several Ops can feed into
    one another and form a directed graph. The consequence of destroying any
    variable in such a graph is that all variables in the graph must be
    considered to be destroyed, because they could all be referring to the
    same underlying storage.

    In the current implementation, that graph is a tree, and the root of
    that tree is called the foundation.

    The `droot` property of this class maps from every graph variable to
    its foundation. The `impact` property maps backward from the foundation
    to all of the variables that depend on it. When any variable is
    destroyed, this class marks the foundation of that variable as being
    destroyed, with the `root_destroyer` property.
    """

    droot = {}
    """
    destroyed view + nonview variables -> foundation
    """

    impact = {}
    """
    destroyed nonview variable -> it + all views of it
    """

    root_destroyer = {}
    """
    root -> destroyer apply
    """

    def __init__(self, do_imports_on_attach=True):
        self.fgraph = None
        self.do_imports_on_attach = do_imports_on_attach

    def on_attach(self, fgraph):
        """
        When attaching to a new fgraph, check that
            1) This DestroyHandler wasn't already attached to some fgraph
               (its data structures are only set up to serve one).
            2) The FunctionGraph doesn't already have a DestroyHandler.
               This would result in it validating everything twice, causing
               compilation to be slower.

        TODO: WRITEME: what does this do besides the checks?
        """
        # Do the checking #
        already_there = False
        if self.fgraph not in [None, fgraph]:
            raise Exception("A DestroyHandler instance can only serve"
                            " one FunctionGraph. (Matthew 6:24)")
        for attr in ('destroyers', 'destroy_handler'):
            if hasattr(fgraph, attr):
                already_there = True

        if already_there:
            # FunctionGraph.attach_feature catches AlreadyThere
            # and cancels the attachment.
            raise toolbox.AlreadyThere(
                "DestroyHandler feature is already present or in"
                " conflict with another plugin.")

        # end of checking #

        def get_destroyers_of(r):
            droot, impact, root_destroyer = self.refresh_droot_impact()
            try:
                return [root_destroyer[droot[r]]]
            except Exception:
                return []

        fgraph.destroyers = get_destroyers_of
        fgraph.destroy_handler = self

        self.fgraph = fgraph
        self.destroyers = OrderedSet()  # set of Apply instances with non-null destroy_map
        self.view_i = {}  # variable -> variable used in calculation
        self.view_o = {}  # variable -> set of variables that use this one as a direct input
        # clients: how many times does an apply use a given variable
        self.clients = {}  # variable -> apply -> ninputs
        self.stale_droot = True

        # IG: It's unclear if this is meant to be included in deployed code.
        # It looks like it is unnecessary if FunctionGraph is working
        # correctly, so I am commenting out uses of it (for speed) but
        # leaving the commented code in place so it is easy to restore for
        # debugging purposes.
        # Note: is there anything like the C preprocessor for Python? It
        # would be useful to just #ifdef these things out.
        # self.debug_all_apps = set()

        if self.do_imports_on_attach:
            toolbox.Bookkeeper.on_attach(self, fgraph)

    def refresh_droot_impact(self):
        if self.stale_droot:
            self.droot, self.impact, self.root_destroyer = \
                self._build_droot_impact()
            self.stale_droot = False
        return self.droot, self.impact, self.root_destroyer

    def _build_droot_impact(self):
        droot = {}   # destroyed view + nonview variables -> foundation
        impact = {}  # destroyed nonview variable -> it + all views of it
        root_destroyer = {}  # root -> destroyer apply

        for app in self.destroyers:
            for output_idx, input_idx_list in iteritems(app.op.destroy_map):
                if len(input_idx_list) != 1:
                    raise NotImplementedError()
                input_idx = input_idx_list[0]
                input = app.inputs[input_idx]
                input_root = getroot(input, self.view_i)
                if input_root in droot:
                    raise InconsistencyError(
                        "Multiple destroyers of %s" % input_root)
                droot[input_root] = input_root
                root_destroyer[input_root] = app
                # input_impact = set([input_root])
                # add_impact(input_root, self.view_o, input_impact)
                input_impact = get_impact(input_root, self.view_o)
                for v in input_impact:
                    assert v not in droot
                    droot[v] = input_root
                impact[input_root] = input_impact
                impact[input_root].add(input_root)

        return droot, impact, root_destroyer

    def on_detach(self, fgraph):
        if fgraph is not self.fgraph:
            raise Exception("detaching wrong fgraph", fgraph)
        del self.destroyers
        del self.view_i
        del self.view_o
        del self.clients
        del self.stale_droot
        assert self.fgraph.destroy_handler is self
        delattr(self.fgraph, 'destroyers')
        delattr(self.fgraph, 'destroy_handler')
        self.fgraph = None

    def on_import(self, fgraph, app, reason):
        """Add Apply instance to set which must be computed."""
        # if app in self.debug_all_apps:
        #     raise ProtocolError("double import")
        # self.debug_all_apps.add(app)
        # print 'DH IMPORT', app, id(app), id(self), len(self.debug_all_apps)

        # If it's a destructive op, add it to our watch list.
        if getattr(app.op, 'destroy_map', {}):
            self.destroyers.add(app)

        # Add this symbol to the forward and backward maps.
        for o_idx, i_idx_list in iteritems(getattr(app.op, 'view_map', {})):
            if len(i_idx_list) > 1:
                raise NotImplementedError(
                    'destroying this output invalidates multiple inputs',
                    (app.op))
            o = app.outputs[o_idx]
            i = app.inputs[i_idx_list[0]]
            self.view_i[o] = i
            self.view_o.setdefault(i, OrderedSet()).add(o)

        # Update self.clients.
        for i, input in enumerate(app.inputs):
            self.clients.setdefault(input, {}).setdefault(app, 0)
            self.clients[input][app] += 1

        for i, output in enumerate(app.outputs):
            self.clients.setdefault(output, {})
        self.stale_droot = True

    def on_prune(self, fgraph, app, reason):
        """Remove Apply instance from set which must be computed."""
        # if app not in self.debug_all_apps:
        #     raise ProtocolError("prune without import")
        # self.debug_all_apps.remove(app)

        # UPDATE self.clients
        for i, input in enumerate(OrderedSet(app.inputs)):
            del self.clients[input][app]

        if getattr(app.op, 'destroy_map', {}):
            self.destroyers.remove(app)

        # Note: leaving empty client dictionaries in the struct.
        # Why? It's a pain to remove them. I think they aren't doing any
        # harm; they will be deleted on_detach().

        # UPDATE self.view_i, self.view_o
        for o_idx, i_idx_list in iteritems(getattr(app.op, 'view_map', {})):
            if len(i_idx_list) > 1:
                # destroying this output invalidates multiple inputs
                raise NotImplementedError()
            o = app.outputs[o_idx]
            i = app.inputs[i_idx_list[0]]

            del self.view_i[o]
            self.view_o[i].remove(o)
            if not self.view_o[i]:
                del self.view_o[i]
        self.stale_droot = True

    def on_change_input(self, fgraph, app, i, old_r, new_r, reason):
        """app.inputs[i] changed from old_r to new_r."""
        if app == 'output':
            # app == 'output' is a special key that means FunctionGraph is
            # redefining which nodes are being considered 'outputs' of the
            # graph.
            pass
        else:
            # if app not in self.debug_all_apps:
            #     raise ProtocolError("change without import")

            # UPDATE self.clients
            self.clients[old_r][app] -= 1
            if self.clients[old_r][app] == 0:
                del self.clients[old_r][app]
            self.clients.setdefault(new_r, {}).setdefault(app, 0)
            self.clients[new_r][app] += 1

            # UPDATE self.view_i, self.view_o
            for o_idx, i_idx_list in iteritems(getattr(app.op, 'view_map',
                                                       {})):
                if len(i_idx_list) > 1:
                    # destroying this output invalidates multiple inputs
                    raise NotImplementedError()
                i_idx = i_idx_list[0]
                output = app.outputs[o_idx]
                if i_idx == i:
                    if app.inputs[i_idx] is not new_r:
                        raise ProtocolError("wrong new_r on change")

                    self.view_i[output] = new_r

                    self.view_o[old_r].remove(output)
                    if not self.view_o[old_r]:
                        del self.view_o[old_r]

                    self.view_o.setdefault(new_r, OrderedSet()).add(output)

        self.stale_droot = True

    def validate(self, fgraph):
        """
        Return True.

        Raise InconsistencyError when
            a) orderings() raises an error
            b) orderings cannot be topologically sorted.
        """
        if self.destroyers:
            ords = self.orderings(fgraph)
            if _contains_cycle(fgraph, ords):
                raise InconsistencyError(
                    "Dependency graph contains cycles")
        else:
            # James's Conjecture:
            # If there are no destructive ops, then there can be no cycles.
            pass
        return True

    def orderings(self, fgraph):
        """
        Return orderings induced by destructive operations.

        Raise InconsistencyError when
            a) attempting to destroy an indestructible variable, or
            b) attempting to destroy a value multiple times, or
            c) an Apply destroys (illegally) one of its own inputs by
               aliasing.
        """
        rval = OrderedDict()

        if self.destroyers:
            # BUILD DATA STRUCTURES
            # CHECK for multiple destructions during construction of
            # variables
            droot, impact, __ignore = self.refresh_droot_impact()

            # check for destruction of constants
            illegal_destroy = [
                r for r in droot if
                getattr(r.tag, 'indestructible', False) or
                isinstance(r, graph.Constant)]
            if illegal_destroy:
                # print 'destroying illegally'
                raise InconsistencyError(
                    "Attempting to destroy indestructible variables: %s" %
                    illegal_destroy)

            # add destroyed variable clients as computational dependencies
            for app in self.destroyers:
                # for each destroyed input...
                for output_idx, input_idx_list in iteritems(app.op.destroy_map):
                    destroyed_idx = input_idx_list[0]
                    destroyed_variable = app.inputs[destroyed_idx]
                    root = droot[destroyed_variable]
                    root_impact = impact[root]
                    # We generally want to put all clients of things which
                    # depend on root as pre-requisites of app.
                    # But app is itself one such client!
                    # App will always be a client of the node we're
                    # destroying (destroyed_variable); the tricky case is
                    # when it is also a client of *another variable* that is
                    # a view of the root. Generally this is illegal (e.g.,
                    # add_inplace(x, x.T)). In some special cases, though,
                    # the in-place op will actually be able to work properly
                    # with multiple destroyed inputs (e.g.,
                    # add_inplace(x, x)). An Op that can still work in this
                    # case should declare so via the
                    # 'destroyhandler_tolerate_same' attribute or
                    # 'destroyhandler_tolerate_aliased' attribute.
                    #
                    # destroyhandler_tolerate_same should be a list of pairs
                    # of the form [(idx0, idx1), (idx0, idx2), ...]
                    # The first element of each pair is the input index of a
                    # destroyed variable.
                    # The second element of each pair is the index of a
                    # different input where we will permit exactly the same
                    # variable to appear.
                    # For example, add_inplace.tolerate_same might be
                    # [(0, 1)] if the destroyed input is also allowed to
                    # appear as the second argument.
                    #
                    # destroyhandler_tolerate_aliased is the same sort of
                    # list of pairs.
                    # op.destroyhandler_tolerate_aliased = [(idx0, idx1)]
                    # tells the destroyhandler to IGNORE an aliasing between
                    # a destroyed input idx0 and another input idx1.
                    # This is generally a bad idea, but it is safe in some
                    # cases, such as
                    # - the op reads from the aliased idx1 before modifying
                    #   idx0
                    # - the idx0 and idx1 are guaranteed not to overlap
                    #   (e.g. they are pointed at different rows of a
                    #   matrix).

                    # CHECK FOR INPUT ALIASING
                    # OPT: pre-compute this on import
                    tolerate_same = getattr(app.op,
                                            'destroyhandler_tolerate_same',
                                            [])
                    assert isinstance(tolerate_same, list)
                    tolerated = OrderedSet(idx1 for idx0, idx1
                                           in tolerate_same
                                           if idx0 == destroyed_idx)
                    tolerated.add(destroyed_idx)
                    tolerate_aliased = getattr(
                        app.op, 'destroyhandler_tolerate_aliased', [])
                    assert isinstance(tolerate_aliased, list)
                    ignored = OrderedSet(idx1 for idx0, idx1
                                         in tolerate_aliased
                                         if idx0 == destroyed_idx)
                    # print 'tolerated', tolerated
                    # print 'ignored', ignored
                    for i, input in enumerate(app.inputs):
                        if i in ignored:
                            continue
                        if input in root_impact \
                                and (i not in tolerated or
                                     input is not destroyed_variable):
                            raise InconsistencyError(
                                "Input aliasing: %s (%i, %i)"
                                % (app, destroyed_idx, i))

                    # add the rule: app must be preceded by all other Apply
                    # instances that depend on destroyed_input
                    root_clients = OrderedSet()
                    for r in root_impact:
                        assert not [a for a, c in
                                    iteritems(self.clients[r]) if not c]
                        root_clients.update([a for a, c in
                                             iteritems(self.clients[r])
                                             if c])
                    root_clients.remove(app)
                    if root_clients:
                        rval[app] = root_clients

        return rval
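# A self-contained sketch (an assumption: it mirrors, but does not reuse,
# the module's getroot/get_impact helpers) of how view chains collapse to a
# foundation: view_i maps each view to the variable it aliases, so chasing
# it to a fixed point yields droot's value, while walking view_o outward
# from the foundation collects the impact set.


def getroot_sketch(r, view_i):
    # Follow the view chain until a variable that views nothing: the
    # foundation.
    while r in view_i:
        r = view_i[r]
    return r


def get_impact_sketch(root, view_o):
    # Collect all direct and indirect views of root.
    impact = set()
    frontier = list(view_o.get(root, ()))
    while frontier:
        v = frontier.pop()
        if v not in impact:
            impact.add(v)
            frontier.extend(view_o.get(v, ()))
    return impact


view_i = {'b': 'a', 'c': 'b'}                # c is a view of b, which views a
view_o = {'a': set(['b']), 'b': set(['c'])}  # the inverse mapping
assert getroot_sketch('c', view_i) == 'a'
assert get_impact_sketch('a', view_o) == set(['b', 'c'])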