class FlowGeom(Polygon):
    """
    A geometric representation of a flow that runs from a begin node
    to an end node.
    """
    angular_thickness = Field(
        """
        Thickness of the flow, measured in radians
        """,
        __default_value__=0.05)
    network_geom = Field(
        """
        An object that will produce sides of the polygon to be drawn
        for this `FlowGeom`.
        """)
    begin_node_geom = Field(
        """
        The node geometry where this flow begins.
        """)
    end_node_geom = Field(
        """
        The node geometry where this flow ends.
        """)

    @lazyfield
    def sides(self):
        """
        Sides of this flow, as computed by the governing network geometry.
        """
        network = self.network_geom
        return network.get_sides(self)
class Sphere(WithFields):
    """
    A spherical region in 3D space.
    """
    position_center = Field(
        """
        Three dimensional array providing position of the sphere's center.
        """)
    radius = Field(
        """
        Radius of the sphere
        """)
    closed = Field(
        """
        Is the sphere closed? A closed sphere contains its boundary.
        """,
        __default_value__=True)

    @property
    def center(self):
        """
        Center of this sphere.

        The methods below referenced `self.center`, but the only declared
        field is `position_center` -- this alias keeps both spellings
        working.
        """
        return self.position_center

    @property
    def volume(self):
        """
        Volume of this sphere: (4/3) * pi * r^3.
        """
        return (4. / 3.) * math.pi * math.pow(self.radius, 3)

    @property
    def bbox(self):
        """
        Axis-aligned bounding box, as (lower corner, upper corner).
        """
        return (self.center - self.radius, self.center + self.radius)

    def contains(self, position):
        """
        Is `position` inside this sphere?

        Boundary points count as contained only when the sphere is closed.
        NOTE(review): `axis=0` assumes `position` is a single 3-vector (or
        positions stacked along columns) -- confirm against callers.
        """
        check = np.less_equal if self.closed else np.less
        return check(
            np.linalg.norm(position - self.center, axis=0),
            self.radius)
class AnalysisSpec(WithFields):
    """
    Specify a brain-circuit analysis.
    """
    sample_size = Field("""
        Number of individual sample measurements for each set of
        parameter values.
        """,
        __default_value__=100)
    size_roi = Field("""
        Size of ROIs that will be sampled in circuit's physical space.
        """,
        __default_value__=50. * np.ones(3))
    path_reports = Field("""
        Location where the reports will be posted.
        """,
        # NOTE: evaluated once, at class-definition time, from the
        # process's working directory.
        __default_value__=os.path.join(os.getcwd(), "reports"))
    morphologies_interneurons = Field("""
        Interneuron morphologies that are stained by markers.
        """,
        __default_value__=[
            "BP", "BTC", "CHC", "DB", "LBC", "NBC", "MC", "SBC", "SSC"])
    number_cortical_thickness_bins = Field("""
        Number of bins for by depth or by height analyses.
        """,
        # `np.int` was removed in NumPy 1.24; the builtin `int` is the
        # documented replacement (here a plain literal suffices).
        __default_value__=int(50))
class ChartGeometry:
    """
    Attributes that can be mixed into a `Geometry` to obtain an object
    usable specifically as a `Chart`'s `Geometry`.
    """
    chart = Field("""
        The chart object that defines this `NodeGeometry`'s behavior.
        """)
    label = Field("""
        A string label to track this `ChartGeometry` instance.
        """)
    size = Field("""
        Size of this `ChartGeometry` --- with data-type determined by its
        owning `self.chart`.
        """)
    position = Field("""
        Position of this `ChartGeometry` --- with data-type determined by
        its owning `self.chart`.
        """)
    facecolor = LambdaField(
        """
        Color for this `ChartGeometry`'s face. You may provide a value,
        otherwise this `ChartGeometry` instance will query it's owning
        `self.chart`.
        """,
        lambda self: self.chart.get_color(self))

    def __hash__(self):
        """
        Hash by label, so that `ChartGeometry` instances can serve as
        keys in mappings.
        """
        return hash(self.label)
class NodeGeom(Polygon):
    """
    Geometry to represent nodes, which will be placed on the
    circumference of a circle.
    """
    network_geom = Field(
        """
        The network geometry that controls this `NodeGeom`.
        """)
    position = Field(
        """
        Position of this node --- type should be determined by the
        network geometry object.
        """)
    thickness = Field(
        """
        Thickness of this node --- type should be the same as position.
        """)

    @lazyfield
    def sides(self):
        """
        Sides that determine where this node geometry is placed,
        as computed by the controlling network geometry.
        """
        controller = self.network_geom
        return controller.get_sides(self)
class Section(WithFields):
    """
    Section in a report.
    """
    title = Field("""
        Title of this section.
        """)
    content = Field("""
        A string contaning text for this `Section` instance.
        This string may require formatting, and can hence be treated as a
        template to be filled in...
        """)

    def __init__(self, title, content, *args, **kwargs):
        """
        Initialize with content.

        Arguments
        ---------------
        title :: Title of this section.
        content :: A string that may require formatting.
        """
        super().__init__(*args, title=title, content=content, **kwargs)

    @classmethod
    def introduction(cls, content):
        """An Introduction section with the given content."""
        return cls("Introduction", content)

    @classmethod
    def methods(cls, content):
        """A Methods section with the given content."""
        return cls("Methods", content)

    @classmethod
    def results(cls, content):
        """A Results section with the given content."""
        return cls("Results", content)

    @classmethod
    def conclusion(cls, content):
        """A Conclusion section with the given content."""
        return cls("Conclusion", content)

    @classmethod
    def discussion(cls, content):
        """A Discussion section with the given content."""
        return cls("Discussion", content)

    def __call__(self, description_model=None):
        """
        Generate content of this section using the description provided.

        Arguments
        ---------------
        description_model :: Mapping used to fill this section's content
        template; missing keys resolve to "<Not Provided>".
        """
        # A mutable default argument (`{}`) is shared across calls; use
        # `None` as the sentinel instead. Behavior is unchanged since the
        # mapping was only ever read.
        if description_model is None:
            description_model = {}
        return {
            "title": self.title,
            "content": self.content.format_map(
                defaultdict(
                    lambda: "<Not Provided>",
                    description_model))}
class Document(WithFields):
    """
    The root document
    """
    title = Field("""
        Title of this document element.
        """)
    author = Field("""
        Author of this document.
        """,
        __default_value__=Author.anonymous)
    chapters = Field("""
        A chapter is any document that may have sections.
        """,
        __default_value__=[])

    def __init__(self, title, *args, **kwargs):
        """
        Initialize this `Document` with a title, wiring itself in as the
        parent of its sections and chapters.
        """
        # This document is the default parent for anything built here.
        if "parent" not in kwargs:
            kwargs["parent"] = self
        # For each class-declared section passed in as a keyword, set this
        # document as its parent -- but only if the value is a mutable
        # mapping (a TypeError means it was already a constructed object).
        for section in self.get_class_sections():
            name_section = section.__attr_name__
            if name_section in kwargs:
                try:
                    kwargs[name_section]["parent"] = self
                except TypeError:
                    pass
        super().__init__(title=title, *args, **kwargs)
        # Chapters are constructed objects; re-parent them after init.
        for chapter in self.chapters:
            chapter.parent = self

    @field
    def parent(self):
        """
        Parent of this document instance...
        The root document has no parent.
        """
        return None

    @field
    def sections(self):
        """
        An ordered sequence of elements contained in this document.
        """
        return OrderedDict()

    @classmethod
    def get_class_sections(cls):
        """
        Sections defined in this `Document class`
        Override this if the behavior changes.
        TODO: Figure out how to introspect attributes to automate...
        """
        return []
class SimpleUniformRandomConnectivity(CircuitConnectivity, WithFields):
    """
    A circuit in which a neuron is assigned a Poisson-distributed
    afferent degree, and that many randomly chosen afferent neighbors.
    """
    mean_afferent_degree = Field("""
        A post-synaptic cell (irrespecitive of it's mtype) will be given a
        Poisson distributed number of afferent connections with the mean
        equal to this Field's value.
        """)
    mean_synapse_count = Field("""
        Mean number of synapses of a connection.
        """,
        __default_value__=5)

    def get_afferent_degree(self, post_synaptic_cell, *args, **kwargs):
        """
        Number of in-coming connections of a post-synaptic neuron,
        drawn from a Poisson distribution with mean
        `self.mean_afferent_degree`.
        """
        return\
            np.random.poisson(self.mean_afferent_degree)

    def _synapse_count(self, *args, **kwargs):
        # At least one synapse per connection; note the `1.` makes the
        # result a float, matching `get_synapse_counts` below.
        return 1. + np.random.poisson(self.mean_synapse_count)

    def get_synapse_counts(self, connections):
        """
        Synapse counts (each >= 1), one per row of `connections`.
        """
        return\
            1. + np.random.poisson(
                self.mean_synapse_count,
                size=connections.shape[0])

    def get_afferent_gids(self, post_synaptic_cell, cells):
        """
        GIDs of cells afferent on a post-synaptic cell, sampled without
        replacement.

        Arguments
        -------------
        post_synaptic_cell :: the post-synaptic cell (used only to draw
            its afferent degree)
        cells :: pandas.DataFrame containing cells in the circuit
        """
        return\
            np.sort(np.random.choice(
                cells.index.values,
                self.get_afferent_degree(post_synaptic_cell),
                replace=False))

    def get_afferent_connections(self, post_gid, post_cell, cells):
        """
        A pandas.DataFrame of (pre_gid, post_gid) connections afferent on
        the cell with GID `post_gid`.
        """
        return pd.DataFrame({
            "pre_gid": self.get_afferent_gids(post_cell, cells),
            "post_gid": post_gid
        })
class Geometry(ABC, WithFields):
    """
    Base class for Geometries.
    """
    label = Field(
        """
        A label to be displayed.
        """)
    children = Field(
        """
        `Geometry`s spawned from this `Geometry`.
        """,
        # NOTE(review): a single list object as the declared default --
        # confirm the Field framework copies defaults per instance,
        # otherwise children would be shared across instances.
        __default_value__=[])

    @lazyfield
    def identifier(self):
        """
        Identifier can be used as a key in a mapping providing features
        for this `Geometry`.
        Override the return to specialize...
        """
        return self.label

    def spawn(self, type_geometry, *args, **kwargs):
        """
        Create a new geometry.

        Note: returns the full list of children, not just the newly
        spawned geometry.
        """
        self.children.append(type_geometry(*args, **kwargs))
        return self.children

    @abstractmethod
    def points(self, number):
        """
        Points on this `Geometry`'s boundary.
        """
        raise NotImplementedError

    @abstractmethod
    def _draw_this(self, *args, **kwargs):
        """
        Draw only this geometry.
        """
        raise NotImplementedError

    def draw(self, *args, **kwargs):
        """
        Draw this geometry, and all the geometries it has spawned.
        """
        # Children first; this geometry itself is drawn last, and its
        # axes (or other return value) is what the caller receives.
        for child in self.children:
            child.draw(*args, **kwargs)
        return self._draw_this(*args, **kwargs)
class CircuitAnalysisReport(Report):
    """
    Add some circuit analysis specific attributes to `Report`
    """
    provenance_model = Field("""
        Either a `class CircuitProvenance` instance or a dict providing
        values for the fields of `class CircuitProvenance`.
        """,
        __as__=CircuitProvenance)
    figures = Field("""
        A dict mapping label to an object with a `.graphic` and `.caption`
        attributes.
        """,
        __default_value__={})
    references = Field("""
        References of literature cited in this report.
        """,
        __type__=Mapping,
        __default_value__={})
    content = LambdaField(
        """
        All text as a single string.
        """,
        lambda self: (self.abstract
                      + self.introduction
                      + self.methods
                      + self.results
                      + self.discussion))

    @lazyfield
    def field_values(self):
        """
        A dict of values for the fields of this report, suitable for
        filling templates.
        """
        # `phenomenon` may be an object carrying a `.name`, or a plain
        # value that must be turned into a name.
        try:
            name_phenomenon = self.phenomenon.name
        except AttributeError:
            name_phenomenon = make_name(self.phenomenon, separator="-")
        return\
            dict(
                circuit=OrderedDict((
                    ("animal", self.provenance_model.animal),
                    ("age", self.provenance_model.age),
                    ("brain_region", self.provenance_model.brain_region),
                    ("uri", self.provenance_model.uri),
                    ("references", self.references),
                    ("date_release", self.provenance_model.date_release),
                    # authors rendered as a single "1. A; 2. B; ..." string
                    ("authors", '; '.join(
                        "{}. {}".format(i+1, a)
                        for i, a in enumerate(
                            self.provenance_model.authors))))),
                author=self.author,
                phenomenon=name_phenomenon,
                label=make_label(self.label, separator='-'),
                title=make_name(self.label, separator='-'),
                abstract=self.abstract,
                introduction=self.introduction,
                methods=self.methods,
                results=self.results,
                content=self.content,
                discussion=self.discussion)
class Cuboid(WithFields):
    """
    An axis-aligned cuboid region in 3D space.
    """
    position_corner_0 = Field(
        """
        A corner of this cuboid. Assumed component-wise smaller than the
        opposite corner.
        """)
    position_corner_1 = Field(
        """
        Another corner of this cuboid, diagonally opposite to the other.
        """)
    closed = Field(
        """
        Is the geometry closed? A closed cuboid contains its boundary.
        """,
        __default_value__=True)

    def __init__(self,
            positions_corner_0,
            positions_corner_1,
            closed=False,
            *args, **kwargs):
        """
        Initialize from two diagonally opposite corners.

        Note: this constructor's default (`closed=False`) overrides the
        field's declared default (`True`).
        """
        super().__init__(
            *args,
            position_corner_0=positions_corner_0,
            position_corner_1=positions_corner_1,
            closed=closed,
            **kwargs)

    @property
    def volume(self):
        """
        Volume of this cuboid: the product of its side lengths.

        The original called `np.prod(corner_0, corner_1)`, which passes
        `corner_1` as the `axis` argument -- not a product of the two.
        """
        return np.abs(np.prod(
            self.position_corner_1 - self.position_corner_0))

    @property
    def bbox(self):
        """
        A box that bounds this cuboid.
        """
        return (self.position_corner_0, self.position_corner_1)

    def contains(self, position):
        """
        Is `position` inside this cuboid?

        Boundary points count as contained only when the cuboid is closed.
        `position` may be a single 3-vector or an (n, 3) array.
        """
        check = np.less_equal if self.closed else np.less
        # corner_0 <= position <= corner_1, component-wise; the original
        # erroneously tested corner_1 <= position for the upper bound.
        return np.all(
            np.logical_and(
                check(self.position_corner_0, position),
                check(position, self.position_corner_1)),
            axis=-1)
class LabelGeometry(WithFields):
    """
    A helper class to handle a label for a geometry
    """
    # where in the chart plot the label text is placed
    location = Field("""
        Location in the chart plot where the label for this `NodeGeometry`
        will be written.
        """)
    # rotation applied to the label annotation; 0 means horizontal
    rotation = Field("""
        Angle with which to rotate a label annotation in the chart plot.
        """,
        __default_value__=0)
class Collector(WithFields):
    """
    Collect a measurement...
    """
    label = Field("""
        Single word string to name the measurement column.
        """,
        __default_value__="value")
    sample_size = Field("""
        Number of repetitions for each set of parameter values.
        """,
        __default_value__=1)

    @field.cast(Parameters)
    def parameters(self):
        """
        Parameter sets to measure with.
        Or a callable that produces such parameters...
        """
        raise FieldIsRequired

    @field
    def method(self, *args, **kwargs):
        """
        That makes a measurement.
        """
        raise FieldIsRequired

    @field
    def collection(self):
        """
        A policy to collect the measurements over all parameter sets
        """
        return measurement_collection.primitive_type

    def __call__(self, adapter, model, *args, **kwargs):
        """
        Collect a measurement
        """
        # One measurement per parameter set `p`; the collection policy
        # stacks the (parameters, value) pairs, and the resulting value
        # column is renamed to this collector's label.
        return\
            self.collection(
                (p, self.method(adapter, model, **p, **kwargs))
                for p in self.parameters(
                        adapter, model,
                        sample_size=self.sample_size,
                        **kwargs)
            ).rename(
                columns={"value": self.label}
            )
class SynapseCollection(WithFields):
    """
    A collection of synapses that stores synapses.

    SynapseCollection builds on pandas DataFrame to store data in memory,
    and provides an efficient secondary index on the stored data.
    If there are too many synapses to store in memory, SynapseCollection
    will respond to queries by loading the data from disk.
    """
    # connected cell gid paired with its synapse count
    adjacency = Field("""
        List of 2-tuples holding connected cell gid and synapse count.
        """)
    # whether 'adjacency' is afferent or efferent
    direction = Field("""
        Direction of the connections in 'adjacency' data.
        """)
class SpatialQueryData(WithFields):
    """
    Define data critical to answer spatial queries.
    """
    # voxel ids selected by the spatial query
    ids = Field("""
        Ids of voxels that passed the spatial query filter.
        """)
    # physical-space positions, one per id in `ids`
    positions = Field("""
        Physical space positions for the voxel with ids in self.ids.
        """)
    # cell gids per voxel, indexed by voxel id
    cell_gids = Field("""
        pandas.Series that provides the gids of all the cells in voxels
        that passed the spatial query filter, indexed by the corresponding
        voxel ids.
        """)
class Connection(WithFields):
    """
    A connection between two cells.
    A Connection instance will make sense only when defined for a circuit,
    as cells are tracked by their gid.
    """
    pre_gid = Field("""
        gid of the pre-synaptic cell.
        """)
    post_gid = Field("""
        gid of the post-synaptic cell.
        """)
    synapse_count = Field("""
        number of synapses in this Connection.
        """)
class Figure(WithFields):
    """
    A `Figure` is a graphic with a caption.
    """
    graphic = Field("""
        A matplotlib figure, or PDF, PNG...
        """)
    caption = Field("""
        A text to go with the graphic.
        """,
        __as__=paragraphs)

    def __init__(self, figure, caption="No caption provided", *args, **kwargs):
        """
        Initialize with graphic as an argument.

        `WithField` provides an `__init__` method that accepts initializer
        arguments as keyword arguments. However, with this `__init__`
        method we cannot pass the graphic as a keyword argument.
        We still allow `*args, **kwargs` that will allow `class Figure`
        to be mixed in some other class.
        """
        # `figure` may itself be a Figure-like object carrying a
        # `.graphic`, or the raw graphic itself.
        try:
            graphic = figure.graphic
        except AttributeError:
            graphic = figure
        super().__init__(graphic=graphic, caption=caption, *args, **kwargs)

    def save(self, path, dpi=100):
        """
        Save the figure.

        A path-like graphic is copied as a file; otherwise we try
        `.savefig` on the graphic, then on its `.figure` attribute.
        Raises TypeError when neither works.
        """
        if isinstance(self.graphic, (str, Path)):
            shutil.copy(self.graphic, path)
            return path
        try:
            # Returning directly removes the dead `result = None`
            # assignment the original left after the fallback branch
            # (matplotlib's `savefig` returns None anyway).
            return self.graphic.savefig(path, dpi=dpi)
        except AttributeError:
            try:
                return self.graphic.figure.savefig(path, dpi=dpi)
            except AttributeError:
                raise TypeError("Figure type {} not supported".format(
                    self.graphic.__class__))
class NodeGeometry(ChartGeometry, Polygon):
    """
    Geometries to represent nodes.
    """
    # aggregate link weight touching this node
    flow_weight = Field("""
        Total weight of connections to or from this `NodeGeometry`,
        measured in units of link weight.
        """)
    # anchor points for flows on this node's plotted shape
    flow_positions = Field("""
        Position inside the plotted shape of this `NodeGeometry` instance
        where a flows should start or stop.
        """,
        __default_value__=[])
    shape = Field("""
        Shape defining how this node will be plotted.
        """)
class CircuitProvenance(WithFields):
    """
    Provenance of a circuit.

    All fields default to placeholder values so that a provenance can be
    built incrementally.
    """
    label = Field("""
        A label that names the circuit model.
        """,
        __default_value__="")
    authors = Field("""
        A list of authors who built the circuit model.
        """,
        __default_value__=["Not Available"])
    date_release = Field("""
        When the circuit model was released in its final form.
        """,
        __default_value__="YYYYMMDD")
    uri = Field("""
        URI from where the circuit model can be loaded.
        """,
        __default_value__="https://www.example.com")
    animal = Field("""
        The animal whose brain was modeled.
        """,
        __default_value__="Not Available")
    age = Field("""
        Age of the animal at which its brain was modeled.
        """,
        __default_value__="XYZ Weeks")
    brain_region = Field("""
        Brain region that was modeled.
        """,
        __default_value__="Somatosensory Cortex (SSCx)")
class Arc(Circle):
    """
    Part of a circle, delimited by a begin and an end angle.
    """
    angle_begin = Field(
        """
        Angle at which the arc starts.
        """)
    angle_end = Field(
        """
        Angle at which the arc stops.
        """)

    def points(self, number=100):
        """
        Sample `number` points along this arc.
        """
        segment = dict(
            angle_begin=self.angle_begin,
            angle_end=self.angle_end,
            number=number)
        return self.segment_points(**segment)
class Polygon(Geometry):
    """
    A `Polygon` is a sequence of vertices, stored as its sides.
    """
    sides = Field(
        """
        Sequence of `Paths`
        """)
    facecolor = Field(
        """
        Colors to use for this `Polygon`.
        """,
        __default_value__="red")

    def points(self, number=None):
        """
        All vertices of all sides. `number` is accepted for interface
        compatibility but ignored: a polygon's points are fixed by its
        sides.
        """
        vertices = []
        for side in self.sides:
            vertices.extend(side.vertices)
        return vertices

    def _draw_this(self, axes=None, *args, **kwargs):
        """
        Draw this polygon on `axes` (the current axes by default).
        """
        axes = plt.gca() if axes is None else axes
        all_vertices = np.vstack(
            [side.vertices for side in self.sides])
        collection = PatchCollection(
            [PatchPolygon(all_vertices)],
            facecolors=(self.facecolor,),
            edgecolors=("grey",),
            linewidths=(1,),)
        axes.add_collection(collection)
        return axes
class FlowGeometry(ChartGeometry):
    """
    Geometry to represent a connection from a begin node to an end node
    as a flow.
    """
    begin_node = Field("""
        The `NodeGeometry` instance where this `FlowGeometry` instance
        starts.
        """)
    end_node = Field("""
        The `NodeGeometry` instance where this `FlowGeometry` instance
        ends.
        """)
    size_begin = Field("""
        Size of this `FlowGeometry` where it starts.
        """)
    size_end = Field("""
        Size of this `FlowGeometry` where it ends.
        """)
    label = LambdaField(
        """
        Label for this `FlowGeometry` is constructed from it's
        `begin_node` and `end_node`. A custom value may be provided at
        initialization.
        """,
        lambda self: "({},{})".format(
            self.begin_node.label,
            self.end_node.label))

    @lazyfield
    def size(self):
        """A flow's size is its size at the begin node."""
        return self.size_begin

    @lazyfield
    def identifier(self):
        """
        Identifier can be used as a key in a mapping providing features
        of this `Geometry`.
        """
        begin = self.begin_node.identifier
        end = self.end_node.identifier
        return (begin, end)

    @lazyfield
    def position(self):
        """
        Position of a flow in the chart: where it begins and where it
        ends.
        """
        begin = self.begin_node.position
        end = self.end_node.position
        return (begin, end)
class Interval(WithFields):
    """
    An `Interval` is a pair of floats: a bottom and a top.
    """
    bottom = Field(
        """
        The bottom of the interval.
        """)
    top = Field(
        """
        The top of the interval.
        """)

    @lazyfield
    def thickness(self):
        """
        Length of this `Interval`, measured from bottom to top.
        """
        extent = self.top - self.bottom
        return extent
class QueryDB(WithFields):
    """
    Cache of data associated with a circuit query, that the adapter
    will use.
    """
    method_to_memoize = Field("""
        A callable to get values to cache.
        """)

    def __init__(self, method_to_memoize):
        """Initialize with the callable whose results will be cached."""
        super().__init__(method_to_memoize=method_to_memoize)

    @lazyfield
    def store(self):
        """Mapping: circuit model -> {hashable query -> cached value}."""
        return {}

    @staticmethod
    def _hashable(query_dict):
        """
        A hashable, order-independent view of `query_dict`,
        with None-valued entries dropped.
        """
        items = [
            (key, make_hashable(value))
            for key, value in query_dict.items()
            if value is not None]
        return tuple(sorted(items, key=lambda key_value: key_value[0]))

    @staticmethod
    def _hashed(query_dict):
        """Hash of the hashable view of `query_dict`."""
        return hash(QueryDB._hashable(query_dict))

    def __call__(self, circuit_model, query_dict):
        """
        Look up the memoized value for (`circuit_model`, `query_dict`),
        computing and caching it on a miss.
        """
        cache_circuit_model = self.store.setdefault(circuit_model, {})
        key_query = self._hashable(query_dict)
        if key_query not in cache_circuit_model:
            cache_circuit_model[key_query] =\
                self.method_to_memoize(circuit_model, query_dict)
        return cache_circuit_model[key_query]
class Path(Curve):
    """
    A curve with points on the curve provided explicitly.
    """
    # NOTE: the original had a stray comma after the docstring above,
    # turning it into a discarded tuple expression and leaving the class
    # with no docstring.
    vertices = Field(
        """
        A sequence of 2D vertices that define this `Path`.
        """)

    def points(self, number=100):
        """
        Up to `number` points taken from this `Path`'s vertices.

        If there are no more than `number` vertices, all of them are
        returned unchanged. Otherwise a random sample (without
        replacement) is drawn; note that the sample does not preserve
        vertex order.
        """
        if number >= len(self.vertices):
            return self.vertices
        return self.vertices[
            np.random.choice(
                range(len(self.vertices)),
                number,
                replace=False)]
class DocElem(WithFields, AIBase):
    """
    An element of a document, titled and labeled, that cooperates with
    the rest of its MRO for saving and report generation.
    """
    title = Field("""
        Title of this document element.
        """)
    label = LambdaField(
        """
        A single word tag for this document element.
        """,
        lambda self: make_label(self.title))

    @field
    def parent(self):
        """
        Parent `DocElem` that contains this one.
        """
        raise FieldIsRequired

    @field
    def children(self):
        """
        A sequence of `DocElem`s that are contained in this one.
        """
        return tuple()

    def save(self, record, path):
        """Save this `DocElem`"""
        # Cooperative save: defer to the next class in the MRO if it
        # defines `save`, writing under a sub-directory named by this
        # element's label; otherwise there is nothing to write.
        try:
            save_super = super().save
        except AttributeError:
            return path
        return save_super(record, Path(path).joinpath(self.label))

    def __call__(self, adapter, model, *args, **kwargs):
        """
        Produce this element's record, delegating to the next class in
        the MRO when it is callable, and tagging the result with this
        element's title and label.
        """
        try:
            get_record = super().__call__
        except AttributeError:
            return Record(title=self.title, label=self.label)
        return get_record(adapter, model, *args, **kwargs)\
            .assign(title=self.title, label=self.label)
class Connectome(WithFields):
    """
    The connectome of a circuit.

    Connections are held in a pandas.DataFrame; synapses are materialized
    lazily, and optionally cached.
    """
    cells = Field("""
        Instance of cell collection on which this `Connectome` has been
        defined.
        """)
    connections = Field("""
        A pandas.DataFrame <pre_gid, post_gid, synapse_count>...
        """)
    cache_synapses = Field("""
        An object that can cache synapses as they are read.
        If not provided, synapses will not be cached.
        """,
        __required__=False)

    @classmethod
    def _get_properties(cls, dataframe, properties):
        """
        Get only columns in list 'properties' from pandas DataFrame.
        When 'properties' is empty / None, return the index values.
        """
        return dataframe[properties] if properties else dataframe.index.values

    @classmethod
    def empty_synapse_holder(cls):
        """
        An empty pandas.DataFrame to hold synapses, indexed by
        (pre_gid, post_gid, synapse_index).
        """
        return\
            pd.DataFrame(dict(
                pre_gid=np.array([], dtype=np.int32),
                post_gid=np.array([], dtype=np.int32),
                synapse_index=np.array([], dtype=np.int32)))\
              .set_index(
                  ["pre_gid", "post_gid", "synapse_index"],
                  drop=False)

    def __init__(self, *args, **kwargs):
        """
        Initialize

        1. A pandas data-frame containing the synapses defining this
           Connectome -- filled lazily.
        2. Pass on to WithFields for everything else.
        """
        self._synapses = self.empty_synapse_holder()
        # track where pre_gid --> post_gid synapses are
        self._pre_post_index = {}
        super().__init__(*args, **kwargs)

    @lazyfield
    def pre_gids(self):
        """Sorted gids that appear as a pre-synaptic cell."""
        return\
            np.sort(self.connections.pre_gid.unique())

    @lazyfield
    def post_gids(self):
        """Sorted gids that appear as a post-synaptic cell."""
        return\
            np.sort(self.connections.post_gid.unique())

    @lazyfield
    def gids(self):
        """
        All the gids, loaded lazily
        """
        return\
            np.unique(np.concatenate([self.pre_gids, self.post_gids],
                                     axis=0))

    @lazyfield
    def afferent_connections(self):
        """Connections indexed (and sorted) by post-synaptic gid."""
        return\
            self.connections\
                .set_index("post_gid")\
                .sort_index()

    @lazyfield
    def afferent_adjacency(self):
        """
        List of 2-tuples giving afferent cell gid and synapse count,
        for each cell in a circuit.
        We can build other connectome properties from this list.
        """
        raise NotImplementedError

    @lazyfield
    def efferent_adjacency(self):
        """
        dict mapping gids to 2-tuples giving efferent cell gid and
        synapse count, for each cell in a circuit. We will set this field
        to an empty dict, and fill it lazily.
        """
        raise NotImplementedError

    @lazyfield
    def efferent_connections(self):
        """Connections indexed (and sorted) by pre-synaptic gid."""
        return\
            self.connections\
                .set_index("pre_gid")\
                .sort_index()

    @lazyfield
    def synapse_counts(self):
        """Synapse counts indexed by (pre_gid, post_gid)."""
        LOGGER.info("Loading synapse counts.")
        levels_index = ["pre_gid", "post_gid"]
        return\
            self.connections\
                .set_index(levels_index)\
                .synapse_count\
                .sort_index(level=levels_index)

    def synapse_properties(self, synapse_ids, properties):
        """
        Synapse properties as pandas DataFrame.

        Arguments:
            synapse_ids: array-like of synapse IDs.
            properties: 'Synapse' property or list of 'Synapse' properties.

        Return:
            pandas.Series indexed by synapse IDs if 'properties' is scalar.
            pandas.DataFrame indexed by synapse IDs if 'properties' is list.
        """
        raise NotImplementedError

    def synapse_positions(self, synapse_ids, side, kind):
        """
        Synapse positions as pandas.DataFrame

        Arguments:
            synapse_ids: array-like of synapse IDs
            side: 'pre' or 'post'
            kind: 'center' or 'contour'

        Return:
            pandas.DataFrame with ('x', 'y', 'z') columns indexed by
            synapse IDs.
        """
        raise NotImplementedError

    def afferent_gids(self, gid):
        """
        All the incoming connected cell gids of the cell with 'gid'.
        """
        return\
            self.afferent_connections.loc[gid].pre_gid.values

    def afferent_synapses(self, gid, properties=None):
        """
        Get afferent synapses for given 'gid'.

        Arguments:
            gid: post-synaptic neuron's GID
            properties: None / 'Synapse' property / list of 'Synapse'
                properties

        Return:
            List of synapse IDs if 'properties' is None;
            pandas.Series indexed by synapse IDs if 'properties' is scalar;
            pandas.DataFrame indexed by synapse IDs if 'properties' is list.
        """
        return\
            self._get_properties(
                self._get_synapses(post_gid=gid),
                properties=properties)

    @staticmethod
    def __in_sorted(xs, y):
        """
        Is 'y' in sorted array xs?
        """
        i = np.searchsorted(xs, y)
        return i < len(xs) and xs[i] == y

    def efferent_gids(self, gid):
        """
        All the outgoing connected cell gids of the cell with 'gid'.
        """
        return\
            self.efferent_connections.loc[gid].post_gid.values

    def efferent_synapses(self, gid, properties=None):
        """
        Get efferent synapses for given 'gid'.

        Arguments:
            gid: pre-synaptic neuron's GID
            properties: None / 'Synapse' property / list of 'Synapse'
                properties

        Return:
            List of synapse IDs if 'properties' is None;
            pandas.Series indexed by synapse IDs if 'properties' is scalar;
            pandas.DataFrame indexed by synapse IDs if 'properties' is list.
        """
        return\
            self._get_properties(
                self._get_synapses(pre_gid=gid),
                properties=properties)

    def _get_pair_synapse_count(self, pre_gid, post_gid):
        """
        Get number of synapses pre_gid to post_gid.
        """
        return\
            self.synapse_counts.loc[(pre_gid, post_gid)]

    def _read_synapses(self, pre_gid=None, post_gid=None):
        """
        Read synapses connecting pre_gid to post_gid; at least one of the
        two gids must be given. The mock-circuit knows only the pre_gid
        and post_gid of synapses. We will read other properties when we
        have implemented them.
        """
        assert not (pre_gid is None and post_gid is None)
        if post_gid is None:
            return\
                pd.concat([
                    self._read_synapses(pre_gid, _post_gid)
                    for _post_gid in self.efferent_gids(pre_gid)])
        if pre_gid is None:
            return\
                pd.concat([
                    self._read_synapses(_pre_gid, post_gid)
                    for _pre_gid in self.afferent_gids(post_gid)])
        return\
            pd.DataFrame({
                "pre_gid": pre_gid,
                "post_gid": post_gid,
                "synapse_index": np.arange(
                    self._get_pair_synapse_count(pre_gid, post_gid))})\
              .set_index(
                  ["pre_gid", "post_gid", "synapse_index"],
                  drop=False)

    def _get_cached(self, pre_gid=None, post_gid=None):
        """
        Get cached synapses pre_gid --> post_gid.
        Raises KeyError when there is no cache, or on a cache miss.
        """
        try:
            synapse_cache = self.cache_synapses
        except AttributeError as error:
            LOGGER.info(
                "{} instance does not cache synapses: {}."\
                .format(self.__class__, error))
            raise KeyError(
                "Synapses {}-->{} not found in cache."\
                .format(pre_gid, post_gid))
        return synapse_cache.get(pre_gid=pre_gid, post_gid=post_gid)

    def _cache(self, synapses, pre_gid=None, post_gid=None):
        """
        Save some synapses; a no-op when there is no cache.
        """
        try:
            synapses_cache = self.cache_synapses
        except AttributeError as error:
            LOGGER.info(
                "{} instance does not cache synapses: {}."\
                .format(self.__class__, error))
            return None
        return synapses_cache.append(synapses, pre_gid, post_gid)

    def _get_synapses(self, pre_gid=None, post_gid=None):
        """
        Get synapses pre_gid --> post_gid, reading (and caching) them on
        a cache miss.
        """
        try:
            return self._get_cached(pre_gid, post_gid)
        except KeyError:
            LOGGER.info(
                LOGGER.get_source_info(),
                "No cached synapses {}==>{}".format(
                    pre_gid if pre_gid is not None else "",
                    post_gid if post_gid is not None else ""))
        synapses_pre_post =\
            self._read_synapses(pre_gid=pre_gid, post_gid=post_gid)
        self._cache(synapses_pre_post, pre_gid=pre_gid, post_gid=post_gid)
        return synapses_pre_post

    def pair_synapses(self, pre_gid, post_gid, properties=None):
        """
        Get synapses corresponding to 'pre_gid' --> 'post_gid' connection.

        Arguments:
            pre_gid: presynaptic GID
            post_gid: postsynaptic GID
            properties: None / 'Synapse' property / list of 'Synapse'
                properties

        Return:
            List of synapse IDs if 'properties' is None;
            pandas.Series indexed by synapse IDs if 'properties' is scalar;
            pandas.DataFrame indexed by synapse IDs if 'properties' is list.
        """
        return\
            self._get_properties(
                self._get_synapses(pre_gid, post_gid),
                properties=properties)

    def _filter_connected(self,
            gid,
            candidate_gids,
            direction=terminology.direction.afferent):
        """
        Filter gids from 'candidate_gids' that are connected to the
        neuron with the given 'gid', in the given direction.
        """
        # NOTE(review): the default is `terminology.direction.afferent`
        # but the comparison below uses `terminology.bluebrain.afferent`
        # -- confirm the two resolve to the same value.
        connected_gids =\
            self.afferent_gids(gid)\
            if direction == terminology.bluebrain.afferent else\
            self.efferent_gids(gid)
        # np.isin replaces the deprecated np.in1d; same semantics here.
        return\
            candidate_gids[
                np.isin(candidate_gids, connected_gids)]

    def pathway_synapses(self,
            pre_gids=np.array([]),
            post_gids=np.array([]),
            properties=None):
        """
        Synapses in the pathway {pre_gids} -> {post_gids}.
        """
        if len(pre_gids) == 0 and len(post_gids) == 0:
            raise NotImplementedError(
                "There may be too many synapses to handle.")

        def __stacked(datas):
            """
            Stack either a list of arrays, or dataframes
            """
            if len(datas) == 0:
                return\
                    self.empty_synapse_holder() if properties\
                    else np.array([], np.integer)
            return (pd.concat if properties else np.hstack)(datas)

        # `properties` must be passed through: `__stacked` concatenates
        # dataframes when properties are requested, arrays otherwise --
        # the original dropped the argument and so always stacked arrays.
        if len(pre_gids) == 0:
            return __stacked([
                self.afferent_synapses(gid, properties=properties)
                for gid in post_gids])
        if len(post_gids) == 0:
            return __stacked([
                self.efferent_synapses(gid, properties=properties)
                for gid in pre_gids])
        return\
            __stacked([
                self.pair_synapses(_pre_gid, _post_gid,
                                   properties=properties)
                for _post_gid in post_gids
                for _pre_gid in self._filter_connected(_post_gid,
                                                       pre_gids)])

    def _resolve_gids(self, group_cells):
        """
        Resolve a cell group to an array of gids.
        """
        cells_subset = self.cells.get(group_cells)
        if isinstance(cells_subset, pd.Series):
            return group_cells
        return cells_subset.index.values

    def iter_connections(self,
            pre=None, post=None,
            unique_gids=False, shuffle=False,
            return_synapse_ids=False, return_synapse_count=False):
        """
        Iterate through pre -> post connections.

        Yields (pre_gid, post_gid) tuples, or
        (pre_gid, post_gid, synapse_count) when `return_synapse_count`.
        NOTE(review): `unique_gids`, `shuffle` and `return_synapse_ids`
        are accepted for interface compatibility but not implemented here.
        """
        result = lambda xyz: xyz if return_synapse_count else xyz[0:2]
        if pre is None:
            if post is None:
                for _, connection in self.connections.iterrows():
                    yield result((
                        connection.pre_gid,
                        connection.post_gid,
                        connection.synapse_count))
            else:
                for post_gid in self._resolve_gids(post):
                    afferent_connections =\
                        self.afferent_connections.loc[post_gid]
                    for _, connection in afferent_connections.iterrows():
                        yield result((
                            connection.pre_gid,
                            post_gid,
                            connection.synapse_count))
        elif post is None:
            for pre_gid in self._resolve_gids(pre):
                efferent_connections =\
                    self.efferent_connections.loc[pre_gid]
                for _, connection in efferent_connections.iterrows():
                    yield result((
                        pre_gid,
                        connection.post_gid,
                        connection.synapse_count))
        else:
            for pre_gid in tqdm(self._resolve_gids(pre)):
                efferent_synapse_count =\
                    self.synapse_counts\
                        .loc[pre_gid]\
                        .reindex(self._resolve_gids(post))\
                        .dropna()
                # `Series.iteritems` was removed in pandas 2.0;
                # `items` is the long-available equivalent.
                for post_gid, synapse_count in\
                        efferent_synapse_count.items():
                    yield result((pre_gid, post_gid, synapse_count))
class CircularNetworkChart(NetworkChart):
    """
    Illustrate a network's nodes as islands along a circle's periphery, and its edges as rivers flowing between these islands.

    Angles are in radians; geometry helpers (`point_at`, `arc`,
    `flow_curve`) convert (radius, angle) pairs to page coordinates,
    applying `self.rotation` and `self.center`.
    """
    NodeGeometryType = Field(""" A callable that returns a node geometry """,
        __default_value__=NodeArcGeometry)
    FlowGeometryType = Field(""" A callable that returns a flow geometry """,
        __default_value__=FlowArcGeometry)
    center = Field(""" Position on the page where the center of this `Chart` should lie. """,
        __default_value__=np.array([0., 0.]))
    rotation = Field(""" The overall angle in radians, by which the chart will be rotated. """,
        __default_value__=0.)
    link_data = Field(""" A `pandas.Series` with a double leveled index (`begin_node`, 'end_node``), with values providing weights / strengths of the links to be displayed as sizes of the flows. """)
    size = Field(""" Size of the figure. """,
        __default_value__=12)
    height_figure = LambdaField(""" Height of the figure. """,
        lambda self: self.size)
    width_figure = LambdaField(""" Width of the figure. """,
        lambda self: self.size)
    radial_size_node = Field(""" Radial size of a node --- will be the same for all nodes. """,
        __default_value__=0.1)
    spacing_factor = Field(""" Fraction of space along the periphery that must be left blank, to space nodes. """,
        __default_value__=0.25)
    unit_node_size = LambdaField(
        """ Node size will be determined as a multiple of this unit size. """,
        lambda self: 2 * np.pi * (1. - self.spacing_factor))
    inner_outer_spacing = LambdaField(
        """ Spacing from inner to outer circles. """,
        lambda self: 1. * self.radial_size_node)
    margin = Field(""" Space (in units of the axes) around the geometries. """,
        __default_value__=0.5)
    node_flow_spacing_factor = Field(""" A multiplicative factor ( > 1.) by which flows placed on a node geometry must be spaced. """,
        __default_value__=1.)
    fontsize = Field("""...""",
        __default_value__=24)
    axes_size = Field(""" Axes will be scaled accordingly. """,
        __default_value__=1.)
    color_map = Field(""" Colors for the nodes. Please a provide a `Mapping`, like a dictionary, or pandas Series that maps node labels to the color value with which they should be painted. """,
        __default_value__={})

    def get_color(self, geometry, **kwargs):
        """
        Get colors for a geometry.

        Falls back to "green" when the geometry's identifier is not in
        `self.color_map`.
        """
        return self.color_map.get(geometry.identifier, "green")

    @lazyfield
    def outer_circle(self):
        """Circle on which the source (outer) node arcs sit."""
        return Circle(label="outer-circle", radius=self.axes_size)

    @lazyfield
    def inner_circle(self):
        """Circle on which the target (inner) node arcs sit."""
        return Circle(label="inner-circle",
                      radius=self.axes_size - self.inner_outer_spacing)

    @lazyfield
    def node_geometry_size(self):
        """
        Size of the geometry that will represent a node. In a pandas series of tuples (radial, angular)

        One (radial, angular) tuple per node for each of the "total",
        "source" and "target" roles; the angular extent is the node's
        weight scaled by `unit_node_size`.
        """
        # NOTE(review): `.apply(..., axis=1)` implies `self.node_weight`
        # is a DataFrame with columns total / source / target --- confirm
        # against the (unseen) `node_weight` definition.
        return self.node_weight.apply(lambda weight: (pd.Series({
            "total": (self.radial_size_node,
                      self.unit_node_size * weight.total),
            "source": (self.radial_size_node,
                       self.unit_node_size * weight.source),
            "target": (self.radial_size_node,
                       self.unit_node_size * weight.target)
        })), axis=1)

    @staticmethod
    def _angular_size(dataframe_size):
        """
        Extract the angular component (index 1 of each (radial, angular)
        tuple) from a frame produced by `node_geometry_size`.

        The except branch handles frames lacking a "total" column.
        """
        try:
            return dataframe_size.apply(lambda row: pd.Series(
                dict(total=row.total[1],
                     source=row.source[1],
                     target=row.target[1])), axis=1)
        except AttributeError:
            return dataframe_size.apply(lambda row: pd.Series(
                dict(source=row.source[1],
                     target=row.target[1])), axis=1)
        # NOTE(review): unreachable --- both branches above return.
        raise RuntimeError(
            "Execution of _angular_size(...) should not reach here.")

    @lazyfield
    def node_angular_size(self):
        """Angular sizes of all nodes (total / source / target columns)."""
        return self._angular_size(self.node_geometry_size)

    @lazyfield
    def node_position(self):
        """
        Positions where the nodes will be displayed.

        Returns a DataFrame with "source" and "target" columns holding
        (radius, angle) tuples: sources on the outer circle, targets on
        the inner circle, laid out consecutively around the periphery
        with `spacing` blank radians between nodes.
        """
        number_nodes = self.node_weight.shape[0]
        # Blank space between consecutive nodes along the periphery.
        spacing = 2. * np.pi * self.spacing_factor / number_nodes

        def _positions_angular():
            # Yield the angular mid-point of each node's total arc,
            # packing arcs one after another with `spacing` gaps.
            position_end = -spacing
            for size in self.node_geometry_size.total.values:
                position_start = position_end + spacing
                position_end = position_start + size[1]
                yield (position_start + position_end) / 2.

        positions_angular = pd.Series(list(_positions_angular()),
                                      index=self.node_geometry_size.index,
                                      name="angular")
        # The source arc occupies the first part of each node's total arc...
        starts_source = (
            positions_angular - self.node_angular_size.total / 2.).rename("start_source")
        positions_angular_source = (
            starts_source + self.node_angular_size.source / 2.).rename("angular")
        positions_source = positions_angular_source.apply(
            lambda position_angular: (self.outer_circle.radius, position_angular))
        # ...and the target arc follows immediately after it.
        starts_target = (
            positions_angular_source + self.node_angular_size.source / 2.).rename("start_target")
        positions_angular_target = (
            starts_target + self.node_angular_size.target / 2.).rename("angular")
        positions_target = positions_angular_target.apply(
            lambda position_angular: (self.inner_circle.radius, position_angular))
        return pd.concat([positions_source, positions_target],
                         axis=1,
                         keys=["source", "target"])

    def point_at(self, radius, angle):
        """Page coordinates of polar point (radius, angle), rotated and centered."""
        return self.center + np.array([
            radius * np.sin(angle + self.rotation),
            radius * np.cos(angle + self.rotation)
        ])

    def arc(self, radius, angle_begin, angle_end, label=""):
        """Vertices of a circular arc at `radius` between the two angles."""
        if not label:
            label = "{}---{}".format(angle_begin, angle_end)
        return Arc(label=label,
                   center=self.center,
                   radius=radius,
                   rotation=self.rotation,
                   angle_begin=angle_begin,
                   angle_end=angle_end).points()

    def flow_curve(self, radius, angle_begin, angle_end, label=""):
        """
        Curve of a flow.

        Builds a circular arc that leaves the circle of the given
        `radius` at `angle_begin` and re-enters it at `angle_end`; the
        arc's center and radius follow from the chord geometry below.
        """
        angle_mean =\
            (angle_begin + angle_end) / 2.
        angle_min =\
            np.minimum(angle_begin, angle_end)
        # Half the angular separation between the two endpoints.
        angle_off =\
            angle_mean - angle_min
        angle_center =\
            np.pi - 2 * angle_off
        angle_rotation =\
            np.pi / 2.\
            - angle_min
        # Distance from chart center to the connecting arc's center.
        length =\
            radius / np.cos(angle_off)
        radius_arc =\
            radius * np.tan(angle_off)
        center_arc =\
            self.center + np.array([
                length * np.sin(angle_mean + self.rotation),
                length * np.cos(angle_mean + self.rotation)])
        if not label:
            label = "{}==>{}".format(angle_begin, angle_end)
        # Sweep direction depends on the ordering of the two angles.
        rotation =\
            self.rotation - angle_rotation\
            if angle_begin < angle_end else\
            self.rotation - angle_rotation - angle_center
        angle_end =\
            -angle_center\
            if angle_begin < angle_end else\
            angle_center
        return Arc(label=label,
                   center=center_arc,
                   radius=radius_arc,
                   rotation=rotation,
                   angle_begin=0.,
                   angle_end=angle_end).points()

    def get_flow_position(self, node_geometry, flow_geometry):
        """
        Where should flow geometry be situated on node geometry.

        Returns an (angle_start, angle_end) pair. Stateful: flows are
        packed onto the node in call order, and each placement is
        appended to `node_geometry.flow_positions` so repeated calls for
        the same flow return the cached slot.
        """
        assert node_geometry in (
            flow_geometry.begin_node, flow_geometry.end_node),\
            "{} not in ({}, {})".format(
                node_geometry.label,
                flow_geometry.begin_node.label,
                flow_geometry.end_node.label)
        # A flow's size at this node depends on whether it starts or ends here.
        flow_size =\
            flow_geometry.size_end\
            if node_geometry == flow_geometry.end_node else\
            flow_geometry.size_begin
        spaced =\
            lambda pos: self.node_flow_spacing_factor * pos
        start =\
            spaced(
                node_geometry.position.angular - node_geometry.size.angular / 2)
        if not node_geometry.flow_positions:
            # First flow on this node: start at the node arc's edge.
            position = (start, start + flow_size)
            node_geometry.flow_positions.append((flow_geometry, position))
        else:
            for geometry, position in node_geometry.flow_positions:
                if geometry == flow_geometry:
                    return position
            # Not yet placed: continue after the last placed flow
            # (`position` holds the final loop value here).
            start = spaced(position[1])
            position = (start, start + flow_size)
            node_geometry.flow_positions.append((flow_geometry, position))
        return position

    @lazyfield
    def flow_geometry_size(self):
        """
        Angular sizes of each flow at its begin ("begin") and end ("end")
        node, proportional to the link's share of the node's total flow.
        """
        def _flow_sizes(node_type):
            """Sizes at either the "source" or the "target" ends of links."""
            assert node_type in ("source", "target")
            if node_type == "source":
                nodes = self.link_data.index.get_level_values("begin_node")
                return self.node_angular_size.source.loc[nodes].values * (
                    self.link_data / self.node_flow.outgoing.loc[nodes].values).rename("begin")
            else:
                nodes = self.link_data.index.get_level_values("end_node")
                return\
                    self.node_angular_size.target.loc[nodes].values * (
                        self.link_data / self.node_flow.incoming.loc[nodes].values).rename("end")
            # NOTE(review): unreachable --- both branches above return.
            raise RuntimeError(
                "Execution of `flow_data(...)` should not have reached here.")

        return pd.concat([_flow_sizes("source"), _flow_sizes("target")],
                         axis=1)

    def draw(self, draw_diagonal=True, *args, **kwargs):
        """
        Draw this `Chart`.

        Arguments:
            draw_diagonal: when False, skip self-loop flows (begin node
                equals end node).
        Returns nothing; draws onto the current matplotlib axes.
        """
        figure = plt.figure(figsize=(self.size, self.size))
        axes = plt.gca()
        s = self.axes_size + self.margin
        axes.set(xlim=(-s, s), ylim=(-s, s))
        self.outer_circle.draw(*args, **kwargs)
        self.inner_circle.draw(*args, **kwargs)
        for node_geometry in self.source_geometries.values():
            node_geometry.draw(axes, *args, **kwargs)
            #node_geometry.add_text(axes, *args, **kwargs)
        for node_geometry in self.target_geometries.values():
            node_geometry.draw(axes, *args, **kwargs)
            #node_geometry.add_text(axes, *args, **kwargs)
        for flow_geometry in self.flow_geometries.values():
            if (flow_geometry.begin_node == flow_geometry.end_node
                    and not draw_diagonal):
                continue
            flow_geometry.draw(axes, *args, **kwargs)
class StructuredAnalysis(Analysis):
    """
    An analysis structured as individual components that each handle an independent responsibility.
    """
    author = Field(""" An object describing the author. """,
        __default_value__=Author.anonymous)
    AdapterInterface = Field(""" An class written as a subclass of `InterfaceMeta` that declares and documents the methods required of the adapter by this analysis. """,
        __type__=InterfaceMeta)
    default_adapter = Field(""" the adapter to use if none is provided when the analysis is run """,
        __required__=False)
    abstract = LambdaField(
        """ A short description of this analysis. """,
        lambda self: self.phenomenon.description)
    introduction = Field(
        """ A scientific introduction to this analysis. """,
        __as__=Section.introduction,
        __default_value__=Section.introduction("Not Provided"))
    methods = Field(""" A description of the algorithm / procedure used to compute the results, and the experimental measurement reported in this analysis. """,
        __as__=Section.methods,
        __default_value__=Section.methods("Not Provided."))
    measurement_parameters = Field(""" An object providing a collection of parameters to measure with. This object may be of type: 1. either `pandas.DataFrame`, 2. or `adapter, model -> Collection<MeasurementParameters>`, 3. or dmt.tk.Parameters. """,
        __as__=Parameters,
        __default_value__=nothing)
    sampling_methodology = Field(""" A tag indicating whether this analysis will make measurements on random samples drawn from a relevant population of circuit constituents, or on the entire population. The circuit constituents population to be measured will be determined by a query. """,
        __default_value__="Not Provided")
    sample_size = Field(""" Number of samples to measure for each set of the measurement parameters. This field will be relevant when the measurements are made on random samples. When the measurement is exhaustive, the whole population of (relevant) circuit constituents will be measured. """,
        __default_value__=20)
    sample_measurement = Field(""" A callable that maps `(adapter, model, **parameters, **customizations) ==> measurement` where parameters : paramters for the measurements customizations : that specify the method used to make a measurement This field may also be implemented as a method in a subclass. """,
        __required__=False)
    measurement_collection = Field(
        """ A callable that will collect measurements passed as an iterable. The default value assumes that the each measurement will return an elemental value such as integer, or floating point number. """,
        __default_value__=primitive_type_measurement_collection)
    plotter = Field(""" A class instance or a module that has `plot` method that will be used to plot the results of this analysis. The plotter should know how to interpret the data provided. For example, the plotter will have to know which columns are the x-axis, and which the y-axis. The `Plotter` instance used by this `BrainCircuitAnalysis` instance should have those set as instance attributes. """,
        __required__=False)
    stats = Field(""" An object that provides a statistical summary for the measurements made in this analysis. This object may be just a function that takes this analysis' measurements as an argument. """,
        __as__=Statistics,
        __default_value__=nothing)
    verdict = Field(""" An object that provies a verdict on the measurements made in this analysis. This object may be just a function that takes this analysis' measurements as an argument. """,
        __default_value__=always_pass)
    results = Field(""" A callable on relevant parameters that will return results for a run of this analysis. """,
        __as__=Section.results,
        __default_value__="Results are presented in the figure")
    conclusion = Field(
        """ A callable on relevant parameters that will return conclusion for a run of this analysis. """,
        __as__=Section.conclusion,
        __default_value__="Conclusion will be provided after a review of the results.")
    discussion = Field(
        """ A callable on relevant parameters that will return conclusion for a run of this analysis. """,
        __as__=Section.discussion,
        __default_value__="Conclusion will be provided after a review of the results.")
    reference_data = Field(""" A pandas.DataFrame containing reference data to compare with the measurement made on a circuit model. Each dataset in the dataframe must be annotated with index level 'dataset', in addition to levels that make sense for the measurements. """,
        __default_value__=NOT_PROVIDED)
    report = Field(""" A callable that will generate a report. The callable should be able to take arguments listed in `get_report(...)` method defined below. """,
        __default_value__=Report)
    reporter = Field(""" A class or a module that will report the results of this analysis. It is up to the reporter what kind of report to generate. For example, the report can be a (interactive) webpage, or a static PDF. """,
        __default_value__=NOT_PROVIDED,
        __examples__=[Reporter(path_output_folder=os.getcwd())])

    # Record type bundling a measurement method with its dataset and data.
    Measurement = namedtuple("Measurement", ["method", "dataset", "data"])

    # TODO: The methods below are from HD's alpha StructuredAnalysis
    # This must be refactored...
    # TODO: this probably should not be public....
    def adapter_method(self, adapter=None):
        """
        Get the measurement method marked on the AdapterInterface.

        Falls back to the `get_<name>` spelling when the adapter lacks
        the bare attribute.
        """
        measurement_name = self.AdapterInterface.__measurement__
        adapter = self.adapter if adapter is None else adapter
        try:
            method = getattr(adapter, measurement_name)
        except AttributeError:
            method = getattr(adapter, "get_{}".format(measurement_name))
        finally:
            # NOTE(review): `return` in `finally` suppresses any exception
            # raised by the fallback getattr above, returning an unbound
            # `method` error instead --- confirm this is intended.
            return method

    @lazyfield
    def label(self):
        """
        A label for this analysis: "<phenomenon>_by_<parameters>".
        """
        return "{}_by_{}".format(self.phenomenon.label,
                                 '_'.join(self.names_measurement_parameters))

    # TODO: parallelize model measuring?
    def get_model_measurements(self, adapter, model, sample_size=None):
        """
        Get a statistical measurement.

        Arguments:
            adapter: the adapter mediating between analysis and model
            model: the model to measure
            sample_size: optional int; note the sampling call below uses
                `self.sample_size`, not this argument.
        Returns a hashable DataFrame of parameters with a measurement column.
        """
        assert not sample_size or isinstance(sample_size, int),\
            "Expected int, received {}".format(type(sample_size))
        try:
            # Prefer a user-provided sample_measurement, binding the adapter.
            method = self.sample_measurement
            measurement_method =\
                lambda *args, **kwargs: method(adapter, *args, **kwargs)
        except AttributeError:
            measurement_method = self.adapter_method(adapter)
        parameters = self._parameters.for_sampling(adapter, model,
                                                   size=self.sample_size)
        # TODO: test parameter order is preserved
        measurements = make_dataframe_hashable(
            pd.DataFrame(parameters).assign(
                **{
                    self.phenomenon: [measurement_method(model, **p)
                                      for p in tqdm(parameters)]
                }))
        return measurements

    @property
    def number_measurement_parameters(self):
        """
        How many parameters are the measurements made with? For example, if the measurement parameters are region and layer, the number is two.

        The implementation below uses the implementation of `self.measurement_parameters`; if you change the type of that component, you will have to override this property.
        """
        return self._parameters.values.shape[1]

    @property
    def names_measurement_parameters(self):
        """
        What are the names of the parameters that the measurements are made with? If measurement parameters cannot provide the variables (a.k.a parameter labels or tags), an empty list is returned.
        """
        try:
            return self._parameters.variables
        except TypeError:
            return []
        # NOTE(review): unreachable --- both paths above return.
        return None

    @property
    def phenomenon(self):
        # Prefer the sample_measurement's phenomenon, then the
        # AdapterInterface's; NOT_PROVIDED when neither declares one.
        try:
            return self.sample_measurement.phenomenon
        except AttributeError:
            try:
                return self.AdapterInterface.phenomenon
            except AttributeError:
                return NOT_PROVIDED

    @property
    def _parameters(self):
        # Resolve measurement parameters, wrapping in `Parameters` if
        # needed; fall back to deriving them from reference data.
        if self.measurement_parameters is not NOT_PROVIDED:
            return\
                Parameters(self.measurement_parameters)\
                if not isinstance(self.measurement_parameters, Parameters)\
                else self.measurement_parameters
        elif self.reference_data is not NOT_PROVIDED:
            return\
                Parameters(
                    self.reference_data.drop(columns=[self.phenomenon]))
        else:
            raise ValueError(""" {} has neither measurement_parameters nor reference_data provide one or the other """.format(self))

    def with_fields(self, **kwargs):
        """
        A copy of this analysis with the given fields overridden; fields
        not supplied are carried over from this instance when present.
        """
        for field in self.get_fields():
            if field not in kwargs:
                try:
                    kwargs[field] = getattr(self, field)
                except AttributeError:
                    pass
        return self.__class__(**kwargs)

    def validation(self, circuit_model, adapter=None, *args, **kwargs):
        """
        Validation of a model against reference data.
        """
        assert not self.reference_data.empty,\
            "Validation needs reference data."
        # NOTE(review): these branches look inverted --- when `adapter` is
        # None this passes a (None, model) tuple, so `__call__` uses a None
        # adapter instead of `default_adapter`; when an adapter IS given it
        # is discarded. Confirm against `__call__` before relying on this.
        if adapter is None:
            return self((adapter, circuit_model))
        else:
            return self(circuit_model)

    def _get_report(self, measurements):
        """
        Build a Report from measurements: plot (falling back to a warning
        string on failure), then attach stats and verdict.
        """
        try:
            fig = self.plotter(measurements, phenomenon=self.phenomenon)
        except Exception as e:
            import traceback
            # NOTE(review): the string below begins with a stray '"'
            # character (four quotes open the literal) --- likely a typo,
            # preserved here since it is runtime text.
            fig = """" Plotting failed: {}, {} returning raw measurments""".format(e, traceback.format_exc())
            warnings.warn(Warning(fig))
        # Normalize to a list of figures.
        try:
            len(fig)
        except TypeError:
            fig = [fig]
        # TODO: until field is fixed, this will raise for empty docstrings
        # TEMPORARY workaround:
        if self.sample_measurement.__doc__ is None:
            self.sample_measurement.__doc__ = ''
        report = Report(figures=fig,
                        measurement=measurements,
                        phenomenon=self.phenomenon,
                        methods=self.sample_measurement.__doc__)
        report.stats = self.stats(report)
        report.verdict = self.verdict(report)
        return report

    def __call__(self, *models):
        """
        Perform an analysis of 'models'.

        Each model may be passed either bare (measured with
        `default_adapter`) or as an (adapter, model) tuple. Reference
        data, when provided, is included as an additional dataset.
        Returns the report, posting it via `reporter` when one is set.
        """
        measurements = OrderedDict()
        if self.reference_data is not NOT_PROVIDED:
            measurements[
                _label(self.reference_data, default='reference_data')] =\
                make_dataframe_hashable(self.reference_data)
        for i, model in enumerate(models):
            if isinstance(model, tuple):
                adapter, model = model
            else:
                adapter = self.default_adapter
            measurements[_label(model, default='model', i=i)] =\
                self.get_model_measurements(adapter, model)
        report = self._get_report(measurements)
        if self.reporter is not NOT_PROVIDED:
            self.reporter.post(report)
        return report
class FlowArcGeometry(ChartGeometry, Polygon):
    """
    Geometry to represent a flow from a begin node to an end node.
    """
    chart = Field(""" The network chart that defines this `NodeGeometry`'s behavior. """)
    begin_node = Field(""" A `NodeGeometry` instance where this `FlowGeometry` starts. """)
    end_node = Field(""" A `NodeGeometry` instance where this `FlowGeometry` ends. """)
    size_begin = Field(""" Size at beginning. """)
    size_end = Field(""" Size at end. """)
    label = LambdaField(
        """ Label can be constructed from nodes. """,
        lambda self: (self.begin_node.label, self.end_node.label))

    @lazyfield
    def size(self):
        # A flow's nominal size is its size at the begin node.
        return self.size_begin

    @lazyfield
    def identifier(self):
        """
        Identifier can be used as a key in a mapping providing features for this `Geometry`.
        """
        return (self.begin_node.identifier, self.end_node.identifier)

    @lazyfield
    def position(self):
        """
        A flow lies between its begin and end nodes.
        """
        return (self.begin_node.position, self.end_node.position)

    @lazyfield
    def sides(self):
        """
        The four sides of this flow's polygon: an arc at the begin node,
        a forward curve, an arc at the end node, and a backward curve.
        """
        arc_begin = self.chart.get_flow_position(self.begin_node, self)
        begin_base = Path(
            label=self.label,
            vertices=self.chart.arc(
                self.chart.outer_circle.radius,
                #self.begin_node.shape.radial[0],
                arc_begin[0],
                arc_begin[1]))
        if self.begin_node == self.end_node:
            # Self-loop: straight radial sides between the circles.
            # NOTE(review): `self.inner_circle` below has no `.chart`
            # prefix, unlike every other use in this class --- unless
            # `ChartGeometry`/`Polygon` define it, this branch raises
            # AttributeError; verify.
            side_forward = Path(
                label=self.label,
                vertices=[
                    self.chart.point_at(self.begin_node.shape.radial[0],
                                        arc_begin[1]),
                    self.chart.point_at(self.inner_circle.radius,
                                        arc_begin[1])
                ])
            end_base = Path(label=self.label,
                            vertices=self.chart.arc(
                                self.chart.inner_circle.radius,
                                arc_begin[1],
                                arc_begin[0]))
            # NOTE(review): the inner `point_at` result (a 2D point) is
            # passed as the angle argument of the outer `point_at`, and
            # this Path has a single vertex --- this looks malformed;
            # confirm intended geometry before relying on this branch.
            side_backward = Path(label=self.label,
                                 vertices=[
                                     self.chart.point_at(
                                         self.chart.inner_circle.radius,
                                         self.chart.point_at(
                                             self.begin_node.shape.radial[0],
                                             arc_begin[0]))
                                 ])
        else:
            arc_end = self.chart.get_flow_position(self.end_node, self)
            side_forward = Path(label=self.label,
                                vertices=self.chart.flow_curve(
                                    self.chart.inner_circle.radius,
                                    arc_begin[1],
                                    arc_end[0]))
            end_base = Path(label=self.label,
                            vertices=self.chart.arc(
                                self.chart.inner_circle.radius,
                                arc_end[0],
                                arc_end[1]))
            side_backward = Path(label=self.label,
                                 vertices=self.chart.flow_curve(
                                     self.chart.inner_circle.radius,
                                     arc_end[1],
                                     arc_begin[0]))
        return [begin_base, side_forward, end_base, side_backward]

    @lazyfield
    def curve(self):
        """
        A curve along the middle of this `FlowGeometry` instance.
        """
        arc_begin =\
            self.chart.get_flow_position(self.begin_node, self)
        angle_begin =\
            (arc_begin[0] + arc_begin[1]) / 2
        if self.begin_node == self.end_node:
            # NOTE(review): `begin_vertex` is not defined anywhere in this
            # class --- this self-loop branch raises NameError if taken.
            return Path(label="{}_curve".format(self.label),
                        vertices=[
                            begin_vertex,
                            self.chart.point_at(
                                self.begin_node.shape.radial[0],
                                angle_begin),
                            self.chart.point_at(
                                self.chart.inner_circle.radius,
                                angle_begin)
                        ])
        arc_end =\
            self.chart.get_flow_position(self.end_node, self)
        angle_end =\
            (arc_end[0] + arc_end[1]) / 2
        vertices =\
            self.chart.flow_curve(
                self.chart.inner_circle.radius,
                angle_begin,
                angle_end)
        return Path(label="{}_curve".format(self.label),
                    vertices=vertices)

    def draw(self, axes, *args, **kwargs):
        """
        Draw the `Polygon` associated with this `FlowGeometry` instance, and then draw an arrow over it.

        NOTE(review): the arrow itself is commented out below, so today
        this only draws the polygon and returns the axes.
        """
        super().draw(*args, **kwargs)
        #self.curve.draw(*args, **kwargs)
        N = len(self.curve.vertices)
        n = -1 #np.int32(-0.1 * N)
        #arrow_start = self.curve.vertices[n]
        arc_end =\
            self.chart.get_flow_position(self.end_node, self)
        angle_end =\
            (arc_end[0] + arc_end[1]) / 2
        arrow_start =\
            self.chart.point_at(
                self.end_node.position.radial - self.end_node.size.radial,
                angle_end)
        # Direction along the last segment of the flow's mid-curve.
        arrow_direction =\
            self.curve.vertices[n] - self.curve.vertices[n-1]
        arrow_end = arrow_start + arrow_direction
        arrow_direction = arrow_end - arrow_start
        color = self.facecolor
        # NOTE(review): mutates `self.facecolor` in place via alias ---
        # fails if facecolor is an immutable tuple; confirm it is a list
        # or array with an alpha channel at index 3.
        color[3] = 0.5
        # axes.arrow(
        #     arrow_start[0], arrow_start[1],
        #     arrow_direction[0], arrow_direction[1],
        #     head_width=self.size, head_length=self.end_node.size.radial,
        #     fc=color,
        #     ec="gray")#self.facecolor)
        return axes