Code example #1
class PatternDrivenAnalysis(param.Parameterized):
    """
    Abstract base class for various stimulus-response types of analysis.

    This type of analysis consists of presenting a set of input
    patterns and collecting the responses to each one, which one will
    often want to do in a way that does not affect the current state
    of the network.

    To achieve this, the class defines several types of hooks where
    arbitrary function objects (i.e., callables) can be registered.
    These hooks are generally used to ensure that unrelated previous
    activity is eliminated, that subsequent patterns do not interact,
    and that the initial state is restored after analysis.

    Any subclasses must ensure that these hook lists are run at the
    appropriate stage in their processing, using e.g.
    "for f in some_hook_list: f()".
    """

    __abstract = True

    pre_analysis_session_hooks = param.HookList(default=[],instantiate=False,doc="""
        List of callable objects to be run before an analysis session begins.""")

    pre_presentation_hooks = param.HookList(default=[],instantiate=False,doc="""
        List of callable objects to be run before each pattern is presented.""")

    post_presentation_hooks = param.HookList(default=[],instantiate=False,doc="""
        List of callable objects to be run after each pattern is presented.""")

    post_analysis_session_hooks = param.HookList(default=[],instantiate=False,doc="""
        List of callable objects to be run after an analysis session ends.""")
Code example #2
File: annotation.py  Project: yuhongjiu/holoviews
class DivPlot(BokehPlot, GenericElementPlot, AnnotationPlot):

    height = param.Number(default=300)

    width = param.Number(default=300)

    finalize_hooks = param.HookList(default=[],
                                    doc="""
        Deprecated; use hooks options instead.""")

    hooks = param.HookList(default=[],
                           doc="""
        Optional list of hooks called when finalizing a plot. The
        hook is passed the plot object and the displayed element, and
        other plotting handles can be accessed via plot.handles.""")

    _stream_data = False

    def __init__(self, element, plot=None, **params):
        super(DivPlot, self).__init__(element, **params)
        self.callbacks = []
        self.handles = {} if plot is None else self.handles['plot']
        self.static = len(self.hmap) == 1 and len(self.keys) == len(self.hmap)

    def get_data(self, element, ranges, style):
        return element.data, {}, style

    def initialize_plot(self, ranges=None, plot=None, plots=None, source=None):
        """
        Initializes a new plot object with the last available frame.
        """
        # Get element key and ranges for frame
        element = self.hmap.last
        key = self.keys[-1]
        self.current_frame = element
        self.current_key = key

        data, _, _ = self.get_data(element, ranges, {})
        div = BkDiv(text=data, width=self.width, height=self.height)
        self.handles['plot'] = div
        self._execute_hooks(element)
        self.drawn = True
        return div

    def update_frame(self, key, ranges=None, plot=None):
        """
        Updates an existing plot with data corresponding
        to the key.
        """
        element = self._get_frame(key)
        text, _, _ = self.get_data(element, ranges, {})
        self.handles['plot'].text = text
Code example #3
class ChannelGenerator(PatternGenerator):
    """
    Abstract base class for patterns supporting multiple channels natively.
    """

    __abstract = True

    channel_transforms = param.HookList(class_=ChannelTransform,
                                        default=[],
                                        doc="""
        Optional functions to apply post processing to the set of channels.""")

    def __init__(self, **params):
        self._original_channel_data = []  # channel data before processing
        self._channel_data = []  # channel data after processing
        super(ChannelGenerator, self).__init__(**params)

    def channels(self, use_cached=False, **params_to_override):
        res = collections.OrderedDict()

        if not use_cached:
            default = self(**params_to_override)
            res['default'] = default
        else:
            res['default'] = None

        for i in range(len(self._channel_data)):
            res[i] = self._channel_data[i]

        return res

    def num_channels(self):
        return len(self._channel_data)
Code example #4
File: base.py  Project: jmosbacher/eve-panel
class EveAuthBase(EveModelBase):
    """Base class for Eve authentication scheme

    Inheritance:
        param.Parameterized:

    """

    post_login_hooks = param.HookList([])

    def get_headers(self):
        """Generate auth headers for HTTP requests.

        Returns:
            dict: Auth related headers to be included in all requests.
        """
        raise NotImplementedError

    def login(self):
        """perform any actions required to aquire credentials.

        Returns:
            bool: whether login was successful
        """
        raise NotImplementedError

    def login_cli(self):
        return self.login()

    def login_notebook(self):
        return self.login()

    def logout(self):
        """perform any actions required to logout.

        Returns:
            bool: whether logout was successful
        """
        raise NotImplementedError

    @property
    def logged_in(self):
        raise NotImplementedError

    def set_credentials(self, **credentials):
        """Set the access credentials manually.
        """
        for k, v in credentials.items():
            setattr(self, k, v)

    def credentials_view(self):
        raise NotImplementedError

    def post_login(self):
        for hook in self.post_login_hooks:
            hook(self)
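A minimal sketch of wiring a post-login hook into this scheme; the TokenAuth subclass, its token field, and the announce hook are hypothetical, and the sketch assumes EveAuthBase (and its EveModelBase parent) can be instantiated as declared above:

import param

class TokenAuth(EveAuthBase):
    """Hypothetical concrete scheme used only to illustrate post_login_hooks."""
    token = param.String(default="")

    def login(self):
        self.token = "secret-token"   # stand-in for a real credential exchange
        self.post_login()             # runs every registered hook with self
        return True

    def get_headers(self):
        return {"Authorization": "Bearer " + self.token}

def announce(auth):
    # Each hook receives the auth instance itself (see post_login above).
    print("logged in, headers:", auth.get_headers())

auth = TokenAuth(post_login_hooks=[announce])
auth.login()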
Code example #5
class _BigDumbParams(param.Parameterized):
    action = param.Action(default_action, allow_None=True)
    array = param.Array(np.array([1.0, 2.0]))
    boolean = param.Boolean(True, allow_None=True)
    callable = param.Callable(default_action, allow_None=True)
    class_selector = param.ClassSelector(int, is_instance=False, allow_None=True)
    color = param.Color("#FFFFFF", allow_None=True)
    composite = param.Composite(["action", "array"], allow_None=True)
    try:
        data_frame = param.DataFrame(
            pd.DataFrame({"A": 1.0, "B": np.arange(5)}), allow_None=True
        )
    except TypeError:
        data_frame = param.DataFrame(pd.DataFrame({"A": 1.0, "B": np.arange(5)}))
    date = param.Date(datetime.now(), allow_None=True)
    date_range = param.DateRange((datetime.min, datetime.max), allow_None=True)
    dict_ = param.Dict({"foo": "bar"}, allow_None=True, doc="dict means dictionary")
    dynamic = param.Dynamic(default=default_action, allow_None=True)
    file_selector = param.FileSelector(
        os.path.join(FILE_DIR_DIR, "LICENSE"),
        path=os.path.join(FILE_DIR_DIR, "*"),
        allow_None=True,
    )
    filename = param.Filename(
        os.path.join(FILE_DIR_DIR, "LICENSE"), allow_None=True
    )
    foldername = param.Foldername(os.path.join(FILE_DIR_DIR), allow_None=True)
    hook_list = param.HookList(
        [CallableObject(), CallableObject()], class_=CallableObject, allow_None=True
    )
    integer = param.Integer(10, allow_None=True)
    list_ = param.List([1, 2, 3], allow_None=True, class_=int)
    list_selector = param.ListSelector([2, 2], objects=[1, 2, 3], allow_None=True)
    magnitude = param.Magnitude(0.5, allow_None=True)
    multi_file_selector = param.MultiFileSelector(
        [],
        path=os.path.join(FILE_DIR_DIR, "*"),
        allow_None=True,
        check_on_set=True,
    )
    number = param.Number(-10.0, allow_None=True, doc="here is a number")
    numeric_tuple = param.NumericTuple((5.0, 10.0), allow_None=True)
    object_selector = param.ObjectSelector(
        False, objects={"False": False, "True": 1}, allow_None=True
    )
    path = param.Path(os.path.join(FILE_DIR_DIR, "LICENSE"), allow_None=True)
    range_ = param.Range((-1.0, 2.0), allow_None=True)
    series = param.Series(pd.Series(range(5)), allow_None=True)
    string = param.String("foo", allow_None=True, doc="this is a string")
    tuple_ = param.Tuple((3, 4, "fi"), allow_None=True)
    x_y_coordinates = param.XYCoordinates((1.0, 2.0), allow_None=True)
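The hook_list declaration above constrains entries via class_=CallableObject. A standalone sketch of that validation behaviour, using a hypothetical Callback class in place of the test fixture's CallableObject (the exact exception raised for a bad entry varies across param versions):

import param

class Callback(object):
    def __call__(self):
        print("callback fired")

class Hooked(param.Parameterized):
    hooks = param.HookList(default=[], class_=Callback)

h = Hooked(hooks=[Callback(), Callback()])  # accepted: every entry is a Callback instance
for hook in h.hooks:
    hook()

try:
    h.hooks = [lambda: None]                # rejected: callable, but not a Callback
except (TypeError, ValueError) as err:      # exception class depends on the param version
    print("rejected:", err)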
Code example #6
File: tabular.py  Project: fmaussion/holoviews
class TablePlot(BokehPlot, GenericElementPlot):

    height = param.Number(default=None)

    finalize_hooks = param.HookList(default=[], doc="""
        Deprecated; use hooks options instead.""")

    hooks = param.HookList(default=[], doc="""
        Optional list of hooks called when finalizing a plot. The
        hook is passed the plot object and the displayed element, and
        other plotting handles can be accessed via plot.handles.""")

    width = param.Number(default=400)

    style_opts = (
        ['row_headers', 'selectable', 'editable',
         'sortable', 'fit_columns', 'scroll_to_selection'] +
        (['index_position'] if bokeh_version >= '0.12.15' else [])
        )

    _stream_data = True

    def __init__(self, element, plot=None, **params):
        super(TablePlot, self).__init__(element, **params)
        self.handles = {} if plot is None else self.handles['plot']
        element_ids = self.hmap.traverse(lambda x: id(x), [Dataset, ItemTable])
        self.static = len(set(element_ids)) == 1 and len(self.keys) == len(self.hmap)
        self.callbacks = self._construct_callbacks()
        self.streaming = [s for s in self.streams if isinstance(s, Buffer)]
        self.static_source = False

    def get_data(self, element, ranges, style):
        return ({dimension_sanitizer(d.name): element.dimension_values(d)
                 for d in element.dimensions()}, {}, style)


    def initialize_plot(self, ranges=None, plot=None, plots=None, source=None):
        """
        Initializes a new plot object with the last available frame.
        """
        # Get element key and ranges for frame
        element = self.hmap.last
        key = self.keys[-1]
        self.current_frame = element
        self.current_key = key

        style = self.lookup_options(element, 'style')[self.cyclic_index]
        data, _, style = self.get_data(element, ranges, style)
        if source is None:
            source = self._init_datasource(data)
        self.handles['source'] = self.handles['cds'] = source

        columns = self._get_columns(element, data)
        style['reorderable'] = False
        table = DataTable(source=source, columns=columns, height=self.height,
                          width=self.width, **style)
        self.handles['table'] = table
        self.handles['glyph_renderer'] = table
        self._execute_hooks(element)
        self.drawn = True

        for cb in self.callbacks:
            cb.initialize()

        title = self._get_title_div(self.keys[-1], '10pt')
        if title:
            plot = Column(title, table)
            self.handles['title'] = title
        else:
            plot = table
        self.handles['plot'] = plot
        return plot

    def _get_columns(self, element, data):
        columns = []
        for d in element.dimensions():
            col = dimension_sanitizer(d.name)
            kind = data[col].dtype.kind
            if kind == 'i':
                formatter = NumberFormatter()
                editor = IntEditor()
            elif kind == 'f':
                formatter = NumberFormatter(format='0,0.0[00000]')
                editor = NumberEditor()
            elif kind == 'M' or (kind == 'O' and len(data[col]) and type(data[col][0]) in datetime_types):
                dimtype = element.get_dimension_type(col)
                dformat = Dimension.type_formatters.get(dimtype, '%Y-%m-%d %H:%M:%S')
                formatter = DateFormatter(format=dformat)
                editor = DateEditor()
            else:
                formatter = StringFormatter()
                editor = StringEditor()
            column = TableColumn(field=dimension_sanitizer(d.name), title=d.pprint_label,
                                 editor=editor, formatter=formatter)
            columns.append(column)
        return columns


    def update_frame(self, key, ranges=None, plot=None):
        """
        Updates an existing plot with data corresponding
        to the key.
        """
        element = self._get_frame(key)
        self._get_title_div(key, '12pt')

        # Cache frame object id to skip updating data if unchanged
        previous_id = self.handles.get('previous_id', None)
        current_id = element._plot_id
        self.handles['previous_id'] = current_id
        self.static_source = (self.dynamic and (current_id == previous_id))
        if (element is None or (not self.dynamic and self.static) or
            (self.streaming and self.streaming[0].data is self.current_frame.data
             and not self.streaming[0]._triggering) or self.static_source):
            return
        source = self.handles['source']
        style = self.lookup_options(element, 'style')[self.cyclic_index]
        data, _, style = self.get_data(element, ranges, style)
        columns = self._get_columns(element, data)
        self.handles['table'].columns = columns
        self._update_datasource(source, data)
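As the hooks docstring above says, each hook receives the plot and the displayed element, and the underlying bokeh models are reachable through plot.handles ('table' is the DataTable created in initialize_plot). A hedged usage sketch, assuming holoviews with the bokeh backend; the row-height tweak is an arbitrary illustration:

import holoviews as hv
hv.extension('bokeh')

def taller_rows(plot, element):
    # plot.handles maps names to bokeh models; see initialize_plot above.
    plot.handles['table'].row_height = 40

table = hv.Table({'x': [1, 2, 3], 'y': ['a', 'b', 'c']}, 'x', 'y')
table = table.opts(hooks=[taller_rows])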
Code example #7
class PatternGenerator(param.Parameterized):
    """
    A class hierarchy for callable objects that can generate 2D patterns.

    Once initialized, PatternGenerators can be called to generate a
    value or a matrix of values from a 2D function, typically
    accepting at least x and y.

    A PatternGenerator's Parameters can make use of Parameter's
    precedence attribute to specify the order in which they should
    appear, e.g. in a GUI. The precedence attribute has a nominal
    range of 0.0 to 1.0, with ordering going from 0.0 (first) to 1.0
    (last), but any value is allowed.

    The orientation and layout of the pattern matrices are defined by
    the SheetCoordinateSystem class; see that class for details.

    Note that not every parameter defined for a PatternGenerator will
    be used by every subclass.  For instance, a Constant pattern will
    ignore the x, y, orientation, and size parameters, because the
    pattern does not vary with any of those parameters.  However,
    those parameters are still defined for all PatternGenerators, even
    Constant patterns, to allow PatternGenerators to be scaled, rotated,
    translated, etc. uniformly.
    """
    __abstract = True

    bounds  = BoundingRegionParameter(
        default=BoundingBox(points=((-0.5,-0.5), (0.5,0.5))),precedence=-1,
        doc="BoundingBox of the area in which the pattern is generated.")

    xdensity = param.Number(default=10,bounds=(0,None),precedence=-1,doc="""
        Density (number of samples per 1.0 length) in the x direction.""")

    ydensity = param.Number(default=10,bounds=(0,None),precedence=-1,doc="""
        Density (number of samples per 1.0 length) in the y direction.
        Typically the same as the xdensity.""")

    x = param.Number(default=0.0,softbounds=(-1.0,1.0),precedence=0.20,doc="""
        X-coordinate location of pattern center.""")

    y = param.Number(default=0.0,softbounds=(-1.0,1.0),precedence=0.21,doc="""
        Y-coordinate location of pattern center.""")


    position = param.Composite(attribs=['x','y'],precedence=-1,doc="""
        Coordinates of location of pattern center.
        Provides a convenient way to set the x and y parameters together
        as a tuple (x,y), but shares the same actual storage as x and y
        (and thus only position OR x and y need to be specified).""")

    orientation = param.Number(default=0.0,softbounds=(0.0,2*pi),precedence=0.40,doc="""
        Polar angle of pattern, i.e., the orientation in the Cartesian coordinate
        system, with zero at 3 o'clock and increasing counterclockwise.""")

    size = param.Number(default=1.0,bounds=(0.0,None),softbounds=(0.0,6.0),
        precedence=0.30,doc="""Determines the overall size of the pattern.""")

    scale = param.Number(default=1.0,softbounds=(0.0,2.0),precedence=0.10,doc="""
        Multiplicative strength of input pattern, defaulting to 1.0""")

    offset = param.Number(default=0.0,softbounds=(-1.0,1.0),precedence=0.11,doc="""
        Additive offset to input pattern, defaulting to 0.0""")

    mask = param.Parameter(default=None,precedence=-1,doc="""
        Optional object (expected to be an array) with which to multiply the
        pattern array after it has been created, before any output_fns are
        applied. This can be used to shape the pattern.""")

    # Note that the class type is overridden to PatternGenerator below
    mask_shape = param.ClassSelector(param.Parameterized,default=None,precedence=0.06,doc="""
        Optional PatternGenerator used to construct a mask to be applied to
        the pattern.""")

    output_fns = param.HookList(default=[],class_=TransferFn,precedence=0.08,doc="""
        Optional function(s) to apply to the pattern array after it has been created.
        Can be used for normalization, thresholding, etc.""")


    def __init__(self,**params):
        super(PatternGenerator, self).__init__(**params)
        self.set_matrix_dimensions(self.bounds, self.xdensity, self.ydensity)


    def __call__(self,**params_to_override):
        """
        Call the subclass's 'function' method on a rotated and scaled coordinate system.

        Creates and fills an array with the requested pattern.  If
        called without any params, uses the values for the Parameters
        as currently set on the object. Otherwise, any params
        specified override those currently set on the object.
        """
        p=ParamOverrides(self,params_to_override)

        # CEBERRORALERT: position parameter is not currently
        # supported. We should delete the position parameter or fix
        # this.
        #
        # position=params_to_override.get('position',None) if position
        # is not None: x,y = position

        self._setup_xy(p.bounds,p.xdensity,p.ydensity,p.x,p.y,p.orientation)
        fn_result = self.function(p)
        self._apply_mask(p,fn_result)
        result = p.scale*fn_result+p.offset

        for of in p.output_fns:
            of(result)

        return result


    def _setup_xy(self,bounds,xdensity,ydensity,x,y,orientation):
        """
        Produce pattern coordinate matrices from the bounds and
        density (or rows and cols), and transforms them according to
        x, y, and orientation.
        """
        self.debug(lambda:"bounds=%s, xdensity=%s, ydensity=%s, x=%s, y=%s, orientation=%s"%(bounds,xdensity,ydensity,x,y,orientation))
        # Generate vectors representing coordinates at which the pattern
        # will be sampled.

        # CB: note to myself - use slice_._scs if supplied?
        x_points,y_points = SheetCoordinateSystem(bounds,xdensity,ydensity).sheetcoordinates_of_matrixidx()

        # Generate matrices of x and y sheet coordinates at which to
        # sample pattern, at the correct orientation
        self.pattern_x, self.pattern_y = self._create_and_rotate_coordinate_arrays(x_points-x,y_points-y,orientation)


    def function(self,p):
        """
        Function to draw a pattern that will then be scaled and rotated.

        Instead of implementing __call__ directly, PatternGenerator
        subclasses will typically implement this helper function used
        by __call__, because that way they can let __call__ handle the
        scaling and rotation for them.  Alternatively, __call__ itself
        can be reimplemented entirely by a subclass (e.g. if it does
        not need to do any scaling or rotation), in which case this
        function will be ignored.
        """
        raise NotImplementedError


    def _create_and_rotate_coordinate_arrays(self, x, y, orientation):
        """
        Create pattern matrices from x and y vectors, and rotate
        them to the specified orientation.
        """
        # Using this two-liner requires that x increase from left to
        # right and y decrease from left to right; I don't think it
        # can be rewritten in so little code otherwise - but please
        # prove me wrong.
        pattern_y = subtract.outer(cos(orientation)*y, sin(orientation)*x)
        pattern_x = add.outer(sin(orientation)*y, cos(orientation)*x)
        return pattern_x, pattern_y


    def _apply_mask(self,p,mat):
        """Create (if necessary) and apply the mask to the given matrix mat."""
        mask = p.mask
        ms=p.mask_shape
        if ms is not None:
            mask = ms(x=p.x+p.size*(ms.x*cos(p.orientation)-ms.y*sin(p.orientation)),
                      y=p.y+p.size*(ms.x*sin(p.orientation)+ms.y*cos(p.orientation)),
                      orientation=ms.orientation+p.orientation,size=ms.size*p.size,
                      bounds=p.bounds,ydensity=p.ydensity,xdensity=p.xdensity)
        if mask is not None:
            mat*=mask


    def set_matrix_dimensions(self, bounds, xdensity, ydensity):
        """
        Change the dimensions of the matrix into which the pattern will be drawn.
        Users of this class should call this method rather than changing
        the bounds, xdensity, and ydensity parameters directly.  Subclasses
        can override this method to update any internal data structures that
        may depend on the matrix dimensions.
        """
        self.bounds = bounds
        self.xdensity = xdensity
        self.ydensity = ydensity
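A sketch of plugging a custom output function into output_fns; it assumes the imagen package (where PatternGenerator and TransferFn are defined), and the Threshold class is hypothetical. Because of class_=TransferFn, entries must be TransferFn instances, and they are expected to modify the array in place:

import param
import imagen
from imagen.transferfn import TransferFn

class Threshold(TransferFn):
    """Hypothetical transfer function that zeroes values below a cutoff, in place."""
    cutoff = param.Number(default=0.2)

    def __call__(self, x):
        x[x < self.cutoff] = 0.0

pattern = imagen.Gaussian(size=0.5, output_fns=[Threshold(cutoff=0.1)])
arr = pattern()   # generated array with the threshold already applied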
Code example #8
File: sheet.py  Project: apdavison/topographica
class Sheet(EventProcessor,
            SheetCoordinateSystem):  # pylint: disable-msg=W0223
    """
    The generic base class for neural sheets.

    See SheetCoordinateSystem for how Sheet represents space, and
    EventProcessor for how Sheet handles time.

    output_fns are functions that take an activity matrix and produce
    an identically shaped output matrix. The default is having no
    output_fns.
    """
    __abstract = True

    nominal_bounds = BoundingRegionParameter(BoundingBox(radius=0.5),
                                             constant=True,
                                             doc="""
            User-specified BoundingBox of the Sheet coordinate area
            covered by this Sheet.  The left and right bounds--if
            specified--will always be observed, but the top and bottom
            bounds may be adjusted to ensure the density in the y
            direction is the same as the density in the x direction.
            In such a case, the top and bottom bounds are adjusted
            so that the center y point remains the same, and each
            bound is as close as possible to its specified value. The
            actual value of this Parameter is not adjusted, but the
            true bounds may be found from the 'bounds' attribute
            of this object.
            """)

    nominal_density = param.Number(default=10,
                                   constant=True,
                                   doc="""
            User-specified number of processing units per 1.0 distance
            horizontally or vertically in Sheet coordinates. The actual
            number may be different because of discretization; the matrix
            needs to tile the plane exactly, and for that to work the
            density might need to be adjusted.  For instance, an area of 3x2
            cannot have a density of 2 in each direction. The true density
            may be obtained from either the xdensity or ydensity attribute
            (since these are identical for a Sheet).
            """)

    plastic = param.Boolean(True,
                            doc="""
            Setting this to False tells the Sheet not to change its
            permanent state (e.g. any connection weights) based on
            incoming events.
            """)

    precedence = param.Number(default=0.1,
                              softbounds=(0.0, 1.0),
                              doc="""
            Allows a sorting order for Sheets, e.g. in the GUI.""")

    row_precedence = param.Number(default=0.5,
                                  softbounds=(0.0, 1.0),
                                  doc="""
            Allows grouping of Sheets before sorting precedence is
            applied, e.g. for two-dimensional plots in the GUI.""")

    layout_location = param.NumericTuple(default=(-1, -1),
                                         precedence=-1,
                                         doc="""
            Location for this Sheet in an arbitrary pixel-based space
            in which Sheets can be laid out for visualization.""")

    output_fns = param.HookList(
        default=[],
        class_=TransferFn,
        doc=
        "Output function(s) to apply (if apply_output_fns is true) to this Sheet's activity."
    )

    apply_output_fns = param.Boolean(
        default=True,
        doc="Whether to apply the output_fn after computing an Activity matrix."
    )

    def _get_density(self):
        return self.xdensity

    density = property(_get_density,
                       doc="""The sheet's true density (i.e. the
        xdensity, which is equal to the ydensity for a Sheet.)""")

    def __init__(self, **params):
        """
        Initialize this object as an EventProcessor, then also as
        a SheetCoordinateSystem with equal xdensity and ydensity.

        views is an AttrDict, which stores associated measurements,
        i.e. representations of the sheet for use by analysis or plotting
        code.
        """
        EventProcessor.__init__(self, **params)

        # Initialize this object as a SheetCoordinateSystem, with
        # the same density along y as along x.
        SheetCoordinateSystem.__init__(self, self.nominal_bounds,
                                       self.nominal_density)

        n_units = round((self.lbrt[2] - self.lbrt[0]) * self.xdensity, 0)
        if n_units < 1:
            raise ValueError(
                "Sheet bounds and density must be specified such that the "
                "sheet has at least one unit in each direction; "
                + self.name + " does not.")

        # setup the activity matrix
        self.activity = zeros(self.shape, activity_type)

        # For non-plastic inputs
        self.__saved_activity = []
        self._plasticity_setting_stack = []

        self.views = AttrDict()
        self.views['maps'] = AttrDict()
        self.views['curves'] = AttrDict()

    ### JABALERT: This should be deleted now that sheet_views is public
    ### JC: shouldn't we keep that, or at least write a function in
    ### utils that deletes a value in a dictionary without returning an
    ### error if the key is not in the dict?  I leave for the moment,
    ### and have to ask Jim to advise.
    def release_sheet_view(self, view_name):
        """
        Delete the dictionary entry with key entry 'view_name' to save
        memory.
        """
        if view_name in self.views.maps:
            del self.views.maps[view_name]

    # CB: what to call this? sheetcoords()? sheetcoords_of_grid()? idxsheetcoords()?
    def sheetcoords_of_idx_grid(self):
        """
        Return an array of x-coordinates and an array of y-coordinates
        corresponding to the activity matrix of the sheet.
        """
        nrows, ncols = self.activity.shape

        C, R = meshgrid(arange(ncols), arange(nrows))

        X, Y = self.matrixidx2sheet(R, C)
        return X, Y

    # CB: check whether we need this function any more.
    def row_col_sheetcoords(self):
        """
        Return an array of Y-coordinates corresponding to the rows of
        the activity matrix of the sheet, and an array of
        X-coordinates corresponding to the columns.
        """
        # The row and column centers are returned in matrix (not
        # sheet) order (hence the reversals below).
        nrows, ncols = self.activity.shape
        return self.matrixidx2sheet(arange(nrows - 1, -1, -1),
                                    arange(ncols))[::-1]

    # CBALERT: to be removed once other code uses
    # row_col_sheetcoords() or sheetcoords_of_idx_grid().
    def sheet_rows(self):
        return self.row_col_sheetcoords()[0]

    def sheet_cols(self):
        return self.row_col_sheetcoords()[1]

    # CEBALERT: haven't really thought about what to put in this. The
    # way it is now, subclasses could make a super.activate() call to
    # avoid repeating some stuff.
    def activate(self):
        """
        Collect activity from each projection, combine it to calculate
        the activity for this sheet, and send the result out.

        Subclasses will need to override this method to whatever it
        means to calculate activity in that subclass.
        """
        if self.apply_output_fns:
            for of in self.output_fns:
                of(self.activity)

        self.send_output(src_port='Activity', data=self.activity)

    def state_push(self):
        """
        Save the current state of this sheet to an internal stack.

        This method is used by operations that need to test the
        response of the sheet without permanently altering its state,
        e.g. for measuring maps or probing the current behavior
        non-invasively.  By default, only the activity pattern of this
        sheet is saved, but subclasses should add saving for any
        additional state that they maintain, or strange bugs are
        likely to occur.  The state can be restored using state_pop().

        Note that Sheets that do learning need not save the
        values of all connection weights, if any, because
        plasticity can be turned off explicitly.  Thus this method
        is intended only for shorter-term state.
        """
        self.__saved_activity.append(array(self.activity))
        EventProcessor.state_push(self)
        for of in self.output_fns:
            if hasattr(of, 'state_push'):
                of.state_push()

    def state_pop(self):
        """
        Pop the most recently saved state off the stack.

        See state_push() for more details.
        """
        self.activity = self.__saved_activity.pop()
        EventProcessor.state_pop(self)
        for of in self.output_fns:
            if hasattr(of, 'state_pop'):
                of.state_pop()

    def activity_len(self):
        """Return the number of items that have been saved by state_push()."""
        return len(self.__saved_activity)

    def override_plasticity_state(self, new_plasticity_state):
        """
        Temporarily override plasticity of medium and long term internal state.

        This function should be implemented by all subclasses so that
        it preserves the ability of the Sheet to compute activity,
        i.e. to operate over a short time scale, while preventing any
        lasting changes to the state (if new_plasticity_state=False).

        Any operation that does not have any lasting state, such as
        those affecting only the current activity level, should not
        be affected by this call.

        By default, simply saves a copy of the plastic flag to an
        internal stack (so that it can be restored by
        restore_plasticity_state()), and then sets plastic to
        new_plasticity_state.
        """
        self._plasticity_setting_stack.append(self.plastic)
        self.plastic = new_plasticity_state

    def restore_plasticity_state(self):
        """
        Restores plasticity of medium and long term internal state after
        a override_plasticity_state call.

        This function should be implemented by all subclasses to
        remove the effect of the most recent override_plasticity_state call,
        i.e. to restore plasticity of any type that was overridden.
        """
        self.plastic = self._plasticity_setting_stack.pop()

    def n_bytes(self):
        """
        Return a lower bound for the memory taken by this sheet, in bytes.

        Typically, this number will include the activity array and any
        similar arrays, plus any other significant data owned (in some
        sense) by this Sheet.  It will not usually include memory
        taken by the Python dictionary or various "housekeeping"
        attributes, which usually contribute only a small amount to
        the memory requirements.

        Subclasses should reimplement this method if they store a
        significant amount of data other than in the activity array.
        """
        return self.activity.nbytes

    def __getitem__(self, coords):
        metadata = AttrDict(precedence=self.precedence,
                            row_precedence=self.row_precedence,
                            timestamp=self.simulation.time())
        return SheetView(self.activity.copy(),
                         self.bounds,
                         title=self.name,
                         metadata=metadata)[coords]
Code example #9
class GPUSparseCFProjection(SparseCFProjection):
    """
    A projection composed of SparseConnectionFields from a Sheet into
    a ProjectionSheet, calculated using a GPU.

    Any subclass has to implement the interface activate(self) that
    computes the response from the input and stores it in the activity
    array.
    """

    cf_type = param.Parameter(default=SparseConnectionField,
                              doc="""
        Type of ConnectionField to use when creating individual CFs.""")

    learning_fn = param.Callable(default=CFPLF_Hebbian_Sparse_GPU,
                                 doc="""
        Function for computing changes to the weights based on one activation step."""
                                 )

    response_fn = param.Callable(default=CFPRF_DotProduct_Sparse_GPU,
                                 doc="""
        Function for computing the Projection response to an input pattern.""")

    weights_output_fns = param.HookList(
        default=[CFPOF_DivisiveNormalizeL1_Sparse_GPU],
        doc="""
        Functions applied to each CF after learning.""")

    initialized = param.Boolean(default=False)

    def __init__(self, **params):
        '''
        Hack-ish way to avoid initialisation until the weights are transferred:
        '''
        should_apply = self.apply_output_fns_init
        params['apply_output_fns_init'] = False

        super(GPUSparseCFProjection, self).__init__(**params)
        # Transferring the weights:
        self.pycuda_stream = cuda.Stream()
        self.weights_gpu = cusparse.CSR.to_CSR(
            self.weights.toSparseArray().transpose())
        # Getting the row and column indices for the *transposed* matrix. Used for Hebbian learning and normalisation:
        nzcols, nzrows = self.weights.nonzero()
        tups = sorted(zip(nzrows, nzcols))
        nzrows = [x[0] for x in tups]
        nzcols = [x[1] for x in tups]
        # Allocate a page-locked piece of memory for the activity so that the
        # GPU can transfer data to main memory without involving the CPU:
        self.activity = cuda.pagelocked_empty(self.activity.shape, np.float32)
        self.activity_gpu_buffer = gpuarray.zeros(
            shape=(self.weights_gpu.shape[0], ), dtype=np.float32)

        self.input_buffer_pagelocked = cuda.pagelocked_empty(
            shape=(self.weights_gpu.shape[1], ),
            dtype=np.float32,
            mem_flags=cuda.host_alloc_flags.WRITECOMBINED)
        self.input_buffer = gpuarray.zeros(shape=(self.weights_gpu.shape[1], ),
                                           dtype=np.float32)

        self.norm_total_gpu = gpuarray.zeros(
            shape=(self.weights_gpu.shape[0], ), dtype=np.float32)

        # Getting them on the GPU:
        self.nzcount = self.weights.getnnz()
        self.nzrows_gpu = gpuarray.to_gpu(np.array(nzrows, np.int32))
        self.nzcols_gpu = gpuarray.to_gpu(np.array(nzcols, np.int32))
        # Helper array for normalization:
        self.norm_ones_gpu = gpuarray.to_gpu(
            np.array([1.0] * self.weights_gpu.shape[1], np.float32))
        # Kernel that applies the normalisation:
        self.normalize_kernel = ElementwiseKernel(
            "int *nzrows, float *norm_total, float *weights",
            "weights[i] *= norm_total[nzrows[i]]", "divisive_normalize")
        # Kernel that calculates the learning:
        self.hebbian_kernel = ElementwiseKernel(
            "float single_conn_lr, int *row, int *col, float *src_activity, float *dest_activity, float *result",
            "result[i] += single_conn_lr * src_activity[col[i]] * dest_activity[row[i]]",
            "hebbian_learning")

        params['apply_output_fns_init'] = should_apply
        self.apply_output_fns_init = should_apply
        if self.apply_output_fns_init:
            self.apply_learn_output_fns()
Code example #10
File: plot.py  Project: laurentperrinet/holoviews
class Plot(param.Parameterized):
    """
    A Plot object returns either a matplotlib figure object (when
    called or indexed) or a matplotlib animation object as
    appropriate. Plots take element objects such as Image,
    Contours or Points as inputs and plots them in the
    appropriate format. As views may vary over time, all plots support
    animation via the anim() method.
    """

    figure_bounds = param.NumericTuple(default=(0.15, 0.15, 0.85, 0.85),
                                       doc="""
        The bounds of the figure as a 4-tuple of the form
        (left, bottom, right, top), defining the size of the border
        around the subplots.""")

    figure_size = param.NumericTuple(default=(4, 4),
                                     doc="""
        The matplotlib figure size in inches.""")

    finalize_hooks = param.HookList(default=[],
                                    doc="""
        Optional list of hooks called when finalizing an axis.
        The hook is passed the full set of plot handles and the
        displayed object.""")

    latex = param.Boolean(default=False,
                          doc="""
        Whether to use LaTeX text.""")

    normalize = param.Boolean(default=True,
                              doc="""
        Whether to compute ranges across all Elements at this level
        of plotting. Allows selecting normalization at different levels
        for nested data containers.""")

    projection = param.ObjectSelector(default=None,
                                      objects=['3d', 'polar', None],
                                      doc="""
        The projection of the plot axis, default of None is equivalent to
        2D plot, 3D and polar plots are also supported.""")

    rcparams = param.Dict(default={},
                          doc="""
        matplotlib rc parameters to apply to the figure.""")

    size = param.Integer(default=100,
                         bounds=(1, None),
                         doc="""
        Size relative to the supplied figure size in percent.""")

    show_frame = param.Boolean(default=True,
                               doc="""
        Whether or not to show a complete frame around the plot.""")

    show_title = param.Boolean(default=True,
                               doc="""
        Whether to display the plot title.""")

    title_format = param.String(default="{label} {group}",
                                doc="""
        The formatting string for the title of this plot.""")

    # A list of matplotlib keyword arguments that may be supplied via a
    # style options object. Each subclass should override this
    # parameter to list every option that works correctly.
    style_opts = []

    # A mapping from ViewableElement types to their corresponding side plot types
    sideplots = {}

    def __init__(self,
                 figure=None,
                 axis=None,
                 dimensions=None,
                 layout_dimensions=None,
                 subplots=None,
                 uniform=True,
                 keys=None,
                 subplot=False,
                 **params):
        self.subplots = subplots
        self.subplot = figure is not None or subplot
        self.dimensions = dimensions
        self.layout_dimensions = layout_dimensions
        self.keys = keys
        self.uniform = uniform

        self._create_fig = True
        self.drawn = False
        # List of handles to matplotlib objects for animation update
        self.handles = {} if figure is None else {'fig': figure}

        super(Plot, self).__init__(**params)
        size_scale = self.size / 100.
        self.figure_size = (self.figure_size[0] * size_scale,
                            self.figure_size[1] * size_scale)
        self.handles['axis'] = self._init_axis(axis)

    def compute_ranges(self, obj, key, ranges):
        """
        Given an object, a specific key and the normalization options
        this method will find the specified normalization options on
        the appropriate OptionTree, group the elements according to
        the selected normalization option (i.e. either per frame or
        over the whole animation) and finally compute the dimension
        ranges in each group. The new set of ranges is returned.
        """
        all_table = all(
            isinstance(el, Table)
            for el in obj.traverse(lambda x: x, [Element]))
        if obj is None or not self.normalize or all_table:
            return OrderedDict()
        # Get inherited ranges
        ranges = {} if ranges is None else dict(ranges)

        # Get element identifiers from current object and resolve
        # with selected normalization options
        norm_opts = self._get_norm_opts(obj)

        # Traverse displayed object if normalization applies
        # at this level, and ranges for the group have not
        # been supplied from a composite plot
        return_fn = lambda x: x if isinstance(x, Element) else None
        for group, (groupwise, mapwise) in norm_opts.items():
            if group in ranges:
                continue  # Skip if ranges are already computed
            elif mapwise:  # Traverse to get all elements
                elements = obj.traverse(return_fn, [group])
            elif key is not None:  # Traverse to get elements for each frame
                elements = self._get_frame(key).traverse(return_fn, [group])
            if groupwise or (mapwise and isinstance(
                    obj, HoloMap)):  # Compute new ranges
                self._compute_group_range(group, elements, ranges)
        return ranges

    def _get_norm_opts(self, obj):
        """
        Gets the normalization options for a LabelledData object by
        traversing the object to find elements and their ids.
        The id is then used to select the appropriate OptionsTree,
        accumulating the normalization options into a dictionary.
        Returns a dictionary of normalization options for each
        element in the tree.
        """
        norm_opts = {}

        # Get all elements' type.group.label specs and ids
        type_val_fn = lambda x: (x.id, (type(x).__name__, valid_identifier(x.group),
                                        valid_identifier(x.label))) \
            if isinstance(x, Element) else None
        element_specs = {(idspec[0], idspec[1])
                         for idspec in obj.traverse(type_val_fn)
                         if idspec is not None}

        # Group elements specs by ID and override normalization
        # options sequentially
        key_fn = lambda x: -1 if x[0] is None else x[0]
        id_groups = groupby(sorted(element_specs, key=key_fn), key_fn)
        for gid, element_spec_group in id_groups:
            gid = None if gid == -1 else gid
            group_specs = [el for _, el in element_spec_group]
            optstree = Store.custom_options.get(gid, Store.options)
            # Get the normalization options for the current id
            # and match against customizable elements
            for opts in optstree:
                path = tuple(opts.path.split('.')[1:])
                applies = any(path == spec[:i] for spec in group_specs
                              for i in range(1, 4))
                if applies and 'norm' in opts.groups:
                    nopts = opts['norm'].options
                    if 'groupwise' in nopts or 'mapwise' in nopts:
                        norm_opts.update({
                            path: (opts['norm'].options.get('groupwise', True),
                                   opts['norm'].options.get('mapwise', True))
                        })
        element_specs = [spec for eid, spec in element_specs]
        norm_opts.update({
            spec: (True, True)
            for spec in element_specs
            if not any(spec[1:i] in norm_opts.keys() for i in range(1, 3))
        })
        return norm_opts

    @staticmethod
    def _compute_group_range(group, elements, ranges):
        # Iterate over all elements in a normalization group
        # and accumulate their ranges into the supplied dictionary.
        elements = [el for el in elements if el is not None]
        for el in elements:
            for dim in el.dimensions(label=True):
                dim_range = el.range(dim)
                if group not in ranges: ranges[group] = OrderedDict()
                if dim in ranges[group]:
                    ranges[group][dim] = find_minmax(ranges[group][dim],
                                                     dim_range)
                else:
                    ranges[group][dim] = dim_range

    def _get_frame(self, key):
        """
        Required on each Plot type to get the data corresponding
        just to the current frame out from the object.
        """
        pass

    def _frame_title(self, key, group_size=2):
        """
        Returns the formatted dimension group strings
        for a particular frame.
        """
        if self.layout_dimensions is not None:
            dimensions, key = zip(*self.layout_dimensions.items())
        elif not self.uniform or len(self) == 1 or self.subplot:
            return ''
        else:
            key = key if isinstance(key, tuple) else (key, )
            dimensions = self.dimensions
        dimension_labels = [
            dim.pprint_value_string(k) for dim, k in zip(dimensions, key)
        ]
        groups = [
            ', '.join(dimension_labels[i * group_size:(i + 1) * group_size])
            for i in range(len(dimension_labels))
        ]
        return '\n '.join(g for g in groups if g)

    def _init_axis(self, axis):
        """
        Return an axis which may need to be initialized from
        a new figure.
        """
        if not self.subplot and self._create_fig:
            rc_params = self.rcparams
            if self.latex:
                rc_params['text.usetex'] = True
            with matplotlib.rc_context(rc=rc_params):
                fig = plt.figure()
                self.handles['fig'] = fig
                l, b, r, t = self.figure_bounds
                fig.subplots_adjust(left=l, bottom=b, right=r, top=t)
                fig.set_size_inches(list(self.figure_size))
                axis = fig.add_subplot(111, projection=self.projection)
                axis.set_aspect('auto')

        return axis

    def _finalize_axis(self, key):
        """
        General method to finalize the axis and plot.
        """
        if 'title' in self.handles:
            self.handles['title'].set_visible(self.show_title)

        self.drawn = True
        if self.subplot:
            return self.handles['axis']
        else:
            plt.draw()
            fig = self.handles['fig']
            plt.close(fig)
            return fig

    def __getitem__(self, frame):
        """
        Get the matplotlib figure at the given frame number.
        """
        if frame > len(self):
            self.warning("Showing last frame available: %d" % len(self))
        if not self.drawn: self.handles['fig'] = self()
        self.update_frame(self.keys[frame])
        return self.handles['fig']

    def anim(self, start=0, stop=None, fps=30):
        """
        Method to return a matplotlib animation. The start and stop
        frames may be specified as well as the fps.
        """
        figure = self()
        anim = animation.FuncAnimation(figure,
                                       self.update_frame,
                                       frames=self.keys,
                                       interval=1000.0 / fps)
        # Close the figure handle
        plt.close(figure)
        return anim

    def __len__(self):
        """
        Returns the total number of available frames.
        """
        return len(self.keys)

    def __call__(self, ranges=None):
        """
        Return a matplotlib figure.
        """
        raise NotImplementedError

    def update_frame(self, key, ranges=None):
        """
        Updates the current frame of the plot.
        """
        raise NotImplementedError

    def update_handles(self, axis, view, key, ranges=None):
        """
        Should be called by the update_frame class to update
        any handles on the plot.
        """
        pass
Code example #11
class PlotGroup(param.Parameterized):
    """
    Container that has one or more Plots and also knows how to arrange
    the plots and other special parameters.
    """

    pre_plot_hooks = param.HookList(default=[],doc="""
        Commands to execute before updating this plot, e.g. to calculate sheet views.

        The commands can be any callable Python objects, i.e. any x for
        which x() is valid.  The initial value is determined by the
        template for this plot, but various arguments can be passed, a
        modified version substituted, etc.""")

    plot_hooks = param.HookList(default=[],doc="""
        Commands to execute when redrawing a plot rather than regenerating data.

        E.g., for a plot with data measured once but displayed one
        sheet or unit at a time, this command will be called whenever
        the sheet or coordinate of the unit to be plotted (or the
        simulator time) has changed.

        The commands can be any callable Python objects, i.e. any x for
        which x() is valid.  The initial value is determined by the
        template for this plot, but various arguments can be passed, a
        modified version substituted, etc.""")

    # I guess the interface for users of the class (I just mean methods
    # likely to be used) is:
    # make_plots()
    # scale_images()
    # + parameters

    # And the interface for subclasses (I mean methods likely to be
    # overridden) is:
    # _generate_plots()      - return the list of plots
    # _generate_labels()     - return the list of labels
    # _sort_plots()          - sort the list of plots
    # _exec_pre_plot_hooks() - run the pre_plot_hooks
    # _exec_plot_hooks()     - run the plot_hooks


    ##############################
    ########## interface for users

    def make_plots(self,update=True):
        """
        Create and scale the plots, after first executing the PlotGroup's pre_plot_hooks
        (if update is True) and plot_hooks.
        """
        if update:self._exec_pre_plot_hooks()
        self._exec_plot_hooks()
        self._create_images(update)
        self.scale_images()

    def scale_images(self,zoom_factor=None):
        """Scale the images by the given zoom factor, if appropriate; default is to do nothing."""
        pass


    ###################################
    ########## interface for subclasses

    def _generate_plots(self):
        """Return the list of Plots"""
        # subclasses may have dynamically generated Plots to add
        return self._static_plots[:]


    def _generate_labels(self):
        return [plot.label() for plot in self.plots]


    def _sort_plots(self):
        """Sort plots according to their precedence, then alphabetically."""
        self.plots.sort(_cmp_plot)


    def __init__(self,**params):
        super(PlotGroup,self).__init__(**params)
        self._static_plots = []
        self.plots = []
        self.labels = []
        self.time = None
        self.filesaver = PlotGroupSaver(self)

        # In the future, it might be good to be able to specify the
        # plot rows and columns using tuples.  For instance, if three
        # columns are desired with the plots laid out from left to
        # right, we could use (None, 3).  If three rows are desired
        # then (3, None).  Another method that would work is [3,2,4]
        # would have the first row with 3, the second row with 2, the
        # third row with 4, etc.  The default left-to-right ordering
        # in one row could perhaps be represented as (None, Inf).
        #
        # Alternatively, we could add another precedence value, so
        # that the usual precedence value controls where the plot
        # appears left to right, while a rowprecedence value would
        # control where it appears top to bottom.  All plots with the
        # same rowprecedence would go on the same row, and the actual
        # value of the rowprecedence would determine which row goes
        # first.


    def _exec_pre_plot_hooks(self,**kw):
        for f in self.pre_plot_hooks:
            f(**kw)


    def _exec_plot_hooks(self,**kw):
        for f in self.plot_hooks:
            f(**kw)


    # unlikely to be overridden?
    def _create_images(self,update):
        """
        Generate the sorted and scaled list of plots constituting the PlotGroup.
        """
        self.plots = [plot for plot in self._generate_plots() if plot is not None]

        # Suppress plots in the special case of plots not being updated
        # and having no resizable images, to suppress plotgroups that
        # have nothing but a color key
        resizeable_plots = [plot for plot in self.plots if plot.resize]
        if not update and not resizeable_plots:
            self.plots=[]

        # Take the timestamps from the underlying Plots
        timestamps = [plot.timestamp for plot in self.plots
                      if plot.timestamp >= 0]
        if len(timestamps)>0:
            self.time = max(timestamps)
            if max(timestamps) != min(timestamps):
                self.warning("Combining Plots from different times (%s,%s)" %
                             (min(timestamps),max(timestamps)))

        self._sort_plots()
        self.labels = self._generate_labels()
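A minimal sketch of how the two hook lists are meant to be used; it assumes topographica's plotting machinery is importable (PlotGroup's __init__ builds a PlotGroupSaver), and both hook bodies are placeholders for real measurement and redraw commands:

def measure_responses():
    print("pre_plot_hook: measuring sheet views")

def pick_unit_to_plot():
    print("plot_hook: refreshing the sheet/unit selection")

group = PlotGroup(pre_plot_hooks=[measure_responses],
                  plot_hooks=[pick_unit_to_plot])
group.make_plots(update=True)  # runs pre_plot_hooks, then plot_hooks, then builds the plots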
Code example #12
class LISSOM(SettlingCFSheet):
    output_fns = param.HookList(default=[PiecewiseLinear(lower_bound=0.1,upper_bound=0.65)])
Code example #13
File: annotators.py  Project: fagan2888/holoviews
class annotate(param.ParameterizedFunction):
    """
    The annotate function allows drawing, editing and annotating any
    given Element (if it is supported). The annotate function returns
    a Layout of the editable plot and an Overlay of table(s), which
    allow editing the data of the element. The edited and annotated
    data may be accessed using the element and selected properties.
    """

    annotator = param.Parameter(doc="""The current Annotator instance.""")

    annotations = param.ClassSelector(default=[],
                                      class_=(dict, list),
                                      doc="""
        Annotations to associate with each object.""")

    edit_vertices = param.Boolean(default=True,
                                  doc="""
        Whether to add tool to edit vertices.""")

    empty_value = param.Parameter(default=None,
                                  doc="""
        The value to insert on annotation columns when drawing a new
        element.""")

    num_objects = param.Integer(default=None,
                                bounds=(0, None),
                                doc="""
        The maximum number of objects to draw.""")

    show_vertices = param.Boolean(default=True,
                                  doc="""
        Whether to show vertices when drawing the Path.""")

    table_transforms = param.HookList(default=[],
                                      doc="""
        Transform(s) to apply to element when converting data to Table.
        The functions should accept the Annotator and the transformed
        element as input.""")

    table_opts = param.Dict(default={
        'editable': True,
        'width': 400
    },
                            doc="""
        Opts to apply to the editor table(s).""")

    vertex_annotations = param.ClassSelector(default=[],
                                             class_=(dict, list),
                                             doc="""
        Columns to annotate the Polygons with.""")

    vertex_style = param.Dict(default={'nonselection_alpha': 0.5},
                              doc="""
        Options to apply to vertices during drawing and editing.""")

    _annotator_types = OrderedDict()

    @property
    def annotated(self):
        annotated = self.annotator.object
        if Store.current_backend == 'bokeh':
            return annotated.opts(clone=True, tools=['hover'])

    @property
    def selected(self):
        selected = self.annotator.selected
        if Store.current_backend == 'bokeh':
            return selected.opts(clone=True, tools=['hover'])

    @classmethod
    def compose(cls, *annotators):
        """Composes multiple annotator layouts and elements

        The composed Layout will contain all the elements in the
        supplied annotators and an overlay of all editor tables.

        Args:
            annotators: Annotator layouts or elements to compose

        Returns:
            A new layout consisting of the overlaid plots and tables
        """
        layers = []
        tables = []
        for annotator in annotators:
            if isinstance(annotator, Layout):
                l, ts = annotator
                layers.append(l)
                tables += ts
            elif isinstance(annotator, annotate):
                layers.append(annotator.plot)
                tables += [t[0].object for t in annotator.editor]
            elif isinstance(annotator, (HoloMap, ViewableElement)):
                layers.append(annotator)
            else:
                raise ValueError("Cannot compose %s type with annotators." %
                                 type(annotator).__name__)
        tables = Overlay(tables, group='Annotator')
        return (Overlay(layers).collate() + tables)

    def __call__(self, element, **params):
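        # Treat a single element as a one-item overlay so the loop below handles both cases uniformly.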
        overlay = element if isinstance(element, Overlay) else [element]

        layers = []
        annotator_type = None
        for element in overlay:
            matches = []
            for eltype, atype in self._annotator_types.items():
                if isinstance(element, eltype):
                    matches.append(
                        (getmro(type(element)).index(eltype), atype))
            if matches:
                if annotator_type is not None:
                    msg = (
                        'An annotate call may only annotate a single element. '
                        'If you want to annotate multiple elements call annotate '
                        'on each one separately and then use the annotate.compose '
                        'method to combine them into a single layout.')
                    raise ValueError(msg)
                annotator_type = sorted(matches)[0][1]
                self.annotator = annotator_type(element, **params)
                tables = Overlay([t[0].object for t in self.annotator.editor],
                                 group='Annotator')
                layout = (self.annotator.plot + tables)
                layers.append(layout)
            else:
                layers.append(element)

        if annotator_type is None:
            obj = overlay if isinstance(overlay, Overlay) else element
            raise ValueError('Could not find an Element to annotate on '
                             '%s object.' % type(obj).__name__)

        if len(layers) == 1:
            return layers[0]
        return self.compose(*layers)
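
A hedged usage sketch for the annotate function above, following the annotator-instance pattern from the HoloViews documentation; the Points data and the 'Label' column are illustrative, and the bokeh backend is assumed to be active.

import holoviews as hv
hv.extension('bokeh')

points = hv.Points([(0.0, 0.0), (1.0, 1.0)])
annotator = annotate.instance()   # keep a handle so results can be read back
layout = annotator(points, annotations=['Label'], num_objects=10)
# After interactive editing, the results are available via the properties above:
# annotator.annotated (element with hover tool) and annotator.selected.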
Code example #14
class GPUSparseCFProjection(SparseCFProjection):
    """
    A projection composed of SparseConnectionFields from a Sheet into
    a ProjectionSheet, calculated using a GPU.

    Any subclass has to implement the interface activate(self) that
    computes the response from the input and stores it in the activity
    array.
    """

    cf_type = param.Parameter(default=SparseConnectionField,
                              doc="""
        Type of ConnectionField to use when creating individual CFs.""")

    learning_fn = param.Callable(default=CFPLF_Hebbian_Sparse_GPU,
                                 doc="""
        Function for computing changes to the weights based on one activation step."""
                                 )

    response_fn = param.Callable(default=CFPRF_DotProduct_Sparse_GPU,
                                 doc="""
        Function for computing the Projection response to an input pattern.""")

    weights_output_fns = param.HookList(
        default=[CFPOF_DivisiveNormalizeL1_Sparse_GPU],
        doc="""
        Functions applied to each CF after learning.""")

    initialized = param.Boolean(default=False)

    def __init__(self, **params):
        # Hack-ish way to avoid initialisation until the weights are transferred:
        should_apply = self.apply_output_fns_init
        params['apply_output_fns_init'] = False
        super(GPUSparseCFProjection, self).__init__(**params)
        # The sparse matrix is stored in COO format, used for Hebbian learning and normalisation:
        nzcols, nzrows, values = self.weights.getTriplets()
        tups = sorted(zip(nzrows, nzcols, values))
        nzrows = np.array([x[0] for x in tups], np.int32)
        nzcols = np.array([x[1] for x in tups], np.int32)
        values = np.array([x[2] for x in tups], np.float32)
        # Getting them on the GPU:
        self.nzcount = self.weights.getnnz()
        self.nzrows_gpu = gpuarray.to_gpu(nzrows)
        self.nzcols_gpu = gpuarray.to_gpu(nzcols)
        # Setting the projection weights in CSR format for dot product calculation:
        rowPtr = cusparse.coo2csr(self.nzrows_gpu, self.weights.shape[1])
        descrA = cusparse.cusparseCreateMatDescr()
        cusparse.cusparseSetMatType(descrA,
                                    cusparse.CUSPARSE_MATRIX_TYPE_GENERAL)
        cusparse.cusparseSetMatIndexBase(descrA,
                                         cusparse.CUSPARSE_INDEX_BASE_ZERO)

        self.weights_gpu = cusparse.CSR(
            descrA, values, rowPtr, self.nzcols_gpu,
            (self.weights.shape[1], self.weights.shape[0]))
        # Allocate a page-locked piece of memory for the activity so that the GPU can
        # transfer data to main memory without involving the CPU:
        self.activity = cuda.pagelocked_empty(self.activity.shape, np.float32)
        self.activity_gpu_buffer = gpuarray.zeros(
            shape=(self.weights_gpu.shape[0], ), dtype=np.float32)

        self.input_buffer_pagelocked = cuda.pagelocked_empty(
            shape=(self.weights_gpu.shape[1], ),
            dtype=np.float32,
            mem_flags=cuda.host_alloc_flags.WRITECOMBINED)
        self.input_buffer = gpuarray.zeros(shape=(self.weights_gpu.shape[1], ),
                                           dtype=np.float32)

        self.norm_total_gpu = gpuarray.zeros(
            shape=(self.weights_gpu.shape[0], ), dtype=np.float32)
        # Helper array for normalization:
        self.norm_ones_gpu = gpuarray.to_gpu(
            np.array([1.0] * self.weights_gpu.shape[1], np.float32))
        # Kernel that applies the normalisation:
        self.normalize_kernel = ElementwiseKernel(
            "int *nzrows, float *norm_total, float *weights",
            "weights[i] *= norm_total[nzrows[i]]", "divisive_normalize")
        # Kernel that calculates the learning:
        self.hebbian_kernel = ElementwiseKernel(
            "float single_conn_lr, int *row, int *col, float *src_activity, float *dest_activity, float *result",
            "result[i] += single_conn_lr * src_activity[col[i]] * dest_activity[row[i]]",
            "hebbian_learning")
        self.pycuda_stream = cuda.Stream()
        # Finishing the initialisation that might have been delayed:
        params['apply_output_fns_init'] = should_apply
        self.apply_output_fns_init = should_apply
        if self.apply_output_fns_init:
            self.apply_learn_output_fns()
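
The two ElementwiseKernels above are easier to follow next to a plain NumPy equivalent operating on the same COO triplets (nzrows, nzcols, values). This is purely an illustration of what the GPU kernels compute, not part of the class.

import numpy as np

def hebbian_update(values, nzrows, nzcols, src_activity, dest_activity, single_conn_lr):
    # result[i] += lr * src_activity[col[i]] * dest_activity[row[i]]
    values += single_conn_lr * src_activity[nzcols] * dest_activity[nzrows]

def divisive_normalize(values, nzrows, norm_total):
    # weights[i] *= norm_total[nzrows[i]]
    values *= norm_total[nzrows]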
Code example #15
File: element.py  Project: eddy-ojb/holoviews
class ElementPlot(BokehPlot, GenericElementPlot):

    bgcolor = param.Parameter(default='white', doc="""
        Background color of the plot.""")

    border = param.Number(default=10, doc="""
        Minimum border around plot.""")

    finalize_hooks = param.HookList(default=[], doc="""
        Optional list of hooks called when finalizing an axis.
        The hook is passed the plot object and the displayed
        object, other plotting handles can be accessed via plot.handles.""")

    fontsize = param.Parameter(default={'title': '12pt'}, allow_None=True,  doc="""
       Specifies various fontsizes of the displayed text.

       Finer control is available by supplying a dictionary where any
       unmentioned keys revert to the default sizes, e.g.:

          {'ticks': '20pt', 'title': '15pt', 'ylabel': '5px', 'xlabel': '5px'}""")

    invert_axes = param.Boolean(default=False, doc="""
        Whether to invert the x- and y-axes.""")

    invert_xaxis = param.Boolean(default=False, doc="""
        Whether to invert the plot x-axis.""")

    invert_yaxis = param.Boolean(default=False, doc="""
        Whether to invert the plot y-axis.""")

    labelled = param.List(default=['x', 'y'], doc="""
        Whether to plot the 'x' and 'y' labels.""")

    lod = param.Dict(default={'factor': 10, 'interval': 300,
                              'threshold': 2000, 'timeout': 500}, doc="""
        Bokeh plots offer "Level of Detail" (LOD) capability to
        accommodate large (but not huge) amounts of data. The available
        options are:

          * factor    - Decimation factor to use when applying
                        decimation.
          * interval  - Interval (in ms) downsampling will be enabled
                        after an interactive event.
          * threshold - Number of samples before downsampling is enabled.
          * timeout   - Timeout (in ms) for checking whether interactive
                        tool events are still occurring.""")

    show_frame = param.Boolean(default=True, doc="""
        Whether or not to show a complete frame around the plot.""")

    show_grid = param.Boolean(default=True, doc="""
        Whether to show a Cartesian grid on the plot.""")

    show_legend = param.Boolean(default=True, doc="""
        Whether to show legend for the plot.""")

    shared_axes = param.Boolean(default=True, doc="""
        Whether to share axes across plots
        for linked panning and zooming.""")

    default_tools = param.List(default=['save', 'pan', 'wheel_zoom',
                                        'box_zoom', 'reset'],
        doc="A list of plugin tools to use on the plot.")

    tools = param.List(default=[], doc="""
        A list of plugin tools to use on the plot.""")

    toolbar = param.ObjectSelector(default='right',
                                   objects=["above", "below",
                                            "left", "right", None],
                                   doc="""
        The toolbar location, must be one of 'above', 'below',
        'left', 'right', None.""")

    xaxis = param.ObjectSelector(default='bottom',
                                 objects=['top', 'bottom', 'bare', 'top-bare',
                                          'bottom-bare', None], doc="""
        Whether and where to display the xaxis, bare options allow suppressing
        all axis labels including ticks and xlabel. Valid options are 'top',
        'bottom', 'bare', 'top-bare' and 'bottom-bare'.""")

    logx = param.Boolean(default=False, doc="""
        Whether the x-axis of the plot will be a log axis.""")

    xrotation = param.Integer(default=None, bounds=(0, 360), doc="""
        Rotation angle of the xticks.""")

    xticks = param.Parameter(default=None, doc="""
        Ticks along x-axis specified as an integer, explicit list of
        tick locations or bokeh Ticker object. If set to None default
        bokeh ticking behavior is applied.""")

    yaxis = param.ObjectSelector(default='left',
                                      objects=['left', 'right', 'bare', 'left-bare',
                                               'right-bare', None], doc="""
        Whether and where to display the yaxis, bare options allow suppressing
        all axis labels including ticks and ylabel. Valid options are 'left',
        'right', 'bare', 'left-bare' and 'right-bare'.""")

    logy = param.Boolean(default=False, doc="""
        Whether the y-axis of the plot will be a log axis.""")

    yrotation = param.Integer(default=None, bounds=(0, 360), doc="""
        Rotation angle of the yticks.""")

    yticks = param.Parameter(default=None, doc="""
        Ticks along y-axis specified as an integer, explicit list of
        tick locations or bokeh Ticker object. If set to None
        default bokeh ticking behavior is applied.""")

    # The plot objects to be updated on each frame
    # Any entries should be existing keys in the handles
    # instance attribute.
    _update_handles = ['source', 'glyph']
    _categorical = False

    def __init__(self, element, plot=None, **params):
        self.current_ranges = None
        super(ElementPlot, self).__init__(element, **params)
        self.handles = {} if plot is None else self.handles['plot']
        self.static = len(self.hmap) == 1 and len(self.keys) == len(self.hmap)
        self.callbacks = self._construct_callbacks()
        self.static_source = False


    def _construct_callbacks(self):
        """
        Initializes any callbacks for streams which have defined
        the plotted object as a source.
        """
        if not self.static or isinstance(self.hmap, DynamicMap):
            sources = [(i, o) for i, o in get_sources(self.hmap)
                       if i in [None, self.zorder]]
        else:
            sources = [(self.zorder, self.hmap.last)]
        cb_classes = set()
        for _, source in sources:
            streams = Stream.registry.get(id(source), [])
            registry = Stream._callbacks['bokeh']
            cb_classes |= {(registry[type(stream)], stream) for stream in streams
                           if type(stream) in registry and streams}
        cbs = []
        sorted_cbs = sorted(cb_classes, key=lambda x: id(x[0]))
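        # Group streams by their callback class so a single callback instance services all streams of that type.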
        for cb, group in groupby(sorted_cbs, lambda x: x[0]):
            cb_streams = [s for _, s in group]
            cbs.append(cb(self, cb_streams, source))
        return cbs

    def _hover_opts(self, element):
        if self.batched:
            dims = list(self.hmap.last.kdims)
        else:
            dims = list(self.overlay_dims.keys())
        dims += element.dimensions()
        return list(util.unique_iterator(dims)), {}

    def _init_tools(self, element, callbacks=[]):
        """
        Processes the list of tools to be supplied to the plot.
        """
        tooltips, hover_opts = self._hover_opts(element)
        tooltips = [(ttp.pprint_label, '@'+util.dimension_sanitizer(ttp.name))
                    if isinstance(ttp, Dimension) else ttp for ttp in tooltips]

        callbacks = callbacks+self.callbacks
        cb_tools, tool_names = [], []
        for cb in callbacks:
            for handle in cb.handles:
                if handle and handle in known_tools:
                    tool_names.append(handle)
                    if handle == 'hover':
                        tool = HoverTool(tooltips=tooltips, **hover_opts)
                    else:
                        tool = known_tools[handle]()
                    cb_tools.append(tool)
                    self.handles[handle] = tool

        tools = [t for t in cb_tools + self.default_tools + self.tools
                 if t not in tool_names]
        if 'hover' in tools:
            tools[tools.index('hover')] = HoverTool(tooltips=tooltips, **hover_opts)
        return tools


    def _get_hover_data(self, data, element, empty=False):
        """
        Initializes hover data based on Element dimension values.
        If empty initializes with no data.
        """
        if not any(isinstance(t, HoverTool) for t in self.state.tools):
            return

        for d in element.dimensions():
            dim = util.dimension_sanitizer(d.name)
            if dim not in data:
                data[dim] = element.dimension_values(d)

        for k, v in self.overlay_dims.items():
            dim = util.dimension_sanitizer(k.name)
            if dim not in data:
                data[dim] = [v for _ in range(len(list(data.values())[0]))]


    def _axes_props(self, plots, subplots, element, ranges):
        # Get the bottom layer and range element
        el = element.traverse(lambda x: x, [Element])
        el = el[0] if el else element

        dims = el.dimensions()
        xlabel, ylabel, zlabel = self._get_axis_labels(dims)
        if self.invert_axes:
            xlabel, ylabel = ylabel, xlabel

        plot_ranges = {}
        # Try finding shared ranges in other plots in the same Layout
        if plots and self.shared_axes:
            for plot in plots:
                if plot is None or not hasattr(plot, 'xaxis'): continue
                if plot.xaxis[0].axis_label == xlabel:
                    plot_ranges['x_range'] = plot.x_range
                if plot.xaxis[0].axis_label == ylabel:
                    plot_ranges['y_range'] = plot.x_range
                if plot.yaxis[0].axis_label == ylabel:
                    plot_ranges['y_range'] = plot.y_range
                if plot.yaxis[0].axis_label == xlabel:
                    plot_ranges['x_range'] = plot.y_range

        if el.get_dimension_type(0) in util.datetime_types:
            x_axis_type = 'datetime'
        else:
            x_axis_type = 'log' if self.logx else 'auto'

        if len(dims) > 1 and el.get_dimension_type(1) in util.datetime_types:
            y_axis_type = 'datetime'
        else:
            y_axis_type = 'log' if self.logy else 'auto'

        # Get the Element that determines the range and get_extents
        range_el = el if self.batched and not isinstance(self, OverlayPlot) else element
        l, b, r, t = self.get_extents(range_el, ranges)
        if self.invert_axes:
            l, b, r, t = b, l, t, r

        categorical = any(self.traverse(lambda x: x._categorical))
        categorical_x = any(isinstance(x, util.basestring) for x in (l, r))
        categorical_y = any(isinstance(y, util.basestring) for y in (b, t))

        if categorical or categorical_x:
            x_axis_type = 'auto'
            plot_ranges['x_range'] = FactorRange()
        elif 'x_range' not in plot_ranges:
            plot_ranges['x_range'] = Range1d()

        if categorical or categorical_y:
            y_axis_type = 'auto'
            plot_ranges['y_range'] = FactorRange()
        elif 'y_range' not in plot_ranges:
            plot_ranges['y_range'] = Range1d()

        return (x_axis_type, y_axis_type), (xlabel, ylabel, zlabel), plot_ranges


    def _init_plot(self, key, element, plots, ranges=None):
        """
        Initializes Bokeh figure to draw Element into and sets basic
        figure and axis attributes including axes types, labels,
        titles and plot height and width.
        """
        subplots = list(self.subplots.values()) if self.subplots else []

        axis_types, labels, plot_ranges = self._axes_props(plots, subplots, element, ranges)
        xlabel, ylabel, _ = labels
        x_axis_type, y_axis_type = axis_types
        properties = dict(plot_ranges)
        properties['x_axis_label'] = xlabel if 'x' in self.labelled else ' '
        properties['y_axis_label'] = ylabel if 'y' in self.labelled else ' '

        if not self.show_frame:
            properties['outline_line_alpha'] = 0

        if self.show_title:
            title = self._format_title(key, separator=' ')
        else:
            title = ''

        if self.toolbar:
            tools = self._init_tools(element)
            properties['tools'] = tools
            properties['toolbar_location'] = self.toolbar

        properties['webgl'] = Store.renderers[self.renderer.backend].webgl
        return bokeh.plotting.Figure(x_axis_type=x_axis_type,
                                     y_axis_type=y_axis_type, title=title,
                                     **properties)


    def _plot_properties(self, key, plot, element):
        """
        Returns a dictionary of plot properties.
        """
        size_multiplier = Store.renderers[self.renderer.backend].size/100.
        plot_props = dict(plot_height=int(self.height*size_multiplier),
                          plot_width=int(self.width*size_multiplier))
        if bokeh_version < '0.12':
            plot_props.update(self._title_properties(key, plot, element))
        if self.bgcolor:
            plot_props['background_fill_color'] = self.bgcolor
        if self.border is not None:
            for p in ['left', 'right', 'top', 'bottom']:
                plot_props['min_border_'+p] = self.border
        lod = dict(self.defaults().get('lod', {}), **self.lod)
        for lod_prop, v in lod.items():
            plot_props['lod_'+lod_prop] = v
        return plot_props


    def _title_properties(self, key, plot, element):
        if self.show_title:
            title = self._format_title(key, separator=' ')
        else:
            title = ''

        if bokeh_version < '0.12':
            title_font = self._fontsize('title', 'title_text_font_size')
            return dict(title=title, title_text_color='black', **title_font)
        else:
            opts = dict(text=title, text_color='black')
            title_font = self._fontsize('title').get('fontsize')
            if title_font:
                opts['text_font_size'] = value(title_font)
            return opts

    def _init_axes(self, plot):
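        # Hide or reposition each axis as requested, then record the axis and range handles for later updates.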
        if self.xaxis is None:
            plot.xaxis.visible = False
        elif 'top' in self.xaxis:
            plot.above = plot.below
            plot.below = []
            plot.xaxis[:] = plot.above
        self.handles['xaxis'] = plot.xaxis[0]
        self.handles['x_range'] = plot.x_range

        if self.yaxis is None:
            plot.yaxis.visible = False
        elif 'right' in self.yaxis:
            plot.right = plot.left
            plot.left = []
            plot.yaxis[:] = plot.right
        self.handles['yaxis'] = plot.yaxis[0]
        self.handles['y_range'] = plot.y_range


    def _axis_properties(self, axis, key, plot, dimension=None,
                         ax_mapping={'x': 0, 'y': 1}):
        """
        Returns a dictionary of axis properties depending
        on the specified axis.
        """
        axis_props = {}
        if ((axis == 'x' and self.xaxis in ['bottom-bare', 'top-bare']) or
            (axis == 'y' and self.yaxis in ['left-bare', 'right-bare'])):
            axis_props['axis_label'] = ''
            axis_props['major_label_text_font_size'] = value('0pt')
            axis_props['major_tick_line_color'] = None
            axis_props['minor_tick_line_color'] = None
        else:
            labelsize = self._fontsize('%slabel' % axis).get('fontsize')
            if labelsize:
                axis_props['axis_label_text_font_size'] = labelsize
            ticksize = self._fontsize('%sticks' % axis, common=False).get('fontsize')
            if ticksize:
                axis_props['major_label_text_font_size'] = value(ticksize)
            rotation = self.xrotation if axis == 'x' else self.yrotation
            if rotation:
                axis_props['major_label_orientation'] = np.radians(rotation)
            ticker = self.xticks if axis == 'x' else self.yticks
            if isinstance(ticker, Ticker):
                axis_props['ticker'] = ticker
            elif isinstance(ticker, int):
                axis_props['ticker'] = BasicTicker(desired_num_ticks=ticker)
            elif isinstance(ticker, (tuple, list)):
                if all(isinstance(t, tuple) for t in ticker):
                    pass
                else:
                    axis_props['ticker'] = FixedTicker(ticks=ticker)

        if FuncTickFormatter is not None and ax_mapping and dimension:
            formatter = None
            if dimension.value_format:
                formatter = dimension.value_format
            elif dimension.type in dimension.type_formatters:
                formatter = dimension.type_formatters[dimension.type]
            if formatter:
                msg = ('%s dimension formatter could not be '
                       'converted to tick formatter. ' % dimension.name)
                jsfunc = py2js_tickformatter(formatter, msg)
                if jsfunc:
                    formatter = FuncTickFormatter(code=jsfunc)
                    axis_props['formatter'] = formatter
        return axis_props


    def _update_plot(self, key, plot, element=None):
        """
        Updates plot parameters on every frame
        """
        el = element.traverse(lambda x: x, [Element])
        dimensions = el[0].dimensions() if el else el.dimensions()
        if not len(dimensions) >= 2:
            dimensions = dimensions+[None]
        plot.update(**self._plot_properties(key, plot, element))
        props = {axis: self._axis_properties(axis, key, plot, dim)
                 for axis, dim in zip(['x', 'y'], dimensions)}
        plot.xaxis[0].update(**props.get('x', {}))
        plot.yaxis[0].update(**props.get('y', {}))

        if bokeh_version >= '0.12' and not self.overlaid:
            plot.title.update(**self._title_properties(key, plot, element))

        if not self.show_grid:
            plot.xgrid.grid_line_color = None
            plot.ygrid.grid_line_color = None


    def _update_ranges(self, element, ranges):
        plot = self.handles['plot']
        x_range = self.handles['x_range']
        y_range = self.handles['y_range']

        l, b, r, t = None, None, None, None
        if any(isinstance(r, Range1d) for r in [x_range, y_range]):
            l, b, r, t = self.get_extents(element, ranges)
            if self.invert_axes:
                l, b, r, t = b, l, t, r

        xfactors, yfactors = None, None
        if any(isinstance(ax_range, FactorRange) for ax_range in [x_range, y_range]):
            xfactors, yfactors = self._get_factors(element)
        self._update_range(x_range, l, r, xfactors, self.invert_xaxis)
        self._update_range(y_range, b, t, yfactors, self.invert_yaxis)


    def _update_range(self, axis_range, low, high, factors, invert):
        if isinstance(axis_range, Range1d):
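            # Pad a degenerate (zero-span) numeric range so bokeh does not collapse the axis to a point.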
            if (low == high and low is not None and
                not isinstance(high, util.datetime_types)):
                offset = abs(low*0.1 if low else 0.5)
                low -= offset
                high += offset
            if invert: low, high = high, low
            if low is not None and (isinstance(low, util.datetime_types)
                                    or np.isfinite(low)):
                axis_range.start = low
            if high is not None and (isinstance(high, util.datetime_types)
                                     or np.isfinite(high)):
                axis_range.end = high
        elif isinstance(axis_range, FactorRange):
            factors = list(factors)
            if invert: factors = factors[::-1]
            axis_range.factors = factors


    def _categorize_data(self, data, cols, dims):
        """
        Transforms non-string or integer types in datasource if the
        axis to be plotted on is categorical. Accepts the column data
        source data, the columns corresponding to the axes and the
        dimensions for each axis, changing the data in place.
        """
        if self.invert_axes:
            cols = cols[::-1]
            dims = dims[:2][::-1]
        ranges = [self.handles['%s_range' % ax] for ax in 'xy']
        for i, col in enumerate(cols):
            column = data[col]
            if (isinstance(ranges[i], FactorRange) and
                (isinstance(column, list) or column.dtype.kind not in 'SU')):
                data[col] = [dims[i].pprint_value(v) for v in column]


    def _get_factors(self, element):
        """
        Get factors for categorical axes.
        """
        xdim, ydim = element.dimensions()[:2]
        xvals, yvals = [element.dimension_values(i, False)
                        for i in range(2)]
        coords = ([x if xvals.dtype.kind in 'SU' else xdim.pprint_value(x) for x in xvals],
                  [y if yvals.dtype.kind in 'SU' else ydim.pprint_value(y) for y in yvals])
        if self.invert_axes: coords = coords[::-1]
        return coords


    def _process_legend(self):
        """
        Disables legends if show_legend is disabled.
        """
        for l in self.handles['plot'].legend:
            if bokeh_version > '0.12.2':
                l.items[:] = []
            else:
                l.legends[:] = []
            l.border_line_alpha = 0
            l.background_fill_alpha = 0


    def _init_glyph(self, plot, mapping, properties):
        """
        Returns a Bokeh glyph object.
        """
        properties = mpl_to_bokeh(properties)
        plot_method = self._plot_methods.get('batched' if self.batched else 'single')
        renderer = getattr(plot, plot_method)(**dict(properties, **mapping))
        return renderer, renderer.glyph


    def _glyph_properties(self, plot, element, source, ranges):
        properties = self.style[self.cyclic_index]

        if self.show_legend:
            if self.overlay_dims:
                legend = ', '.join([d.pprint_value(v) for d, v in
                                    self.overlay_dims.items()])
            else:
                legend = element.label
            properties['legend'] = legend
        properties['source'] = source
        return properties


    def _update_glyph(self, glyph, properties, mapping):
        allowed_properties = glyph.properties()
        properties = mpl_to_bokeh(properties)
        merged = dict(properties, **mapping)
        glyph.update(**{k: v for k, v in merged.items()
                        if k in allowed_properties})

    def _execute_hooks(self, element):
        """
        Executes finalize hooks
        """
        for hook in self.finalize_hooks:
            try:
                hook(self, element)
            except Exception as e:
                self.warning("Plotting hook %r could not be applied:\n\n %s" % (hook, e))


    def initialize_plot(self, ranges=None, plot=None, plots=None, source=None):
        """
        Initializes a new plot object with the last available frame.
        """
        # Get element key and ranges for frame
        if self.batched:
            element = [el for el in self.hmap.data.values() if el][-1]
        else:
            element = self.hmap.last
        key = self.keys[-1]
        ranges = self.compute_ranges(self.hmap, key, ranges)
        self.current_ranges = ranges
        self.current_frame = element
        self.current_key = key
        style_element = element.last if self.batched else element
        ranges = util.match_spec(style_element, ranges)

        # Initialize plot, source and glyph
        if plot is None:
            plot = self._init_plot(key, style_element, ranges=ranges, plots=plots)
            self._init_axes(plot)
        else:
            self.handles['xaxis'] = plot.xaxis[0]
            self.handles['x_range'] = plot.x_range
            self.handles['yaxis'] = plot.yaxis[0]
            self.handles['y_range'] = plot.y_range
        self.handles['plot'] = plot

        # Get data and initialize data source
        empty = False
        if self.batched:
            data, mapping = self.get_batched_data(element, ranges, empty)
        else:
            data, mapping = self.get_data(element, ranges, empty)
        if source is None:
            source = self._init_datasource(data)
        self.handles['source'] = source

        properties = self._glyph_properties(plot, style_element, source, ranges)
        with abbreviated_exception():
            renderer, glyph = self._init_glyph(plot, mapping, properties)
        self.handles['glyph'] = glyph
        if isinstance(renderer, Renderer):
            self.handles['glyph_renderer'] = renderer

        # Update plot, source and glyph
        with abbreviated_exception():
            self._update_glyph(glyph, properties, mapping)
        if not self.overlaid:
            self._update_plot(key, plot, style_element)
            self._update_ranges(style_element, ranges)

        if not self.batched:
            for cb in self.callbacks:
                cb.initialize()

        if not self.overlaid:
            self._process_legend()
        self._execute_hooks(element)

        self.drawn = True

        return plot


    def update_frame(self, key, ranges=None, plot=None, element=None, empty=False):
        """
        Updates an existing plot with data corresponding
        to the key.
        """
        reused = isinstance(self.hmap, DynamicMap) and self.overlaid
        if not reused and element is None:
            element = self._get_frame(key)
        else:
            self.current_key = key
            self.current_frame = element

        glyph = self.handles.get('glyph', None)
        if hasattr(glyph, 'visible'):
            glyph.visible = bool(element)

        if not element or (not self.dynamic and self.static):
            return

        style_element = element.last if self.batched else element
        self.style = self.lookup_options(style_element, 'style')

        ranges = self.compute_ranges(self.hmap, key, ranges)
        self.set_param(**self.lookup_options(style_element, 'plot').options)
        ranges = util.match_spec(style_element, ranges)
        self.current_ranges = ranges

        plot = self.handles['plot']
        source = self.handles['source']
        empty = False
        mapping = {}

        # Cache frame object id to skip updating data if unchanged
        previous_id = self.handles.get('previous_id', None)
        if self.batched:
            current_id = sum(element.traverse(lambda x: id(x.data), [Element]))
        else:
            current_id = id(element.data)
        self.handles['previous_id'] = current_id
        self.static_source = self.dynamic and (current_id == previous_id)
        if not self.static_source:
            if self.batched:
                data, mapping = self.get_batched_data(element, ranges, empty)
            else:
                data, mapping = self.get_data(element, ranges, empty)
            self._update_datasource(source, data)

        if glyph:
            properties = self._glyph_properties(plot, element, source, ranges)
            with abbreviated_exception():
                self._update_glyph(self.handles['glyph'], properties, mapping)
        if not self.overlaid:
            self._update_ranges(style_element, ranges)
            self._update_plot(key, plot, style_element)

        self._execute_hooks(element)


    @property
    def current_handles(self):
        """
        Returns a list of the plot objects to update.
        """
        handles = []
        if self.static and not self.dynamic:
            return handles

        for handle in self._update_handles:
            if (handle == 'source' and self.static_source):
                continue
            if handle in self.handles:
                handles.append(self.handles[handle])

        if self.overlaid:
            return handles

        plot = self.state
        handles.append(plot)
        if bokeh_version >= '0.12':
            handles.append(plot.title)

        for ax in 'xy':
            key = '%s_range' % ax
            if isinstance(self.handles.get(key), FactorRange):
                handles.append(self.handles[key])

        if self.current_frame:
            if not self.apply_ranges:
                rangex, rangey = False, False
            elif self.framewise:
                rangex, rangey = True, True
            elif isinstance(self.hmap, DynamicMap):
                rangex, rangey = True, True
                callbacks = [cb for cbs in self.traverse(lambda x: x.callbacks)
                             for cb in cbs]
                streams = [s for cb in callbacks for s in cb.streams]
                for stream in streams:
                    if isinstance(stream, RangeXY):
                        rangex, rangey = False, False
                        break
                    elif isinstance(stream, RangeX):
                        rangex = False
                    elif isinstance(stream, RangeY):
                        rangey = False
            else:
                rangex, rangey = False, False
            if rangex:
                handles += [plot.x_range]
            if rangey:
                handles += [plot.y_range]
        return handles


    @property
    def framewise(self):
        """
        Property to determine whether the current frame should have
        framewise normalization enabled. Required for bokeh plotting
        classes to determine whether to send updated ranges for each
        frame.
        """
        current_frames = [el for f in self.traverse(lambda x: x.current_frame)
                          for el in (f.traverse(lambda x: x, [Element])
                                     if f else [])]
        return any(self.lookup_options(frame, 'norm').options.get('framewise')
                   for frame in current_frames)
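
Since finalize_hooks is the HookList this example revolves around, here is a hedged sketch of registering one; the hook body is illustrative, and in practice the hook is usually attached through the HoloViews options system rather than by overriding the class default.

def disable_toolbar_logo(plot, element):
    # Each hook receives the plot and the displayed element; the underlying
    # bokeh objects are reachable through plot.handles.
    plot.handles['plot'].toolbar.logo = None

ElementPlot.finalize_hooks = [disable_toolbar_logo]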
Code example #16
File: __init__.py  Project: sf-issues/topographica
class SharedWeightCFProjection(CFProjection):
    """
    A Projection with a single set of weights, shared by all units.

    Otherwise similar to CFProjection, except that learning is
    currently disabled.
    """
    ### JABHACKALERT: Set to be constant as a clue that learning won't
    ### actually work yet, but we could certainly extend it to support
    ### learning if desired, e.g. to learn position-independent responses.
    learning_fn = param.ClassSelector(CFPLearningFn,CFPLF_Identity(),constant=True)
    weights_output_fns = param.HookList(default=[CFPOF_SharedWeight()])
    precedence = param.Number(default=0.5)

    def __init__(self,**params):
        """
        Initialize the Projection with a single cf_type object
        (typically a ConnectionField).
        """
        # We don't want the whole set of cfs initialized, but we
        # do want anything that CFProjection defines.
        super(SharedWeightCFProjection,self).__init__(initialize_cfs=False,**params)

        # We want the sharedcf to be located on the grid, so use the
        # center of a unit
        sheet_rows,sheet_cols=self.src.shape
        # arbitrary (e.g. could use 0,0) 
        center_row,center_col = sheet_rows/2,sheet_cols/2
        center_unitxcenter,center_unitycenter=self.src.matrixidx2sheet(center_row,
                                                                       center_col)

        
        self.__sharedcf=self.cf_type(self.src,
                                     x=center_unitxcenter,
                                     y=center_unitycenter,
                                     template=self._slice_template,
                                     weights_generator=self.weights_generator,
                                     mask=self.mask_template,
                                     output_fns=[wof.single_cf_fn for wof in self.weights_output_fns],
                                     min_matrix_radius=self.min_matrix_radius)

        self._create_cfs()



    def _create_cf(self,x,y):
        x_cf,y_cf = self.coord_mapper(x,y)
        # Does not pass the mask, as it would have to be sliced
        # for each cf, and is only used for learning.
        CF = SharedWeightCF(self.__sharedcf,self.src,x=x_cf,y=y_cf, #JUDE ADDED
                            template=self._slice_template,
                            min_matrix_radius=self.min_matrix_radius,
                            mask=self.mask_template)

        return CF
            
    
    def learn(self):
        """
        Because of how output functions are applied, it is not currently
        possible to use learning functions and learning output functions for
        SharedWeightCFProjections, so we disable them here.
        """
        pass
    
    
    def apply_learn_output_fns(self,active_units_mask=True):
        """
        Because of how output functions are applied, it is not currently
        possible to use learning functions and learning output functions for
        SharedWeightCFProjections, so we disable them here.
        """
        pass


    def n_bytes(self):
        return self.activity.nbytes + self.__sharedcf.weights.nbytes + \
               sum([cf.input_sheet_slice.nbytes
                    for cf,i in CFIter(self)()])
Code example #17
class CFProjection(Projection):
    """
    A projection composed of ConnectionFields from a Sheet into a ProjectionSheet.

    CFProjection computes its activity using a response_fn of type
    CFPResponseFn (typically a CF-aware version of mdot) and output_fns
    (typically none).  The initial contents of the
    ConnectionFields mapping from the input Sheet into the target
    ProjectionSheet are controlled by the weights_generator, cf_shape,
    and weights_output_fn parameters, while the location of the
    ConnectionField is controlled by the coord_mapper parameter.

    Any subclass has to implement the interface
    activate(self,input_activity) that computes the response from the
    input and stores it in the activity array.
    """

    response_fn = param.ClassSelector(
        CFPResponseFn,
        default=CFPRF_Plugin(),
        doc=
        'Function for computing the Projection response to an input pattern.')

    input_fns = param.HookList(default=[],
                               class_=TransferFn,
                               doc="""
        Function(s) applied to the input before the projection activity is computed."""
                               )

    cf_type = param.Parameter(
        default=ConnectionField,
        constant=True,
        doc="Type of ConnectionField to use when creating individual CFs.")

    # JPHACKALERT: Not all support for null CFs has been implemented.
    # CF plotting and C-optimized CFPxF_ functions need
    # to be fixed to support null CFs without crashing.
    allow_null_cfs = param.Boolean(
        default=False,
        doc="Whether or not the projection can have entirely empty CFs")

    nominal_bounds_template = BoundingRegionParameter(
        default=BoundingBox(radius=0.1),
        doc="""
        Bounds defining the Sheet area covered by a prototypical ConnectionField.
        The true bounds will differ depending on the density (see create_slice_template())."""
    )

    weights_generator = param.ClassSelector(
        PatternGenerator,
        default=patterngenerator.Constant(),
        constant=True,
        doc="Generate initial weights values.")

    cf_shape = param.ClassSelector(
        PatternGenerator,
        default=patterngenerator.Constant(),
        constant=True,
        doc="Mask pattern to define the shape of the connection fields.")

    same_cf_shape_for_all_cfs = param.Boolean(default=True,
                                              doc="""
        Whether or not to share a single cf_shape mask for all CFs.
        If True, the cf_shape is evaluated only once and shared for
        all CFs, which saves computation time and memory.  If False,
        the cf_shape is evaluated once for each CF, allowing each to
        have its own shape.""")

    learning_fn = param.ClassSelector(
        CFPLearningFn,
        default=CFPLF_Plugin(),
        doc=
        'Function for computing changes to the weights based on one activation step.'
    )

    # JABALERT: Shouldn't learning_rate be owned by the learning_fn?
    learning_rate = param.Number(default=0.0,
                                 softbounds=(0, 100),
                                 doc="""
        Amount of learning at each step for this projection, specified
        in units that are independent of the density of each Sheet.""")

    weights_output_fns = param.HookList(
        default=[CFPOF_Plugin()],
        class_=CFPOutputFn,
        doc='Functions applied to each CF after learning.')

    strength = param.Number(default=1.0,
                            doc="""
        Global multiplicative scaling applied to the Activity of this Sheet."""
                            )

    coord_mapper = param.ClassSelector(
        CoordinateMapperFn,
        default=IdentityMF(),
        doc='Function to map a projected coordinate into the target sheet.')

    # CEBALERT: this is temporary (allows c++ matching in certain
    # cases).  We will allow the user to override the mask size, but
    # by offering a scaling parameter.
    autosize_mask = param.Boolean(default=True,
                                  constant=True,
                                  precedence=-1,
                                  doc="""
        Topographica sets the mask size so that it is the same as the connection field's
        size, unless this parameter is False - in which case the user-specified size of
        the cf_shape is used. In normal usage of Topographica, this parameter should
        remain True.""")

    mask_threshold = param.Number(default=0.5,
                                  constant=True,
                                  doc="""
        If a unit is above this value in the cf_shape mask, it is
        included; otherwise it is excluded from the mask.""")

    apply_output_fns_init = param.Boolean(default=True,
                                          doc="""
        Whether to apply the output function to connection fields (e.g. for
        normalization) when the CFs are first created.""")

    min_matrix_radius = param.Integer(default=1,
                                      bounds=(0, None),
                                      doc="""
        Enforced minimum for radius of weights matrix.
        The default of 1 gives a minimum matrix of 3x3. 0 would
        allow a 1x1 matrix.""")

    hash_format = param.String(default="{name}-{src}-{dest}",
                               doc="""
       Format string to determine the hash value used to initialize
       random weight generation. Format keys available include {name}
       {src} and {dest}.""")

    precedence = param.Number(default=0.8)

    def __init__(self, initialize_cfs=True, **params):
        """
        Initialize the Projection with a set of cf_type objects
        (typically ConnectionFields), each located at the location
        in the source sheet corresponding to the unit in the target
        sheet. The cf_type objects are stored in the 'cfs' array.

        The nominal_bounds_template specified may be altered: the
        bounds must be fitted to the Sheet's matrix, and the weights
        matrix must have odd dimensions. These altered bounds are
        passed to the individual connection fields.

        A mask for the weights matrix is constructed. The shape is
        specified by cf_shape; the size defaults to the size
        of the nominal_bounds_template.
        """
        super(CFProjection, self).__init__(**params)

        self.weights_generator.set_dynamic_time_fn(None,
                                                   sublistattr='generators')
        # get the actual bounds_template by adjusting a copy of the
        # nominal_bounds_template to ensure an odd slice, and to be
        # cropped to sheet if necessary
        self._slice_template = Slice(copy(self.nominal_bounds_template),
                                     self.src,
                                     force_odd=True,
                                     min_matrix_radius=self.min_matrix_radius)

        self.bounds_template = self._slice_template.compute_bounds(self.src)

        self.mask_template = _create_mask(self.cf_shape, self.bounds_template,
                                          self.src, self.autosize_mask,
                                          self.mask_threshold)

        self.n_units = self._calc_n_units()

        if initialize_cfs:
            self._create_cfs()

        if self.apply_output_fns_init:
            self.apply_learn_output_fns(active_units_mask=False)

        ### JCALERT! We might want to change the default value of the
        ### input value to self.src.activity; but it fails, raising a
        ### type error. It probably has to be clarified why this is
        ### happening
        self.input_buffer = None
        self.activity = np.array(self.dest.activity)
        if 'cfs' not in self.dest.views:
            self.dest.views.CFs = AttrTree()
        self.dest.views.CFs[self.name] = self._cf_grid()

    def _cf_grid(self, shape=None, **kwargs):
        "Create ProjectionGrid with the correct metadata."
        grid = CoordinateGrid(self.dest.bounds,
                              None,
                              xdensity=self.dest.xdensity,
                              ydensity=self.dest.ydensity)
        grid.metadata = AttrDict(timestamp=self.src.simulation.time(),
                                 info=self.name,
                                 proj_src_name=self.src.name,
                                 proj_dest_name=self.dest.name,
                                 **kwargs)
        return grid

    def _generate_coords(self):
        X, Y = self.dest.sheetcoords_of_idx_grid()
        vectorized_coord_mapper = simple_vectorize(
            self.coord_mapper,
            num_outputs=2,
            # CB: could switch to float32?
            output_type=float)
        return vectorized_coord_mapper(X, Y)

    # CB: should be _initialize_cfs() since we already have 'initialize_cfs' flag?
    def _create_cfs(self):
        vectorized_create_cf = simple_vectorize(self._create_cf)
        self.cfs = vectorized_create_cf(*self._generate_coords())
        self.flatcfs = list(self.cfs.flat)

    def _create_cf(self, x, y):
        """
        Create a ConnectionField at x,y in the src sheet.
        """
        # (to restore would need to have an r,c counter)
        # self.debug("Creating CF(%d,%d) from src (%.3f,%.3f) to  dest (%.3f,%.3f)"%(r,c,x_cf,y_cf,x,y))

        label = self.hash_format.format(name=self.name,
                                        src=self.src.name,
                                        dest=self.dest.name)

        name = "%s_CF (%.5f, %.5f)" % ('' if label is None else label, x, y)
        try:
            if self.same_cf_shape_for_all_cfs:
                mask_template = self.mask_template
            else:
                mask_template = _create_mask(self.cf_shape,
                                             self.bounds_template,
                                             self.src,
                                             self.autosize_mask,
                                             self.mask_threshold,
                                             name=name)

            CF = self.cf_type(self.src,
                              x=x,
                              y=y,
                              template=self._slice_template,
                              weights_generator=self.weights_generator,
                              mask=mask_template,
                              min_matrix_radius=self.min_matrix_radius,
                              label=label)
        except NullCFError:
            if self.allow_null_cfs:
                CF = None
            else:
                raise

        return CF

    def _calc_n_units(self):
        """Return the number of unmasked units in a typical ConnectionField."""

        return min(
            len(self.mask_template.ravel().nonzero()[0]),
            # CEBALERT: if the mask_template is bigger than the
            # src sheet (e.g.  conn radius bigger than src
            # radius), return the size of the source sheet
            self.src.shape[0] * self.src.shape[1])

    def cf(self, r, c):
        """Return the specified ConnectionField"""
        # CB: should we offer convenience cf(x,y) (i.e. sheetcoords) method instead?
        self.warning(
            "CFProjection.cf(r,c) is deprecated: use cfs[r,c] instead")
        return self.cfs[r, c]

    def cf_bounds(self, r, c):
        """Return the bounds of the specified ConnectionField."""
        return self.cfs[r, c].get_bounds(self.src)

    def grid(self, rows=11, cols=11, lbrt=None, situated=False, **kwargs):
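        # Sample connection-field views on a rows x cols grid across the destination sheet
        # and return them as a CoordinateGrid.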
        xdensity, ydensity = self.dest.xdensity, self.dest.ydensity
        l, b, r, t = self.dest.bounds.lbrt()
        half_x_unit = ((r - l) / xdensity) / 2.
        half_y_unit = ((t - b) / ydensity) / 2.
        if lbrt is None:
            bounds = self.dest.bounds
            l, b, r, t = (l + half_x_unit, b + half_y_unit, r - half_x_unit,
                          t - half_y_unit)
        else:
            l, b = self.dest.closest_cell_center(lbrt[0], lbrt[1])
            r, t = self.dest.closest_cell_center(lbrt[2], lbrt[3])
            bounds = BoundingBox(
                points=[(l - half_x_unit,
                         b - half_y_unit), (r + half_x_unit, t + half_y_unit)])
        x, y = np.meshgrid(np.linspace(l, r, cols), np.linspace(b, t, rows))
        coords = zip(x.flat, y.flat)

        grid_items = {}
        for x, y in coords:
            grid_items[x, y] = self.view(x, y, situated=situated, **kwargs)

        grid = CoordinateGrid(bounds, (cols, rows),
                              initial_items=grid_items,
                              title=' '.join(
                                  [self.dest.name, self.name, '{label}']),
                              label='CFs')
        grid.metadata = AttrDict(info=self.name,
                                 proj_src_name=self.src.name,
                                 proj_dest_name=self.dest.name,
                                 timestamp=self.src.simulation.time(),
                                 **kwargs)
        return grid

    def view(self, sheet_x, sheet_y, timestamp=None, situated=False, **kwargs):
        """
        Return a single connection field SheetView, for the unit
        located nearest to sheet coordinate (sheet_x,sheet_y).
        """
        if timestamp is None:
            timestamp = self.src.simulation.time()
        time_dim = Dimension("Time", type=param.Dynamic.time_fn.time_type)
        (r, c) = self.dest.sheet2matrixidx(sheet_x, sheet_y)
        cf = self.cfs[r, c]
        r1, r2, c1, c2 = cf.input_sheet_slice
        situated_shape = self.src.activity.shape
        situated_bounds = self.src.bounds
        roi_bounds = cf.get_bounds(self.src)
        if situated:
            matrix_data = np.zeros(situated_shape, dtype=np.float64)
            matrix_data[r1:r2, c1:c2] = cf.weights.copy()
            bounds = situated_bounds
        else:
            matrix_data = cf.weights.copy()
            bounds = roi_bounds

        sv = CFView(matrix_data,
                    bounds,
                    situated_bounds=situated_bounds,
                    input_sheet_slice=(r1, r2, c1, c2),
                    roi_bounds=roi_bounds,
                    label=self.name + " CF Weights",
                    value='CF Weight')
        sv.metadata = AttrDict(timestamp=timestamp)

        cfstack = CFStack((timestamp, sv), dimensions=[time_dim])
        cfstack.metadata = AttrDict(coords=(sheet_x, sheet_y),
                                    dest_name=self.dest.name,
                                    precedence=self.src.precedence,
                                    proj_name=self.name,
                                    src_name=self.src.name,
                                    row_precedence=self.src.row_precedence,
                                    timestamp=timestamp,
                                    **kwargs)
        return cfstack

    def get_view(self, sheet_x, sheet_y, timestamp=None):
        self.warning("Deprecated, call 'view' method instead.")
        return self.view(sheet_x, sheet_y, timestamp)

    def activate(self, input_activity):
        """Activate using the specified response_fn and output_fn."""
        if self.input_fns:
            input_activity = input_activity.copy()
        for iaf in self.input_fns:
            iaf(input_activity)
        self.input_buffer = input_activity
        self.activity *= 0.0
        self.response_fn(CFIter(self), input_activity, self.activity,
                         self.strength)
        for of in self.output_fns:
            of(self.activity)

    # CEBALERT: should add active_units_mask to match
    # apply_learn_output_fns.
    def learn(self):
        """
        For a CFProjection, learn consists of calling the learning_fn.
        """
        # Learning is performed if the input_buffer has already been set,
        # i.e. there is an input to the Projection.
        if self.input_buffer is not None:
            self.learning_fn(CFIter(self), self.input_buffer,
                             self.dest.activity, self.learning_rate)

    # CEBALERT: called 'learn' output fns here, but called 'weights' output fns
    # elsewhere (mostly). Change all to 'learn'?
    def apply_learn_output_fns(self, active_units_mask=True):
        """
        Apply the weights_output_fns to each unit.

        If active_units_mask is True, inactive units will be skipped.
        """
        for of in self.weights_output_fns:
            of(CFIter(self, active_units_mask=active_units_mask))

    # CEBALERT: see gc alert in simulation.__new__
    def _cleanup(self):
        for cf in self.cfs.flat:
            # cf could be None or maybe something else
            if hasattr(cf, 'input_sheet'):
                cf.input_sheet = None
            if hasattr(cf, 'input_sheet_slice'):
                cf.input_sheet_slice = None
            if hasattr(cf, 'weights_slice'):
                cf.weights_slice = None

    def n_bytes(self):
        # Could also count the input_sheet_slice
        rows, cols = self.cfs.shape
        return super(CFProjection,self).n_bytes() + \
               sum([cf.weights.nbytes +
                    cf.mask.nbytes
                    for cf,i in CFIter(self,ignore_sheet_mask=True)()])

    def n_conns(self):
        # Counts non-masked values, if mask is available; otherwise counts
        # weights as connections if nonzero
        rows, cols = self.cfs.shape
        return np.sum([
            len((cf.mask
                 if cf.mask is not None else cf.weights).ravel().nonzero()[0])
            for cf, i in CFIter(self)()
        ])
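
The n_bytes and n_conns accounting above depends only on each CF's weights and mask arrays, so it can be illustrated with plain NumPy, independent of the CFIter machinery. A minimal sketch with hypothetical toy CFs (not Topographica API):

import numpy as np

class ToyCF(object):
    """Hypothetical stand-in for a ConnectionField: a weights array plus a boolean mask."""
    def __init__(self, weights, mask):
        self.weights = weights
        self.mask = mask

rng = np.random.default_rng(0)
cfs = [ToyCF(rng.random((3, 3)), rng.random((3, 3)) > 0.5) for _ in range(4)]

# n_bytes: total storage of the weights plus masks, as in CFProjection.n_bytes above.
n_bytes = sum(cf.weights.nbytes + cf.mask.nbytes for cf in cfs)

# n_conns: count unmasked entries (nonzero weights would be counted if no mask existed),
# as in CFProjection.n_conns above.
n_conns = sum(len(cf.mask.ravel().nonzero()[0]) for cf in cfs)

print(n_bytes, n_conns)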
Code example #18
0
class DivPlot(BokehPlot, GenericElementPlot, AnnotationPlot):

    height = param.Number(default=300)

    width = param.Number(default=300)

    sizing_mode = param.ObjectSelector(default=None,
                                       objects=[
                                           'fixed', 'stretch_width',
                                           'stretch_height', 'stretch_both',
                                           'scale_width', 'scale_height',
                                           'scale_both', None
                                       ],
                                       doc="""

        How the component should size itself.

        * "fixed" :
          Component is not responsive. It will retain its original
          width and height regardless of any subsequent browser window
          resize events.
        * "stretch_width"
          Component will responsively resize to stretch to the
          available width, without maintaining any aspect ratio. The
          height of the component depends on the type of the component
          and may be fixed or fit to component's contents.
        * "stretch_height"
          Component will responsively resize to stretch to the
          available height, without maintaining any aspect ratio. The
          width of the component depends on the type of the component
          and may be fixed or fit to component's contents.
        * "stretch_both"
          Component is completely responsive, independently in width
          and height, and will occupy all the available horizontal and
          vertical space, even if this changes the aspect ratio of the
          component.
        * "scale_width"
          Component will responsively resize to stretch to the
          available width, while maintaining the original or provided
          aspect ratio.
        * "scale_height"
          Component will responsively resize to stretch to the
          available height, while maintaining the original or provided
          aspect ratio.
        * "scale_both"
          Component will responsively resize to both the available
          width and height, while maintaining the original or provided
          aspect ratio.
    """)

    finalize_hooks = param.HookList(default=[],
                                    doc="""
        Deprecated; use hooks options instead.""")

    hooks = param.HookList(default=[],
                           doc="""
        Optional list of hooks called when finalizing a plot. The
        hook is passed the plot object and the displayed element, and
        other plotting handles can be accessed via plot.handles.""")

    _stream_data = False

    selection_display = None

    def __init__(self, element, plot=None, **params):
        super(DivPlot, self).__init__(element, **params)
        self.callbacks = []
        self.handles = {} if plot is None else self.handles['plot']
        self.static = len(self.hmap) == 1 and len(self.keys) == len(self.hmap)

    def get_data(self, element, ranges, style):
        return element.data, {}, style

    def initialize_plot(self, ranges=None, plot=None, plots=None, source=None):
        """
        Initializes a new plot object with the last available frame.
        """
        # Get element key and ranges for frame
        element = self.hmap.last
        key = self.keys[-1]
        self.current_frame = element
        self.current_key = key

        data, _, _ = self.get_data(element, ranges, {})
        div = HTML(text=data,
                   width=self.width,
                   height=self.height,
                   sizing_mode=self.sizing_mode)
        self.handles['plot'] = div
        self._execute_hooks(element)
        self.drawn = True
        return div

    def update_frame(self, key, ranges=None, plot=None):
        """
        Updates an existing plot with data corresponding
        to the key.
        """
        element = self._get_frame(key)
        text, _, _ = self.get_data(element, ranges, {})
        self.state.update(text=text, sizing_mode=self.sizing_mode)
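
In normal HoloViews usage this plot class is not instantiated directly; the bokeh backend selects it when an hv.Div element is rendered. A minimal usage sketch, assuming a standard holoviews + bokeh installation (the width/height options map onto the parameters defined above; sizing_mode is only available in versions that define it):

import holoviews as hv
hv.extension('bokeh')

# An hv.Div element carries a raw HTML string; DivPlot turns it into a bokeh div model.
div = hv.Div("<h2>Summary</h2><p>42 units converged.</p>").opts(width=400, height=150)

hv.save(div, 'div_example.html')  # render to a standalone HTML file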
Code example #19
0
class SLISSOM(SettlingCFSheet):
    """
    A Sheet class implementing the SLISSOM algorithm
    (Choe and Miikkulainen, Neurocomputing 21:139-157, 1998).

    A SLISSOM sheet is a SettlingCFSheet sheet extended to include spiking
    neurons using dynamic synapses.
    """

    # configurable parameters
    threshold = param.Number(default=0.3,
                             bounds=(0, None),
                             doc="Baseline threshold")

    threshold_decay_rate = param.Number(default=0.01,
                                        bounds=(0, None),
                                        doc="Dynamic threshold decay rate")

    absolute_refractory = param.Number(default=1.0,
                                       bounds=(0, None),
                                       doc="Absolute refractory period")

    dynamic_threshold_init = param.Number(
        default=2.0,
        bounds=(0, None),
        doc="Initial value for dynamic threshold when spike occurs")

    spike_amplitude = param.Number(
        default=1.0,
        bounds=(0, None),
        doc="Amplitude of spike at the moment of spiking")

    reset_on_new_iteration = param.Boolean(
        default=False,
        doc="Reset activity and projection activity when new iteration starts")

    noise_rate = param.Number(default=0.0,
                              bounds=(0, 1.0),
                              doc="Noise added to the on-going activity")

    output_fns = param.HookList(
        default=[PiecewiseLinear(lower_bound=0.1, upper_bound=0.65)])

    # logging facility for debugging
    trace_coords = param.List(
        default=[],
        doc="List of coord(s) of membrane potential(s) to track over time")

    trace_n = param.Number(
        default=400,
        bounds=(1, None),
        doc="Number of steps to track neuron's membrane potential")

    # matrices and vectors for internal use
    dynamic_threshold = None
    spike = None
    spike_history = None
    membrane_potential = None
    membrane_potential_trace = None
    trace_count = 0

    def __init__(self, **params):
        """
        SLISSOM-specific init, where dynamic threshold stuff
        gets initialized.
        """
        super(SLISSOM, self).__init__(**params)
        self.dynamic_threshold = \
            np.zeros(self.activity.shape).astype(activity_type)
        self.spike = np.zeros(self.activity.shape)
        self.spike_history = np.zeros(self.activity.shape)
        self.membrane_potential = \
            np.zeros(self.activity.shape).astype(activity_type)

        num_traces = len(self.trace_coords)
        self.membrane_potential_trace = \
            np.zeros((num_traces,self.trace_n)).astype(activity_type)

    def activate(self):
        """
        For now, this is the same as the parent's activate(), plus
        fixed+dynamic thresholding. Overriding was necessary to
        avoid self.send_output() being invoked before thresholding.
        This function also updates and maintains internal values such as
        membrane_potential, spike, etc.
        """
        self.activity *= 0.0

        for proj in self.in_connections:
            self.activity += proj.activity

        if self.apply_output_fns:
            for of in self.output_fns:
                of(self.activity)

        # Add noise, based on the noise_rate.
        if self.noise_rate > 0.0:
            self.activity = self.activity * (1.0-self.noise_rate) \
                + np.random.random(self.activity.shape) * self.noise_rate

        # Thresholding: baseline + dynamic threshold + absolute refractory
        # period
        rows, cols = self.activity.shape

        for r in range(rows):
            for c in range(cols):

                thresh = self.threshold + self.dynamic_threshold[r, c]

                # Calculate membrane potential
                self.membrane_potential[r, c] = self.activity[r, c] - thresh

                if (self.activity[r, c] > thresh
                        and self.spike_history[r, c] <= 0):
                    self.activity[r, c] = self.spike_amplitude
                    self.dynamic_threshold[r, c] = self.dynamic_threshold_init
                    # set absolute refractory period for "next" timestep
                    # (hence the "-1")
                    self.spike_history[r, c] = self.absolute_refractory - 1.0
                else:
                    self.activity[r, c] = 0.0
                    self.dynamic_threshold[r, c] = self.dynamic_threshold[
                        r, c] * exp(-(self.threshold_decay_rate))
                    self.spike_history[r, c] -= 1.0

                # Append spike to the membrane potential
                self.membrane_potential[r, c] += self.activity[r, c]

        self._update_trace()
        self.send_output(src_port='Activity', data=self.activity)

    def input_event(self, conn, data):
        """
        SLISSOM-specific input_event handling:
        On a new afferent input, DO NOT clear the activity matrix unless
        reset_on_new_iteration is True.
        """
        if self.new_iteration and self.reset_on_new_iteration:
            self.new_iteration = False
            self.activity *= 0.0
            for proj in self.in_connections:
                proj.activity *= 0.0
            self.mask.reset()
        super(SettlingCFSheet, self).input_event(conn, data)

    def plot_trace(self):
        """
        Plot the membrane potential trace of each unit designated by
        the trace_coords list. Each trace has trace_n data points.
        """
        trace_offset = 0
        for trace in self.membrane_potential_trace:
            vectorplot(trace + trace_offset, style="b-")
            vectorplot(trace + trace_offset, style="rx")
            trace_offset += 3

    def vectorplot_trace(self):
        """
        Plot the membrane potential trace of each unit designated by
        the trace_coords list. Each trace has trace_n data points.
        This method simply calls plot_trace().
        """
        self.plot_trace()

    def matrixplot_trace(self):
        """
        Matrixplot the membrane potential traces of the units designated
        by the trace_coords list.
        """
        matrixplot(self.membrane_potential_trace, aspect=40)

    def _update_trace(self):
        """
        Update the membrane potential trace for each coordinate listed in trace_coords.
        """

        trace_id = 0

        for coord in self.trace_coords:
            (trace_r, trace_c) = self.sheet2matrixidx(coord[0], coord[1])
            self.membrane_potential_trace[trace_id][self.trace_count]=\
                 self.membrane_potential[trace_r,trace_c]
            trace_id += 1

        self.trace_count = (self.trace_count + 1) % self.trace_n
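
The per-unit logic inside activate() can be summarised outside the Sheet machinery: compare activity against a baseline plus a decaying dynamic threshold, emit a spike and reset the dynamic threshold when the threshold is exceeded and the unit is not refractory, and otherwise let the threshold decay. A vectorised sketch of that rule on toy arrays (not the SLISSOM class itself):

import numpy as np

def spiking_update(activity, dyn_thresh, spike_history,
                   threshold=0.3, decay=0.01, refractory=1.0,
                   thresh_init=2.0, amplitude=1.0):
    """One step of the threshold/spike rule applied element-wise in SLISSOM.activate()."""
    thresh = threshold + dyn_thresh
    membrane = activity - thresh
    fire = (activity > thresh) & (spike_history <= 0)

    out = np.where(fire, amplitude, 0.0)
    dyn_thresh = np.where(fire, thresh_init, dyn_thresh * np.exp(-decay))
    spike_history = np.where(fire, refractory - 1.0, spike_history - 1.0)
    membrane = membrane + out          # append the spike to the membrane potential
    return out, dyn_thresh, spike_history, membrane

acts = np.array([0.1, 0.5, 0.9])
out, dt, sh, mp = spiking_update(acts, np.zeros(3), np.zeros(3))
print(out)   # [0. 1. 1.]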
Code example #20
0
class SparseConnectionField(param.Parameterized):
    """
    A set of weights on one input Sheet.

    Each ConnectionField contributes to the activity of one unit on
    the output sheet, and is normally used as part of a Projection
    including many other ConnectionFields.
    """

    # ALERT: need bounds, more docs
    x = param.Number(default=0.0,doc="Sheet X coordinate of CF")

    y = param.Number(default=0.0,doc="Sheet Y coordinate of CF")

    weights_generator = param.ClassSelector(PatternGenerator,
        default=patterngenerator.Constant(),constant=True,doc="""
        Generates initial weights values.""")

    min_matrix_radius=param.Integer(default=1)

    output_fns = param.HookList(default=[],class_=TransferFn,precedence=0.08,doc="""
        Optional function(s) to apply to the pattern array after it has been created.
        Can be used for normalization, thresholding, etc.""")


    # Class attribute to switch to legacy weight generation if False
    independent_weight_generation = True

    def get_bounds(self,input_sheet=None):
        if input_sheet is not None:
            return self.input_sheet_slice.compute_bounds(input_sheet)
        else:
            return self.input_sheet_slice.compute_bounds(self.input_sheet)


    def __get_shape_mask(self):
        cf_shape = self.projection.cf_shape
        bounds = self.projection.bounds_template
        xdensity = self.projection.src.xdensity
        ydensity = self.projection.src.ydensity
        center_r,center_c = self.projection.src.sheet2matrixidx(0,0)
        center_x,center_y = self.projection.src.matrixidx2sheet(center_r,center_c)
        cf_mask = cf_shape(x=center_x,y=center_y,bounds=bounds,xdensity=xdensity,ydensity=ydensity)
        return cf_mask

    shape_mask = property(__get_shape_mask)


    def __get_norm_total(self):
        return self.projection.norm_total[self.matrix_idx[0],self.matrix_idx[1]]

    def __set_norm_total(self,new_norm_total):
        self.projection.norm_total[self.matrix_idx[0],self.matrix_idx[1]] = new_norm_total

    def __del_norm_total(self):
        self.projection.norm_total[self.matrix_idx[0],self.matrix_idx[1]] = 0.0

    norm_total = property(__get_norm_total,__set_norm_total,__del_norm_total)


    def __get_mask(self):
        x1,x2,y1,y2 = self.input_sheet_slice.tolist()
        mask = np.zeros((x2-x1,y2-y1),dtype=bool)
        inds = np.ravel_multi_index(np.mgrid[x1:x2,y1:y2],self.projection.src.shape).flatten()
        nz_flat = self.projection.weights[inds,self.oned_idx].toarray()
        nz_inds = nz_flat.reshape(x2-x1,y2-y1).nonzero()
        mask[nz_inds] = True
        return mask

    mask = property(__get_mask,
        """
        The mask property returns an array of bools representing the
        zero weights in the CF weights array.

        It is useful when applying additive functions on the weights
        array, to ensure zero values are not accidentally overwritten.

        The mask cannot be changed via the property, only by changing
        the weights directly.
        """)


    def __get_weights(self):
        """
        get_weights accesses the sparse CF matrix and returns the CF
        in dense form.
        """

        x1,x2,y1,y2 = self.src_slice
        inds = np.ravel_multi_index(np.mgrid[x1:x2,y1:y2],self.projection.src.shape).flatten()
        return self.projection.weights[inds,self.oned_idx].toarray().reshape(x2-x1,y2-y1)

    def __set_weights(self,arr):
        """
        Takes an input array, which has to match the CF shape, and
        creates an mgrid of the appropriate size, adds the proper
        offsets and passes the values and indices to the sparse matrix
        representation.
        """

        x1,x2,y1,y2 = self.src_slice
        (dim1,dim2) = arr.shape
        assert (dim1,dim2) == (x2-x1,y2-y1), "Array does not match CF shape."
        (x,y) = np.mgrid[0:dim1,0:dim2] # Create mgrid of CF size
        x_ind = np.array(x)+x1; y_ind = np.array(y) + y1; # Add slice offsets
        row_inds = np.ravel_multi_index((x_ind,y_ind),self.projection.src.shape).flatten().astype(np.int32)
        col_inds = np.array([self.oned_idx]*len(row_inds),dtype=np.int32)
        self.projection.weights.put(arr[x,y].flatten(),row_inds,col_inds)

    weights = property(__get_weights,__set_weights)


    def __init__(self,template,input_sheet,projection,label=None,**params):
        """
        Initializes the CF object and stores meta information about the CF's
        shape and position in the SparseCFProjection to allow for easier
        initialization.
        """

        super(SparseConnectionField,self).__init__(**params)

        self.input_sheet = input_sheet
        self.projection = projection
        self.label = label

        self.matrix_idx = self.projection.dest.sheet2matrixidx(self.x,self.y)
        self.oned_idx = self.matrix_idx[0] * self.projection.dest.shape[1] + self.matrix_idx[1]

        template = copy(template)

        if not isinstance(template,Slice):
            template = Slice(template,self.input_sheet,force_odd=True,
                             min_matrix_radius=self.min_matrix_radius)
        self.weights_slice = self._create_input_sheet_slice(template)

        self.src_slice = tuple(self.input_sheet_slice.tolist())


    def _init_weights(self,mask_template):
        mask = self.weights_slice.submatrix(mask_template)
        mask = np.array(mask,copy=1)

        pattern_params = dict(x=self.x,y=self.y,
                              bounds=self.get_bounds(self.input_sheet),
                              xdensity=self.input_sheet.xdensity,
                              ydensity=self.input_sheet.ydensity,
                              mask=mask)

        controlled_weights = (param.Dynamic.time_dependent
                              and isinstance(param.Dynamic.time_fn,
                                             param.Time)
                              and self.independent_weight_generation)

        if controlled_weights:
            with param.Dynamic.time_fn as t:
                t(0)                        # Initialize at time zero.
                # Controls random streams
                label = '' if self.label is None else self.label
                name = "%s_CF (%.5f, %.5f)" % (label, self.x, self.y)
                w = self.weights_generator(**dict(pattern_params,
                                                  name=name))
        else:
            w = self.weights_generator(**pattern_params)

        w = w.astype(sparse_type)

        for of in self.output_fns:
            of(w)

        return w


    def _create_input_sheet_slice(self,template):
        """
        Create the input_sheet_slice, which provides the appropriate
        Slice for this CF on the input_sheet (as well as providing
        this CF's exact bounds).

        Also creates the weights_slice, which provides the Slice for
        this weights matrix (in case it must be cropped at an edge).
        """
        # copy required because the template gets modified here but
        # needs to be used again
        input_sheet_slice = copy(template)
        input_sheet_slice.positionedcrop(self.x,self.y,self.input_sheet)
        input_sheet_slice.crop_to_sheet(self.input_sheet)

        # weights matrix cannot have a zero-sized dimension (could
        # happen at this stage because of cropping)
        nrows,ncols = input_sheet_slice.shape_on_sheet()
        if nrows<1 or ncols<1:
            raise NullCFError(self.x,self.y,self.input_sheet,nrows,ncols)

        self.input_sheet_slice = input_sheet_slice

        # not copied because we don't use again
        template.positionlesscrop(self.x,self.y,self.input_sheet)
        return template


    def get_input_matrix(self, activity):
        return self.input_sheet_slice.submatrix(activity)
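
The weights getter and setter above just scatter a dense per-CF patch into one column of a (src_units x dest_units) sparse matrix, using np.ravel_multi_index to turn 2-D source coordinates into flat row indices. A standalone sketch of the same indexing, using scipy.sparse in place of the project's csarray type and hypothetical sizes:

import numpy as np
from scipy import sparse

src_shape = (6, 6)             # source sheet shape (rows, cols)
n_dest = 4                     # number of destination units

# One CF: a 3x3 weight patch covering source rows 1..3 and columns 2..4,
# feeding the destination unit with flat index 2 (cf. oned_idx above).
x1, x2, y1, y2 = 1, 4, 2, 5
oned_idx = 2
patch = np.arange(9, dtype=float).reshape(3, 3)

# Flatten the 2-D source coordinates to row indices, as __set_weights does.
row_inds = np.ravel_multi_index(np.mgrid[x1:x2, y1:y2], src_shape).flatten()
col_inds = np.full(len(row_inds), oned_idx)
W = sparse.coo_matrix((patch.flatten(), (row_inds, col_inds)),
                      shape=(src_shape[0] * src_shape[1], n_dest)).tocsr()

# Read the CF back in dense form, as __get_weights does.
dense = W.toarray()[row_inds, oned_idx].reshape(x2 - x1, y2 - y1)
print(np.allclose(dense, patch))   # True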
Code example #21
0
class MPLPlot(DimensionedPlot):
    """
    An MPLPlot object draws a matplotlib figure object when called or
    indexed but can also return a matplotlib animation object as
    appropriate. MPLPlots take element objects such as Image, Contours
    or Points as inputs and plots them in the appropriate format using
    matplotlib. As HoloMaps are supported, all plots support animation
    via the anim() method.
    """

    renderer = MPLRenderer
    sideplots = {}

    fig_alpha = param.Number(default=1.0,
                             bounds=(0, 1),
                             doc="""
        Alpha of the overall figure background.""")

    fig_bounds = param.NumericTuple(default=(0.15, 0.15, 0.85, 0.85),
                                    doc="""
        The bounds of the overall figure as a 4-tuple of the form
        (left, bottom, right, top), defining the size of the border
        around the subplots.""")

    fig_inches = param.Parameter(default=4,
                                 doc="""
        The overall matplotlib figure size in inches.  May be set as
        an integer in which case it will be used to autocompute a
        size. Alternatively may be set with an explicit tuple or list,
        in which case it will be applied directly after being scaled
        by fig_size. If either the width or height is set to None,
        it will be computed automatically.""")

    fig_latex = param.Boolean(default=False,
                              doc="""
        Whether to use LaTeX text in the overall figure.""")

    fig_rcparams = param.Dict(default={},
                              doc="""
        matplotlib rc parameters to apply to the overall figure.""")

    fig_size = param.Integer(default=100,
                             bounds=(1, None),
                             doc="""
        Size relative to the supplied overall fig_inches in percent.""")

    initial_hooks = param.HookList(default=[],
                                   doc="""
        Optional list of hooks called before plotting the data onto
        the axis. The hook is passed the plot object and the displayed
        object, other plotting handles can be accessed via plot.handles.""")

    final_hooks = param.HookList(default=[],
                                 doc="""
        Optional list of hooks called when finalizing an axis.
        The hook is passed the plot object and the displayed
        object, other plotting handles can be accessed via plot.handles.""")

    finalize_hooks = param.HookList(default=[],
                                    doc="""
        Optional list of hooks called when finalizing an axis.
        The hook is passed the plot object and the displayed
        object, other plotting handles can be accessed via plot.handles.""")

    sublabel_format = param.String(default=None,
                                   allow_None=True,
                                   doc="""
        Allows labeling the subaxes in each plot with various formatters
        including {Alpha}, {alpha}, {numeric} and {roman}.""")

    sublabel_position = param.NumericTuple(default=(-0.35, 0.85),
                                           doc="""
         Position relative to the plot for placing the optional subfigure label."""
                                           )

    sublabel_size = param.Number(default=18,
                                 doc="""
         Size of optional subfigure label.""")

    projection = param.Parameter(default=None,
                                 doc="""
        The projection of the plot axis, default of None is equivalent to
        2D plot, '3d' and 'polar' are also supported by matplotlib by default.
        May also supply a custom projection that is either a matplotlib
        projection type or implements the `_as_mpl_axes` method.""")

    show_frame = param.Boolean(default=True,
                               doc="""
        Whether or not to show a complete frame around the plot.""")

    _close_figures = True

    def __init__(self, fig=None, axis=None, **params):
        self._create_fig = True
        super(MPLPlot, self).__init__(**params)
        # List of handles to matplotlib objects for animation update
        scale = self.fig_size / 100.
        if isinstance(self.fig_inches, (tuple, list)):
            self.fig_inches = [
                None if i is None else i * scale for i in self.fig_inches
            ]
        else:
            self.fig_inches *= scale
        fig, axis = self._init_axis(fig, axis)
        self.handles['fig'] = fig
        self.handles['axis'] = axis

        if self.final_hooks and self.finalize_hooks:
            self.warning('Set either final_hooks or deprecated '
                         'finalize_hooks, not both.')
        self.finalize_hooks = self.final_hooks
        self.handles['bbox_extra_artists'] = []

    def _init_axis(self, fig, axis):
        """
        Return an axis which may need to be initialized from
        a new figure.
        """
        if not fig and self._create_fig:
            rc_params = self.fig_rcparams
            if self.fig_latex:
                rc_params['text.usetex'] = True
            with mpl.rc_context(rc=rc_params):
                fig = plt.figure()
                l, b, r, t = self.fig_bounds
                inches = self.fig_inches
                fig.subplots_adjust(left=l, bottom=b, right=r, top=t)
                fig.patch.set_alpha(self.fig_alpha)
                if isinstance(inches, (tuple, list)):
                    inches = list(inches)
                    if inches[0] is None:
                        inches[0] = inches[1]
                    elif inches[1] is None:
                        inches[1] = inches[0]
                    fig.set_size_inches(list(inches))
                else:
                    fig.set_size_inches([inches, inches])
                axis = fig.add_subplot(111, projection=self.projection)
                axis.set_aspect('auto')

        return fig, axis

    def _subplot_label(self, axis):
        layout_num = self.layout_num if self.subplot else 1
        if self.sublabel_format and not self.adjoined and layout_num > 0:
            from mpl_toolkits.axes_grid1.anchored_artists import AnchoredText
            labels = {}
            if '{Alpha}' in self.sublabel_format:
                labels['Alpha'] = int_to_alpha(layout_num - 1)
            elif '{alpha}' in self.sublabel_format:
                labels['alpha'] = int_to_alpha(layout_num - 1, upper=False)
            elif '{numeric}' in self.sublabel_format:
                labels['numeric'] = self.layout_num
            elif '{Roman}' in self.sublabel_format:
                labels['Roman'] = int_to_roman(layout_num)
            elif '{roman}' in self.sublabel_format:
                labels['roman'] = int_to_roman(layout_num).lower()
            at = AnchoredText(self.sublabel_format.format(**labels),
                              loc=3,
                              bbox_to_anchor=self.sublabel_position,
                              frameon=False,
                              prop=dict(size=self.sublabel_size,
                                        weight='bold'),
                              bbox_transform=axis.transAxes)
            at.patch.set_visible(False)
            axis.add_artist(at)
            sublabel = at.txt.get_children()[0]
            self.handles['sublabel'] = sublabel
            self.handles['bbox_extra_artists'] += [sublabel]

    def _finalize_axis(self, key):
        """
        General method to finalize the axis and plot.
        """
        if 'title' in self.handles:
            self.handles['title'].set_visible(self.show_title)

        self.drawn = True
        if self.subplot:
            return self.handles['axis']
        else:
            fig = self.handles['fig']
            if not getattr(self, 'overlaid', False) and self._close_figures:
                plt.close(fig)
            return fig

    @property
    def state(self):
        return self.handles['fig']

    def anim(self, start=0, stop=None, fps=30):
        """
        Method to return a matplotlib animation. The start and stop
        frames may be specified as well as the fps.
        """
        figure = self.initialize_plot()
        anim = animation.FuncAnimation(figure,
                                       self.update_frame,
                                       frames=self.keys,
                                       interval=1000.0 / fps)
        # Close the figure handle
        if self._close_figures: plt.close(figure)
        return anim

    def update(self, key):
        rc_params = self.fig_rcparams
        if self.fig_latex:
            rc_params['text.usetex'] = True
        mpl.rcParams.update(rc_params)
        if len(self) == 1 and key == 0 and not self.drawn:
            return self.initialize_plot()
        return self.__getitem__(key)
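
The hook parameters defined above are the usual way to reach the underlying matplotlib objects from HoloViews. A small usage sketch, assuming a recent HoloViews release with the matplotlib backend, where the option is called 'hooks' (older releases exposed it as final_hooks/finalize_hooks, as in the class above):

import holoviews as hv
hv.extension('matplotlib')

def retitle(plot, element):
    # plot.handles exposes the figure/axis created by MPLPlot._init_axis.
    ax = plot.handles['axis']
    ax.set_title('Custom title: %s' % element.label)

curve = hv.Curve([(0, 0), (1, 1), (2, 4)], label='demo')
curve = curve.opts(hooks=[retitle], fig_inches=5)
hv.save(curve, 'curve.png')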
Code example #22
0
class SparseCFProjection(CFProjection):
    """
    A projection composed of SparseConnectionFields from a Sheet into
    a ProjectionSheet.

    SparseCFProjection computes its activity using a response_fn which
    can either be an optimized function implemented as part of the
    sparse matrix class or an unoptimized function, which requests the
    weights in dense format.  The initial contents of the
    SparseConnectionFields mapping from the input Sheet into the
    target ProjectionSheet are controlled by the weights_generator,
    cf_shape, and weights_output_fn parameters, while the location of
    the ConnectionField is controlled by the coord_mapper parameter.

    Any subclass has to implement the interface activate(self) that
    computes the response from the input and stores it in the activity
    array.
    """

    cf_type = param.Parameter(default=SparseConnectionField,doc="""
        Type of ConnectionField to use when creating individual CFs.""")

    learning_fn = param.Callable(default=CFPLF_Hebbian_Sparse,doc="""
        Function for computing changes to the weights based on one activation step.""")

    response_fn = param.Callable(default=CFPRF_DotProduct_Sparse,doc="""
        Function for computing the Projection response to an input pattern.""")

    weights_output_fns = param.HookList(default=[CFPOF_DivisiveNormalizeL1_Sparse],doc="""
        Functions applied to each CF after learning.""")

    initialized = param.Boolean(default=False)


    def __init__(self,initialize_cfs=True,**params):
        """
        Initialize the Projection with a set of cf_type objects
        (typically SparseConnectionFields), each located at the
        location in the source sheet corresponding to the unit in the
        target sheet. The cf_type objects are stored in the 'cfs'
        array.

        The nominal_bounds_template specified may be altered: the
        bounds must be fitted to the Sheet's matrix, and the weights
        matrix must have odd dimensions. These altered bounds are
        passed to the individual connection fields.

        A mask for the weights matrix is constructed. The shape is
        specified by cf_shape; the size defaults to the size
        of the nominal_bounds_template.
        """

        super(CFProjection,self).__init__(**params)

        self.weights_generator.set_dynamic_time_fn(None,sublistattr='generators')
        # get the actual bounds_template by adjusting a copy of the
        # nominal_bounds_template to ensure an odd slice, and to be
        # cropped to sheet if necessary
        self._slice_template = Slice(copy(self.nominal_bounds_template),
                                     self.src,force_odd=True,
                                     min_matrix_radius=self.min_matrix_radius)

        self.bounds_template = self._slice_template.compute_bounds(self.src)

        self.mask_template = _create_mask(self.cf_shape,self.bounds_template,
                                         self.src,self.autosize_mask,
                                         self.mask_threshold)

        self.n_units = self._calc_n_units()

        self.activity = np.array(self.dest.activity)
        self.norm_total = np.array(self.dest.activity,dtype=np.float64)
        self.has_norm_total = False

        if initialize_cfs:
            self._create_cfs()

        if self.apply_output_fns_init:
            self.apply_learn_output_fns()

        self.input_buffer = None


    def __getstate__(self):
        """
        Method to support pickling of sparse weights object.
        """

        state_dict = self.__dict__.copy()
        state_dict['triplets'] = state_dict['weights'].getTriplets()
        state_dict['weight_shape'] = (self.src.activity.shape,self.dest.activity.shape)
        del state_dict['weights']
        return state_dict


    def __setstate__(self,state_dict):
        """
        Method to support unpickling of sparse weights object.
        """

        self.__dict__.update(state_dict)
        self.weights = sparse.csarray_float(self.weight_shape[0],self.weight_shape[1])
        rowInds, colInds, values = self.triplets
        self.weights.setTriplets(rowInds,colInds,values)
        del self.triplets
        del self.weight_shape


    def _create_cfs(self):
        """
        Creates the CF objects, initializing the weights one by one
        and adding them to the sparse weights object in chunks.
        """

        vectorized_create_cf = simple_vectorize(self._create_cf)
        self.cfs = vectorized_create_cf(*self._generate_coords())
        self.flatcfs = list(self.cfs.flat)
        self.weights = sparse.csarray_float(self.src.activity.shape,self.dest.activity.shape)

        cf_x,cf_y = self.dest.activity.shape
        src_x,src_y = self.src.activity.shape

        y_array = np.zeros((src_x*src_y*cf_y),dtype=np.int32)
        x_array = np.zeros((src_x*src_y*cf_y),dtype=np.int32)
        val_array = np.zeros((src_x*src_y*cf_y),dtype=np.float32)

        # Iterate over the CFs
        for x in range(cf_x):
            temp_sparse = sparse.csarray_float(self.src.activity.shape,self.dest.activity.shape)
            idx = 0
            for y in range(cf_y):
                cf = self.cfs[x][y]
                label = cf.label + ('-%d' % self.seed if self.seed is not None else '')
                name = "%s_CF (%.5f, %.5f)" % ('' if label is None else label, cf.x,cf.y)
                x1,x2,y1,y2 = cf.input_sheet_slice.tolist()
                if self.same_cf_shape_for_all_cfs:
                    mask_template = self.mask_template
                else:
                    mask_template = _create_mask(self.cf_shape,self.bounds_template,
                                                 self.src,self.autosize_mask,
                                                 self.mask_threshold, name=name)
                weights = self.cfs[x][y]._init_weights(mask_template)
                cn_x,cn_y = weights.shape
                y_val = x * cf_y + y
                for cnx in range(cn_x):
                    val_array[idx:idx+cn_y] = weights[cnx,:]
                    x_val = (x1+cnx) * src_y + y1
                    x_array[idx:idx+cn_y] = range(x_val,x_val+cn_y)
                    y_array[idx:idx+cn_y] = y_val
                    idx += cn_y
            nnz_idx = val_array.nonzero()
            temp_sparse.setTriplets(x_array[nnz_idx],y_array[nnz_idx],val_array[nnz_idx])
            self.weights += temp_sparse
            x_array *= 0; y_array *= 0; val_array *= 0.0
        del temp_sparse
        self.weights.compress()
        self.debug("Sparse projection %r loaded" % self.name)


    def _create_cf(self,x,y):
        """
        Create a ConnectionField at x,y in the src sheet.
        """

        label = self.hash_format.format(name=self.name,
                                        src=self.src.name,
                                        dest=self.dest.name)
        try:
            CF = self.cf_type(template=self._slice_template,
                              projection=self,input_sheet=self.src,x=x,y=y,
                              weights_generator=self.weights_generator,
                              min_matrix_radius=self.min_matrix_radius,
                              label=label)
        except NullCFError:
            if self.allow_null_cfs:
                CF = None
            else:
                raise

        return CF

    def get_sheet_mask(self):
        return np.ones(self.activity.shape, dtype=self.activity.dtype)

    def get_active_units_mask(self):
        return np.ones(self.activity.shape, dtype=self.activity.dtype)


    def activate(self,input_activity):
        """Activate using the specified response_fn and output_fn."""
        if self.input_fns:
            input_activity = input_activity.copy()
        for iaf in self.input_fns:
            iaf(input_activity)
        self.input_buffer = input_activity
        self.activity *=0.0
        self.response_fn(self)
        for of in self.output_fns:
            of(self.activity)


    def learn(self):
        """
        For a SparseCFProjection, learn consists of calling the learning_fn.
        """
        # Learning is performed if the input_buffer has already been set,
        # i.e. there is an input to the Projection.
        if self.input_buffer is not None:
            self.learning_fn(self)


    def apply_learn_output_fns(self,active_units_mask=True):
        """
        Apply the weights_output_fns to each unit.
        """
        for of in self.weights_output_fns: of(self)


    def n_bytes(self):
        """
        Estimates the size on the basis of the number of non-zeros in the
        sparse matrix, assuming indices and values are stored using
        32-bit integers and floats respectively.
        """
        return self.n_conns() * (3 * 4)


    def n_conns(self):
        """
        Returns number of nonzero weights.
        """
        return self.weights.getnnz()
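
The n_bytes estimate above is simply nnz * (3 * 4): one 32-bit row index, one 32-bit column index and one 32-bit value per stored weight. That arithmetic can be checked against scipy's COO format, which stores exactly those three arrays (a sketch, not the project's csarray class):

import numpy as np
from scipy import sparse

w = sparse.random(100, 100, density=0.05, format='coo', dtype=np.float32)
nnz = w.nnz

estimate = nnz * (3 * 4)                        # as in SparseCFProjection.n_bytes
actual = (w.row.astype(np.int32).nbytes +       # row indices
          w.col.astype(np.int32).nbytes +       # column indices
          w.data.nbytes)                        # float32 values
print(nnz, estimate, actual, estimate == actual)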
Code example #23
0
File: tabular.py Project: knut0815/holoviews
class TablePlot(BokehPlot, GenericElementPlot):

    height = param.Number(default=None)

    width = param.Number(default=400)

    style_opts = [
        'row_headers', 'selectable', 'editable', 'sortable', 'fit_columns',
        'width', 'height'
    ]

    finalize_hooks = param.HookList(default=[],
                                    doc="""
        Optional list of hooks called when finalizing a column.
        The hook is passed the plot object and the displayed
        object, and other plotting handles can be accessed via plot.handles."""
                                    )

    _update_handles = ['source', 'glyph']

    def __init__(self, element, plot=None, **params):
        super(TablePlot, self).__init__(element, **params)
        self.handles = {} if plot is None else self.handles['plot']
        element_ids = self.hmap.traverse(lambda x: id(x), [Dataset, ItemTable])
        self.static = len(set(element_ids)) == 1 and len(self.keys) == len(
            self.hmap)
        self.callbacks = []  # Callback support on tables not implemented

    def _execute_hooks(self, element):
        """
        Executes finalize hooks
        """
        for hook in self.finalize_hooks:
            try:
                hook(self, element)
            except Exception as e:
                self.warning("Plotting hook %r could not be applied:\n\n %s" %
                             (hook, e))

    def get_data(self, element, ranges, style):
        dims = element.dimensions()
        mapping = {d.name: d.name for d in dims}
        data = {d: element.dimension_values(d) for d in dims}
        data = {
            d.name: values if values.dtype.kind in "if" else list(
                map(d.pprint_value, values))
            for d, values in data.items()
        }
        return data, mapping, style

    def initialize_plot(self, ranges=None, plot=None, plots=None, source=None):
        """
        Initializes a new plot object with the last available frame.
        """
        # Get element key and ranges for frame
        element = self.hmap.last
        key = self.keys[-1]
        self.current_frame = element
        self.current_key = key

        style = self.lookup_options(element, 'style')[self.cyclic_index]
        data, _, style = self.get_data(element, ranges, style)
        if source is None:
            source = self._init_datasource(data)
        self.handles['source'] = source

        dims = element.dimensions()
        columns = [
            TableColumn(field=d.name, title=d.pprint_label) for d in dims
        ]
        if bokeh_version > '0.12.7':
            style['reorderable'] = False
        table = DataTable(source=source,
                          columns=columns,
                          height=self.height,
                          width=self.width,
                          **style)
        self.handles['plot'] = table
        self.handles['glyph_renderer'] = table
        self._execute_hooks(element)
        self.drawn = True

        return table

    @property
    def current_handles(self):
        """
        Returns a list of the plot objects to update.
        """
        handles = []
        if self.static and not self.dynamic:
            return handles

        element = self.current_frame
        for handle in self._update_handles:
            if (handle == 'source' and self.static_source):
                continue
            if handle in self.handles:
                handles.append(self.handles[handle])
        return handles

    def update_frame(self, key, ranges=None, plot=None):
        """
        Updates an existing plot with data corresponding
        to the key.
        """
        element = self._get_frame(key)

        # Cache frame object id to skip updating data if unchanged
        previous_id = self.handles.get('previous_id', None)
        current_id = element._plot_id
        self.handles['previous_id'] = current_id
        self.static_source = (self.dynamic and (current_id == previous_id))

        if self.static_source:
            return
        source = self.handles['source']
        style = self.lookup_options(element, 'style')[self.cyclic_index]
        data, _, style = self.get_data(element, ranges, style)
        self._update_datasource(source, data)
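
Under the hood, get_data and initialize_plot boil down to building a ColumnDataSource from per-dimension value arrays and wrapping it in a bokeh DataTable. A minimal plain-bokeh sketch of that construction (standard bokeh API, no HoloViews):

import numpy as np
from bokeh.io import output_file, save
from bokeh.models import ColumnDataSource, DataTable, TableColumn

# Column dict keyed by dimension name, as produced by TablePlot.get_data above.
data = {'x': np.arange(5), 'y': np.arange(5) ** 2}
source = ColumnDataSource(data=data)

columns = [TableColumn(field=name, title=name.upper()) for name in data]
table = DataTable(source=source, columns=columns, width=400, height=200)

# Later frames only need new column data; TablePlot.update_frame does the
# equivalent of this via _update_datasource.
source.data = {'x': np.arange(5), 'y': np.arange(5) ** 3}

output_file('table_example.html')
save(table)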
Code example #24
0
class CFProjection(Projection):
    """
    A projection composed of ConnectionFields from a Sheet into a ProjectionSheet.

    CFProjection computes its activity using a response_fn of type
    CFPResponseFn (typically a CF-aware version of mdot) and output_fns 
    (typically none).  The initial contents of the 
    ConnectionFields mapping from the input Sheet into the target
    ProjectionSheet are controlled by the weights_generator, cf_shape,
    and weights_output_fn parameters, while the location of the
    ConnectionField is controlled by the coord_mapper parameter.

    Any subclass has to implement the interface
    activate(self,input_activity) that computes the response from the
    input and stores it in the activity array.
    """

    response_fn = param.ClassSelector(
        CFPResponseFn,
        default=CFPRF_Plugin(),
        doc=
        'Function for computing the Projection response to an input pattern.')

    cf_type = param.Parameter(
        default=ConnectionField,
        constant=True,
        doc="Type of ConnectionField to use when creating individual CFs.")

    # JPHACKALERT: Not all support for null CFs has been implemented.
    # CF plotting and C-optimized CFPxF_ functions need
    # to be fixed to support null CFs without crashing.
    allow_null_cfs = param.Boolean(
        default=False,
        doc="Whether or not the projection can have entirely empty CFs")

    nominal_bounds_template = BoundingRegionParameter(
        default=BoundingBox(radius=0.1),
        doc="""
        Bounds defining the Sheet area covered by a prototypical ConnectionField.
        The true bounds will differ depending on the density (see create_slice_template())."""
    )

    weights_generator = param.ClassSelector(
        PatternGenerator,
        default=patterngenerator.Constant(),
        constant=True,
        doc="Generate initial weights values.")

    cf_shape = param.ClassSelector(
        PatternGenerator,
        default=patterngenerator.Constant(),
        constant=True,
        doc="Mask pattern to define the shape of the connection fields.")

    same_cf_shape_for_all_cfs = param.Boolean(default=True,
                                              doc="""
        Whether or not to share a single cf_shape mask for all CFs.
        If True, the cf_shape is evaluated only once and shared for
        all CFs, which saves computation time and memory.  If False,
        the cf_shape is evaluated once for each CF, allowing each to
        have its own shape.""")

    learning_fn = param.ClassSelector(
        CFPLearningFn,
        default=CFPLF_Plugin(),
        doc=
        'Function for computing changes to the weights based on one activation step.'
    )

    # JABALERT: Shouldn't learning_rate be owned by the learning_fn?
    learning_rate = param.Number(default=0.0,
                                 softbounds=(0, 100),
                                 doc="""
        Amount of learning at each step for this projection, specified
        in units that are independent of the density of each Sheet.""")

    weights_output_fns = param.HookList(
        default=[CFPOF_Plugin()],
        class_=CFPOutputFn,
        doc='Functions applied to each CF after learning.')

    strength = param.Number(default=1.0,
                            doc="""
        Global multiplicative scaling applied to the Activity of this Sheet."""
                            )

    coord_mapper = param.ClassSelector(
        CoordinateMapperFn,
        default=IdentityMF(),
        doc='Function to map a projected coordinate into the target sheet.')

    # CEBALERT: this is temporary (allows c++ matching in certain
    # cases).  We will allow the user to override the mask size, but
    # by offering a scaling parameter.
    autosize_mask = param.Boolean(default=True,
                                  constant=True,
                                  precedence=-1,
                                  doc="""
        Topographica sets the mask size so that it is the same as the connection field's
        size, unless this parameter is False - in which case the user-specified size of
        the cf_shape is used. In normal usage of Topographica, this parameter should
        remain True.""")

    mask_threshold = param.Number(default=0.5,
                                  constant=True,
                                  doc="""
        If a unit is above this value in the cf_shape mask, it is
        included; otherwise it is excluded from the mask.""")

    apply_output_fns_init = param.Boolean(default=True,
                                          doc="""
        Whether to apply the output function to connection fields (e.g. for 
        normalization) when the CFs are first created.""")

    min_matrix_radius = param.Integer(default=1,
                                      bounds=(0, None),
                                      doc="""
        Enforced minimum for radius of weights matrix.
        The default of 1 gives a minimum matrix of 3x3. 0 would
        allow a 1x1 matrix.""")

    precedence = param.Number(default=0.8)

    def __init__(self, initialize_cfs=True, **params):
        """
        Initialize the Projection with a set of cf_type objects
        (typically ConnectionFields), each located at the location
        in the source sheet corresponding to the unit in the target
        sheet. The cf_type objects are stored in the 'cfs' array.

        The nominal_bounds_template specified may be altered: the
        bounds must be fitted to the Sheet's matrix, and the weights
        matrix must have odd dimensions. These altered bounds are
        passed to the individual connection fields.

        A mask for the weights matrix is constructed. The shape is
        specified by cf_shape; the size defaults to the size
        of the nominal_bounds_template.
        """
        super(CFProjection, self).__init__(**params)

        self.weights_generator.set_dynamic_time_fn(None,
                                                   sublistattr='generators')
        # get the actual bounds_template by adjusting a copy of the
        # nominal_bounds_template to ensure an odd slice, and to be
        # cropped to sheet if necessary
        self._slice_template = Slice(copy(self.nominal_bounds_template),
                                     self.src,
                                     force_odd=True,
                                     min_matrix_radius=self.min_matrix_radius)

        self.bounds_template = self._slice_template.compute_bounds(self.src)

        self.mask_template = _create_mask(self.cf_shape, self.bounds_template,
                                          self.src, self.autosize_mask,
                                          self.mask_threshold)

        self.n_units = self._calc_n_units()

        if initialize_cfs:
            self._create_cfs()

        ### JCALERT! We might want to change the default value of the
        ### input value to self.src.activity; but it fails, raising a
        ### type error. It probably has to be clarified why this is
        ### happening
        self.input_buffer = None
        self.activity = array(self.dest.activity)

    def _generate_coords(self):
        X, Y = self.dest.sheetcoords_of_idx_grid()
        vectorized_coord_mapper = simple_vectorize(
            self.coord_mapper,
            num_outputs=2,
            # CB: could switch to float32?
            output_type=float)
        return vectorized_coord_mapper(X, Y)

    # CB: should be _initialize_cfs() since we already have 'initialize_cfs' flag?
    def _create_cfs(self):
        vectorized_create_cf = simple_vectorize(self._create_cf)
        self.cfs = vectorized_create_cf(*self._generate_coords())
        self.flatcfs = list(self.cfs.flat)

    def _create_cf(self, x, y):
        """
        Create a ConnectionField at x,y in the src sheet.
        """
        # (to restore would need to have an r,c counter)
        # self.debug("Creating CF(%d,%d) from src (%.3f,%.3f) to  dest (%.3f,%.3f)"%(r,c,x_cf,y_cf,x,y))

        try:
            if self.apply_output_fns_init:
                ofs = [wof.single_cf_fn for wof in self.weights_output_fns]
            else:
                ofs = []

            if self.same_cf_shape_for_all_cfs:
                mask_template = self.mask_template
            else:
                mask_template = _create_mask(self.cf_shape,
                                             self.bounds_template, self.src,
                                             self.autosize_mask,
                                             self.mask_threshold)

            CF = self.cf_type(self.src,
                              x=x,
                              y=y,
                              template=self._slice_template,
                              weights_generator=self.weights_generator,
                              mask=mask_template,
                              output_fns=ofs,
                              min_matrix_radius=self.min_matrix_radius)
        except NullCFError:
            if self.allow_null_cfs:
                CF = None
            else:
                raise

        return CF

    def _calc_n_units(self):
        """Return the number of unmasked units in a typical ConnectionField."""

        return min(
            len(self.mask_template.ravel().nonzero()[0]),
            # CEBALERT: if the mask_template is bigger than the
            # src sheet (e.g.  conn radius bigger than src
            # radius), return the size of the source sheet
            self.src.shape[0] * self.src.shape[1])

    def cf(self, r, c):
        """Return the specified ConnectionField"""
        # CB: should we offer convenience cf(x,y) (i.e. sheetcoords) method instead?
        self.warning(
            "CFProjection.cf(r,c) is deprecated: use cfs[r,c] instead")
        return self.cfs[r, c]

    def cf_bounds(self, r, c):
        """Return the bounds of the specified ConnectionField."""
        return self.cfs[r, c].get_bounds(self.src)

    def get_view(self, sheet_x, sheet_y, timestamp):
        """
        Return a single connection field UnitView, for the unit
        located nearest to sheet coordinate (sheet_x,sheet_y).
        """
        matrix_data = zeros(self.src.activity.shape, Float)
        (r, c) = self.dest.sheet2matrixidx(sheet_x, sheet_y)
        r1, r2, c1, c2 = self.cfs[r, c].input_sheet_slice
        matrix_data[r1:r2, c1:c2] = self.cfs[r, c].weights

        # CB: the following would be equivalent with Slice __call__

        # cf = self.cf(self.dest.sheet2matrixidx(sheet_x,sheet_y))
        # matrix_data = numpy.zeros(self.src.activity.shape,Numeric.Float)
        # matrix_data[cf.input_sheet_slice()]=cf.weights

        return UnitView((matrix_data, self.src.bounds), sheet_x, sheet_y, self,
                        timestamp)

    def activate(self, input_activity):
        """Activate using the specified response_fn and output_fn."""
        self.input_buffer = input_activity
        self.activity *= 0.0
        self.response_fn(MaskedCFIter(self), input_activity, self.activity,
                         self.strength)
        for of in self.output_fns:
            of(self.activity)

    # CEBALERT: should add active_units_mask to match
    # apply_learn_output_fns.
    def learn(self):
        """
        For a CFProjection, learn consists of calling the learning_fn.
        """
        # Learning is performed if the input_buffer has already been set,
        # i.e. there is an input to the Projection.
        if self.input_buffer is not None:
            self.learning_fn(MaskedCFIter(self), self.input_buffer,
                             self.dest.activity, self.learning_rate)

    # CEBALERT: called 'learn' output fns here, but called 'weights' output fns
    # elsewhere (mostly). Change all to 'learn'?
    def apply_learn_output_fns(self, active_units_mask=True):
        """
        Apply the weights_output_fns to each unit.

        If active_units_mask is True, inactive units will be skipped.
        """
        for of in self.weights_output_fns:
            of(MaskedCFIter(self, active_units_mask=active_units_mask))

    # CEBALERT: see gc alert in simulation.__new__
    def _cleanup(self):
        for cf in self.cfs.flat:
            # cf could be None or maybe something else
            if hasattr(cf, 'input_sheet'):
                cf.input_sheet = None
            if hasattr(cf, 'input_sheet_slice'):
                cf.input_sheet_slice = None
            if hasattr(cf, 'weights_slice'):
                cf.weights_slice = None

    def n_bytes(self):
        # Could also count the input_sheet_slice
        rows, cols = self.cfs.shape
        return super(CFProjection,self).n_bytes() + \
               sum([cf.weights.nbytes +
                    cf.mask.nbytes
                    for cf,i in CFIter(self,ignore_sheet_mask=True)()])

    def n_conns(self):
        # Counts non-masked values, if mask is available; otherwise counts
        # weights as connections if nonzero
        rows, cols = self.cfs.shape
        return sum([
            len((cf.mask
                 if cf.mask is not None else cf.weights).ravel().nonzero()[0])
            for cf, i in MaskedCFIter(self)()
        ])
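
The activate/learn cycle above delegates the arithmetic to the plugged-in response_fn, learning_fn and weights_output_fns; a common configuration is a dot-product response, a Hebbian update and divisive normalization (cf. the DotProduct, Hebbian and DivisiveNormalizeL1 variants used elsewhere in these examples). A toy sketch of that cycle for a single connection field, in plain NumPy:

import numpy as np

rng = np.random.default_rng(1)
weights = rng.random((5, 5))          # one CF's weight patch
input_patch = rng.random((5, 5))      # matching patch of input activity
strength = 1.0
learning_rate = 0.1

# Response: dot product of the CF weights with its input patch.
unit_activity = strength * np.sum(weights * input_patch)

# Hebbian learning: strengthen weights in proportion to pre- and post-synaptic
# activity, then renormalise as a weights_output_fn would.
weights += learning_rate * unit_activity * input_patch
weights /= weights.sum()              # divisive L1 normalization

print(unit_activity, weights.sum())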
Code example #25
0
class PatternSampler(ImageSampler):
    """
    When called, resamples - according to the size_normalization
    parameter - an image at the supplied (x,y) sheet coordinates.

    (x,y) coordinates outside the image are returned as the background
    value.
    """
    whole_pattern_output_fns = param.HookList(class_=TransferFn,default=[],doc="""
        Functions to apply to the whole image before any sampling is done.""")

    background_value_fn = param.Callable(default=None,doc="""
        Function to compute an appropriate background value. Must accept
        an array and return a scalar.""")

    size_normalization = param.ObjectSelector(default='original',
        objects=['original','stretch_to_fit','fit_shortest','fit_longest'],
        doc="""
        Determines how the pattern is scaled initially, relative to the
        default retinal dimension of 1.0 in sheet coordinates:

        'stretch_to_fit': scale both dimensions of the pattern so they
        would fill a Sheet with bounds=BoundingBox(radius=0.5) (disregards
        the original's aspect ratio).

        'fit_shortest': scale the pattern so that its shortest dimension
        is made to fill the corresponding dimension on a Sheet with
        bounds=BoundingBox(radius=0.5) (maintains the original's aspect
        ratio, filling the entire bounding box).

        'fit_longest': scale the pattern so that its longest dimension is
        made to fill the corresponding dimension on a Sheet with
        bounds=BoundingBox(radius=0.5) (maintains the original's
        aspect ratio, fitting the image into the bounding box but not
        necessarily filling it).

        'original': no scaling is applied; each pixel of the pattern
        corresponds to one matrix unit of the Sheet on which the
        pattern is being displayed.""")

    def _get_image(self):
        return self.scs.activity

    def _set_image(self,image):
        # Stores a SheetCoordinateSystem with an activity matrix
        # representing the image
        if not isinstance(image,numpy.ndarray):
            image = array(image,Float)

        rows,cols = image.shape
        self.scs = SheetCoordinateSystem(xdensity=1.0,ydensity=1.0,
                                         bounds=BoundingBox(points=((-cols/2.0,-rows/2.0),
                                                                    ( cols/2.0, rows/2.0))))
        self.scs.activity=image

    def _del_image(self):
        self.scs = None


    def __call__(self, image, x, y, sheet_xdensity, sheet_ydensity, width=1.0, height=1.0):
        """
        Return pixels from the supplied image at the given Sheet (x,y)
        coordinates.

        The image is assumed to be a NumPy array or other object that
        exports the NumPy buffer interface (i.e. can be converted to a
        NumPy array by passing it to numpy.array(), e.g. Image.Image).
        The whole_pattern_output_fns are applied to the image before
        any sampling is done.

        To calculate the sample, the image is scaled according to the
        size_normalization parameter, and any supplied width and
        height. sheet_xdensity and sheet_ydensity are the xdensity and
        ydensity of the sheet on which the pattern is to be drawn.
        """
        # CEB: could allow image=None in args and have 'if image is
        # not None: self.image=image' here to avoid re-initializing the
        # image.
        self.image=image

        for wpof in self.whole_pattern_output_fns:
            wpof(self.image)
        if not self.background_value_fn:
            self.background_value = 0.0
        else:
            self.background_value = self.background_value_fn(self.image)

        pattern_rows,pattern_cols = self.image.shape

        if width==0 or height==0 or pattern_cols==0 or pattern_rows==0:
            return ones(x.shape, Float)*self.background_value

        # scale the supplied coordinates to match the pattern being at density=1
        x=x*sheet_xdensity # deliberately don't operate in place (so as not to change supplied x & y)
        y=y*sheet_ydensity

        # scale according to initial pattern size_normalization selected (size_normalization)
        self.__apply_size_normalization(x,y,sheet_xdensity,sheet_ydensity,self.size_normalization)

        # scale according to user-specified width and height
        x/=width
        y/=height

        # now sample pattern at the (r,c) corresponding to the supplied (x,y)
        r,c = self.scs.sheet2matrixidx(x,y)
        # (numpy.where(cond,x,y) evaluates both x and y regardless of cond, so clip the indices first)
        r.clip(0,pattern_rows-1,out=r)
        c.clip(0,pattern_cols-1,out=c)
        left,bottom,right,top = self.scs.bounds.lbrt()
        return numpy.where((x>=left) & (x<right) & (y>bottom) & (y<=top),
                           self.image[r,c],
                           self.background_value)


    def __apply_size_normalization(self,x,y,sheet_xdensity,sheet_ydensity,size_normalization):
        pattern_rows,pattern_cols = self.image.shape

        # Instead of an if-test, could have a class of this type of
        # function (c.f. OutputFunctions, etc)...
        if size_normalization=='original':
            return

        elif size_normalization=='stretch_to_fit':
            x_sf,y_sf = pattern_cols/sheet_xdensity, pattern_rows/sheet_ydensity
            x*=x_sf; y*=y_sf

        elif size_normalization=='fit_shortest':
            if pattern_rows<pattern_cols:
                sf = pattern_rows/sheet_ydensity
            else:
                sf = pattern_cols/sheet_xdensity
            x*=sf;y*=sf

        elif size_normalization=='fit_longest':
            if pattern_rows<pattern_cols:
                sf = pattern_cols/sheet_xdensity
            else:
                sf = pattern_rows/sheet_ydensity
            x*=sf;y*=sf
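
A rough usage sketch of the sampler defined above; the toy image, sampling grid and densities are invented for illustration, and it is assumed that PatternSampler can be imported together with its dependencies.

import numpy as np

# Sample a 4x6 toy image onto an 8x8 grid of sheet coordinates.
sampler = PatternSampler(size_normalization='fit_shortest')
img = np.random.rand(4, 6)                       # rows x cols
xs = np.linspace(-0.5, 0.5, 8)
ys = np.linspace(0.5, -0.5, 8)
x, y = np.meshgrid(xs, ys)                       # sheet (x, y) coordinates to sample
samples = sampler(img, x, y, sheet_xdensity=8, sheet_ydensity=8)
print(samples.shape)                             # (8, 8); background value outside the image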
Code example #26
class Annotator(PaneBase):
    """
    An Annotator allows drawing, editing and annotating a specific
    type of element. Each Annotator consists of the `plot`, used to
    draw and edit the element, and the `editor`, which contains a list
    of tables for annotating each object in the element with the
    additional properties defined in `annotations`.
    """

    annotations = param.ClassSelector(default=[],
                                      class_=(dict, list),
                                      doc="""
        Annotations to associate with each object.""")

    default_opts = param.Dict(default={
        'responsive': True,
        'min_height': 400,
        'padding': 0.1,
        'framewise': True
    },
                              doc="""
        Opts to apply to the element.""")

    object = param.ClassSelector(class_=Element,
                                 doc="""
        The Element to edit and annotate.""")

    num_objects = param.Integer(default=None,
                                bounds=(0, None),
                                doc="""
        The maximum number of objects to draw.""")

    table_transforms = param.HookList(default=[],
                                      doc="""
        Transform(s) to apply to element when converting data to Table.
        The functions should accept the Annotator and the transformed
        element as input.""")

    table_opts = param.Dict(default={
        'editable': True,
        'width': 400
    },
                            doc="""
        Opts to apply to the editor table(s).""")

    # Once generic editing tools are merged into bokeh this could
    # include snapshot, restore and clear tools
    _tools = []

    # Allows patching on custom behavior
    _extra_opts = {}

    # Triggers for updates to the table
    _triggers = ['annotations', 'object', 'table_opts']

    # Links between plot and table
    _link_type = DataLink
    _selection_link_type = SelectionLink

    priority = 0.7

    @classmethod
    def applies(cls, obj):
        if 'holoviews' not in sys.modules:
            return False
        return isinstance(obj, cls.param.object.class_)

    @property
    def _element_type(self):
        return self.param.object.class_

    @property
    def _object_name(self):
        return self._element_type.__name__

    def __init__(self, object=None, **params):
        super(Annotator, self).__init__(None, **params)
        self.object = self._process_element(object)
        self._table_row = Row()
        self.editor = Tabs(('%s' % param_name(self.name), self._table_row))
        self.plot = DynamicMap(self._get_plot)
        self.plot.callback.inputs[:] = [self.object]
        self._tables = []
        self._init_stream()
        self._stream.add_subscriber(self._update_object, precedence=0.1)
        self._selection = Selection1D(source=self.plot)
        self._update_table()
        self._update_links()
        self.param.watch(self._update, self._triggers)
        self.layout[:] = [self.plot, self.editor]

    @param.depends('annotations', 'object', 'default_opts')
    def _get_plot(self):
        return self._process_element(self.object)

    def _get_model(self, doc, root=None, parent=None, comm=None):
        return self.layout._get_model(doc, root, parent, comm)

    @preprocess
    def _update(self, event=None):
        if event and event.name == 'object':
            with param.discard_events(self):
                self.object = self._process_element(event.new)
        self._update_table()

    def _update_links(self):
        if hasattr(self, '_link'): self._link.unlink()
        self._link = self._link_type(self.plot, self._table)
        if self._selection_link_type:
            if hasattr(self, '_selection_link'): self._selection_link.unlink()
            self._selection_link = SelectionLink(self.plot, self._table)

    def _update_object(self, data=None):
        with param.discard_events(self):
            self.object = self._stream.element

    def _update_table(self):
        object = self.object
        for transform in self.table_transforms:
            object = transform(object)
        self._table = Table(object, label=param_name(self.name)).opts(
            show_title=False, **self.table_opts)
        self._update_links()
        self._table_row[:] = [self._table]

    def select(self, selector=None):
        return self.layout.select(selector)

    @classmethod
    def compose(cls, *annotators):
        """Composes multiple Annotator instances and elements

        The composed Panel will contain all the elements in the
        supplied Annotators and Tabs containing all editors.

        Args:
            annotators: Annotator objects or elements to compose

        Returns:
            A new Panel consisting of the overlaid plots and tables
        """
        layers, tables = [], []
        for a in annotators:
            if isinstance(a, Annotator):
                layers.append(a.plot)
                tables += a.tables
            elif isinstance(a, Element):
                layers.append(a)
        return Row(Overlay(layers).collate(), Tabs(*tables))

    @property
    def tables(self):
        return list(zip(self.editor._names, self.editor))

    @property
    def selected(self):
        return self.object.iloc[self._selection.index]
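
As a rough illustration of how this class is meant to be used (the subclass name below is hypothetical; concrete Annotators are registered per element type elsewhere in HoloViews):

import holoviews as hv

points = hv.Points([(0.0, 0.0), (1.0, 1.0)])
# SomePointAnnotator is a placeholder for a concrete Annotator subclass.
annot = SomePointAnnotator(points, annotations=['Label'], num_objects=10)
annot.layout     # plot plus editor tabs, assembled in __init__
annot.object     # element reflecting edits made through the linked table
annot.selected   # rows currently selected via the Selection1D stream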
Code example #27
class SettlingCFSheet(JointNormalizingCFSheet):
    """
    A JointNormalizingCFSheet implementing the idea of settling.

    Breaks continuous time up into discrete iterations, each
    consisting of a series of activations, up to a fixed number of
    settling steps.  Settling is controlled by the tsettle parameter;
    once that number of settling steps has been reached, an external
    input is required before the sheet will activate again.

    See the LISSOM algorithm (Sirosh and Miikkulainen, Biological
    Cybernetics 71:66-78, 1994) for one example of its usage.
    """

    strict_tsettle = param.Parameter(default = None,doc="""
        If non-None, delay sending output until activation_count reaches this value.""")

    mask_init_time=param.Integer(default=5,bounds=(0,None),doc="""
        Determines when a new mask is initialized in each new iteration.

        The mask is reset whenever new input comes in.  Once the
        activation_count (see tsettle) reaches mask_init_time, the mask
        is initialized to reflect the current activity profile.""")

    tsettle=param.Integer(default=8,bounds=(0,None),doc="""
        Number of times to activate the SettlingCFSheet for each external input event.

        A counter is incremented each time an input is received from any
        source, and once the counter reaches tsettle, the last activation
        step is skipped so that there will not be any further recurrent
        activation.  The next external (i.e., afferent or feedback)
        event will then start the counter over again.""")

    continuous_learning = param.Boolean(default=False, doc="""
        Whether to modify the weights after every settling step.
        If false, waits until settling is completed before doing learning.""")

    precedence = param.Number(0.6)

    post_initialization_weights_output_fns = param.HookList([],doc="""
        If not empty, weights output_fns that will replace the
        existing ones after an initial normalization step.""")

    beginning_of_iteration = param.HookList(default=[],instantiate=False,doc="""
        List of callables to be executed at the beginning of each iteration.""")

    end_of_iteration = param.HookList(default=[],instantiate=False,doc="""
        List of callables to be executed at the end of each iteration.""")


    def __init__(self,**params):
        super(SettlingCFSheet,self).__init__(**params)
        self.__counter_stack=[]
        self.activation_count = 0
        self.new_iteration = True


    def start(self):
        self._normalize_weights(active_units_mask=False)
        if len(self.post_initialization_weights_output_fns)>0:
            for proj in self.in_connections:
                if not isinstance(proj,Projection):
                    self.debug("Skipping non-Projection ")
                else:
                    proj.weights_output_fns=self.post_initialization_weights_output_fns


    def input_event(self,conn,data):
        # On a new afferent input, clear the activity
        if self.new_iteration:
            for f in self.beginning_of_iteration: f()
            self.new_iteration = False
            self.activity *= 0.0
            for proj in self.in_connections:
                proj.activity *= 0.0
            self.mask.reset()
        super(SettlingCFSheet,self).input_event(conn,data)


    ### JABALERT!  There should be some sort of warning when
    ### tsettle times the input delay is larger than the input period.
    ### Right now it seems to do strange things in that case (does it
    ### settle at all after the first iteration?), but of course that
    ### is arguably an error condition anyway (and should thus be
    ### flagged).
    # CEBALERT: there is at least one bug in here for tsettle==0: see
    # CB/JAB email "LISSOM tsettle question", 2010/03/22.
    def process_current_time(self):
        """
        Pass the accumulated stimulation through self.output_fns and
        send it out on the default output port.
        """
        if self.new_input:
            self.new_input = False

            if self.activation_count == self.mask_init_time:
                self.mask.calculate()

            if self.tsettle == 0:
                # Special case: behave just like a CFSheet
                self.activate()
                self.learn()

            elif self.activation_count == self.tsettle:
                # Once we have been activated the required number of times
                # (determined by tsettle), reset various counters, learn
                # if appropriate, and avoid further activation until an
                # external event arrives.
                for f in self.end_of_iteration: f()

                self.activation_count = 0
                self.new_iteration = True # used by input_event when it is called
                if (self.plastic and not self.continuous_learning):
                    self.learn()
            else:
                self.activate()
                self.activation_count += 1
                if (self.plastic and self.continuous_learning):
                    self.learn()


    # print the weights of a unit
    def printwts(self,x,y):
        for proj in self.in_connections:
            print(proj.name, x, y)
            print(proj.cfs[x,y].weights)


    def state_push(self,**args):
        super(SettlingCFSheet,self).state_push(**args)
        self.__counter_stack.append((self.activation_count,self.new_iteration))


    def state_pop(self,**args):
        super(SettlingCFSheet,self).state_pop(**args)
        self.activation_count,self.new_iteration=self.__counter_stack.pop()

    def send_output(self,src_port=None,data=None):
        """Send some data out to all connections on the given src_port."""

        out_conns_on_src_port = [conn for conn in self.out_connections
                                 if self._port_match(conn.src_port,[src_port])]

        for conn in out_conns_on_src_port:
            if self.strict_tsettle is not None:
                if self.activation_count < self.strict_tsettle:
                    if len(conn.dest_port) > 2 and conn.dest_port[2] == 'Afferent':
                        continue
            self.verbose("Sending output on src_port %s via connection %s to %s" %
                         (str(src_port), conn.name, conn.dest.name))
            e=EPConnectionEvent(self.simulation.convert_to_time_type(conn.delay)+self.simulation.time(),conn,data)
            self.simulation.enqueue_event(e)
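
A small sketch of how the settling parameters and iteration hooks above might be configured; the arguments are illustrative, and a real model would also attach projections and drive the sheet from a Simulation.

# Illustrative configuration only.
sheet = SettlingCFSheet(tsettle=8, mask_init_time=5, continuous_learning=False)
sheet.beginning_of_iteration.append(lambda: print("starting a new settling iteration"))
sheet.end_of_iteration.append(lambda: print("settled; learning applied if plastic"))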
Code example #28
class annotate(param.ParameterizedFunction):
    """
    The annotate function allows drawing, editing and annotating any
    given Element (if it is supported). The annotate function returns
    a Layout of the editable plot and an Overlay of table(s), which
    allow editing the data of the element. The edited and annotated
    data may be accessed using the annotated and selected properties.
    """

    annotator = param.Parameter(doc="""The current Annotator instance.""")

    annotations = param.ClassSelector(default=[],
                                      class_=(dict, list),
                                      doc="""
        Annotations to associate with each object.""")

    edit_vertices = param.Boolean(default=True,
                                  doc="""
        Whether to add tool to edit vertices.""")

    num_objects = param.Integer(default=None,
                                bounds=(0, None),
                                doc="""
        The maximum number of objects to draw.""")

    show_vertices = param.Boolean(default=True,
                                  doc="""
        Whether to show vertices when drawing the Path.""")

    table_transforms = param.HookList(default=[],
                                      doc="""
        Transform(s) to apply to element when converting data to Table.
        The functions should accept the Annotator and the transformed
        element as input.""")

    table_opts = param.Dict(default={
        'editable': True,
        'width': 400
    },
                            doc="""
        Opts to apply to the editor table(s).""")

    vertex_annotations = param.ClassSelector(default=[],
                                             class_=(dict, list),
                                             doc="""
        Columns to annotate the Polygons with.""")

    vertex_style = param.Dict(default={'nonselection_alpha': 0.5},
                              doc="""
        Options to apply to vertices during drawing and editing.""")

    _annotator_types = OrderedDict()

    @property
    def annotated(self):
        annotated = self.annotator.object
        if Store.current_backend == 'bokeh':
            return annotated.opts(clone=True, tools=['hover'])

    @property
    def selected(self):
        selected = self.annotator.selected
        if Store.current_backend == 'bokeh':
            return selected.opts(clone=True, tools=['hover'])

    @classmethod
    def compose(cls, *annotators):
        """Composes multiple annotator layouts and elements

        The composed Layout will contain all the elements in the
        supplied annotators and an overlay of all editor tables.

        Args:
            annotators: Annotator layouts or elements to compose

        Returns:
            A new layout consisting of the overlaid plots and tables
        """
        layers = []
        tables = []
        for annotator in annotators:
            if isinstance(annotator, Layout):
                l, ts = annotator
                layers.append(l)
                tables += ts
            elif isinstance(annotator, annotate):
                layers.append(annotator.plot)
                tables += [t[0].object for t in annotator.editor]
            elif isinstance(annotator, Element):
                layers.append(annotator)
            else:
                raise ValueError("Cannot compose %s type with annotators." %
                                 type(annotator).__name__)
        tables = Overlay(tables, group='Annotator').opts(tabs=True)
        return (Overlay(layers).collate() +
                tables).opts(sizing_mode='stretch_width')

    def __call__(self, element, **params):
        for eltype, annotator_type in self._annotator_types.items():
            if isinstance(element, eltype):
                break
        else:
            annotator_type = None
        if annotator_type is None:
            raise ValueError('Annotation of %s element types is not '
                             'supported.' % type(element).__name__)
        self.annotator = annotator_type(element, **params)
        tables = Overlay([t[0].object for t in self.annotator.editor],
                         group='Annotator').opts(tabs=True)
        return (self.annotator.plot + tables).opts(sizing_mode='stretch_width')
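
The usual calling pattern keeps a handle on an instance of the function so that the results can be read back afterwards; a hedged sketch, assuming HoloViews with the Bokeh backend and a Points type registered in _annotator_types:

import holoviews as hv
hv.extension('bokeh')

points = hv.Points([(0.0, 0.0), (1.0, 1.0)])
annotator = annotate.instance()                    # keep the instance to read results back
layout = annotator(points, annotations=['Label'])  # editable plot + editor table tabs
# After interactive editing in a live session:
#   annotator.annotated  -> element with annotations and a hover tool
#   annotator.selected   -> currently selected objects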
Code example #29
File: projection.py Project: qqkong/topographica
class Projection(EPConnection):
    """
    A projection from a Sheet into a ProjectionSheet.

    Projections are required to support the activate() method, which
    will construct a matrix the same size as the target
    ProjectionSheet, from an input matrix of activity from the source
    Sheet.  Other than that, a Projection may be of any type.
    """
    __abstract = True

    strength = param.Number(default=1.0)

    src_port = param.Parameter(default='Activity')

    dest_port = param.Parameter(default='Activity')

    output_fns = param.HookList(default=[],
                                class_=TransferFn,
                                doc="""
        Function(s) applied to the Projection activity after it is computed."""
                                )

    plastic = param.Boolean(default=True,
                            doc="""
        Whether or not to update the internal state on each call.
        Allows plasticity to be turned off during analysis, and then re-enabled."""
                            )

    activity_group = param.Parameter(default=(0.5, numpy.add),
                                     doc="""
       Grouping and precedence specifier for computing activity from
       Projections.  In a ProjectionSheet, all Projections in the
       same activity_group will be summed, and then the results from
       each group will be combined in the order of the activity_group
       using the operator specified by the activity_operator.  For
       instance, if there are two Projections with
       activity_group==(0.2,numpy.add) and two with
       activity_group==(0.6,numpy.divide), activity
       from the first two will be added together, and the result
       divided by the sum of the second two.""")

    # CEBALERT: precedence should probably be defined at some higher level
    # (and see other classes where it's defined, e.g. Sheet)
    precedence = param.Number(default=0.5)

    def __init__(self, **params):
        super(Projection, self).__init__(**params)
        self.activity = array(self.dest.activity)
        self._plasticity_setting_stack = []

    def activate(self, input_activity):
        """
        Compute an activity matrix for output, based on the specified input_activity.

        Subclasses must override this method to whatever it means to
        calculate activity in that subclass.
        """
        raise NotImplementedError

    def learn(self):
        """
        This function has to be re-implemented by sub-classes, if they wish
        to support learning.
        """
        pass

    def apply_learn_output_fns(self, active_units_mask=True):
        """
        Sub-classes can implement this function if they wish to
        perform an operation after learning has completed, such as
        normalizing weight values across different projections.

        The active_units_mask argument determines whether or not to
        apply the output_fn to non-responding units.
        """
        pass

    def override_plasticity_state(self, new_plasticity_state):
        """
        Temporarily override plasticity of medium and long term internal
        state.

        This function should be implemented by all subclasses so that
        it preserves the ability of the Projection to compute
        activity, i.e. to operate over a short time scale, while
        preventing any lasting changes to the state.

        For instance, if new_plasticity_state is False, in a
        Projection with modifiable connection weights, the values of
        those weights should temporarily be made fixed and unchanging
        after this call.  For a Projection with automatic
        normalization, homeostatic plasticity, or other features that
        depend on a history of events (rather than just the current
        item being processed), changes in those properties would be
        disabled temporarily.  Setting the plasticity state to False
        is useful during analysis operations (e.g. map measurement)
        that would otherwise change the state of the underlying
        network.

        Any process that does not have any lasting state, such as
        those affecting only the current activity level, should not
        be affected by this call.

        By default, this call simply calls override_plasticity_state()
        on the Projection's output_fn, and sets the 'plastic'
        parameter to False.
        """
        self._plasticity_setting_stack.append(self.plastic)
        self.plastic = new_plasticity_state

        for of in self.output_fns:
            if hasattr(of, 'override_plasticity_state'):
                of.override_plasticity_state(new_plasticity_state)

    def restore_plasticity_state(self):
        """
        Restore previous plasticity state of medium and long term
        internal state after a override_plasticity_state call.

        This function should be implemented by all subclasses to
        remove the effect of the most recent override_plasticity_state call,
        e.g. to reenable plasticity of any type that was disabled.
        """
        self.plastic = self._plasticity_setting_stack.pop()

        for of in self.output_fns:
            if hasattr(of, 'restore_plasticity_state'):
                of.restore_plasticity_state()

    def projection_view(self, timestamp=None):
        """Returns the activity in a single projection"""
        if timestamp is None:
            timestamp = self.src.simulation.time()
        return SheetView(self.activity.copy(),
                         self.dest.bounds,
                         metadata=AttrDict(
                             proj_src_name=self.src.name,
                             precedence=self.src.precedence,
                             proj_name=self.name,
                             row_precedence=self.src.row_precedence,
                             src_name=self.dest.name,
                             timestamp=timestamp))

    def get_projection_view(self, timestamp):
        self.warning("Deprecated, call 'projection_view' method instead.")
        return self.projection_view(timestamp)

    def n_bytes(self):
        """
        Estimate the memory bytes taken by this Projection.

        By default, counts only the activity array, but subclasses
        should implement this method to include also the bytes taken
        by weight arrays and any similar arrays, as a rough lower
        bound from which memory requirements and memory usage patterns
        can be estimated.
        """
        (rows, cols) = self.activity.shape
        return rows * cols

    def n_conns(self):
        """
        Return the size of this projection, in number of connections.

        Must be implemented by subclasses, if only to declare that no
        connections are stored.
        """
        raise NotImplementedError
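
The override/restore pair above is intended for analysis code that must not leave lasting changes; a minimal sketch of that pattern, where `proj` and `test_input` are placeholder names for a Projection instance and a source activity array:

proj.override_plasticity_state(False)   # freeze weights and other history-dependent state
try:
    proj.activate(test_input)           # compute activity without lasting changes
finally:
    proj.restore_plasticity_state()     # pop the previous plasticity setting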
Code example #30
class PatternGenerator(param.Parameterized):
    """
    A class hierarchy for callable objects that can generate 2D patterns.

    Once initialized, PatternGenerators can be called to generate a
    value or a matrix of values from a 2D function, typically
    accepting at least x and y.

    A PatternGenerator's Parameters can make use of Parameter's
    precedence attribute to specify the order in which they should
    appear, e.g. in a GUI. The precedence attribute has a nominal
    range of 0.0 to 1.0, with ordering going from 0.0 (first) to 1.0
    (last), but any value is allowed.

    The orientation and layout of the pattern matrices are defined by
    the SheetCoordinateSystem class; see that class for details.

    Note that not every parameter defined for a PatternGenerator will
    be used by every subclass.  For instance, a Constant pattern will
    ignore the x, y, orientation, and size parameters, because the
    pattern does not vary with any of those parameters.  However,
    those parameters are still defined for all PatternGenerators, even
    Constant patterns, to allow PatternGenerators to be scaled, rotated,
    translated, etc. uniformly.
    """
    __abstract = True

    bounds = BoundingRegionParameter(
        default=BoundingBox(points=((-0.5, -0.5), (0.5, 0.5))),
        precedence=-1,
        doc="BoundingBox of the area in which the pattern is generated.")

    xdensity = param.Number(default=256,
                            bounds=(0, None),
                            precedence=-1,
                            doc="""
        Density (number of samples per 1.0 length) in the x direction.""")

    ydensity = param.Number(default=256,
                            bounds=(0, None),
                            precedence=-1,
                            doc="""
        Density (number of samples per 1.0 length) in the y direction.
        Typically the same as the xdensity.""")

    x = param.Number(default=0.0,
                     softbounds=(-1.0, 1.0),
                     precedence=0.20,
                     doc="""
        X-coordinate location of pattern center.""")

    y = param.Number(default=0.0,
                     softbounds=(-1.0, 1.0),
                     precedence=0.21,
                     doc="""
        Y-coordinate location of pattern center.""")

    z = param.ClassSelector(default=None,
                            precedence=-1,
                            class_=Dimension,
                            doc="""
        The Dimension object associated with the z-values generated by
        the PatternGenerator. If None, uses the default set by
        HoloViews.Image.""")

    group = param.String(default='Pattern',
                         precedence=-1,
                         doc="""
       The group name assigned to the returned HoloViews object.""")

    position = param.Composite(attribs=['x', 'y'],
                               precedence=-1,
                               doc="""
        Coordinates of location of pattern center.
        Provides a convenient way to set the x and y parameters together
        as a tuple (x,y), but shares the same actual storage as x and y
        (and thus only position OR x and y need to be specified).""")

    orientation = param.Number(default=0.0,
                               softbounds=(0.0, 2 * pi),
                               precedence=0.40,
                               doc="""
        Polar angle of pattern, i.e., the orientation in the Cartesian coordinate
        system, with zero at 3 o'clock and increasing counterclockwise.""")

    size = param.Number(default=1.0,
                        bounds=(0.0, None),
                        softbounds=(0.0, 6.0),
                        precedence=0.30,
                        doc="""Determines the overall size of the pattern.""")

    scale = param.Number(default=1.0,
                         softbounds=(0.0, 2.0),
                         precedence=0.10,
                         doc="""
        Multiplicative strength of input pattern, defaulting to 1.0""")

    offset = param.Number(default=0.0,
                          softbounds=(-1.0, 1.0),
                          precedence=0.11,
                          doc="""
        Additive offset to input pattern, defaulting to 0.0""")

    mask = param.Parameter(default=None,
                           precedence=-1,
                           doc="""
        Optional object (expected to be an array) with which to multiply the
        pattern array after it has been created, before any output_fns are
        applied. This can be used to shape the pattern.""")

    # Note that the class type is overridden to PatternGenerator below
    mask_shape = param.ClassSelector(param.Parameterized,
                                     default=None,
                                     precedence=0.06,
                                     doc="""
        Optional PatternGenerator used to construct a mask to be applied to
        the pattern.""")

    output_fns = param.HookList(default=[],
                                precedence=0.08,
                                doc="""
        Optional function(s) to apply to the pattern array after it has been created.
        Can be used for normalization, thresholding, etc.""")

    def __init__(self, **params):
        super(PatternGenerator, self).__init__(**params)
        self.set_matrix_dimensions(self.bounds, self.xdensity, self.ydensity)

    def __call__(self, **params_to_override):
        """
        Call the subclass's 'function' method on a rotated and scaled
        coordinate system.

        Creates and fills an array with the requested pattern.  If
        called without any params, uses the values for the Parameters
        as currently set on the object. Otherwise, any params
        specified override those currently set on the object.
        """
        if 'output_fns' in params_to_override:
            self.warning(
                "Output functions specified through the call method will be ignored."
            )

        p = ParamOverrides(self, params_to_override)

        # CEBERRORALERT: position parameter is not currently
        # supported. We should delete the position parameter or fix
        # this.
        #
        # position=params_to_override.get('position',None) if position
        # is not None: x,y = position

        self._setup_xy(p.bounds, p.xdensity, p.ydensity, p.x, p.y,
                       p.orientation)
        fn_result = self.function(p)
        self._apply_mask(p, fn_result)
        if p.scale != 1.0:
            result = p.scale * fn_result
        else:
            result = fn_result
        if p.offset != 0.0:
            result += p.offset

        for of in p.output_fns:
            of(result)

        return result

    def __getitem__(self, coords):
        value_dims = {}
        if self.num_channels() in [0, 1]:
            raster, data = Image, self()
            value_dims = {
                'value_dimensions': [self.z]
            } if self.z else value_dims
        elif self.num_channels() in [3, 4]:
            raster = RGB
            data = np.dstack(list(self.channels().values())[1:])

        image = raster(data,
                       bounds=self.bounds,
                       **dict(group=self.group,
                              label=self.__class__.__name__,
                              **value_dims))
        # Works around a bug fixed shortly after the HoloViews 1.0.0 release
        return image if isinstance(coords,
                                   slice) else image.__getitem__(coords)

    def channels(self, use_cached=False, **params_to_override):
        """
        Channels() adds a shared interface for single channel and
        multichannel structures.  It will always return an ordered
        dict: its first element is the single channel of the pattern
        (if single-channel) or the channel average (if multichannel);
        the successive elements are the individual channels' arrays
        (key: 0,1,..N-1).
        """
        return collections.OrderedDict(
            {'default': self.__call__(**params_to_override)})

    def num_channels(self):
        """
        Query the number of channels implemented by the
        PatternGenerator. In case of single-channel generators this
        will return 1; in case of multichannel, it will return the
        number of channels (e.g., for RGB images it would return 3,
        for Red-Green-Blue, even though the OrderedDict returned by
        channels() will have 4 elements -- the 3 channels plus their
        average).
        """
        return 1

    def _setup_xy(self, bounds, xdensity, ydensity, x, y, orientation):
        """
        Produce pattern coordinate matrices from the bounds and
        density (or rows and cols), and transforms them according to
        x, y, and orientation.
        """
        self.debug(
            "bounds=%s, xdensity=%s, ydensity=%s, x=%s, y=%s, orientation=%s",
            bounds, xdensity, ydensity, x, y, orientation)
        # Generate vectors representing coordinates at which the pattern
        # will be sampled.

        # CB: note to myself - use slice_._scs if supplied?
        x_points, y_points = SheetCoordinateSystem(
            bounds, xdensity, ydensity).sheetcoordinates_of_matrixidx()

        # Generate matrices of x and y sheet coordinates at which to
        # sample pattern, at the correct orientation
        self.pattern_x, self.pattern_y = self._create_and_rotate_coordinate_arrays(
            x_points - x, y_points - y, orientation)

    def function(self, p):
        """
        Function to draw a pattern that will then be scaled and rotated.

        Instead of implementing __call__ directly, PatternGenerator
        subclasses will typically implement this helper function used
        by __call__, because that way they can let __call__ handle the
        scaling and rotation for them.  Alternatively, __call__ itself
        can be reimplemented entirely by a subclass (e.g. if it does
        not need to do any scaling or rotation), in which case this
        function will be ignored.
        """
        raise NotImplementedError

    def _create_and_rotate_coordinate_arrays(self, x, y, orientation):
        """
        Create pattern matrices from x and y vectors, and rotate them
        to the specified orientation.
        """
        # Using this two-liner requires that x increase from left to
        # right and y decrease from left to right; I don't think it
        # can be rewritten in so little code otherwise - but please
        # prove me wrong.
        pattern_y = np.subtract.outer(
            np.cos(orientation) * y,
            np.sin(orientation) * x)
        pattern_x = np.add.outer(
            np.sin(orientation) * y,
            np.cos(orientation) * x)
        return pattern_x, pattern_y

    def _apply_mask(self, p, mat):
        """Create (if necessary) and apply the mask to the given matrix mat."""
        mask = p.mask
        ms = p.mask_shape
        if ms is not None:
            mask = ms(
                x=p.x + p.size *
                (ms.x * np.cos(p.orientation) - ms.y * np.sin(p.orientation)),
                y=p.y + p.size *
                (ms.x * np.sin(p.orientation) + ms.y * np.cos(p.orientation)),
                orientation=ms.orientation + p.orientation,
                size=ms.size * p.size,
                bounds=p.bounds,
                ydensity=p.ydensity,
                xdensity=p.xdensity)
        if mask is not None:
            mat *= mask

    def set_matrix_dimensions(self, bounds, xdensity, ydensity):
        """
        Change the dimensions of the matrix into which the pattern
        will be drawn.  Users of this class should call this method
        rather than changing the bounds, xdensity, and ydensity
        parameters directly.  Subclasses can override this method to
        update any internal data structures that may depend on the
        matrix dimensions.
        """
        self.bounds = bounds
        self.xdensity = xdensity
        self.ydensity = ydensity
        scs = SheetCoordinateSystem(bounds, xdensity, ydensity)
        for of in self.output_fns:
            if isinstance(of, TransferFn):
                of.initialize(SCS=scs, shape=scs.shape)

    def state_push(self):
        "Save the state of the output functions, to be restored with state_pop."
        for of in self.output_fns:
            if hasattr(of, 'state_push'):
                of.state_push()
        super(PatternGenerator, self).state_push()

    def state_pop(self):
        "Restore the state of the output functions saved by state_push."
        for of in self.output_fns:
            if hasattr(of, 'state_pop'):
                of.state_pop()
        super(PatternGenerator, self).state_pop()

    def anim(self,
             duration,
             offset=0,
             timestep=1,
             label=None,
             unit=None,
             time_fn=param.Dynamic.time_fn):
        """
        duration: The temporal duration to animate in the units
        defined on the global time function.

        offset: The temporal offset from which the animation is
        generated given the supplied pattern

        timestep: The time interval between successive frames. The
        duration must be an exact multiple of the timestep.

        label: A label string to override the label of the global time
        function (if not None).

        unit: The unit string to override the unit value of the global
        time function (if not None).

        time_fn: The global time function object that is shared across
        the time-varying objects that are being sampled.

        Note that the offset, timestep and time_fn only affect
        patterns parameterized by time-dependent number
        generators. Otherwise, the frames are generated by successive
        calls to the pattern, which may or may not be varying (e.g. to
        view the patterns contained within a Selector).
        """
        frames = (duration // timestep) + 1
        if duration % timestep != 0:
            raise ValueError(
                "The duration value must be an exact multiple of the timestep."
            )

        if label is None:
            label = time_fn.label if hasattr(time_fn, 'label') else 'Time'

        unit = time_fn.unit if (not unit
                                and hasattr(time_fn, 'unit')) else unit
        vmap = HoloMap(
            key_dimensions=[Dimension(label, unit=unit if unit else '')])

        self.state_push()
        with time_fn as t:
            t(offset)
            for i in range(frames):
                vmap[t()] = self[:]
                t += timestep
        self.state_pop()
        return vmap

    ## Support for compositional expressions of PatternGenerator objects
    def _promote(self, other):
        if not isinstance(other, PatternGenerator):
            other = Constant(scale=other, offset=0)
        return [self, other]

    def _rpromote(self, other):
        if not isinstance(other, PatternGenerator):
            other = Constant(scale=other, offset=0)
        return [other, self]

    # Could define any of Python's operators here, esp. if they have operator or ufunc equivalents
    def __add__(self, other):
        return Composite(generators=self._promote(other), operator=np.add)

    def __sub__(self, other):
        return Composite(generators=self._promote(other), operator=np.subtract)

    def __mul__(self, other):
        return Composite(generators=self._promote(other), operator=np.multiply)

    def __mod__(self, other):
        return Composite(generators=self._promote(other), operator=np.mod)

    def __pow__(self, other):
        return Composite(generators=self._promote(other), operator=np.power)

    def __div__(self, other):
        return Composite(generators=self._promote(other), operator=np.divide)

    def __and__(self, other):
        return Composite(generators=self._promote(other), operator=np.minimum)

    def __or__(self, other):
        return Composite(generators=self._promote(other), operator=np.maximum)

    def __radd__(self, other):
        return Composite(generators=self._rpromote(other), operator=np.add)

    def __rsub__(self, other):
        return Composite(generators=self._rpromote(other),
                         operator=np.subtract)

    def __rmul__(self, other):
        return Composite(generators=self._rpromote(other),
                         operator=np.multiply)

    def __rmod__(self, other):
        return Composite(generators=self._rpromote(other), operator=np.mod)

    def __rpow__(self, other):
        return Composite(generators=self._rpromote(other), operator=np.power)

    def __rdiv__(self, other):
        return Composite(generators=self._rpromote(other), operator=np.divide)

    def __rand__(self, other):
        return Composite(generators=self._rpromote(other), operator=np.minimum)

    def __ror__(self, other):
        return Composite(generators=self._rpromote(other), operator=np.maximum)

    def __neg__(self):
        return Composite(generators=[Constant(scale=0), self],
                         operator=np.subtract)

    class abs_first(object):
        @staticmethod
        def reduce(x):
            return np.abs(x[0])

    def __abs__(self):
        return Composite(generators=[self], operator=self.abs_first)

    def pil(self, **params_to_override):
        """Returns a PIL image for this pattern, overriding parameters if provided."""
        from PIL.Image import fromarray
        nchans = self.num_channels()

        if nchans in [0, 1]:
            mode, arr = None, self(**params_to_override)
            arr = (255.0 / arr.max() * (arr - arr.min())).astype(np.uint8)

        elif nchans in [3, 4]:
            mode = 'RGB' if nchans == 3 else 'RGBA'
            arr = np.dstack(list(self.channels(**params_to_override).values())[1:])
            arr = (255.0 * arr).astype(np.uint8)

        else:
            raise ValueError("Unsupported number of channels")

        return fromarray(arr, mode)
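
Finally, a hedged sketch of the compositional operators defined on PatternGenerator, assuming concrete generators such as Gaussian and Constant (as provided by imagen) are importable:

import numpy as np

g = Gaussian(size=0.3, orientation=np.pi / 4)
combined = 0.5 * g + 0.2            # scalars are promoted to Constant patterns
arr = combined()                    # evaluate with the current parameter values
clipped = g & Constant(scale=0.5)   # __and__ composes with numpy.minimum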