Example #1
class Dynamic(param.ParameterizedFunction):
    """
    Dynamically applies a callable to the Elements in any HoloViews
    object. Will return a DynamicMap wrapping the original map object,
    which will lazily evaluate when a key is requested. By default
    Dynamic applies a no-op, making it useful for converting a HoloMap
    to a DynamicMap.

    Any supplied kwargs will be passed to the callable and any streams
    will be instantiated on the returned DynamicMap. If the supplied
    operation is a method on a parameterized object which was
    decorated with parameter dependencies, Dynamic will automatically
    create a stream to watch the parameter changes. This default
    behavior may be disabled by setting watch=False.
    """

    operation = param.Callable(default=lambda x: x,
                               doc="""
        Operation or user-defined callable to apply dynamically""")

    kwargs = param.Dict(default={},
                        doc="""
        Keyword arguments passed to the function.""")

    link_inputs = param.Boolean(default=True,
                                doc="""
         If Dynamic is applied to another DynamicMap, determines whether
         linked streams attached to its Callable inputs are
         transferred to the output of the utility.

         For example, if the Dynamic utility is applied to a DynamicMap
         with a RangeXY stream, this switch determines whether the
         corresponding visualization should update this stream with
         range changes originating from the newly generated axes.""")

    shared_data = param.Boolean(default=False,
                                doc="""
        Whether the cloned DynamicMap will share the same cache.""")

    streams = param.List(default=[],
                         doc="""
        List of streams to attach to the returned DynamicMap""")

    def __call__(self, map_obj, **params):
        watch = params.pop('watch', True)
        self.p = param.ParamOverrides(self, params)
        callback = self._dynamic_operation(map_obj)
        streams = self._get_streams(map_obj, watch)
        if isinstance(map_obj, DynamicMap):
            dmap = map_obj.clone(callback=callback,
                                 shared_data=self.p.shared_data,
                                 streams=streams)
            if self.p.shared_data:
                dmap.data = OrderedDict([(k, callback.callable(*k))
                                         for k, v in dmap.data.items()])
        else:
            dmap = self._make_dynamic(map_obj, callback, streams)
        return dmap

    def _get_streams(self, map_obj, watch=True):
        """
        Generates a list of streams to attach to the returned DynamicMap.
        If the input is a DynamicMap any streams that are supplying values
        for the key dimension of the input are inherited. And the list
        of supplied stream classes and instances are processed and
        added to the list.
        """
        streams = []
        op = self.p.operation
        for stream in self.p.streams:
            if inspect.isclass(stream) and issubclass(stream, Stream):
                stream = stream()
            elif not (isinstance(stream, Stream)
                      or util.is_param_method(stream)):
                raise ValueError(
                    'Streams must be Stream classes or instances, found %s type'
                    % type(stream).__name__)
            if isinstance(op, Operation):
                updates = {
                    k: op.p.get(k)
                    for k, v in stream.contents.items()
                    if v is None and k in op.p
                }
                if updates:
                    reverse = {v: k for k, v in stream._rename.items()}
                    stream.update(
                        **{reverse.get(k, k): v
                           for k, v in updates.items()})
            streams.append(stream)

        params = {}
        for k, v in self.p.kwargs.items():
            if 'panel' in sys.modules:
                from panel.widgets.base import Widget
                if isinstance(v, Widget):
                    v = v.param.value
            if isinstance(v, param.Parameter) and isinstance(
                    v.owner, param.Parameterized):
                params[k] = v
        streams += Params.from_params(params)

        # Inherit dimensioned streams
        if isinstance(map_obj, DynamicMap):
            dim_streams = util.dimensioned_streams(map_obj)
            streams = list(util.unique_iterator(streams + dim_streams))

        # If the callback is a parameterized method or function with
        # dependencies and watch is enabled, add it as a stream
        has_dependencies = (util.is_param_method(op, has_deps=True)
                            or isinstance(op, FunctionType)
                            and hasattr(op, '_dinfo'))
        if has_dependencies and watch:
            streams.append(op)

        # Add any keyword arguments which are parameterized methods
        # with dependencies as streams
        for value in self.p.kwargs.values():
            if util.is_param_method(value, has_deps=True):
                streams.append(value)
            elif isinstance(value, FunctionType) and hasattr(value, '_dinfo'):
                dependencies = list(value._dinfo.get('dependencies', []))
                dependencies += list(value._dinfo.get('kw', {}).values())
                params = [
                    d for d in dependencies if isinstance(d, param.Parameter)
                    and isinstance(d.owner, param.Parameterized)
                ]
                streams.append(Params(parameters=params, watch_only=True))

        valid, invalid = Stream._process_streams(streams)
        if invalid:
            msg = ('The supplied streams list contains objects that '
                   'are not Stream instances: {objs}')
            raise TypeError(
                msg.format(objs=', '.join('%r' % el for el in invalid)))
        return valid

    def _process(self, element, key=None, kwargs={}):
        if util.is_param_method(self.p.operation) and util.get_method_owner(
                self.p.operation) is element:
            return self.p.operation(**kwargs)
        elif isinstance(self.p.operation, Operation):
            kwargs = {
                k: v
                for k, v in kwargs.items() if k in self.p.operation.param
            }
            return self.p.operation.process_element(element, key, **kwargs)
        else:
            return self.p.operation(element, **kwargs)

    def _dynamic_operation(self, map_obj):
        """
        Generate function to dynamically apply the operation.
        Wraps an existing HoloMap or DynamicMap.
        """
        def resolve(key, kwargs):
            if not isinstance(map_obj, HoloMap):
                return key, map_obj
            elif isinstance(map_obj,
                            DynamicMap) and map_obj._posarg_keys and not key:
                key = tuple(kwargs[k] for k in map_obj._posarg_keys)
            return key, map_obj[key]

        def apply(element, *key, **kwargs):
            kwargs = dict(util.resolve_dependent_kwargs(self.p.kwargs),
                          **kwargs)
            return self._process(element, key, kwargs)

        def dynamic_operation(*key, **kwargs):
            key, obj = resolve(key, kwargs)
            return apply(obj, *key, **kwargs)

        if isinstance(self.p.operation, Operation):
            return OperationCallable(dynamic_operation,
                                     inputs=[map_obj],
                                     link_inputs=self.p.link_inputs,
                                     operation=self.p.operation)
        else:
            return Callable(dynamic_operation,
                            inputs=[map_obj],
                            link_inputs=self.p.link_inputs,
                            operation=apply)

    def _make_dynamic(self, hmap, dynamic_fn, streams):
        """
        Accepts a HoloMap and a dynamic callback function, creating
        an equivalent DynamicMap from the HoloMap.
        """
        if isinstance(hmap, ViewableElement):
            return DynamicMap(dynamic_fn, streams=streams)
        dim_values = zip(*hmap.data.keys())
        params = util.get_param_values(hmap)
        kdims = [
            d.clone(values=list(util.unique_iterator(values)))
            for d, values in zip(hmap.kdims, dim_values)
        ]
        return DynamicMap(dynamic_fn,
                          streams=streams,
                          **dict(params, kdims=kdims))
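# Usage sketch (illustrative, not part of the class above). Assumes HoloViews
# is importable and that this utility is exposed as hv.util.Dynamic.
import numpy as np
import holoviews as hv

xs = np.linspace(0, 1, 100)
hmap = hv.HoloMap({f: hv.Curve((xs, np.sin(2 * np.pi * f * xs)))
                   for f in (1, 2, 3)}, kdims=['frequency'])

# The default no-op operation simply converts the HoloMap into a lazily
# evaluated DynamicMap with the same key dimensions.
dmap = hv.util.Dynamic(hmap)

# A custom operation plus kwargs: each requested frame is offset on the fly.
offset_dmap = hv.util.Dynamic(
    hmap,
    operation=lambda el, offset=0: el.clone(
        (el.dimension_values(0), el.dimension_values(1) + offset)),
    kwargs=dict(offset=0.5))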
Example #2
class Collator(NdMapping):
    """
    Collator is an NdMapping type which can merge any number
    of HoloViews components with any level of nesting
    by inserting the Collator's key dimensions on the HoloMaps.
    If the items in the Collator do not contain HoloMaps
    they will be created. Collator also supports filtering
    of Tree structures and dropping of constant dimensions.
    """

    drop = param.List(default=[], doc="""
        List of dimensions to drop when collating data, specified
        as strings.""")

    drop_constant = param.Boolean(default=False, doc="""
        Whether to demote any non-varying key dimensions to
        constant dimensions.""")

    filters = param.List(default=[], doc="""
        List of paths to drop when collating data, specified
        as strings or tuples.""")

    group = param.String(default='Collator')

    progress_bar = param.Parameter(default=None, doc="""
         The progress bar instance used to report progress. Set to
         None to disable progress bars.""")

    merge_type = param.ClassSelector(class_=NdMapping, default=HoloMap,
                                     is_instance=False, instantiate=False)

    value_transform = param.Callable(default=None, doc="""
        If supplied the function will be applied on each Collator
        value during collation. This may be used to apply an operation
        to the data or load references from disk before they are collated
        into a displayable HoloViews object.""")

    vdims = param.List(default=[], doc="""
         Collator operates on HoloViews objects; if vdims are specified,
         a value_transform function must also be supplied.""")

    _deep_indexable = False
    _auxiliary_component = False

    _nest_order = {HoloMap: ViewableElement,
                   GridSpace: (HoloMap, CompositeOverlay, ViewableElement),
                   NdLayout: (GridSpace, HoloMap, ViewableElement),
                   NdOverlay: Element}

    def __init__(self, data=None, **params):
        if isinstance(data, Element):
            params = dict(get_param_values(data), **params)
            if 'kdims' not in params:
                params['kdims'] = data.kdims
            if 'vdims' not in params:
                params['vdims'] = data.vdims
            data = data.mapping()
        super(Collator, self).__init__(data, **params)


    def __call__(self):
        """
        Filters each Layout in the Collator with the supplied
        filters and merges the results into a single collated
        object, inserting the Collator key dimensions on any
        contained HoloMaps.
        """
        constant_dims = self.static_dimensions
        ndmapping = NdMapping(kdims=self.kdims)

        num_elements = len(self)
        for idx, (key, data) in enumerate(self.data.items()):
            if isinstance(data, AttrTree):
                data = data.filter(self.filters)
            if len(self.vdims) and self.value_transform:
                vargs = dict(zip(self.dimensions('value', label=True), data))
                data = self.value_transform(vargs)
            if not isinstance(data, Dimensioned):
                raise ValueError("Collator values must be Dimensioned objects "
                                 "before collation.")

            dim_keys = list(zip(self.kdims, key))
            varying_keys = [(d, k) for d, k in dim_keys if not self.drop_constant or
                            (d not in constant_dims and d not in self.drop)]
            constant_keys = [(d, k) for d, k in dim_keys if d in constant_dims
                             and d not in self.drop and self.drop_constant]
            if varying_keys or constant_keys:
                data = self._add_dimensions(data, varying_keys,
                                            dict(constant_keys))
            ndmapping[key] = data
            if self.progress_bar is not None:
                self.progress_bar(float(idx+1)/num_elements*100)

        components = ndmapping.values()
        accumulator = ndmapping.last.clone(components[0].data)
        for component in components:
            accumulator.update(component)
        return accumulator


    @property
    def static_dimensions(self):
        """
        Return all constant dimensions.
        """
        dimensions = []
        for dim in self.kdims:
            if len(set(self.dimension_values(dim.name))) == 1:
                dimensions.append(dim)
        return dimensions


    def _add_dimensions(self, item, dims, constant_keys):
        """
        Recursively descends through Layout and NdMapping objects
        in order to add the supplied dimension values to all contained
        HoloMaps.
        """
        if isinstance(item, Layout):
            item.fixed = False

        dim_vals = [(dim, val) for dim, val in dims[::-1]
                    if dim not in self.drop]
        if isinstance(item, self.merge_type):
            new_item = item.clone(cdims=constant_keys)
            for dim, val in dim_vals:
                dim = dim if isinstance(dim, Dimension) else Dimension(dim)
                if dim not in new_item.kdims:
                    new_item = new_item.add_dimension(dim, 0, val)
        elif isinstance(item, self._nest_order[self.merge_type]):
            if len(dim_vals):
                dimensions, key = zip(*dim_vals)
                new_item = self.merge_type({key: item}, kdims=list(dimensions),
                                           cdims=constant_keys)
            else:
                new_item = item
        else:
            new_item = item.clone(shared_data=False, cdims=constant_keys)
            for k, v in item.items():
                new_item[k] = self._add_dimensions(v, dims[::-1], constant_keys)
        if isinstance(new_item, Layout):
            new_item.fixed = True

        return new_item
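# Usage sketch (illustrative). Assumes Collator is available as hv.Collator;
# each value must be a displayable HoloViews object, and calling the Collator
# merges them into a single HoloMap with 'run' inserted as a key dimension.
import numpy as np
import holoviews as hv

curves = {(run,): hv.Curve(np.random.randn(50).cumsum())
          for run in range(3)}
collator = hv.Collator(curves, kdims=['run'])
holomap = collator()  # HoloMap keyed by 'run'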
Example #3
class Histogram(Element2D):
    """
    Histogram contains a number of bins, which are defined by the
    upper and lower bounds of their edges and the computed bin values.
    """

    kdims = param.List(default=[Dimension('x')],
                       bounds=(1, 1),
                       doc="""
        Dimensions on Element2Ds determine the number of indexable
        dimensions.""")

    group = param.String(default='Histogram', constant=True)

    vdims = param.List(default=[Dimension('Frequency')], bounds=(1, 1))

    def __init__(self, values, edges=None, **params):
        self.values, self.edges, settings = self._process_data(values, edges)
        settings.update(params)
        super(Histogram, self).__init__((self.values, self.edges), **settings)

    def __getitem__(self, key):
        """
        Implements slicing or indexing of the Histogram
        """
        if key in self.dimensions(): return self.dimension_values(key)
        if key == () or key is Ellipsis:
            return self  # May no longer be necessary
        key = util.process_ellipses(self, key)
        if not isinstance(key, tuple): pass
        elif len(key) == self.ndims + 1:
            if key[-1] != slice(None) and (key[-1] not in self.vdims):
                raise KeyError("%r is the only selectable value dimension" %
                               self.vdims[0].name)
            key = key[0]
        elif len(key) == self.ndims:
            key = key[0]
        else:
            raise KeyError("Histogram cannot slice more than %d dimension." %
                           len(self.kdims) + 1)

        centers = [(float(l) + r) / 2
                   for (l, r) in zip(self.edges, self.edges[1:])]
        if isinstance(key, slice):
            start, stop = key.start, key.stop
            if [start, stop] == [None, None]: return self
            start_idx, stop_idx = None, None
            if start is not None:
                start_idx = np.digitize([start], centers, right=True)[0]
            if stop is not None:
                stop_idx = np.digitize([stop], centers, right=True)[0]

            slice_end = stop_idx + 1 if stop_idx is not None else None
            slice_values = self.values[start_idx:stop_idx]
            slice_edges = self.edges[start_idx:slice_end]

            extents = (min(slice_edges), self.extents[1], max(slice_edges),
                       self.extents[3])
            return self.clone((slice_values, slice_edges), extents=extents)
        else:
            if not (self.edges.min() <= key < self.edges.max()):
                raise KeyError("Key value %s is out of the histogram bounds" %
                               key)
            idx = np.digitize([key], self.edges)[0]
            return self.values[idx - 1 if idx > 0 else idx]

    def _process_data(self, values, edges):
        """
        Ensure that edges are specified as left and right edges of the
        histogram bins rather than bin centers.
        """
        settings = {}
        (values, edges) = values if isinstance(values, tuple) else (values,
                                                                    edges)
        if isinstance(values, Chart):
            settings = dict(values.get_param_values(onlychanged=True))
            edges = values.dimension_values(0)
            values = values.dimension_values(1)
        elif isinstance(values, np.ndarray) and len(values.shape) == 2:
            edges = values[:, 0]
            values = values[:, 1]
        elif all(isinstance(el, tuple) for el in values):
            edges, values = zip(*values)
        else:
            values = np.array(values)
            if edges is None:
                edges = np.arange(len(values), dtype=float)
            else:
                edges = np.array(edges, dtype=float)

        if len(edges) == len(values):
            edges = compute_edges(edges)
        return values, edges, settings

    def range(self, dimension, data_range=True):
        if self.get_dimension_index(dimension) == 0 and data_range:
            dim = self.get_dimension(dimension)
            lower, upper = np.min(self.edges), np.max(self.edges)
            lower, upper = util.max_range([(lower, upper), dim.soft_range])
            dmin, dmax = dim.range
            lower = lower if dmin is None or not np.isfinite(dmin) else dmin
            upper = upper if dmax is None or not np.isfinite(dmax) else dmax
            return lower, upper
        else:
            return super(Histogram, self).range(dimension, data_range)

    def dimension_values(self, dim):
        dim = self.get_dimension(dim, strict=True).name
        if dim in self.vdims:
            return self.values
        elif dim in self.kdims:
            return np.convolve(self.edges, np.ones((2, )) / 2, mode='valid')
        else:
            return super(Histogram, self).dimension_values(dim)

    def sample(self, samples=[], **sample_values):
        raise NotImplementedError('Cannot sample a Histogram.')

    def reduce(self, dimensions=None, function=None, **reduce_map):
        raise NotImplementedError('Reduction of Histogram not implemented.')
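# Usage sketch (illustrative) for the Histogram element above, using its
# (values, edges) constructor together with numpy.histogram.
import numpy as np

samples = np.random.randn(1000)
values, edges = np.histogram(samples, bins=20)
hist = Histogram(values, edges)

centers = hist.dimension_values('x')  # bin centers along the key dimension
sub = hist[-1:1]                      # slice to the bins between -1 and 1
count = hist[0.0]                     # value of the bin containing 0.0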
Example #4
class BaseTable(ReactiveData, Widget):

    editors = param.Dict(default={},
                         doc="""
        Bokeh CellEditor to use for a particular column
        (overrides the default chosen based on the type).""")

    formatters = param.Dict(default={},
                            doc="""
        Bokeh CellFormatter to use for a particular column
        (overrides the default chosen based on the type).""")

    row_height = param.Integer(default=40,
                               doc="""
        The height of each table row.""")

    selection = param.List(default=[],
                           doc="""
        The currently selected rows of the table.""")

    show_index = param.Boolean(default=True,
                               doc="""
        Whether to show the index column.""")

    titles = param.Dict(default={},
                        doc="""
        A mapping from column name to a title to override the name with.""")

    widths = param.ClassSelector(default={},
                                 class_=(dict, int),
                                 doc="""
        A mapping from column name to column width or a fixed column
        width.""")

    value = param.Parameter(default=None)

    _data_params = ['value']

    _manual_params = [
        'formatters', 'editors', 'widths', 'titles', 'value', 'show_index'
    ]

    _rename = {'disabled': 'editable', 'selection': None}

    __abstract = True

    def __init__(self, value=None, **params):
        self._renamed_cols = {}
        self._filters = []
        super().__init__(value=value, **params)

    def _validate(self, event):
        if self.value is None:
            return
        cols = self.value.columns
        if len(cols) != len(cols.drop_duplicates()):
            raise ValueError('Cannot display a pandas.DataFrame with '
                             'duplicate column names.')

    def _process_param_change(self, msg):
        msg = super()._process_param_change(msg)
        if 'editable' in msg:
            msg['editable'] = not msg.pop('editable') and len(
                self.indexes) <= 1
        return msg

    def _get_columns(self):
        if self.value is None:
            return []

        indexes = self.indexes
        col_names = list(self.value.columns)
        if len(indexes) == 1:
            col_names = indexes + col_names
        else:
            col_names = indexes[-1:] + col_names
        df = self.value.reset_index() if len(indexes) > 1 else self.value
        return self._get_column_definitions(col_names, df)

    def _get_column_definitions(self, col_names, df):
        import pandas as pd
        indexes = self.indexes
        columns = []
        for col in col_names:
            if col in df.columns:
                data = df[col]
            else:
                data = df.index

            if isinstance(data, pd.DataFrame):
                raise ValueError("DataFrame contains duplicate column names.")

            col_kwargs = {}
            kind = data.dtype.kind
            if kind == 'i':
                formatter = NumberFormatter()
                editor = IntEditor()
            elif kind == 'b':
                formatter = StringFormatter()
                editor = CheckboxEditor()
            elif kind == 'f':
                formatter = NumberFormatter(format='0,0.0[00000]')
                editor = NumberEditor()
            elif isdatetime(data) or kind == 'M':
                if len(data) and isinstance(data.values[0], dt.date):
                    date_format = '%Y-%m-%d'
                else:
                    date_format = '%Y-%m-%d %H:%M:%S'
                formatter = DateFormatter(format=date_format)
                editor = DateEditor()
            else:
                formatter = StringFormatter()
                editor = StringEditor()

            if col in self.editors and not isinstance(self.editors[col],
                                                      (dict, str)):
                editor = self.editors[col]

            if col in indexes or editor is None:
                editor = CellEditor()

            if col in self.formatters and not isinstance(
                    self.formatters[col], (dict, str)):
                formatter = self.formatters[col]

            if str(col) != col:
                self._renamed_cols[str(col)] = col

            if isinstance(self.widths, int):
                col_kwargs['width'] = self.widths
            elif str(col) in self.widths:
                col_kwargs['width'] = self.widths.get(str(col))
            else:
                col_kwargs['width'] = 0

            title = self.titles.get(col, str(col))
            if col in indexes and len(indexes) > 1 and self.hierarchical:
                title = 'Index: %s' % ' | '.join(indexes)
            column = TableColumn(field=str(col),
                                 title=title,
                                 editor=editor,
                                 formatter=formatter,
                                 **col_kwargs)
            columns.append(column)
        return columns

    def _get_model(self, doc, root=None, parent=None, comm=None):
        source = ColumnDataSource(data=self._data)
        source.selected.indices = self.selection
        model = self._widget_type(**self._get_properties(source))
        if root is None:
            root = model
        self._link_props(model.source, ['data'], doc, root, comm)
        self._link_props(model.source.selected, ['indices'], doc, root, comm)
        self._models[root.ref['id']] = (model, parent)
        return model

    def _update_columns(self, event, model):
        model.columns = self._get_columns()

    def _manual_update(self, events, model, doc, root, parent, comm):
        for event in events:
            if event.type == 'triggered' and self._updating:
                continue
            elif event.name in ('value', 'show_index'):
                self._update_columns(event, model)
                if isinstance(model, DataCube):
                    model.groupings = self._get_groupings()
            elif hasattr(self, '_update_' + event.name):
                getattr(self, '_update_' + event.name)(model)
            else:
                self._update_columns(event, model)

    def _filter_dataframe(self, df):
        """
        Filter the DataFrame with the filters registered on this table.

        Parameters
        ----------
        df : DataFrame
            The DataFrame to filter

        Returns
        -------
        DataFrame
            The filtered DataFrame
        """
        filters = []
        for col_name, filt in self._filters:
            if isinstance(filt, (FunctionType, MethodType)):
                df = filt(df)
                continue
            if isinstance(filt, param.Parameter):
                val = getattr(filt.owner, filt.name)
            else:
                val = filt
            column = df[col_name]
            if np.isscalar(val):
                mask = column == val
            elif isinstance(val, (list, set)):
                if not val:
                    continue
                mask = column.isin(val)
            elif isinstance(val, tuple):
                start, end = val
                if start is None and end is None:
                    continue
                elif start is None:
                    mask = column <= end
                elif end is None:
                    mask = column >= start
                else:
                    mask = (column >= start) & (column <= end)
            else:
                raise ValueError(f"'{col_name} filter value not "
                                 "understood. Must be either a scalar, "
                                 "tuple or list.")
            filters.append(mask)
        if filters:
            mask = filters[0]
            for f in filters:
                mask &= f
            df = df[mask]
        return df

    def add_filter(self, filter, column=None):
        """
        Adds a filter to the table, which can be a static value or a
        dynamic, parameter-based object that automatically updates
        the table when changed.

        When a static value, widget or parameter is supplied the
        filtering will follow a few well defined behaviors:

          * scalar: Filters by checking for equality
          * tuple: A tuple will be interpreted as a range.
          * list: A list will be interpreted as a set of discrete
                  scalars and the filter will check if the values
                  in the column match any of the items in the list.

        Arguments
        ---------
        filter: Widget, param.Parameter or FunctionType
            The value by which to filter the DataFrame along the
            declared column, or a function accepting the DataFrame to
            be filtered and returning a filtered copy of the DataFrame.
        column: str or None
            Column to which the filter will be applied, if the filter
            is a constant value, widget or parameter.

        Raises
        ------
        ValueError: If the filter type is not supported or no column
                    was declared.
        """
        if isinstance(filter, (tuple, list, set)) or np.isscalar(filter):
            deps = []
        elif isinstance(filter, (FunctionType, MethodType)):
            deps = list(filter._dinfo['kw'].values()) if hasattr(
                filter, '_dinfo') else []
        else:
            filter = param_value_if_widget(filter)
            if not isinstance(filter, param.Parameter):
                raise ValueError(f'{type(self).__name__} filter must be '
                                 'a constant value, parameter, widget '
                                 'or function.')
            elif column is None:
                raise ValueError('When filtering with a parameter or '
                                 'widget, a column to filter on must be '
                                 'declared.')
            deps = [filter]
        for dep in deps:
            dep.owner.param.watch(self._update_cds, dep.name)
        self._filters.append((column, filter))
        self._update_cds()

    def remove_filter(self, filter):
        """
        Removes a filter which was previously added.
        """
        self._filters = [(column, filt) for (column, filt) in self._filters
                         if filt is not filter]
        self._update_cds()

    def _get_data(self):
        df = self._filter_dataframe(self.value)
        if df is None:
            return [], {}
        elif len(self.indexes) > 1:
            df = df.reset_index()
        data = ColumnDataSource.from_df(df).items()
        return df, {k if isinstance(k, str) else str(k): v for k, v in data}

    def _update_column(self, column, array):
        self.value[column] = array

    #----------------------------------------------------------------
    # Public API
    #----------------------------------------------------------------

    @property
    def indexes(self):
        import pandas as pd
        if self.value is None or not self.show_index:
            return []
        elif isinstance(self.value.index, pd.MultiIndex):
            return list(self.value.index.names)
        return [self.value.index.name or 'index']

    def stream(self, stream_value, rollover=None, reset_index=True):
        """
        Streams (appends) the `stream_value` provided to the existing
        value in an efficient manner.

        Arguments
        ---------
        stream_value (Union[pd.DataFrame, pd.Series, Dict])
          The new value(s) to append to the existing value.
        rollover: int
           A maximum column size, above which data from the start of
           the column begins to be discarded. If None, then columns
           will continue to grow unbounded.
        reset_index (bool, default=True):
          If True and the stream_value is a DataFrame, its index is
          reset. Helps to keep the index unique and named `index`.

        Raises
        ------
        ValueError: Raised if the stream_value is not a supported type.

        Examples
        --------

        Stream a Series to a DataFrame
        >>> value = pd.DataFrame({"x": [1, 2], "y": ["a", "b"]})
        >>> tabulator = Tabulator(value=value)
        >>> stream_value = pd.Series({"x": 4, "y": "d"})
        >>> tabulator.stream(stream_value)
        >>> tabulator.value.to_dict("list")
        {'x': [1, 2, 4], 'y': ['a', 'b', 'd']}

        Stream a Dataframe to a Dataframe
        >>> value = pd.DataFrame({"x": [1, 2], "y": ["a", "b"]})
        >>> tabulator = Tabulator(value=value)
        >>> stream_value = pd.DataFrame({"x": [3, 4], "y": ["c", "d"]})
        >>> tabulator.stream(stream_value)
        >>> tabulator.value.to_dict("list")
        {'x': [1, 2, 3, 4], 'y': ['a', 'b', 'c', 'd']}

        Stream a Dictionary row to a DataFrame
        >>> value = pd.DataFrame({"x": [1, 2], "y": ["a", "b"]})
        >>> tabulator = Tabulator(value=value)
        >>> stream_value = {"x": 4, "y": "d"}
        >>> tabulator.stream(stream_value)
        >>> tabulator.value.to_dict("list")
        {'x': [1, 2, 4], 'y': ['a', 'b', 'd']}

        Stream a Dictionary of Columns to a Dataframe
        >>> value = pd.DataFrame({"x": [1, 2], "y": ["a", "b"]})
        >>> tabulator = Tabulator(value=value)
        >>> stream_value = {"x": [3, 4], "y": ["c", "d"]}
        >>> tabulator.stream(stream_value)
        >>> tabulator.value.to_dict("list")
        {'x': [1, 2, 3, 4], 'y': ['a', 'b', 'c', 'd']}
        """
        import pandas as pd
        value_index_start = self.value.index.max() + 1
        if isinstance(stream_value, pd.DataFrame):
            if reset_index:
                stream_value = stream_value.reset_index(drop=True)
                stream_value.index += value_index_start
            combined = pd.concat([self.value, stream_value])
            if rollover is not None:
                combined = combined.iloc[-rollover:]
            with param.discard_events(self):
                self.value = combined
            try:
                self._updating = True
                self.param.trigger('value')
            finally:
                self._updating = False
            stream_value = self._filter_dataframe(stream_value)
            try:
                self._updating = True
                self._stream(stream_value, rollover)
            finally:
                self._updating = False
        elif isinstance(stream_value, pd.Series):
            self.value.loc[value_index_start] = stream_value
            if rollover is not None and len(self.value) > rollover:
                with param.discard_events(self):
                    self.value = self.value.iloc[-rollover:]
            stream_value = self._filter_dataframe(self.value.iloc[-1:])
            try:
                self._updating = True
                self._stream(stream_value, rollover)
            finally:
                self._updating = False
        elif isinstance(stream_value, dict):
            if stream_value:
                try:
                    stream_value = pd.DataFrame(stream_value)
                except ValueError:
                    stream_value = pd.Series(stream_value)
                self.stream(stream_value, rollover)
        else:
            raise ValueError(
                "The stream value provided is not a DataFrame, Series or Dict!"
            )

    def patch(self, patch_value):
        """
        Efficiently patches (updates) the existing value with the `patch_value`.

        Arguments
        ---------
        patch_value: (Union[pd.DataFrame, pd.Series, Dict])
          The value(s) to patch the existing value with.

        Raises
        ------
        ValueError: Raised if the patch_value is not a supported type.

        Examples
        --------

        Patch a DataFrame with a Dictionary row.
        >>> value = pd.DataFrame({"x": [1, 2], "y": ["a", "b"]})
        >>> tabulator = Tabulator(value=value)
        >>> patch_value = {"x": [(0, 3)]}
        >>> tabulator.patch(patch_value)
        >>> tabulator.value.to_dict("list")
        {'x': [3, 2], 'y': ['a', 'b']}

        Patch a Dataframe with a Dictionary of Columns.
        >>> value = pd.DataFrame({"x": [1, 2], "y": ["a", "b"]})
        >>> tabulator = Tabulator(value=value)
        >>> patch_value = {"x": [(slice(2), (3,4))], "y": [(1,'d')]}
        >>> tabulator.patch(patch_value)
        >>> tabulator.value.to_dict("list")
        {'x': [3, 4], 'y': ['a', 'd']}

        Patch a DataFrame with a Series. Please note the index is used in the update.
        >>> value = pd.DataFrame({"x": [1, 2], "y": ["a", "b"]})
        >>> tabulator = Tabulator(value=value)
        >>> patch_value = pd.Series({"index": 1, "x": 4, "y": "d"})
        >>> tabulator.patch(patch_value)
        >>> tabulator.value.to_dict("list")
        {'x': [1, 4], 'y': ['a', 'd']}

        Patch a Dataframe with a Dataframe. Please note the index is used in the update.
        >>> value = pd.DataFrame({"x": [1, 2], "y": ["a", "b"]})
        >>> tabulator = Tabulator(value=value)
        >>> patch_value = pd.DataFrame({"x": [3, 4], "y": ["c", "d"]})
        >>> tabulator.patch(patch_value)
        >>> tabulator.value.to_dict("list")
        {'x': [3, 4], 'y': ['c', 'd']}
        """
        if self.value is None or isinstance(patch_value, dict):
            self._patch(patch_value)
            return

        import pandas as pd
        if not isinstance(self.value, pd.DataFrame):
            raise ValueError(
                f"Patching an object of type {type(self.value).__name__} "
                "is not supported. Please provide a dict.")

        if isinstance(patch_value, pd.DataFrame):
            patch_value_dict = {}
            for column in patch_value.columns:
                patch_value_dict[column] = []
                for index in patch_value.index:
                    patch_value_dict[column].append(
                        (index, patch_value.loc[index, column]))
            self.patch(patch_value_dict)
        elif isinstance(patch_value, pd.Series):
            if "index" in patch_value:  # Series orient is row
                patch_value_dict = {
                    k: [(patch_value["index"], v)]
                    for k, v in patch_value.items()
                }
                patch_value_dict.pop("index")
            else:  # Series orient is column
                patch_value_dict = {
                    patch_value.name:
                    [(index, value) for index, value in patch_value.items()]
                }
            self.patch(patch_value_dict)
        elif isinstance(patch_value, dict):
            for k, v in patch_value.items():
                for update in v:
                    self.value.loc[update[0], k] = update[1]
                self._patch(patch_value)
        else:
            raise ValueError(
                f"Patching with a patch_value of type {type(patch_value).__name__} "
                "is not supported. Please provide a DataFrame, Series or Dict."
            )

    @property
    def selected_dataframe(self):
        """
        Returns a DataFrame of the currently selected rows.
        """
        if not self.selection:
            return self.value
        return self.value.iloc[self.selection]
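# Usage sketch (illustrative). BaseTable is abstract; the concrete
# panel.widgets.Tabulator subclass is assumed here to exercise the
# filtering, streaming and patching API documented above.
import pandas as pd
import panel as pn

df = pd.DataFrame({"x": [1, 2, 3], "y": ["a", "b", "c"]})
table = pn.widgets.Tabulator(df)

# Filter by a widget value: keep only rows whose 'x' falls in the range.
slider = pn.widgets.RangeSlider(start=0, end=10, value=(0, 10))
table.add_filter(slider, column="x")

# Append a new row and patch an existing cell in place.
table.stream({"x": 4, "y": "d"})
table.patch({"y": [(0, "z")]})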
Example #5
class TriMesh(Graph):
    """
    A TriMesh represents a mesh of triangles defined by simplices and
    nodes. The simplices are indices into the nodes array. The mesh
    therefore follows a data structure very similar to a graph, with
    the abstract connectivity between nodes stored on the TriMesh
    element itself, the node positions stored on a Nodes element and
    the concrete paths making up each triangle generated when required
    by accessing the edgepaths.

    Unlike a Graph, each simplex is defined by the node indices of
    the three corners of a triangle.
    """

    kdims = param.List(default=['node1', 'node2', 'node3'],
                       bounds=(3, 3),
                       doc="""
        Dimensions declaring the node indices of each triangle.""")

    group = param.String(default='TriMesh', constant=True)

    _node_type = Nodes

    _edge_type = EdgePaths

    def __init__(self, data, kdims=None, vdims=None, **params):
        if isinstance(data, tuple):
            data = data + (None, ) * (3 - len(data))
            edges, nodes, edgepaths = data
        else:
            edges, nodes, edgepaths = data, None, None

        super(TriMesh, self).__init__(edges,
                                      kdims=kdims,
                                      vdims=vdims,
                                      **params)
        if nodes is None:
            if len(self) == 0:
                nodes = []
            else:
                raise ValueError("TriMesh expects both simplices and nodes "
                                 "to be supplied.")

        if isinstance(nodes, self._node_type):
            pass
        elif isinstance(nodes, Points):
            # Add index to make it a valid Nodes object
            nodes = self._node_type(
                Dataset(nodes).add_dimension('index', 2,
                                             np.arange(len(nodes))))
        elif not isinstance(nodes, Dataset) or nodes.ndims in [2, 3]:
            try:
                # Try assuming data contains indices (3 columns)
                nodes = self._node_type(nodes)
            except Exception:
                # Try assuming data contains just coordinates (2 columns)
                try:
                    points = Points(nodes)
                    ds = Dataset(points).add_dimension('index', 2,
                                                       np.arange(len(points)))
                    nodes = self._node_type(ds)
                except Exception:
                    raise ValueError(
                        "Nodes argument could not be interpreted, expected "
                        "data with two or three columns representing the "
                        "x/y positions and optionally the node indices.")
        if edgepaths is not None and not isinstance(edgepaths,
                                                    self._edge_type):
            edgepaths = self._edge_type(edgepaths)

        self._nodes = nodes
        self._edgepaths = edgepaths

    @classmethod
    def from_vertices(cls, points):
        """
        Uses Delaunay triangulation to compute triangle simplices for
        each point.
        """
        try:
            from scipy.spatial import Delaunay
        except Exception:
            raise ImportError("Generating triangles from points requires, "
                              "SciPy to be installed.")
        if not isinstance(points, Points):
            points = Points(points)
        tris = Delaunay(points.array([0, 1]))
        return cls((tris.simplices, points))

    @property
    def edgepaths(self):
        """
        Returns the EdgePaths by generating a triangle for each simplex.
        """
        if self._edgepaths:
            return self._edgepaths
        elif not len(self):
            edgepaths = self._edge_type([], kdims=self.nodes.kdims[:2])
            self._edgepaths = edgepaths
            return edgepaths

        simplices = self.array([0, 1, 2]).astype(np.int32)
        pts = self.nodes.array([0, 1]).astype(float)
        empty = np.array([[np.nan, np.nan]])
        paths = [
            arr for tri in pts[simplices]
            for arr in (tri[[0, 1, 2, 0], :], empty)
        ][:-1]
        edgepaths = self._edge_type([np.concatenate(paths)],
                                    kdims=self.nodes.kdims[:2])
        self._edgepaths = edgepaths
        return edgepaths

    def select(self, selection_specs=None, **selection):
        """
        Allows selecting data by the slices, sets and scalar values
        along a particular dimension. The indices should be supplied as
        keywords mapping between the selected dimension and
        value. Additionally selection_specs (taking the form of a list
        of type.group.label strings, types or functions) may be
        supplied, which will ensure the selection is only applied if the
        specs match the selected object.
        """
        # Ensure that edgepaths are initialized so they can be selected on
        self.edgepaths
        return super(TriMesh, self).select(selection_specs=selection_specs,
                                           selection_mode='nodes',
                                           **selection)
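# Usage sketch (illustrative). Builds a TriMesh from explicit simplices and
# node positions, and from a raw point cloud via Delaunay triangulation
# (from_vertices requires SciPy). Assumes the element is exposed as hv.TriMesh.
import numpy as np
import holoviews as hv

node_xy = np.array([[0.0, 0.0], [1.0, 0.0], [0.5, 1.0], [1.5, 1.0]])
simplices = np.array([[0, 1, 2], [1, 3, 2]])
trimesh = hv.TriMesh((simplices, node_xy))

points = np.random.rand(30, 2)
trimesh2 = hv.TriMesh.from_vertices(points)
paths = trimesh2.edgepaths  # concrete triangle outlines, generated lazily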
Example #6
class Tabs(Panel):
    """
    Panel of Viewables to be displayed in separate tabs.
    """

    active = param.Integer(default=0,
                           doc="""
        Number of the currently active tab.""")

    objects = param.List(default=[],
                         doc="""
        The list of child objects that make up the tabs.""")

    height = param.Integer(default=None, bounds=(0, None))

    width = param.Integer(default=None, bounds=(0, None))

    _bokeh_model = BkTabs

    _rename = {'objects': 'tabs'}

    _linked_props = ['active']

    def __init__(self, *items, **params):
        from .pane import panel
        objects = []
        for pane in items:
            if isinstance(pane, tuple):
                name, pane = pane
            elif isinstance(pane, Viewable):
                name = pane.name
            else:
                name = None
            objects.append(panel(pane, name=name, _internal=True))
        super(Tabs, self).__init__(*objects, **params)

    def _get_objects(self, model, old_objects, doc, root, comm=None):
        """
        Returns new child models for the layout while reusing unchanged
        models and cleaning up any dropped objects.
        """
        from .pane import panel
        new_models = []
        for i, pane in enumerate(self.objects):
            pane = panel(pane, _internal=True)
            self.objects[i] = pane
            if pane in old_objects:
                child = pane._models[root.ref['id']]
            else:
                child = pane._get_model(doc, root, model, comm)
            name = pane[0].name if isinstance(
                pane, Panel) and len(pane) == 1 else pane.name
            child = BkPanel(title=name, child=child)
            new_models.append(child)
        return new_models

    def __setitem__(self, index, pane):
        from .pane import panel
        name = None
        if isinstance(pane, tuple):
            name, pane = pane
        new_objects = list(self.objects)
        new_objects[index] = panel(pane, name=name, _internal=True)
        self.objects = new_objects

    def append(self, pane):
        from .pane import panel
        name = None
        if isinstance(pane, tuple):
            name, pane = pane
        new_objects = list(self.objects)
        new_objects.append(panel(pane, name=name, _internal=True))
        self.objects = new_objects

    def insert(self, index, pane):
        from .pane import panel
        name = None
        if isinstance(pane, tuple):
            name, pane = pane
        new_objects = list(self.objects)
        new_objects.insert(index, panel(pane, name=name, _internal=True))
        self.objects = new_objects

    def pop(self, index):
        new_objects = list(self.objects)
        if index in new_objects:
            index = new_objects.index(index)
        new_objects.pop(index)
        self.objects = new_objects
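# Usage sketch (illustrative). Tabs accepts plain objects or (title, object)
# tuples; entries can also be appended, replaced or removed after creation.
import panel as pn

tabs = pn.Tabs(
    ('Scatter', pn.pane.Markdown('# Scatter view')),
    ('Table', pn.pane.Markdown('# Table view')),
)
tabs.append(('Extra', pn.pane.Markdown('An extra tab')))
tabs[0] = ('Renamed', pn.pane.Markdown('Replacement content'))
tabs.active = 1  # switch to the second tab programmatically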
Example #7
class FileBrowser(param.Parameterized):
    """
    """
    path = param.ClassSelector(Path, precedence=-1)
    path_text = param.String(label='', precedence=0.3)
    home = param.Action(lambda self: self.go_home(), label='🏠', precedence=0.1)
    up = param.Action(lambda self: self.move_up(), label='⬆️', precedence=0.2)
    refresh_control = param.Action(lambda self: self.refresh(),
                                   label='🔄',
                                   precedence=0.25)
    callback = param.Action(lambda x: None, label='Select', precedence=0.4)
    file_listing = param.ListSelector(default=[], label='', precedence=0.5)
    patterns = param.List(precedence=-1, default=['*'])
    show_hidden = param.Boolean(default=False,
                                label='Show Hidden Files',
                                precedence=0.35)

    def __init__(self, delayed_init=False, **params):
        self.delayed_init = delayed_init
        super().__init__(**params)
        self._initialize_path()

    def init(self):
        self.delayed_init = False
        self._initialize_path()

    def _initialize_path(self):
        if self.delayed_init:
            return

        if self.path_text:
            self.validate()

        if not self.path:
            self.go_home()
        else:
            self.make_options()

    def _new_path(self, path):
        return Path(path)

    @property
    def controls(self):
        return ['home', 'up', 'refresh_control']

    @property
    def control_styles(self):
        styles = {c: {'width': 25} for c in self.controls}

        styles.update(
            path_text={'width_policy': 'max'},
            callback={
                'width': 100,
                'button_type': 'success'
            },
        )
        return styles

    @property
    def panel(self):
        return pn.Column(
            pn.Param(
                self,
                parameters=self.controls + ['path_text', 'callback'],
                widgets=self.control_styles,
                default_layout=pn.Row,
                width_policy='max',
                show_name=False,
                margin=0,
            ),
            self.param.show_hidden,
            pn.Param(self.param.file_listing,
                     widgets={'file_listing': {
                         'height': 200
                     }},
                     width_policy='max'),
            width_policy='max',
            margin=0,
        )

    @property
    def value(self):
        if self.file_listing:
            return [str(self.path / v) for v in self.file_listing]
        else:
            return [self.path.as_posix()]

    def go_home(self):
        self.path = Path.cwd()

    def move_up(self):
        self.path = self.path.parent

    @param.depends('file_listing', watch=True)
    def move_down(self):
        for filename in self.file_listing:
            fn = self.path / filename
            if fn.is_dir():
                self.path = fn
                self.make_options()
            if self.callback:
                self.callback(True)

    def refresh(self):
        self.file_listing = ['.']

    @param.depends('path_text', watch=True)
    def validate(self):
        """Check that inputted path is valid - set validator accordingly"""
        path = self._new_path(self.path_text)
        if path and path.is_dir():
            self.path = path
        elif path and path.is_file():
            self.path = path.parent
        else:
            log.warning(f'Invalid Directory: {path}')

    @param.depends('path', 'show_hidden', watch=True)
    def make_options(self):
        self.path_text = self.path.as_posix()
        selected = []
        try:
            selected = [
                p.name + '/' for p in self.path.glob('*') if p.is_dir()
            ]
            for pattern in self.patterns:
                selected.extend([
                    p.name for p in self.path.glob(pattern) if not p.is_dir()
                ])
            if not self.show_hidden:
                selected = [p for p in selected if not str(p).startswith('.')]
        except Exception as e:
            log.exception(str(e))

        self.file_listing = []
        self.param.file_listing.objects = sorted(selected)
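# Usage sketch (illustrative) for the FileBrowser above: restrict the listing
# to CSV files, embed the browser in an app and read back the selection.
browser = FileBrowser(patterns=['*.csv'])
layout = browser.panel          # a pn.Column ready to serve or embed
selected_paths = browser.value  # list of selected paths as strings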
Example #8
class sanitize_identifier_fn(param.ParameterizedFunction):
    """
    Sanitizes group/label values for use in AttrTree attribute
    access. Depending on the version parameter, either sanitization
    appropriate for Python 2 (no unicode identifiers allowed) or
    Python 3 (some unicode allowed) is used.

    Note that if you are using Python 3, you can switch to version 2
    for compatibility but you cannot enable relaxed sanitization if
    you are using Python 2.

    Special characters are sanitized using their (lowercase) unicode
    name using the unicodedata module. For instance:

    >>> unicodedata.name(u'$').lower()
    'dollar sign'

    As these names are often very long, this parameterized function
    allows filtering, substitutions and transforms to help shorten these
    names appropriately.
    """

    version = param.ObjectSelector(sys.version_info.major,
                                   objects=[2, 3],
                                   doc="""
        The sanitization version. If set to 2, more aggressive
        sanitization appropriate for Python 2 is applied. Otherwise,
        if set to 3, more relaxed, Python 3 sanitization is used.""")

    capitalize = param.Boolean(default=True,
                               doc="""
       Whether the first letter should be converted to
       uppercase. Note, this will only be applied to ASCII characters
       in order to make sure paths aren't confused with method
       names.""")

    eliminations = param.List([
        'extended', 'accent', 'small', 'letter', 'sign', 'digit', 'latin',
        'greek', 'arabic-indic', 'with', 'dollar'
    ],
                              doc="""
       Lowercase strings to be eliminated from the unicode names in
       order to shorten the sanitized name (lowercase). Redundant
       strings should be removed but too much elimination could cause
       two unique strings to map to the same sanitized output.""")

    substitutions = param.Dict(default={
        'circumflex': 'power',
        'asterisk': 'times',
        'solidus': 'over'
    },
                               doc="""
       Lowercase substitutions of substrings in unicode names. For
       instance the ^ character has the name 'circumflex accent' even
       though it is more typically used for exponentiation. Note that
       substitutions occur after filtering and that there should be no
       ordering dependence between substitutions.""")

    transforms = param.List(default=[capitalize_unicode_name],
                            doc="""
       List of string transformation functions to apply after
       filtering and substitution in order to further compress the
       unicode name. For instance, the default capitalize_unicode_name
       function will turn the string "capital delta" into "Delta".""")

    disallowed = param.List(
        default=['trait_names', '_ipython_display_', '_getAttributeNames'],
        doc="""
       An explicit list of names that should not be allowed as
       attribute names on Tree objects.

       By default, prevents IPython from creating an entry called
       Trait_names due to an inconvenient getattr check (during
       tab-completion).""")

    disable_leading_underscore = param.Boolean(default=False,
                                               doc="""
       Whether leading underscores should be allowed to be sanitized
       with the leading prefix.""")

    aliases = param.Dict(default={},
                         doc="""
       A dictionary of aliases mapping long strings to their short,
       sanitized equivalents.""")

    prefix = 'A_'

    _lookup_table = param.Dict(default={},
                               doc="""
       Cache of previously computed sanitizations""")

    @param.parameterized.bothmethod
    def add_aliases(self_or_cls, **kwargs):
        """
        Conveniently add new aliases as keyword arguments. For instance
        you can add a new alias with add_aliases(short='Longer string')
        """
        self_or_cls.aliases.update({v: k for k, v in kwargs.items()})

    @param.parameterized.bothmethod
    def remove_aliases(self_or_cls, aliases):
        """
        Remove a list of aliases.
        """
        for k, v in list(self_or_cls.aliases.items()):
            if v in aliases:
                self_or_cls.aliases.pop(k)

    @param.parameterized.bothmethod
    def allowable(self_or_cls, name, disable_leading_underscore=None):
        disabled_reprs = [
            'javascript', 'jpeg', 'json', 'latex', 'pdf', 'png',
            'svg', 'markdown'
        ]
        disabled_ = (self_or_cls.disable_leading_underscore
                     if disable_leading_underscore is None else
                     disable_leading_underscore)
        if disabled_ and name.startswith('_'):
            return False
        isrepr = any(('_repr_%s_' % el) == name for el in disabled_reprs)
        return (name not in self_or_cls.disallowed) and not isrepr

    @param.parameterized.bothmethod
    def prefixed(self, identifier, version):
        """
        Whether or not the identifier will be prefixed.
        Strings that require the prefix are generally not recommended.
        """
        invalid_starting = ['Mn', 'Mc', 'Nd', 'Pc']
        if identifier.startswith('_'): return True
        return ((identifier[0] in string.digits) if version == 2 else
                (unicodedata.category(identifier[0]) in invalid_starting))

    @param.parameterized.bothmethod
    def remove_diacritics(self_or_cls, identifier):
        """
        Remove diacritics and accents from the input leaving other
        unicode characters alone."""
        chars = ''
        for c in identifier:
            replacement = unicodedata.normalize('NFKD', c).encode('ASCII', 'ignore')
            # encode() returns bytes, so test truthiness instead of comparing to the str ''
            if replacement:
                chars += bytes_to_unicode(replacement)
            else:
                chars += c
        return chars

    @param.parameterized.bothmethod
    def shortened_character_name(self_or_cls,
                                 c,
                                 eliminations=[],
                                 substitutions={},
                                 transforms=[]):
        """
        Given a unicode character c, return the shortened unicode name
        (as a list of tokens) by applying the eliminations,
        substitutions and transforms.
        """
        name = unicodedata.name(c).lower()
        # Filtering
        for elim in eliminations:
            name = name.replace(elim, '')
        # Substitution
        for i, o in substitutions.items():
            name = name.replace(i, o)
        for transform in transforms:
            name = transform(name)
        return ' '.join(name.strip().split()).replace(' ', '_').replace('-', '_')

    def __call__(self, name, escape=True, version=None):
        if name in [None, '']:
            return name
        elif name in self.aliases:
            return self.aliases[name]
        elif name in self._lookup_table:
            return self._lookup_table[name]
        name = bytes_to_unicode(name)
        version = self.version if version is None else version
        if not self.allowable(name):
            raise AttributeError(
                "String %r is in the disallowed list of attribute names: %r"
                % (name, self.disallowed))

        if version == 2:
            name = self.remove_diacritics(name)
        if self.capitalize and name and name[0] in string.ascii_lowercase:
            name = name[0].upper() + name[1:]

        sanitized = (self.sanitize_py2(name)
                     if version == 2 else self.sanitize_py3(name))
        if self.prefixed(name, version):
            sanitized = self.prefix + sanitized
        self._lookup_table[name] = sanitized
        return sanitized

    def _process_underscores(self, tokens):
        "Strip underscores to make sure the number is correct after join"
        groups = [[str(''.join(el))] if b else list(el)
                  for (b, el) in itertools.groupby(tokens, lambda k: k == '_')]
        flattened = [el for group in groups for el in group]
        processed = []
        for token in flattened:
            if token == '_': continue
            if token.startswith('_'):
                token = str(token[1:])
            if token.endswith('_'):
                token = str(token[:-1])
            processed.append(token)
        return processed

    def sanitize_py2(self, name):
        # This fix works but masks an issue in self.sanitize (py2)
        prefix = '_' if name.startswith('_') else ''
        valid_chars = string.ascii_letters + string.digits + '_'
        return prefix + str('_'.join(
            self.sanitize(name, lambda c: c in valid_chars)))

    def sanitize_py3(self, name):
        if not name.isidentifier():
            return '_'.join(
                self.sanitize(name, lambda c: ('_' + c).isidentifier()))
        else:
            return name

    def sanitize(self, name, valid_fn):
        "Accumulate blocks of hex and separate blocks by underscores"
        invalid = {'\a': 'a', '\b': 'b', '\v': 'v', '\f': 'f', '\r': 'r'}
        for cc in filter(lambda el: el in name, invalid.keys()):
            raise Exception(
                "Please use a raw string or escape control code '\\%s'" %
                invalid[cc])
        sanitized, chars = [], ''
        for split in name.split():
            for c in split:
                if valid_fn(c): chars += str(c) if c == '_' else c
                else:
                    short = self.shortened_character_name(
                        c, self.eliminations, self.substitutions,
                        self.transforms)
                    sanitized.extend([chars] if chars else [])
                    if short != '':
                        sanitized.append(short)
                    chars = ''
            if chars:
                sanitized.extend([chars])
                chars = ''
        return self._process_underscores(sanitized +
                                         ([chars] if chars else []))
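
This appears to be HoloViews' sanitize_identifier_fn utility, which the library exposes in holoviews.core.util as the pre-built instance sanitize_identifier. A minimal usage sketch, assuming that import path; the commented outputs are indicative only, since the actual result depends on the configured eliminations, substitutions and Python version:

from holoviews.core.util import sanitize_identifier

sanitize_identifier('A B')          # whitespace collapses to underscores: 'A_B'
sanitize_identifier('x^2')          # '^' is named 'circumflex accent'; with the default
                                    # substitutions this comes out roughly as 'X_power_2'

# Aliases short-circuit sanitization entirely: the long string maps straight
# to its short, pre-sanitized equivalent.
sanitize_identifier.add_aliases(speed='Speed (m/s)')
sanitize_identifier('Speed (m/s)')  # 'speed'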
Example #9
class SideHistogramPlot(ColorbarPlot, HistogramPlot):

    style_opts = HistogramPlot.style_opts + ['cmap']

    height = param.Integer(default=125, doc="The height of the plot")

    width = param.Integer(default=125, doc="The width of the plot")

    show_title = param.Boolean(default=False,
                               doc="""
        Whether to display the plot title.""")

    default_tools = param.List(
        default=[
            'save', 'pan', 'wheel_zoom', 'box_zoom', 'reset', 'ybox_select'
        ],
        doc="A list of plugin tools to use on the plot.")

    _callback = """
    color_mapper.low = cb_data['geometry']['y0'];
    color_mapper.high = cb_data['geometry']['y1'];
    source.trigger('change')
    main_source.trigger('change')
    """

    def get_data(self, element, ranges, style):
        if self.invert_axes:
            mapping = dict(top='right', bottom='left', left=0, right='top')
        else:
            mapping = dict(top='top', bottom=0, left='left', right='right')

        if self.static_source:
            data = dict(top=[], left=[], right=[])
        else:
            data = dict(top=element.values,
                        left=element.edges[:-1],
                        right=element.edges[1:])

        color_dims = self.adjoined.traverse(
            lambda x: x.handles.get('color_dim'))
        dim = color_dims[0] if color_dims else None
        cmapper = self._get_colormapper(dim, element, {}, {})
        if cmapper and dim in element.dimensions():
            data[dim.name] = ([] if self.static_source
                              else element.dimension_values(dim))
            mapping['fill_color'] = {'field': dim.name, 'transform': cmapper}
        self._get_hover_data(data, element)
        return (data, mapping, style)

    def _init_glyph(self, plot, mapping, properties):
        """
        Returns a Bokeh glyph object.
        """
        ret = super(SideHistogramPlot,
                    self)._init_glyph(plot, mapping, properties)
        if 'field' not in mapping.get('fill_color', {}):
            return ret
        dim = mapping['fill_color']['field']
        sources = self.adjoined.traverse(
            lambda x: (x.handles.get('color_dim'), x.handles.get('source')))
        sources = [src for cdim, src in sources if cdim == dim]
        tools = [
            t for t in self.handles['plot'].tools
            if isinstance(t, BoxSelectTool)
        ]
        if not tools or not sources:
            return
        box_select, main_source = tools[0], sources[0]
        handles = {
            'color_mapper': self.handles['color_mapper'],
            'source': self.handles['source'],
            'main_source': main_source
        }
        if box_select.callback:
            box_select.callback.code += self._callback
            box_select.callback.args.update(handles)
        else:
            box_select.callback = CustomJS(args=handles, code=self._callback)
        return ret
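
SideHistogramPlot is the bokeh-backend plot class that renders the marginal histograms HoloViews attaches via the .hist() adjoint operation. A minimal sketch of how such a plot is typically produced, assuming a working holoviews/bokeh installation:

import numpy as np
import holoviews as hv
hv.extension('bokeh')

# Adjoining histograms to a scatter yields an AdjointLayout; with the bokeh
# backend the marginal histograms are rendered by SideHistogramPlot.
points = hv.Points(np.random.randn(1000, 2))
layout = points.hist(dimension=['x', 'y'])
fig = hv.render(layout)  # bokeh model for the adjoined layout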
Example #10
class Param(PaneBase):
    """
    Param panes render a Parameterized class to a set of widgets which
    are linked to the parameter values on the class.
    """

    display_threshold = param.Number(default=0,
                                     precedence=-10,
                                     doc="""
        Parameters with precedence below this value are not displayed.""")

    default_layout = param.ClassSelector(default=Column,
                                         class_=Panel,
                                         is_instance=False)

    default_precedence = param.Number(default=1e-8,
                                      precedence=-10,
                                      doc="""
        Precedence value to use for parameters with no declared
        precedence.  By default, zero precedence is available for
        forcing some parameters to the top of the list, and other
        values above the default_precedence values can be used to sort
        or group parameters arbitrarily.""")

    expand = param.Boolean(default=False,
                           doc="""
        Whether parameterized subobjects are expanded or collapsed on
        instantiation.""")

    expand_button = param.Boolean(default=None,
                                  doc="""
        Whether to add buttons to expand and collapse sub-objects.""")

    expand_layout = param.Parameter(default=Column,
                                    doc="""
        Layout to expand sub-objects into.""")

    height = param.Integer(default=None,
                           bounds=(0, None),
                           doc="""
        Height of widgetbox the parameter widgets are displayed in.""")

    initializer = param.Callable(default=None,
                                 doc="""
        User-supplied function that will be called on initialization,
        usually to update the default Parameter values of the
        underlying parameterized object.""")

    parameters = param.List(default=[],
                            doc="""
        If set this serves as a whitelist of parameters to display on
        the supplied Parameterized object.""")

    show_labels = param.Boolean(default=True,
                                doc="""
        Whether to show labels for each widget""")

    show_name = param.Boolean(default=True,
                              doc="""
        Whether to show the parameterized object's name""")

    width = param.Integer(default=300,
                          allow_None=True,
                          bounds=(0, None),
                          doc="""
        Width of widgetbox the parameter widgets are displayed in.""")

    widgets = param.Dict(doc="""
        Dictionary of widget overrides, mapping from parameter name
        to widget class.""")

    priority = 0.1

    _unpack = True

    _mapping = {
        param.Action: Button,
        param.Boolean: Checkbox,
        param.Color: ColorPicker,
        param.Date: DatetimeInput,
        param.DateRange: DateRangeSlider,
        param.CalendarDateRange: DateRangeSlider,
        param.DataFrame: DataFrame,
        param.Dict: LiteralInputTyped,
        param.FileSelector: SingleFileSelector,
        param.Filename: TextInput,
        param.Foldername: TextInput,
        param.Integer: IntSlider,
        param.List: LiteralInputTyped,
        param.MultiFileSelector: FileSelector,
        param.ListSelector: MultiSelect,
        param.Number: FloatSlider,
        param.ObjectSelector: Select,
        param.Parameter: LiteralInputTyped,
        param.Range: RangeSlider,
        param.Selector: Select,
        param.String: TextInput,
    }

    _rerender_params = []

    def __init__(self, object=None, **params):
        if isinstance(object, param.Parameter):
            if 'show_name' not in params:
                params['show_name'] = False
            params['parameters'] = [object.name]
            object = object.owner
        if isinstance(object, param.parameterized.Parameters):
            object = object.cls if object.self is None else object.self
        if 'parameters' not in params and object is not None:
            params['parameters'] = [p for p in object.param if p != 'name']
        super(Param, self).__init__(object, **params)
        self._updating = []

        # Construct Layout
        kwargs = {
            p: v
            for p, v in self.param.get_param_values()
            if p in Layoutable.param and v is not None
        }
        self._widget_box = self.default_layout(**kwargs)

        layout = self.expand_layout
        if isinstance(layout, Panel):
            self._expand_layout = layout
            self.layout = self._widget_box
        elif isinstance(self._widget_box, layout):
            self.layout = self._expand_layout = self._widget_box
        elif isinstance(layout, type) and issubclass(layout, Panel):
            self.layout = self._expand_layout = layout(self._widget_box,
                                                       **kwargs)
        else:
            raise ValueError(
                'expand_layout expected to be a panel.layout.Panel '
                'type or instance, found %s type.' % type(layout).__name__)
        self.param.watch(self._update_widgets, [
            'object', 'parameters', 'display_threshold', 'expand_button',
            'expand', 'expand_layout', 'widgets', 'show_labels', 'show_name'
        ])
        self._update_widgets()

    def __repr__(self, depth=0):
        cls = type(self).__name__
        obj_cls = type(self.object).__name__
        params = [] if self.object is None else list(self.object.param)
        parameters = [k for k in params if k != 'name']
        params = []
        for p, v in sorted(self.param.get_param_values()):
            if v is self.param[p].default: continue
            elif v is None: continue
            elif isinstance(v, string_types) and v == '': continue
            elif p == 'object' or (p == 'name' and (v.startswith(obj_cls)
                                                    or v.startswith(cls))):
                continue
            elif p == 'parameters' and v == parameters:
                continue
            try:
                params.append('%s=%s' % (p, abbreviated_repr(v)))
            except RuntimeError:
                params.append('%s=%s' % (p, '...'))
        obj = 'None' if self.object is None else type(self.object).__name__
        template = '{cls}({obj}, {params})' if params else '{cls}({obj})'
        return template.format(cls=cls, params=', '.join(params), obj=obj)

    #----------------------------------------------------------------
    # Callback API
    #----------------------------------------------------------------

    def _synced_params(self):
        ignored_params = ['name', 'default_layout']
        return [p for p in Layoutable.param if p not in ignored_params]

    def _update_widgets(self, *events):
        parameters = []
        for event in sorted(events, key=lambda x: x.name):
            if event.name == 'object':
                if isinstance(event.new, param.parameterized.Parameters):
                    self.object = event.new.cls if event.new.self is None else event.new.self
                    return
                if event.new is None:
                    parameters = None
                else:
                    parameters = [p for p in event.new.param if p != 'name']
            if event.name == 'parameters':
                parameters = None if event.new == [] else event.new

        if parameters != [] and parameters != self.parameters:
            self.parameters = parameters
            return

        for cb in list(self._callbacks):
            if cb.inst in self._widget_box.objects:
                cb.inst.param.unwatch(cb)
                self._callbacks.remove(cb)

        # Construct widgets
        if self.object is None:
            self._widgets = {}
        else:
            self._widgets = self._get_widgets()

        alias = {'_title': 'name'}
        widgets = [
            widget for p, widget in self._widgets.items()
            if (self.object.param[alias.get(p, p)].precedence is None)
            or (self.object.param[alias.get(p, p)].precedence >= self.display_threshold)
        ]
        self._widget_box.objects = widgets
        if not (self.expand_button == False and not self.expand):
            self._link_subobjects()

    def _link_subobjects(self):
        for pname, widget in self._widgets.items():
            widgets = [widget] if isinstance(widget, Widget) else widget
            if not any(
                    is_parameterized(getattr(w, 'value', None)) or any(
                        is_parameterized(o) for o in getattr(w, 'options', []))
                    for w in widgets):
                continue
            if (isinstance(widgets, Row) and isinstance(widgets[1], Toggle)):
                selector, toggle = (widgets[0], widgets[1])
            else:
                selector, toggle = (widget, None)

            def toggle_pane(change, parameter=pname):
                "Adds or removes subpanel from layout"
                parameterized = getattr(self.object, parameter)
                existing = [
                    p for p in self._expand_layout.objects
                    if isinstance(p, Param)
                    and p.object in recursive_parameterized(parameterized)
                ]
                if not change.new:
                    self._expand_layout[:] = [
                        e for e in self._expand_layout.objects
                        if e not in existing
                    ]
                elif change.new:
                    kwargs = {
                        k: v
                        for k, v in self.param.get_param_values()
                        if k not in ['name', 'object', 'parameters']
                    }
                    pane = Param(parameterized,
                                 name=parameterized.name,
                                 **kwargs)
                    if isinstance(self._expand_layout, Tabs):
                        title = self.object.param[pname].label
                        pane = (title, pane)
                    self._expand_layout.append(pane)

            def update_pane(change, parameter=pname):
                "Replaces or removes the subpanel when the parameter value changes"
                layout = self._expand_layout
                existing = [
                    p for p in layout.objects
                    if isinstance(p, Param) and p.object is change.old
                ]

                if toggle:
                    toggle.disabled = not is_parameterized(change.new)
                if not existing:
                    return
                elif is_parameterized(change.new):
                    parameterized = change.new
                    kwargs = {
                        k: v
                        for k, v in self.param.get_param_values()
                        if k not in ['name', 'object', 'parameters']
                    }
                    pane = Param(parameterized,
                                 name=parameterized.name,
                                 **kwargs)
                    layout[layout.objects.index(existing[0])] = pane
                else:
                    layout.pop(existing[0])

            watchers = [selector.param.watch(update_pane, 'value')]
            if toggle:
                watchers.append(toggle.param.watch(toggle_pane, 'value'))
            self._callbacks += watchers

            if self.expand:
                if self.expand_button:
                    toggle.value = True
                else:
                    toggle_pane(namedtuple('Change', 'new')(True))

    def widget(self, p_name):
        """Create and link the widget for the parameter named p_name"""
        p_obj = self.object.param[p_name]
        kw_widget = {}

        if self.widgets is None or p_name not in self.widgets:
            widget_class = self.widget_type(p_obj)
        elif isinstance(self.widgets[p_name], dict):
            if 'type' in self.widgets[p_name]:
                widget_class = self.widgets[p_name].pop('type')
            else:
                widget_class = self.widget_type(p_obj)
            kw_widget = self.widgets[p_name]
        else:
            widget_class = self.widgets[p_name]

        if not self.show_labels and not issubclass(widget_class, _ButtonBase):
            label = ''
        else:
            label = p_obj.label
        kw = dict(disabled=p_obj.constant, name=label)

        value = getattr(self.object, p_name)
        if value is not None:
            kw['value'] = value

        # Update kwargs
        kw.update(kw_widget)

        if hasattr(p_obj, 'get_range'):
            options = p_obj.get_range()
            if not options and value is not None:
                options = [value]
            kw['options'] = options
        if hasattr(p_obj, 'get_soft_bounds'):
            bounds = p_obj.get_soft_bounds()
            if bounds[0] is not None:
                kw['start'] = bounds[0]
            if bounds[1] is not None:
                kw['end'] = bounds[1]
            if ('start' not in kw or 'end' not in kw):
                if isinstance(p_obj, param.Number):
                    widget_class = Spinner
                    if isinstance(p_obj, param.Integer):
                        kw['step'] = 1
                elif not issubclass(widget_class, LiteralInput):
                    widget_class = LiteralInput
            if hasattr(widget_class, 'step') and getattr(p_obj, 'step', None):
                kw['step'] = p_obj.step

        kwargs = {k: v for k, v in kw.items() if k in widget_class.param}

        if isinstance(widget_class, Widget):
            widget = widget_class
        else:
            widget = widget_class(**kwargs)
        widget._param_pane = self

        watchers = self._callbacks
        if isinstance(widget, Toggle):
            pass
        else:

            def link_widget(change):
                if p_name in self._updating:
                    return
                try:
                    self._updating.append(p_name)
                    self.object.param.set_param(**{p_name: change.new})
                finally:
                    self._updating.remove(p_name)

            if isinstance(p_obj, param.Action):

                def action(change):
                    value(self.object)

                watcher = widget.param.watch(action, 'clicks')
            else:
                watcher = widget.param.watch(link_widget, 'value')
            watchers.append(watcher)

            def link(change, watchers=[watcher]):
                updates = {}
                if change.what == 'constant':
                    updates['disabled'] = change.new
                elif change.what == 'precedence':
                    if (change.new < self.display_threshold
                            and widget in self._widget_box.objects):
                        self._widget_box.pop(widget)
                    elif change.new >= self.display_threshold:
                        precedence = lambda k: self.object.param[
                            'name' if k == '_title' else k].precedence
                        params = self._ordered_params
                        if self.show_name:
                            params.insert(0, '_title')
                        widgets = []
                        for k in params:
                            if precedence(k) is None or precedence(
                                    k) >= self.display_threshold:
                                widgets.append(self._widgets[k])
                        self._widget_box.objects = widgets
                    return
                elif change.what == 'objects':
                    updates['options'] = p_obj.get_range()
                elif change.what == 'bounds':
                    start, end = p_obj.get_soft_bounds()
                    updates['start'] = start
                    updates['end'] = end
                elif change.what == 'step':
                    updates['step'] = p_obj.step
                elif change.what == 'label':
                    updates['name'] = p_obj.label
                elif p_name in self._updating:
                    return
                elif isinstance(p_obj, param.Action):
                    prev_watcher = watchers[0]
                    widget.param.unwatch(prev_watcher)

                    def action(event):
                        change.new(self.object)

                    watchers[0] = widget.param.watch(action, 'clicks')
                    idx = self._callbacks.index(prev_watcher)
                    self._callbacks[idx] = watchers[0]
                    return
                else:
                    updates['value'] = change.new

                try:
                    self._updating.append(p_name)
                    widget.param.set_param(**updates)
                finally:
                    self._updating.remove(p_name)

            # Set up links to parameterized object
            watchers.append(self.object.param.watch(link, p_name, 'constant'))
            watchers.append(self.object.param.watch(link, p_name,
                                                    'precedence'))
            watchers.append(self.object.param.watch(link, p_name, 'label'))
            if hasattr(p_obj, 'get_range'):
                watchers.append(
                    self.object.param.watch(link, p_name, 'objects'))
            if hasattr(p_obj, 'get_soft_bounds'):
                watchers.append(self.object.param.watch(
                    link, p_name, 'bounds'))
            if 'step' in kw:
                watchers.append(self.object.param.watch(link, p_name, 'step'))
            watchers.append(self.object.param.watch(link, p_name))

        options = kwargs.get('options', [])
        if isinstance(options, dict):
            options = options.values()
        if ((is_parameterized(value)
             or any(is_parameterized(o) for o in options))
                and (self.expand_button or
                     (self.expand_button is None and not self.expand))):
            widget.margin = (5, 0, 5, 10)
            toggle = Toggle(name='\u22EE',
                            button_type='primary',
                            disabled=not is_parameterized(value),
                            max_height=30,
                            max_width=20,
                            height_policy='fit',
                            align='end',
                            margin=(0, 0, 5, 10))
            widget.width = self._widget_box.width - 60
            return Row(widget, toggle, width_policy='max', margin=0)
        else:
            return widget

    @property
    def _ordered_params(self):
        params = [(p, pobj)
                  for p, pobj in self.object.param.objects('existing').items()
                  if p in self.parameters or p == 'name']
        key_fn = lambda x: (x[1].precedence if x[1].precedence is not None
                            else self.default_precedence)
        sorted_precedence = sorted(params, key=key_fn)
        filtered = [(k, p) for k, p in sorted_precedence]
        groups = itertools.groupby(filtered, key=key_fn)
        # Params preserve definition order in Python 3.6+
        dict_ordered_py3 = (sys.version_info.major == 3
                            and sys.version_info.minor >= 6)
        dict_ordered = dict_ordered_py3 or (sys.version_info.major > 3)
        ordered_groups = [
            list(grp) if dict_ordered else sorted(grp) for (_, grp) in groups
        ]
        ordered_params = [
            el[0] for group in ordered_groups for el in group
            if (el[0] != 'name' or el[0] in self.parameters)
        ]
        return ordered_params

    #----------------------------------------------------------------
    # Model API
    #----------------------------------------------------------------

    def _get_widgets(self):
        """Return name,widget boxes for all parameters (i.e., a property sheet)"""
        # Format name specially
        if self.expand_layout is Tabs:
            widgets = []
        elif self.show_name:
            name = param_name(self.object.name)
            widgets = [('_title', StaticText(value='<b>{0}</b>'.format(name)))]
        else:
            widgets = []
        widgets += [(pname, self.widget(pname))
                    for pname in self._ordered_params]
        return OrderedDict(widgets)

    def _get_model(self, doc, root=None, parent=None, comm=None):
        model = self.layout._get_model(doc, root, parent, comm)
        self._models[root.ref['id']] = (model, parent)
        return model

    def _cleanup(self, root):
        self.layout._cleanup(root)
        super(Param, self)._cleanup(root)

    #----------------------------------------------------------------
    # Public API
    #----------------------------------------------------------------

    @classmethod
    def applies(cls, obj):
        return (is_parameterized(obj)
                or isinstance(obj, param.parameterized.Parameters) or
                (isinstance(obj, param.Parameter) and obj.owner is not None))

    @classmethod
    def widget_type(cls, pobj):
        ptype = type(pobj)
        for t in classlist(ptype)[::-1]:
            if t in cls._mapping:
                if isinstance(cls._mapping[t], types.FunctionType):
                    return cls._mapping[t](pobj)
                return cls._mapping[t]

    def get_root(self, doc=None, comm=None):
        """
        Returns the root model and applies pre-processing hooks

        Arguments
        ---------
        doc: bokeh.Document
          Bokeh document the bokeh model will be attached to.
        comm: pyviz_comms.Comm
          Optional pyviz_comms when working in notebook

        Returns
        -------
        Returns the bokeh model corresponding to this panel object
        """
        doc = doc or _curdoc()
        root = self.layout.get_root(doc, comm)
        ref = root.ref['id']
        self._models[ref] = (root, None)
        state._views[ref] = (self, root, doc, comm)
        return root
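
Assuming this is panel.param.Param (exposed as pn.Param), a minimal usage sketch: wrapping a Parameterized object generates one widget per listed parameter, and the widgets dict overrides the default parameter-to-widget mapping.

import param
import panel as pn

class Settings(param.Parameterized):
    threshold = param.Number(default=0.5, bounds=(0.0, 1.0))
    method = param.ObjectSelector(default='linear', objects=['linear', 'cubic'])

settings = Settings()

# One widget per listed parameter; `widgets` swaps the default FloatSlider
# for `threshold` with a literal text input instead.
pane = pn.Param(settings,
                parameters=['threshold', 'method'],
                widgets={'threshold': pn.widgets.LiteralInput},
                show_name=False)
pane.servable()  # or pn.serve(pane) from a script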
Example #11
class SegmentationModelBase(ModelConfigBase):
    """
    A class that holds all settings that are specific to segmentation models.
    """

    #: The segmentation model architecture to use.
    #: Valid options are defined at :class:`ModelArchitectureConfig`: 'Basic (DeepMedic)', 'UNet3D', 'UNet2D'
    architecture: str = param.String("Basic", doc="The model architecture (for example, UNet). Valid options are "
                                                  "UNet3D, UNet2D, Basic (DeepMedic)")

    #: The loss type to use during training.
    #: Valid options are defined at :class:`SegmentationLoss`: "SoftDice", "CrossEntropy", "Focal", "Mixture"
    loss_type: SegmentationLoss = param.ClassSelector(default=SegmentationLoss.SoftDice, class_=SegmentationLoss,
                                                      instantiate=False, doc="The loss_type to use")

    #: List of pairs of weights, loss types and class-weight-power values for use when loss_type is
    #: :attr:`SegmentationLoss.MixtureLoss`".
    mixture_loss_components: Optional[List[MixtureLossComponent]] = param.List(
        None, class_=MixtureLossComponent, instantiate=False,
        doc="List of pairs of weights, loss types and class-weight-power values for use when loss_type is MixtureLoss")

    #: For weighted loss, power to which to raise the weights per class. If this is None, loss is not weighted.
    loss_class_weight_power: Optional[float] = param.Number(None, allow_None=True,
                                                            doc="Power to which to raise class weights for loss "
                                                                "function; default value will depend on loss_type")

    #: Gamma value for focal loss: weight for each pixel is posterior likelihood to the power -focal_loss_gamma.
    focal_loss_gamma: float = param.Number(1.0, doc="Gamma value for focal loss: weight for each pixel is "
                                                    "posterior likelihood to the power -focal_loss_gamma.")

    #: The spacing X, Y, Z expected for all images in the dataset
    dataset_expected_spacing_xyz: Optional[TupleFloat3] = param.NumericTuple(
        None, length=3, allow_None=True,
        doc="The spacing X, Y, Z expected for all images in the dataset")

    #: The number of feature channels at different stages of the model.
    feature_channels: List[int] = param.List(None, class_=int, bounds=(1, None), instantiate=False,
                                             doc="The number of feature channels at different stages of the model.")

    #: The size of the convolution kernels.
    kernel_size: int = param.Integer(3, bounds=(1, None), doc="The size of the convolution kernels.")

    #: The size of the random crops that will be drawn from the input images during training. This is also the
    #: input size of the model.
    crop_size: TupleInt3 = IntTuple((1, 1, 1), length=3, doc="The size of the random crops that will be "
                                                             "drawn from the input images. This is also the "
                                                             "input size of the model.")

    #: The names of the image input channels that the model consumes. These channels must be present in the
    #: dataset.csv file.
    image_channels: List[str] = param.List(None, class_=str, bounds=(1, None), instantiate=False,
                                           doc="The names of the image input channels that the model consumes. "
                                               "These channels must be present in the dataset.csv file")

    #: The names of the ground truth channels that the model consumes. These channels must be present in the
    #: dataset.csv file
    ground_truth_ids: List[str] = param.List(None, class_=str, bounds=(1, None), instantiate=False,
                                             doc="The names of the ground truth channels that the model consumes. "
                                                 "These channels must be present in the dataset.csv file")

    #: The name of the channel that contains the `inside/outside body` information (to mask out the background).
    #: This channel must be present in the dataset
    mask_id: Optional[str] = param.String(None, allow_None=True, doc="The name of the channel that contains the "
                                                                     "`inside/outside body` information. "
                                                                     "This channel must be present in the dataset")

    #: The type of image normalization that should be applied. Must be None, or of type
    # :attr:`PhotometricNormalizationMethod`: Unchanged, SimpleNorm, MriWindow , CtWindow, TrimmedNorm
    norm_method: PhotometricNormalizationMethod = \
        param.ClassSelector(default=PhotometricNormalizationMethod.CtWindow,
                            class_=PhotometricNormalizationMethod,
                            instantiate=False,
                            doc="The type of image normalization that should be applied. Must be one of None, "
                                "Unchanged, SimpleNorm, MriWindow , CtWindow, TrimmedNorm")

    #: The Window setting for the :attr:`PhotometricNormalizationMethod.CtWindow` normalization.
    window: int = param.Integer(600, bounds=(0, None), doc="The Window setting for the 'CtWindow' normalization.")

    #: The level setting for the :attr:`PhotometricNormalizationMethod.CtWindow` normalization.
    level: int = param.Integer(50, doc="The level setting for the 'CtWindow' normalization.")

    #: The value range that image normalization should produce. This is the input range to the network.
    output_range: TupleFloat2 = param.NumericTuple((-1.0, 1.0), length=2,
                                                   doc="The value range that image normalization should produce. "
                                                       "This is the input range to the network.")

    #: If true, create additional plots during image normalization.
    debug_mode: bool = param.Boolean(False, doc="If true, create additional plots during image normalization.")

    #: Tail parameter allows window range to be extended to the right; used in
    #: :attr:`PhotometricNormalizationMethod.MriWindow`. The value must be a list with one entry per input channel
    #: if the model has multiple input channels
    tail: List[float] = param.List(None, class_=float,
                                   doc="Tail parameter allows window range to be extended to the right; used in MriWindow."
                                       " The value must be a list with one entry per input channel "
                                       "if the model has multiple input channels.")

    #: Sharpen parameter specifies number of standard deviations from mean to be included in window range.
    #: Used in :attr:`PhotometricNormalizationMethod.MriWindow`
    sharpen: float = param.Number(0.9, doc="Sharpen parameter specifies number of standard deviations "
                                           "from mean to be included in window range. Used in MriWindow")

    #: Percentile at which to trim input distribution prior to normalization. Used in
    #: :attr:`PhotometricNormalizationMethod.TrimmedNorm`
    trim_percentiles: TupleFloat2 = param.NumericTuple((1.0, 99.0), length=2,
                                                       doc="Percentile at which to trim input distribution prior "
                                                           "to normalization. Used in TrimmedNorm")

    #: Padding mode to use for training and inference. See :attr:`PaddingMode` for valid options.
    padding_mode: PaddingMode = param.ClassSelector(default=PaddingMode.Edge, class_=PaddingMode,
                                                    instantiate=False,
                                                    doc="Padding mode to use for training and inference")

    #: The batch size to use for inference forward pass.
    inference_batch_size: int = param.Integer(8, bounds=(1, None),
                                              doc="The batch size to use for inference forward pass")

    #: The crop size to use for model testing. If nothing is specified, crop_size parameter is used instead,
    #: i.e. training and testing crop size will be the same.
    test_crop_size: Optional[TupleInt3] = IntTuple(None, length=3, allow_None=True,
                                                   doc="The crop size to use for model testing. "
                                                       "If nothing is specified, "
                                                       "crop_size parameter is used instead, "
                                                       "i.e. training and testing crop size "
                                                       "will be the same.")

    #: The per-class probabilities for picking a center point of a crop.
    class_weights: Optional[List[float]] = param.List(None, class_=float, bounds=(1, None), allow_None=True,
                                                      instantiate=False,
                                                      doc="The per-class probabilities for picking a center point of "
                                                          "a crop.")

    #: Layer name hierarchy (parent, child recursive) as by model definition. If None, no activation maps will be saved
    activation_map_layers: Optional[List[str]] = param.List(None, class_=str, allow_None=True, bounds=(1, None),
                                                            instantiate=False,
                                                            doc="Layer name hierarchy (parent, child "
                                                                "recursive) as by model definition. If None, "
                                                                "no activation maps will be saved")

    #: The aggregation method to use when testing ensemble models. See :attr: `EnsembleAggregationType` for options.
    ensemble_aggregation_type: EnsembleAggregationType = param.ClassSelector(default=EnsembleAggregationType.Average,
                                                                             class_=EnsembleAggregationType,
                                                                             instantiate=False,
                                                                             doc="The aggregation method to use when "
                                                                                 "testing ensemble models.")

    #: The size of the smoothing kernel in mm to be used for smoothing posteriors before computing the final
    #: segmentations. No smoothing is performed if set to None.
    posterior_smoothing_mm: Optional[TupleInt3] = param.NumericTuple(None, length=3, allow_None=True,
                                                                     doc="The size of the smoothing kernel in mm to be "
                                                                         "used for smoothing posteriors before "
                                                                         "computing the final segmentations. No "
                                                                         "smoothing is performed if set to None")

    #: If True save image and segmentations for one image in a batch for each training epoch
    store_dataset_sample: bool = param.Boolean(False, doc="If True save image and segmentations for one image "
                                                          "in a batch for each training epoch")

    #: List of (name, container) pairs, where name is a descriptive name and container is a Azure ML storage account
    #: container name to be used for statistical comparisons
    comparison_blob_storage_paths: List[Tuple[str, str]] = param.List(
        None, class_=tuple,
        allow_None=True,
        doc="List of (name, container) pairs, where name is a descriptive name and container is a "
            "Azure ML storage account container name to be used for statistical comparisons")

    #: List of rules for structures that should be prevented from sharing the same slice.
    #: These are not applied if :attr:`disable_extra_postprocessing` is True.
    #: Parameter should be a list of :attr:`SliceExclusionRule` objects.
    slice_exclusion_rules: List[SliceExclusionRule] = param.List(
        default=[], class_=SliceExclusionRule, allow_None=False,
        doc="List of rules for structures that should be prevented from sharing the same slice; "
            "not applied if disable_extra_postprocessing is True.")

    #: List of rules for class pairs whose summed probability is used to create the segmentation map from predicted
    #: posterior probabilities.
    #: These are not applied if :attr:`disable_extra_postprocessing` is True.
    #: Parameter should be a list of :attr:`SummedProbabilityRule` objects.
    summed_probability_rules: List[SummedProbabilityRule] = param.List(
        default=[], class_=SummedProbabilityRule, allow_None=False,
        doc="List of rules for class pairs whose summed probability is used to create the segmentation map from "
            "predicted posterior probabilities; not applied if disable_extra_postprocessing is True.")

    #: Whether to ignore :attr:`slice_exclusion_rules` and :attr:`summed_probability_rules` even if defined
    disable_extra_postprocessing: bool = param.Boolean(
        False, doc="Whether to ignore slice_exclusion_rules and summed_probability_rules even if defined")

    #: User friendly display names to be used for each of the predicted GT classes. Default is ground_truth_ids if
    #: None provided
    ground_truth_ids_display_names: List[str] = param.List(None, class_=str, bounds=(1, None), instantiate=False,
                                                           allow_None=True,
                                                           doc="User friendly display names to be used for each of "
                                                               "the predicted GT classes. Default is ground_truth_ids "
                                                               "if None provided")

    #: Colours in (R, G, B) for the structures, same order as in ground_truth_ids_display_names
    colours: List[TupleInt3] = param.List(None, class_=tuple, bounds=(1, None), instantiate=False,
                                          allow_None=True,
                                          doc="Colours in (R, G, B) for the structures, same order as in "
                                              "ground_truth_ids_display_names")

    #: List of bool specifying if structures need filling holes. If True, the output of the model for that class
    #: will include postprocessing to fill holes, in the same order as in ground_truth_ids_display_names
    fill_holes: List[bool] = param.List(None, class_=bool, bounds=(1, None), instantiate=False,
                                        allow_None=True,
                                        doc="List of bool specifying if structures need filling holes. If True "
                                            "output of the model for that class includes postprocessing to fill holes, "
                                            "in the same order as in ground_truth_ids_display_names")

    _inference_stride_size: Optional[TupleInt3] = IntTuple(None, length=3, allow_None=True,
                                                           doc="The stride size in the inference pipeline. "
                                                               "At most, this should be the output_size to "
                                                               "avoid gaps in output posterior image. If it "
                                                               "is not specified, its value is set to "
                                                               "output size.")
    _center_size: Optional[TupleInt3] = IntTuple(None, length=3, allow_None=True)
    _train_output_size: Optional[TupleInt3] = IntTuple(None, length=3, allow_None=True)
    _test_output_size: Optional[TupleInt3] = IntTuple(None, length=3, allow_None=True)

    #: Dictionary of types to enforce for certain DataFrame columns, where key is column name and value is desired type.
    col_type_converters: Optional[Dict[str, Any]] = param.Dict(None,
                                                               doc="Dictionary of types to enforce for certain "
                                                                   "DataFrame columns, where key is column name "
                                                                   "and value is desired type.",
                                                               allow_None=True, instantiate=False)

    _largest_connected_component_foreground_classes: LARGEST_CC_TYPE = \
        param.List(None, class_=None, bounds=(1, None), instantiate=False, allow_None=True,
                   doc="The names of the ground truth channels for which to select the largest connected component in "
                       "the model predictions as an inference post-processing step. Alternatively, a member of the "
                       "list can be a tuple (name, threshold), where name is a channel name and threshold is a value "
                       "between 0 and 0.5 such that disconnected components will be kept if their volume (relative "
                       "to the whole structure) exceeds that value.")

    #: If true, various overview plots with results are generated during model evaluation. Set to False if you see
    #: non-deterministic pull request build failures.
    is_plotting_enabled: bool = param.Boolean(True, doc="If true, various overview plots with results are generated "
                                                        "during model evaluation. Set to False if you see "
                                                        "non-deterministic pull request build failures.")
    show_patch_sampling: int = param.Integer(5, bounds=(0, None),
                                             doc="Number of patients from the training set for which the effect of "
                                                 "patch sampling will be shown. Nifti images and thumbnails for each "
                                                 "of the first N subjects in the training set will be "
                                                 "written to the outputs folder.")

    def __init__(self, center_size: Optional[TupleInt3] = None,
                 inference_stride_size: Optional[TupleInt3] = None,
                 min_l_rate: float = 0,
                 largest_connected_component_foreground_classes: LARGEST_CC_TYPE = None,
                 **params: Any):
        super().__init__(**params)
        self.test_crop_size = self.test_crop_size if self.test_crop_size is not None else self.crop_size
        self.inference_stride_size = inference_stride_size
        self.min_l_rate = min_l_rate
        self.largest_connected_component_foreground_classes = largest_connected_component_foreground_classes
        self._center_size = center_size
        self._model_category = ModelCategory.Segmentation

    def validate(self) -> None:
        """
        Validates the parameters stored in the present object.
        """
        super().validate()
        check_is_any_of("Architecture", self.architecture, vars(ModelArchitectureConfig).keys())

        def len_or_zero(lst: Optional[List[Any]]) -> int:
            return 0 if lst is None else len(lst)

        if self.kernel_size % 2 == 0:
            raise ValueError("The kernel size must be an odd number (kernel_size: {})".format(self.kernel_size))

        if self.architecture != ModelArchitectureConfig.UNet3D:
            if any_pairwise_larger(self.center_size, self.crop_size):
                raise ValueError("Each center_size should be less than or equal to the crop_size "
                                 "(center_size: {}, crop_size: {})".format(self.center_size, self.crop_size))
        else:
            if self.crop_size != self.center_size:
                raise ValueError("For UNet3D, the center size of each dimension should be equal to the crop size "
                                 "(center_size: {}, crop_size: {})".format(self.center_size, self.crop_size))

        self.validate_inference_stride_size(self.inference_stride_size, self.get_output_size())

        # check to make sure there is no overlap between image and ground-truth channels
        image_gt_intersect = np.intersect1d(self.image_channels, self.ground_truth_ids)
        if len(image_gt_intersect) != 0:
            raise ValueError("Channels: {} were found in both image_channels, and ground_truth_ids"
                             .format(image_gt_intersect))

        valid_norm_methods = [method.value for method in PhotometricNormalizationMethod]
        check_is_any_of("norm_method", self.norm_method.value, valid_norm_methods)

        if len(self.trim_percentiles) < 2 or self.trim_percentiles[0] >= self.trim_percentiles[1]:
            raise ValueError("Thresholds should contain lower and upper percentile thresholds, but got: {}"
                             .format(self.trim_percentiles))

        if self.class_weights is None:
            raise ValueError("class_weights must be set.")
        if len(self.class_weights) != (len_or_zero(self.ground_truth_ids) + 1):
            raise ValueError("The number of class_weights needs to be equal to the number of ground_truth_ids + 1")
        SegmentationModelBase.validate_class_weights(self.class_weights)
        if self.ground_truth_ids is None:
            raise ValueError("ground_truth_ids is None")
        if len(self.ground_truth_ids_display_names) != len(self.ground_truth_ids):
            raise ValueError("len(ground_truth_ids_display_names)!=len(ground_truth_ids)")
        if len(self.ground_truth_ids_display_names) != len(self.colours):
            raise ValueError("len(ground_truth_ids_display_names)!=len(colours)")
        if len(self.ground_truth_ids_display_names) != len(self.fill_holes):
            raise ValueError("len(ground_truth_ids_display_names)!=len(fill_holes)")
        if self.mean_teacher_alpha is not None:
            raise ValueError("Mean teacher model is currently only supported for ScalarModels. "
                             "Please reset mean_teacher_alpha to None.")

    @staticmethod
    def validate_class_weights(class_weights: List[float]) -> None:
        """
        Checks that the given list of class weights is valid: The weights must be non-negative and add up to 1.0.
        Raises a ValueError if that is not the case.
        """
        if not isclose(sum(class_weights), 1.0):
            raise ValueError(f'class_weights needs to add to 1 but it was: {sum(class_weights)}')
        if np.any(np.array(class_weights) < 0):
            raise ValueError("class_weights must have non-negative values only, found: {}".format(class_weights))

    @staticmethod
    def validate_inference_stride_size(inference_stride_size: Optional[TupleInt3],
                                       output_size: Optional[TupleInt3]) -> None:
        """
        Checks that patch stride size is positive and smaller than output patch size to ensure that posterior
        predictions are obtained for all pixels
        """
        if inference_stride_size is not None:
            if any_smaller_or_equal_than(inference_stride_size, 0):
                raise ValueError("inference_stride_size must be > 0 in all dimensions, found: {}"
                                 .format(inference_stride_size))

            if output_size is not None:
                if any_pairwise_larger(inference_stride_size, output_size):
                    raise ValueError("inference_stride_size must be <= output_size in all dimensions. "
                                     "Found: output_size={}, inference_stride_size={}"
                                     .format(output_size, inference_stride_size))

    @property
    def number_of_image_channels(self) -> int:
        """
        Gets the number of image input channels that the model has (usually 1 CT channel, or multiple MR).
        """
        return 0 if self.image_channels is None else len(self.image_channels)

    @property
    def number_of_classes(self) -> int:
        """
        Returns the number of ground truth ids, including the background class.
        """
        return 1 if self.ground_truth_ids is None else len(self.ground_truth_ids) + 1

    @property
    def center_size(self) -> TupleInt3:
        """
        Gets the size of the center crop that the model predicts.
        """
        if self._center_size is None:
            return get_center_size(arch=self.architecture, crop_size=self.crop_size)
        # NOTE: assumes `warnings` is imported at module level; a bare Warning(...) call would not emit anything
        warnings.warn("'center_size' argument will soon be deprecated. Output shapes are inferred from models on the fly.")
        return self._center_size

    @property
    def inference_stride_size(self) -> Optional[TupleInt3]:
        """
        Gets the stride size that should be used when stitching patches at inference time.
        """
        if self._inference_stride_size is None:
            return self.get_output_size(ModelExecutionMode.TEST)
        return self._inference_stride_size

    @inference_stride_size.setter
    def inference_stride_size(self, val: Optional[TupleInt3]) -> None:
        """
        Sets the inference stride size with given value. This setter is used if output shape needs to be
        determined dynamically at run time
        """
        self._inference_stride_size = val
        self.validate_inference_stride_size(inference_stride_size=val,
                                            output_size=self.get_output_size(ModelExecutionMode.TEST))

    @property
    def example_images_folder(self) -> str:
        """
        Gets the full path in which the example images should be stored during training.
        """
        return str(self.outputs_folder / EXAMPLE_IMAGES_FOLDER)

    @property
    def largest_connected_component_foreground_classes(self) -> LARGEST_CC_TYPE:
        """
        Gets the list of classes for which the largest connected components should be computed when predicting.
        """
        return self._largest_connected_component_foreground_classes

    @largest_connected_component_foreground_classes.setter
    def largest_connected_component_foreground_classes(self, value: LARGEST_CC_TYPE) -> None:
        """
        Sets the list of classes for which the largest connected components should be computed when predicting.
        """
        pairs: Optional[List[Tuple[str, Optional[float]]]] = None
        if value is not None:
            # Set all members to be tuples rather than just class names.
            pairs = [val if isinstance(val, tuple) else (val, None) for val in value]
            class_names = set(pair[0] for pair in pairs)
            unknown_labels = class_names - set(self.ground_truth_ids)
            if unknown_labels:
                raise ValueError(
                    f"Found unknown labels {unknown_labels} in largest_connected_component_foreground_classes: "
                    f"labels must exist in [{self.ground_truth_ids}]")
            bad_thresholds = [pair[1] for pair in pairs if (pair[1] is not None)
                              and (pair[1] <= 0.0 or pair[1] > 0.5)]  # type: ignore
            if bad_thresholds:
                raise ValueError(
                    f"Found bad threshold(s) {bad_thresholds} in largest_connected_component_foreground_classes: "
                    "thresholds must be positive and at most 0.5.")

        self._largest_connected_component_foreground_classes = pairs
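        # Illustrative normalization (not in the original source): assigning
        # ["lung_l", ("lung_r", 0.2)] stores [("lung_l", None), ("lung_r", 0.2)],
        # assuming both names occur in ground_truth_ids; an unknown name or a
        # threshold outside (0, 0.5] raises a ValueError above.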

    def read_dataset_into_dataframe_and_pre_process(self) -> None:
        """
        Loads a dataset from the dataset.csv file, and stores it in the present object.
        """
        assert self.local_dataset is not None  # for mypy
        self.dataset_data_frame = pd.read_csv(self.local_dataset / DATASET_CSV_FILE_NAME,
                                              converters=self.col_type_converters, low_memory=False)
        self.pre_process_dataset_dataframe()

    def get_parameter_search_hyperdrive_config(self, estimator: Estimator) -> HyperDriveConfig:
        """
        Turns the given AzureML estimator (settings for running a job in AzureML) into a configuration object
        for doing hyperparameter searches.

        :param estimator: The settings for running a single AzureML job.
        :return: A HyperDriveConfig object for running multiple AzureML jobs.
        """
        return super().get_parameter_search_hyperdrive_config(estimator)

    def get_model_train_test_dataset_splits(self, dataset_df: DataFrame) -> DatasetSplits:
        """
        Computes the training, validation and test splits for the model, from a dataframe that contains
        the full dataset.

        :param dataset_df: A dataframe that contains the full dataset that the model is using.
        :return: An instance of DatasetSplits with dataframes for training, validation and testing.
        """
        return super().get_model_train_test_dataset_splits(dataset_df)

    def get_output_size(self, execution_mode: ModelExecutionMode = ModelExecutionMode.TRAIN) -> Optional[TupleInt3]:
        """
        Returns shape of model's output tensor for training, validation and testing inference modes
        """
        if (execution_mode == ModelExecutionMode.TRAIN) or (execution_mode == ModelExecutionMode.VAL):
            return self._train_output_size
        elif execution_mode == ModelExecutionMode.TEST:
            return self._test_output_size
        raise ValueError("Unknown execution mode '{}' for function 'get_output_size'".format(execution_mode))

    def adjust_after_mixed_precision_and_parallel(self, model: Any) -> None:
        """
        Updates the model config parameters (e.g. output patch size). If the testing patch stride size is unset,
        it is set to the output patch size.
        """
        self._train_output_size = model.get_output_shape(input_shape=self.crop_size)
        self._test_output_size = model.get_output_shape(input_shape=self.test_crop_size)
        if self.inference_stride_size is None:
            self.inference_stride_size = self._test_output_size
        else:
            if any_pairwise_larger(self.inference_stride_size, self._test_output_size):
                raise ValueError("The inference stride size must be smaller than the model's output size in each"
                                 "dimension. Inference stride was set to {}, the model outputs {} in test mode."
                                 .format(self.inference_stride_size, self._test_output_size))

    def class_and_index_with_background(self) -> Dict[str, int]:
        """
        Returns a dict of class names to indices, including the background class.
        The class index assumes that background is class 0, foreground starts at 1.
        For example, if the ground_truth_ids are ["foo", "bar"], the result
        is {"background": 0, "foo": 1, "bar": 2}

        :return: A dict, one entry for each entry in ground_truth_ids + 1 for the background class.
        """
        classes = {BACKGROUND_CLASS_NAME: 0}
        classes.update({x: i + 1 for i, x in enumerate(self.ground_truth_ids)})
        return classes

    def create_and_set_torch_datasets(self, for_training: bool = True, for_inference: bool = True) -> None:
        """
        Creates torch datasets for all model execution modes, and stores them in the object.
        """
        from InnerEye.ML.dataset.cropping_dataset import CroppingDataset
        from InnerEye.ML.dataset.full_image_dataset import FullImageDataset

        dataset_splits = self.get_dataset_splits()
        crop_transforms = self.get_cropped_image_sample_transforms()
        full_image_transforms = self.get_full_image_sample_transforms()
        if for_training:
            self._datasets_for_training = {
                ModelExecutionMode.TRAIN: CroppingDataset(
                    self,
                    dataset_splits.train,
                    cropped_sample_transforms=crop_transforms.train,  # type: ignore
                    full_image_sample_transforms=full_image_transforms.train),  # type: ignore
                ModelExecutionMode.VAL: CroppingDataset(
                    self, dataset_splits.val,
                    cropped_sample_transforms=crop_transforms.val,  # type: ignore
                    full_image_sample_transforms=full_image_transforms.val),  # type: ignore
            }
        if for_inference:
            self._datasets_for_inference = {
                mode: FullImageDataset(
                    self,
                    dataset_splits[mode],
                    full_image_sample_transforms=full_image_transforms.test)  # type: ignore
                for mode in ModelExecutionMode if len(dataset_splits[mode]) > 0
            }

    def create_model(self) -> Any:
        """
        Creates a PyTorch model from the settings stored in the present object.

        :return: The network model as a torch.nn.Module object
        """
        # Use a local import here to avoid reliance on pytorch too early.
        # Return type should be BaseModel, but that would also introduce reliance on pytorch.
        from InnerEye.ML.utils.model_util import build_net
        return build_net(self)

    def get_full_image_sample_transforms(self) -> ModelTransformsPerExecutionMode:
        """
        Get transforms to perform on full image samples for each model execution mode.
        By default only PhotometricNormalization is performed.
        """
        from InnerEye.ML.utils.transforms import Compose3D
        from InnerEye.ML.photometric_normalization import PhotometricNormalization

        photometric_transformation = Compose3D(transforms=[PhotometricNormalization(self, use_gpu=False)])
        return ModelTransformsPerExecutionMode(train=photometric_transformation,
                                               val=photometric_transformation,
                                               test=photometric_transformation)

    def get_cropped_image_sample_transforms(self) -> ModelTransformsPerExecutionMode:
        """
        Get transforms to perform on cropped samples for each model execution mode.
        By default no transformation is performed.
        """
        return ModelTransformsPerExecutionMode()
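

# A minimal sketch (not the InnerEye implementation) of the two tuple-comparison
# helpers used by validate_inference_stride_size and
# adjust_after_mixed_precision_and_parallel above; the real helpers are imported
# from elsewhere in the code base and may differ in detail.
def any_smaller_or_equal_than(values: TupleInt3, threshold: int) -> bool:
    """Returns True if any entry of the tuple is smaller than or equal to the threshold."""
    return any(v <= threshold for v in values)


def any_pairwise_larger(values: TupleInt3, reference: TupleInt3) -> bool:
    """Returns True if any entry of `values` is larger than the matching entry of `reference`."""
    return any(v > r for v, r in zip(values, reference))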
Exemple #12
0
class BarPlot(LegendPlot):

    group_index = param.Integer(default=0,
                                doc="""
       Index of the dimension in the supplied Bars
       Element, which will be laid out into groups.""")

    category_index = param.Integer(default=1,
                                   doc="""
       Index of the dimension in the supplied Bars
       Element, which will be laid out into categories.""")

    stack_index = param.Integer(default=2,
                                doc="""
       Index of the dimension in the supplied Bars
       Element, which will be stacked.""")

    padding = param.Number(default=0.2,
                           doc="""
       Defines the padding between groups.""")

    color_by = param.List(default=['category'],
                          doc="""
       Defines how the Bar elements are colored. Valid options include
       any permutation of 'group', 'category' and 'stack'.""")

    show_legend = param.Boolean(default=True,
                                doc="""
        Whether to show legend for the plot.""")

    xticks = param.Integer(0, precedence=-1)

    style_opts = [
        'alpha', 'color', 'align', 'visible', 'edgecolor', 'log', 'facecolor',
        'capsize', 'error_kw', 'hatch'
    ]

    legend_specs = dict(
        LegendPlot.legend_specs, **{
            'top':
            dict(bbox_to_anchor=(0., 1.02, 1., .102),
                 ncol=3,
                 loc=3,
                 mode="expand",
                 borderaxespad=0.),
            'bottom':
            dict(ncol=3,
                 mode="expand",
                 loc=2,
                 bbox_to_anchor=(0., -0.4, 1., .102),
                 borderaxespad=0.1)
        })

    _dimensions = OrderedDict([('group', 0), ('category', 1), ('stack', 2)])

    def __init__(self, element, **params):
        super(BarPlot, self).__init__(element, **params)
        self.values, self.bar_dimensions = self._get_values()

    def _get_values(self):
        """
        Get unique index value for each bar
        """
        gi, ci, si = self.group_index, self.category_index, self.stack_index
        ndims = self.hmap.last.ndims
        dims = self.hmap.last.kdims
        dimensions = []
        values = {}
        for vidx, vtype in zip([gi, ci, si], self._dimensions):
            if vidx < ndims:
                dim = dims[vidx]
                dimensions.append(dim)
                vals = self.hmap.dimension_values(dim.name)
            else:
                dimensions.append(None)
                vals = [None]
            values[vtype] = list(unique_iterator(vals))
        return values, dimensions

    def _compute_styles(self, element, style_groups):
        """
        Computes color and hatch combinations for any
        combination of the 'group', 'category' and 'stack'
        dimensions.
        """
        style = self.lookup_options(element, 'style')[0]
        sopts = []
        for sopt in ['color', 'hatch']:
            if sopt in style:
                sopts.append(sopt)
                style.pop(sopt, None)
        color_groups = []
        for sg in style_groups:
            color_groups.append(self.values[sg])
        style_product = list(product(*color_groups))
        wrapped_style = self.lookup_options(element, 'style').max_cycles(
            len(style_product))
        color_groups = {
            k: tuple(wrapped_style[n][sopt] for sopt in sopts)
            for n, k in enumerate(style_product)
        }
        return style, color_groups, sopts

    def get_extents(self, element, ranges):
        ngroups = len(self.values['group'])
        vdim = element.vdims[0].name
        if self.stack_index in range(element.ndims):
            return 0, 0, ngroups, np.NaN
        else:
            vrange = ranges[vdim]
            return 0, np.nanmin([vrange[0], 0]), ngroups, vrange[1]

    @mpl_rc_context
    def initialize_plot(self, ranges=None):
        element = self.hmap.last
        vdim = element.vdims[0]
        axis = self.handles['axis']
        key = self.keys[-1]

        ranges = self.compute_ranges(self.hmap, key, ranges)
        ranges = match_spec(element, ranges)

        self.handles['artist'], self.handles[
            'xticks'], xdims = self._create_bars(axis, element)
        return self._finalize_axis(key,
                                   ranges=ranges,
                                   xticks=self.handles['xticks'],
                                   dimensions=[xdims, vdim])

    def _finalize_ticks(self, axis, element, xticks, yticks, zticks):
        """
        Apply ticks with appropriate offsets.
        """
        yalignments = None
        if xticks is not None:
            ticks, labels, yalignments = zip(
                *sorted(xticks, key=lambda x: x[0]))
            xticks = (list(ticks), list(labels))
        super(BarPlot, self)._finalize_ticks(axis, element, xticks, yticks,
                                             zticks)
        if yalignments:
            for t, y in zip(axis.get_xticklabels(), yalignments):
                t.set_y(y)

    def _create_bars(self, axis, element):
        # Get style and dimension information
        values = self.values
        gi, ci, si = self.group_index, self.category_index, self.stack_index
        gdim, cdim, sdim = [
            element.kdims[i] if i < element.ndims else None
            for i in (gi, ci, si)
        ]
        indices = dict(zip(self._dimensions, (gi, ci, si)))
        style_groups = [
            sg for sg in self.color_by if indices[sg] < element.ndims
        ]
        style_opts, color_groups, sopts = self._compute_styles(
            element, style_groups)
        dims = element.dimensions('key', label=True)
        ndims = len(dims)
        xdims = [d for d in [cdim, gdim] if d is not None]

        # Compute widths
        width = (1 - (2. * self.padding)) / len(values['category'])

        # Initialize variables
        xticks = []
        val_key = [None] * ndims
        style_key = [None] * len(style_groups)
        label_key = [None] * len(style_groups)
        labels = []
        bars = {}

        # Iterate over group, category and stack dimension values
        # computing xticks and drawing bars and applying styles
        for gidx, grp_name in enumerate(values['group']):
            if grp_name is not None:
                grp = gdim.pprint_value(grp_name)
                if 'group' in style_groups:
                    idx = style_groups.index('group')
                    label_key[idx] = str(grp)
                    style_key[idx] = grp_name
                val_key[gi] = grp_name
                if ci < ndims:
                    yalign = -0.04
                else:
                    yalign = 0
                xticks.append((gidx + 0.5, grp, yalign))
            for cidx, cat_name in enumerate(values['category']):
                xpos = gidx + self.padding + (cidx * width)
                if cat_name is not None:
                    cat = cdim.pprint_value(cat_name)
                    if 'category' in style_groups:
                        idx = style_groups.index('category')
                        label_key[idx] = str(cat)
                        style_key[idx] = cat_name
                    val_key[ci] = cat_name
                    xticks.append((xpos + width / 2., cat, 0))
                prev = 0
                for stk_name in values['stack']:
                    if stk_name is not None:
                        if 'stack' in style_groups:
                            idx = style_groups.index('stack')
                            stk = sdim.pprint_value(stk_name)
                            label_key[idx] = str(stk)
                            style_key[idx] = stk_name
                        val_key[si] = stk_name
                    vals = element.sample([tuple(val_key)]).dimension_values(
                        element.vdims[0].name)
                    val = float(vals[0]) if len(vals) else np.NaN
                    label = ', '.join(label_key)
                    style = dict(style_opts,
                                 label='' if label in labels else label,
                                 **dict(
                                     zip(sopts,
                                         color_groups[tuple(style_key)])))
                    bar = axis.bar([xpos], [val],
                                   width=width,
                                   bottom=prev,
                                   **style)

                    # Update variables
                    bars[tuple(val_key)] = bar
                    prev += val if np.isfinite(val) else 0
                    labels.append(label)
        title = [
            str(element.kdims[indices[cg]]) for cg in self.color_by
            if indices[cg] < ndims
        ]

        if self.show_legend and any(len(l) for l in labels):
            leg_spec = self.legend_specs[self.legend_position]
            if self.legend_cols: leg_spec['ncol'] = self.legend_cols
            axis.legend(title=', '.join(title), **leg_spec)
        return bars, xticks, xdims

    def update_handles(self, key, axis, element, ranges, style):
        dims = element.dimensions('key', label=True)
        ndims = len(dims)
        ci, gi, si = self.category_index, self.group_index, self.stack_index
        val_key = [None] * ndims
        for g in self.values['group']:
            if g is not None: val_key[gi] = g
            for c in self.values['category']:
                if c is not None: val_key[ci] = c
                prev = 0
                for s in self.values['stack']:
                    if s is not None: val_key[si] = s
                    bar = self.handles['artist'].get(tuple(val_key))
                    if bar:
                        vals = element.sample([
                            tuple(val_key)
                        ]).dimension_values(element.vdims[0].name)
                        height = float(vals[0]) if len(vals) else np.NaN
                        bar[0].set_height(height)
                        bar[0].set_y(prev)
                        prev += height if np.isfinite(height) else 0
        return {'xticks': self.handles['xticks']}
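
# Hedged usage sketch (not part of the original example; assumes a HoloViews
# version in which this BarPlot is the registered matplotlib plot for Bars):
# the key dimensions of the element map onto group_index=0, category_index=1
# and stack_index=2.
import holoviews as hv
hv.extension('matplotlib')

bars = hv.Bars([('A', 'x', 1), ('A', 'y', 2), ('B', 'x', 3), ('B', 'y', 4)],
               kdims=['Group', 'Category'], vdims=['Value'])
plot = hv.renderer('matplotlib').get_plot(bars)  # instantiates a BarPlot internally
plot.initialize_plot()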
Exemple #13
0
class Tabs(Layout):
    """
    Tabs allows selecting between the supplied panes.
    """

    objects = param.List(default=[],
                         doc="""
        The list of child objects that make up the tabs.""")

    height = param.Integer(default=None, bounds=(0, None))

    width = param.Integer(default=None, bounds=(0, None))

    _bokeh_model = BkTabs

    _rename = {'objects': 'tabs'}

    def __init__(self, *items, **params):
        objects = []
        for pane in items:
            if isinstance(pane, tuple):
                name, pane = pane
            elif isinstance(pane, PaneBase):
                name = pane.name
            else:
                name = None
            objects.append(Pane(pane, name=name))
        super(Tabs, self).__init__(*objects, **params)

    def _get_objects(self, model, old_objects, doc, root, comm=None):
        """
        Returns new child models for the layout while reusing unchanged
        models and cleaning up any dropped objects.
        """
        old_children = getattr(model, self._rename.get('objects', 'objects'))
        new_models = []
        for i, pane in enumerate(self.objects):
            if pane in old_objects:
                child = old_children[old_objects.index(pane)]
            else:
                child = pane._get_model(doc, root, model, comm)
                child = BkPanel(title=pane.name, child=child)
            new_models.append(child)

        for pane, old_child in zip(old_objects, old_children):
            if old_child not in new_models:
                pane._cleanup(old_child.child, pane._temporary)

        return new_models

    def __setitem__(self, index, pane):
        name = None
        if isinstance(pane, tuple):
            name, pane = pane
        new_objects = list(self.objects)
        new_objects[index] = Pane(pane, name=name)
        self.objects = new_objects

    def append(self, pane):
        name = None
        if isinstance(pane, tuple):
            name, pane = pane
        new_objects = list(self.objects)
        new_objects.append(Pane(pane, name=name))
        self.objects = new_objects

    def insert(self, index, pane):
        name = None
        if isinstance(pane, tuple):
            name, pane = pane
        new_objects = list(self.objects)
        new_objects.insert(index, Pane(pane, name=name))
        self.objects = new_objects

    def pop(self, index):
        new_objects = list(self.objects)
        if index in new_objects:
            index = new_objects.index(index)
        new_objects.pop(index)
        self.objects = new_objects

    def _cleanup(self, model=None, final=False):
        super(Layout, self)._cleanup(model, final)
        if model is not None:
            for p, c in zip(self.objects, model.tabs):
                p._cleanup(c.child, final)
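
# Hedged usage sketch (not part of the original example; assumes panel's Pane
# machinery is importable in this module): tabs accept plain objects or
# (title, object) tuples, and the title becomes the tab label.
tabs = Tabs(('Intro', '# Some *markdown*'), ('Data', 'A second pane'))
tabs.append(('More', 'Appended afterwards'))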
Exemple #14
0
class Layout(Reactive):
    """
    Abstract baseclass for a layout of Panes.
    """

    objects = param.List(default=[],
                         doc="""
        The list of child objects that make up the layout.""")

    _bokeh_model = None

    __abstract = True

    _rename = {'objects': 'children'}

    def __init__(self, *objects, **params):
        objects = [Pane(pane) for pane in objects]
        super(Layout, self).__init__(objects=objects, **params)

    def _init_properties(self):
        properties = {
            k: v
            for k, v in self.param.get_param_values() if v is not None
        }
        del properties['objects']
        return self._process_param_change(properties)

    def _link_params(self, model, params, doc, root, comm=None):
        def set_value(*events):
            msg = {event.name: event.new for event in events}
            events = {event.name: event for event in events}
            if 'objects' in msg:
                old = events['objects'].old
                msg['objects'] = self._get_objects(model, old, doc, root, comm)
            msg = self._process_param_change(msg)

            def update_model():
                model.update(**msg)

            if comm:
                update_model()
                push(doc, comm)
            else:
                doc.add_next_tick_callback(update_model)

        ref = model.ref['id']
        watcher = self.param.watch(set_value, params)
        self._callbacks[ref].append(watcher)

    def _cleanup(self, model=None, final=False):
        super(Layout, self)._cleanup(model, final)
        if model is not None:
            for p, c in zip(self.objects, model.children):
                p._cleanup(c, final)

    def select(self, selector=None):
        """
        Iterates over the Viewable and any potential children,
        applying the Selector.

        Arguments
        ---------
        selector: type or callable or None
            The selector allows selecting a subset of Viewables by
            declaring a type or callable function to filter by.

        Returns
        -------
        viewables: list(Viewable)
        """
        objects = super(Layout, self).select(selector)
        for obj in self.objects:
            objects += obj.select(selector)
        return objects

    def _get_objects(self, model, old_objects, doc, root, comm=None):
        """
        Returns new child models for the layout while reusing unchanged
        models and cleaning up any dropped objects.
        """
        old_children = getattr(model, self._rename.get('objects', 'objects'))
        new_models = []
        for i, pane in enumerate(self.objects):
            pane = Pane(pane, _temporary=True)
            self.objects[i] = pane
            if pane in old_objects:
                child = old_children[old_objects.index(pane)]
            else:
                child = pane._get_model(doc, root, model, comm)
            new_models.append(child)

        for pane, old_child in zip(old_objects, old_children):
            if old_child not in new_models:
                pane._cleanup(old_child)
        return new_models

    def _get_model(self, doc, root=None, parent=None, comm=None):
        model = self._bokeh_model()
        root = model if root is None else root
        objects = self._get_objects(model, [], doc, root, comm)

        # HACK ALERT: Insert Spacer if last item in Column has no height
        if isinstance(self, Column) and objects and getattr(
                objects[-1], 'height', False) is None:
            objects.append(BkSpacer(height=50))

        props = dict(self._init_properties(), objects=objects)
        model.update(**self._process_param_change(props))
        params = [p for p in self.params() if p != 'name']
        self._link_params(model, params, doc, root, comm)
        return model

    def __setitem__(self, index, pane):
        new_objects = list(self.objects)
        new_objects[index] = Pane(pane)
        self.objects = new_objects
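
    # Note (comment added for clarity, not in the original): __setitem__, append,
    # insert and pop all rebuild the list and reassign self.objects rather than
    # mutating it in place, so the param watcher registered in _link_params
    # receives an 'objects' event and updates the bokeh model.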

    def append(self, pane):
        new_objects = list(self.objects)
        new_objects.append(Pane(pane))
        self.objects = new_objects

    def insert(self, index, pane):
        new_objects = list(self.objects)
        new_objects.insert(index, Pane(pane))
        self.objects = new_objects

    def pop(self, index):
        new_objects = list(self.objects)
        if index in new_objects:
            index = new_objects.index(index)
        new_objects.pop(index)
        self.objects = new_objects
Exemple #15
0
class DataFrame(HTML):
    """
    The `DataFrame` pane renders pandas, dask and streamz DataFrame types using
    their custom HTML repr. 
    
    In the case of a streamz DataFrame the rendered data will update
    periodically.
    
    Reference: https://panel.holoviz.org/reference/panes/DataFrame.html

    :Example:

    >>> DataFrame(df, index=False, max_rows=25, width=400)
    """

    bold_rows = param.Boolean(default=True, doc="""
        Make the row labels bold in the output.""")

    border = param.Integer(default=0, doc="""
        A ``border=border`` attribute is included in the opening
        `<table>` tag.""")

    classes = param.List(default=['panel-df'], doc="""
        CSS class(es) to apply to the resulting html table.""")

    col_space = param.ClassSelector(default=None, class_=(str, int), doc="""
        The minimum width of each column in CSS length units. An int
        is assumed to be px units.""")

    decimal = param.String(default='.', doc="""
        Character recognized as decimal separator, e.g. ',' in Europe.""")

    escape = param.Boolean(default=True, doc="""
        Whether or not to escape the dataframe HTML. For security reasons
        the default value is True.""")

    float_format = param.Callable(default=None, doc="""
        Formatter function to apply to columns' elements if they are
        floats. The result of this function must be a unicode string.""")

    formatters = param.ClassSelector(default=None, class_=(dict, list), doc="""
        Formatter functions to apply to columns' elements by position
        or name. The result of each function must be a unicode string.""")

    header = param.Boolean(default=True, doc="""
        Whether to print column labels.""")

    index = param.Boolean(default=True, doc="""
        Whether to print index (row) labels.""")

    index_names = param.Boolean(default=True, doc="""
        Prints the names of the indexes.""")

    justify = param.ObjectSelector(default=None, allow_None=True, objects=[
        'left', 'right', 'center', 'justify', 'justify-all', 'start',
        'end', 'inherit', 'match-parent', 'initial', 'unset'], doc="""
        How to justify the column labels.""")

    max_rows = param.Integer(default=None, doc="""
        Maximum number of rows to display.""")

    max_cols = param.Integer(default=None, doc="""
        Maximum number of columns to display.""")

    na_rep = param.String(default='NaN', doc="""
        String representation of NAN to use.""")

    render_links = param.Boolean(default=False, doc="""
        Convert URLs to HTML links.""")

    show_dimensions = param.Boolean(default=False, doc="""
        Display DataFrame dimensions (number of rows by number of
        columns).""")

    sparsify = param.Boolean(default=True, doc="""
        Set to False for a DataFrame with a hierarchical index to
        print every multi-index key at each row.""")

    _object = param.Parameter(default=None, doc="""Hidden parameter.""")

    _dask_params = ['max_rows']

    _rerender_params = [
        'object', '_object', 'bold_rows', 'border', 'classes',
        'col_space', 'decimal', 'escape', 'float_format', 'formatters',
        'header', 'index', 'index_names', 'justify', 'max_rows',
        'max_cols', 'na_rep', 'render_links', 'show_dimensions',
        'sparsify', 'sizing_mode'
    ]

    def __init__(self, object=None, **params):
        super().__init__(object, **params)
        self._stream = None
        self._setup_stream()

    @classmethod
    def applies(cls, obj):
        module = getattr(obj, '__module__', '')
        name = type(obj).__name__
        if (any(m in module for m in ('pandas', 'dask', 'streamz')) and
            name in ('DataFrame', 'Series', 'Random', 'DataFrames', 'Seriess', 'Styler')):
            return 0.3
        else:
            return False

    def _set_object(self, object):
        self._object = object

    @param.depends('object', watch=True)
    def _setup_stream(self):
        if not self._models or not hasattr(self.object, 'stream'):
            return
        elif self._stream:
            self._stream.destroy()
            self._stream = None
        self._stream = self.object.stream.latest().rate_limit(0.5).gather()
        self._stream.sink(self._set_object)

    def _get_model(self, doc, root=None, parent=None, comm=None):
        model = super()._get_model(doc, root, parent, comm)
        self._setup_stream()
        return model

    def _cleanup(self, model):
        super()._cleanup(model)
        if not self._models and self._stream:
            self._stream.destroy()
            self._stream = None

    def _get_properties(self):
        properties = DivPaneBase._get_properties(self)
        if self._stream:
            df = self._object
        else:
            df = self.object
        if hasattr(df, 'to_frame'):
            df = df.to_frame()

        module = getattr(df, '__module__', '')
        if hasattr(df, 'to_html'):
            if 'dask' in module:
                html = df.to_html(max_rows=self.max_rows).replace('border="1"', '')
            elif 'style' in module:
                classes = ' '.join(self.classes)
                html = df.to_html(table_attributes=f'class="{classes}"')
            else:
                kwargs = {p: getattr(self, p) for p in self._rerender_params
                          if p not in DivPaneBase.param and p != '_object'}
                html = df.to_html(**kwargs)
        else:
            html = ''
        return dict(properties, text=escape(html))
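
# Hedged usage sketch (not part of the original example; assumes the streamz
# library is installed): a streaming DataFrame keeps re-rendering because
# _setup_stream above rate-limits the stream and sinks updates into _object.
from streamz.dataframe import Random

sdf = Random(interval='200ms', freq='50ms')  # synthetic streaming DataFrame
pane = DataFrame(sdf, max_rows=10)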
Exemple #16
0
class MontageBitmap(Bitmap):
    """
    A bitmap composed of tiles containing other bitmaps.

    Bitmaps are scaled to fit in the given tile size, and tiled
    left-to-right, top-to-bottom into the given number of rows and columns.
    """
    bitmaps = param.List(class_=Bitmap,doc="""
        The list of bitmaps to compose.""")

    rows = param.Integer(default=2, doc="""
        The number of rows in the montage.""")
    cols = param.Integer(default=2, doc="""
        The number of columns in the montage.""")
    shape = param.Composite(attribs=['rows','cols'], doc="""
        The shape of the montage. Same as (self.rows,self.cols).""")

    margin = param.Integer(default=5,doc="""
        The size in pixels of the margin to put around each
        tile in the montage.""")

    tile_size = param.NumericTuple(default=(100,100), doc="""
        The size in pixels of a tile in the montage.""")

    titles = param.List(class_=str, default=[], doc="""
        A list of titles to overlay on the tiles.""")

    title_pos = param.NumericTuple(default=(10,10), doc="""
        The position of the upper left corner of the title in each tile.""")

    title_options = param.Dict(default={}, doc="""
        Dictionary of options for drawing the titles.  Dict should
        contain keyword options for the PIL draw.text method.  Possible
        options include 'fill' (fill color), 'outline' (outline color),
        and 'font' (an ImageFont font instance).  The PIL defaults will
        be used for any omitted options.""",
        instantiate=False)

    hooks = param.List(default=[], doc="""
        A list of functions, one per tile, that take a PIL image as
        input and return a PIL image as output.  The hooks are applied
        to the tile images before resizing.  The value None can be
        inserted as a placeholder where no hook function is needed.""")

    resize_filter = param.Integer(default=Image.NEAREST,doc="""
       The filter used for resizing the images.  Defaults
       to NEAREST.  See PIL Image module documentation for other
       options and their meanings.""")

    bg_color = param.NumericTuple(default=(0,0,0), doc="""
       The background color for the montage, as (r,g,b).""")

    def __init__(self,**params):
        ## JPALERT: The Bitmap class is a Parameterized object,but its
        ## __init__ doesn't take **params and doesn't call super.__init__,
        ## so we have to skip it.
        ## JAB: Good point; Bitmap should be modified to be more like
        ## other PO classes.
        param.Parameterized.__init__(self,**params)

        rows,cols = self.shape
        tilew,tileh = self.tile_size
        bgr,bgg,bgb = self.bg_color

        width  = tilew*cols + self.margin*(cols*2)
        height = tileh*rows + self.margin*(rows*2)
        self.image = Image.new('RGB',(width,height),
                               (int(bgr*255),int(bgg*255),int(bgb*255)))

        self.title_options.setdefault('font',TITLE_FONT)

        for r in range(rows):
            for c in range(cols):
                i = r*self.cols+c
                if i < len(self.bitmaps):
                    bm = self.bitmaps[i]
                    bmw,bmh = bm.image.size
                    if bmw > bmh:
                        bmh = int( float(tilew)/bmw * bmh )
                        bmw = tilew
                    else:
                        bmw = int( float(tileh)/bmh * bmw )
                        bmh = tileh

                    if self.hooks and self.hooks[i]:
                        f = self.hooks[i]
                    else:
                        f = lambda x:x
                    new_bm = Bitmap(f(bm.image).resize((bmw,bmh)))
                    if self.titles:
                        draw = ImageDraw.Draw(new_bm.image)
                        draw.text(self.title_pos,self.titles[i],**self.title_options)
                    # Integer division keeps the paste offsets as ints under Python 3.
                    self.image.paste( new_bm.image,
                                      (c * width//cols + tilew//2 - bmw//2 + self.margin,
                                       r * height//rows + tileh//2 - bmh//2 + self.margin) )

                else:
                    break
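
# Hedged usage sketch (not part of the original example; assumes PIL is available
# and that the Bitmap wrapper above accepts a PIL image): compose four
# solid-color tiles into a 2x2 montage.
tiles = [Bitmap(Image.new('RGB', (50, 50), color))
         for color in [(255, 0, 0), (0, 255, 0), (0, 0, 255), (255, 255, 0)]]
montage = MontageBitmap(bitmaps=tiles, rows=2, cols=2, tile_size=(100, 100))
montage.image.save('montage.png')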
Exemple #17
0
class MWCSelect(WebComponent):
    """Implementation of the mwc-select component

    The `value` is the value selected by the user. Can be None.
    The `options` are the options that can be selected by the user

    Set `outlined` to change the style
    """

    html = param.String("""<mwc-select style="width:100%"></mwc-select>""")
    attributes_to_watch = param.Dict({"label": "name", "outlined": "outlined"})
    properties_to_watch = param.Dict({"value": "_index"})
    events_to_watch = param.Dict(default={"selected": "selects"})
    parameters_to_watch = param.List(["options"])

    outlined = param.Boolean(default=False)

    def __init__(self, min_height=60, **params):
        super().__init__(min_height=min_height, **params)

        self._set_class_()

    value = param.Parameter()
    selects = param.Integer()
    options = param.ClassSelector(default=[], class_=(dict, list))
    _index = param.String()

    def _get_html_from_parameters_to_watch(self, **params) -> str:
        options = params["options"]
        if not options:
            return """<mwc-select></mwc-select>"""

        innerhtml = []
        if isinstance(options, list):
            for index, obj in enumerate(options):
                if hasattr(obj, "name"):
                    value = obj.name
                else:
                    value = str(obj)
                item = f'<mwc-list-item value="{index}">{value}</mwc-list-item>'
                innerhtml.append(item)
        if isinstance(options, dict):
            for index, value in enumerate(options.values()):
                item = f'<mwc-list-item value="{index}">{str(value)}</mwc-list-item>'
                innerhtml.append(item)

        return f"""<mwc-select>{"".join(innerhtml)}</mwc-select>"""

    @param.depends("options", watch=True)
    def _set_class_(self):
        if isinstance(self.options, list):
            self.param.options.class_ = list
        if isinstance(self.options, dict):
            self.param.options.class_ = dict

    @param.depends("value", watch=True)
    def _update_index(self):
        # pylint: disable=unsupported-membership-test
        if isinstance(self.options, list) and self.value in self.options:
            self._index = str(self.options.index(self.value))
        elif isinstance(self.options, dict) and self.value in self.options:
            self._index = str(list(self.options).index(self.value))
        else:
            self._index = ""

    @param.depends("_index", watch=True)
    def _update_value(self):
        if self._index == "":
            self.value = None
        elif isinstance(self.options, list):
            self.value = self.options[int(self._index)]  # pylint: disable=unsubscriptable-object
        elif isinstance(self.options, dict):
            self.value = list(self.options)[int(self._index)]
        else:
            self.value = None
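
# Hedged usage sketch (not part of the original example; instantiating requires
# the WebComponent base class from the same code base): value and the hidden
# _index property stay in sync via the watchers above.
select = MWCSelect(options=['small', 'medium', 'large'])
select.value = 'medium'
assert select._index == '1'
select._index = '2'   # e.g. the browser reports that the third entry was selected
assert select.value == 'large'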
Exemple #18
0
class ElementPlot(GenericElementPlot, MPLPlot):

    apply_ticks = param.Boolean(default=True,
                                doc="""
        Whether to apply custom ticks.""")

    aspect = param.Parameter(default='square',
                             doc="""
        The aspect ratio mode of the plot. By default, a plot may
        select its own appropriate aspect ratio but sometimes it may
        be necessary to force a square aspect ratio (e.g. to display
        the plot as an element of a grid). The modes 'auto' and
        'equal' correspond to the axis modes of the same name in
        matplotlib, a numeric value specifying the ratio between plot
        width and height may also be passed. To control the aspect
        ratio between the axis scales use the data_aspect option
        instead.""")

    data_aspect = param.Number(default=None,
                               doc="""
        Defines the aspect of the axis scaling, i.e. the ratio of
        y-unit to x-unit.""")

    invert_zaxis = param.Boolean(default=False,
                                 doc="""
        Whether to invert the plot z-axis.""")

    labelled = param.List(default=['x', 'y'],
                          doc="""
        Whether to plot the 'x' and 'y' labels.""")

    logz = param.Boolean(default=False,
                         doc="""
         Whether to apply log scaling to the z-axis of the Chart.""")

    xformatter = param.ClassSelector(default=None,
                                     class_=(util.basestring, ticker.Formatter,
                                             FunctionType),
                                     doc="""
        Formatter for ticks along the x-axis.""")

    yformatter = param.ClassSelector(default=None,
                                     class_=(util.basestring, ticker.Formatter,
                                             FunctionType),
                                     doc="""
        Formatter for ticks along the y-axis.""")

    zformatter = param.ClassSelector(default=None,
                                     class_=(util.basestring, ticker.Formatter,
                                             FunctionType),
                                     doc="""
        Formatter for ticks along the z-axis.""")

    zaxis = param.Boolean(default=True,
                          doc="""
        Whether to display the z-axis.""")

    zlabel = param.String(default=None,
                          doc="""
        An explicit override of the z-axis label, if set takes precedence
        over the dimension label.""")

    zrotation = param.Integer(default=0,
                              bounds=(0, 360),
                              doc="""
        Rotation angle of the zticks.""")

    zticks = param.Parameter(default=None,
                             doc="""
        Ticks along z-axis specified as an integer, explicit list of
        tick locations, list of tuples containing the locations and
        labels or a matplotlib tick locator object. If set to None
        default matplotlib ticking behavior is applied.""")

    # Element Plots should declare the valid style options for matplotlib call
    style_opts = []

    # Declare which styles cannot be mapped to a non-scalar dimension
    _nonvectorized_styles = ['marker', 'alpha', 'cmap', 'angle', 'visible']

    # Whether plot has axes, disables setting axis limits, labels and ticks
    _has_axes = True

    def __init__(self, element, **params):
        super(ElementPlot, self).__init__(element, **params)
        check = self.hmap.last
        if isinstance(check, CompositeOverlay):
            check = check.values()[0]  # Should check if any are 3D plots
        if isinstance(check, Element3D):
            self.projection = '3d'

        for hook in self.initial_hooks:
            try:
                hook(self, element)
            except Exception as e:
                self.param.warning("Plotting hook %r could not be "
                                   "applied:\n\n %s" % (hook, e))

    def _finalize_axis(self,
                       key,
                       element=None,
                       title=None,
                       dimensions=None,
                       ranges=None,
                       xticks=None,
                       yticks=None,
                       zticks=None,
                       xlabel=None,
                       ylabel=None,
                       zlabel=None):
        """
        Applies all the axis settings before the axis or figure is returned.
        Only plots with zorder 0 get to apply their settings.

        When the number of the frame is supplied as n, this method looks
        up and computes the appropriate title, axis labels and axis bounds.
        """
        if element is None:
            element = self._get_frame(key)
        self.current_frame = element
        if not dimensions and element and not self.subplots:
            el = element.traverse(lambda x: x, [Element])
            if el:
                el = el[0]
                dimensions = el.nodes.dimensions() if isinstance(
                    el, Graph) else el.dimensions()
        axis = self.handles['axis']

        subplots = list(self.subplots.values()) if self.subplots else []
        if self.zorder == 0 and key is not None:
            if self.bgcolor:
                if mpl_version <= '1.5.9':
                    axis.set_axis_bgcolor(self.bgcolor)
                else:
                    axis.set_facecolor(self.bgcolor)

            # Apply title
            title = self._format_title(key)
            if self.show_title and title is not None:
                fontsize = self._fontsize('title')
                if 'title' in self.handles:
                    self.handles['title'].set_text(title)
                else:
                    self.handles['title'] = axis.set_title(title, **fontsize)

            # Apply subplot label
            self._subplot_label(axis)

            # Apply axis options if axes are enabled
            if element is not None and not any(not sp._has_axes
                                               for sp in [self] + subplots):
                # Set axis labels
                if dimensions:
                    self._set_labels(axis, dimensions, xlabel, ylabel, zlabel)
                else:
                    if self.xlabel is not None:
                        axis.set_xlabel(self.xlabel)
                    if self.ylabel is not None:
                        axis.set_ylabel(self.ylabel)
                    if self.zlabel is not None and hasattr(axis, 'set_zlabel'):
                        axis.set_zlabel(self.zlabel)

                if not subplots:
                    legend = axis.get_legend()
                    if legend:
                        legend.set_visible(self.show_legend)
                        self.handles["bbox_extra_artists"] += [legend]
                    axis.xaxis.grid(self.show_grid)
                    axis.yaxis.grid(self.show_grid)

                # Apply log axes
                if self.logx:
                    axis.set_xscale('log')
                if self.logy:
                    axis.set_yscale('log')

                if not self.projection == '3d':
                    self._set_axis_position(axis, 'x', self.xaxis)
                    self._set_axis_position(axis, 'y', self.yaxis)

                # Apply ticks
                if self.apply_ticks:
                    self._finalize_ticks(axis, dimensions, xticks, yticks,
                                         zticks)

                # Set axes limits
                self._set_axis_limits(axis, element, subplots, ranges)

            # Apply aspects
            if self.aspect is not None and self.projection != 'polar' and not self.adjoined:
                self._set_aspect(axis, self.aspect)

        if not subplots and not self.drawn:
            self._finalize_artist(element)

        self._execute_hooks(element)
        return super(ElementPlot, self)._finalize_axis(key)

    def _finalize_ticks(self, axis, dimensions, xticks, yticks, zticks):
        """
        Finalizes the ticks on the axes based on the supplied ticks
        and Elements. Sets the axes position as well as tick positions,
        labels and fontsize.
        """
        ndims = len(dimensions) if dimensions else 0
        xdim = dimensions[0] if ndims else None
        ydim = dimensions[1] if ndims > 1 else None

        # Tick formatting
        if xdim:
            self._set_axis_formatter(axis.xaxis, xdim, self.xformatter)
        if ydim:
            self._set_axis_formatter(axis.yaxis, ydim, self.yformatter)
        if self.projection == '3d':
            zdim = dimensions[2] if ndims > 2 else None
            if zdim or self.zformatter is not None:
                self._set_axis_formatter(axis.zaxis, zdim, self.zformatter)

        xticks = xticks if xticks else self.xticks
        self._set_axis_ticks(axis.xaxis,
                             xticks,
                             log=self.logx,
                             rotation=self.xrotation)

        yticks = yticks if yticks else self.yticks
        self._set_axis_ticks(axis.yaxis,
                             yticks,
                             log=self.logy,
                             rotation=self.yrotation)

        if self.projection == '3d':
            zticks = zticks if zticks else self.zticks
            self._set_axis_ticks(axis.zaxis,
                                 zticks,
                                 log=self.logz,
                                 rotation=self.zrotation)

        axes_str = 'xy'
        axes_list = [axis.xaxis, axis.yaxis]

        if hasattr(axis, 'zaxis'):
            axes_str += 'z'
            axes_list.append(axis.zaxis)

        for ax, ax_obj in zip(axes_str, axes_list):
            tick_fontsize = self._fontsize('%sticks' % ax,
                                           'labelsize',
                                           common=False)
            if tick_fontsize: ax_obj.set_tick_params(**tick_fontsize)

    def _finalize_artist(self, element):
        """
        Allows extending the _finalize_axis method with Element
        specific options.
        """
        pass

    def _set_labels(self,
                    axes,
                    dimensions,
                    xlabel=None,
                    ylabel=None,
                    zlabel=None):
        """
        Sets the labels of the axes using the supplied list of dimensions.
        Optionally explicit labels may be supplied to override the dimension
        label.
        """
        xlabel, ylabel, zlabel = self._get_axis_labels(dimensions, xlabel,
                                                       ylabel, zlabel)
        if self.invert_axes:
            xlabel, ylabel = ylabel, xlabel
        if xlabel and self.xaxis and 'x' in self.labelled:
            axes.set_xlabel(xlabel, **self._fontsize('xlabel'))
        if ylabel and self.yaxis and 'y' in self.labelled:
            axes.set_ylabel(ylabel, **self._fontsize('ylabel'))
        if zlabel and self.zaxis and 'z' in self.labelled:
            axes.set_zlabel(zlabel, **self._fontsize('zlabel'))

    def _set_axis_formatter(self, axis, dim, formatter):
        """
        Set axis formatter based on dimension formatter.
        """
        if isinstance(dim, list): dim = dim[0]
        if formatter is not None:
            pass
        elif dim.value_format:
            formatter = dim.value_format
        elif dim.type in dim.type_formatters:
            formatter = dim.type_formatters[dim.type]
        if formatter:
            axis.set_major_formatter(wrap_formatter(formatter))

    def get_aspect(self, xspan, yspan):
        """
        Computes the aspect ratio of the plot
        """
        if isinstance(self.aspect, (int, float)):
            return self.aspect
        elif self.aspect == 'square':
            return 1
        elif self.aspect == 'equal':
            return xspan / yspan
        return 1

    def _set_aspect(self, axes, aspect):
        """
        Set the aspect on the axes based on the aspect setting.
        """
        if ((isinstance(aspect, util.basestring) and aspect != 'square')
                or self.data_aspect):
            data_ratio = self.data_aspect or aspect
        else:
            (x0, x1), (y0, y1) = axes.get_xlim(), axes.get_ylim()
            xsize = np.log(x1) - np.log(x0) if self.logx else x1 - x0
            ysize = np.log(y1) - np.log(y0) if self.logy else y1 - y0
            xsize = max(abs(xsize), 1e-30)
            ysize = max(abs(ysize), 1e-30)
            data_ratio = 1. / (ysize / xsize)
            if aspect != 'square':
                data_ratio = data_ratio / aspect
        axes.set_aspect(data_ratio)

    def _set_axis_limits(self, axis, view, subplots, ranges):
        """
        Compute extents for current view and apply as axis limits
        """
        # Extents
        extents = self.get_extents(view, ranges)
        if not extents or self.overlaid:
            axis.autoscale_view(scalex=True, scaley=True)
            return

        valid_lim = lambda c: util.isnumeric(c) and not np.isnan(c)
        coords = [
            coord
            if np.isreal(coord) or isinstance(coord, np.datetime64) else np.NaN
            for coord in extents
        ]
        coords = [
            date2num(util.dt64_to_dt(c)) if isinstance(c, np.datetime64) else c
            for c in coords
        ]
        if self.projection == '3d' or len(extents) == 6:
            l, b, zmin, r, t, zmax = coords
            if self.invert_zaxis or any(p.invert_zaxis for p in subplots):
                zmin, zmax = zmax, zmin
            if zmin != zmax:
                if valid_lim(zmin):
                    axis.set_zlim(bottom=zmin)
                if valid_lim(zmax):
                    axis.set_zlim(top=zmax)
        else:
            l, b, r, t = coords

        if self.invert_axes:
            l, b, r, t = b, l, t, r

        invertx = self.invert_xaxis or any(p.invert_xaxis for p in subplots)
        xlim, scalex = self._compute_limits(l, r, self.logx, invertx, 'left',
                                            'right')
        inverty = self.invert_yaxis or any(p.invert_yaxis for p in subplots)
        ylim, scaley = self._compute_limits(b, t, self.logy, inverty, 'bottom',
                                            'top')
        if xlim:
            axis.set_xlim(**xlim)
        if ylim:
            axis.set_ylim(**ylim)
        axis.autoscale_view(scalex=scalex, scaley=scaley)

    def _compute_limits(self, low, high, log, invert, low_key, high_key):
        scale = True
        lims = {}
        valid_lim = lambda c: util.isnumeric(c) and not np.isnan(c)
        if not isinstance(low, util.datetime_types) and log and (low is None
                                                                 or low <= 0):
            low = 0.01 if high < 0.01 else 10**(np.log10(high) - 2)
            self.param.warning("Logarithmic axis range encountered value less "
                               "than or equal to zero, please supply explicit "
                               "lower-bound to override default of %.3f." %
                               low)
        if invert:
            high, low = low, high
        if isinstance(low, util.cftime_types) or low != high:
            if valid_lim(low):
                lims[low_key] = low
                scale = False
            if valid_lim(high):
                lims[high_key] = high
                scale = False
        return lims, scale

    def _set_axis_position(self, axes, axis, option):
        """
        Set the position and visibility of the xaxis or yaxis by
        supplying the axes object, the axis to set, i.e. 'x' or 'y'
        and an option to specify the position and visibility of the axis.
        The option may be None, 'bare' or positional, i.e. 'left' and
        'right' for the yaxis and 'top' and 'bottom' for the xaxis.
        May also combine positional and 'bare' into for example 'left-bare'.
        """
        positions = {'x': ['bottom', 'top'], 'y': ['left', 'right']}[axis]
        axis = axes.xaxis if axis == 'x' else axes.yaxis
        if option in [None, False]:
            axis.set_visible(False)
            for pos in positions:
                axes.spines[pos].set_visible(False)
        else:
            if option is True:
                option = positions[0]
            if 'bare' in option:
                axis.set_ticklabels([])
                axis.set_label_text('')
            if option != 'bare':
                option = option.split('-')[0]
                axis.set_ticks_position(option)
                axis.set_label_position(option)
        if not self.overlaid and not self.show_frame and self.projection != 'polar':
            pos = (positions[1] if
                   (option and (option == 'bare' or positions[0] in option))
                   else positions[0])
            axes.spines[pos].set_visible(False)

    def _set_axis_ticks(self, axis, ticks, log=False, rotation=0):
        """
        Allows setting the ticks for a particular axis either with
        a tuple of ticks, a tick locator object, an integer number
        of ticks, a list of tuples containing positions and labels
        or a list of positions. Also supports enabling log ticking
        if an integer number of ticks is supplied and setting a
        rotation for the ticks.
        """
        if isinstance(ticks, (list, tuple)) and all(
                isinstance(l, list) for l in ticks):
            axis.set_ticks(ticks[0])
            axis.set_ticklabels(ticks[1])
        elif isinstance(ticks, ticker.Locator):
            axis.set_major_locator(ticks)
        elif not ticks and ticks is not None:
            axis.set_ticks([])
        elif isinstance(ticks, int):
            if log:
                locator = ticker.LogLocator(numticks=ticks, subs=range(1, 10))
            else:
                locator = ticker.MaxNLocator(ticks)
            axis.set_major_locator(locator)
        elif isinstance(ticks, (list, tuple)):
            labels = None
            if all(isinstance(t, tuple) for t in ticks):
                ticks, labels = zip(*ticks)
            axis.set_ticks(ticks)
            if labels:
                axis.set_ticklabels(labels)
        for tick in axis.get_ticklabels():
            tick.set_rotation(rotation)
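
        # Illustrative tick specifications handled above (comment added for
        # clarity, not in the original): a pair of lists [[0, 1], ['lo', 'hi']]
        # sets positions and labels, a matplotlib Locator is installed directly,
        # an integer such as 5 uses MaxNLocator (or LogLocator when log=True),
        # and a list of plain positions or (position, label) tuples is also
        # accepted.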

    @mpl_rc_context
    def update_frame(self, key, ranges=None, element=None):
        """
        Set the plot(s) to the given frame number.  Operates by
        manipulating the matplotlib objects held in the self._handles
        dictionary.

        If n is greater than the number of available frames, update
        using the last available frame.
        """
        reused = isinstance(self.hmap, DynamicMap) and self.overlaid
        if not reused and element is None:
            element = self._get_frame(key)
        elif element is not None:
            self.current_key = key
            self.current_frame = element

        if element is not None:
            self.param.set_param(
                **self.lookup_options(element, 'plot').options)
        axis = self.handles['axis']

        axes_visible = element is not None or self.overlaid
        axis.xaxis.set_visible(axes_visible and self.xaxis)
        axis.yaxis.set_visible(axes_visible and self.yaxis)
        axis.patch.set_alpha(np.min([int(axes_visible), 1]))

        for hname, handle in self.handles.items():
            hideable = hasattr(handle, 'set_visible')
            if hname not in ['axis', 'fig'] and hideable:
                handle.set_visible(element is not None)
        if element is None:
            return

        ranges = self.compute_ranges(self.hmap, key, ranges)
        ranges = util.match_spec(element, ranges)

        max_cycles = self.style._max_cycles
        style = self.lookup_options(element, 'style')
        self.style = style.max_cycles(max_cycles) if max_cycles else style

        label = element.label if self.show_legend else ''
        style = dict(label=label,
                     zorder=self.zorder,
                     **self.style[self.cyclic_index])
        axis_kwargs = self.update_handles(key, axis, element, ranges, style)
        self._finalize_axis(key,
                            element=element,
                            ranges=ranges,
                            **(axis_kwargs if axis_kwargs else {}))

    @mpl_rc_context
    def initialize_plot(self, ranges=None):
        element = self.hmap.last
        ax = self.handles['axis']
        key = list(self.hmap.data.keys())[-1]
        dim_map = dict(zip((d.name for d in self.hmap.kdims), key))
        key = tuple(dim_map.get(d.name, None) for d in self.dimensions)
        ranges = self.compute_ranges(self.hmap, key, ranges)
        self.current_ranges = ranges
        self.current_frame = element
        self.current_key = key

        ranges = util.match_spec(element, ranges)

        style = dict(zorder=self.zorder, **self.style[self.cyclic_index])
        if self.show_legend:
            style['label'] = element.label

        plot_data, plot_kwargs, axis_kwargs = self.get_data(
            element, ranges, style)

        with abbreviated_exception():
            handles = self.init_artists(ax, plot_data, plot_kwargs)
        self.handles.update(handles)

        return self._finalize_axis(self.keys[-1],
                                   element=element,
                                   ranges=ranges,
                                   **axis_kwargs)

    def init_artists(self, ax, plot_args, plot_kwargs):
        """
        Initializes the artist based on the plot method declared on
        the plot.
        """
        plot_method = self._plot_methods.get(
            'batched' if self.batched else 'single')
        plot_fn = getattr(ax, plot_method)
        artist = plot_fn(*plot_args, **plot_kwargs)
        return {
            'artist':
            artist[0]
            if isinstance(artist, list) and len(artist) == 1 else artist
        }

    def update_handles(self, key, axis, element, ranges, style):
        """
        Update the elements of the plot.
        """
        self.teardown_handles()
        plot_data, plot_kwargs, axis_kwargs = self.get_data(
            element, ranges, style)

        with abbreviated_exception():
            handles = self.init_artists(axis, plot_data, plot_kwargs)
        self.handles.update(handles)
        return axis_kwargs

    def _apply_transforms(self, element, ranges, style):
        new_style = dict(style)
        for k, v in style.items():
            if isinstance(v, util.basestring):
                if validate(k, v) == True:
                    continue
                elif v in element or (isinstance(element, Graph)
                                      and v in element.nodes):
                    v = dim(v)
                elif any(d == v for d in self.overlay_dims):
                    v = dim([d for d in self.overlay_dims if d == v][0])

            if not isinstance(v, dim):
                continue
            elif (not v.applies(element)
                  and v.dimension not in self.overlay_dims):
                new_style.pop(k)
                self.param.warning(
                    'Specified %s dim transform %r could not be '
                    'applied, as not all dimensions could be resolved.' %
                    (k, v))
                continue

            if v.dimension in self.overlay_dims:
                ds = Dataset({d.name: v
                              for d, v in self.overlay_dims.items()},
                             list(self.overlay_dims))
                val = v.apply(ds, ranges=ranges, flat=True)[0]
            elif type(element) is Path:
                val = np.concatenate([
                    v.apply(el, ranges=ranges, flat=True)[:-1]
                    for el in element.split()
                ])
            else:
                val = v.apply(element, ranges)

            if (not np.isscalar(val) and len(util.unique_array(val)) == 1
                    and (not 'color' in k or validate('color', val))):
                val = val[0]

            if not np.isscalar(val) and k in self._nonvectorized_styles:
                element = type(element).__name__
                raise ValueError(
                    'Mapping a dimension to the "{style}" '
                    'style option is not supported by the '
                    '{element} element using the {backend} '
                    'backend. To map the "{dim}" dimension '
                    'to the {style} use a groupby operation '
                    'to overlay your data along the dimension.'.format(
                        style=k,
                        dim=v.dimension,
                        element=element,
                        backend=self.renderer.backend))

            style_groups = getattr(self, '_style_groups', [])
            groups = [sg for sg in style_groups if k.startswith(sg)]
            group = groups[0] if groups else None
            prefix = '' if group is None else group + '_'
            if (k in (prefix + 'c', prefix + 'color')
                    and isinstance(val, util.arraylike_types)
                    and not validate('color', val)):
                new_style.pop(k)
                self._norm_kwargs(element, ranges, new_style, v, val, prefix)
                if val.dtype.kind in 'OSUM':
                    range_key = dim_range_key(v)
                    if range_key in ranges and 'factors' in ranges[range_key]:
                        factors = ranges[range_key]['factors']
                    else:
                        factors = util.unique_array(val)
                    val = util.search_indices(val, factors)
                k = prefix + 'c'

            new_style[k] = val

        for k, val in list(new_style.items()):
            # If mapped to color/alpha override static fill/line style
            if k == 'c':
                new_style.pop('color', None)

            style_groups = getattr(self, '_style_groups', [])
            groups = [sg for sg in style_groups if k.startswith(sg)]
            group = groups[0] if groups else None
            prefix = '' if group is None else group + '_'

            # Check if element supports fill and line style
            supports_fill = ((prefix != 'edge'
                              or getattr(self, 'filled', True)) and any(
                                  o.startswith(prefix + 'face')
                                  for o in self.style_opts))

            if k in (prefix + 'c', prefix + 'color') and isinstance(
                    val, util.arraylike_types):
                fill_style = new_style.get(prefix + 'facecolor')
                if fill_style and validate('color', fill_style):
                    new_style.pop('facecolor')

                line_style = new_style.get(prefix + 'edgecolor')

                # If glyph has fill and line style is set overriding line color
                if supports_fill and line_style is not None:
                    continue

                if line_style and validate('color', line_style):
                    new_style.pop('edgecolor')
            elif k == 'facecolors' and not isinstance(
                    new_style.get('color', new_style.get('c')), np.ndarray):
                # Color overrides facecolors if defined
                new_style.pop('color', None)
                new_style.pop('c', None)

        return new_style

    def teardown_handles(self):
        """
        If no custom update_handles method is supplied this method
        is called to tear down any previous handles before replacing
        them.
        """
        if 'artist' in self.handles:
            self.handles['artist'].remove()
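
A minimal usage sketch of the axis, tick and style-mapping machinery above, assuming HoloViews with the matplotlib backend; the data and option values are invented for illustration.

import numpy as np
import holoviews as hv
hv.extension('matplotlib')

xs = np.linspace(0.1, 10, 50)
scatter = hv.Scatter((xs, xs ** 2))

# Explicit (position, label) tick pairs, a bare bottom x-axis and a log y-axis
# exercise _set_axis_ticks, _set_axis_position and the log-range guard above,
# while mapping color to a dimension goes through _apply_transforms.
scatter.opts(
    xticks=[(1, 'one'), (5, 'five'), (10, 'ten')],
    xaxis='bottom-bare',
    logy=True,
    color=hv.dim('y'),
)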
Exemple #19
0
class Panel(Reactive):
    """
    Abstract baseclass for a layout of Viewables.
    """

    objects = param.List(default=[],
                         doc="""
        The list of child objects that make up the layout.""")

    _bokeh_model = None

    __abstract = True

    _rename = {'objects': 'children'}

    _linked_props = []

    def __init__(self, *objects, **params):
        from .pane import panel
        objects = [panel(pane, _internal=True) for pane in objects]
        super(Panel, self).__init__(objects=objects, **params)

    def _init_properties(self):
        properties = {
            k: v
            for k, v in self.param.get_param_values() if v is not None
        }
        del properties['objects']
        return self._process_param_change(properties)

    def _link_params(self, model, params, doc, root, comm=None):
        def set_value(*events):
            msg = {event.name: event.new for event in events}
            events = {event.name: event for event in events}

            def update_model():
                if 'objects' in msg:
                    old = events['objects'].old
                    msg['objects'] = self._get_objects(model, old, doc, root,
                                                       comm)
                    for pane in old:
                        if pane not in self.objects:
                            pane._cleanup(root)
                processed = self._process_param_change(msg)
                model.update(**processed)

            if comm:
                update_model()
                push(doc, comm)
            else:
                doc.add_next_tick_callback(update_model)

        ref = root.ref['id']
        if ref not in self._callbacks:
            watcher = self.param.watch(set_value, params)
            self._callbacks[ref].append(watcher)

    def _cleanup(self, root=None, final=False):
        super(Panel, self)._cleanup(root, final)
        if root is not None:
            for p in self.objects:
                p._cleanup(root, final)

    def select(self, selector=None):
        """
        Iterates over the Viewable and any potential children,
        applying the Selector.

        Arguments
        ---------
        selector: type or callable or None
            The selector allows selecting a subset of Viewables by
            declaring a type or callable function to filter by.

        Returns
        -------
        viewables: list(Viewable)
        """
        objects = super(Panel, self).select(selector)
        for obj in self.objects:
            objects += obj.select(selector)
        return objects

    def _get_objects(self, model, old_objects, doc, root, comm=None):
        """
        Returns new child models for the layout while reusing unchanged
        models and cleaning up any dropped objects.
        """
        from .pane import panel
        new_models = []
        for i, pane in enumerate(self.objects):
            pane = panel(pane, _internal=True)
            self.objects[i] = pane
            if pane in old_objects:
                child = pane._models[root.ref['id']]
            else:
                child = pane._get_model(doc, root, model, comm)
            new_models.append(child)
        return new_models

    def _get_model(self, doc, root=None, parent=None, comm=None):
        model = self._bokeh_model()
        root = model if root is None else root
        objects = self._get_objects(model, [], doc, root, comm)

        # HACK ALERT: Insert Spacer if last item in Column has no height
        if (isinstance(self, Column) and objects
                and not has_height(objects[-1])):
            objects.append(BkSpacer(height=50))

        props = dict(self._init_properties(), objects=objects)
        model.update(**self._process_param_change(props))
        params = [p for p in self.params() if p != 'name']
        self._models[root.ref['id']] = model
        self._link_params(model, params, doc, root, comm)
        self._link_props(model, self._linked_props, doc, root, comm)
        return model

    def __getitem__(self, index):
        return self.objects[index]

    def __len__(self):
        return len(self.objects)

    def __contains__(self, obj):
        return obj in self.objects

    def __setitem__(self, index, pane):
        from .pane import panel
        new_objects = list(self.objects)
        new_objects[index] = panel(pane, _internal=True)
        self.objects = new_objects

    def __repr__(self, depth=0):
        spacer = '\n' + ('    ' * (depth + 1))
        cls = type(self).__name__
        params = param_reprs(self, ['objects'])
        objs = [
            '[%d] %s' % (i, obj.__repr__(depth + 1))
            for i, obj in enumerate(self.objects)
        ]
        if not params and not objs:
            return super(Panel, self).__repr__(depth + 1)
        elif not params:
            template = '{cls}{spacer}{objs}'
        elif not objs:
            template = '{cls}({params})'
        else:
            template = '{cls}({params}){spacer}{objs}'
        return template.format(cls=cls,
                               params=', '.join(params),
                               objs=('%s' % spacer).join(objs),
                               spacer=spacer)

    def append(self, pane):
        from .pane import panel
        new_objects = list(self.objects)
        new_objects.append(panel(pane, _internal=True))
        self.objects = new_objects

    def insert(self, index, pane):
        from .pane import panel
        new_objects = list(self.objects)
        new_objects.insert(index, panel(pane, _internal=True))
        self.objects = new_objects

    def pop(self, index):
        new_objects = list(self.objects)
        if index in new_objects:
            index = new_objects.index(index)
        new_objects.pop(index)
        self.objects = new_objects
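
A short usage sketch of the list-like layout API defined above, assuming the concrete Column container from panel; the contents are arbitrary placeholders.

import panel as pn

col = pn.Column('# Title', 'Some introductory text')
col.append(pn.widgets.TextInput(name='Name'))  # reassigns objects, triggering _get_objects
col.insert(1, 'Inserted markdown')
col[0] = '# New title'                         # __setitem__ replaces a child in place
col.pop(1)                                     # remove the inserted entry again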
Exemple #20
0
class gridmatrix(param.ParameterizedFunction):
    """
    The gridmatrix operation takes an Element or HoloMap
    of Elements as input and creates a GridMatrix object,
    which plots each dimension in the Element against
    each other dimension. This provides a very useful
    overview of high-dimensional data and is inspired
    by pandas and seaborn scatter_matrix implementations.
    """

    chart_type = param.Parameter(default=Scatter,
                                 doc="""
        The Element type used to display bivariate distributions
        of the data.""")

    diagonal_type = param.Parameter(default=None,
                                    doc="""
       The Element type drawn along the diagonal; may be a Histogram or
       any other plot type which can visualize a univariate distribution.
       This parameter overrides diagonal_operation.""")

    diagonal_operation = param.Parameter(default=histogram,
                                         doc="""
       The operation applied along the diagonal; may be the histogram
       operation or any other function which returns a viewable element.""")

    overlay_dims = param.List(default=[],
                              doc="""
       If a HoloMap is supplied this will allow overlaying one or
       more of its key dimensions.""")

    def __call__(self, data, **params):
        p = param.ParamOverrides(self, params)

        if isinstance(data, (HoloMap, NdOverlay)):
            ranges = {d.name: data.range(d) for d in data.dimensions()}
            data = data.clone({
                k: GridMatrix(self._process(p, v, ranges))
                for k, v in data.items()
            })
            data = Collator(data, merge_type=type(data))()
            if p.overlay_dims:
                data = data.map(lambda x: x.overlay(p.overlay_dims),
                                (HoloMap, ))
            return data
        elif isinstance(data, Element):
            data = self._process(p, data)
            return GridMatrix(data)

    def _process(self, p, element, ranges={}):
        # Creates a unified Dataset.data attribute
        # to draw the data from
        if isinstance(element.data, np.ndarray):
            el_data = element.table(default_datatype)
        else:
            el_data = element.data

        # Get dimensions to plot against each other
        types = (str, basestring, np.str_, np.object_) + datetime_types
        dims = [
            d for d in element.dimensions() if _is_number(element.range(d)[0])
            and not issubclass(element.get_dimension_type(d), types)
        ]
        permuted_dims = [(d1, d2) for d1 in dims for d2 in dims[::-1]]

        # Convert Histogram type to operation to avoid one case in the if below.
        if p.diagonal_type is Histogram:
            p.diagonal_type = None
            p.diagonal_operation = histogram

        data = {}
        for d1, d2 in permuted_dims:
            if d1 == d2:
                if p.diagonal_type is not None:
                    if p.diagonal_type._auto_indexable_1d:
                        el = p.diagonal_type(el_data,
                                             kdims=[d1],
                                             vdims=[d2],
                                             datatype=[default_datatype])
                    else:
                        values = element.dimension_values(d1)
                        el = p.diagonal_type(values, kdims=[d1])
                elif p.diagonal_operation is None:
                    continue
                elif p.diagonal_operation is histogram or isinstance(
                        p.diagonal_operation, histogram):
                    bin_range = ranges.get(d1.name, element.range(d1))
                    el = p.diagonal_operation(element,
                                              dimension=d1.name,
                                              bin_range=bin_range)
                else:
                    el = p.diagonal_operation(element, dimension=d1.name)
            else:
                if len(p.chart_type.kdims) == 2:
                    kdims, vdims = [d1, d2], []
                else:
                    kdims, vdims = d1, d2
                el = p.chart_type(el_data,
                                  kdims=kdims,
                                  vdims=vdims,
                                  datatype=[default_datatype])
            data[(d1.name, d2.name)] = el
        return data
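
A brief sketch of invoking the operation above on a tabular Dataset, assuming gridmatrix is importable from holoviews.operation; the column names and data are made up.

import numpy as np
import holoviews as hv
from holoviews.operation import gridmatrix
hv.extension('bokeh')

ds = hv.Dataset({'a': np.random.randn(100),
                 'b': np.random.randn(100),
                 'c': np.random.randn(100)}, kdims=['a', 'b', 'c'])
# Points off the diagonal, Histograms along it
grid = gridmatrix(ds, chart_type=hv.Points, diagonal_type=hv.Histogram)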
Exemple #21
0
class Param(PaneBase):
    """
    Param panes render a Parameterized class to a set of widgets which
    are linked to the parameter values on the class.
    """

    display_threshold = param.Number(default=0,
                                     precedence=-10,
                                     doc="""
        Parameters with precedence below this value are not displayed.""")

    default_precedence = param.Number(default=1e-8,
                                      precedence=-10,
                                      doc="""
        Precedence value to use for parameters with no declared
        precedence.  By default, zero precedence is available for
        forcing some parameters to the top of the list, and other
        values above the default_precedence value can be used to sort
        or group parameters arbitrarily.""")

    expand = param.Boolean(default=False,
                           doc="""
        Whether parameterized subobjects are expanded or collapsed on
        instantiation.""")

    expand_button = param.Boolean(default=None,
                                  doc="""
        Whether to add buttons to expand and collapse sub-objects.""")

    expand_layout = param.Parameter(default=Column,
                                    doc="""
        Layout to expand sub-objects into.""")

    height = param.Integer(default=None,
                           bounds=(0, None),
                           doc="""
        Height of widgetbox the parameter widgets are displayed in.""")

    initializer = param.Callable(default=None,
                                 doc="""
        User-supplied function that will be called on initialization,
        usually to update the default Parameter values of the
        underlying parameterized object.""")

    label_formatter = param.Callable(
        default=default_label_formatter,
        allow_None=True,
        doc="Callable used to format the parameter names into widget labels.")

    parameters = param.List(default=None,
                            doc="""
        If set, this serves as a whitelist of parameters to display on the supplied
        Parameterized object.""")

    show_labels = param.Boolean(default=True,
                                doc="""
        Whether to show labels for each widget.""")

    show_name = param.Boolean(default=True,
                              doc="""
        Whether to show the parameterized object's name.""")

    width = param.Integer(default=300,
                          bounds=(0, None),
                          doc="""
        Width of widgetbox the parameter widgets are displayed in.""")

    widgets = param.Dict(doc="""
        Dictionary of widget overrides, mapping from parameter name
        to widget class.""")

    precedence = 0.1

    _mapping = {
        param.Action: Button,
        param.Parameter: LiteralInput,
        param.Dict: LiteralInput,
        param.Selector: Select,
        param.ObjectSelector: ObjectSelector,
        param.FileSelector: FileSelector,
        param.Boolean: Checkbox,
        param.Number: FloatSlider,
        param.Integer: IntSlider,
        param.Range: RangeSlider,
        param.String: TextInput,
        param.ListSelector: MultiSelect,
        param.Date: DatetimeInput,
    }

    def __init__(self, object, **params):
        if isinstance(object, param.parameterized.Parameters):
            object = object.cls if object.self is None else object.self
        if 'name' not in params:
            params['name'] = object.name
        if 'parameters' not in params:
            params['parameters'] = [p for p in object.params() if p != 'name']
        super(Param, self).__init__(object, **params)

        # Construct widgets
        self._widgets = self._get_widgets()
        widgets = [
            widget for widgets in self._widgets.values() for widget in widgets
        ]
        self._widget_box = WidgetBox(*widgets,
                                     height=self.height,
                                     width=self.width,
                                     name=self.name)

        # Construct Layout
        kwargs = {'name': self.name}
        if self.expand_layout is Tabs:
            kwargs['width'] = self.width

        layout = self.expand_layout
        if isinstance(layout, Panel):
            self._expand_layout = layout
            self.layout = self._widget_box
        elif isinstance(layout, type) and issubclass(layout, Panel):
            self.layout = layout(self._widget_box, **kwargs)
            self._expand_layout = self.layout
        else:
            raise ValueError(
                'expand_layout expected to be a panel.layout.Panel'
                'type or instance, found %s type.' % type(layout).__name__)

        if not (self.expand_button == False and not self.expand):
            self._link_subobjects()

    def __repr__(self, depth=0):
        cls = type(self).__name__
        obj_cls = type(self.object).__name__
        params = [] if self.object is None else self.object.params()
        parameters = [k for k in params if k != 'name']
        params = []
        for p, v in sorted(self.get_param_values()):
            if v is self.params(p).default: continue
            elif v is None: continue
            elif isinstance(v, basestring) and v == '': continue
            elif p == 'object' or (p == 'name' and v.startswith(obj_cls)):
                continue
            elif p == 'parameters' and v == parameters:
                continue
            params.append('%s=%s' % (p, abbreviated_repr(v)))
        obj = type(self.object).__name__
        template = '{cls}({obj}, {params})' if params else '{cls}({obj})'
        return template.format(cls=cls, params=', '.join(params), obj=obj)

    def _link_subobjects(self):
        for pname, widgets in self._widgets.items():
            if not any(
                    is_parameterized(getattr(w, 'value', None)) or any(
                        is_parameterized(o) for o in getattr(w, 'options', []))
                    for w in widgets):
                continue
            selector, toggle = widgets if len(widgets) == 2 else (widgets[0],
                                                                  None)

            def toggle_pane(change, parameter=pname):
                "Adds or removes subpanel from layout"
                parameterized = getattr(self.object, parameter)
                existing = [
                    p for p in self._expand_layout.objects
                    if isinstance(p, Param) and p.object is parameterized
                ]
                if existing:
                    old_panel = existing[0]
                    if not change.new:
                        old_panel._cleanup(final=old_panel._temporary)
                        self._expand_layout.pop(old_panel)
                elif change.new:
                    kwargs = {
                        k: v
                        for k, v in self.get_param_values()
                        if k not in ['name', 'object', 'parameters']
                    }
                    pane = Param(parameterized,
                                 name=parameterized.name,
                                 _temporary=True,
                                 **kwargs)
                    self._expand_layout.append(pane)

            def update_pane(change, parameter=pname):
                "Adds or removes subpanel from layout"
                layout = self._expand_layout
                existing = [
                    p for p in layout.objects
                    if isinstance(p, Param) and p.object is change.old
                ]

                if toggle:
                    toggle.disabled = not is_parameterized(change.new)
                if not existing:
                    return
                elif is_parameterized(change.new):
                    parameterized = change.new
                    kwargs = {
                        k: v
                        for k, v in self.get_param_values()
                        if k not in ['name', 'object', 'parameters']
                    }
                    pane = Param(parameterized,
                                 name=parameterized.name,
                                 _temporary=True,
                                 **kwargs)
                    layout[layout.objects.index(existing[0])] = pane
                else:
                    layout.pop(existing[0])

            watchers = [selector.param.watch(update_pane, 'value')]
            if toggle:
                watchers.append(toggle.param.watch(toggle_pane, 'active'))
            self._callbacks['instance'] += watchers

            if self.expand:
                if self.expand_button:
                    toggle.active = True
                else:
                    toggle_pane(namedtuple('Change', 'new')(True))

    @classmethod
    def applies(cls, obj):
        return (is_parameterized(obj)
                or isinstance(obj, param.parameterized.Parameters))

    @classmethod
    def widget_type(cls, pobj):
        ptype = type(pobj)
        for t in classlist(ptype)[::-1]:
            if t in cls._mapping:
                if isinstance(cls._mapping[t], types.FunctionType):
                    return cls._mapping[t](pobj)
                return cls._mapping[t]

    def widget(self, p_name):
        """Get widget for param_name"""
        p_obj = self.object.params(p_name)

        if self.widgets is None or p_name not in self.widgets:
            widget_class = self.widget_type(p_obj)
        else:
            widget_class = self.widgets[p_name]
        value = getattr(self.object, p_name)

        kw = dict(value=value, disabled=p_obj.constant)

        if self.label_formatter is not None:
            kw['name'] = self.label_formatter(p_name)
        else:
            kw['name'] = p_name

        if hasattr(p_obj, 'get_range'):
            options = p_obj.get_range()
            if not options and value is not None:
                options = [value]
            kw['options'] = options

        if hasattr(p_obj, 'get_soft_bounds'):
            bounds = p_obj.get_soft_bounds()
            if bounds[0] is not None:
                kw['start'] = bounds[0]
            if bounds[1] is not None:
                kw['end'] = bounds[1]
            if ('start' not in kw or 'end'
                    not in kw) and not issubclass(widget_class, LiteralInput):
                widget_class = LiteralInput

        kwargs = {k: v for k, v in kw.items() if k in widget_class.params()}
        widget = widget_class(**kwargs)
        watchers = self._callbacks['instance']
        if isinstance(p_obj, param.Action):
            widget.button_type = 'success'

            def action(change):
                value(self.object)

            watchers.append(widget.param.watch(action, 'clicks'))
        elif isinstance(widget, Toggle):
            pass
        else:
            widget.link(self.object, **{'value': p_name})

            def link(change, _updating=[]):
                key = (change.name, change.what)
                if key in _updating:
                    return

                _updating.append(key)
                updates = {}
                if change.what == 'constant':
                    updates['disabled'] = change.new
                elif change.what == 'precedence':
                    if change.new < 0 and widget in self._widget_box.objects:
                        self._widget_box.pop(widget)
                    elif change.new >= 0 and widget not in self._widget_box.objects:
                        precedence = lambda k: self.object.params(k).precedence
                        widgets = []
                        for k, ws in self._widgets.items():
                            if precedence(k) is None or precedence(
                                    k) >= self.display_threshold:
                                widgets += ws
                        self._widget_box.objects = widgets
                elif change.what == 'objects':
                    updates['options'] = p_obj.get_range()
                elif change.what == 'bounds':
                    start, end = p_obj.get_soft_bounds()
                    updates['start'] = start
                    updates['end'] = end
                else:
                    updates['value'] = change.new
                try:
                    widget.set_param(**updates)
                except:
                    raise
                finally:
                    _updating.pop(_updating.index(key))

            # Set up links to parameterized object
            watchers.append(self.object.param.watch(link, p_name, 'constant'))
            watchers.append(self.object.param.watch(link, p_name,
                                                    'precedence'))
            watchers.append(self.object.param.watch(link, p_name))
            if hasattr(p_obj, 'get_range'):
                watchers.append(
                    self.object.param.watch(link, p_name, 'objects'))
            if hasattr(p_obj, 'get_soft_bounds'):
                watchers.append(self.object.param.watch(
                    link, p_name, 'bounds'))

        options = kwargs.get('options', [])
        if isinstance(options, dict):
            options = options.values()
        if ((is_parameterized(value)
             or any(is_parameterized(o) for o in options))
                and (self.expand_button or
                     (self.expand_button is None and not self.expand))):
            toggle = Toggle(name='...',
                            button_type='primary',
                            disabled=not is_parameterized(value))
            return [widget, toggle]
        else:
            return [widget]

    def _cleanup(self, root=None, final=False):
        self.layout._cleanup(root, final)
        super(Param, self)._cleanup(root, final)

    def _get_widgets(self):
        """Return name,widget boxes for all parameters (i.e., a property sheet)"""
        params = [(p, pobj) for p, pobj in self.object.params().items()
                  if p in self.parameters or p == 'name']
        key_fn = lambda x: (x[1].precedence if x[1].precedence is not None
                            else self.default_precedence)
        sorted_precedence = sorted(params, key=key_fn)
        filtered = [(k, p) for (k, p) in sorted_precedence
                    if p.precedence is None
                    or p.precedence >= self.display_threshold]
        groups = itertools.groupby(filtered, key=key_fn)
        sorted_groups = [sorted(grp) for (k, grp) in groups]
        ordered_params = [el[0] for group in sorted_groups for el in group]

        # Format name specially
        ordered_params.pop(ordered_params.index('name'))
        if self.expand_layout is Tabs:
            widgets = []
        elif self.show_name:
            name = self.object.name
            match = re.match(r'(.)+(\d){5}', name)
            name = name[:-5] if match else name
            widgets = [('name', [StaticText(value='<b>{0}</b>'.format(name))])]
        else:
            widgets = []
        widgets += [(pname, self.widget(pname)) for pname in ordered_params]
        return OrderedDict(widgets)

    def _get_model(self, doc, root=None, parent=None, comm=None):
        return self.layout._get_model(doc, root, parent, comm)
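
A brief usage sketch of the pane above, assuming it is exposed as panel.Param; the Parameterized class is invented.

import param
import panel as pn

class Settings(param.Parameterized):
    threshold = param.Number(default=0.5, bounds=(0, 1))
    method = param.ObjectSelector(default='mean', objects=['mean', 'median'])
    label = param.String(default='result')

# Each parameter is mapped to a widget via _mapping, e.g. Number -> FloatSlider
pane = pn.Param(Settings(), parameters=['threshold', 'method'],
                show_name=False, width=250)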
Exemple #22
0
class decimate(Operation):
    """
    Decimates any column based Element to a specified number of random
    rows if the current element defined by the x_range and y_range
    contains more than max_samples. By default the operation returns a
    DynamicMap with a RangeXY stream allowing dynamic downsampling.
    """

    dynamic = param.Boolean(default=True,
                            doc="""
       Enables dynamic processing by default.""")

    link_inputs = param.Boolean(default=True,
                                doc="""
         By default, the link_inputs parameter is set to True so that
         when applying decimate, backends that support linked streams
         update RangeXY streams on the inputs of the decimate operation.""")

    max_samples = param.Integer(default=5000,
                                doc="""
        Maximum number of samples to display at the same time.""")

    random_seed = param.Integer(default=42,
                                doc="""
        Seed used to initialize randomization.""")

    streams = param.List(default=[RangeXY],
                         doc="""
        List of streams that are applied if dynamic=True, allowing
        for dynamic interaction with the plot.""")

    x_range = param.NumericTuple(default=None,
                                 length=2,
                                 doc="""
       The x_range as a tuple of min and max x-value. Auto-ranges
       if set to None.""")

    y_range = param.NumericTuple(default=None,
                                 length=2,
                                 doc="""
       The y_range as a tuple of min and max y-value. Auto-ranges
       if set to None.""")

    def _process_layer(self, element, key=None):
        if not isinstance(element, Dataset):
            raise ValueError("Cannot downsample non-Dataset types.")
        if element.interface not in column_interfaces:
            element = element.clone(tuple(element.columns().values()))

        xstart, xend = self.p.x_range if self.p.x_range else element.range(0)
        ystart, yend = self.p.y_range if self.p.y_range else element.range(1)

        # Slice element to current ranges
        xdim, ydim = element.dimensions(label=True)[0:2]
        sliced = element.select(**{xdim: (xstart, xend), ydim: (ystart, yend)})

        if len(sliced) > self.p.max_samples:
            prng = np.random.RandomState(self.p.random_seed)
            return sliced.iloc[prng.choice(len(sliced), self.p.max_samples,
                                           False)]
        return sliced

    def _process(self, element, key=None):
        return element.map(self._process_layer, Element)
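
A minimal sketch of applying the operation above to a dense element, assuming decimate is importable from holoviews.operation; the data is random.

import numpy as np
import holoviews as hv
from holoviews.operation import decimate
hv.extension('bokeh')

points = hv.Points(np.random.randn(100000, 2))
# Returns a DynamicMap with a RangeXY stream that re-decimates on zoom and pan
downsampled = decimate(points, max_samples=2000)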
Exemple #23
0
class Tabulator(BaseTable):
    """
    The Tabulator widget wraps the [Tabulator](http://tabulator.info/)
    table library to provide a full-featured interactive table.
    """

    frozen_columns = param.List(default=[],
                                doc="""
        List indicating the columns to freeze. The column(s) may be
        selected by name or index.""")

    frozen_rows = param.List(default=[],
                             doc="""
        List indicating the rows to freeze, specified by index.
        Frozen rows are prevented from scrolling out of frame;
        negative indices are counted from the end of the table.""")

    groups = param.Dict(default={},
                        doc="""
        Dictionary mapping group names to lists of columns to group together.""")

    groupby = param.List(default=[],
                         doc="""
        Groups rows in the table by one or more columns.""")

    hidden_columns = param.List(default=[],
                                doc="""
        List of columns to hide.""")

    layout = param.ObjectSelector(default='fit_data_table',
                                  objects=[
                                      'fit_data', 'fit_data_fill',
                                      'fit_data_stretch', 'fit_data_table',
                                      'fit_columns'
                                  ])

    pagination = param.ObjectSelector(default=None,
                                      allow_None=True,
                                      objects=['local', 'remote'])

    page = param.Integer(default=1,
                         doc="""
        Currently selected page (indexed starting at 1).""")

    page_size = param.Integer(default=20,
                              bounds=(1, None),
                              doc="""
        Number of rows to render per page.""")

    row_height = param.Integer(default=30,
                               doc="""
        The height of each table row.""")

    selectable = param.ObjectSelector(default=True,
                                      objects=[True, False, 'checkbox'],
                                      doc="""
        Whether a table's rows can be selected or not. Multiple
        selection is allowed and can be achieved by either clicking
        multiple checkboxes (if enabled) or using Shift + click on
        rows.""")

    sorters = param.List(default=[],
                         doc="""
        A list of sorters to apply during pagination.""")

    theme = param.ObjectSelector(default="simple",
                                 objects=TABULATOR_THEMES,
                                 doc="""
        Tabulator CSS theme to apply to table.""")

    _widget_type = _BkTabulator

    _data_params = ['value', 'page', 'page_size', 'pagination', 'sorters']

    _config_params = ['frozen_columns', 'groups', 'selectable']

    _manual_params = BaseTable._manual_params + _config_params

    def __init__(self, value=None, **params):
        configuration = params.pop('configuration', {})
        self.style = None
        super().__init__(value=value, **params)
        self._configuration = configuration

    def _validate(self, event):
        super()._validate(event)
        if self.value is not None:
            todo = []
            if self.style is not None:
                todo = self.style._todo
            self.style = self.value.style
            self.style._todo = todo

    def _process_param_change(self, msg):
        msg = super()._process_param_change(msg)
        if 'frozen_rows' in msg:
            length = self._length
            msg['frozen_rows'] = [
                length + r if r < 0 else r for r in msg['frozen_rows']
            ]
        if 'theme' in msg:
            if 'bootstrap' in self.theme:
                msg['theme_url'] = THEME_URL + 'bootstrap/'
            elif 'materialize' in self.theme:
                msg['theme_url'] = THEME_URL + 'materialize/'
            elif 'semantic-ui' in self.theme:
                msg['theme_url'] = THEME_URL + 'semantic-ui/'
            elif 'bulma' in self.theme:
                msg['theme_url'] = THEME_URL + 'bulma/'
            else:
                msg['theme_url'] = THEME_URL
            theme = 'tabulator' if self.theme == 'default' else 'tabulator_' + self.theme
            _BkTabulator.__css__ = [msg['theme_url'] + theme + '.min.css']
        return msg

    def _update_columns(self, event, model):
        if event.name not in self._config_params:
            super()._update_columns(event, model)
            if (event.name in ('editors', 'formatters') and not any(
                    isinstance(v, (str, dict)) for v in event.new.values())):
                # If no tabulator editor/formatter was changed we can skip
                # update to config
                return
        model.configuration = self._get_configuration(model.columns)

    def _sort_df(self, df):
        if not self.sorters:
            return df
        return df.sort_values(
            [s['field'] for s in self.sorters],
            ascending=[s['dir'] == 'asc' for s in self.sorters])

    def _get_data(self):
        if self.pagination != 'remote' or self.value is None:
            return super()._get_data()
        df = self._filter_dataframe(self.value)
        df = self._sort_df(df)
        nrows = self.page_size
        start = (self.page - 1) * nrows
        page_df = df.iloc[start:start + nrows]
        data = ColumnDataSource.from_df(page_df).items()
        return df, {k if isinstance(k, str) else str(k): v for k, v in data}

    @property
    def _length(self):
        return len(self._processed)

    def _get_style_data(self):
        if self.value is None:
            return {}
        if self.pagination == 'remote':
            nrows = self.page_size
            start = (self.page - 1) * nrows
            df = self.value.iloc[start:start + nrows]
        else:
            df = self.value

        styler = df.style
        styler._todo = self.style._todo
        styler._compute()
        offset = len(self.indexes) + int(self.selectable == 'checkbox')

        styles = {}
        for (r, c), s in styler.ctx.items():
            if r not in styles:
                styles[int(r)] = {}
            styles[int(r)][offset + int(c)] = s
        return styles

    def _update_style(self):
        styles = self._get_style_data()
        for ref, (m, _) in self._models.items():
            m.styles = styles
            push_on_root(ref)

    @updating
    def _stream(self, stream, rollover=None, follow=True):
        if self.pagination == 'remote':
            length = self._length
            nrows = self.page_size
            max_page = length // nrows + bool(length % nrows)
            if self.page != max_page:
                return
        super()._stream(stream, rollover)
        self._update_style()

    def stream(self,
               stream_value,
               rollover=None,
               reset_index=True,
               follow=True):
        for ref, (m, _) in self._models.items():
            m.follow = follow
            push_on_root(ref)
        if follow and self.pagination:
            length = self._length
            nrows = self.page_size
            self.page = length // nrows + bool(length % nrows)
        super().stream(stream_value, rollover, reset_index)

    @updating
    def _patch(self, patch):
        if self.pagination == 'remote':
            nrows = self.page_size
            start = (self.page - 1) * nrows
            end = start + nrows
            filtered = {}
            for c, values in patch.items():
                values = [(ind, val) for (ind, val) in values
                          if ind >= start and ind < end]
                if values:
                    filtered[c] = values
            patch = filtered
        if not patch:
            return
        super()._patch(patch)
        if self.pagination == 'remote':
            self._update_style()

    def _update_cds(self, *events):
        if self._updating:
            return
        super()._update_cds(*events)
        if self.pagination:
            self._update_max_page()
            self._update_selected()
        self._update_style()

    def _update_max_page(self):
        length = self._length
        nrows = self.page_size
        max_page = length // nrows + bool(length % nrows)
        self.param.page.bounds = (1, max_page)
        for ref, (m, _) in self._models.items():
            m.max_page = max_page
            push_on_root(ref)

    def _update_selected(self, *events, indices=None):
        if self._updating:
            return
        kwargs = {}
        if self.pagination == 'remote':
            index = self.value.iloc[self.selection].index
            indices = []
            for v in index.values:
                try:
                    indices.append(self._processed.index.get_loc(v))
                except KeyError:
                    continue
            nrows = self.page_size
            start = (self.page - 1) * nrows
            end = start + nrows
            kwargs['indices'] = [
                ind - start for ind in indices if ind >= start and ind < end
            ]
        super()._update_selected(*events, **kwargs)

    def _update_column(self, column, array):
        if self.pagination != 'remote':
            self.value[column] = array
            return
        nrows = self.page_size
        start = (self.page - 1) * nrows
        end = start + nrows
        if self.sorters:
            index = self._processed.iloc[start:end].index.values
            self.value[column].loc[index] = array
        else:
            self.value[column].iloc[start:end] = array

    def _update_selection(self, indices):
        if self.pagination != 'remote':
            self.selection = indices
        nrows = self.page_size
        start = (self.page - 1) * nrows
        index = self._processed.iloc[[start + ind for ind in indices]].index
        indices = []
        for v in index.values:
            try:
                indices.append(self.value.index.get_loc(v))
            except KeyError:
                continue
        self.selection = indices

    def _get_properties(self, source):
        props = {
            p: getattr(self, p)
            for p in list(Layoutable.param) if getattr(self, p) is not None
        }
        if self.pagination:
            length = self.page_size
        else:
            length = self._length
        if props.get('height', None) is None:
            props['height'] = length * self.row_height + 30
        props['source'] = source
        props['styles'] = self._get_style_data()
        props['columns'] = columns = self._get_columns()
        props['configuration'] = self._get_configuration(columns)
        props['page'] = self.page
        props['pagination'] = self.pagination
        props['page_size'] = self.page_size
        props['layout'] = self.layout
        props['groupby'] = self.groupby
        props['hidden_columns'] = self.hidden_columns
        props['editable'] = not self.disabled
        process = {'theme': self.theme, 'frozen_rows': self.frozen_rows}
        props.update(self._process_param_change(process))
        if self.pagination:
            length = 0 if self._processed is None else len(self._processed)
            props['max_page'] = length // self.page_size + bool(
                length % self.page_size)
        return props

    def _get_model(self, doc, root=None, parent=None, comm=None):
        model = super()._get_model(doc, root, parent, comm)
        if root is None:
            root = model
        self._link_props(model, ['page', 'sorters'], doc, root, comm)
        return model

    def _config_columns(self, column_objs):
        column_objs = list(column_objs)
        groups = {}
        columns = []
        if self.selectable == 'checkbox':
            columns.append({
                "formatter": "rowSelection",
                "titleFormatter": "rowSelection",
                "hozAlign": "center",
                "headerSort": False,
                "frozen": True
            })

        ordered = []
        for col in self.frozen_columns:
            if isinstance(col, int):
                ordered.append(column_objs.pop(col))
            else:
                cols = [c for c in column_objs if c.field == col]
                if cols:
                    ordered.append(cols[0])
                    column_objs.remove(cols[0])
        ordered += column_objs

        for i, column in enumerate(ordered):
            matching_groups = [
                group for group, group_cols in self.groups.items()
                if column.field in group_cols
            ]
            col_dict = {'field': column.field}
            formatter = self.formatters.get(column.field)
            if isinstance(formatter, str):
                col_dict['formatter'] = formatter
            elif isinstance(formatter, dict):
                formatter = dict(formatter)
                col_dict['formatter'] = formatter.pop('type')
                col_dict['formatterParams'] = formatter
            editor = self.editors.get(column.field)
            if isinstance(editor, str):
                col_dict['editor'] = editor
            elif isinstance(editor, dict):
                editor = dict(editor)
                col_dict['editor'] = editor.pop('type')
                col_dict['editorParams'] = editor
            if column.field in self.frozen_columns or i in self.frozen_columns:
                col_dict['frozen'] = True
            if matching_groups:
                group = matching_groups[0]
                if group in groups:
                    groups[group]['columns'].append(col_dict)
                    continue
                group_dict = {'title': group, 'columns': [col_dict]}
                groups[group] = group_dict
                columns.append(group_dict)
            else:
                columns.append(col_dict)
        return columns

    def _get_configuration(self, columns):
        """
        Returns the Tabulator configuration.
        """
        configuration = dict(self._configuration)
        if 'selectable' not in configuration:
            configuration['selectable'] = self.selectable
        if self.groups and 'columns' in configuration:
            raise ValueError("Groups must be defined either explicitly "
                             "or via the configuration, not both.")
        configuration['columns'] = self._config_columns(columns)
        return configuration

    def download(self, filename='table.csv'):
        """
        Triggers downloading of the table as a CSV or JSON.

        Arguments
        ---------
        filename: str
            The filename to save the table as.
        """
        for ref, (m, _) in self._models.items():
            m.filename = filename
            push_on_root(ref)
        for ref, (m, _) in self._models.items():
            m.download = not m.download
            push_on_root(ref)

    def download_menu(self, text_kwargs={}, button_kwargs={}):
        """
        Returns a menu containing a TextInput and Button widget to set
        the filename and trigger a client-side download of the data.

        Arguments
        ---------
        text_kwargs: dict
            Keyword arguments passed to the TextInput constructor
        button_kwargs: dict
            Keyword arguments passed to the Button constructor

        Returns
        -------
        filename: TextInput
            The TextInput widget setting a filename.
        button: Button
            The Button that triggers a download.
        """
        button_kwargs = dict(button_kwargs)
        if 'name' not in button_kwargs:
            button_kwargs['name'] = 'Download'
        button = Button(**button_kwargs)
        button.js_on_click({'table': self},
                           code="""
        table.download = !table.download
        """)

        text_kwargs = dict(text_kwargs)
        if 'name' not in text_kwargs:
            text_kwargs['name'] = 'Filename'
        if 'value' not in text_kwargs:
            text_kwargs['value'] = 'table.csv'
        filename = TextInput(**text_kwargs)
        filename.jscallback({'table': self},
                            value="""
        table.filename = cb_obj.value
        """)
        return filename, button
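
A short usage sketch of the widget above, assuming it is exposed as panel.widgets.Tabulator; the DataFrame is invented.

import pandas as pd
import panel as pn

df = pd.DataFrame({'name': ['a', 'b', 'c', 'd'], 'value': [1, 2, 3, 4]})
table = pn.widgets.Tabulator(df, pagination='remote', page_size=2,
                             frozen_columns=['name'], selectable='checkbox')
# download_menu returns a TextInput and Button wired up for client-side download
filename_input, download_button = table.download_menu()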
Exemple #24
0
class Dimensioned(LabelledData):
    """
    Dimensioned is a base class that allows the data contents of a
    class to be associated with dimensions. The contents associated
    with dimensions may be partitioned into one of three types

    * key dimensions: These are the dimensions that can be indexed via
                      the __getitem__ method. Dimension objects
                      supporting key dimensions must support indexing
                      over these dimensions and may also support
                      slicing. This list ordering of dimensions
                      describes the positional components of each
                      multi-dimensional indexing operation.

                      For instance, if the key dimension names are
                      'weight' followed by 'height' for Dimensioned
                      object 'obj', then obj[80,175] indexes a weight
                      of 80 and height of 175.

                      Accessed using either kdims or key_dimensions.

    * value dimensions: These dimensions correspond to any data held
                        on the Dimensioned object not in the key
                        dimensions. Indexing by value dimension is
                        supported by dimension name (when there are
                        multiple possible value dimensions); no
                        slicing semantics is supported and all the
                        data associated with that dimension will be
                        returned at once. Note that it is not possible
                        to mix value dimensions and deep dimensions.

                        Accessed using either vdims or value_dimensions.


    * deep dimensions: These are dynamically computed dimensions that
                       belong to other Dimensioned objects that are
                       nested in the data. Objects that support this
                       should enable the _deep_indexable flag. Note
                       that it is not possible to mix value dimensions
                       and deep dimensions.

                       Accessed using either ddims or deep_dimensions.

    The Dimensioned class supports generalized methods for finding the
    range and type of values along a particular Dimension. The range
    method relies on the appropriate implementation of the
    dimension_values methods on subclasses.

    The index of an arbitrary dimension is its positional index in the
    list of all dimensions, starting with the key dimensions, followed
    by the value dimensions and ending with the deep dimensions.
    """

    cdims = param.Dict(default=OrderedDict(),
                       doc="""
       The constant dimensions defined as a dictionary of Dimension:value
       pairs providing additional dimension information about the object.

       Aliased with constant_dimensions.""")

    kdims = param.List(bounds=(0, None),
                       constant=True,
                       doc="""
       The key dimensions defined as list of dimensions that may be
       used in indexing (and potential slicing) semantics. The order
       of the dimensions listed here determines the semantics of each
       component of a multi-dimensional indexing operation.

       Aliased with key_dimensions.""")

    vdims = param.List(bounds=(0, None),
                       constant=True,
                       doc="""
       The value dimensions defined as the list of dimensions used to
       describe the components of the data. If multiple value
       dimensions are supplied, a particular value dimension may be
       indexed by name after the key dimensions.

       Aliased with value_dimensions.""")

    group = param.String(default='Dimensioned',
                         constant=True,
                         doc="""
       A string describing the data wrapped by the object.""")

    __abstract = True
    _sorted = False
    _dim_groups = ['kdims', 'vdims', 'cdims', 'ddims']
    _dim_aliases = dict(key_dimensions='kdims',
                        value_dimensions='vdims',
                        constant_dimensions='cdims',
                        deep_dimensions='ddims')

    # Long-name aliases

    @property
    def key_dimensions(self):
        return self.kdims

    @property
    def value_dimensions(self):
        return self.vdims

    @property
    def constant_dimensions(self):
        return self.cdims

    @property
    def deep_dimensions(self):
        return self.ddims

    def __init__(self, data, **params):
        for group in self._dim_groups + list(self._dim_aliases.keys()):
            if group in ['deep_dimensions', 'ddims']: continue
            if group in params:
                if group in self._dim_aliases:
                    params[self._dim_aliases[group]] = params.pop(group)
                    group = self._dim_aliases[group]
                if group == 'cdims':
                    dimensions = {
                        d if isinstance(d, Dimension) else Dimension(d): val
                        for d, val in params.pop(group).items()
                    }
                else:
                    dimensions = [
                        d if isinstance(d, Dimension) else Dimension(d)
                        for d in params.pop(group)
                    ]
                params[group] = dimensions
        super(Dimensioned, self).__init__(data, **params)
        self.ndims = len(self.kdims)
        cdims = [(d.name, val) for d, val in self.cdims.items()]
        self._cached_constants = OrderedDict(cdims)
        self._cached_index_names = [d.name for d in self.kdims]
        self._cached_value_names = [d.name for d in self.vdims]
        self._settings = None

    def _valid_dimensions(self, dimensions):
        """Validates key dimension input
        
        Returns kdims if no dimensions are specified"""
        if dimensions is None:
            dimensions = self.kdims
        elif not isinstance(dimensions, list):
            dimensions = [dimensions]

        valid_dimensions = []
        for dim in dimensions:
            if isinstance(dim, Dimension): dim = dim.name
            if dim not in self._cached_index_names:
                raise Exception("Supplied dimensions %s not found." % dim)
            valid_dimensions.append(dim)
        return valid_dimensions

    @property
    def ddims(self):
        "The list of deep dimensions"
        if self._deep_indexable and len(self):
            return self.values()[0].dimensions()
        else:
            return []

    def dimensions(self, selection='all', label=False):
        """
        Provides convenient access to Dimensions on nested
        Dimensioned objects. Dimensions can be selected
        by their type, i.e. 'key' or 'value' dimensions.
        By default 'all' dimensions are returned.
        """
        lambdas = {
            'k': (lambda x: x.kdims, {
                'full_breadth': False
            }),
            'v': (lambda x: x.vdims, {}),
            'c': (lambda x: x.cdims, {})
        }
        aliases = {'key': 'k', 'value': 'v', 'constant': 'c'}
        if selection == 'all':
            dims = [
                dim for group in self._dim_groups
                for dim in getattr(self, group)
            ]
        elif isinstance(selection, list):
            dims = [
                dim for group in selection
                for dim in getattr(self, '%sdims' % aliases.get(group))
            ]
        elif aliases.get(selection) in lambdas:
            selection = aliases.get(selection, selection)
            lmbd, kwargs = lambdas[selection]
            key_traversal = self.traverse(lmbd, **kwargs)
            dims = [dim for keydims in key_traversal for dim in keydims]
        else:
            raise KeyError("Invalid selection %r, valid selections include "
                           "'all', 'value' and 'key' dimensions" % selection)
        return [dim.name if label else dim for dim in dims]

    def get_dimension(self, dimension, default=None):
        "Access a Dimension object by name or index."
        all_dims = self.dimensions()
        if isinstance(dimension, Dimension):
            dimension = dimension.name
        if isinstance(dimension, int) and dimension < len(all_dims):
            return all_dims[dimension]
        else:
            return {dim.name: dim for dim in all_dims}.get(dimension, default)

    def get_dimension_index(self, dim):
        """
        Returns the index of the requested dimension.
        """
        if isinstance(dim, Dimension): dim = dim.name
        if isinstance(dim, int):
            if dim < len(self.dimensions()):
                return dim
            else:
                raise IndexError('Dimension index out of bounds')
        try:
            sanitized = {
                sanitize_identifier(kd): kd
                for kd in self._cached_index_names
            }
            return [d.name
                    for d in self.dimensions()].index(sanitized.get(dim, dim))
        except ValueError:
            raise Exception("Dimension %s not found in %s." %
                            (dim, self.__class__.__name__))

    def get_dimension_type(self, dim):
        """
        Returns the declared Dimension type if one was specified;
        otherwise, if the dimension_values types are consistent, that
        type is returned, and None is returned if they are mixed.
        """
        dim_obj = self.get_dimension(dim)
        if dim_obj and dim_obj.type is not None:
            return dim_obj.type
        dim_vals = [type(v) for v in self.dimension_values(dim)]
        if len(set(dim_vals)) == 1:
            return dim_vals[0]
        else:
            return None

    def __getitem__(self, key):
        """
        Multi-dimensional indexing semantics is determined by the list
        of key dimensions. For instance, the first indexing component
        will index the first key dimension.

        After the key dimensions are given, *either* a value dimension
        name may follow (if there are multiple value dimensions) *or*
        deep dimensions may then be listed (for applicable deep
        dimensions).
        """
        return self

    def select(self, selection_specs=None, **kwargs):
        """
        Allows slicing or indexing into the Dimensioned object
        by supplying the dimension and index/slice as key
        value pairs. Select descends recursively through the
        data structure applying the key dimension selection.
        The 'value' keyword allows selecting the
        value dimensions on objects which have any declared.

        The selection may also be selectively applied to
        specific objects by supplying the selection_specs
        as an iterable of type.group.label specs, types or
        functions.
        """

        # Apply all indexes applying on this object
        val_dim = ['value'] if self.vdims else []
        sanitized = {
            sanitize_identifier(kd): kd
            for kd in self._cached_index_names
        }
        local_dims = (self._cached_index_names + list(sanitized.keys()) +
                      val_dim)
        local_kwargs = {k: v for k, v in kwargs.items() if k in local_dims}

        # Check selection_spec applies
        if selection_specs is not None:
            matches = any(self.matches(spec) for spec in selection_specs)
        else:
            matches = True

        if local_kwargs and matches:
            select = [slice(None) for i in range(self.ndims)]
            for dim, val in local_kwargs.items():
                if dim == 'value':
                    select += [val]
                else:
                    if isinstance(val, tuple): val = slice(*val)
                    dim = sanitized.get(dim, dim)
                    select[self.get_dimension_index(dim)] = val
            if self._deep_indexable:
                selection = self.get(tuple(select),
                                     self.clone(shared_data=False))
            else:
                selection = self[tuple(select)]
        else:
            selection = self

        if type(selection) is not type(self):
            # Apply the selection on the selected object of a different type
            val_dim = ['value'] if selection.vdims else []
            key_dims = selection.dimensions('key', label=True) + val_dim
            if any(kw in key_dims for kw in kwargs):
                selection = selection.select(selection_specs, **kwargs)
        elif selection._deep_indexable:
            # Apply the deep selection on each item in local selection
            items = []
            for k, v in selection.items():
                val_dim = ['value'] if v.vdims else []
                dims = list(
                    zip(*[(sanitize_identifier(kd), kd)
                          for kd in v.dimensions('key', label=True)]))
                kdims, skdims = dims if dims else ([], [])
                key_dims = list(kdims) + list(skdims) + val_dim
                if any(kw in key_dims for kw in kwargs):
                    items.append((k, v.select(selection_specs, **kwargs)))
                else:
                    items.append((k, v))
            selection = selection.clone(items)
        return selection

    def dimension_values(self, dimension):
        """
        Returns the values along the specified dimension. This method
        must be implemented for all Dimensioned types.
        """
        val = self._cached_constants.get(dimension, None)
        if val:
            return val
        else:
            raise Exception("Dimension %s not found in %s." %
                            (dimension, self.__class__.__name__))

    def range(self, dimension, data_range=True):
        """
        Returns the range of values along the specified dimension.

        If data_range is True, the data may be used to try and infer
        the appropriate range. Otherwise, (None,None) is returned to
        indicate that no range is defined.
        """
        dimension = self.get_dimension(dimension)
        if dimension is None:
            return (None, None)
        if dimension.range != (None, None):
            return dimension.range
        elif not data_range:
            return (None, None)
        soft_range = [r for r in dimension.soft_range if r is not None]
        if dimension in self.kdims or dimension in self.vdims:
            dim_vals = self.dimension_values(dimension.name)
            return find_range(dim_vals, soft_range)
        dname = dimension.name
        match_fn = lambda x: dname in x.dimensions(['key', 'value'], True)
        range_fn = lambda x: x.range(dname)
        ranges = self.traverse(range_fn, [match_fn])
        drange = max_range(ranges)
        return drange

    def __repr__(self):
        return PrettyPrinter.pprint(self)

    def __call__(self, options=None, **kwargs):
        """
        Apply the supplied options to a clone of the object which is
        then returned. Note that if no options are supplied at all,
        all ids are reset.
        """
        groups = set(Store.options().groups.keys())
        if kwargs and set(kwargs) <= groups:
            if not all(isinstance(v, dict) for v in kwargs.values()):
                raise Exception(
                    "The %s options must be specified using dictionary groups"
                    % ','.join(repr(k) for k in kwargs.keys()))

            # Check whether the user is specifying targets (such as 'Image.Foo')
            entries = Store.options().children
            targets = [
                k.split('.')[0] in entries for grp in kwargs.values()
                for k in grp
            ]
            if any(targets) and not all(targets):
                raise Exception(
                    "Cannot mix target specification keys such as 'Image' with non-target keywords."
                )
            elif not any(targets):
                # No targets specified - add current object as target
                sanitized_group = sanitize_identifier(self.group)
                if self.label:
                    identifier = ('%s.%s.%s' %
                                  (self.__class__.__name__, sanitized_group,
                                   sanitize_identifier(self.label)))
                elif sanitized_group != self.__class__.__name__:
                    identifier = '%s.%s' % (self.__class__.__name__,
                                            sanitized_group)
                else:
                    identifier = self.__class__.__name__

                kwargs = {k: {identifier: v} for k, v in kwargs.items()}

        if options is None and kwargs == {}:
            deep_clone = self.map(lambda x: x.clone(id=None))
        else:
            deep_clone = self.map(lambda x: x.clone(id=x.id))
        StoreOptions.set_options(deep_clone, options, **kwargs)
        return deep_clone
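
The dimension machinery defined by Dimensioned is normally exercised through concrete subclasses; a hedged sketch using holoviews.Table, with illustrative column names, shows how kdims, vdims, indexing and ranges relate.

# Hedged sketch, not part of the original example; assumes holoviews is installed.
import holoviews as hv

table = hv.Table(
    {'weight': [60, 70, 80], 'height': [160, 170, 175], 'age': [25, 30, 35]},
    kdims=['weight', 'height'], vdims=['age'])

# Key dimensions drive indexing; value dimensions describe the data.
print(table.kdims)                            # [Dimension('weight'), Dimension('height')]
print(table.dimensions('value', label=True))  # ['age']
print(table.get_dimension_index('height'))    # 1
print(table.range('age'))                     # (25, 35)
print(table.select(weight=(65, 85)))          # rows with weight between 65 and 85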
Example #25
0
class Graph(Dataset, Element2D):
    """
    Graph is a high-level Element representing both nodes and edges.
    A Graph may be defined in an abstract form representing just
    the abstract edges between nodes and optionally may be made
    concrete by supplying a Nodes Element defining the concrete
    positions of each node. If the node positions are supplied
    the EdgePaths (defining the concrete edges) can be inferred
    automatically or supplied explicitly.

    The constructor accepts regular columnar data defining the edges
    or a tuple of the abstract edges and nodes, or a tuple of the
    abstract edges, nodes, and edgepaths.
    """

    group = param.String(default='Graph', constant=True)

    kdims = param.List(default=[Dimension('start'),
                                Dimension('end')],
                       bounds=(2, 2))

    def __init__(self, data, kdims=None, vdims=None, **params):
        if isinstance(data, tuple):
            data = data + (None, ) * (3 - len(data))
            edges, nodes, edgepaths = data
        else:
            edges, nodes, edgepaths = data, None, None
        if nodes is not None:
            node_info = None
            if isinstance(nodes, Nodes):
                pass
            elif not isinstance(nodes, Dataset) or nodes.ndims == 3:
                nodes = Nodes(nodes)
            else:
                node_info = nodes
                nodes = None
        else:
            node_info = None
        if edgepaths is not None and not isinstance(edgepaths, EdgePaths):
            edgepaths = EdgePaths(edgepaths)
        self._nodes = nodes
        self._edgepaths = edgepaths
        super(Graph, self).__init__(edges, kdims=kdims, vdims=vdims, **params)
        if node_info is not None:
            self._add_node_info(node_info)
        self._validate()
        self.redim = redim_graph(self, mode='dataset')

    def _add_node_info(self, node_info):
        nodes = self.nodes.clone(datatype=['pandas', 'dictionary'])
        if isinstance(node_info, Nodes):
            nodes = nodes.redim(**dict(
                zip(nodes.dimensions('key', label=True), node_info.kdims)))

        if not node_info.kdims and len(node_info) != len(nodes):
            raise ValueError("The supplied node data does not match "
                             "the number of nodes defined by the edges. "
                             "Ensure that the number of nodes match"
                             "or supply an index as the sole key "
                             "dimension to allow the Graph to merge "
                             "the data.")

        if pd is None:
            if node_info.kdims and len(node_info) != len(nodes):
                raise ValueError("Graph cannot merge node data on index "
                                 "dimension without pandas. Either ensure "
                                 "the node data matches the order of nodes "
                                 "as they appear in the edge data or install "
                                 "pandas.")
            dimensions = nodes.dimensions()
            for d in node_info.vdims:
                if d in dimensions:
                    continue
                nodes = nodes.add_dimension(d,
                                            len(nodes.vdims),
                                            node_info.dimension_values(d),
                                            vdim=True)
        else:
            left_on = nodes.kdims[-1].name
            node_info_df = node_info.dframe()
            node_df = nodes.dframe()
            if node_info.kdims:
                idx = node_info.kdims[-1]
            else:
                idx = Dimension('index')
                node_info_df = node_info_df.reset_index()
            if 'index' in node_info_df.columns and not idx.name == 'index':
                node_df = node_df.rename(columns={'index': '__index'})
                left_on = '__index'
            cols = [
                c for c in node_info_df.columns
                if c not in node_df.columns or c == idx.name
            ]
            node_info_df = node_info_df[cols]
            node_df = pd.merge(node_df,
                               node_info_df,
                               left_on=left_on,
                               right_on=idx.name,
                               how='left')
            nodes = nodes.clone(node_df,
                                kdims=nodes.kdims[:2] + [idx],
                                vdims=node_info.vdims)

        self._nodes = nodes

    def _validate(self):
        if self._edgepaths is None:
            return
        mismatch = []
        for kd1, kd2 in zip(self.nodes.kdims, self.edgepaths.kdims):
            if kd1 != kd2:
                mismatch.append('%s != %s' % (kd1, kd2))
        if mismatch:
            raise ValueError('Ensure that the first two key dimensions on '
                             'Nodes and EdgePaths match: %s' %
                             ', '.join(mismatch))
        npaths = len(self._edgepaths.data)
        nedges = len(self)
        if nedges != npaths:
            mismatch = True
            if npaths == 1:
                edges = self.edgepaths.split()[0]
                vals = edges.dimension_values(0)
                npaths = len(np.where(np.isnan(vals))[0])
                if not np.isnan(vals[-1]):
                    npaths += 1
                mismatch = npaths != nedges
            if mismatch:
                raise ValueError('Ensure that the number of edges supplied '
                                 'to the Graph (%d) matches the number of '
                                 'edgepaths (%d)' % (nedges, npaths))

    def clone(self,
              data=None,
              shared_data=True,
              new_type=None,
              *args,
              **overrides):
        if data is None:
            data = (self.data, self.nodes)
            if self._edgepaths:
                data = data + (self.edgepaths, )
            overrides['plot_id'] = self._plot_id
        elif not isinstance(data, tuple):
            data = (data, self.nodes)
            if self._edgepaths:
                data = data + (self.edgepaths, )
        return super(Graph, self).clone(data, shared_data, new_type, *args,
                                        **overrides)

    def select(self,
               selection_specs=None,
               selection_mode='edges',
               **selection):
        """
        Allows selecting data by the slices, sets and scalar values
        along a particular dimension. The indices should be supplied as
        keywords mapping between the selected dimension and
        value. Additionally selection_specs (taking the form of a list
        of type.group.label strings, types or functions) may be
        supplied, which will ensure the selection is only applied if the
        specs match the selected object.

        Selecting by a node dimension selects all edges and nodes that are
        connected to the selected nodes. To select only edges between the
        selected nodes, set the selection_mode to 'nodes'.
        """
        selection = {
            dim: sel
            for dim, sel in selection.items()
            if dim in self.dimensions('ranges') + ['selection_mask']
        }
        if (selection_specs
                and not any(self.matches(sp) for sp in selection_specs)
                or not selection):
            return self

        index_dim = self.nodes.kdims[2].name
        dimensions = self.kdims + self.vdims
        node_selection = {
            index_dim: v
            for k, v in selection.items() if k in self.kdims
        }
        nodes = self.nodes.select(**dict(selection, **node_selection))
        selection = {k: v for k, v in selection.items() if k in dimensions}

        # Compute mask for edges if nodes were selected on
        nodemask = None
        if len(nodes) != len(self.nodes):
            xdim, ydim = dimensions[:2]
            indices = list(nodes.dimension_values(2, False))
            if selection_mode == 'edges':
                mask1 = self.interface.select_mask(self, {xdim.name: indices})
                mask2 = self.interface.select_mask(self, {ydim.name: indices})
                nodemask = (mask1 | mask2)
                nodes = self.nodes
            else:
                nodemask = self.interface.select_mask(self, {
                    xdim.name: indices,
                    ydim.name: indices
                })

        # Compute mask for edge selection
        mask = None
        if selection:
            mask = self.interface.select_mask(self, selection)

        # Combine masks
        if nodemask is not None:
            if mask is not None:
                mask &= nodemask
            else:
                mask = nodemask

        # Apply edge mask
        if mask is not None:
            data = self.interface.select(self, mask)
            if not np.all(mask):
                new_graph = self.clone((data, nodes))
                source = new_graph.dimension_values(0, expanded=False)
                target = new_graph.dimension_values(1, expanded=False)
                unique_nodes = np.unique(np.concatenate([source, target]))
                nodes = new_graph.nodes[:, :, list(unique_nodes)]
            paths = None
            if self._edgepaths:
                edgepaths = self._split_edgepaths
                paths = edgepaths.clone(
                    edgepaths.interface.select_paths(edgepaths, mask))
                if len(self._edgepaths.data) == 1:
                    paths = paths.clone(
                        [paths.dframe() if pd else paths.array()])
        else:
            data = self.data
            paths = self._edgepaths
        return self.clone((data, nodes, paths))

    @property
    def _split_edgepaths(self):
        if len(self) == len(self.edgepaths.data):
            return self.edgepaths
        else:
            return self.edgepaths.clone(split_path(self.edgepaths))

    def range(self, dimension, data_range=True):
        if self.nodes and dimension in self.nodes.dimensions():
            node_range = self.nodes.range(dimension, data_range)
            if self._edgepaths:
                path_range = self._edgepaths.range(dimension, data_range)
                return max_range([node_range, path_range])
            return node_range
        return super(Graph, self).range(dimension, data_range)

    def dimensions(self, selection='all', label=False):
        dimensions = super(Graph, self).dimensions(selection, label)
        if selection == 'ranges':
            if self._nodes:
                node_dims = self.nodes.dimensions(selection, label)
            else:
                node_dims = Nodes.kdims + Nodes.vdims
                if label in ['name', True, 'short']:
                    node_dims = [d.name for d in node_dims]
                elif label in ['long', 'label']:
                    node_dims = [d.label for d in node_dims]
            return dimensions + node_dims
        return dimensions

    @property
    def nodes(self):
        """
        Computes the node positions the first time they are requested
        if no explicit node information was supplied.
        """
        if self._nodes is None:
            self._nodes = layout_nodes(self, only_nodes=True)
        return self._nodes

    @property
    def edgepaths(self):
        """
        Returns the fixed EdgePaths or computes direct connections
        between supplied nodes.
        """
        if self._edgepaths:
            return self._edgepaths
        if pd is None:
            paths = connect_edges(self)
        else:
            paths = connect_edges_pd(self)
        return EdgePaths(paths, kdims=self.nodes.kdims[:2])

    @classmethod
    def from_networkx(cls, G, layout_function, nodes=None, **kwargs):
        """
        Generate a HoloViews Graph from a networkx.Graph object and
        networkx layout function. Any keyword arguments will be passed
        to the layout function.
        """
        positions = layout_function(G, **kwargs)
        edges = G.edges()
        if nodes:
            idx_dim = nodes.kdims[-1].name
            xs, ys = zip(*[v for k, v in sorted(positions.items())])
            indices = list(nodes.dimension_values(idx_dim))
            edges = [(src, tgt) for (src, tgt) in edges
                     if src in indices and tgt in indices]
            nodes = nodes.select(**{
                idx_dim: [eid for e in edges for eid in e]
            }).sort()
            nodes = nodes.add_dimension('x', 0, xs)
            nodes = nodes.add_dimension('y', 1, ys).clone(new_type=Nodes)
        else:
            nodes = Nodes([
                tuple(pos) + (idx, ) for idx, pos in sorted(positions.items())
            ])
        return cls((edges, nodes))
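
A hedged construction sketch for the Graph element above, showing both the abstract-edges form and the form with explicit node positions; the coordinates are illustrative.

# Hedged sketch, not part of the original example; assumes holoviews is installed.
import holoviews as hv

edges = [(0, 1), (0, 2), (1, 3)]                  # abstract start/end pairs
abstract_graph = hv.Graph(edges)                   # node layout computed on demand

nodes = hv.Nodes([(0.0, 0.0, 0), (1.0, 0.0, 1),
                  (0.0, 1.0, 2), (1.0, 1.0, 3)])   # x, y, node index
concrete_graph = hv.Graph((edges, nodes))

# Selecting on a node dimension keeps the edges connected to those nodes.
subset = concrete_graph.select(start=[0])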
Example #26
0
class ItemTable(Element):
    """
    A tabular element type to allow convenient visualization of either
    a standard Python dictionary, an OrderedDict or a list of tuples
    (i.e. input suitable for an OrderedDict constructor). If an
    OrderedDict is used, the headings will be kept in the correct
    order. Tables store heterogeneous data with different labels.

    Dimension objects are also accepted as keys, allowing dimensional
    information (e.g. type and units) to be associated per heading.
    """

    kdims = param.List(default=[],
                       bounds=(0, 0),
                       doc="""
       ItemTables hold an index Dimension for each value they contain, i.e.
       they are equivalent to the keys.""")

    vdims = param.List(default=[Dimension('Default')],
                       bounds=(0, None),
                       doc="""
       ItemTables should have only index Dimensions.""")

    group = param.String(default="ItemTable", constant=True)

    @property
    def rows(self):
        return len(self.vdims)

    @property
    def cols(self):
        return 2

    def __init__(self, data, **params):
        if data is None:
            data = []
        if type(data) == dict:
            raise ValueError(
                "ItemTable cannot accept a standard Python dictionary "
                "as a well-defined item ordering is required.")
        elif isinstance(data, dict):
            pass
        elif isinstance(data, list):
            data = OrderedDict(data)
        else:
            data = OrderedDict(list(data))  # Python 3
        if 'vdims' not in params:
            params['vdims'] = list(data.keys())
        str_keys = OrderedDict(
            (dimension_name(k), v) for (k, v) in data.items())
        super(ItemTable, self).__init__(str_keys, **params)

    def __getitem__(self, heading):
        """
        Get the value associated with the given heading (key).
        """
        if heading == ():
            return self
        if heading not in self.vdims:
            raise KeyError("%r not in available headings." % heading)
        return np.array(self.data.get(heading, np.NaN))

    @classmethod
    def collapse_data(cls, data, function, **kwargs):
        param.main.param.warning('ItemTable.collapse_data is deprecated and '
                                 'should no longer be used.')
        groups = np.vstack([np.array(odict.values()) for odict in data]).T
        return OrderedDict(
            zip(data[0].keys(), function(groups, axis=-1, **kwargs)))

    def dimension_values(self, dimension, expanded=True, flat=True):
        dimension = self.get_dimension(dimension, strict=True).name
        if dimension in self.dimensions('value', label=True):
            return np.array([self.data.get(dimension, np.NaN)])
        else:
            return super(ItemTable, self).dimension_values(dimension)

    def sample(self, samples=[]):
        if callable(samples):
            sampled_data = OrderedDict(item for item in self.data.items()
                                       if samples(item))
        else:
            sampled_data = OrderedDict(
                (s, self.data.get(s, np.NaN)) for s in samples)
        return self.clone(sampled_data)

    def reduce(self, dimensions=None, function=None, **reduce_map):
        raise NotImplementedError(
            'ItemTables are for heterogeneous data, which '
            'cannot be reduced.')

    def pprint_cell(self, row, col):
        """
        Get the formatted cell value for the given row and column indices.
        """
        if col > 2:
            raise Exception("Only two columns available in a ItemTable.")
        elif row >= self.rows:
            raise Exception("Maximum row index is %d" % self.rows - 1)
        elif col == 0:
            return self.dimensions('value')[row].pprint_label
        else:
            dim = self.get_dimension(row)
            heading = self.vdims[row]
            return dim.pprint_value(self.data.get(heading.name, np.NaN))

    def hist(self, *args, **kwargs):
        raise NotImplementedError("ItemTables are not homogeneous and "
                                  "don't support histograms.")

    def cell_type(self, row, col):
        """
        Returns the cell type given a row and column index. The common
        basic cell types are 'data' and 'heading'.
        """
        if col == 0: return 'heading'
        else: return 'data'
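
A short usage sketch for the ItemTable element above, assuming holoviews is importable; headings and values are illustrative.

# Hedged sketch, not part of the original example.
from collections import OrderedDict
import holoviews as hv

items = hv.ItemTable(OrderedDict([('mass', 1.5), ('velocity', 3.2)]))

print(items['mass'])            # array(1.5)
print(items.rows, items.cols)   # 2 2
print(items.pprint_cell(0, 0))  # heading label of the first row ('mass')
print(items.pprint_cell(0, 1))  # formatted value for 'mass'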
Example #27
0
class GeoPlot(ProjectionPlot, ElementPlot):
    """
    Plotting baseclass for geographic plots with a cartopy projection.
    """

    default_tools = param.List(default=['save', 'pan',
                                        WheelZoomTool(**({} if bokeh_version < '0.12.16' else
                                                         {'zoom_on_axis': False})),
                                        BoxZoomTool(match_aspect=True), 'reset'],
        doc="A list of plugin tools to use on the plot.")

    fixed_bounds = param.Boolean(default=False, doc="""
        Whether to prevent zooming beyond the projection's defined bounds.""")

    global_extent = param.Boolean(default=False, doc="""
        Whether the plot should display the whole globe.""")

    infer_projection = param.Boolean(default=False, doc="""
        Whether the projection should be inferred from the element crs.""")

    show_grid = param.Boolean(default=False, doc="""
        Whether to show gridlines on the plot.""")

    show_bounds = param.Boolean(default=False, doc="""
        Whether to display the projection boundary on the plot.""")

    projection = param.Parameter(default=GOOGLE_MERCATOR, doc="""
        Allows supplying a custom projection to transform the axis
        coordinates during display. Defaults to GOOGLE_MERCATOR.""")

    # Project operation to apply to the element
    _project_operation = None

    _hover_code = """
        var projections = Bokeh.require("core/util/projections");
        var x = special_vars.data_x
        var y = special_vars.data_y
        var coords = projections.wgs84_mercator.inverse([x, y])
        return "" + (coords[%d]).toFixed(4)
    """

    def __init__(self, element, **params):
        super(GeoPlot, self).__init__(element, **params)
        self.geographic = is_geographic(self.hmap.last)
        if self.geographic and not isinstance(self.projection, (PlateCarree, Mercator)):
            self.xaxis = None
            self.yaxis = None
            self.show_frame = False
            show_bounds = self._traverse_options(element, 'plot', ['show_bounds'],
                                                 defaults=False)
            self.show_bounds = not any(not sb for sb in show_bounds.get('show_bounds', []))
            if self.show_grid:
                param.main.warning(
                    'Grid lines do not reflect {0}; to do so '
                    'multiply the current element by gv.feature.grid() '
                    'and disable the show_grid option.'.format(self.projection)
                )

    def _axis_properties(self, axis, key, plot, dimension=None,
                         ax_mapping={'x': 0, 'y': 1}):
        axis_props = super(GeoPlot, self)._axis_properties(axis, key, plot,
                                                           dimension, ax_mapping)
        proj = self.projection
        if self.geographic and proj is GOOGLE_MERCATOR:
            dimension = 'lon' if axis == 'x' else 'lat'
            axis_props['ticker'] = MercatorTicker(dimension=dimension)
            axis_props['formatter'] = MercatorTickFormatter(dimension=dimension)
        return axis_props

    def _update_ranges(self, element, ranges):
        super(GeoPlot, self)._update_ranges(element, ranges)
        if not self.geographic:
            return
        if self.fixed_bounds:
            self.handles['x_range'].bounds = self.projection.x_limits
            self.handles['y_range'].bounds = self.projection.y_limits
        if self.projection is GOOGLE_MERCATOR:
            # Avoid zooming in beyond tile and axis resolution (causing JS errors) 
            options = self._traverse_options(element, 'plot', ['default_span'], defaults=False)
            min_interval = options['default_span'][0] if options.get('default_span') else 5
            for r in ('x_range', 'y_range'):
                ax_range = self.handles[r]
                start, end = ax_range.start, ax_range.end
                if (end-start) < min_interval:
                    mid = (start+end)/2.
                    ax_range.start = mid - min_interval/2.
                    ax_range.end = mid + min_interval/2.
                ax_range.min_interval = min_interval

    def initialize_plot(self, ranges=None, plot=None, plots=None, source=None):
        opts = {} if isinstance(self, HvOverlayPlot) else {'source': source}
        fig = super(GeoPlot, self).initialize_plot(ranges, plot, plots, **opts)
        if self.geographic and self.show_bounds and not self.overlaid:
            from . import GeoShapePlot
            shape = Shape(self.projection.boundary, crs=self.projection).options(fill_alpha=0)
            shapeplot = GeoShapePlot(shape, projection=self.projection,
                                     overlaid=True, renderer=self.renderer)
            shapeplot.geographic = False
            shapeplot.initialize_plot(plot=fig)
        return fig

    def _postprocess_hover(self, renderer, source):
        super(GeoPlot, self)._postprocess_hover(renderer, source)
        hover = self.handles.get('hover')
        try:
            from bokeh.models import CustomJSHover
        except ImportError:
            CustomJSHover = None
        if (not self.geographic or None in (hover, CustomJSHover) or
            isinstance(hover.tooltips, basestring) or self.projection is not GOOGLE_MERCATOR
            or hover.tooltips is None or 'hv_created' not in hover.tags):
            return
        element = self.current_frame
        xdim, ydim = [dimension_sanitizer(kd.name) for kd in element.kdims]
        formatters, tooltips = dict(hover.formatters), []
        xhover = CustomJSHover(code=self._hover_code % 0)
        yhover = CustomJSHover(code=self._hover_code % 1)
        for name, formatter in hover.tooltips:
            customjs = None
            if formatter in ('@{%s}' % xdim, '$x'):
                dim = xdim
                customjs = xhover
            elif formatter in ('@{%s}' % ydim, '$y'):
                dim = ydim
                customjs = yhover
            if customjs:
                key = formatter if formatter in ('$x', '$y') else dim
                formatters[key] = customjs
                formatter += '{custom}'
            tooltips.append((name, formatter))
        hover.tooltips = tooltips
        hover.formatters = formatters

    def _update_hover(self, element):
        tooltips, hover_opts = self._hover_opts(element)
        hover = self.handles['hover']
        if 'hv_created' in hover.tags:
            tooltips = [(ttp.pprint_label, '@{%s}' % dimension_sanitizer(ttp.name))
                        if isinstance(ttp, Dimension) else ttp for ttp in tooltips]
            tooltips = [(l, t+'{custom}' if t in hover.formatters else t) for l, t in tooltips]
            hover.tooltips = tooltips
        else:
            super(GeoPlot, self)._update_hover(element)

    def get_data(self, element, ranges, style):
        if self._project_operation and self.geographic:
            element = self._project_operation(element, projection=self.projection)
        return super(GeoPlot, self).get_data(element, ranges, style)
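
The GeoPlot options above are normally supplied through the options system rather than by instantiating the plot class directly; a hedged sketch assuming geoviews, cartopy and the bokeh backend are installed.

# Hedged sketch, not part of the original example.
import geoviews as gv
import cartopy.crs as ccrs

gv.extension('bokeh')

points = gv.Points([(0.0, 51.5), (-74.0, 40.7)], crs=ccrs.PlateCarree())

# projection, global_extent, fixed_bounds and show_grid are plot options
# declared on GeoPlot; size is a bokeh style option.
overlay = gv.tile_sources.OSM * points.opts(
    projection=ccrs.GOOGLE_MERCATOR, global_extent=False,
    fixed_bounds=True, show_grid=False, size=8)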
Example #28
0
class Graph(Dataset, Element2D):
    """
    Graph is a high-level Element representing both nodes and edges.
    A Graph may be defined in an abstract form representing just
    the abstract edges between nodes and optionally may be made
    concrete by supplying a Nodes Element defining the concrete
    positions of each node. If the node positions are supplied
    the EdgePaths (defining the concrete edges) can be inferred
    automatically or supplied explicitly.

    The constructor accepts regular columnar data defining the edges
    or a tuple of the abstract edges and nodes, or a tuple of the
    abstract edges, nodes, and edgepaths.
    """

    group = param.String(default='Graph', constant=True)

    kdims = param.List(default=[Dimension('start'), Dimension('end')],
                       bounds=(2, 2))

    node_type = Nodes

    edge_type = EdgePaths

    def __init__(self, data, kdims=None, vdims=None, **params):
        if isinstance(data, tuple):
            data = data + (None,)* (3-len(data))
            edges, nodes, edgepaths = data
        elif isinstance(data, type(self)):
            edges, nodes, edgepaths = data, data.nodes, data._edgepaths
        else:
            edges, nodes, edgepaths = data, None, None

        if nodes is not None:
            node_info = None
            if isinstance(nodes, self.node_type):
                pass
            elif not isinstance(nodes, Dataset) or nodes.ndims == 3:
                nodes = self.node_type(nodes)
            else:
                node_info = nodes
                nodes = None
        else:
            node_info = None
        if edgepaths is not None and not isinstance(edgepaths, self.edge_type):
            edgepaths = self.edge_type(edgepaths)

        self._nodes = nodes
        self._edgepaths = edgepaths
        super(Graph, self).__init__(edges, kdims=kdims, vdims=vdims, **params)
        if node_info is not None:
            self._add_node_info(node_info)
        self._validate()
        self.redim = RedimGraph(self, mode='dataset')


    def _add_node_info(self, node_info):
        nodes = self.nodes.clone(datatype=['pandas', 'dictionary'])
        if isinstance(node_info, self.node_type):
            nodes = nodes.redim(**dict(zip(nodes.dimensions('key', label=True),
                                           node_info.kdims)))

        if not node_info.kdims and len(node_info) != len(nodes):
            raise ValueError("The supplied node data does not match "
                             "the number of nodes defined by the edges. "
                             "Ensure that the number of nodes match"
                             "or supply an index as the sole key "
                             "dimension to allow the Graph to merge "
                             "the data.")

        if pd is None:
            if node_info.kdims and len(node_info) != len(nodes):
                raise ValueError("Graph cannot merge node data on index "
                                 "dimension without pandas. Either ensure "
                                 "the node data matches the order of nodes "
                                 "as they appear in the edge data or install "
                                 "pandas.")
            dimensions = nodes.dimensions()
            for d in node_info.vdims:
                if d in dimensions:
                    continue
                nodes = nodes.add_dimension(d, len(nodes.vdims),
                                            node_info.dimension_values(d),
                                            vdim=True)
        else:
            left_on = nodes.kdims[-1].name
            node_info_df = node_info.dframe()
            node_df = nodes.dframe()
            if node_info.kdims:
                idx = node_info.kdims[-1]
            else:
                idx = Dimension('index')
                node_info_df = node_info_df.reset_index()
            if 'index' in node_info_df.columns and not idx.name == 'index':
                node_df = node_df.rename(columns={'index': '__index'})
                left_on = '__index'
            cols = [c for c in node_info_df.columns if c not in
                    node_df.columns or c == idx.name]
            node_info_df = node_info_df[cols]
            node_df = pd.merge(node_df, node_info_df, left_on=left_on,
                               right_on=idx.name, how='left')
            nodes = nodes.clone(node_df, kdims=nodes.kdims[:2]+[idx],
                                vdims=node_info.vdims)

        self._nodes = nodes


    def _validate(self):
        if self._edgepaths is None:
            return
        mismatch = []
        for kd1, kd2 in zip(self.nodes.kdims, self.edgepaths.kdims):
            if kd1 != kd2:
                mismatch.append('%s != %s' % (kd1, kd2))
        if mismatch:
            raise ValueError('Ensure that the first two key dimensions on '
                             'Nodes and EdgePaths match: %s' % ', '.join(mismatch))
        npaths = len(self._edgepaths.data)
        nedges = len(self)
        if nedges != npaths:
            mismatch = True
            if npaths == 1:
                edges = self.edgepaths.split()[0]
                vals = edges.dimension_values(0)
                npaths = len(np.where(np.isnan(vals))[0])
                if not np.isnan(vals[-1]):
                    npaths += 1
                mismatch = npaths != nedges
            if mismatch:
                raise ValueError('Ensure that the number of edges supplied '
                                 'to the Graph (%d) matches the number of '
                                 'edgepaths (%d)' % (nedges, npaths))


    def clone(self, data=None, shared_data=True, new_type=None, link=True,
              *args, **overrides):
        if data is None:
            data = (self.data, self.nodes)
            if self._edgepaths is not None:
                data = data + (self.edgepaths,)
            overrides['plot_id'] = self._plot_id
        elif not isinstance(data, tuple):
            data = (data, self.nodes)
            if self._edgepaths:
                data = data + (self.edgepaths,)
        return super(Graph, self).clone(data, shared_data, new_type, link,
                                        *args, **overrides)


    def select(self, selection_expr=None, selection_specs=None, selection_mode='edges', **selection):
        """
        Allows selecting data by the slices, sets and scalar values
        along a particular dimension. The indices should be supplied as
        keywords mapping between the selected dimension and
        value. Additionally selection_specs (taking the form of a list
        of type.group.label strings, types or functions) may be
        supplied, which will ensure the selection is only applied if the
        specs match the selected object.

        Selecting by a node dimension selects all edges and nodes that are
        connected to the selected nodes. To select only edges between the
        selected nodes, set the selection_mode to 'nodes'.
        """
        from ..util.transform import dim
        if selection_expr is not None and not isinstance(selection_expr, dim):
            raise ValueError("""\
The first positional argument to the Dataset.select method is expected to be a
holoviews.util.transform.dim expression. Use the selection_specs keyword
argument to specify a selection specification""")
        
        selection = {dim: sel for dim, sel in selection.items()
                     if dim in self.dimensions('ranges')+['selection_mask']}
        if (selection_specs and not any(self.matches(sp) for sp in selection_specs)
            or (not selection and not selection_expr)):
            return self

        index_dim = self.nodes.kdims[2].name
        dimensions = self.kdims+self.vdims
        node_selection = {index_dim: v for k, v in selection.items()
                          if k in self.kdims}
        if selection_expr:
            mask = selection_expr.apply(self.nodes, compute=False, keep_index=True)
            nodes = self.nodes[mask]
        else:
            nodes = self.nodes.select(**dict(selection, **node_selection))
        selection = {k: v for k, v in selection.items() if k in dimensions}

        # Compute mask for edges if nodes were selected on
        nodemask = None
        if len(nodes) != len(self.nodes):
            xdim, ydim = dimensions[:2]
            indices = list(nodes.dimension_values(2, False))
            if selection_mode == 'edges':
                mask1 = self.interface.select_mask(self, {xdim.name: indices})
                mask2 = self.interface.select_mask(self, {ydim.name: indices})
                nodemask = (mask1 | mask2)
                nodes = self.nodes
            else:
                nodemask = self.interface.select_mask(self, {xdim.name: indices,
                                                             ydim.name: indices})

        # Compute mask for edge selection
        mask = None
        if selection:
            mask = self.interface.select_mask(self, selection)

        # Combine masks
        if nodemask is not None:
            if mask is not None:
                mask &= nodemask
            else:
                mask = nodemask

        # Apply edge mask
        if mask is not None:
            data = self.interface.select(self, mask)
            if not np.all(mask):
                new_graph = self.clone((data, nodes))
                source = new_graph.dimension_values(0, expanded=False)
                target = new_graph.dimension_values(1, expanded=False)
                unique_nodes = np.unique(np.concatenate([source, target]))
                nodes = new_graph.nodes[:, :, list(unique_nodes)]
            paths = None
            if self._edgepaths:
                edgepaths = self._split_edgepaths
                paths = edgepaths.clone(edgepaths.interface.select_paths(edgepaths, mask))
                if len(self._edgepaths.data) == 1:
                    paths = paths.clone([paths.dframe() if pd else paths.array()])
        else:
            data = self.data
            paths = self._edgepaths
        return self.clone((data, nodes, paths))


    @property
    def _split_edgepaths(self):
        if len(self) == len(self.edgepaths.data):
            return self.edgepaths
        else:
            return self.edgepaths.clone(split_path(self.edgepaths))


    def range(self, dimension, data_range=True, dimension_range=True):
        if self.nodes and dimension in self.nodes.dimensions():
            node_range = self.nodes.range(dimension, data_range, dimension_range)
            if self._edgepaths:
                path_range = self._edgepaths.range(dimension, data_range, dimension_range)
                return max_range([node_range, path_range])
            return node_range
        return super(Graph, self).range(dimension, data_range, dimension_range)


    def dimensions(self, selection='all', label=False):
        dimensions = super(Graph, self).dimensions(selection, label)
        if selection == 'ranges':
            if self._nodes is not None:
                node_dims = self.nodes.dimensions(selection, label)
            else:
                node_dims = self.node_type.kdims+self.node_type.vdims
                if label in ['name', True, 'short']:
                    node_dims = [d.name for d in node_dims]
                elif label in ['long', 'label']:
                    node_dims = [d.label for d in node_dims]
            return dimensions+node_dims
        return dimensions


    @property
    def nodes(self):
        """
        Computes the node positions the first time they are requested
        if no explicit node information was supplied.
        """
        
        if self._nodes is None:
            from ..operation.element import chain
            self._nodes = layout_nodes(self, only_nodes=True)
            self._nodes._dataset = None
            self._nodes._pipeline = chain.instance()
        return self._nodes


    @property
    def edgepaths(self):
        """
        Returns the fixed EdgePaths or computes direct connections
        between supplied nodes.
        """
        if self._edgepaths:
            return self._edgepaths
        if pd is None:
            paths = connect_edges(self)
        else:
            paths = connect_edges_pd(self)
        return self.edge_type(paths, kdims=self.nodes.kdims[:2])


    @classmethod
    def from_networkx(cls, G, positions, nodes=None, **kwargs):
        """
        Generate a HoloViews Graph from a networkx.Graph object and
        networkx layout function or dictionary of node positions.
        Any keyword arguments will be passed to the layout
        function. By default it will extract all node and edge
        attributes from the networkx.Graph but explicit node
        information may also be supplied. Any non-scalar attributes,
        such as lists or dictionaries, will be ignored.

        Args:
            G (networkx.Graph): Graph to convert to Graph element
            positions (dict or callable): Node positions
                Node positions defined as a dictionary mapping from
                node id to (x, y) tuple or networkx layout function
                which computes a positions dictionary
            kwargs (dict): Keyword arguments for layout function

        Returns:
            Graph element
        """
        if not isinstance(positions, dict):
            positions = positions(G, **kwargs)

        # Unpack edges
        edges = defaultdict(list)
        for start, end in G.edges():
            for attr, value in sorted(G.adj[start][end].items()):
                if isinstance(value, (list, dict)):
                    continue # Cannot handle list or dict attrs
                edges[attr].append(value)

            # Handle tuple node indexes (used in 2D grid Graphs)
            if isinstance(start, tuple):
                start = str(start)
            if isinstance(end, tuple):
                end = str(end)
            edges['start'].append(start)
            edges['end'].append(end)
        edge_cols = sorted([k for k in edges if k not in ('start', 'end')
                            and len(edges[k]) == len(edges['start'])])
        edge_vdims = [str(col) if isinstance(col, int) else col for col in edge_cols]
        edge_data = tuple(edges[col] for col in ['start', 'end']+edge_cols)

        # Unpack user node info
        xdim, ydim, idim = cls.node_type.kdims[:3]
        if nodes:
            node_columns = nodes.columns()
            idx_dim = nodes.kdims[0].name
            info_cols, values = zip(*((k, v) for k, v in node_columns.items() if k != idx_dim))
            node_info = {i: vals for i, vals in zip(node_columns[idx_dim], zip(*values))}
        else:
            info_cols = []
            node_info = None
        node_columns = defaultdict(list)

        # Unpack node positions
        for idx, pos in sorted(positions.items()):
            node = G.nodes.get(idx)
            if node is None:
                continue
            x, y = pos
            node_columns[xdim.name].append(x)
            node_columns[ydim.name].append(y)
            for attr, value in node.items():
                if isinstance(value, (list, dict)):
                    continue
                node_columns[attr].append(value)
            for i, col in enumerate(info_cols):
                node_columns[col].append(node_info[idx][i])
            if isinstance(idx, tuple):
                idx = str(idx) # Tuple node indexes handled as strings
            node_columns[idim.name].append(idx)
        node_cols = sorted([k for k in node_columns if k not in cls.node_type.kdims
                            and len(node_columns[k]) == len(node_columns[xdim.name])])
        columns = [xdim.name, ydim.name, idim.name]+node_cols+list(info_cols)
        node_data = tuple(node_columns[col] for col in columns)

        # Construct nodes
        vdims = []
        for col in node_cols:
            if isinstance(col, int):
                dim = str(col)
            elif nodes is not None and col in nodes.vdims:
                dim = nodes.get_dimension(col)
            else:
                dim = col
            vdims.append(dim)
        nodes = cls.node_type(node_data, vdims=vdims)

        # Construct graph
        return cls((edge_data, nodes), vdims=edge_vdims)
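
A hedged usage sketch for the from_networkx classmethod above, assuming networkx and holoviews are installed; keyword arguments are forwarded to the layout function.

# Hedged sketch, not part of the original example.
import networkx as nx
import holoviews as hv

G = nx.karate_club_graph()

# Either pass a layout function (kwargs are forwarded to it) ...
graph = hv.Graph.from_networkx(G, nx.spring_layout, k=0.8, seed=42)

# ... or a precomputed dictionary of node positions.
positions = nx.circular_layout(G)
graph_fixed = hv.Graph.from_networkx(G, positions)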
Example #29
0
class Dataset(Element):
    """
    Dataset provides a general baseclass for Element types that
    contain structured data and supports a range of data formats.

    The Dataset class supports various methods offering a consistent way
    of working with the stored data regardless of the storage format
    used. These operations include indexing, selection and various ways
    of aggregating or collapsing the data with a supplied function.
    """

    datatype = param.List(
        datatypes,
        doc=""" A priority list of the data types to be used for storage
        on the .data attribute. If the input supplied to the element
        constructor cannot be put into the requested format, the next
        format listed will be used until a suitable format is found (or
        the data fails to be understood).""")

    # In the 1D case the interfaces should not automatically add x-values
    # to supplied data
    _1d = False

    # Define a class used to transform Datasets into other Element types
    _conversion_interface = DataConversion

    def __init__(self, data, **kwargs):
        if isinstance(data, Element):
            pvals = util.get_param_values(data)
            kwargs.update([(l, pvals[l]) for l in ['group', 'label']
                           if l in pvals and l not in kwargs])
        initialized = Interface.initialize(type(self),
                                           data,
                                           kwargs.get('kdims'),
                                           kwargs.get('vdims'),
                                           datatype=kwargs.get('datatype'))
        (data, self.interface, dims, extra_kws) = initialized
        super(Dataset, self).__init__(data, **dict(extra_kws, **dict(kwargs, **dims)))
        self.interface.validate(self)

    def __setstate__(self, state):
        """
        Restores OrderedDict based Dataset objects, converting them to
        the up-to-date NdElement format.
        """
        self.__dict__ = state
        if isinstance(self.data, OrderedDict):
            self.data = Dataset(self.data,
                                kdims=self.kdims,
                                vdims=self.vdims,
                                group=self.group,
                                label=self.label)
            self.interface = NdColumns
        elif isinstance(self.data, np.ndarray):
            self.interface = ArrayInterface
        elif util.is_dataframe(self.data):
            self.interface = PandasInterface

        super(Dataset, self).__setstate__(state)

    def closest(self, coords):
        """
        Given single or multiple samples along the first key dimension,
        returns the closest actual sample coordinates.
        """
        if self.ndims > 1:
            raise NotImplementedError("Closest method currently only "
                                      "implemented for 1D Elements")

        if not isinstance(coords, list): coords = [coords]
        xs = self.dimension_values(0)
        idxs = [np.argmin(np.abs(xs - coord)) for coord in coords]
        return [xs[idx] for idx in idxs] if len(coords) > 1 else xs[idxs[0]]

    def sort(self, by=[]):
        """
        Sorts the data by the values along the supplied dimensions.
        """
        if not by: by = self.kdims
        sorted_columns = self.interface.sort(self, by)
        return self.clone(sorted_columns)

    def range(self, dim, data_range=True):
        """
        Computes the range of values along a supplied dimension, taking
        into account the range and soft_range defined on the Dimension
        object.
        """
        dim = self.get_dimension(dim)
        if dim.range != (None, None):
            return dim.range
        elif dim in self.dimensions():
            if len(self):
                drange = self.interface.range(self, dim)
            else:
                drange = (np.nan, np.nan)
        if data_range:
            soft_range = [r for r in dim.soft_range if r is not None]
            if soft_range:
                return util.max_range([drange, soft_range])
            else:
                return drange
        else:
            return dim.soft_range

    def add_dimension(self, dimension, dim_pos, dim_val, vdim=False, **kwargs):
        """
        Create a new object with an additional key dimension. Requires
        the dimension name or object, the desired position in the key
        dimensions and a key value scalar or sequence of the same length
        as the existing keys.
        """
        if isinstance(dimension, str):
            dimension = Dimension(dimension)

        if dimension.name in self.kdims:
            raise Exception(
                '{dim} dimension already defined'.format(dim=dimension.name))

        if vdim:
            dims = self.vdims[:]
            dims.insert(dim_pos, dimension)
            dimensions = dict(vdims=dims)
            dim_pos += self.ndims
        else:
            dims = self.kdims[:]
            dims.insert(dim_pos, dimension)
            dimensions = dict(kdims=dims)

        data = self.interface.add_dimension(self, dimension, dim_pos, dim_val,
                                            vdim)
        return self.clone(data, **dimensions)

    def select(self, selection_specs=None, **selection):
        """
        Allows selecting data by slices, sets and scalar values
        along a particular dimension. The indices should be supplied as
        keywords mapping the selected dimension to the desired
        value. Additionally selection_specs (taking the form of a list
        of type.group.label strings, types or functions) may be
        supplied, which will ensure the selection is only applied if the
        specs match the selected object.
        """
        if selection_specs and not any(
                self.matches(sp) for sp in selection_specs):
            return self

        data = self.interface.select(self, **selection)
        if np.isscalar(data):
            return data
        else:
            return self.clone(data)

    def reindex(self, kdims=None, vdims=None):
        """
        Create a new object with a re-ordered set of dimensions.  Allows
        converting key dimensions to value dimensions and vice versa.
        """
        if kdims is None:
            key_dims = [d for d in self.kdims if not vdims or d not in vdims]
        else:
            key_dims = [self.get_dimension(k) for k in kdims]

        if vdims is None:
            val_dims = [d for d in self.vdims if not kdims or d not in kdims]
        else:
            val_dims = [self.get_dimension(v) for v in vdims]

        data = self.interface.reindex(self, key_dims, val_dims)
        return self.clone(data, kdims=key_dims, vdims=val_dims)

    def __getitem__(self, slices):
        """
        Allows slicing and selecting values in the Dataset object.
        Supports multiple indexing modes:

           (1) Slicing and indexing along the values of each dimension
               in the columns object using either scalars, slices or
               sets of values.
           (2) Supplying the name of a dimension as the first argument
               will return the values along that dimension as a numpy
               array.
           (3) Slicing of all key dimensions and selecting a single
               value dimension by name.
           (4) A boolean array index matching the length of the Dataset
               object.
        """
        slices = util.process_ellipses(self, slices, vdim_selection=True)
        if isinstance(slices, np.ndarray) and slices.dtype.kind == 'b':
            if len(slices) != len(self):
                raise IndexError(
                    "Boolean index must match length of sliced object")
            return self.clone(
                self.interface.select(self, selection_mask=slices))
        elif slices in [(), Ellipsis]:
            return self
        if not isinstance(slices, tuple): slices = (slices, )
        value_select = None
        if len(slices) == 1 and slices[0] in self.dimensions():
            return self.dimension_values(slices[0])
        elif (len(slices) == self.ndims + 1 and
              slices[self.ndims] in self.dimensions()):
            selection = dict(zip(self.dimensions('key', label=True), slices))
            value_select = slices[self.ndims]
        elif (len(slices) == self.ndims + 1 and
              isinstance(slices[self.ndims], (Dimension, str))):
            raise Exception("%r is not an available value dimension" %
                            slices[self.ndims])
        else:
            selection = dict(zip(self.dimensions(label=True), slices))
        data = self.select(**selection)
        if value_select:
            if len(data) == 1:
                return data[value_select][0]
            else:
                return data.reindex(vdims=[value_select])
        return data

    def sample(self, samples=[]):
        """
        Allows sampling the Dataset with an iterable of coordinates
        matching the key dimensions, returning a new object containing
        just the selected samples.
        """
        return self.clone(self.interface.sample(self, samples))

    def reduce(self,
               dimensions=[],
               function=None,
               spreadfn=None,
               **reduce_map):
        """
        Allows reducing the values along one or more key dimensions with
        the supplied function. The dimensions may be supplied either as a
        list together with a single function to apply, or as a mapping
        between dimensions and the function to apply along each one.
        """
        if any(dim in self.vdims for dim in dimensions):
            raise Exception("Reduce cannot be applied to value dimensions")
        function, dims = self._reduce_map(dimensions, function, reduce_map)
        dims = [d for d in self.kdims if d not in dims]
        return self.aggregate(dims, function, spreadfn)

    def aggregate(self,
                  dimensions=None,
                  function=None,
                  spreadfn=None,
                  **kwargs):
        """
        Aggregates over the supplied key dimensions with the defined
        function.
        """
        if function is None:
            raise ValueError(
                "The aggregate method requires a function to be specified")
        if dimensions is None: dimensions = self.kdims
        elif not isinstance(dimensions, list): dimensions = [dimensions]
        aggregated = self.interface.aggregate(self, dimensions, function,
                                              **kwargs)
        aggregated = self.interface.unpack_scalar(self, aggregated)

        kdims = [self.get_dimension(d) for d in dimensions]
        vdims = self.vdims
        if spreadfn:
            error = self.interface.aggregate(self, dimensions, spreadfn)
            spread_name = spreadfn.__name__
            ndims = len(vdims)
            error = self.clone(error, kdims=kdims)
            combined = self.clone(aggregated, kdims=kdims)
            for i, d in enumerate(vdims):
                dim = d('_'.join([d.name, spread_name]))
                combined = combined.add_dimension(dim, ndims + i, error[d],
                                                  True)
            return combined

        if np.isscalar(aggregated):
            return aggregated
        else:
            return self.clone(aggregated, kdims=kdims, vdims=vdims)

    def groupby(self,
                dimensions=[],
                container_type=HoloMap,
                group_type=None,
                **kwargs):
        """
        Return the results of a groupby operation over the specified
        dimensions as an object of type container_type (expected to be
        dictionary-like).

        Keys vary over the columns (dimensions) and the corresponding
        values are collections of group_type (e.g. list, tuple)
        constructed with kwargs (if supplied).
        """
        if not isinstance(dimensions, list): dimensions = [dimensions]
        if not len(dimensions): dimensions = self.dimensions('key', True)
        if group_type is None: group_type = type(self)

        dimensions = [
            self.get_dimension(d, strict=True).name for d in dimensions
        ]
        invalid_dims = list(
            set(dimensions) - set(self.dimensions('key', True)))
        if invalid_dims:
            raise Exception('Following dimensions could not be found:\n%s.' %
                            invalid_dims)
        return self.interface.groupby(self, dimensions, container_type,
                                      group_type, **kwargs)

    def __len__(self):
        """
        Returns the number of rows in the Dataset object.
        """
        return self.interface.length(self)

    @property
    def shape(self):
        "Returns the shape of the data."
        return self.interface.shape(self)

    def redim(self, specs=None, **dimensions):
        """
        Replaces dimensions on the dataset, also allowing dimensions
        to be renamed. The dimension mapping should map between the old
        dimension name and either a dictionary of new attributes, a
        completely new Dimension object or a new string name.
        """
        if specs is not None:
            if not isinstance(specs, list):
                specs = [specs]
            if not any(self.matches(spec) for spec in specs):
                return self

        kdims = replace_dimensions(self.kdims, dimensions)
        vdims = replace_dimensions(self.vdims, dimensions)
        zipped_dims = zip(self.kdims + self.vdims, kdims + vdims)
        renames = {pk.name: nk for pk, nk in zipped_dims if pk != nk}
        data = self.data
        if renames:
            data = self.interface.redim(self, renames)
        return self.clone(data, kdims=kdims, vdims=vdims)

    def dimension_values(self, dim, expanded=True, flat=True):
        """
        Returns the values along a particular dimension. If expanded
        is False only the unique values are returned.
        """
        dim = self.get_dimension(dim, strict=True).name
        return self.interface.values(self, dim, expanded, flat)

    def get_dimension_type(self, dim):
        """
        Returns the declared type of the specified Dimension if set,
        otherwise the type inferred from the dimension values; returns
        None if no consistent type can be determined.
        """
        dim_obj = self.get_dimension(dim)
        if dim_obj and dim_obj.type is not None:
            return dim_obj.type
        return self.interface.dimension_type(self, dim)

    def dframe(self, dimensions=None):
        """
        Returns the data in the form of a DataFrame.
        """
        if dimensions:
            dimensions = [self.get_dimension(d).name for d in dimensions]
        return self.interface.dframe(self, dimensions)

    def columns(self, dimensions=None):
        if dimensions is None: dimensions = self.dimensions()
        dimensions = [self.get_dimension(d) for d in dimensions]
        return {d.name: self.dimension_values(d) for d in dimensions}

    @property
    def to(self):
        """
        Property to create a conversion interface with methods to
        convert to other Element types.
        """
        return self._conversion_interface(self)
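
A minimal usage sketch of the Dataset API documented above, assuming HoloViews, NumPy and pandas are importable; the column names and values are made up for illustration:

import numpy as np
import pandas as pd
import holoviews as hv

# Two key dimensions and one value dimension
df = pd.DataFrame({
    'x': np.repeat(np.arange(5), 3),
    'cat': np.tile(['a', 'b', 'c'], 5),
    'y': np.random.rand(15),
})
ds = hv.Dataset(df, kdims=['x', 'cat'], vdims=['y'])

subset = ds.select(x=(1, 4))                  # slice along a key dimension
means = ds.aggregate('x', function=np.mean)   # collapse 'cat', averaging 'y'
groups = ds.groupby('cat')                    # HoloMap of per-category Datasets
print(means.dframe())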
Example #30
0
class SideHistogramPlot(HistogramPlot):

    style_opts = HistogramPlot.style_opts + ['cmap']

    height = param.Integer(default=125, doc="The height of the plot")

    width = param.Integer(default=125, doc="The width of the plot")

    show_title = param.Boolean(default=False,
                               doc="""
        Whether to display the plot title.""")

    default_tools = param.List(
        default=['save', 'pan', 'wheel_zoom', 'box_zoom', 'reset'],
        doc="A list of plugin tools to use on the plot.")

    _callback = """
    color_mapper.low = cb_data['geometry']['{axis}0'];
    color_mapper.high = cb_data['geometry']['{axis}1'];
    source.change.emit()
    main_source.change.emit()
    """

    def __init__(self, *args, **kwargs):
        super(SideHistogramPlot, self).__init__(*args, **kwargs)
        if self.invert_axes:
            self.default_tools.append('ybox_select')
        else:
            self.default_tools.append('xbox_select')

    def get_data(self, element, ranges, style):
        data, mapping, style = HistogramPlot.get_data(self, element, ranges,
                                                      style)
        color_dims = [
            d for d in self.adjoined.traverse(
                lambda x: x.handles.get('color_dim')) if d is not None
        ]
        dim = color_dims[0] if color_dims else None
        cmapper = self._get_colormapper(dim, element, {}, {})
        if cmapper and dim in element.dimensions():
            data[dim.name] = ([] if self.static_source
                              else element.dimension_values(dim))
            mapping['fill_color'] = {'field': dim.name, 'transform': cmapper}
        return (data, mapping, style)

    def _init_glyph(self, plot, mapping, properties):
        """
        Returns a Bokeh glyph object.
        """
        ret = super(SideHistogramPlot, self)._init_glyph(plot, mapping, properties)
        if 'field' not in mapping.get('fill_color', {}):
            return ret
        dim = mapping['fill_color']['field']
        sources = self.adjoined.traverse(
            lambda x: (x.handles.get('color_dim'), x.handles.get('source')))
        sources = [src for cdim, src in sources if cdim == dim]
        tools = [
            t for t in self.handles['plot'].tools
            if isinstance(t, BoxSelectTool)
        ]
        if not tools or not sources:
            return ret
        box_select, main_source = tools[0], sources[0]
        handles = {
            'color_mapper': self.handles['color_mapper'],
            'source': self.handles['source'],
            'cds': self.handles['source'],
            'main_source': main_source
        }
        axis = 'y' if self.invert_axes else 'x'
        callback = self._callback.format(axis=axis)
        if box_select.callback:
            box_select.callback.code += callback
            box_select.callback.args.update(handles)
        else:
            box_select.callback = CustomJS(args=handles, code=callback)
        return ret
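
The _init_glyph method above attaches a CustomJS snippet to BoxSelectTool.callback, an attribute exposed by the older Bokeh versions this code targets. A hedged sketch of the same idea using the current Bokeh API, where a SelectionGeometry event rescales a color mapper (the data and palette are illustrative):

import numpy as np
from bokeh.events import SelectionGeometry
from bokeh.models import CustomJS, LinearColorMapper
from bokeh.plotting import figure, show

# A box selection updates the color mapper's low/high from the selection
# geometry, analogous to the _callback template defined on the plot class.
mapper = LinearColorMapper(palette='Viridis256', low=0, high=1)
p = figure(tools='box_select,reset')
p.scatter(np.random.rand(100), np.random.rand(100),
          fill_color={'field': 'x', 'transform': mapper})

callback = CustomJS(args={'color_mapper': mapper}, code="""
    color_mapper.low = cb_obj.geometry['x0'];
    color_mapper.high = cb_obj.geometry['x1'];
""")
p.js_on_event(SelectionGeometry, callback)
show(p)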