Example #1
class Transform(schema.Transform):
    filter = T.Union([
        T.Unicode(allow_none=True, default_value=None),
        T.Instance(expr.Expression),
        T.Instance(schema.EqualFilter),
        T.Instance(schema.RangeFilter),
        T.Instance(schema.OneOfFilter),
        T.List(
            T.Union([
                T.Unicode(),
                T.Instance(expr.Expression),
                T.Instance(schema.EqualFilter),
                T.Instance(schema.RangeFilter),
                T.Instance(schema.OneOfFilter)
            ]))
    ],
                     allow_none=True,
                     default_value=None,
                     help=schema.Transform.filter.help)

    def _finalize(self, **kwargs):
        """Finalize object: convert filter expressions to string"""
        convert = lambda f: repr(f) if isinstance(f, expr.Expression) else f
        self.filter = convert(self.filter)
        if isinstance(self.filter, list):
            self.filter = [convert(f) for f in self.filter]
        super(Transform, self)._finalize(**kwargs)
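A minimal standalone sketch of the same pattern using only traitlets (the altair/expr machinery is left out and all names below are illustrative): a Union trait that accepts a single filter string or a list of them, with None as the default.

import traitlets as T

class FilterHolder(T.HasTraits):
    # Illustrative stand-in for the filter trait above: a single expression
    # string, a list of expression strings, or None.
    filter = T.Union(
        [T.Unicode(), T.List(T.Unicode())],
        allow_none=True,
        default_value=None,
    )

h = FilterHolder()
h.filter = "datum.x > 2"                     # single expression string
h.filter = ["datum.x > 2", "datum.y < 5"]    # or a list of them
try:
    h.filter = 42                            # no branch validates -> TraitError
except T.TraitError as err:
    print("rejected:", err)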
Example #2
class FieldDef(BaseObject):
    """Wrapper for Vega-Lite FieldDef definition.
    
    Attributes
    ----------
    aggregate: AggregateOp
        Aggregation function for the field.
    bin: Union(Bool, Bin)
        Flag for binning a `quantitative` field, or a bin property object for binning parameters.
    field: Unicode
        Name of the field from which to pull a data value.
    timeUnit: TimeUnit
        Time unit for a `temporal` field.
    title: Unicode
        Title for axis or legend.
    type: Type
        The encoded field's type of measurement.
    value: Union(CFloat, Unicode, Bool)
        A constant value in visual domain.
    """
    aggregate = AggregateOp(allow_none=True, default_value=None, help="""Aggregation function for the field.""")
    bin = T.Union([T.Bool(allow_none=True, default_value=None), T.Instance(Bin, allow_none=True, default_value=None)])
    field = T.Unicode(allow_none=True, default_value=None, help="""Name of the field from which to pull a data value.""")
    timeUnit = TimeUnit(allow_none=True, default_value=None, help="""Time unit for a `temporal` field.""")
    title = T.Unicode(allow_none=True, default_value=None, help="""Title for axis or legend.""")
    type = Type(allow_none=True, default_value=None, help="""The encoded field's type of measurement.""")
    value = T.Union([T.CFloat(allow_none=True, default_value=None), T.Unicode(allow_none=True, default_value=None), T.Bool(allow_none=True, default_value=None)])
    
    def __init__(self, aggregate=None, bin=None, field=None, timeUnit=None, title=None, type=None, value=None, **kwargs):
        kwds = dict(aggregate=aggregate, bin=bin, field=field, timeUnit=timeUnit, title=title, type=type, value=value)
        kwargs.update({k:v for k, v in kwds.items() if v is not None})
        super(FieldDef, self).__init__(**kwargs)
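The constructor filters out keyword arguments that are still None before handing them to the traitlets machinery, so unspecified attributes keep their trait defaults. A hedged sketch of that pattern with plain traitlets (BaseObject and the Vega-Lite trait types are not needed for the idea):

import traitlets as T

class MiniFieldDef(T.HasTraits):
    # Illustrative stand-ins for two of the traits above.
    field = T.Unicode(allow_none=True, default_value=None)
    aggregate = T.Unicode(allow_none=True, default_value=None)

    def __init__(self, field=None, aggregate=None, **kwargs):
        kwds = dict(field=field, aggregate=aggregate)
        # Only forward values that were actually supplied.
        kwargs.update({k: v for k, v in kwds.items() if v is not None})
        super().__init__(**kwargs)

fd = MiniFieldDef(field="price")
print(fd.field, fd.aggregate)   # 'price' None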
Example #3
class Scatter(widgets.DOMWidget):
    _view_name = Unicode('ScatterView').tag(sync=True)
    _view_module = Unicode('ipyvolume').tag(sync=True)
    _model_name = Unicode('ScatterModel').tag(sync=True)
    _model_module = Unicode('ipyvolume').tag(sync=True)
    x = Array(default_value=None).tag(sync=True, **create_array_binary_serialization('x'))
    y = Array(default_value=None).tag(sync=True, **create_array_binary_serialization('y'))
    z = Array(default_value=None).tag(sync=True, **create_array_binary_serialization('z'))
    vx = Array(default_value=None,allow_none=True).tag(sync=True, **create_array_binary_serialization('vx'))
    vy = Array(default_value=None,allow_none=True).tag(sync=True, **create_array_binary_serialization('vy'))
    vz = Array(default_value=None,allow_none=True).tag(sync=True, **create_array_binary_serialization('vz'))
    selected = Array(default_value=None,allow_none=True).tag(sync=True, **create_array_binary_serialization('selection', update_from_js=True))
    sequence_index = Integer(default_value=0).tag(sync=True)
    size = traitlets.Union([traitlets.Float().tag(sync=True),
                           Array(default_value=None,allow_none=True).tag(sync=True, **create_array_binary_serialization('size'))],
                           default_value=5).tag(sync=True)
    size_selected = traitlets.Union([traitlets.Float().tag(sync=True),
                           Array(default_value=None,allow_none=True).tag(sync=True, **create_array_binary_serialization('size_selected'))],
                           default_value=7).tag(sync=True)
    color = traitlets.Union([Unicode().tag(sync=True),
                             Array(default_value=None,allow_none=True).tag(sync=True, **create_array_binary_serialization('color'))],
                             default_value="red").tag(sync=True)
    color_selected = traitlets.Union([Unicode().tag(sync=True),
                             Array(default_value=None,allow_none=True).tag(sync=True, **create_array_binary_serialization('color_selected'))],
                             default_value="green").tag(sync=True)
    geo = traitlets.Unicode('diamond').tag(sync=True)
Example #4
class Scatter(widgets.Widget):
    _view_name = Unicode('ScatterView').tag(sync=True)
    _view_module = Unicode('ipyvolume').tag(sync=True)
    _model_name = Unicode('ScatterModel').tag(sync=True)
    _model_module = Unicode('ipyvolume').tag(sync=True)
    _view_module_version = Unicode(semver_range_frontend).tag(sync=True)
    _model_module_version = Unicode(semver_range_frontend).tag(sync=True)
    x = Array(default_value=None).tag(sync=True, **array_sequence_serialization)
    y = Array(default_value=None).tag(sync=True, **array_sequence_serialization)
    z = Array(default_value=None).tag(sync=True, **array_sequence_serialization)
    vx = Array(default_value=None, allow_none=True).tag(sync=True, **array_sequence_serialization)
    vy = Array(default_value=None, allow_none=True).tag(sync=True, **array_sequence_serialization)
    vz = Array(default_value=None, allow_none=True).tag(sync=True, **array_sequence_serialization)
    selected = Array(default_value=None, allow_none=True).tag(sync=True, **array_sequence_serialization)
    sequence_index = Integer(default_value=0).tag(sync=True)
    size = traitlets.Union([Array(default_value=None, allow_none=True).tag(sync=True, **array_sequence_serialization),
                           traitlets.Float().tag(sync=True)],
                           default_value=5).tag(sync=True)
    size_selected = traitlets.Union([Array(default_value=None, allow_none=True).tag(sync=True, **array_sequence_serialization),
                                    traitlets.Float().tag(sync=True)],
                                    default_value=7).tag(sync=True)
    color = Array(default_value="red", allow_none=True).tag(sync=True, **color_serialization)
    color_selected = traitlets.Union([Array(default_value=None, allow_none=True).tag(sync=True, **color_serialization),
                                     Unicode().tag(sync=True)],
                                     default_value="green").tag(sync=True)
    geo = traitlets.Unicode('diamond').tag(sync=True)
    connected = traitlets.CBool(default_value=False).tag(sync=True)
    visible = traitlets.CBool(default_value=True).tag(sync=True)

    texture = traitlets.Union([
        traitlets.Instance(ipywebrtc.MediaStream),
        Unicode(),
        traitlets.List(Unicode, [], allow_none=True),
        Image(default_value=None, allow_none=True),
        traitlets.List(Image(default_value=None, allow_none=True))
    ]).tag(sync=True, **texture_serialization)

    material = traitlets.Instance(pythreejs.ShaderMaterial, help='A :any:`pythreejs.ShaderMaterial` that is used for the mesh')\
                                  .tag(sync=True, **ipywidgets.widget_serialization)
    @traitlets.default('material')
    def _default_material(self):
        return pythreejs.ShaderMaterial()

    line_material = traitlets.Instance(pythreejs.ShaderMaterial, help='A :any:`pythreejs.ShaderMaterial` that is used for the lines/wireframe')\
                                       .tag(sync=True, **ipywidgets.widget_serialization)
    @traitlets.default('line_material')
    def _default_line_material(self):
        return pythreejs.ShaderMaterial()
Example #5
class Scatter(widgets.DOMWidget):
    _view_name = Unicode('ScatterView').tag(sync=True)
    _view_module = Unicode('ipyvolume').tag(sync=True)
    _model_name = Unicode('ScatterModel').tag(sync=True)
    _model_module = Unicode('ipyvolume').tag(sync=True)
    _view_module_version = Unicode(semver_range_frontend).tag(sync=True)
    _model_module_version = Unicode(semver_range_frontend).tag(sync=True)
    x = Array(default_value=None).tag(sync=True,
                                      **array_sequence_serialization)
    y = Array(default_value=None).tag(sync=True,
                                      **array_sequence_serialization)
    z = Array(default_value=None).tag(sync=True,
                                      **array_sequence_serialization)
    vx = Array(default_value=None,
               allow_none=True).tag(sync=True, **array_sequence_serialization)
    vy = Array(default_value=None,
               allow_none=True).tag(sync=True, **array_sequence_serialization)
    vz = Array(default_value=None,
               allow_none=True).tag(sync=True, **array_sequence_serialization)
    selected = Array(default_value=None,
                     allow_none=True).tag(sync=True,
                                          **array_sequence_serialization)
    sequence_index = Integer(default_value=0).tag(sync=True)
    size = traitlets.Union([
        Array(default_value=None, allow_none=True).tag(
            sync=True, **array_sequence_serialization),
        traitlets.Float().tag(sync=True)
    ],
                           default_value=5).tag(sync=True)
    size_selected = traitlets.Union([
        Array(default_value=None, allow_none=True).tag(
            sync=True, **array_sequence_serialization),
        traitlets.Float().tag(sync=True)
    ],
                                    default_value=7).tag(sync=True)
    color = Array(default_value="red",
                  allow_none=True).tag(sync=True, **color_serialization)
    color_selected = traitlets.Union([
        Array(default_value=None, allow_none=True).tag(sync=True,
                                                       **color_serialization),
        Unicode().tag(sync=True)
    ],
                                     default_value="green").tag(sync=True)
    geo = traitlets.Unicode('diamond').tag(sync=True)
    connected = traitlets.CBool(default_value=False).tag(sync=True)
    visible = traitlets.CBool(default_value=True).tag(sync=True)
    visible_lines = traitlets.CBool(default_value=False).tag(sync=True)
    visible_markers = traitlets.CBool(default_value=True).tag(sync=True)
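The size, size_selected and color traits use traitlets.Union so that a single scalar (a float or a color name) and a per-point array are both acceptable. A hedged sketch of that scalar-or-sequence idea, with a plain List standing in for the project's numpy-backed Array trait:

import traitlets as T

class MiniScatter(T.HasTraits):
    # One value for every marker, or one value per marker.
    size = T.Union([T.Float(), T.List(T.Float())], default_value=5.0)
    color = T.Union([T.Unicode(), T.List(T.Unicode())], default_value="red")

s = MiniScatter()
s.size = 2.5                       # scalar branch
s.size = [1.0, 2.0, 3.0]           # per-point branch
s.color = ["red", "green", "blue"]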
Example #6
class FacetSpec(BaseObject):
    """Wrapper for Vega-Lite FacetSpec definition.
    
    Attributes
    ----------
    config: Config
        Configuration object.
    data: Data
        An object describing the data source.
    description: Unicode
        An optional description of this mark for commenting purpose.
    facet: Facet
        
    name: Unicode
        Name of the visualization for later reference.
    spec: Union(LayerSpec, UnitSpec)
        
    transform: Transform
        An object describing filter and new field calculation.
    """
    config = T.Instance(Config, allow_none=True, default_value=None, help="""Configuration object.""")
    data = T.Instance(Data, allow_none=True, default_value=None, help="""An object describing the data source.""")
    description = T.Unicode(allow_none=True, default_value=None, help="""An optional description of this mark for commenting purpose.""")
    facet = T.Instance(Facet, allow_none=True, default_value=None)
    name = T.Unicode(allow_none=True, default_value=None, help="""Name of the visualization for later reference.""")
    spec = T.Union([T.Instance(LayerSpec, allow_none=True, default_value=None), T.Instance(UnitSpec, allow_none=True, default_value=None)])
    transform = T.Instance(Transform, allow_none=True, default_value=None, help="""An object describing filter and new field calculation.""")
    
    def __init__(self, config=None, data=None, description=None, facet=None, name=None, spec=None, transform=None, **kwargs):
        kwds = dict(config=config, data=data, description=description, facet=facet, name=name, spec=spec, transform=transform)
        kwargs.update({k:v for k, v in kwds.items() if v is not None})
        super(FacetSpec, self).__init__(**kwargs)
Example #7
class Shelf(BaseObject):

    skip = ['shorthand', 'config']

    def __init__(self, shorthand, **kwargs):
        kwargs['shorthand'] = shorthand
        super(Shelf, self).__init__(**kwargs)

    def _infer_type(self, data):
        if self.type is None and self.name in data:
            self.type = infer_vegalite_type(data[self.name])

    def _shorthand_changed(self, name, old, new):
        D = parse_shorthand(self.shorthand)
        for key, val in D.items():
            setattr(self, key, val)

    shorthand = T.Unicode('')
    name = T.Unicode('', config=True)
    type = T.Enum(['N', 'O', 'Q', 'T'],
                  default_value=None,
                  allow_none=True,
                  config=True)
    timeUnit = T.Enum(
        ['year', 'month', 'day', 'date', 'hours', 'minutes', 'seconds'],
        default_value=None,
        allow_none=True)
    bin = T.Union([T.Bool(), T.Instance(Bin)], default_value=False)
    sort = T.List(T.Instance(SortItems), default_value=None, allow_none=True)
    aggregate = T.Enum(['avg', 'sum', 'median', 'min', 'max', 'count'],
                       default_value=None,
                       allow_none=True,
                       config=True)
Example #8
class VisualizerBase(W.VBox):
    """
    The basic Visualization class that takes the shape of an ipywidgets.VBox

    :param graph: an rdflib.graph.Graph object or a networkx.classes.graph.Graph object to visualize.
    :param edge_color: a string, the desired color of edges.
    :param node_color: a string, the desired color of nodes.
    :param selected_nodes: a tuple of URIRefs of nodes currently selected either via tap or box select.
    :param selected_edges: a list of currently selected edges; at present only supported with ipycytoscape.
    """

    graph = T.Union((T.Instance(Graph), T.Instance(nx.classes.graph.Graph)),
                    allow_none=True)
    _vis = T.Instance(W.Box, allow_none=True)
    edge_color = T.Unicode(default_value="pink")
    node_color = T.Unicode(default_value="grey")
    selected_nodes = W.trait_types.TypedTuple(trait=T.Instance(URIRef))
    selected_edges = T.List()
    hovered_nodes = W.trait_types.TypedTuple(trait=T.Instance(URIRef))
    hovered_edges = T.List()
    graph_layout = T.Unicode()
    graph_layout_options = W.trait_types.TypedTuple(trait=T.Unicode())
    graph_layout_params = T.Dict()

    @T.default("graph_layout_params")
    def make_params(self):
        return {}
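graph is a Union over two Instance traits, so either an rdflib Graph or a networkx Graph can be assigned and anything else is rejected at assignment time. A hedged, dependency-free sketch with stand-in classes:

import traitlets as T

class RDFGraph:   # stand-in for rdflib.graph.Graph
    pass

class NXGraph:    # stand-in for networkx.classes.graph.Graph
    pass

class MiniVisualizer(T.HasTraits):
    graph = T.Union(
        (T.Instance(RDFGraph), T.Instance(NXGraph)),
        allow_none=True,
    )
    graph_layout_params = T.Dict()

    @T.default("graph_layout_params")
    def _make_params(self):
        return {}

v = MiniVisualizer(graph=NXGraph())
v.graph = None                      # allowed because allow_none=True
print(v.graph_layout_params)        # {}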
Example #9
class RangeFilter(BaseObject):
    """Wrapper for Vega-Lite RangeFilter definition.
    
    Attributes
    ----------
    field: Unicode
        Field to be filtered.
    range: List(Union(CFloat, DateTime))
        Array of inclusive minimum and maximum values for a field value of a data item to be included in the filtered data.
    timeUnit: TimeUnit
        time unit for the field to be filtered.
    """
    field = T.Unicode(allow_none=True,
                      default_value=None,
                      help="""Field to be filtered.""")
    range = T.List(
        T.Union([
            T.CFloat(allow_none=True, default_value=None),
            T.Instance(DateTime, allow_none=True, default_value=None)
        ]),
        allow_none=True,
        default_value=None,
        maxlen=2,
        minlen=2,
        help=
        """Array of inclusive minimum and maximum values for a field value of a data item to be included in the filtered data."""
    )
    timeUnit = TimeUnit(allow_none=True,
                        default_value=None,
                        help="""time unit for the field to be filtered.""")

    def __init__(self, field=None, range=None, timeUnit=None, **kwargs):
        kwds = dict(field=field, range=range, timeUnit=timeUnit)
        kwargs.update({k: v for k, v in kwds.items() if v is not None})
        super(RangeFilter, self).__init__(**kwargs)
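The range trait combines an inner Union with minlen=2 and maxlen=2 on the outer List, so only a [min, max] pair of numbers or datetimes validates. A hedged sketch of that length-constrained list, with datetime.datetime standing in for the Vega-Lite DateTime wrapper:

import datetime
import traitlets as T

class MiniRangeFilter(T.HasTraits):
    range = T.List(
        T.Union([T.CFloat(), T.Instance(datetime.datetime)]),
        minlen=2,
        maxlen=2,
        allow_none=True,
        default_value=None,
    )

rf = MiniRangeFilter()
rf.range = [0, 100]            # ints are coerced to floats by CFloat
try:
    rf.range = [0, 50, 100]    # wrong length -> TraitError
except T.TraitError as err:
    print("rejected:", err)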
Example #10
class TimeScale(QuantitativeScale):

    type = T.Unicode('time')
    nice = T.Union([
        T.Unicode(), T.CFloat(),
        T.Enum(['second', 'minute', 'hour', 'day', 'week', 'month', 'year'])
    ])
Example #11
class QuantitativeScale(Scale):

    type = T.Unicode('quantitative')
    clamp = T.Bool()
    interpolate = T.Unicode()
    nice = T.Union([T.Bool(), T.CFloat()])
    zero = T.Bool()
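Union (like List) expects trait instances rather than bare trait classes, which is why each branch above is instantiated. A hedged sketch of the flag-or-number Union used for nice:

import traitlets as T

class MiniQuantScale(T.HasTraits):
    # "nice" is either an on/off flag or a rough tick count.
    nice = T.Union([T.Bool(), T.CFloat()])

s = MiniQuantScale()
s.nice = True    # Bool branch
s.nice = 10      # Bool rejects 10, CFloat coerces it to 10.0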
Example #12
class OneOfFilter(BaseObject):
    """Wrapper for Vega-Lite OneOfFilter definition.
    
    Attributes
    ----------
    field: Unicode
        Field to be filtered.
    oneOf: List(Union(Unicode, CFloat, Bool, DateTime))
        A set of values that the `field`'s value should be a member of, for a data item included in the filtered data.
    timeUnit: TimeUnit
        time unit for the field to be filtered.
    """
    field = T.Unicode(allow_none=True,
                      default_value=None,
                      help="""Field to be filtered.""")
    oneOf = T.List(
        T.Union([
            T.Unicode(allow_none=True, default_value=None),
            T.CFloat(allow_none=True, default_value=None),
            T.Bool(allow_none=True, default_value=None),
            T.Instance(DateTime, allow_none=True, default_value=None)
        ]),
        allow_none=True,
        default_value=None,
        help=
        """A set of values that the `field`'s value should be a member of, for a data item included in the filtered data."""
    )
    timeUnit = TimeUnit(allow_none=True,
                        default_value=None,
                        help="""time unit for the field to be filtered.""")

    def __init__(self, field=None, oneOf=None, timeUnit=None, **kwargs):
        kwds = dict(field=field, oneOf=oneOf, timeUnit=timeUnit)
        kwargs.update({k: v for k, v in kwds.items() if v is not None})
        super(OneOfFilter, self).__init__(**kwargs)
Example #13
class Reproject(Interpolate):
    """
    Create an Algorithm that evaluates a Node with one set of coordinates, and then interpolates it.
    This can be used to bilinearly interpolate an averaged dataset, for example.

    Attributes
    ----------
    source : Node
        The source node. This node will use its own specified interpolation scheme.
    interpolation : str
        Type of interpolation method to use for the interpolation
    coordinates: Coordinates, Node, str, dict
        Coordinates used to evaluate the source. These can be specified as a dictionary, json-formatted string,
        PODPAC Coordinates, or a PODPAC Node, where the node MUST implement the 'coordinates' attribute.
    """

    coordinates = tl.Union(
        [NodeTrait(),
         tl.Dict(),
         tl.Unicode(),
         tl.Instance(Coordinates)],
        help=
        """Coordinates used to evaluate the source. These can be specified as a dictionary,
                           json-formatted string, PODPAC Coordinates, or a PODPAC Node, where the node MUST implement
                           the 'coordinates' attribute""",
    ).tag(attr=True)

    @tl.validate("coordinates")
    def _validate_coordinates(self, d):
        if isinstance(d["value"],
                      Node) and not hasattr(d["value"], "coordinates"):
            raise ValueError(
                "When specifying the coordinates as a PODPAC Node, this Node must have a 'coordinates' attribute"
            )
        return d["value"]

    @property
    def _coordinates(self):
        if isinstance(self.coordinates, Coordinates):
            return self.coordinates
        elif isinstance(self.coordinates, Node):
            return self.coordinates.coordinates
        elif isinstance(self.coordinates, dict):
            return Coordinates.from_definition(self.coordinates)
        elif isinstance(self.coordinates, string_types):
            return Coordinates.from_json(self.coordinates)
        else:
            raise TypeError("The coordinates attribute is of the wrong type.")

    def _source_eval(self, coordinates, selector, output=None):
        return self.source.eval(self._coordinates,
                                output=output,
                                _selector=selector)

    @property
    def base_ref(self):
        return "{}_reprojected".format(self.source.base_ref)
Example #14
class PathLoader(W.HBox, BaseLoader):
    """Loader that selects from a list of files in a path"""

    label = T.Instance(W.Label)
    path = T.Union([T.Unicode(), T.Instance(Path)], allow_none=True)
    file_picker = T.Instance(W.Dropdown)

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.children = tuple([self.label, self.file_picker])
        T.link((self, "description"), (self.label, "value"))
        self.file_picker.observe(self._file_picked, "value")
        self._path_changed()
        self._file_picked()

    @T.default("description")
    def make_default_description(self):
        return "Select file"

    @T.default("label")
    def make_default_label(self):
        return W.Label()

    @T.default("file_picker")
    def make_default_file_picker(self):
        """TODO: revisit for multiple files, e.g. checkboxes"""
        return W.Dropdown()

    def _file_picked(self, change=None):
        value = self.file_picker.value
        if not value:
            self.file_upload_value = {}
            return

        text = value.read_text(encoding="utf-8")
        self.file_upload_value = {
            value.name: dict(metadata=dict(size=len(text)), content=text)
        }

    @T.observe("path")
    def _path_changed(self, change=None):
        options = {"Select a file...": ""}

        if not self.path:
            self.file_picker.options = options
            return

        path = Path(self.path)

        if path.is_dir():
            globs = [
                sorted(path.glob(f"*.{ext}")) for ext in SUFFIX_FORMAT_MAP
            ]
            options.update(**{p.name: p for p in sorted(sum([*globs], []))})

        self.file_picker.options = options
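The loader fills in its child widgets lazily with @T.default and reacts to path changes through @T.observe. A hedged sketch of that default/observe wiring with only traitlets (the ipywidgets pieces are left out and the names are illustrative):

import traitlets as T

class MiniLoader(T.HasTraits):
    path = T.Unicode(allow_none=True, default_value=None)
    options = T.Dict()

    @T.default("options")
    def _default_options(self):
        return {"Select a file...": ""}

    @T.observe("path")
    def _path_changed(self, change=None):
        options = {"Select a file...": ""}
        if self.path:
            # The real class globs the directory here; we fake a single entry.
            options[self.path] = self.path
        self.options = options

loader = MiniLoader()
loader.path = "data"
print(list(loader.options))   # ['Select a file...', 'data']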
Example #15
class FacetedChart(schema.FacetSpec, TopLevelMixin):
    _data = None

    # Use specialized version of Facet, spec, and Transform
    facet = T.Instance(Facet, allow_none=True, default_value=None,
                       help=schema.FacetSpec.facet.help)
    spec = T.Union([T.Instance(LayeredChart), T.Instance(Chart)],
                   allow_none=True, default_value=None,
                   help=schema.FacetSpec.spec.help)
    transform = T.Instance(Transform, allow_none=True, default_value=None,
                           help=schema.FacetSpec.transform.help)

    @property
    def data(self):
        return self._data

    @data.setter
    def data(self, new):
        if isinstance(new, string_types):
            self._data = Data(url=new)
        elif (new is None or isinstance(new, pd.DataFrame)
              or isinstance(new, expr.DataFrame) or isinstance(new, Data)):
            self._data = new
        else:
            raise TypeError('Expected DataFrame or altair.Data, got: {0}'.format(new))

    skip = ['data', '_data']

    def __init__(self, data=None, **kwargs):
        super(FacetedChart, self).__init__(**kwargs)
        self.data = data

    def __dir__(self):
        base = super(FacetedChart, self).__dir__()
        methods = [
            'to_dict', 'from_dict', 'to_altair', 'display',
            'configure', 'configure_axis', 'configure_cell',
            'configure_legend', 'configure_mark', 'configure_scale',
            'configure_facet_axis', 'configure_facet_cell',
            'configure_facet_grid', 'configure_facet_scale',
            'transform_data',
            'set_facet',
        ]
        return base + methods

    @use_signature(Facet)
    def set_facet(self, *args, **kwargs):
        """Define the facet encoding for the Chart."""
        return self._update_subtraits('facet', *args, **kwargs)

    def _finalize(self, **kwargs):
        self._finalize_data()
        # data comes from wrappers, but self.data overrides this if defined
        if self.data is not None:
            kwargs['data'] = self.data
        super(FacetedChart, self)._finalize(**kwargs)
Example #16
class FacetedChart(schema.FacetSpec, TopLevelMixin):
    _data = None

    # Use specialized version of Facet, spec, and Transform
    facet = T.Instance(Facet,
                       allow_none=True,
                       default_value=None,
                       help=schema.FacetSpec.facet.help)
    spec = T.Union(
        [T.Instance(LayeredChart), T.Instance(Chart)],
        allow_none=True,
        default_value=None,
        help=schema.FacetSpec.spec.help)
    transform = T.Instance(Transform,
                           allow_none=True,
                           default_value=None,
                           help=schema.FacetSpec.transform.help)
    max_rows = T.Int(default_value=DEFAULT_MAX_ROWS,
                     help="Maximum number of rows in the dataset to accept.")

    @property
    def data(self):
        return self._data

    @data.setter
    def data(self, new):
        if isinstance(new, string_types):
            self._data = Data(url=new)
        elif (new is None or isinstance(new, pd.DataFrame)
              or isinstance(new, expr.DataFrame) or isinstance(new, Data)):
            self._data = new
        else:
            raise TypeError(
                'Expected DataFrame or altair.Data, got: {0}'.format(new))

    skip = ['data', '_data', 'max_rows']

    def __init__(self, data=None, **kwargs):
        super(FacetedChart, self).__init__(**kwargs)
        self.data = data

    def __dir__(self):
        return [m for m in dir(self.__class__) if m not in dir(T.HasTraits)]

    @use_signature(Facet)
    def set_facet(self, *args, **kwargs):
        """Define the facet encoding for the Chart."""
        return self._update_subtraits('facet', *args, **kwargs)

    def _finalize(self, **kwargs):
        self._finalize_data()
        # data comes from wrappers, but self.data overrides this if defined
        if self.data is not None:
            kwargs['data'] = self.data
        super(FacetedChart, self)._finalize(**kwargs)
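data is deliberately kept outside the traitlets layer: it is a plain property backed by _data, with type checking in the setter, and the skip list keeps it out of trait serialization. A hedged miniature of that property-plus-traits split (the validation here is illustrative, not altair's):

import traitlets as T

class MiniChart(T.HasTraits):
    max_rows = T.Int(default_value=5000)
    _data = None

    @property
    def data(self):
        return self._data

    @data.setter
    def data(self, new):
        if new is not None and not isinstance(new, (dict, list)):
            raise TypeError("Expected dict, list or None, got: {0!r}".format(new))
        self._data = new

c = MiniChart(max_rows=100)
c.data = [{"x": 1}, {"x": 2}]
print(c.max_rows, len(c.data))   # 100 2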
Example #17
class MultiToggleButtons(Box):
    description = traitlets.Unicode()
    value = traitlets.Tuple()
    options = traitlets.Union([traitlets.List(), traitlets.Dict()])
    style = traitlets.Dict()

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self._selection_obj = widget_selection._MultipleSelection()
        traitlets.link((self, 'options'), (self._selection_obj, 'options'))
        traitlets.link((self, 'value'), (self._selection_obj, 'value'))

        @observer(self, 'options')
        def _(*_):
            self.buttons = [
                ToggleButton(description=label,
                             layout=Layout(margin='1', width='auto'))
                for label in self._selection_obj._options_labels
            ]
            if self.description:
                self.label = Label(
                    self.description,
                    layout=Layout(
                        width=self.style.get('description_width', '100px')))
            else:
                self.label = Label(
                    self.description,
                    layout=Layout(
                        width=self.style.get('description_width', '0px')))
            self.children = [self.label] + self.buttons

            @observer(self.buttons, 'value')
            def _(*_):
                self.value = tuple(value for btn, value in zip(
                    self.buttons, self._selection_obj._options_values)
                                   if btn.value)

        self.add_class('btn-group')

    def reset(self):
        opts = self.options
        self.options = []
        self.options = opts

    def set_value(self, x):
        for b, opt in zip(self.buttons, self.options):
            b.value = (opt in x)

    def set_all_on(self):
        for b, opt in zip(self.buttons, self.options):
            b.value = True

    def set_all_off(self):
        for b, opt in zip(self.buttons, self.options):
            b.value = False
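traitlets.link keeps the widget's options and value in lockstep with the hidden _MultipleSelection helper. A hedged sketch of link on its own, between two plain HasTraits objects:

import traitlets as T

class Source(T.HasTraits):
    value = T.Tuple()

class Mirror(T.HasTraits):
    value = T.Tuple()

a, b = Source(), Mirror()
T.link((a, "value"), (b, "value"))   # two-way link

a.value = ("x", "y")
print(b.value)    # ('x', 'y') - propagated through the link
b.value = ("z",)
print(a.value)    # ('z',)     - and back again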
Example #18
class Transform(schema.Transform):
    filter = T.Union([T.Unicode(), T.Instance(expr.Expression)],
                     allow_none=True,
                     default_value=None,
                     help=schema.Transform.filter.help)

    def _finalize(self, **kwargs):
        """Finalize object: convert filter expression to string"""
        if isinstance(self.filter, expr.Expression):
            self.filter = repr(self.filter)
        super(Transform, self)._finalize(**kwargs)
Example #19
class Mesh(widgets.Widget):
    _view_name = Unicode('MeshView').tag(sync=True)
    _view_module = Unicode('ipyvolume').tag(sync=True)
    _model_name = Unicode('MeshModel').tag(sync=True)
    _model_module = Unicode('ipyvolume').tag(sync=True)
    _view_module_version = Unicode(semver_range_frontend).tag(sync=True)
    _model_module_version = Unicode(semver_range_frontend).tag(sync=True)
    x = Array(default_value=None).tag(sync=True,
                                      **array_sequence_serialization)
    y = Array(default_value=None).tag(sync=True,
                                      **array_sequence_serialization)
    z = Array(default_value=None).tag(sync=True,
                                      **array_sequence_serialization)
    u = Array(default_value=None,
              allow_none=True).tag(sync=True, **array_sequence_serialization)
    v = Array(default_value=None,
              allow_none=True).tag(sync=True, **array_sequence_serialization)
    triangles = Array(default_value=None,
                      allow_none=True).tag(sync=True, **array_serialization)
    lines = Array(default_value=None,
                  allow_none=True).tag(sync=True, **array_serialization)
    texture = traitlets.Union([
        traitlets.Instance(ipywebrtc.MediaStream),
        Unicode(),
        traitlets.List(Unicode, [], allow_none=True),
        Image(default_value=None, allow_none=True),
        traitlets.List(Image(default_value=None, allow_none=True)),
    ]).tag(sync=True, **texture_serialization)

    sequence_index = Integer(default_value=0).tag(sync=True)
    color = Array(default_value="red",
                  allow_none=True).tag(sync=True, **color_serialization)
    visible = traitlets.CBool(default_value=True).tag(sync=True)

    material = traitlets.Instance(
        pythreejs.ShaderMaterial,
        help='A :any:`pythreejs.ShaderMaterial` that is used for the mesh'
    ).tag(sync=True, **widgets.widget_serialization)

    @traitlets.default('material')
    def _default_material(self):
        return pythreejs.ShaderMaterial(side=pythreejs.enums.Side.DoubleSide)

    line_material = traitlets.Instance(
        pythreejs.ShaderMaterial,
        help=
        'A :any:`pythreejs.ShaderMaterial` that is used for the lines/wireframe'
    ).tag(sync=True, **widgets.widget_serialization)

    @traitlets.default('line_material')
    def _default_line_material(self):
        return pythreejs.ShaderMaterial()
Example #20
class Mesh(widgets.DOMWidget):
    _view_name = Unicode('MeshView').tag(sync=True)
    _view_module = Unicode('ipyvolume').tag(sync=True)
    _model_name = Unicode('MeshModel').tag(sync=True)
    _model_module = Unicode('ipyvolume').tag(sync=True)
    _view_module_version = Unicode(semver_range_frontend).tag(sync=True)
    _model_module_version = Unicode(semver_range_frontend).tag(sync=True)
    x = Array(default_value=None).tag(sync=True,
                                      **array_sequence_serialization)
    y = Array(default_value=None).tag(sync=True,
                                      **array_sequence_serialization)
    z = Array(default_value=None).tag(sync=True,
                                      **array_sequence_serialization)
    u = Array(default_value=None,
              allow_none=True).tag(sync=True, **array_sequence_serialization)
    v = Array(default_value=None,
              allow_none=True).tag(sync=True, **array_sequence_serialization)
    triangles = Array(default_value=None,
                      allow_none=True).tag(sync=True, **array_serialization)
    lines = Array(default_value=None,
                  allow_none=True).tag(sync=True, **array_serialization)
    texture = traitlets.Union([
        traitlets.Instance(ipywebrtc.MediaStream),
        Unicode(),
        traitlets.List(Unicode, [], allow_none=True),
        Image(default_value=None, allow_none=True),
        traitlets.List(Image(default_value=None, allow_none=True))
    ]).tag(sync=True, **texture_serialization)

    #    selected = Array(default_value=None, allow_none=True).tag(sync=True, **array_sequence_serialization)
    sequence_index = Integer(default_value=0).tag(sync=True)
    color = Array(default_value="red",
                  allow_none=True).tag(sync=True, **color_serialization)
    #    color_selected = traitlets.Union([Array(default_value=None, allow_none=True).tag(sync=True, **color_serialization),
    #                                     Unicode().tag(sync=True)],
    #                                     default_value="green").tag(sync=True)
    #    geo = traitlets.Unicode('diamond').tag(sync=True)
    visible = traitlets.CBool(default_value=True).tag(sync=True)

    material = traitlets.Instance(pythreejs.ShaderMaterial).tag(
        sync=True, **ipywidgets.widget_serialization)

    @traitlets.default('material')
    def _default_material(self):
        return pythreejs.ShaderMaterial(side=pythreejs.Side.DoubleSide)

    line_material = traitlets.Instance(pythreejs.ShaderMaterial).tag(
        sync=True, **ipywidgets.widget_serialization)

    @traitlets.default('line_material')
    def _default_line_material(self):
        return pythreejs.ShaderMaterial()
Example #21
class Formula(schema.Formula):
    expr = T.Union([T.Unicode(), T.Instance(expr.Expression)],
                    allow_none=True, default_value=None,
                    help=schema.Formula.expr.help)

    def __init__(self, field, expr=None, **kwargs):
        super(Formula, self).__init__(field=field, expr=expr, **kwargs)

    def _finalize(self, **kwargs):
        """Finalize object: convert expr expression to string if necessary"""
        if isinstance(self.expr, expr.Expression):
            self.expr = repr(self.expr)
        super(Formula, self)._finalize(**kwargs)
Example #22
class Scale(T.HasTraits):

    name = T.Unicode('')
    type = T.Unicode('')
    domain = T.Union([T.List(), T.Tuple()])
    domainMax = T.CFloat()
    domainMin = T.CFloat()
    domainMid = T.CFloat()
    domainRaw = T.Union([T.List(), T.Tuple()])
    range = T.Union([
        T.List(), T.Tuple(),
        T.Enum([
            'width', 'height', 'symbol', 'category', 'diverging', 'ordinal',
            'ramp', 'heatmap'
        ])
    ])
    reverse = T.Bool()
    round = T.Bool()

    def __call__(self, *args, **kwargs):
        raise NotImplementedError(
            'Scale operation not implemented, use a subclass.')
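range accepts either an explicit list/tuple of values or one of Vega's named range keywords, captured by the Enum branch. A hedged sketch of that values-or-keyword Union:

import traitlets as T

class MiniScale(T.HasTraits):
    range = T.Union([
        T.List(), T.Tuple(),
        T.Enum(["width", "height", "category", "ordinal", "ramp", "heatmap"]),
    ])

s = MiniScale()
s.range = [0, 500]     # explicit range values
s.range = "width"      # or a named range keyword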
Example #23
def interpolation_trait(default_value=INTERPOLATION_DEFAULT):
    """Create a new interpolation trait
    
    Returns
    -------
    tl.Union
        Union trait for an interpolation definition
    """
    return tl.Union([
        tl.Dict(),
        tl.Enum(INTERPOLATION_SHORTCUTS),
        tl.Instance(Interpolation)
    ],
                    allow_none=True,
                    default_value=default_value)
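Because the Union is built inside a factory function, every class that needs an interpolation trait can declare one with its own default while sharing the same validation rules. A hedged sketch of the trait-factory pattern (the shortcut names and defaults below are made up):

import traitlets as tl

SHORTCUTS = ["nearest", "bilinear", "cubic"]   # illustrative stand-in

def interp_trait(default_value="nearest"):
    """Return a Union trait accepting a shortcut name or a config dict."""
    return tl.Union(
        [tl.Enum(SHORTCUTS), tl.Dict()],
        allow_none=True,
        default_value=default_value,
    )

class MiniNode(tl.HasTraits):
    interpolation = interp_trait()                # default "nearest"
    fine_interpolation = interp_trait("cubic")    # same rules, other default

n = MiniNode()
print(n.interpolation, n.fine_interpolation)      # nearest cubic
n.interpolation = {"method": "bilinear"}          # dict branch also validates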
Example #24
File: api.py Project: mindis/altair
class Shelf(BaseObject):
    # TODO: Supported enums & supported types for aggregate
    # TODO: Supported types for timeunit
    # TODO: Supported types for bin
    # TODO: supported role?
    # TODO: supported mark types?
    # TODO: assert name and type are required

    skip = ['shorthand', 'config']

    def __init__(self, shorthand, **kwargs):
        kwargs['shorthand'] = shorthand
        super(Shelf, self).__init__(**kwargs)

    def _infer_type(self, data):
        if self.type is None and self.name in data:
            self.type = infer_vegalite_type(data[self.name])

    def _shorthand_changed(self, name, old, new):
        # TODO: if name of shorthand changed, should it reset all properties of obj?
        D = parse_shorthand(self.shorthand)
        for key, val in D.items():
            setattr(self, key, val)

    def to_dict(self):
        if not self.name:
            return None
        return super(Shelf, self).to_dict()

    shorthand = T.Unicode('')
    name = T.Unicode('', config=True)
    type = T.Enum(['N', 'O', 'Q', 'T'],
                  default_value=None,
                  allow_none=True,
                  config=True)
    timeUnit = T.Enum(
        ['year', 'month', 'day', 'date', 'hours', 'minutes', 'seconds'],
        default_value=None,
        allow_none=True)
    bin = T.Union([T.Bool(), T.Instance(Bin)], default_value=False)
    sort = T.List(T.Instance(SortItems), default_value=None, allow_none=True)
    aggregate = T.Enum(['avg', 'sum', 'median', 'min', 'max', 'count'],
                       default_value=None,
                       allow_none=True,
                       config=True)
Example #25
class Mesh(widgets.DOMWidget):
    _view_name = Unicode('MeshView').tag(sync=True)
    _view_module = Unicode('ipyvolume').tag(sync=True)
    _model_name = Unicode('MeshModel').tag(sync=True)
    _model_module = Unicode('ipyvolume').tag(sync=True)
    _view_module_version = Unicode(semver_range_frontend).tag(sync=True)
    _model_module_version = Unicode(semver_range_frontend).tag(sync=True)
    x = Array(default_value=None).tag(sync=True,
                                      **array_sequence_serialization)
    y = Array(default_value=None).tag(sync=True,
                                      **array_sequence_serialization)
    z = Array(default_value=None).tag(sync=True,
                                      **array_sequence_serialization)
    u = Array(default_value=None,
              allow_none=True).tag(sync=True, **array_sequence_serialization)
    v = Array(default_value=None,
              allow_none=True).tag(sync=True, **array_sequence_serialization)
    triangles = Array(default_value=None,
                      allow_none=True).tag(sync=True, **array_serialization)
    lines = Array(default_value=None,
                  allow_none=True).tag(sync=True, **array_serialization)
    texture = traitlets.Union([
        traitlets.Instance(ipywebrtc.MediaStream),
        Unicode(),
        traitlets.List(Unicode, [], allow_none=True),
        Image(default_value=None, allow_none=True),
        traitlets.List(Image(default_value=None, allow_none=True))
    ]).tag(sync=True, **texture_serialization)

    #    selected = Array(default_value=None, allow_none=True).tag(sync=True, **array_sequence_serialization)
    sequence_index = Integer(default_value=0).tag(sync=True)
    color = Array(default_value="red",
                  allow_none=True).tag(sync=True, **color_serialization)
    #    color_selected = traitlets.Union([Array(default_value=None, allow_none=True).tag(sync=True, **color_serialization),
    #                                     Unicode().tag(sync=True)],
    #                                     default_value="green").tag(sync=True)
    #    geo = traitlets.Unicode('diamond').tag(sync=True)
    visible = traitlets.CBool(default_value=True).tag(sync=True)
    visible_lines = traitlets.CBool(default_value=True).tag(sync=True)
    visible_faces = traitlets.CBool(default_value=True).tag(sync=True)

    side = traitlets.CaselessStrEnum(['front', 'back', 'both'],
                                     'both').tag(sync=True)
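side uses CaselessStrEnum, so 'front', 'Front' and 'FRONT' all normalize to the same allowed value. A hedged sketch of that trait on its own:

import traitlets

class MiniMesh(traitlets.HasTraits):
    side = traitlets.CaselessStrEnum(["front", "back", "both"], "both")

m = MiniMesh()
m.side = "FRONT"
print(m.side)   # 'front' - matched case-insensitively against the allowed values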
Example #26
class Transform(BaseObject):
    """Wrapper for Vega-Lite Transform definition.
    
    Attributes
    ----------
    calculate: List(Formula)
        Calculate new field(s) using the provided expression(s).
    filter: Union(Unicode, EqualFilter, RangeFilter, OneOfFilter, List(Union(Unicode, EqualFilter, RangeFilter, OneOfFilter)))
        A string containing the filter Vega expression.
    filterInvalid: Bool
        Whether to filter invalid values (`null` and `NaN`) from the data.
    """
    calculate = T.List(T.Instance(Formula), allow_none=True, default_value=None, help="""Calculate new field(s) using the provided expression(s).""")
    filter = T.Union([
        T.Unicode(allow_none=True, default_value=None),
        T.Instance(EqualFilter, allow_none=True, default_value=None),
        T.Instance(RangeFilter, allow_none=True, default_value=None),
        T.Instance(OneOfFilter, allow_none=True, default_value=None),
        T.List(
            T.Union([
                T.Unicode(allow_none=True, default_value=None),
                T.Instance(EqualFilter, allow_none=True, default_value=None),
                T.Instance(RangeFilter, allow_none=True, default_value=None),
                T.Instance(OneOfFilter, allow_none=True, default_value=None)
            ]),
            allow_none=True,
            default_value=None)
    ])
    filterInvalid = T.Bool(allow_none=True, default_value=None, help="""Whether to filter invalid values (`null` and `NaN`) from the data.""")
    
    def __init__(self, calculate=None, filter=None, filterInvalid=None, **kwargs):
        kwds = dict(calculate=calculate, filter=filter, filterInvalid=filterInvalid)
        kwargs.update({k:v for k, v in kwds.items() if v is not None})
        super(Transform, self).__init__(**kwargs)
Example #27
class EqualFilter(BaseObject):
    """Wrapper for Vega-Lite EqualFilter definition.
    
    Attributes
    ----------
    equal: Union(Unicode, CFloat, Bool, DateTime)
        Value that the field should be equal to.
    field: Unicode
        Field to be filtered.
    timeUnit: TimeUnit
        Time unit for the field to be filtered.
    """
    equal = T.Union([T.Unicode(allow_none=True, default_value=None), T.CFloat(allow_none=True, default_value=None), T.Bool(allow_none=True, default_value=None), T.Instance(DateTime, allow_none=True, default_value=None)])
    field = T.Unicode(allow_none=True, default_value=None, help="""Field to be filtered.""")
    timeUnit = TimeUnit(allow_none=True, default_value=None, help="""Time unit for the field to be filtered.""")
    
    def __init__(self, equal=None, field=None, timeUnit=None, **kwargs):
        kwds = dict(equal=equal, field=field, timeUnit=timeUnit)
        kwargs.update({k:v for k, v in kwds.items() if v is not None})
        super(EqualFilter, self).__init__(**kwargs)
Example #28
File: api.py Project: mindis/altair
class ColorScale(Scale):
    """Scale object that adds additional properties to the Scale property for Color"""

    range = T.Union([T.Unicode(), T.List(T.Unicode)],
                    default_value=None,
                    allow_none=True)
    c10palette = T.Enum([
        'category10', 'category10k', 'Pastel1', 'Pastel2', 'Set1', 'Set2',
        'Set3'
    ],
                        default_value='category10')
    c20palette = T.Enum(['category20', 'category20b', 'category20c'],
                        default_value='category20')
    ordinalPalette = T.Enum([
        'YlGn', 'YlGnBu', 'GnBu', 'BuGn', 'PuBuGn', 'PuBu', 'BuPu', 'RdPu',
        'PuRd', 'OrRd', 'YlOrRd', 'YlOrBr', 'Purples', 'Blues', 'Greens',
        'Oranges', 'Reds', 'Greys', 'PuOr', 'BrBG', 'PRGn', 'PiYG', 'RdBu',
        'RdGy', 'RdYlBu', 'Spectral', 'RdYlGn', 'Accent', 'Dark2', 'Paired',
        'Pastel1', 'Pastel2', 'Set1', 'Set2', 'Set3'
    ],
                            default_value='BuGn')
Example #29
class FractionalSelectionModel(traitlets.HasTraits):
	response_fn = SubclassName(SelectionResponseFn)

	@property
	def response_impl(self):
	   return resolve_subclass(SelectionResponseFn, self.response_fn)()

	#sel_k = traitlets.Dict(
	#    traits = dict(
	#        __class__ = SubclassName(Continuous)
	#    ),
	#    default_value = dict(
	#        __class__="FlatNormal",
	#        mu=1.63, #4.0, #1.63,
	#        sd=0.00002, #0.0001, #0.00002,
	#        w=1.02 #2.5 #1.02,
	#    )
	#)
	#
	#@property
	#def sel_k_class(self):
	#    return resolve_subclass(Continuous, self.sel_k["__class__"])
	#
	#@property
	#def sel_k_kwargs(self):
	#    kwargs = dict(self.sel_k)
	#    kwargs.pop("__class__")
	#    return kwargs

	sel_k = traitlets.Float()#min_selection_rate

	min_selection_rate = traitlets.Union([
		traitlets.Bool(),
		traitlets.Float()
	])

	min_selection_mass = traitlets.Union([
		traitlets.Enum(["global", "per_selection"]),
		traitlets.Float()
	])

	homogenous_k = traitlets.Bool(default_value=True)

	outlier_detection_opt_cycles = traitlets.Integer(default_value=1)

	def __init__(self, **kwargs):
		# Override 'super' error-handling logic in HasTraits base __init__
		# __init__ swallows errors from unused kwargs until v4.3
		for key in kwargs:
			if not self.has_trait(key):
				raise TypeError("__init__() got an unexpected keyword argument '%s'" % key )
		traitlets.HasTraits.__init__(self, **kwargs)

	@staticmethod
	def lognorm_params(sd, mode):
		return dict(
			tau = sd**-2.,
			mu = numpy.log(mode) + sd ** 2,
		)

	@staticmethod
	def parent_depth(start_key, pop_specs):
		seen_keys = set()
		cur_depth = 0
		key = start_key

		while pop_specs[key]["parent"] is not None:
			seen_keys.add(key)
			parent = pop_specs[key]["parent"]

			if parent in seen_keys:
				raise ValueError(
					"Cycle in population parents. start_key: %s cycle_key: %s" %
					(start_key, pop_specs[key]["parent"]))
			if parent not in pop_specs:
				raise ValueError(
					"Invalid parent specified: %s pop_specs: %s" %
					(parent, list(pop_specs.keys())))
			cur_depth += 1
			key = parent

		return cur_depth

	def generate_data(self, pop_specs, sel_k, sel_ec50, init_pop):
		populations = {}

		for pkey in sorted(list(pop_specs.keys()), key=lambda pkey: self.parent_depth(pkey, pop_specs)):
			p = pop_specs[pkey]
			if p["parent"] is None:
				start_pop = init_pop
			else:
				start_pop = populations[p["parent"]]["P_sel"]

			start_dist = unorm(start_pop)
			if p["selection_level"] is not None:
				src_dist = start_dist * self.response_impl.selection_mass(
					sel_level = p["selection_level"], sel_k = sel_k, sel_ec50 = sel_ec50,
					**{param : p[param] for param in self.response_impl.population_params}
				)
				fraction_selected = src_dist.sum() / start_dist.sum()
			else:
				src_dist = start_dist
				fraction_selected = 1

			selected = numpy.random.multinomial(p["P_sel"], unorm(src_dist)).astype(float)

			populations[pkey] = {}
			populations[pkey].update(p)
			populations[pkey].update({
				"P_sel" : selected,
				"Frac_sel_pop" : fraction_selected
			})

		return populations

	def add_fit_param(self, name, dist):
		var = self.model.Var(name, dist, data=None)

		if dist.transform:
			forward_trans = dist.transform.forward(var)
			self.to_trans[name] = (
				"%s_%s_" % (name, dist.transform.name),
				lambda v: forward_trans.eval({var : v})
			)


		self.fit_params[name] = var

		return var

	def build_model(self, population_data):
		for k, p in list(population_data.items()):
			unused_keys = set(p.keys()).difference(
				["P_sel", "Frac_sel_pop", "selection_level", "parent"] +
				list(self.response_impl.population_params)
			)
			if unused_keys:
				logger.warning("Unused keys in population_data[%r] : %s", k, unused_keys)

		num_members = set( len(p["P_sel"]) for p in list(population_data.values()) )
		assert len(num_members) == 1, "Different observed population memberships: %s" % num_members
		self.num_members = num_members.pop()
		selected_observations = {
			v["selection_level"] : v["P_sel"]
			for v in list(population_data.values()) if v["selection_level"] is not None
		}

		start_ec50 = numpy.full_like(list(selected_observations.values())[0], min(selected_observations) - 1)
		for sl in selected_observations:
			start_ec50[ ((sl - 1) > start_ec50) & (selected_observations[sl] > 0) ] = sl - 1

		self.model = pymc3.Model()

		self.to_trans = {}
		self.fit_params = {}

		self.model_populations = {}
		self.population_data = population_data

		pops_by_depth = sorted(
			list(population_data.keys()),
			key=lambda pkey: self.parent_depth(pkey, population_data))
		self.modeled_populations = [ p for p in pops_by_depth if population_data[p]["parent"] is not None ]

		with self.model:
			#sel_k = self.add_fit_param(
			#    "sel_k",
			#    self.sel_k_class.dist(**self.sel_k_kwargs))

			#sel_k = self.add_fit_param(
			#            "sel_k",
			#            FlatNormal.dist(**default_selk_dict[self.sel_k_dict]))

			sel_k=self.sel_k

			sel_values = set(
				float(p["selection_level"])
				for p in list(self.population_data.values())
				if p["selection_level"] is not None
			)
			sel_mag = max(sel_values) - min(sel_values)
			self.sel_range = dict(lower=min(sel_values) - sel_mag * .5, upper=max(sel_values)+sel_mag*.5)
			logger.info("Inferred sel_ec50 range: %s", self.sel_range)

			sel_ec50 = self.add_fit_param(
				"sel_ec50",
				pymc3.Uniform.dist(
					shape=self.num_members,
					testval=start_ec50,
					**self.sel_range)
				)

			if self.min_selection_rate:
				if isinstance(self.min_selection_rate, bool):
					logger.info("Adding adaptive min_selection_rate.")
					min_selection_rate = self.add_fit_param(
						"min_selection_rate",
						pymc3.HalfNormal.dist(sd=.0002, testval=.0001))
				else:
					logger.info("Adding const min_selection_rate: %.03f" % self.min_selection_rate)
					min_selection_rate = float(self.min_selection_rate)
			else:
				min_selection_rate = 0.0

			if self.min_selection_mass:
				if self.min_selection_mass == "global":
					logger.info("Adding global adaptive min_selection_mass.")
					min_selection_mass = self.add_fit_param(
						"min_selection_mass",
						pymc3.HalfNormal.dist(sd=1e-3, testval=1e-12))
				elif self.min_selection_mass == "per_selection":
					logger.info("Adding per-selection adaptive min_selection_mass.")
					min_selection_mass = self.add_fit_param(
						"min_selection_mass",
						pymc3.HalfNormal.dist(shape=len(self.modeled_populations), sd=1e-3, testval=1e-12))
				else:
					logger.info("Adding const min_selection_mass: %.03f" % self.min_selection_mass)
					min_selection_mass = float(self.min_selection_mass)
			else:
				min_selection_mass = 0.0

			for pidx, pkey in enumerate(self.modeled_populations): # Sum over all rounds as in Eq. 15
				pdat = population_data[pkey]
				p_min_selection_mass = (
					min_selection_mass[pidx]
						if self.min_selection_mass == "per_selection" else
					min_selection_mass
				)

				start_pop = population_data[pdat["parent"]]["P_sel"].astype(float)
				P_in = unorm(start_pop)

				if pdat["selection_level"] is not None:
					Frac_sel = self.response_impl.selection_mass(                                  # Eq. 10, where "sel_k" is K_sel and
						sel_level = pdat["selection_level"], sel_k = sel_k, sel_ec50 = sel_ec50,   # sel_ec50 is the vector of EC_50s, and
						**{param : pdat[param] for param in self.response_impl.population_params}  # selection_level is the enzyme "concentration" on a
					)                                                                              # log scale, with base specified in the input.

					Frac_sel_star = min_selection_rate + (Frac_sel * (1 - min_selection_rate))     # Eq. 11, with min_selection_rate as "a"
					P_cleave_prenormalized = P_in * Frac_sel_star    # The numerator of Eq. 12, normalized shortly

					Frac_sel_pop = P_cleave_prenormalized.sum() / P_in.sum() # Precalculate this for Eq. 14


				else:
					P_cleave_prenormalized = P_in
					Frac_sel_pop = 1.0

				#multinomial formula returns nan if any p == 0, file bug?
				# Add epsilon to selection prob to avoid nan-results when p==0

				P_cleave = unorm(                                                                     # Finish calculating P_cleave per Eq. 12 by normalizing it to sum to 1.
					T.clip(P_cleave_prenormalized, (p_min_selection_mass + 1e-9) * Frac_sel_pop, 1))  # A small lower bound is applied to every probability to avoid nan results
				pop_mask = numpy.flatnonzero(start_pop > 0)                                           # when p == 0; in practice this threshold is rarely reached.

				n_sel = pdat["P_sel"][pop_mask].sum()
				selected = pymc3.distributions.Multinomial(     # Eq. 13
					name = "selected_%s" % pkey,                #
					n=n_sel ,                                   #
					p=P_cleave[pop_mask],
					observed=pdat["P_sel"][pop_mask]
				)

				if pdat.get("Frac_sel_pop", None) is not None:
					n_sel = pdat["P_sel"].sum()
					n_assay = numpy.floor(float(n_sel) / pdat["Frac_sel_pop"])
					total_selected = pymc3.distributions.Binomial(    #
						name = "total_selected_%s" % pkey,            # Eq. 14
						n = n_assay,                                  #
						p = Frac_sel_pop,                             #
						observed = n_sel)                             #
				else:
					total_selected = pdat["P_sel"].sum()

				self.model_populations[pkey] = {
					"selection_mass" : self._function(P_cleave * Frac_sel_pop),
					"P_cleave" : self._function(P_cleave),
					"Frac_sel_pop" : self._function(Frac_sel_pop),
					"P_sel" : self._function(selected),
					"n_sel" : self._function(total_selected),
				}

		self.fit_params = { k : self._function(v) for k, v in list(self.fit_params.items()) }
		self.logp = self._function(self.model.logpt)

		return self

	def optimize_params(self, start = None):
		logger.info("optimize_params: %i members", self.num_members)
		if start is not None:
			start = self.to_transformed(start)
			for k in self.model.test_point:
				if k not in start:
					start[k] = self.model.test_point[k]
		MAP = pymc3.find_MAP(start=start, model=self.model, fmin=scipy.optimize.fmin_l_bfgs_b)

		return { k : v(MAP) for k, v in list(self.fit_params.items()) }

	def opt_ec50_cred_outliers(self, src_params):
		logger.info("scan_ec50_outliers: %i members", self.num_members)
		params = copy.deepcopy(src_params)

		num_outlier = 0

		for i in range(self.num_members):
			if i % 1000 == 0:
				logger.info("scan_ec50_outliers: %i / %i  outlier count: %s", i, self.num_members, num_outlier)

			cred_summary = self.estimate_ec50_cred(params, i)
			current = numpy.searchsorted(cred_summary["xs"], cred_summary["sel_ec50"], "left")
			#rb = numpy.searchsorted(cred_summary["xs"], cred_summary["sel_ec50"], "right")

			m_pmf = cred_summary["pmf"].argmax()

			if m_pmf < current - 1 or m_pmf > current:
				num_outlier += 1
				params["sel_ec50"][i] = cred_summary["xs"][m_pmf]

		logger.info(
			"Modified %.3f outliers. (%i/%i)",
			 num_outlier / self.num_members, num_outlier, self.num_members)

		return params

	def find_MAP(self, start = None):
		params = self.optimize_params(start)

		for _ in range(self.outlier_detection_opt_cycles):

			resampled = self.opt_ec50_cred_outliers(params)
			params = self.optimize_params(resampled)

		return params

	def ec50_logp_trace(self, base_params, sample_i, ec50_range, include_global_terms=True):
		llh_by_ec50_gen = numpy.zeros((len(ec50_range), len(self.model_populations)))

		if self.min_selection_rate:
			if self.min_selection_rate == True:
				min_selection_rate = base_params["min_selection_rate"]
			else:
				min_selection_rate = self.min_selection_rate
		else:
			min_selection_rate = 0

		if self.min_selection_mass:
			if isinstance(self.min_selection_mass, str):
				min_selection_mass = base_params["min_selection_mass"]
			else:
				min_selection_mass = self.min_selection_mass
		else:
			min_selection_mass = 0

		for pidx, pkey in enumerate(self.modeled_populations):
			pdat = self.population_data[pkey]

			p_min_selection_mass = (
				min_selection_mass[pidx]
					if self.min_selection_mass == "per_selection" else
				min_selection_mass
			)

			parent_pop_fraction = unorm(self.population_data[pdat['parent']]["P_sel"])[sample_i]

			if parent_pop_fraction == 0:
				continue

			# calculate selection results for full ec50 range
			# base_params['sel_k'] * (pdat['conc_factor'] ** (pdat['selection_level'] - ec50_range) - 1.0 ))
			if "sel_k" in base_params:
				sel_k = base_params["sel_k"]
			else:
				sel_k = self.sel_k

			selected_fraction = self.response_impl.selection_mass(
				sel_level = pdat["selection_level"], sel_k = sel_k, sel_ec50 = ec50_range,
				**{param : pdat[param] for param in self.response_impl.population_params}
			)

			selected_fraction = min_selection_rate + (selected_fraction * (1 - min_selection_rate))

			sel_pop_fraction = parent_pop_fraction * selected_fraction / self.model_populations[pkey]["Frac_sel_pop"](base_params)

			sample_llhs = scipy.stats.binom.logpmf(
				pdat["P_sel"][sample_i],
				n=pdat["P_sel"].sum(),
				p=numpy.clip(sel_pop_fraction, p_min_selection_mass + 1e-9, 1.0)
			)

			if include_global_terms and pdat.get("Frac_sel_pop") is not None:
				prev_selected_fraction = self.response_impl.selection_mass(
					sel_level = pdat["selection_level"], sel_k = sel_k, sel_ec50 = base_params['sel_ec50'][sample_i],
					**{param : pdat[param] for param in self.response_impl.population_params}
				)

				prev_selected_mass = parent_pop_fraction * prev_selected_fraction

				selected_mass = parent_pop_fraction * selected_fraction

				selected_count = pdat["P_sel"].sum()
				source_count = numpy.floor(float(selected_count) / pdat["Frac_sel_pop"])

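				# For each candidate ec50, adjust the population-wide selection fraction by
				# replacing this member's previously selected mass with the mass implied by
				# that candidate value.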
				modified_global_selection_fractions = (
					self.model_populations[pkey]["Frac_sel_pop"](base_params)
					+ selected_mass - prev_selected_mass
				)

				sample_llhs += scipy.stats.binom.logpmf(
					selected_count,
					n=source_count,
					p=modified_global_selection_fractions
				)


			llh_by_ec50_gen[:, pidx] = sample_llhs

		llh_by_ec50 = llh_by_ec50_gen.sum(axis=1)

		return llh_by_ec50 - numpy.nanmax(llh_by_ec50)

	def estimate_ec50_cred(self, base_params, ec50_i, cred_spans = [.68, .95]):
		"""Estimate EC50 credible interval for a single ec50 parameter via model probability."""
		#xs = numpy.arange(self.sel_range["lower"]+0.1, self.sel_range["upper"]-0.1, .1)
		xs = numpy.linspace(
			self.sel_range["lower"] + 1, self.sel_range["upper"] - 1,
			(self.sel_range["upper"] - self.sel_range["lower"] - 2) * 10 + 1)

		logp = numpy.nan_to_num(self.ec50_logp_trace(base_params, ec50_i, xs))
		pmf = numpy.exp(logp) / numpy.sum(numpy.exp(logp))
		cdf = numpy.cumsum(pmf)

		cred_intervals = {}
		for cred_i in cred_spans:
			cdf_b = (1 - cred_i) / 2
			l_b = xs[numpy.searchsorted(cdf, cdf_b, side="left")]
			u_b = xs[numpy.searchsorted(cdf, 1 - cdf_b, side="right")]
			cred_intervals[cred_i] = (l_b, u_b)

		return dict(
			xs = xs,
			pmf = pmf,
			cdf = cdf,
			logp = logp,
			sel_ec50 = base_params["sel_ec50"][ec50_i],
			cred_intervals = cred_intervals
		)

	@staticmethod
	def plot_cred_summary(ec50_cred, ax=None):
		if ax is None:
			from matplotlib import pylab
			ax = pylab.gca()

		ax.plot( ec50_cred["xs"], ec50_cred["pmf"], label="pmf" )
		ax.plot( ec50_cred["xs"], ec50_cred["cdf"], label="cdf" )
		ax.axvline(ec50_cred["sel_ec50"], alpha=.5, label="sel_e50: %.2f" % ec50_cred["sel_ec50"])

		for ci, (cl, cu) in list(ec50_cred["cred_intervals"].items()):
			ax.axvspan(cl, cu, color="red", alpha=.2, label="%.2f cred" % ci)

	def model_selection_summary(self, params):
		def normed_pop(v):
			return v / v.sum()

		return {
			pkey : {
				"P_sel"  : self.population_data[pkey]["P_sel"],
				"selected_fraction" : normed_pop(self.population_data[pkey]["P_sel"].astype(float)),
				"P_cleave" : normed_pop(self.model_populations[pkey]["P_cleave"](params))
			}
			for pkey in self.model_populations
		}


	def plot_fit_summary(model, i, fit):
		import scipy.stats
		import scipy.special
		from matplotlib import pylab

		sel_sum = model.model_selection_summary(fit)

		sel_levels = {
			k : p["selection_level"] if p["selection_level"] else 0
			for k, p in list(model.population_data.items())}

		sel_fracs = {
			k : p["P_sel"][i] / p["P_sel"].sum()
			for k, p in list(model.population_data.items())}

		pylab.xticks(
			list(sel_levels.values()), list(sel_levels.keys()))
		pylab.xlim((-1, 7))

		porder = [
			k for k, p in
			sorted(model.population_data.items(), key=lambda kp: kp[1]["selection_level"] or 0)]

		pylab.plot(
			[sel_levels[k] for k in porder],
			[sel_fracs[k] for k in porder],
			"-o",
			color="black", label="observed")

		lbl = False
		for k in sel_sum:
			n = sel_sum[k]["P_sel"].sum()
			p = sel_sum[k]["pop_fraction"][i]
			sel_level = model.population_data[k]["selection_level"]
			counts=sel_sum[k]["P_sel"][i]
			plt.text(sel_levels[k] + 0.2, sel_fracs[k], '%.0f' % counts)

			if p<=0:
				continue

			bn = scipy.stats.binom(n=n, p=p)

			parkey = model.population_data[k]["parent"]
			pylab.plot(
				[sel_levels[parkey], sel_levels[k]],
				[sel_fracs[parkey], float(bn.ppf(.5)) / n],
				"--", color="red", alpha=.25
			)

			for ci in (.68, .95, .99):
				pylab.plot(
					[sel_level] * 2, bn.ppf([ci, 1-ci]) / n,
					linewidth=10, color="red", alpha=.25,
					label="predicted" if not lbl else None
				)
				lbl=True

		pylab.legend(fontsize="large", loc="best")

		pylab.twinx()
		xs = numpy.linspace(-2, 8)
		sel_ec50 = fit["sel_ec50"][i]
		sel_k = fit["sel_k"][i] if len(fit["sel_k"]) > 1 else fit["sel_k"]
		pylab.plot(xs, scipy.special.expit(-sel_k * (xs - sel_ec50)), alpha=.75)
		pylab.yticks([], [])

		pylab.title("%s - ec50: %.2f - k: %.2f" % (i, sel_ec50, sel_k))

	def model_outlier_summary(self, params):
		selection_summary = self.model_selection_summary(params)

		for v in list(selection_summary.values()):
			logpmf = scipy.stats.binom.logpmf(
				v["P_sel"],
				n=v["P_sel"].sum(),
				p=v["P_cleave"])

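			# Normalize by the log-pmf of the most probable count so that
			# sel_log_likelihood is <= 0, with 0 meaning a perfectly typical observation.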
			max_logpmf = scipy.stats.binom.logpmf(
				numpy.round(v["P_sel"].sum() * v["P_cleave"]),
				n=v["P_sel"].sum(),
				p=v["P_cleave"])

			sel_llh = logpmf - max_logpmf
			v["sel_log_likelihood"] = numpy.where(sel_llh != -numpy.inf, sel_llh, numpy.nan)

			sel_error = (v["P_sel"] / v["P_sel"].sum()) - v["P_cleave"]
			v["sel_log_likelihood_signed"] = -v["sel_log_likelihood"] * numpy.sign(sel_error)

		return selection_summary

	def to_transformed(self, val_dict):
		r = {}
		for n, val in list(val_dict.items()):
			if n in self.to_trans:
				k, f = self.to_trans[n]
				r[k] = f(val)
			else:
				r[n] = val

		return r

	def _function(self, f):
		if isinstance(f, theano.tensor.TensorVariable):
			fn = theano.function(self.model.free_RVs, f, on_unused_input="ignore")

			def call_fn(val_dict):
				val_dict = self.to_transformed(val_dict)
				return fn(*[val_dict[str(n)] for n in self.model.free_RVs])

			return call_fn
		else:
			return lambda _: f
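
# A minimal, self-contained sketch (not from the source above) of the pmf/cdf/searchsorted
# credible-interval step that estimate_ec50_cred performs. A hypothetical Gaussian
# log-likelihood stands in for ec50_logp_trace; the grid and credible levels are
# illustrative assumptions, not the model's defaults.
import numpy
import scipy.stats

xs = numpy.linspace(0.0, 6.0, 601)                      # candidate ec50 grid
logp = scipy.stats.norm.logpdf(xs, loc=2.5, scale=0.4)  # toy log-likelihood trace
logp = logp - numpy.nanmax(logp)                        # shifted as in ec50_logp_trace

pmf = numpy.exp(logp) / numpy.sum(numpy.exp(logp))      # discrete posterior over the grid
cdf = numpy.cumsum(pmf)

cred_intervals = {}
for cred in (.68, .95):
	tail = (1 - cred) / 2
	lower = xs[numpy.searchsorted(cdf, tail, side="left")]
	upper = xs[numpy.searchsorted(cdf, 1 - tail, side="right")]
	cred_intervals[cred] = (lower, upper)

# For a Gaussian with scale 0.4 the 68% interval is roughly (2.1, 2.9).
print(cred_intervals)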
Example #30
0
class XarrayInterpolator(Interpolator):
    """Xarray interpolation Interpolation

    Attributes
    ----------
    {interpolator_attributes}

    fill_nan: bool
        Default is False. If True, nan values will be filled before interpolation.
    fill_value: float, str
        Default is None. The value used to fill nan values. This can be a number or "extrapolate"; see `scipy.interpolate.interpn` / `scipy.interpolate.interp1d`.
    kwargs: dict
        Default is {{"bounds_error": False}}. Additional values to pass to xarray's `interp` method.

    """

    dims_supported = ["lat", "lon", "alt", "time"]
    methods_supported = [
        "nearest",
        "linear",
        "bilinear",
        "quadratic",
        "cubic",
        "zero",
        "slinear",
        "next",
        "previous",
        "splinef2d",
    ]

    # defined at instantiation
    method = tl.Unicode(default_value="nearest")
    fill_value = tl.Union([tl.Unicode(), tl.Float()],
                          default_value=None,
                          allow_none=True)
    fill_nan = tl.Bool(False)

    kwargs = tl.Dict({"bounds_error": False})

    def __repr__(self):
        rep = super(XarrayInterpolator, self).__repr__()
        # rep += '\n\tspatial_tolerance: {}\n\ttime_tolerance: {}'.format(self.spatial_tolerance, self.time_tolerance)
        return rep

    @common_doc(COMMON_INTERPOLATOR_DOCS)
    def can_interpolate(self, udims, source_coordinates, eval_coordinates):
        """
        {interpolator_interpolate}
        """
        udims_subset = self._filter_udims_supported(udims)

        # confirm that udims are in both source and eval coordinates
        if self._dim_in(udims_subset, source_coordinates, unstacked=True):
            for d in source_coordinates.udims:  # Cannot handle stacked dimensions
                if source_coordinates.is_stacked(d):
                    return tuple()
            return udims_subset
        else:
            return tuple()

    @common_doc(COMMON_INTERPOLATOR_DOCS)
    def interpolate(self, udims, source_coordinates, source_data,
                    eval_coordinates, output_data):
        """
        {interpolator_interpolate}
        """
        coords = {}
        nn_coords = {}

        for d in udims:
            # Note: This interpolator cannot handle stacked source -- and this is handled in the can_interpolate function
            if source_coordinates[d].size == 1:
                # If the source has only a single coordinate along this dimension, xarray's
                # interp raises an error (it requires at least 2 coordinates), so fall back to a
                # nearest-neighbor selection instead. Note that this will not respect any tolerances.
                new_dim = [dd for dd in eval_coordinates.dims if d in dd][0]
                nn_coords[d] = xr.DataArray(
                    eval_coordinates[d].coordinates,
                    dims=[new_dim],
                    coords=[eval_coordinates.xcoords[new_dim]],
                )
                continue
            if not source_coordinates.is_stacked(
                    d) and eval_coordinates.is_stacked(d):
                new_dim = [dd for dd in eval_coordinates.dims if d in dd][0]
                coords[d] = xr.DataArray(
                    eval_coordinates[d].coordinates,
                    dims=[new_dim],
                    coords=[eval_coordinates.xcoords[new_dim]])
            else:
                # TODO: Check dependent coordinates
                coords[d] = eval_coordinates[d].coordinates

        kwargs = self.kwargs.copy()
        kwargs.update({"fill_value": self.fill_value})

        coords["kwargs"] = kwargs

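        # The underlying scipy interpolators have no "bilinear" method; on a regular grid
        # it is equivalent to "linear", so map it before calling xarray's interp.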
        if self.method == "bilinear":
            self.method = "linear"

        if self.fill_nan:
            for d in source_coordinates.dims:
                if not np.any(np.isnan(source_data)):
                    break
                # use_coordinate=False allows for interpolation when dimension is not monotonically increasing
                source_data = source_data.interpolate_na(method=self.method,
                                                         dim=d,
                                                         use_coordinate=False)

        if nn_coords:
            source_data = source_data.sel(method="nearest", **nn_coords)

        output_data = source_data.interp(method=self.method, **coords)

        return output_data.transpose(*eval_coordinates.dims)
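
# A minimal, standalone sketch (not part of the example above) of the two xarray calls
# that XarrayInterpolator.interpolate relies on: DataArray.interpolate_na for the
# fill_nan branch, and DataArray.interp with scipy options forwarded via `kwargs`.
# The coordinates, grid sizes, and fill choices below are hypothetical.
import numpy as np
import xarray as xr

source = xr.DataArray(
    np.random.rand(4, 5),
    dims=["lat", "lon"],
    coords={"lat": np.linspace(0.0, 3.0, 4), "lon": np.linspace(0.0, 4.0, 5)},
)
source[1, 2] = np.nan

# fill_nan=True analogue: fill NaNs along one dimension before interpolating.
filled = source.interpolate_na(dim="lon", method="linear", use_coordinate=False)

# interp analogue: evaluate on a denser grid, passing scipy options through `kwargs`
# as coords["kwargs"] = {"bounds_error": False, "fill_value": ...} does above.
result = filled.interp(
    lat=np.linspace(0.0, 3.0, 7),
    lon=np.linspace(0.0, 4.0, 9),
    method="linear",
    kwargs={"bounds_error": False, "fill_value": None},
)
print(result.shape)  # (7, 9)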