def set_UVC(self, U, V, C=None):
    """Update the U, V (and optionally color C) data for the barbs.

    Inputs are raveled with invalid (NaN/inf) entries masked; any point
    that is masked in x, y, u, v (or c) is dropped before the barb
    polygons and offsets are rebuilt.
    """
    # Copy the inputs so that mutating U/V after this call (but before
    # draw) cannot change what gets drawn — self.u/self.v are kept
    # alive on the instance and re-used by set_offsets.
    self.u = ma.masked_invalid(U, copy=True).ravel()
    self.v = ma.masked_invalid(V, copy=True).ravel()
    if C is not None:
        c = ma.masked_invalid(C, copy=True).ravel()
        x, y, u, v, c = delete_masked_points(self.x.ravel(),
                                             self.y.ravel(),
                                             self.u, self.v, c)
    else:
        x, y, u, v = delete_masked_points(self.x.ravel(), self.y.ravel(),
                                          self.u, self.v)

    magnitude = np.hypot(u, v)
    flags, barbs, halves, empty = self._find_tails(magnitude,
                                                   self.rounding,
                                                   **self.barb_increments)

    # Get the vertices for each of the barbs
    plot_barbs = self._make_barbs(u, v, flags, barbs, halves, empty,
                                  self._length, self._pivot, self.sizes,
                                  self.fill_empty, self.flip)
    self.set_verts(plot_barbs)

    # Set the color array
    if C is not None:
        self.set_array(c)

    # Update the offsets in case the masked data changed
    xy = np.hstack((x[:, np.newaxis], y[:, np.newaxis]))
    self._offsets = xy
def test_string_seq(self):
    # Strings cannot be NaN, so filtering is driven entirely by the
    # invalid entries of the numeric sequence.
    letters = ['a', 'b', 'c', 'd', 'e', 'f']
    numbers = [1, 2, 3, np.nan, np.nan, 6]
    got_letters, got_numbers = delete_masked_points(letters, numbers)
    keep = [0, 1, 2, 5]
    assert_array_equal(got_letters, np.array(letters)[keep])
    assert_array_equal(got_numbers, np.array(numbers)[keep])
def test_rgba(self):
    # Rows removed are the union of the explicit mask (indices 2, 3)
    # and the NaN entries (indices 3, 4): only 0, 1, 5 survive.
    masked = np.ma.array([1, 2, 3, np.nan, np.nan, 6],
                         mask=[False, False, True, True, False, False])
    rgba = mcolors.to_rgba_array(['r', 'g', 'b', 'c', 'm', 'y'])
    got = delete_masked_points(masked, rgba)
    keep = [0, 1, 5]
    assert_array_equal(got[0], masked[keep].compressed())
    assert_array_equal(got[1], rgba[keep])
def set_UVC(self, U, V, C=None):
    """Update the U, V (and optionally color C) data for the barbs.

    Inputs are raveled with invalid entries masked; points masked in
    any of x, y, u, v (or c) are dropped, the per-point flip flags are
    filtered alongside, and the barb polygons and offsets are rebuilt.
    """
    # We need to ensure we have a copy, not a reference to an array that
    # might change before draw().
    self.u = ma.masked_invalid(U, copy=True).ravel()
    self.v = ma.masked_invalid(V, copy=True).ravel()

    # Flip needs to have the same number of entries as everything else.
    # Use broadcast_to to avoid a bloated array of identical values.
    # (can't rely on actual broadcasting)
    if len(self.flip) == 1:
        flip = np.broadcast_to(self.flip, self.u.shape)
    else:
        flip = self.flip

    if C is not None:
        c = ma.masked_invalid(C, copy=True).ravel()
        # flip must be filtered through the same mask as the data so
        # each surviving point keeps its own flip flag.
        x, y, u, v, c, flip = cbook.delete_masked_points(
            self.x.ravel(), self.y.ravel(), self.u, self.v, c,
            flip.ravel())
        _check_consistent_shapes(x, y, u, v, c, flip)
    else:
        x, y, u, v, flip = cbook.delete_masked_points(
            self.x.ravel(), self.y.ravel(), self.u, self.v, flip.ravel())
        _check_consistent_shapes(x, y, u, v, flip)

    magnitude = np.hypot(u, v)
    flags, barbs, halves, empty = self._find_tails(magnitude,
                                                   self.rounding,
                                                   **self.barb_increments)

    # Get the vertices for each of the barbs
    plot_barbs = self._make_barbs(u, v, flags, barbs, halves, empty,
                                  self._length, self._pivot, self.sizes,
                                  self.fill_empty, flip)
    self.set_verts(plot_barbs)

    # Set the color array
    if C is not None:
        self.set_array(c)

    # Update the offsets in case the masked data changed
    xy = np.column_stack((x, y))
    self._offsets = xy
    self.stale = True
def set_UVC(self, U, V, C=None):
    """Update the U, V (and optionally color C) data for the barbs.

    Inputs are raveled with invalid (NaN/inf) entries masked; any point
    masked in x, y, u, v (or c) is dropped before the barb polygons and
    offsets are rebuilt.
    """
    self.u = ma.masked_invalid(U, copy=False).ravel()
    self.v = ma.masked_invalid(V, copy=False).ravel()
    if C is not None:
        c = ma.masked_invalid(C, copy=False).ravel()
        x, y, u, v, c = delete_masked_points(self.x.ravel(),
                                             self.y.ravel(),
                                             self.u, self.v, c)
    else:
        x, y, u, v = delete_masked_points(self.x.ravel(), self.y.ravel(),
                                          self.u, self.v)

    # np.hypot instead of sqrt(u*u + v*v): it avoids spurious
    # overflow/underflow for very large or very small components and
    # matches the other set_UVC implementations in this file.
    magnitude = np.hypot(u, v)
    flags, barbs, halves, empty = self._find_tails(magnitude,
                                                   self.rounding,
                                                   **self.barb_increments)

    # Get the vertices for each of the barbs
    plot_barbs = self._make_barbs(
        u, v, flags, barbs, halves, empty,
        self._length, self._pivot, self.sizes,
        self.fill_empty, self.flip
    )
    self.set_verts(plot_barbs)

    # Set the color array
    if C is not None:
        self.set_array(c)

    # Update the offsets in case the masked data changed
    xy = np.hstack((x[:, np.newaxis], y[:, np.newaxis]))
    self._offsets = xy
def test_datetime(self):
    # datetime objects pass through untouched; filtering is driven by
    # the mask and NaNs of the companion masked array.
    days = [datetime(2008, 1, d) for d in range(1, 7)]
    masked = np.ma.array([1, 2, 3, np.nan, np.nan, 6],
                         mask=[False, False, True, True, False, False])
    got = delete_masked_points(days, masked)
    keep = [0, 1, 5]
    assert_array_equal(got[0], np.array(days)[keep])
    assert_array_equal(got[1], masked[keep].compressed())
def text_plot(ax, x, y, data, format='%.0f', loc=None, **kw):
    """Plot formatted text values at the given data locations.

    Parameters
    ----------
    ax : the axes to draw into
    x, y : positions for the text (data coordinates)
    data : values to format and draw; masked points are skipped
    format : %-style format string, or a callable mapping value -> str
    loc : optional (x, y) screen-space offset applied to every label

    Returns
    -------
    The created TextCollection, or None if no unmasked data remain.
    """
    from matplotlib.cbook import delete_masked_points
    from matplotlib import transforms

    # Default to centered on point
    if loc is not None:
        x0, y0 = loc
        trans = ax.transData + transforms.Affine2D().translate(x0, y0)
    else:
        trans = ax.transData

    # Handle both callables and strings for format
    if is_string_like(format):
        formatter = lambda s: format % s
    else:
        formatter = format

    # Handle masked arrays
    x, y, data = delete_masked_points(x, y, data)

    # If there is nothing left after deleting the masked points, return None.
    # NOTE: this used to be `if not data.any()`, which is also False when
    # every value is 0 and wrongly suppressed plotting of legitimate
    # zero-valued data; test emptiness instead.
    if len(data) == 0:
        return None

    # Make the TextCollection object
    texts = [formatter(d) for d in data]
    text_obj = TextCollection(x, y, texts, horizontalalignment='center',
                              verticalalignment='center', clip_on=True,
                              transform=trans, **kw)

    # Add it to the axes
    ax.add_artist(text_obj)

    # Update plot range
    minx = np.min(x)
    maxx = np.max(x)
    miny = np.min(y)
    maxy = np.max(y)
    w = maxx - minx
    h = maxy - miny

    # the pad is a little hack to deal with the fact that we don't
    # want to transform all the symbols whose scales are in points
    # to data coords to get the exact bounding box for efficiency
    # reasons. It can be done right if this is deemed important
    padx, pady = 0.05 * w, 0.05 * h
    corners = (minx - padx, miny - pady), (maxx + padx, maxy + pady)
    ax.update_datalim(corners)
    ax.autoscale_view()
    return text_obj
def set_offsets(self, xy):
    """
    Set the offsets for the barb polygons. This saves the offsets
    passed in and actually sets version masked as appropriate for the
    existing U/V data.

    *offsets* should be a sequence.

    ACCEPTS: sequence of pairs of floats
    """
    # Keep the full, unmasked positions on the instance; masking is
    # re-applied below against the current u/v data.
    self.x = xy[:, 0]
    self.y = xy[:, 1]
    # Drop any point where x, y, u, or v is masked so the underlying
    # collection only receives drawable offsets.
    x, y, u, v = delete_masked_points(self.x.ravel(), self.y.ravel(),
                                      self.u, self.v)
    xy = np.hstack((x[:, np.newaxis], y[:, np.newaxis]))
    collections.PolyCollection.set_offsets(self, xy)
def text_plot(ax, x, y, data, format='%.0f', loc=None, **kw):
    """Plot formatted text values at the given data locations.

    Parameters
    ----------
    ax : the axes to draw into
    x, y : positions for the text (data coordinates)
    data : values to format and draw; masked points are skipped
    format : %-style format string, or a callable mapping value -> str
    loc : optional (x, y) screen-space offset applied to every label

    Returns
    -------
    The created TextCollection, or None if no unmasked data remain.
    """
    from matplotlib.cbook import delete_masked_points
    from matplotlib import transforms

    # Default to centered on point
    if loc is not None:
        x0, y0 = loc
        trans = ax.transData + transforms.Affine2D().translate(x0, y0)
    else:
        trans = ax.transData

    # Handle both callables and strings for format
    if is_string_like(format):
        formatter = lambda s: format % s
    else:
        formatter = format

    # Handle masked arrays
    x, y, data = delete_masked_points(x, y, data)

    # If there is nothing left after deleting the masked points, return None.
    # NOTE: this used to be `if not data.any()`, which is also False when
    # every value is 0 and wrongly suppressed plotting of legitimate
    # zero-valued data; test emptiness instead.
    if len(data) == 0:
        return None

    # Make the TextCollection object
    texts = [formatter(d) for d in data]
    text_obj = TextCollection(x, y, texts, horizontalalignment='center',
                              verticalalignment='center', clip_on=True,
                              transform=trans, **kw)

    # Add it to the axes
    ax.add_artist(text_obj)

    # Update plot range
    minx = np.min(x)
    maxx = np.max(x)
    miny = np.min(y)
    maxy = np.max(y)
    w = maxx - minx
    h = maxy - miny

    # the pad is a little hack to deal with the fact that we don't
    # want to transform all the symbols whose scales are in points
    # to data coords to get the exact bounding box for efficiency
    # reasons. It can be done right if this is deemed important
    padx, pady = 0.05 * w, 0.05 * h
    corners = (minx - padx, miny - pady), (maxx + padx, maxy + pady)
    ax.update_datalim(corners)
    ax.autoscale_view()
    return text_obj
def set_offsets(self, xy):
    """
    Set the offsets for the barb polygons.

    The raw positions are stored on the instance, then any point whose
    position or wind component is masked is dropped before handing the
    offsets to the underlying polygon collection.

    *offsets* should be a sequence.

    ACCEPTS: sequence of pairs of floats
    """
    self.x, self.y = xy[:, 0], xy[:, 1]
    good_x, good_y, good_u, good_v = delete_masked_points(
        self.x.ravel(), self.y.ravel(), self.u, self.v)
    _check_consistent_shapes(good_x, good_y, good_u, good_v)
    masked_xy = np.column_stack((good_x, good_y))
    mcollections.PolyCollection.set_offsets(self, masked_xy)
    self.stale = True
def set_offsets(self, xy):
    """
    Set the offsets for the barb polygons.

    The raw positions are saved, then masked as appropriate for the
    existing U/V data before being applied to the collection.

    Parameters
    ----------
    xy : sequence of pairs of floats
    """
    self.x, self.y = xy[:, 0], xy[:, 1]
    clean = cbook.delete_masked_points(
        self.x.ravel(), self.y.ravel(), self.u, self.v)
    _check_consistent_shapes(*clean)
    px, py = clean[0], clean[1]
    mcollections.PolyCollection.set_offsets(self, np.column_stack((px, py)))
    self.stale = True
def scattertext(self, x, y, texts, loc=(0, 0), **kw):
    """Add text to the axes.

    Add text in string `s` to axis at location `x`, `y`, data
    coordinates.

    Parameters
    ----------
    x, y : array_like, shape (n, )
        Input positions
    texts : array_like, shape (n, )
        Collection of text that will be plotted at each (x,y) location
    loc : length-2 tuple
        Offset (in screen coordinates) from x,y position. Allows
        positioning text relative to original point.

    Other Parameters
    ----------------
    kwargs : `~matplotlib.text.TextCollection` properties.
        Other miscellaneous text parameters.

    Examples
    --------
    Individual keyword arguments can be used to override any given
    parameter::

        >>> scattertext(x, y, texts, fontsize=12)

    The default setting is to center the text at the specified x, y
    locations in data coordinates. The example below places the text
    above and to the right by 10 pixels::

        >>> scattertext([0.25, 0.75], [0.25, 0.75], [0.5, 1.0],
        ...             loc=(10, 10))
    """
    # Start with default args and update from kw
    new_kw = {
        'verticalalignment': 'center',
        'horizontalalignment': 'center',
        'transform': self.transData,
        'clip_on': False}
    new_kw.update(kw)

    # Handle masked arrays
    x, y, texts = cbook.delete_masked_points(x, y, texts)

    # If there is nothing left after deleting the masked points, return None
    if x.size == 0:
        return None

    # Make the TextCollection object; the screen-space offset `loc` is
    # applied by the collection itself at draw time.
    text_obj = TextCollection(x, y, texts, offset=loc, **new_kw)

    # The margin adjustment is a hack to deal with the fact that we don't
    # want to transform all the symbols whose scales are in points
    # to data coords to get the exact bounding box for efficiency
    # reasons. It can be done right if this is deemed important.
    # Also, only bother with this padding if there is anything to draw.
    if self._xmargin < 0.05:
        self.set_xmargin(0.05)

    if self._ymargin < 0.05:
        self.set_ymargin(0.05)

    # Add it to the axes and update range
    self.add_artist(text_obj)
    self.update_datalim(text_obj.get_datalim(self.transData))
    self.autoscale_view()
    return text_obj
def test_bad_first_arg(self):
    # A bare string as the first argument is not a valid sequence of
    # points and must be rejected with ValueError.
    bad_first = 'a string'
    numeric = np.arange(1.0, 7.0)
    with pytest.raises(ValueError):
        delete_masked_points(bad_first, numeric)
def scattertext(self, x, y, texts, loc=(0, 0), **kw):
    """Add text to the axes.

    Add text in string `s` to axis at location `x`, `y`, data
    coordinates.

    Parameters
    ----------
    x, y : array_like, shape (n, )
        Input positions
    texts : array_like, shape (n, )
        Collection of text that will be plotted at each (x,y) location
    loc : length-2 tuple
        Offset (in screen coordinates) from x,y position. Allows
        positioning text relative to original point.

    Other Parameters
    ----------------
    kwargs : `~matplotlib.text.TextCollection` properties.
        Other miscellaneous text parameters.

    Examples
    --------
    Individual keyword arguments can be used to override any given
    parameter::

        >>> ax = plt.gca()
        >>> ax.scattertext([0.25, 0.75], [0.25, 0.75], ['aa', 'bb'],
        ...                fontsize=12)  #doctest: +ELLIPSIS
        TextCollection

    The default setting is to center the text at the specified x, y
    locations in data coordinates. The example below places the text
    above and to the right by 10 pixels::

        >>> ax = plt.gca()
        >>> ax.scattertext([0.25, 0.75], [0.25, 0.75], ['aa', 'bb'],
        ...                loc=(10, 10))  #doctest: +ELLIPSIS
        TextCollection
    """
    # Start with default args and update from kw
    new_kw = {
        'verticalalignment': 'center',
        'horizontalalignment': 'center',
        'transform': self.transData,
        'clip_on': False
    }
    new_kw.update(kw)

    # Handle masked arrays
    x, y, texts = cbook.delete_masked_points(x, y, texts)

    # If there is nothing left after deleting the masked points, return None
    if x.size == 0:
        return None

    # Make the TextCollection object; the screen-space offset `loc` is
    # applied by the collection itself at draw time.
    text_obj = TextCollection(x, y, texts, offset=loc, **new_kw)

    # The margin adjustment is a hack to deal with the fact that we don't
    # want to transform all the symbols whose scales are in points
    # to data coords to get the exact bounding box for efficiency
    # reasons. It can be done right if this is deemed important.
    # Also, only bother with this padding if there is anything to draw.
    if self._xmargin < 0.05:
        self.set_xmargin(0.05)

    if self._ymargin < 0.05:
        self.set_ymargin(0.05)

    # Add it to the axes and update range
    self.add_artist(text_obj)
    self.update_datalim(text_obj.get_datalim(self.transData))
    self.autoscale_view()
    return text_obj
def rectbin(x, y, C=None, gridsize=100, bins=None, xscale='linear',
            yscale='linear', extent=None, cmap=None, norm=None, vmin=None,
            vmax=None, alpha=None, linewidths=None, edgecolors='none',
            reduce_C_function=np.mean, mincnt=None, marginals=False,
            **kwargs):
    """
    Make a rectangular binning plot.

    Created by NL 20161012, copied exactly then slightly adapted from
    matplotlib's hexbin method.

    Call signature::

        rectbin(x, y, C=None, gridsize=100, bins=None,
                xscale='linear', yscale='linear',
                cmap=None, norm=None, vmin=None, vmax=None,
                alpha=None, linewidths=None, edgecolors='none',
                reduce_C_function=np.mean, mincnt=None, marginals=False,
                **kwargs)

    Make a rectangular binning plot of *x* versus *y*, where *x*, *y* are
    1-D sequences of the same length, *N*. If *C* is *None* (the default),
    this is a histogram of the number of occurrences of the observations
    at (x[i], y[i]).

    If *C* is specified, it specifies values at the coordinate
    (x[i], y[i]). These values are accumulated for each rectangular bin
    and then reduced according to *reduce_C_function*, which defaults to
    numpy's mean function (np.mean). (If *C* is specified, it must also
    be a 1-D sequence of the same length as *x* and *y*.)

    *x*, *y* and/or *C* may be masked arrays, in which case only unmasked
    points will be plotted.

    Optional keyword arguments:

    *gridsize*: [ 100 | integer ]
        The number of cells in the *x*-direction, default is 100. The
        corresponding number of cells in the *y*-direction is chosen so
        the cells are approximately regular. Alternatively, gridsize can
        be a tuple with two elements specifying the number of cells in
        the *x*-direction and the *y*-direction.

    *bins*: [ *None* | 'log' | integer | sequence ]
        If *None*, no binning is applied; the color of each cell directly
        corresponds to its count value. If 'log', use a logarithmic scale
        for the color map; internally, :math:`log_{10}(i+1)` is used to
        determine the cell color. If an integer, divide the counts in the
        specified number of bins, and color the cells accordingly. If a
        sequence of values, the values of the lower bound of the bins to
        be used.

    *xscale*: [ 'linear' | 'log' ]
        Use a linear or log10 scale on the horizontal axis.

    *yscale*: [ 'linear' | 'log' ]
        Use a linear or log10 scale on the vertical axis.

    *mincnt*: [ *None* | a positive integer ]
        If not *None*, only display cells with more than *mincnt* number
        of points in the cell.

    *marginals*: [ *True* | *False* ]
        If marginals is *True*, plot the marginal density as colormapped
        rectangles along the bottom of the x-axis and left of the y-axis.

    *extent*: [ *None* | scalars (left, right, bottom, top) ]
        The limits of the bins. The default assigns the limits based on
        gridsize, x, y, xscale and yscale.
    """
    ax = plt.gca()
    if not ax._hold:
        ax.cla()

    ax._process_unit_info(xdata=x, ydata=y, kwargs=kwargs)
    x, y, C = cbook.delete_masked_points(x, y, C)

    # Set the size of the rectangle grid
    if iterable(gridsize):
        nx, ny = gridsize
    else:
        nx = gridsize
        ny = int(nx / math.sqrt(3))

    # Count the number of data in each cell
    x = np.array(x, float)
    y = np.array(y, float)
    if xscale == 'log':
        if np.any(x <= 0.0):
            raise ValueError("x contains non-positive values, so can not"
                             " be log-scaled")
        x = np.log10(x)
    if yscale == 'log':
        if np.any(y <= 0.0):
            raise ValueError("y contains non-positive values, so can not"
                             " be log-scaled")
        y = np.log10(y)
    if extent is not None:
        xmin, xmax, ymin, ymax = extent
    else:
        xmin, xmax = (np.amin(x), np.amax(x)) if len(x) else (0, 1)
        ymin, ymax = (np.amin(y), np.amax(y)) if len(y) else (0, 1)

        # to avoid issues with singular data, expand the min/max pairs
        xmin, xmax = mtrans.nonsingular(xmin, xmax, expander=0.1)
        ymin, ymax = mtrans.nonsingular(ymin, ymax, expander=0.1)

    # The cells exactly cover the region from min to max. Need some
    # padding to avoid roundoff errors.
    xpadding = 1.e-9 * (xmax - xmin)
    xmin -= xpadding
    xmax += xpadding
    ypadding = 1.e-9 * (ymax - ymin)
    ymin -= ypadding
    ymax += ypadding
    sx = (xmax - xmin) / nx
    sy = (ymax - ymin) / ny

    if marginals:
        xorig = x.copy()
        yorig = y.copy()

    x = (x - xmin) / sx
    y = (y - ymin) / sy
    ix1 = np.round(x).astype(int)
    iy1 = np.round(y).astype(int)

    nx1 = nx + 1
    ny1 = ny + 1
    # total number of rectangular bins; do 1+ the desired number of bins
    # just to make sure we cover all the edges
    nsq = nx1 * ny1

    if C is None:
        # Accumulate raw counts per cell.
        # BUG FIX: `lattice` was previously used before assignment
        # (the code set `accumsq = np.zeros(nsq)` then reshaped an
        # undefined `lattice`); allocate the 2-D count array directly.
        lattice = np.zeros((nx1, ny1))
        for i in range(len(x)):
            if ((ix1[i] >= 0) and (ix1[i] < nx1) and
                    (iy1[i] >= 0) and (iy1[i] < ny1)):
                lattice[ix1[i], iy1[i]] += 1

        # threshold: cells below mincnt are blanked out
        if mincnt is not None:
            lattice[lattice < mincnt] = np.nan

        accum = lattice.astype(float).ravel()
        good_idxs_sq = ~np.isnan(accum)
    else:
        if mincnt is None:
            mincnt = 0

        # create accumulation arrays: one list of C values per cell
        lattice = np.empty((nx1, ny1), dtype=object)
        for i in range(nx1):
            for j in range(ny1):
                lattice[i, j] = []

        for i in range(len(x)):
            if ((ix1[i] >= 0) and (ix1[i] < nx1) and
                    (iy1[i] >= 0) and (iy1[i] < ny1)):
                lattice[ix1[i], iy1[i]].append(C[i])

        for i in range(nx1):
            for j in range(ny1):
                vals = lattice[i, j]
                if len(vals) > mincnt:
                    lattice[i, j] = reduce_C_function(vals)
                else:
                    lattice[i, j] = np.nan

        accum = lattice.astype(float).ravel()
        good_idxs_sq = ~np.isnan(accum)

    # Cell-center offsets in (scaled) data coordinates
    offset = np.zeros((nsq, 2), float)
    offset[:, 0] = np.repeat(np.arange(nx1), ny1)
    offset[:, 1] = np.tile(np.arange(ny1), nx1)
    offset[:, 0] *= sx
    offset[:, 1] *= sy
    offset[:, 0] += xmin
    offset[:, 1] += ymin

    # remove accumulation bins with no data
    offset = offset[good_idxs_sq, :]
    accum = accum[good_idxs_sq]

    polygon = np.zeros((4, 2), float)
    polygon[:, 0] = sx * np.array([0.5, 0.5, -0.5, -0.5])
    polygon[:, 1] = sy * np.array([-0.5, 0.5, 0.5, -0.5])

    if edgecolors == 'none':
        edgecolors = 'face'

    if xscale == 'log' or yscale == 'log':
        # Build explicit polygons (log axes can't use shared offsets).
        # BUG FIX: was `offsets_sq`, an undefined name; the offsets
        # array is called `offset`.
        polygons = np.expand_dims(polygon, 0) + np.expand_dims(offset, 1)
        if xscale == 'log':
            polygons[:, :, 0] = 10.0 ** polygons[:, :, 0]
            xmin = 10.0 ** xmin
            xmax = 10.0 ** xmax
            # BUG FIX: was self.set_xscale, but this is a free function
            # drawing into `ax`.
            ax.set_xscale(xscale)
        if yscale == 'log':
            polygons[:, :, 1] = 10.0 ** polygons[:, :, 1]
            ymin = 10.0 ** ymin
            ymax = 10.0 ** ymax
            ax.set_yscale(yscale)
        collection = mcoll.PolyCollection(
            polygons,
            edgecolors=edgecolors,
            linewidths=linewidths,
            )
    else:
        collection = mcoll.PolyCollection(
            [polygon],
            edgecolors=edgecolors,
            linewidths=linewidths,
            offsets=offset,
            transOffset=mtransforms.IdentityTransform(),
            offset_position="data")

    if isinstance(norm, mcolors.LogNorm):
        if (accum == 0).any():
            # make sure we have not zeros
            accum += 1

    # autoscale the norm with current accum values if it hasn't been set
    if norm is not None:
        if norm.vmin is None and norm.vmax is None:
            norm.autoscale(accum)

    # Transform accum if needed
    if bins == 'log':
        accum = np.log10(accum + 1)
    elif bins is not None:
        if not iterable(bins):
            minimum, maximum = min(accum), max(accum)
            bins -= 1  # one less edge than bins
            bins = minimum + (maximum - minimum) * np.arange(bins) / bins
        bins = np.sort(bins)
        accum = bins.searchsorted(accum)

    if norm is not None and not isinstance(norm, mcolors.Normalize):
        msg = "'norm' must be an instance of 'mcolors.Normalize'"
        raise ValueError(msg)

    collection.set_array(accum)
    collection.set_cmap(cmap)
    collection.set_norm(norm)
    collection.set_alpha(alpha)
    collection.update(kwargs)

    if vmin is not None or vmax is not None:
        collection.set_clim(vmin, vmax)
    else:
        collection.autoscale_None()

    corners = ((xmin, ymin), (xmax, ymax))
    ax.update_datalim(corners)
    ax.autoscale_view(tight=True)

    # add the collection last
    ax.add_collection(collection, autolim=False)
    if not marginals:
        return collection

    if C is None:
        C = np.ones(len(x))

    def coarse_bin(x, y, coarse):
        # Reduce the y-values falling into each interval defined by the
        # sorted edge positions in *coarse*; empty intervals become NaN.
        ind = coarse.searchsorted(x).clip(0, len(coarse) - 1)
        mus = np.zeros(len(coarse))
        for i in range(len(coarse)):
            yi = y[ind == i]
            if len(yi) > 0:
                mu = reduce_C_function(yi)
            else:
                mu = np.nan
            mus[i] = mu
        return mus

    # Marginal density along the bottom of the x-axis
    coarse = np.linspace(xmin, xmax, gridsize)
    xcoarse = coarse_bin(xorig, C, coarse)
    valid = ~np.isnan(xcoarse)
    verts, values = [], []
    for i, val in enumerate(xcoarse):
        thismin = coarse[i]
        if i < len(coarse) - 1:
            thismax = coarse[i + 1]
        else:
            thismax = thismin + np.diff(coarse)[-1]
        if not valid[i]:
            continue
        verts.append([(thismin, 0), (thismin, 0.05),
                      (thismax, 0.05), (thismax, 0)])
        values.append(val)

    values = np.array(values)
    trans = ax.get_xaxis_transform(which='grid')
    hbar = mcoll.PolyCollection(verts, transform=trans, edgecolors='face')
    hbar.set_array(values)
    hbar.set_cmap(cmap)
    hbar.set_norm(norm)
    hbar.set_alpha(alpha)
    hbar.update(kwargs)
    ax.add_collection(hbar, autolim=False)

    # Marginal density along the left of the y-axis
    coarse = np.linspace(ymin, ymax, gridsize)
    ycoarse = coarse_bin(yorig, C, coarse)
    valid = ~np.isnan(ycoarse)
    verts, values = [], []
    for i, val in enumerate(ycoarse):
        thismin = coarse[i]
        if i < len(coarse) - 1:
            thismax = coarse[i + 1]
        else:
            thismax = thismin + np.diff(coarse)[-1]
        if not valid[i]:
            continue
        verts.append([(0, thismin), (0.0, thismax),
                      (0.05, thismax), (0.05, thismin)])
        values.append(val)

    values = np.array(values)
    trans = ax.get_yaxis_transform(which='grid')
    vbar = mcoll.PolyCollection(verts, transform=trans, edgecolors='face')
    vbar.set_array(values)
    vbar.set_cmap(cmap)
    vbar.set_norm(norm)
    vbar.set_alpha(alpha)
    vbar.update(kwargs)
    ax.add_collection(vbar, autolim=False)

    collection.hbar = hbar
    collection.vbar = vbar

    def on_changed(collection):
        # Keep marginal bars in sync with the main collection's colors.
        hbar.set_cmap(collection.get_cmap())
        hbar.set_clim(collection.get_clim())
        vbar.set_cmap(collection.get_cmap())
        vbar.set_clim(collection.get_clim())

    collection.callbacksSM.connect('changed', on_changed)
    return collection
def scatter(self, xs, ys, zs=0, zdir='z', s=20, c='b', *args, **kwargs):
    '''
    Create a scatter plot.

    ==========  ==========================================================
    Argument    Description
    ==========  ==========================================================
    *xs*, *ys*  Positions of data points.
    *zs*        Either an array of the same length as *xs* and
                *ys* or a single value to place all points in
                the same plane. Default is 0.
    *zdir*      Which direction to use as z ('x', 'y' or 'z')
                when plotting a 2d set.
    *s*         size in points^2. It is a scalar or an array of the same
                length as *x* and *y*.

    *c*         a color. *c* can be a single color format string, or a
                sequence of color specifications of length *N*, or a
                sequence of *N* numbers to be mapped to colors using the
                *cmap* and *norm* specified via kwargs (see below). Note
                that *c* should not be a single numeric RGB or RGBA
                sequence because that is indistinguishable from an array
                of values to be colormapped.  *c* can be a 2-D array in
                which the rows are RGB or RGBA, however.
    ==========  ==========================================================

    Keyword arguments are passed on to
    :func:`~matplotlib.axes.Axes.scatter`.

    Returns a :class:`~mpl_toolkits.mplot3d.art3d.Patch3DCollection`
    '''
    had_data = self.has_data()

    xs = np.ma.ravel(xs)
    ys = np.ma.ravel(ys)
    zs = np.ma.ravel(zs)
    if xs.size != ys.size:
        raise ValueError("x and y must be the same size")
    if xs.size != zs.size and zs.size == 1:
        # BUG FIX: was `zs = np.array(zs[0] * xs.size)`, which produced
        # a single scaled value (z * n) instead of repeating the single
        # z value for every point.
        zs = np.ones(xs.size) * zs[0]

    s = np.ma.ravel(s)  # This doesn't have to match x, y in size.

    cstr = cbook.is_string_like(c) or cbook.is_sequence_of_strings(c)
    if not cstr:
        c = np.asanyarray(c)
        if c.size == xs.size:
            c = np.ma.ravel(c)

    # Drop any point that is masked in any of the per-point inputs.
    xs, ys, zs, s, c = cbook.delete_masked_points(xs, ys, zs, s, c)

    patches = Axes.scatter(self, xs, ys, s=s, c=c, *args, **kwargs)
    if not cbook.iterable(zs):
        is_2d = True
        zs = np.ones(len(xs)) * zs
    else:
        is_2d = False
    art3d.patch_collection_2d_to_3d(patches, zs=zs, zdir=zdir)

    # FIXME: why is this necessary?
    if not is_2d:
        self.auto_scale_xyz(xs, ys, zs, had_data)

    return patches
def hexplot(axis, x, y, z, extent=None, cmap=None, norm=None, vmin=None,
            vmax=None, alpha=None, linewidths=None, edgecolors='none',
            **kwargs):
    """Draw one fixed-size hexagon per (x, y) point, colored by *z*.

    Masked points (in x, y, or z) are dropped. Returns the created
    PolyCollection after adding it to *axis*.
    """
    if not axis._hold:
        axis.cla()

    axis._process_unit_info(xdata=x, ydata=y, kwargs=kwargs)

    # Drop any point masked in x, y, or z.
    x, y, z = cbook.delete_masked_points(x, y, z)

    x = np.array(x, float)
    y = np.array(y, float)

    # hardcoded hexagon half-extents in data units
    sx = (2 * 0.465) * 0.99
    sy = (2 * 0.268) * 0.99

    if extent is not None:
        xmin, xmax, ymin, ymax = extent
    else:
        # Pad the data range by one hexagon so edge markers fit.
        xmin, xmax = (np.amin(x-sx), np.amax(x+sx)) if len(x) else (0, 1)
        ymin, ymax = (np.amin(y-sy), np.amax(y+sy)) if len(y) else (0, 1)

        # to avoid issues with singular data, expand the min/max pairs
        xmin, xmax = mtrans.nonsingular(xmin, xmax, expander=0.1)
        ymin, ymax = mtrans.nonsingular(ymin, ymax, expander=0.1)

    # NOTE(review): padding is applied to x only, not y — presumably
    # intentional (mirrors hexbin's x-direction roundoff guard); confirm.
    padding = 1.e-9 * (xmax - xmin)
    xmin -= padding
    xmax += padding

    n = len(x)

    # Unit hexagon outline, scaled by (sx, sy); drawn once and offset
    # to every point via the collection's offsets.
    polygon = np.zeros((6, 2), float)
    polygon[:, 0] = sx * np.array([-0.5, 0.5, 1.0, 0.5, -0.5, -1.0]) / 3.0
    polygon[:, 1] = sy * np.array([0.5, 0.5, 0.0, -0.5, -0.5, 0.0])
    #S = math.sqrt(3) / 2
    #polygon[:, 0] = sx * np.array([-0.5, 0.5, 1.0, 0.5, -0.5, -1.0])
    #polygon[:, 1] = sy * np.array([S, S, 0.0, -S, -S, 0.0])

    offsets = np.zeros((n, 2), float)
    offsets[:, 0] = x
    offsets[:, 1] = y

    collection = mcoll.PolyCollection(
        [polygon],
        edgecolors=edgecolors,
        linewidths=linewidths,
        offsets=offsets,
        transOffset=mtransforms.IdentityTransform(),
        offset_position="data"
        )

    if isinstance(norm, mcolors.LogNorm):
        if (z == 0).any():
            # make sure we have not zeros
            z += 1

    # autoscale the norm with the z values if limits haven't been set
    if norm is not None:
        if norm.vmin is None and norm.vmax is None:
            norm.autoscale(z)

    if norm is not None and not isinstance(norm, mcolors.Normalize):
        msg = "'norm' must be an instance of 'mcolors.Normalize'"
        raise ValueError(msg)

    collection.set_array(z)
    collection.set_cmap(cmap)
    collection.set_norm(norm)
    collection.set_alpha(alpha)
    collection.update(kwargs)

    if vmin is not None or vmax is not None:
        collection.set_clim(vmin, vmax)
    else:
        collection.autoscale_None()

    corners = ((xmin, ymin), (xmax, ymax))
    axis.update_datalim(corners)
    axis.autoscale_view(tight=True)

    # add the collection last
    axis.add_collection(collection, autolim=False)
    return collection