def fill_between(self, x, y1, y2=0, where=None, **kwargs): """ Make filled polygons between two curves (y1 and y2) where ``where==True``. :param x: (*array_like*) An N-length array of the x data. :param y1: (*array_like*) An N-length array (or scalar) of the y data. :param y2: (*array_like*) An N-length array (or scalar) of the y data. :param where: (*array_like*) If None, default to fill between everywhere. If not None, it is an N-length boolean array and the fill will only happen over the regions where ``where==True``. """ #Get dataset global gca #Add data series label = kwargs.pop('label', 'S_0') dn = len(x) xdata = plotutil.getplotdata(x) if isinstance(y1, (int, long, float)): yy = [] for i in range(dn): yy.append(y1) y1 = np.array(yy).array else: y1 = plotutil.getplotdata(y1) if isinstance(y2, (int, long, float)): yy = [] for i in range(dn): yy.append(y2) y2 = np.array(yy).array else: y2 = plotutil.getplotdata(y2) if not where is None: if isinstance(where, (tuple, list)): where = np.array(where) where = where.asarray() #Set plot data styles if not 'fill' in kwargs: kwargs['fill'] = True if not 'edge' in kwargs: kwargs['edge'] = False pb, isunique = plotutil.getlegendbreak('polygon', **kwargs) pb.setCaption(label) #Create graphics offset = kwargs.pop('offset', 0) zdir = kwargs.pop('zdir', 'z') if zdir == 'xy': y = kwargs.pop('y', x) ydata = plotutil.getplotdata(y) graphics = GraphicFactory.createFillBetweenPolygons(xdata, ydata, y1, y2, where, pb, \ offset, zdir) else: graphics = GraphicFactory.createFillBetweenPolygons(xdata, y1, y2, where, pb, \ offset, zdir) visible = kwargs.pop('visible', True) if visible: self.add_graphic(graphics) return graphics
def run(self, context):
    """Fit a Gaussian to scanned data and publish the result to PVs.

    Reads the position and signal waveforms (plus an optional normalization
    waveform) from the scan context, normalizes the signal if configured,
    fits a Gaussian, logs the fit curve, and writes center/height/width to
    the configured result PVs.

    :param context: Scan script context providing getData/logData/write.
    """
    # Turn raw python array into ndarray for easier math
    if self.norm_device:
        data = np.array(context.getData(self.pos_device, self.sig_device, self.norm_device))
        x = data[0]
        y = data[1]
        n = data[2]
        print "x = ", x
        print "y = ", y
        print "n = ", n
        print "norm: ", self.norm_value
        # Scale signal by norm_value and divide by the normalization channel.
        y = y * float(self.norm_value) / n
        context.logData("normalized", y.nda)
    else:
        data = np.array(context.getData(self.pos_device, self.sig_device))
        x = data[0]
        y = data[1]
        print "x = ", x
        print "y = ", y
    # Compute fit
    g = Gaussian.fromCentroid(x, y)
    print g
    fit = g.values(x)
    # Log the 'fit' data for later comparison with raw data
    context.logData("fit", fit.nda)
    # Set PVs with result
    context.write(self.pv_pos, g.center)
    context.write(self.pv_height, g.height)
    context.write(self.pv_width, g.width)
def inpolygon(x, y, polygon):
    '''
    Check if x/y points are inside a polygon or not.

    :param x: (*array_like*) X coordinate of the points.
    :param y: (*array_like*) Y coordinate of the points.
    :param polygon: (*PolygonShape list*) The polygon list. May also be a
        ``(x_coords, y_coords)`` tuple of vertex arrays, a ``MILayer`` or a
        single ``PolygonShape``.

    :returns: (*boolean array*) Inside or not. For scalar x/y, a single boolean.
    '''
    # Scalar point: delegate directly to the Java point-in-polygon test.
    if isinstance(x, numbers.Number):
        return GeoComputation.pointInPolygon(polygon, x, y)

    # Normalize point coordinates to NDArray.
    if isinstance(x, (list, tuple)):
        x = np.array(x)
    if isinstance(y, (list, tuple)):
        y = np.array(y)

    if isinstance(polygon, tuple):
        # Polygon given as (x_coords, y_coords) vertex arrays.
        x_p = polygon[0]
        y_p = polygon[1]
        if isinstance(x_p, (list, tuple)):
            x_p = np.array(x_p)
        if isinstance(y_p, (list, tuple)):
            y_p = np.array(y_p)
        return np.NDArray(GeoComputation.inPolygon(x._array, y._array, x_p._array, y_p._array))
    else:
        # Polygon given as a layer or shape(s): reduce to a list of shapes.
        if isinstance(polygon, MILayer):
            polygon = polygon.shapes()
        elif isinstance(polygon, PolygonShape):
            polygon = [polygon]
        return np.NDArray(GeoComputation.inPolygon(x._array, y._array, polygon))
def upcast(*args):
    """Returns the nearest supported sparse dtype for the
    combination of one or more types.

    upcast(t0, t1, ..., tn) -> T  where T is a supported dtype

    :param args: One or more dtype specifiers (names, types or dtypes).
    :returns: The first entry of ``supported_dtypes`` that the combined
        type can be safely cast to.
    :raises TypeError: If no supported dtype can represent the combination.

    Examples
    --------

    >>> upcast('int32')
    <type 'numpy.int32'>
    >>> upcast('bool')
    <type 'numpy.int8'>
    >>> upcast('int32','float32')
    <type 'numpy.float64'>
    >>> upcast('bool',complex,float)
    <type 'numpy.complex128'>
    """
    # Let numpy's promotion rules combine all the requested types.
    sample = np.array([0], dtype=args[0])
    for t in args[1:]:
        sample = sample + np.array([0], dtype=t)

    # Return the first (smallest) supported dtype the promoted type fits into.
    for t in supported_dtypes:
        if np.can_cast(sample.dtype, t):
            return t

    # Wrap args in a 1-tuple: '%s' % args fails with "not all arguments
    # converted" whenever more than one type was passed.
    raise TypeError('no supported conversion for types: %s' % (args,))
def testFIR(self):
    """Exercise the fir() filter with smoothing and differentiation kernels."""
    # A 3-tap smoothing kernel applied to a linear ramp leaves the data
    # unchanged in the interior; zero_edge controls the boundary handling.
    smooth = np.array([0.25, 0.5, 0.25])
    d = np.arange(10)
    filtered = fir(d, smooth, zero_edge=False)
    print d, '--', smooth, '(keep edge) -->', filtered
    self.assertTrue(np.all( filtered == d))
    filtered = fir(d, smooth, zero_edge=True)
    print d, '--', smooth, '-->', filtered
    self.assertEquals(0, filtered[9])
    # Impulse response: the peak is halved by the center tap.
    d = np.zeros(10)
    d[5] = 10
    filtered = fir(d, smooth)
    print d, '--', smooth, '-->', filtered
    self.assertEquals(5.0, filtered[5])
    # Step response: the edge is smeared across the kernel width.
    d = np.zeros(10)
    d[5:10] = np.ones(5)
    filtered = fir(d, smooth)
    print d, '--', smooth, '-->', filtered
    self.assertEquals(0.0, filtered[3])
    self.assertEquals(0.25, filtered[4])
    self.assertEquals(0.75, filtered[5])
    self.assertEquals(1.0, filtered[6])
    # A central-difference kernel acts as a differentiator (output printed only).
    diff = np.array([-0.5, 0, 0.5])
    filtered = fir(d, diff)
    print d, '--', diff, '-->', filtered
def run(self, context):
    """Fit a Gaussian to scanned data and publish the result to PVs.

    Duplicate of the fit script above: reads position/signal (and optional
    normalization) waveforms from the scan context, normalizes the signal if
    configured, fits a Gaussian, logs the fit, and writes the results.

    :param context: Scan script context providing getData/logData/write.
    """
    # Turn raw python array into ndarray for easier math
    if self.norm_device:
        data = np.array(
            context.getData(self.pos_device, self.sig_device, self.norm_device))
        x = data[0]
        y = data[1]
        n = data[2]
        print "x = ", x
        print "y = ", y
        print "n = ", n
        print "norm: ", self.norm_value
        # Scale signal by norm_value and divide by the normalization channel.
        y = y * float(self.norm_value) / n
        context.logData("normalized", y.nda)
    else:
        data = np.array(context.getData(self.pos_device, self.sig_device))
        x = data[0]
        y = data[1]
        print "x = ", x
        print "y = ", y
    # Compute fit
    g = Gaussian.fromCentroid(x, y)
    print g
    fit = g.values(x)
    # Log the 'fit' data for later comparison with raw data
    context.logData("fit", fit.nda)
    # Set PVs with result
    context.write(self.pv_pos, g.center)
    context.write(self.pv_height, g.height)
    context.write(self.pv_width, g.width)
def __init__(self, data=None, index=None, columns=None, dataframe=None):
    """Create a DataFrame.

    :param data: (*array_like*) Column data: a dict of column name to values,
        an NDArray, or a sequence of NDArray columns. May be None for an
        index-only frame.
    :param index: (*list*) Row index; must match the data length when given.
    :param columns: (*list*) Column labels (used when data is not a dict).
    :param dataframe: (*MIDataFrame*) Wrap an existing Java dataframe directly;
        when given, all other arguments are ignored.
    """
    if dataframe is None:
        if not data is None:
            if isinstance(data, dict):
                # Dict input: keys become columns; remember the column length n.
                columns = data.keys()
                dlist = []
                n = 1
                for v in data.values():
                    if isinstance(v, (list, tuple)):
                        n = len(v)
                        v = np.array(v)
                    elif isinstance(v, NDArray):
                        n = len(v)
                    dlist.append(v)
                # Broadcast scalar columns to length n.
                for i in range(len(dlist)):
                    d = dlist[i]
                    if not isinstance(d, NDArray):
                        d = [d] * n
                        d = np.array(d)
                        dlist[i] = d
                data = dlist
            if isinstance(data, NDArray):
                n = len(data)
                data = data._array
            else:
                # Sequence of NDArray columns: unwrap each to its Java array.
                dlist = []
                n = len(data[0])
                for dd in data:
                    dlist.append(dd._array)
                data = dlist
        if index is None:
            # NOTE(review): if data is also None, n is unbound here -- confirm
            # callers never pass data=None together with index=None.
            index = range(0, n)
        else:
            if n != len(index):
                raise ValueError('Wrong length of index!')
        if isinstance(index, (NDArray, DimArray)):
            index = index.tolist()
        if isinstance(index, Index):
            self._index = index
        else:
            self._index = Index.factory(index)
        if data is None:
            self._dataframe = MIDataFrame(self._index._index)
        else:
            self._dataframe = MIDataFrame(data, self._index._index, columns)
    else:
        # Wrap an existing Java dataframe and adopt its index.
        self._dataframe = dataframe
        self._index = Index.factory(index=self._dataframe.getIndex())
def getplotdata(data):
    """Convert plot input into the underlying Java array form.

    :param data: (*array_like*) An NDArray, a list/tuple (possibly of
        datetimes, which are converted to numeric values), or a scalar.
    :returns: The wrapped Java array for the data.
    """
    if isinstance(data, np.NDArray):
        return data.asarray()
    if isinstance(data, (list, tuple)):
        if isinstance(data[0], datetime.datetime):
            # Datetimes must be converted to numeric values before wrapping.
            nums = [np.miutil.date2num(item) for item in data]
            return np.array(nums)._array
        return np.array(data)._array
    # Scalar input: wrap it as a one-element array.
    return np.array([data])._array
def __init__(self, data=None, index=None, name=None, series=None):
    '''
    One-dimensional array with axis labels (including time series).

    :param data: (*array_like*) One-dimensional array data.
    :param index: (*list*) Data index list. Values must be unique and hashable,
        same length as data.
    :param name: (*string*) Series name.
    :param series: (*MISeries*) Wrap an existing Java series directly; when
        given, all other arguments are ignored.
    '''
    if series is None:
        if isinstance(data, (list, tuple)):
            data = np.array(data)
        if index is None:
            # Default to a 0..n-1 positional index.
            index = range(0, len(data))
        else:
            if len(data) != len(index):
                raise ValueError('Wrong length of index!')
        if isinstance(index, (NDArray, DimArray)):
            index = index.tolist()
        if isinstance(index, Index):
            self._index = index
        else:
            self._index = Index.factory(index)
        self._data = data
        self._series = MISeries(data._array, self._index._index, name)
    else:
        # Wrap an existing Java series and adopt its data and index.
        self._series = series
        self._data = NDArray(self._series.getData())
        self._index = Index.factory(index=self._series.getIndex())
def run(self, context):
    """Average the samples of one data channel and write the result to a PV.

    :param context: Scan script context providing getData/write.
    """
    print("ComputeAverage of %s into %s" % (self.data_pv, self.avg_pv))
    # Fetching data for N channels returns N x samples; take the first
    # (only) channel.
    samples = np.array(context.getData(self.data_pv))[0]
    print("Data : " + str(samples))
    # Guard against an empty waveform: report NaN instead of dividing by zero.
    count = len(samples)
    avg = np.sum(samples) / count if count > 0 else np.nan
    print("Average: " + str(avg))
    context.write(self.avg_pv, avg)
def insert(self, loc, column, value):
    """Insert column into DataFrame at specified location.

    :param loc: (*int*) Insertation index.
    :param column: (*string*) Label of inserted column.
    :param value: (*array_like*) Column values.
    """
    # A scalar datetime becomes a Java datetime.
    if isinstance(value, datetime.datetime):
        value = miutil.jdatetime(value)
    # Sequences: convert datetime elements first, then wrap as NDArray.
    if isinstance(value, (list, tuple)):
        if isinstance(value[0], datetime.datetime):
            value = miutil.jdatetime(value)
        value = np.array(value)
    # Hand the underlying Java array to the Java dataframe.
    if isinstance(value, NDArray):
        value = value._array
    self._dataframe.addColumn(loc, column, value)
def fir(data, fir, zero_edge=True):
    """FIR filter.

    Slides the kernel over the data and stores the weighted sum at each
    interior position; the edge positions (where the kernel would run past
    the array) are either zeroed or left as the original samples.

    @param data: Array data
    @param fir: FIR filter elements
    @param zero_edge: Set edge elements in result to 0, or preserve original data?
    @return: Filtered array
    """
    taps = len(fir)
    count = len(data)
    half = int(taps / 2)
    # Start from zeros (edges become 0) or from a copy of the input
    # (edges keep their original values).
    if zero_edge:
        result = np.zeros(count)
    else:
        result = np.array(data)
    for center in range(half, count - half):
        # Left edge of the filter window within 'data'.
        start = center - half
        result[center] = sum(fir * data[start:start + taps])
    return result
def run(self, context):
    """Write device data (optionally normalized) to a PV.

    Fetches the device waveform from the scan context, applies the
    configured normalization when a norm device is set, and writes the
    result to ``self.pv``.

    :param context: Scan script context providing getData/write.
    """
    # print "\nWrite data for %s to %s" % (self.device, self.pv)
    if self.norm_device:
        data = np.array(context.getData(self.device, self.norm_device))
        d = data[0]
        n = data[1]
        try:
            # Scale by norm_value and divide by the normalization channel.
            normed = d * float(self.norm_value) / n
            # Convert np.array back into Java array for 'write'
            data = array(normed, 'd')
        except:
            # On zero division use original data
            # (deliberate best-effort: any normalization failure falls back).
            data = context.getData(self.device)[0]
    else:
        data = context.getData(self.device)
        data = data[0]
    # print "Data: ", data.__class__.__name__
    try:
        # Non-blocking write (third argument False).
        context.write(self.pv, data, False)
    except Exception, e:
        print "WriteDataToPV(%s, %s) exception:" % (self.pv, data)
        print e
def ncwrite(fn, data, varname, dims=None, attrs=None, gattrs=None, largefile=False):
    """
    Write a netCDF data file from an array.

    :param: fn: (*string*) netCDF data file path.
    :param data: (*array_like*) A numeric array variable of any dimensionality.
    :param varname: (*string*) Variable name.
    :param dims: (*list of dimensions*) Dimension list. If None, dimensions are
        generated from the data shape (NDArray) or taken from ``data.dims``.
    :param attrs: (*dict*) Variable attributes.
    :param gattrs: (*dict*) Global attributes.
    :param largefile: (*boolean*) Create netCDF as large file or not.
    """
    if dims is None:
        if isinstance(data, NDArray):
            # Plain array: synthesize index dimensions dim0, dim1, ...
            dims = []
            for s in data.shape:
                dimvalue = np.arange(s)
                dimname = 'dim' + str(len(dims))
                dims.append(dimension(dimvalue, dimname))
        else:
            # DimArray carries its own dimensions.
            dims = data.dims
    #New netCDF file
    ncfile = addfile(fn, 'c', largefile=largefile)
    #Add dimensions
    ncdims = []
    for dim in dims:
        ncdims.append(ncfile.adddim(dim.getShortName(), dim.getLength()))
    #Add global attributes
    ncfile.addgroupattr('Conventions', 'CF-1.6')
    ncfile.addgroupattr('Tools', 'Created using meteothink')
    if not gattrs is None:
        for key in gattrs:
            ncfile.addgroupattr(key, gattrs[key])
    #Add dimension variables (only for typed T/Z/Y/X dimensions)
    dimvars = []
    wdims = []
    for dim, midim in zip(ncdims, dims):
        dimtype = midim.getDimType()
        dimname = dim.getShortName()
        if dimtype == DimensionType.T:
            var = ncfile.addvar(dimname, 'int', [dim])
            var.addattr('units', 'hours since 1900-01-01 00:00:0.0')
            var.addattr('long_name', 'Time')
            var.addattr('standard_name', 'time')
            var.addattr('axis', 'T')
            tvar = var
        elif dimtype == DimensionType.Z:
            var = ncfile.addvar(dimname, 'float', [dim])
            var.addattr('axis', 'Z')
        elif dimtype == DimensionType.Y:
            var = ncfile.addvar(dimname, 'float', [dim])
            var.addattr('axis', 'Y')
        elif dimtype == DimensionType.X:
            var = ncfile.addvar(dimname, 'float', [dim])
            var.addattr('axis', 'X')
        else:
            var = None
        if not var is None:
            dimvars.append(var)
            wdims.append(midim)
    #Add variable
    var = ncfile.addvar(varname, data.dtype, ncdims)
    if attrs is None:
        var.addattr('name', varname)
    else:
        for key in attrs:
            var.addattr(key, attrs[key])
    #Create netCDF file
    ncfile.create()
    #Write variable data
    for dimvar, dim in zip(dimvars, wdims):
        if dim.getDimType() == DimensionType.T:
            # Encode times as hours since the 1900-01-01 epoch (matches the
            # 'units' attribute written above).
            sst = datetime.datetime(1900, 1, 1)
            tt = miutil.nums2dates(dim.getDimValue())
            hours = []
            for t in tt:
                hours.append((t - sst).total_seconds() // 3600)
            ncfile.write(dimvar, np.array(hours))
        else:
            ncfile.write(dimvar, np.array(dim.getDimValue()))
    ncfile.write(var, data)
    #Close netCDF file
    ncfile.close()
def grads2nc(infn, outfn, big_endian=None, largefile=False):
    """
    Convert GrADS data file to netCDF data file.

    :param infn: (*string*) Input GrADS data file name.
    :param outfn: (*string*) Output netCDF data file name.
    :param big_endian: (*boolean*) Is GrADS data big_endian or not.
    :param largefile: (*boolean*) Create netCDF as large file or not.
    """
    #Open GrADS file
    f = addfile_grads(infn)
    if not big_endian is None:
        f.bigendian(big_endian)
    #New netCDF file
    ncfile = addfile(outfn, 'c', largefile=largefile)
    #Add dimensions
    dims = []
    for dim in f.dimensions():
        dims.append(ncfile.adddim(dim.getShortName(), dim.getLength()))
    xdim = f.finddim('X')
    ydim = f.finddim('Y')
    tdim = f.finddim('T')
    xnum = xdim.getLength()
    ynum = ydim.getLength()
    tnum = tdim.getLength()
    #Add global attributes
    ncfile.addgroupattr('Conventions', 'CF-1.6')
    for attr in f.attributes():
        ncfile.addgroupattr(attr.getName(), attr.getValues())
    #Add dimension variables
    dimvars = []
    for dim in dims:
        dname = dim.getShortName()
        if dname == 'T':
            var = ncfile.addvar('time', 'int', [dim])
            var.addattr('units', 'hours since 1900-01-01 00:00:0.0')
            var.addattr('long_name', 'Time')
            var.addattr('standard_name', 'time')
            var.addattr('axis', dname)
            tvar = var
        elif dname == 'Z':
            var = ncfile.addvar('level', 'float', [dim])
            var.addattr('axis', dname)
        else:
            var = ncfile.addvar(dim.getShortName(), 'float', [dim])
            if 'Z' in dname:
                var.addattr('axis', 'Z')
            else:
                var.addattr('axis', dname)
        dimvars.append(var)
    #Add variables
    variables = []
    for var in f.variables():
        print 'Variable: ' + var.getShortName()
        vdims = []
        for vdim in var.getDimensions():
            for dim in dims:
                if vdim.getShortName() == dim.getShortName():
                    vdims.append(dim)
        #print vdims
        nvar = ncfile.addvar(var.getShortName(), var.getDataType(), vdims)
        nvar.addattr('fill_value', -9999.0)
        for attr in var.getAttributes():
            nvar.addattr(attr.getName(), attr.getValues())
        variables.append(nvar)
    #Create netCDF file
    ncfile.create()
    #Write variable data: non-time dimension variables first
    for dimvar, dim in zip(dimvars, f.dimensions()):
        if dim.getShortName() != 'T':
            ncfile.write(dimvar, np.array(dim.getDimValue()))
    # Times are encoded as hours since the 1900-01-01 epoch, one step at a
    # time, writing each variable's slab for that time step.
    sst = datetime.datetime(1900, 1, 1)
    for t in range(0, tnum):
        st = f.gettime(t)
        print st.strftime('%Y-%m-%d %H:00')
        hours = (st - sst).total_seconds() // 3600
        origin = [t]
        ncfile.write(tvar, np.array([hours]), origin=origin)
        for var in variables:
            print 'Variable: ' + var.name
            if var.ndim == 3:
                data = f[str(var.name)][t, :, :]
                # NOTE(review): 'data == np.nan' is always False (NaN never
                # compares equal), so missing values are not replaced here --
                # confirm and consider an isnan-based mask.
                data[data == np.nan] = -9999.0
                origin = [t, 0, 0]
                shape = [1, ynum, xnum]
                data = data.reshape(shape)
                ncfile.write(var, data, origin=origin)
            else:
                znum = var.dims[1].getLength()
                for z in range(0, znum):
                    data = f[str(var.name)][t, z, :, :]
                    # NOTE(review): same always-False NaN comparison as above.
                    data[data == np.nan] = -9999.0
                    origin = [t, z, 0, 0]
                    shape = [1, 1, ynum, xnum]
                    data = data.reshape(shape)
                    ncfile.write(var, data, origin=origin)
    #Close netCDF file
    ncfile.close()
    print 'Convert finished!'
def convert2nc(infn, outfn, version='netcdf3', writedimvar=False, largefile=False):
    """
    Convert data file (Grib, HDF...) to netCDF data file.

    :param infn: (*string or DimDataFile*) Input data file (or file name).
    :param outfn: (*string*) Output netCDF data file name.
    :param version: (*string*) netCDF version of the output file.
    :param writedimvar: (*boolean*) Write dimension variables or not.
    :param largefile: (*boolean*) Create netCDF as large file or not.
    """
    if isinstance(infn, DimDataFile):
        f = infn
    else:
        #Open input data file
        f = addfile(infn)
    #New netCDF file
    ncfile = addfile(outfn, 'c', version=version, largefile=largefile)
    #Add dimensions (skipping duplicate names)
    dims = []
    dimnames = []
    for dim in f.dimensions():
        dimname = dim.getShortName()
        if not dimname in dimnames:
            dims.append(ncfile.adddim(dimname, dim.getLength()))
            dimnames.append(dimname)
    #Add global attributes
    for attr in f.attributes():
        ncfile.addgroupattr(attr.getName(), attr.getValues())
    #Add dimension variables
    tvar = None
    if writedimvar:
        dimvars = []
        for i in range(len(f.dimensions())):
            dim = f.dimensions()[i]
            dname = dim.getShortName()
            if dim.getDimType() == DimensionType.T:
                var = ncfile.addvar(dname, 'int', [dims[i]])
                var.addattr('units', 'hours since 1900-01-01 00:00:0.0')
                var.addattr('long_name', 'Time')
                var.addattr('standard_name', 'time')
                var.addattr('axis', 'T')
                tvar = var
            elif dim.getDimType() == DimensionType.Z:
                var = ncfile.addvar(dname, 'float', [dims[i]])
                var.addattr('long_name', 'Level')
                var.addattr('axis', 'Z')
            elif dim.getDimType() == DimensionType.Y:
                var = ncfile.addvar(dname, 'float', [dims[i]])
                var.addattr('long_name', dname)
                var.addattr('axis', 'Y')
            elif dim.getDimType() == DimensionType.X:
                var = ncfile.addvar(dname, 'float', [dims[i]])
                var.addattr('long_name', dname)
                var.addattr('axis', 'X')
            else:
                var = ncfile.addvar(dname, 'float', [dims[i]])
                var.addattr('long_name', dname)
                var.addattr('axis', dname)
            dimvars.append(var)
    #Add variables (skip structure-typed and null-dimension variables, and
    #variables whose dimensions are not all present in the output file)
    variables = []
    for var in f.variables():
        #print 'Variable: ' + var.getShortName()
        if var.hasNullDimension():
            continue
        if var.getDataType() == DataType.STRUCTURE:
            continue
        vdims = []
        missdim = False
        for vdim in var.getDimensions():
            isvalid = False
            for dim in dims:
                if dim.getShortName() == vdim.getShortName():
                    vdims.append(dim)
                    isvalid = True
                    break
            if not isvalid:
                missdim = True
                break
        if missdim:
            continue
        nvar = ncfile.addvar(var.getShortName(), var.getDataType(), vdims)
        for attr in var.getAttributes():
            nvar.addattr(attr.getName(), attr.getValues())
        variables.append(nvar)
    #Create netCDF file
    ncfile.create()
    #Write dimension variable data
    if writedimvar:
        for dimvar, dim in zip(dimvars, f.dimensions()):
            if dim.getDimType() != DimensionType.T:
                ncfile.write(dimvar, np.array(dim.getDimValue()))
    #Write time dimension variable data (hours since 1900-01-01, matching
    #the 'units' attribute written above)
    if writedimvar and not tvar is None:
        sst = datetime.datetime(1900, 1, 1)
        tnum = f.timenum()
        hours = []
        for t in range(0, tnum):
            st = f.gettime(t)
            hs = (st - sst).total_seconds() // 3600
            hours.append(hs)
        ncfile.write(tvar, np.array(hours))
    #Write variable data
    for var in variables:
        print 'Variable: ' + var.name
        data = f[str(var.name)].read()
        ncfile.write(var, data)
    #Close netCDF file
    ncfile.close()
    print 'Convert finished!'
def stem(self, x, y, z, s=8, c='b', marker='o', alpha=None, linewidth=None, verts=None, **kwargs):
    """
    Make a 3D stem plot of x, y and z, where x, y and z are sequence like objects
    of the same lengths.

    :param x: (*array_like*) Input x data.
    :param y: (*array_like*) Input y data.
    :param z: (*array_like*) Input z data.
    :param s: (*int*) Size of points.
    :param c: (*Color*) Color of the points. Or z vlaues.
    :param alpha: (*int*) The alpha blending value, between 0 (transparent) and 1 (opaque).
    :param marker: (*string*) Marker of the points.
    :param label: (*string*) Label of the points series.
    :param levs: (*array_like*) Optional. A list of floating point numbers indicating the level
        points to draw, in increasing order.

    :returns: Stem point and line graphics.
    """
    #Add data series
    label = kwargs.pop('label', 'S_0')
    xdata = plotutil.getplotdata(x)
    ydata = plotutil.getplotdata(y)
    zdata = plotutil.getplotdata(z)

    #Set plot data styles
    pb, isunique = plotutil.getlegendbreak('point', **kwargs)
    pb.setCaption(label)
    pstyle = plotutil.getpointstyle(marker)
    pb.setStyle(pstyle)
    bottom = kwargs.pop('bottom', 0)
    samestemcolor = kwargs.pop('samestemcolor', False)
    # Decide whether 'c' is a value array (color-mapped) or explicit color(s).
    isvalue = False
    if len(c) > 1:
        if isinstance(c, (np.NDArray, DimArray)):
            isvalue = True
        elif isinstance(c[0], (int, long, float)):
            isvalue = True
    if isvalue:
        # Color-mapped stems: build (or take) a legend scheme over the values.
        ls = kwargs.pop('symbolspec', None)
        if ls is None:
            if isinstance(c, (list, tuple)):
                c = np.array(c)
            levels = kwargs.pop('levs', None)
            if levels is None:
                levels = kwargs.pop('levels', None)
            if levels is None:
                cnum = kwargs.pop('cnum', None)
                if cnum is None:
                    ls = plotutil.getlegendscheme([], c.min(), c.max(), **kwargs)
                else:
                    ls = plotutil.getlegendscheme([cnum], c.min(), c.max(), **kwargs)
            else:
                ls = plotutil.getlegendscheme([levels], c.min(), c.max(), **kwargs)
            ls = plotutil.setlegendscheme_point(ls, **kwargs)
            # Apply point size(s) to the legend breaks.
            if isinstance(s, int):
                for lb in ls.getLegendBreaks():
                    lb.setSize(s)
            else:
                n = len(s)
                for i in range(0, n):
                    ls.getLegendBreaks()[i].setSize(s[i])
        # Stem line style: default black, or a caller-supplied line spec.
        linefmt = kwargs.pop('linefmt', None)
        if linefmt is None:
            linefmt = PolylineBreak()
            linefmt.setColor(Color.black)
        else:
            linefmt = plotutil.getlegendbreak('line', **linefmt)[0]
        #Create graphics
        graphics = GraphicFactory.createStems3D(xdata, ydata, zdata, c.asarray(), \
            ls, linefmt, bottom, samestemcolor)
    else:
        # Explicit colors: build one point break per point as needed to cover
        # the combinations of per-point sizes and colors.
        colors = plotutil.getcolors(c, alpha)
        pbs = []
        if isinstance(s, int):
            pb.setSize(s)
            if len(colors) == 1:
                pb.setColor(colors[0])
                pb.setOutlineColor(colors[0])
                pbs.append(pb)
            else:
                n = len(colors)
                for i in range(0, n):
                    npb = pb.clone()
                    npb.setColor(colors[i])
                    npb.setOutlineColor(colors[i])
                    pbs.append(npb)
        else:
            n = len(s)
            if len(colors) == 1:
                pb.setColor(colors[0])
                pb.setOutlineColor(colors[0])
                for i in range(0, n):
                    npb = pb.clone()
                    npb.setSize(s[i])
                    pbs.append(npb)
            else:
                for i in range(0, n):
                    npb = pb.clone()
                    npb.setSize(s[i])
                    npb.setColor(colors[i])
                    npb.setOutlineColor(colors[i])
                    pbs.append(npb)
        # Stem line style: default to the first point color.
        linefmt = kwargs.pop('linefmt', None)
        if linefmt is None:
            linefmt = PolylineBreak()
            linefmt.setColor(colors[0])
        else:
            linefmt = plotutil.getlegendbreak('line', **linefmt)[0]
        #Create graphics
        graphics = GraphicFactory.createStems3D(xdata, ydata, zdata, pbs, linefmt, \
            bottom, samestemcolor)
    visible = kwargs.pop('visible', True)
    if visible:
        # Factory returns a (points, lines) pair; add both.
        self.add_graphic(graphics[0])
        self.add_graphic(graphics[1])
    return graphics[0], graphics[1]
def plot(self, x, y, z, *args, **kwargs):
    """
    Plot 3D lines and/or markers to the axes. *args* is a variable length
    argument, allowing for multiple *x, y* pairs with an optional format
    string.

    :param x: (*array_like*) Input x data.
    :param y: (*array_like*) Input y data.
    :param z: (*array_like*) Input z data.
    :param style: (*string*) Line style for plot.

    :returns: Legend breaks of the lines.

    The following format string characters are accepted to control the line style or marker:

      =========  ===========
      Character  Description
      =========  ===========
      '-'         solid line style
      '--'        dashed line style
      '-.'        dash-dot line style
      ':'         dotted line style
      '.'         point marker
      ','         pixel marker
      'o'         circle marker
      'v'         triangle_down marker
      '^'         triangle_up marker
      '<'         triangle_left marker
      '>'         triangle_right marker
      's'         square marker
      'p'         pentagon marker
      '*'         star marker
      'x'         x marker
      'D'         diamond marker
      =========  ===========

    The following color abbreviations are supported:

      =========  =====
      Character  Color
      =========  =====
      'b'        blue
      'g'        green
      'r'        red
      'c'        cyan
      'm'        magenta
      'y'        yellow
      'k'        black
      =========  =====
    """
    xdata = plotutil.getplotdata(x)
    ydata = plotutil.getplotdata(y)
    zdata = plotutil.getplotdata(z)
    style = None
    if len(args) > 0:
        style = args[0]

    #Set plot data styles
    label = kwargs.pop('label', 'S_1')
    mvalues = kwargs.pop('mvalues', None)
    if mvalues is None:
        # Plain line: style from format string or keyword arguments.
        if style is None:
            line = plotutil.getlegendbreak('line', **kwargs)[0]
            line.setCaption(label)
        else:
            line = plotutil.getplotstyle(style, label, **kwargs)
        # Optional per-segment colors: clone the break for each color.
        colors = kwargs.pop('colors', None)
        if not colors is None:
            colors = plotutil.getcolors(colors)
            cbs = []
            for color in colors:
                cb = line.clone()
                cb.setColor(color)
                cbs.append(cb)
    else:
        # Value-mapped line: build (or take) a legend scheme over mvalues.
        ls = kwargs.pop('symbolspec', None)
        if ls is None:
            if isinstance(mvalues, (list, tuple)):
                mvalues = np.array(mvalues)
            levels = kwargs.pop('levs', None)
            if levels is None:
                levels = kwargs.pop('levels', None)
            if levels is None:
                cnum = kwargs.pop('cnum', None)
                if cnum is None:
                    ls = plotutil.getlegendscheme([], mvalues.min(), mvalues.max(), **kwargs)
                else:
                    ls = plotutil.getlegendscheme([cnum], mvalues.min(), mvalues.max(), **kwargs)
            else:
                ls = plotutil.getlegendscheme([levels], mvalues.min(), mvalues.max(), **kwargs)
            ls = plotutil.setlegendscheme_line(ls, **kwargs)

    #Add graphics
    if mvalues is None:
        if colors is None:
            graphics = GraphicFactory.createLineString3D(xdata, ydata, zdata, line)
        else:
            graphics = GraphicFactory.createLineString3D(xdata, ydata, zdata, cbs)
    else:
        mdata = plotutil.getplotdata(mvalues)
        graphics = GraphicFactory.createLineString3D(xdata, ydata, zdata, mdata, ls)
    visible = kwargs.pop('visible', True)
    if visible:
        self.add_graphic(graphics)
    return graphics
def set_data(self, value):
    """Replace the underlying data of this DataFrame.

    :param value: (*array_like*) New data values.
    """
    # Wrap as NDArray, then hand the Java array to the Java dataframe.
    arr = np.array(value)
    self._dataframe.setData(arr._array)
def set_values(self, value):
    """Replace the values of this series.

    :param value: (*array_like*) New values.
    """
    # Keep the NDArray wrapper and push the Java array into the Java series.
    arr = np.array(value)
    self._series.setData(arr._array)
    self._data = arr
def __setitem__(self, key, value):
    """Set value(s) in the DataFrame.

    :param key: A column name, an (row, column) tuple, or a row selector
        (int, label, slice or list) applied across all columns.
    :param value: The value(s) to set; datetimes and sequences are converted
        to the underlying Java representations first.
    """
    # Normalize value: datetimes -> Java datetimes, sequences -> NDArray,
    # NDArray -> underlying Java array.
    if isinstance(value, datetime.datetime):
        value = miutil.jdatetime(value)
    if isinstance(value, (list, tuple)):
        if isinstance(value[0], datetime.datetime):
            value = miutil.jdatetime(value)
        value = np.array(value)
    if isinstance(value, NDArray):
        value = value._array

    # String key: set (or create) a whole column.
    if isinstance(key, basestring):
        if isinstance(value, series.Series):
            value = value.values._array
        self._dataframe.setColumn(key, value)
        return

    hascolkey = True
    if isinstance(key, tuple):
        # (row, column) addressing; negative indices count from the end.
        ridx = key[0]
        cidx = key[1]
        if isinstance(ridx, int) and isinstance(cidx, int):
            if ridx < 0:
                ridx = self.shape[0] + ridx
            if cidx < 0:
                cidx = self.shape[1] + cidx
            self._dataframe.setValue(ridx, cidx, value)
            return
        elif isinstance(ridx, int) and isinstance(cidx, basestring):
            if ridx < 0:
                ridx = self.shape[0] + ridx
            self._dataframe.setValue(ridx, cidx, value)
            return
    else:
        # Single key selects rows across all columns.
        key = (key, slice(None))
        hascolkey = False

    # Resolve the row selector to an index list or Java Range.
    k = key[0]
    if isinstance(k, int):
        if k < 0:
            k = self.shape[0] + k
        rowkey = k
    elif isinstance(k, basestring):
        # Row label: single-row range; silently ignore missing labels.
        sidx = self._index.index(k)
        if sidx < 0:
            return None
        eidx = sidx
        step = 1
        rowkey = Range(sidx, eidx, step)
    elif isinstance(k, slice):
        # Slice bounds may be labels or (possibly negative) positions.
        if isinstance(k.start, basestring):
            sidx = self._index.index(k.start)
            if sidx < 0:
                sidx = 0
        else:
            sidx = 0 if k.start is None else k.start
            if sidx < 0:
                sidx = self.shape[0] + sidx
        if isinstance(k.stop, basestring):
            eidx = self._index.index(k.stop)
            if eidx < 0:
                eidx = self.shape[0] + eidx
        else:
            eidx = self.shape[0] - 1 if k.stop is None else k.stop - 1
            if eidx < 0:
                eidx = self.shape[0] + eidx
        step = 1 if k.step is None else k.step
        rowkey = Range(sidx, eidx, step)
    elif isinstance(k, list):
        # List of positions, or list of labels resolved to positions.
        if isinstance(k[0], int):
            rowkey = k
        else:
            tlist = []
            for tstr in k:
                idx = self._index.index(tstr)
                if idx >= 0:
                    tlist.append(idx)
            rowkey = tlist
    else:
        return

    # Resolve the column selector (defaults to all columns).
    if not hascolkey:
        colkey = Range(0, self.shape[1] - 1, 1)
    else:
        k = key[1]
        if isinstance(k, int):
            sidx = k
            if sidx < 0:
                sidx = self.shape[1] + sidx
            eidx = sidx
            step = 1
            colkey = Range(sidx, eidx, step)
        elif isinstance(k, slice):
            sidx = 0 if k.start is None else k.start
            if sidx < 0:
                sidx = self.shape[1] + sidx
            eidx = self.shape[1] - 1 if k.stop is None else k.stop - 1
            if eidx < 0:
                eidx = self.shape[1] + eidx
            step = 1 if k.step is None else k.step
            colkey = Range(sidx, eidx, step)
        elif isinstance(k, list):
            if isinstance(k[0], int):
                colkey = k
            else:
                colkey = self.columns.indexOfName(k)
        elif isinstance(k, basestring):
            col = self.columns.indexOf(k)
            colkey = Range(col, col + 1, 1)
        else:
            return
    self._dataframe.setValues(rowkey, colkey, value)