def bparrotate(ctx, **kwargs):
    """Rotate an array parallel to the unit vectors of the magnetic field.

    For two arrays u and b, where b_hat is the unit vector in the direction of
    the magnetic field, the operation is (u . b_hat) b_hat. Note that the
    magnetic field is a three-component field, so the output is a new
    three-component vector holding the x, y, and z components of the part of u
    parallel to the magnetic field.
    """
    vlog(ctx, 'Starting rotation parallel to magnetic field')
    pushChain(ctx, 'bparrotate', **kwargs)
    data = ctx.obj['data']  # shortcut

    for a, rot in zip(data.iterator(kwargs['array']), data.iterator(kwargs['field'])):
        # Magnetic field is components 3, 4, & 5 in the field array.
        grid, outrot = diag.parrotate(a, rot, '3:6')
        # Create a new GData structure with the appropriate tag and label to store the output.
        out = Data(tag=kwargs['tag'], compgrid=ctx.obj['compgrid'],
                   label=kwargs['label'], meta=a.meta)
        out.push(grid, outrot)
        data.add(out)
    #end
    data.deactivateAll(tag=kwargs['array'])
    data.deactivateAll(tag=kwargs['field'])

    vlog(ctx, 'Finishing rotation parallel to magnetic field')

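
# Illustrative sketch only (not the diag.parrotate implementation): for plain
# NumPy arrays 'u' and 'b' with the vector components in the trailing axis, the
# parallel projection (u . b_hat) b_hat used above can be written as follows.
# The helper name and arguments are hypothetical.
def _example_parallel_projection(u, b):
    """Return the part of u parallel to b, i.e. (u . b_hat) b_hat."""
    bhat = b / np.linalg.norm(b, axis=-1, keepdims=True)  # unit vectors of b
    upar = np.sum(u * bhat, axis=-1, keepdims=True)       # u . b_hat
    return upar * bhat
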
def perprotate(ctx, **kwargs):
    """Rotate an array perpendicular to the unit vectors of a second array.

    For two arrays u and v, where v is the rotator, the operation is
    u - (u . v_hat) v_hat.
    """
    vlog(ctx, 'Starting rotation perpendicular to rotator array')
    pushChain(ctx, 'perprotate', **kwargs)
    data = ctx.obj['data']  # shortcut

    for a, rot in zip(data.iterator(kwargs['array']), data.iterator(kwargs['rotator'])):
        grid, outrot = diag.perprotate(a, rot)
        # Create a new GData structure with the appropriate tag and label to store the output.
        out = Data(tag=kwargs['tag'], compgrid=ctx.obj['compgrid'],
                   label=kwargs['label'], meta=a.meta)
        out.push(grid, outrot)
        data.add(out)
    #end
    data.deactivateAll(tag=kwargs['array'])
    data.deactivateAll(tag=kwargs['rotator'])

    vlog(ctx, 'Finishing rotation perpendicular to rotator array')

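
# Illustrative sketch only (not the diag.perprotate implementation): the
# perpendicular part u - (u . v_hat) v_hat, with vector components in the
# trailing axis, written in plain NumPy. The helper name is hypothetical.
def _example_perpendicular_projection(u, v):
    """Return the part of u perpendicular to v, i.e. u - (u . v_hat) v_hat."""
    vhat = v / np.linalg.norm(v, axis=-1, keepdims=True)  # unit vectors of v
    return u - np.sum(u * vhat, axis=-1, keepdims=True) * vhat
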
def bperprotate(ctx, **kwargs):
    """Rotate an array perpendicular to the unit vectors of the magnetic field.

    For two arrays u and b, where b_hat is the unit vector in the direction of
    the magnetic field, the operation is u - (u . b_hat) b_hat.
    """
    vlog(ctx, 'Starting rotation perpendicular to magnetic field')
    pushChain(ctx, 'bperprotate', **kwargs)
    data = ctx.obj['data']  # shortcut

    for a, rot in zip(data.iterator(kwargs['array']), data.iterator(kwargs['field'])):
        # Magnetic field is components 3, 4, & 5 in the field array.
        grid, outrot = diag.perprotate(a, rot, '3:6')
        # Create a new GData structure with the appropriate tag and label to store the output.
        out = Data(tag=kwargs['tag'], compgrid=ctx.obj['compgrid'],
                   label=kwargs['label'], meta=a.meta)
        out.push(grid, outrot)
        data.add(out)
    #end
    data.deactivateAll(tag=kwargs['array'])
    data.deactivateAll(tag=kwargs['field'])

    vlog(ctx, 'Finishing rotation perpendicular to magnetic field')

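
# Illustrative sketch only: pulling the magnetic-field vectors out of an EM
# field array before projecting. As noted in the comments above, the magnetic
# field occupies components 3:6 of the field array (the electric field sits in
# the first three components). Array names here are hypothetical.
def _example_bperp(u, em_values):
    """Return the part of u perpendicular to B, u - (u . b_hat) b_hat."""
    b = em_values[..., 3:6]                                # Bx, By, Bz
    bhat = b / np.linalg.norm(b, axis=-1, keepdims=True)   # unit vectors of B
    return u - np.sum(u * bhat, axis=-1, keepdims=True) * bhat
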
def recovery(ctx, **kwargs):
    vlog(ctx, 'Starting recovery')
    pushChain(ctx, 'recovery', **kwargs)
    data = ctx.obj['data']

    basisType = None
    if kwargs['basistype'] is not None:
        if kwargs['basistype'] == 'ms' or kwargs['basistype'] == 'ns':
            basisType = 'serendipity'
        elif kwargs['basistype'] == 'mo':
            basisType = 'maximal-order'
        #end
    #end

    for dat in data.iterator(kwargs['use']):
        dg = GInterpModal(dat, kwargs['polyorder'], basisType,
                          kwargs['interp'], kwargs['periodic'])
        numNodes = dg.numNodes
        numComps = int(dat.getNumComps() / numNodes)
        #vlog(ctx, 'interpolate: interpolating dataset #{:d}'.format(s))
        #dg.recovery(tuple(range(numComps)), stack=True)
        if kwargs['tag']:
            out = Data(tag=kwargs['tag'], label=kwargs['label'],
                       compgrid=ctx.obj['compgrid'], meta=dat.meta)
            grid, values = dg.recovery(0, kwargs['c1'])
            out.push(grid, values)
            data.add(out)
        else:
            dg.recovery(0, kwargs['c1'], overwrite=True)
        #end
    #end
    vlog(ctx, 'Finishing recovery')

def energetics(ctx, **kwargs):
    vlog(ctx, 'Starting energetics decomposition')
    pushChain(ctx, 'energetics', **kwargs)
    data = ctx.obj['data']  # shortcut

    for elc, ion, em in zip(data.iterator(kwargs['elc']),
                            data.iterator(kwargs['ion']),
                            data.iterator(kwargs['field'])):
        grid = em.getGrid()
        outEnergetics = np.zeros(em.getValues()[..., 0:7].shape)

        out = Data(tag=kwargs['tag'], compgrid=ctx.obj['compgrid'],
                   label=kwargs['label'], meta=em.meta)
        grid, outEnergetics = diag.energetics(elc, ion, em)
        out.push(grid, outEnergetics)
        data.add(out)
    #end
    data.deactivateAll(tag=kwargs['elc'])
    data.deactivateAll(tag=kwargs['ion'])
    data.deactivateAll(tag=kwargs['field'])

    vlog(ctx, 'Finishing energetics decomposition')

def interpolate(ctx, **kwargs):
    vlog(ctx, 'Starting interpolate')
    pushChain(ctx, 'interpolate', **kwargs)
    data = ctx.obj['data']

    basisType = None
    isModal = None
    if kwargs['basistype'] is not None:
        if kwargs['basistype'] == 'ms':
            basisType = 'serendipity'
            isModal = True
        elif kwargs['basistype'] == 'ns':
            basisType = 'serendipity'
            isModal = False
        elif kwargs['basistype'] == 'mo':
            basisType = 'maximal-order'
            isModal = True
        elif kwargs['basistype'] == 'mt':
            basisType = 'tensor'
            isModal = True
        #end
    #end

    for dat in data.iterator(kwargs['use']):
        if kwargs['basistype'] is None and dat.meta['basisType'] is None:
            ctx.fail(click.style(
                "ERROR in interpolate: no 'basistype' was specified and dataset {:s} does not have required metadata".format(dat.getLabel()),
                fg='red'))
        #end

        if isModal or dat.meta['isModal']:
            dg = GInterpModal(dat, kwargs['polyorder'], kwargs['basistype'],
                              kwargs['interp'], kwargs['read'])
        else:
            dg = GInterpNodal(dat, kwargs['polyorder'], basisType,
                              kwargs['interp'], kwargs['read'])
        #end
        numNodes = dg.numNodes
        numComps = int(dat.getNumComps() / numNodes)

        if not kwargs['new']:
            if kwargs['tag']:
                out = Data(tag=kwargs['tag'], label=kwargs['label'],
                           compgrid=ctx.obj['compgrid'], meta=dat.meta)
                grid, values = dg.interpolate(tuple(range(numComps)))
                out.push(grid, values)
                data.add(out)
            else:
                dg.interpolate(tuple(range(numComps)), overwrite=True)
            #end
        else:
            interpFn(dat, kwargs['polyorder'])
        #end
    #end
    vlog(ctx, 'Finishing interpolate')

def val2coord(ctx, **kwargs):
    """Given a dataset (typically a DynVector), select columns from it to create new datasets.

    For example, you can choose column 1 to be the X-axis of the new dataset
    and column 2 to be the Y-axis. Multiple columns can be chosen using range
    specifiers, and as many datasets are then created.
    """
    vlog(ctx, 'Starting val2coord')
    pushChain(ctx, 'val2coord', **kwargs)
    data = ctx.obj['data']

    activeSets = []
    colors = ['C0', 'C1', 'C2', 'C3', 'C4', 'C5', 'C6', 'C7', 'C8', 'C9']

    tags = list(data.tagIterator())
    outTag = kwargs['tag']
    if outTag is None:
        if len(tags) == 1:
            outTag = tags[0]
        else:
            outTag = 'val2coord'
        #end
    #end

    for setIdx, dat in data.iterator(kwargs['use'], enum=True):
        values = dat.getValues()
        xComps = _getRange(kwargs['x'], len(values[0, :]))
        yComps = _getRange(kwargs['y'], len(values[0, :]))

        if len(xComps) > 1 and len(xComps) != len(yComps):
            click.echo(click.style(
                "ERROR 'val2coord': Length of the x-components ({:d}) is greater than 1 and not equal to the y-components ({:d}).".format(len(xComps), len(yComps)),
                fg='red'))
            ctx.exit()
        #end

        for i, yc in enumerate(yComps):
            if len(xComps) > 1:
                xc = xComps[i]
            else:
                xc = xComps[0]
            #end
            x = values[..., xc]
            y = values[..., yc, np.newaxis]

            out = Data(tag=outTag, label=kwargs['label'],
                       compgrid=ctx.obj['compgrid'], meta=dat.meta)
            out.push([x], y)
            out.color = colors[i % len(colors)]  # cycle through the default color sequence
            data.add(out)
        #end
        dat.deactivate()
    #end
    vlog(ctx, 'Finishing val2coord')

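
# Illustrative sketch only: what the column selection above amounts to for a
# plain DynVector-like value array. The helper name and the default column
# indices are hypothetical; the command itself resolves columns via _getRange.
def _example_val2coord(values, xc=0, yc=1):
    """Pick column xc as the abscissa and column yc as the ordinate."""
    x = values[..., xc]                # new X-axis, e.g. the column holding time
    y = values[..., yc, np.newaxis]    # selected column, kept as a single component
    return [x], y
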
def current(ctx, **kwargs):
    vlog(ctx, 'Starting current accumulation')
    pushChain(ctx, 'current', **kwargs)
    data = ctx.obj['data']

    for dat in data.iterator(kwargs['use']):
        grid = dat.getGrid()
        outcurrent = np.zeros(dat.getValues().shape)
        grid, outcurrent = diag.accumulate_current(dat, kwargs['qbym'])
        dat.deactivate()

        out = Data(tag=kwargs['tag'], compgrid=ctx.obj['compgrid'],
                   label=kwargs['label'], meta=dat.meta)
        out.push(grid, outcurrent)
        data.add(out)
    #end
    vlog(ctx, 'Finishing current accumulation')

def integrate(ctx, **kwargs):
    vlog(ctx, 'Starting integrate')
    pushChain(ctx, 'integrate', **kwargs)
    data = ctx.obj['data']

    for dat in data.iterator(kwargs['use']):
        if kwargs['tag']:
            grid, values = diag.integrate(dat, kwargs['axis'])
            out = Data(tag=kwargs['tag'], label=kwargs['label'],
                       compgrid=ctx.obj['compgrid'], meta=dat.meta)
            out.push(grid, values)
            data.add(out)
        else:
            diag.integrate(dat, kwargs['axis'], overwrite=True)
        #end
    #end
    vlog(ctx, 'Finishing integrate')

def magsq(ctx, **kwargs):
    """Calculate the magnitude squared of an input array."""
    vlog(ctx, 'Starting magnitude squared computation')
    pushChain(ctx, 'magsq', **kwargs)
    data = ctx.obj['data']

    for dat in data.iterator(kwargs['use']):
        if kwargs['tag']:
            out = Data(tag=kwargs['tag'], label=kwargs['label'],
                       compgrid=ctx.obj['compgrid'], meta=dat.meta)
            grid, values = diag.magsq(dat)
            out.push(grid, values)
            data.add(out)
        else:
            diag.magsq(dat, overwrite=True)
        #end
    #end
    vlog(ctx, 'Finishing magnitude squared computation')

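
# Illustrative sketch only (not the diag.magsq implementation): the magnitude
# squared of a multi-component array is the sum of the squares of its
# components, leaving a single-component result.
def _example_magsq(values):
    """Return |values|^2, summed over the trailing component axis."""
    return np.sum(values**2, axis=-1, keepdims=True)
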
def fft(ctx, **kwargs):
    """Calculate the Fourier transform or the power-spectral density of input data.

    Only works on 1D data at present.
    """
    vlog(ctx, 'Starting FFT')
    pushChain(ctx, 'fft', **kwargs)
    data = ctx.obj['data']

    for dat in data.iterator(kwargs['use']):
        if kwargs['tag']:
            out = Data(tag=kwargs['tag'], label=kwargs['label'],
                       compgrid=ctx.obj['compgrid'], meta=dat.meta)
            grid, values = diag.fft(dat, kwargs['psd'], kwargs['iso'])
            out.push(grid, values)
            data.add(out)
        else:
            diag.fft(dat, kwargs['psd'], kwargs['iso'], overwrite=True)
        #end
    #end
    vlog(ctx, 'Finishing FFT')

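
# Illustrative sketch only (not the diag.fft implementation): for a real 1D
# signal on a uniform grid with spacing dx, the transform and a simple
# power-spectral-density estimate can be formed with numpy.fft as below. The
# helper name and arguments are hypothetical.
def _example_fft_psd(signal, dx, psd=False):
    """Return (frequencies, spectrum); the spectrum is |F|^2 when psd is True."""
    freq = np.fft.rfftfreq(signal.size, d=dx)
    ftil = np.fft.rfft(signal)
    return freq, (np.abs(ftil)**2 if psd else ftil)
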
def select(ctx, **kwargs):
    r"""Subselect data from the active dataset(s).

    This command allows one, for example, to choose a specific component of a
    multi-component dataset, or to select an index or coordinate range. Index
    ranges can also be specified using Python slice notation (start:end:stride).
    """
    vlog(ctx, 'Starting select')
    pushChain(ctx, 'select', **kwargs)
    data = ctx.obj['data']

    for dat in data.iterator(kwargs['use']):
        if kwargs['tag']:
            out = Data(tag=kwargs['tag'], label=kwargs['label'],
                       compgrid=ctx.obj['compgrid'], meta=dat.meta)
            grid, values = postgkyl.data.select(
                dat, z0=kwargs['z0'], z1=kwargs['z1'], z2=kwargs['z2'],
                z3=kwargs['z3'], z4=kwargs['z4'], z5=kwargs['z5'],
                comp=kwargs['comp'])
            out.push(grid, values)
            data.add(out)
        else:
            postgkyl.data.select(
                dat, overwrite=True, z0=kwargs['z0'], z1=kwargs['z1'],
                z2=kwargs['z2'], z3=kwargs['z3'], z4=kwargs['z4'],
                z5=kwargs['z5'], comp=kwargs['comp'])
        #end
    #end
    vlog(ctx, 'Finishing select')

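
# Illustrative sketch only: what a component/index sub-selection amounts to for
# a plain NumPy value array. Real datasets go through postgkyl.data.select,
# which also handles coordinate values and the grid; the helper name and
# defaults are hypothetical. An index range like '2:10:2' corresponds to
# slice(2, 10, 2) here.
def _example_select_values(values, idx=slice(None), comp=0):
    """Keep an index range along the first axis and a single component."""
    return values[idx, ..., comp, np.newaxis]
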
def velocity(ctx, **kwargs):
    vlog(ctx, 'Starting velocity')
    pushChain(ctx, 'velocity', **kwargs)
    data = ctx.obj['data']  # shortcut

    for m0, m1 in zip(data.iterator(kwargs['density']), data.iterator(kwargs['momentum'])):
        grid = m0.getGrid()
        valsM0 = m0.getValues()
        valsM1 = m1.getValues()

        out = Data(tag=kwargs['tag'], compgrid=ctx.obj['compgrid'],
                   label=kwargs['label'], meta=m0.meta)
        out.push(grid, valsM1 / valsM0)
        data.add(out)
    #end
    data.deactivateAll(tag=kwargs['density'])
    data.deactivateAll(tag=kwargs['momentum'])

    vlog(ctx, 'Finishing velocity')

def collect(ctx, **kwargs):
    """Collect data from the active datasets and create a new combined dataset.

    The time-stamp in each of the active datasets is collected and used as the
    new X-axis. Data can be collected in chunks, in which case several datasets
    are created, each collecting a chunk-sized piece of the data.
    """
    vlog(ctx, 'Starting collect')
    pushChain(ctx, 'collect', **kwargs)
    data = ctx.obj['data']

    tags = list(data.tagIterator())
    outTag = kwargs['tag']
    if outTag == ():
        if len(tags) == 1:
            outTag = tags[0]
        else:
            outTag = 'collect'
        #end
    #end

    for tag in data.tagIterator(kwargs['use']):
        time = [[]]
        values = [[]]
        grid = [[]]
        cnt = 0
        for i, dat in data.iterator(tag, enum=True):
            cnt += 1
            if kwargs['chunk'] and cnt > kwargs['chunk']:
                cnt = 1
                time.append([])
                values.append([])
                grid.append([])
            #end
            if dat.meta['time']:
                time[-1].append(dat.meta['time'])
            elif dat.meta['frame']:
                time[-1].append(dat.meta['frame'])
            else:
                time[-1].append(i)
            #end

            val = dat.getValues()
            if kwargs['sumdata']:
                numDims = dat.getNumDims()
                axis = tuple(range(numDims))
                values[-1].append(np.nansum(val, axis=axis))
            else:
                values[-1].append(val)
            #end
            if not grid[-1]:
                grid[-1] = dat.getGrid().copy()
            #end
        #end
        data.deactivateAll(tag)

        for i in range(len(time)):
            time[i] = np.array(time[i])
            values[i] = np.array(values[i])
            if kwargs['period'] is not None:
                time[i] = (time[i] - kwargs['offset']) % kwargs['period']
            #end
            sortIdx = np.argsort(time[i])
            time[i] = time[i][sortIdx]
            values[i] = values[i][sortIdx]

            if kwargs['sumdata']:
                grid[i] = [time[i]]
            else:
                grid[i].insert(0, np.array(time[i]))
            #end

            tempTag = outTag
            if isinstance(outTag, tuple) and len(outTag) > 1:
                tempTag = outTag[i]
            elif isinstance(outTag, tuple):
                tempTag = outTag[0]
            #end
            out = Data(tag=tempTag, label=kwargs['label'],
                       compgrid=ctx.obj['compgrid'])
            out.push(grid[i], values[i])
            data.add(out)
        #end
    #end
    vlog(ctx, 'Finishing collect')

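
# Illustrative sketch only: the core of 'collect' is stacking per-frame value
# arrays along a new leading axis, sorted by their time stamps, and prepending
# the time stamps to the grid so that time becomes the new X-axis. The helper
# name and arguments are hypothetical.
def _example_collect(times, frames, grid):
    """Stack frames over a new time axis, sorted by time."""
    times = np.asarray(times)
    order = np.argsort(times)
    stacked = np.array([frames[j] for j in order])
    return [times[order]] + list(grid), stacked
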