Example 1
import os
import numpy as N
from numpy import array, row_stack, pi

# Project-specific helpers (uniformsample, _cleanupTFV, ftt, mdat, nmpml, and
# the package's io module) are assumed to be imported from the surrounding package.

def useExplicitX(ds, dpath="/", chanX=0):
    # Convert data with an explicit X channel (column chanX) into a regularly
    # sampled timeseries, interpolating if the X steps are not uniform.
    ds = ds.getSubData(dpath)
    dat = ds.getData()
    head = ds.header()
    chan = dat[:, chanX]
    steps = chan[1:] - chan[:-1]
    smin = steps.min()
    sdev = (steps.max() - smin) / smin
    nonmono = False
    if smin < 0:
        print("Data are not monotonically increasing. Sorting.")
        nonmono = True
        smin = abs(steps).min()
        sdev = abs(sdev)
    head["StartTime"] = chan.min()
    head["Labels"] = [head["Labels"][i] for i in range(dat.shape[1]) if i != chanX]
    if smin <= 0:
        print("Data do not represent a function of the indicated X variable. Aborting.")
        return
    head["SamplesPerSecond"] = 1.0 / smin
    if sdev < 0.001:
        # Step sizes are (nearly) constant, so the data are already uniformly sampled.
        if nonmono:
            dat = dat[N.arange(dat.shape[0] - 1, -1, -1), :]  # reverse the row order (X was decreasing)
        print("Data are uniformly sampled")
        dat = dat[:, [x for x in range(dat.shape[1]) if x != chanX]]  # drop the X column
    else:
        print("Data are not uniformly sampled. Interpolating.")
        if chanX != 0:
            # Move the X channel to column 0 before resampling.
            ind = [chanX] + [x for x in range(dat.shape[1]) if x != chanX]
            dat = dat[:, ind]
        dat = uniformsample(dat, smin)
    ds.datinit(dat, head)
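
# The resampling itself is delegated to uniformsample, which is project-specific
# and not shown in this example. The sketch below is a hypothetical stand-in, not
# the project's implementation: it assumes column 0 holds the explicit X values,
# the second argument is the desired step, and the X column is dropped from the
# output (which is how the callers in this file treat the result).
def uniformsample_sketch(dat, step):
    x = dat[:, 0]
    order = x.argsort()                              # tolerate unsorted X values
    x = x[order]
    grid = N.arange(x[0], x[-1] + step / 2.0, step)  # uniform grid at the requested step
    cols = [N.interp(grid, x, dat[order, c]) for c in range(1, dat.shape[1])]
    return N.column_stack(cols)                      # X column dropped, data columns kept
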
def tffmax(ds, usamp=True):
    # Scan the data for transfer-function values and return them as a new Data
    # element, either resampled onto a uniform grid or kept as an explicit function.
    ftt.scanForTFValues(ds, window=.2, minFreq=4.9, maxFreq=250.0)
    tfv = ds.getSubData('/ftvals')
    tf = _cleanupTFV(tfv.getData())
    if tf.shape[0] == 0:
        print("No values in fmax tf")
        return None
    if usamp:
        sv = tf[:, 0].min()
        tf = uniformsample(tf, 1.0)
        dat = mdat.newData(tf, {'SampleType': 'timeseries', 'SamplesPerSecond': 1.0, "StartTime": sv})
    else:
        dat = mdat.newData(tf, {'SampleType': 'function'})
    return dat
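
# tffmax stores the transfer function either as a 'function' (explicit X values in
# column 0) or as a 'timeseries' (implicit X axis given by StartTime and
# SamplesPerSecond). A minimal sketch of reconstructing that implicit axis, assuming
# those header fields carry their conventional meaning; implicit_x_axis is a
# hypothetical helper, not part of the project code.
def implicit_x_axis(n_samples, header):
    return header["StartTime"] + N.arange(n_samples) / header["SamplesPerSecond"]

# e.g. implicit_x_axis(5, {"StartTime": 4.9, "SamplesPerSecond": 1.0})
# -> array([4.9, 5.9, 6.9, 7.9, 8.9])
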
def allTFsRs(fnames):
    # Collect the transfer function from every file, resample each onto a common
    # grid, and write them all into a single document.
    ndoc = nmpml.blankDocument()
    for fname in fnames:
        doc = io.read(fname)
        ds = doc.getElements("Data", depth=1)[0]
        bn = os.path.splitext(fname)[0]
        tf = tffmax(ds, False)
        if not tf:
            continue
        tf = tf.getData()
        if fname.startswith("2009") or fname.startswith("2010"):
            # Shift the phase column by pi and rewrap it into (-pi, pi]
            # (see the sketch after this function).
            tf[:, 2] += pi
            tf[:, 2] = tf[:, 2] - (2 * pi * (tf[:, 2] > pi))
        # Pad the ends so every transfer function spans 0 to 250 (the scan's maxFreq)
        # before resampling.
        tf = row_stack([array([[0, 0, 0]]), tf, array([[250, tf[-1, 1], tf[-1, 2]]])])
        tf = uniformsample(tf, 1.0)
        tf = mdat.newData(tf, {'Name': bn, 'SampleType': 'timeseries', 'SamplesPerSecond': 1.0, "StartTime": 0})
        ndoc.newElement(tf)
    io.write(ndoc, "allTFsResamp.mdat")
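
The 2009/2010 branch in allTFsRs shifts the phase column by pi and then rewraps it into (-pi, pi]. A small self-contained numpy demonstration of that wrapping step (column index 2 above is assumed to be phase in radians):

import numpy as np

phase = np.array([-3.0, -1.0, 0.0, 1.0, 3.0])      # example phases in radians
shifted = phase + np.pi                             # apply the pi offset
shifted = shifted - 2 * np.pi * (shifted > np.pi)   # rewrap values above pi into (-pi, pi]
print(shifted)  # approximately [ 0.142  2.142  3.142 -2.142 -0.142]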
Example 4
from numpy import array, row_stack, column_stack

def orderPlotsByAttribute(ds, attrib="length", channel=1):
    # Gather the function-sampled children of ds, sort them by the given attribute,
    # pad each curve out to the shared X range, resample it on a uniform grid, and
    # store the resulting channels as a single timeseries.
    dats = ds.getElements("Data", {"SampleType": "function"})
    ifv = min([d.data[:, 0].min() for d in dats])  # smallest X value over all curves
    mfv = max([d.data[:, 0].max() for d in dats])  # largest X value over all curves
    vals = array([e.attrib(attrib) for e in dats])
    order = vals.argsort()
    chans = []
    for i in order:
        t = dats[i].data[:, [0, channel]]
        ii = t[:, 0].argmin()
        iv = t[ii, 0]
        if iv > ifv:
            # Extend the curve backward to the shared minimum X with a flat endpoint.
            t = row_stack([array([ifv, t[ii, 1]]), t])
        mi = t[:, 0].argmax()
        mv = t[mi, 0]
        if mv < mfv:
            # Extend the curve forward to the shared maximum X with a flat endpoint.
            t = row_stack([t, array([mfv, t[mi, 1]])])
        d = uniformsample(t, 5.0)[:, 0]
        chans.append(d)
    d = column_stack(chans)
    ds.datinit(d, {"SampleType": "timeseries", "SamplesPerSecond": 0.2})
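
orderPlotsByAttribute relies on the same project-specific uniformsample as Example 1. A minimal, self-contained illustration of the pad-then-resample idea it applies to each curve, using numpy.interp in place of uniformsample; pad_and_resample is a hypothetical helper, not the project's API:

import numpy as np

def pad_and_resample(xy, x_lo, x_hi, step):
    # Sort a two-column (x, y) curve, then sample it on a uniform grid covering
    # [x_lo, x_hi]. np.interp holds the first/last y value outside the data range,
    # which plays the role of the explicit endpoint rows added in the function above.
    x, y = xy[:, 0], xy[:, 1]
    order = x.argsort()
    x, y = x[order], y[order]
    grid = np.arange(x_lo, x_hi + step / 2.0, step)
    return np.interp(grid, x, y)

curves = [np.array([[0.0, 1.0], [4.0, 3.0]]),
          np.array([[1.0, 2.0], [5.0, 0.0]])]
lo = min(c[:, 0].min() for c in curves)
hi = max(c[:, 0].max() for c in curves)
resampled = np.column_stack([pad_and_resample(c, lo, hi, 1.0) for c in curves])
print(resampled.shape)  # (6, 2): every curve now has the same number of samples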