import scipy as sp
from tables import openFile


def read_hdf5_arc(file_name):
    """reads a .hdf5 archive and returns the raw data and sampling rate

    :type file_name: str
    :param file_name: path to the file to read
    :returns: ndarray: raw data [f32], float: sampling rate in Hz,
        or (None, None) on read error
    """

    data, srate = None, None
    with openFile(file_name, 'r') as arc:
        for node in arc:
            # XXX: any more identifiers we used for the data in an archive?!
            if node._v_name.lower() in ['data', 'x']:
                data = sp.asanyarray(node.read())
                try:
                    data = data.astype(sp.float32)
                    # ensure orientation: [samples, channels]
                    if data.shape[0] <= data.shape[1]:
                        data = data.T.copy()
                except Exception:
                    data = None
            if node._v_name.lower() == 'srate':
                try:
                    srate = float(node.read())
                except Exception:
                    srate = None
    return data, srate
def create_hdf5_arc(file_name, rdata, srate=1000.0, **kwargs):
    """creates a valid hdf5 archive for :rdata:

    :type file_name: str
    :param file_name: path to the file to write
    :type rdata: ndarray
    :param rdata: raw data array [samples, channels], castable to f32
    :type srate: float
    :param srate: sampling rate of rdata in Hz
        Default=1000.0
    :returns: True on success, False else
    """

    with openFile(file_name, 'w') as arc:
        # XXX: we go with 'data' here
        try:
            data = sp.asanyarray(rdata, dtype=sp.float32)
            # ensure orientation: [samples, channels]; compare on the cast
            # array, since rdata may not be an ndarray at all
            if data.shape[0] <= data.shape[1]:
                data = data.T
            arc.createArray(arc.root, 'data', data)
            arc.createArray(arc.root, 'srate', srate)
            for k, v in kwargs.iteritems():
                arc.createArray(arc.root, str(k), v)
            return True
        except Exception:
            return False
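# Illustrative sketch only (not part of the original module): round-trip a
# small strip of fake data through the two archive helpers above. The file
# name, array shape and sampling rate are arbitrary assumptions.
def _demo_hdf5_arc_roundtrip(file_name='/tmp/demo_arc.h5'):
    raw = sp.ones((1000, 4), dtype=sp.float32)
    if create_hdf5_arc(file_name, raw, srate=32000.0):
        data, srate = read_hdf5_arc(file_name)
        print 'read back: shape=%s, srate=%s' % (data.shape, srate)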
def __coerce__(self, other):
    """coerce other to a same-shaped ndarray; None signals failure"""

    try:
        other = sp.asanyarray(other)
        if other.shape == self.shape:
            return self.toarray(), other
        else:
            return None
    except Exception:
        return None
def __coerce__(self, other):
    """coerce other to a same-shaped ndarray; scalars pass through"""

    try:
        # check for scalars before the cast: sp.isscalar returns False for
        # the 0-dim array that sp.asanyarray turns a scalar into
        if sp.isscalar(other):
            return self.toarray(), other
        other = sp.asanyarray(other)
        if other.shape == self.shape:
            return self.toarray(), other
        return NotImplemented
    except Exception:
        return NotImplemented
def __mul__(self, other):
    """element-wise multiplication, staying in the diagonal representation
    whenever the result is again diagonal"""

    if sp.isscalar(other):
        return simple_diag_matrix(self.diag * other)
    try:
        other = sp.asanyarray(other)
        if other.shape == self.shape:
            # off-diagonal entries of self are zero, so only the diagonal
            # of other contributes to the element-wise product
            return simple_diag_matrix(self.diag * other.diagonal())
        return self.toarray() * other
    except Exception:
        return NotImplemented
def __init__(self, value):
    """
    :type value: scalar dtype
    :param value: single scalar value
    """

    super(MRScalar, self).__init__()
    value_ = sp.asanyarray(value)
    if value_.ndim != 0:
        raise ValueError('%s is not a scalar!' % value)
    self._value = value_
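# Illustrative sketch only: MRScalar accepts plain Python scalars and 0-dim
# arrays alike; anything with dimensions raises ValueError.
def _demo_mr_scalar():
    MRScalar(42)                  # plain int, wrapped as a 0-dim array
    MRScalar(sp.float32(1.5))     # numpy scalar works as well
    try:
        MRScalar([1, 2, 3])       # 1-dim input is rejected
    except ValueError, ex:
        print ex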
def append_data_peaks(self, data, force=False):
    """append bin(s) calculated from a strip of data

    With this method the data is first queried for peaks. This should
    reduce the noise/smoothness of the histogram as observed from the
    amplitude distribution of the pure signal.

    :type data: ndarray
    :param data: the data to generate the bin(s) to append from
    :type force: bool
    :param force: if True, immediately start a new bin before calculation
    """

    # check data
    data_ = sp.asanyarray(data)
    if data_.ndim < 2:
        data_ = sp.atleast_2d(data_)
    if data_.shape[0] < data_.shape[1]:
        data_ = data_.T
    nsmpl, nchan = data_.shape
    if nchan != self._nchan:
        raise ValueError('data has channel count %s, expected %s' %
                         (nchan, self._nchan))
    # XXX: force is documented above but currently not evaluated

    # generate bin set
    bin_set = [0]
    if self._cur_bin_smpl != 0:
        bin_set.append(self._bin_size - self._cur_bin_smpl)
    while bin_set[-1] < nsmpl:
        bin_set.append(bin_set[-1] + self._bin_size)
    if bin_set[-1] > nsmpl:
        bin_set[-1] = nsmpl

    # process bins
    idx = 1
    while idx < len(bin_set):
        data_bin = data_[bin_set[idx - 1]:bin_set[idx], :]
        for c in xrange(self._nchan):
            self._cur_bin[c] += sp.histogram(data_bin[:, c],
                                             bins=self._ampl_range)[0]
        self._cur_bin_smpl += data_bin.shape[0]
        if self._cur_bin_smpl == self._bin_size:
            self.append_bin(self._cur_bin)
            self._cur_bin[:] = 0
            self._cur_bin_smpl = 0
        idx += 1
def append_bin(self, bin):
    """append an AmplHistBin instance

    :type bin: ndarray like
    :param bin: the AmplHistBin to append
    """

    # checks
    bin_ = sp.asanyarray(bin)
    if bin_.shape != self._cur_bin.shape:
        raise ValueError('shape does not match! expected %s, got %s' %
                         (self._cur_bin.shape, bin_.shape))
    if bin_.sum() == 0:
        print '!!appending zero bin!!'

    # append bin
    self._hist_data.append(bin_)
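# Illustrative sketch only: the bin_set bookkeeping from append_data_peaks,
# extracted standalone. Given a strip of nsmpl samples, a partially filled
# current bin of cur samples and a bin size, it produces the slice
# boundaries the method iterates over. All parameter values are made up.
def _demo_bin_boundaries(nsmpl=2500, bin_size=1000, cur=400):
    bin_set = [0]
    if cur != 0:
        # first boundary closes the partially filled current bin
        bin_set.append(bin_size - cur)
    while bin_set[-1] < nsmpl:
        bin_set.append(bin_set[-1] + bin_size)
    if bin_set[-1] > nsmpl:
        bin_set[-1] = nsmpl
    print bin_set  # -> [0, 600, 1600, 2500]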
def dict_list2arr(in_dict):
    """converts all lists in a dictionary to `ndarray` [in place!]

    If dict instances are found as values, this function is applied
    recursively.

    :type in_dict: dict
    :param in_dict: dictionary to convert
    """

    for k in in_dict:
        if isinstance(in_dict[k], list):
            in_dict[k] = sp.asanyarray(in_dict[k])
        elif isinstance(in_dict[k], dict):
            dict_list2arr(in_dict[k])
    return in_dict
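# Illustrative sketch only: lists anywhere in a (nested) dict become
# ndarrays in place; other value types are left alone.
def _demo_dict_list2arr():
    d = {'a': [1, 2, 3], 'b': {'c': [4.0, 5.0]}, 's': 'left alone'}
    dict_list2arr(d)
    print type(d['a']), type(d['b']['c']), type(d['s'])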
def __init__(self, value, header=None):
    """
    :type value: ndarray
    :param value: 2-dim array value
    :type header: list
    :param header: list of str with as many entries as columns in value
    """

    super(MRTable, self).__init__()
    val = sp.asanyarray(value)
    if val.dtype == object:
        raise ValueError('%s is not a compatible type: %s' %
                         (value, value.__class__.__name__))
    if val.ndim != 2:
        raise ValueError('%s is not ndim==2: value.ndim==%s' %
                         (value, val.ndim))
    self._value = val
    self.header = None
    if header is not None:
        if len(header) == self._value.shape[1]:
            self.header = map(str, header)
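# Illustrative sketch only: MRTable wraps a 2-dim array; the optional header
# is kept only when its length matches the column count.
def _demo_mr_table():
    tbl = MRTable([[1.0, 2.0], [3.0, 4.0]], header=['left', 'right'])
    print tbl.header  # ['left', 'right']
    bad = MRTable([[1.0, 2.0]], header=['only_one'])
    print bad.header  # None, header length does not match column count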
def __init__(self, diag, dtype=None):
    """matrix that stores only its diagonal

    :type diag: ndarray
    :param diag: 1-dim array of diagonal entries
    :type dtype: dtype
    :param dtype: optional dtype to cast the diagonal to
    """

    diag = sp.asanyarray(diag, dtype=dtype)
    assert diag.ndim == 1
    self.dtype = diag.dtype
    self.diag = diag
    self.shape = (diag.shape[0], diag.shape[0])
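# Illustrative sketch only, assuming the __mul__ and __init__ above belong to
# the same simple_diag_matrix class: element-wise multiplication with a
# scalar or a same-shaped matrix keeps the cheap diagonal representation.
def _demo_simple_diag_matrix():
    D = simple_diag_matrix(sp.array([1.0, 2.0, 3.0]))
    print (D * 2.0).diag       # [ 2.  4.  6.]
    E = D * sp.eye(3)          # same shape -> product of the diagonals
    print E.diag               # [ 1.  2.  3.]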