def make_libart_config(src):
    """Write an ``art_config.h`` header into *src* describing this platform's
    C integer sizes (measured with struct.calcsize) and the matching
    art_u8/art_u16/art_u32 typedefs.

    Calls die() (defined elsewhere in this file) when no suitable exact-width
    type exists on the platform.
    """
    from struct import calcsize as sizeof
    lines = ["""/* Automatically generated by setup.py */
#ifndef _ART_CONFIG_H
#\tdefine _ART_CONFIG_H
#\tdefine ART_SIZEOF_CHAR %d
#\tdefine ART_SIZEOF_SHORT %d
#\tdefine ART_SIZEOF_INT %d
#\tdefine ART_SIZEOF_LONG %d""" % (sizeof('c'), sizeof('h'), sizeof('i'), sizeof('l'))]
    # art_u8 and art_u16 require exactly 1- and 2-byte base types.
    if sizeof('c') == 1:
        lines.append("typedef unsigned char art_u8;")
    else:
        die("sizeof(char) != 1")
    if sizeof('h') == 2:
        lines.append("typedef unsigned short art_u16;")
    else:
        die("sizeof(short) != 2")
    # art_u32: prefer int, fall back to long when int is not 4 bytes wide.
    if sizeof('i') == 4:
        lines.append("typedef unsigned int art_u32;")
    elif sizeof('l') == 4:
        lines.append("typedef unsigned long art_u32;")
    else:
        die("sizeof(int)!=4 and sizeof(long)!=4")
    lines.append('#endif\n')
    # pjoin (defined elsewhere in this file) builds the output path.
    with open(pjoin(src, 'art_config.h'), 'w') as f:
        f.write('\n'.join(lines))
def make_libart_config(src):
    """Write an ``art_config.h`` header into *src* recording this platform's
    C integer sizes (via struct.calcsize) plus art_u8/art_u16/art_u32 typedefs.

    NOTE(review): duplicate of the make_libart_config definition earlier in
    this file — confirm whether both are intentional.
    """
    from struct import calcsize as sizeof
    # Header guard plus the measured sizes of the basic C integer types.
    L=["""/* Automatically generated by setup.py */
#ifndef _ART_CONFIG_H
#\tdefine _ART_CONFIG_H
#\tdefine ART_SIZEOF_CHAR %d
#\tdefine ART_SIZEOF_SHORT %d
#\tdefine ART_SIZEOF_INT %d
#\tdefine ART_SIZEOF_LONG %d""" % (sizeof('c'), sizeof('h'), sizeof('i'), sizeof('l')) ]
    aL = L.append  # shorthand for appending generated header lines
    # Exact-width requirements; die() (defined elsewhere) aborts on mismatch.
    if sizeof('c')==1: aL("typedef unsigned char art_u8;")
    else: die("sizeof(char) != 1")
    if sizeof('h')==2: aL("typedef unsigned short art_u16;")
    else: die("sizeof(short) != 2")
    # art_u32: prefer int, fall back to long when int is not 4 bytes wide.
    if sizeof('i')==4: aL("typedef unsigned int art_u32;")
    elif sizeof('l')==4: aL("typedef unsigned long art_u32;")
    else: die("sizeof(int)!=4 and sizeof(long)!=4")
    aL('#endif\n')
    # pjoin (defined elsewhere) builds the output path; lines newline-joined.
    with open(pjoin(src,'art_config.h'),'w') as f: f.write('\n'.join(L))
def process_packet(packet):
    """Decode a packet laid out as a 4-byte big-endian unsigned code followed
    by a UTF-8 payload.

    Returns (code, text) on success; returns the neutral value (0, "") when
    the packet is too short / malformed or the payload is not valid UTF-8.
    """
    import struct  # for struct.error; unpack/sizeof come from the file-level imports
    try:
        # Payload length is everything after the fixed 4-byte code prefix.
        code, payload = unpack("!I{}s".format(len(packet) - sizeof("!I")), packet)
        return (code, payload.decode("utf-8"))
    # Was a bare `except:` — narrowed so real bugs (e.g. TypeError on a wrong
    # argument) are no longer silently swallowed. Also renamed the local
    # `bytes`, which shadowed the builtin.
    except (struct.error, UnicodeDecodeError):
        return (0, "")
def getdatafids(self,fid_start,fid_end,rcvrnum=None):
    """Read FIDs [fid_start, fid_end) from the open fid file.

    rcvrnum: None for all receivers, a single receiver index, or a sequence
    of receiver indices.

    Returns (complex_data, data_error): complex_data has shape
    (n_receivers, n_fids, nro), squeezed to (n_fids, nro) when exactly one
    receiver is selected; data_error is 0 on success, otherwise the index of
    the first FID that hit end-of-file.
    """
    datasize = sizeof(self.datatype)  # bytes per raw sample
    nrcvrs = self.nrcvrs
    # Normalize the receiver selection into an integer index array.
    if (rcvrnum is None):
        crcvrs = arange(self.nrcvrs)
    else:
        try:
            test = len(rcvrnum)  # sequence? len() raises TypeError on a scalar
            crcvrs = array(rcvrnum,int)
        except TypeError:
            crcvrs = array([rcvrnum],int)
    #complex data array
    complex_data = zeros((len(crcvrs),fid_end-fid_start,int(self.nro)),complex)
    #read in data
    data_error = 0
    for k in range(len(crcvrs)):
        for j in range(fid_end-fid_start):
            # Each FID occupies self.blocksize bytes; within a block, each
            # receiver's trace is 2*nro*datasize bytes (interleaved re/im).
            self.fidfilehandle.seek(self.blocksize*(fid_start+j)+2*self.nro*datasize*crcvrs[k],0)
            bindata=A.array(self.datatype)
            try:
                # array.read() is deprecated in python 3
                # bindata.read(self.fidfilehandle,2*self.nro)
                bindata.fromfile(self.fidfilehandle, int(2*self.nro))
            except EOFError:
                print('Error(%s): Missing data in file!' % program_name)
                data_error = j  # remember the first short read; keep partial data
                break
            # De-interleave: even samples -> real part, odd samples -> imaginary.
            complex_data[k,j,:]=array(bindata[0:2*self.nro:2],float)+1.j*array(bindata[1:2*self.nro+1:2],float)
    # Single-receiver convenience: drop the leading receiver axis.
    if (len(crcvrs)==1):
        complex_data.shape=(fid_end-fid_start,self.nro)
    return complex_data,data_error
def __init__(self, dbfile, sxident='SxG'):
    """Open and memory-map a binary geo database file, validate its ident
    string, and parse the header, record formats and index tables.

    dbfile: path to the database file; sxident: magic identifier expected at
    byte 0 of the file.
    """
    self.fd = os.open(dbfile, os.O_RDONLY | os.O_NONBLOCK)
    # NOTE(review): os.SEEK_SET (== 0) is passed as the mmap *length*
    # argument; length 0 maps the whole file, but the constant is misleading.
    self.db = mmap.mmap(self.fd, os.SEEK_SET, prot=mmap.PROT_READ)
    ident = self.db.read(len(sxident))
    # NOTE(review): on Python 3 mmap.read() returns bytes while sxident is a
    # str, so this comparison would always fail — looks Python-2 oriented;
    # confirm target interpreter.
    assert ident == sxident, 'Missing `{ident}` ident in dbfile'.format(ident=sxident)
    self.db.seek(len(sxident))
    # Header layout: '/'-joined 'fmt:name' field specs consumed by the
    # project StructModel class (defined elsewhere).
    fields = '/'.join(
        (
            'B:version', 'L:created', 'B:db_type', 'B:charset',
            'B:idx_octets_count', 'H:idx_blocks_count', 'H:idx_block_size',
            'L:net_ranges_count', 'B:id_datatype_size',
            'H:region_record_size', 'H:city_record_size',
            'L:region_cat_size', 'L:city_cat_size',
            'H:country_record_size', 'L:country_cat_size',
            'H:pack_format_size'
        )
    )
    header = StructModel('DBHeader', fields, sxstruct=False)
    header_fmt = '{order}{spec}'.format(
        order=header.byteorder, spec=''.join(header.struct)
    )
    meta = header.create(self.db.read(sizeof(header_fmt)))
    # Per-record pack formats follow the header, NUL-separated.
    self.models = []
    for x in self.db.read(meta.pack_format_size).split(b'\0'):
        self.models.append(StructModel('SxRecord', x, byteorder='<'))
    # First-octet index: big-endian uint32 entries.
    self.idx_net_ranges = unpack(
        '>{0}L'.format(meta.idx_octets_count),
        self.db.read(meta.idx_octets_count * 4)
    )
    # Coarse block index: big-endian uint32 entries.
    self.idx_blocks = unpack(
        '>{0}L'.format(meta.idx_blocks_count),
        self.db.read(meta.idx_blocks_count * 4)
    )
    # Remember section offsets for later record lookups.
    self.net_blocks_offset = self.db.tell()
    self.net_block_record_size = meta.id_datatype_size + 3  # id + 3 address octets
    self.db_region_offset = self.net_blocks_offset + meta.net_ranges_count * self.net_block_record_size
    self.db_cities_offset = self.db_region_offset + meta.region_cat_size
    self.DBMeta = meta
def getStructAt(self,addr,struct):
    """Read struct.sizeof() bytes at *addr* and parse them with *struct*;
    Container results are tagged with their source address."""
    raw = self.read(addr, struct.sizeof())
    parsed = struct.parse(raw)
    if isinstance(parsed, Container):
        parsed['address'] = addr
    return parsed
def process_packet(packet):
    """Split *packet* into its leading big-endian uint32 and the raw
    remainder, returned as an (int, bytes) tuple."""
    payload_len = len(packet) - sizeof("!I")
    layout = "!I{}s".format(payload_len)
    return unpack(layout, packet)
def __init__(self,inputpath,method_name="method",acqp_name="acqp",fid_name='fid'):
    """Open a Bruker scan directory and derive acquisition parameters.

    Reads the 'method' and 'acqp' parameter files under *inputpath*, derives
    the raw datatype, data shape, receiver/mouse mapping, file block size and
    gradient orientation matrix, then opens the fid file for binary reading
    (handle kept open in self.fidfilehandle).

    Exits the program (SystemExit) when any required file is missing.
    """
    self.platform="Bruker"
    #resolve file names and check existence
    if not (inputpath[-1]=='/'):
        inputpath = inputpath + '/'
    self.inputpath=inputpath
    method_file = os.path.join(inputpath,method_name)
    acq_file = os.path.join(inputpath,acqp_name)
    fid_file = os.path.join(inputpath,fid_name)
    # All three files are required; report and abort if any is absent.
    try:
        if not ( os.path.exists(acq_file) ):
            raise FatalError("acqp file not found...")
        if not ( os.path.exists(fid_file) ):
            raise FatalError("fid file not found...")
        if not ( os.path.exists(method_file) ):
            raise FatalError("method file not found...")
    except FatalError as e:
        print('Error(%s):' % 'open_bruker_file', e.msg)
        raise SystemExit
    #generate bruker parameter dictionaries
    self.acq_param_dict=bruker_paramfile_to_dict(acq_file)
    self.method_param_dict=bruker_paramfile_to_dict(method_file)
    #specify datatype for "array" based read ('h'=int16, 'f'=float32, 'i'=int32)
    if (self.acq_param_dict['GO_raw_data_format']=='GO_16BIT_SGN_INT'):
        self.datatype = 'h'
    elif (self.acq_param_dict['GO_raw_data_format']=='GO_32BIT_FLOAT'):
        self.datatype = 'f'
    else:
        self.datatype = 'i' #default = 'GO_32BIT_SGN_INT'
    #determine data shape
    data_shape = ones(5,int) #(nreps*nframes*...),nrcvrs,nphase2|nslice,nphase,nro
    if ('PVM_EncMatrix' in self.method_param_dict):
        # PVM_EncMatrix is reversed into shape order (slowest axis first).
        data_shape[-len(self.method_param_dict['PVM_EncMatrix'])::] = self.method_param_dict['PVM_EncMatrix'][::-1] #nphase2,nphase,nro
    elif ('PVM_SpecMatrix' in self.method_param_dict): #need to reshape this better
        data_shape[-3::] = array([1,1,self.method_param_dict['PVM_SpecMatrix'][0]],int) #leave 1's in for consistency
    if 'PVM_EncActReceivers' in self.method_param_dict:
        data_shape[-4] = self.method_param_dict["PVM_EncActReceivers"].count('On') #nrcvrs
    self.nrcvrs = data_shape[-4]
    nslices = sum(get_dict_value(self.method_param_dict,'PVM_SPackArrNSlices',[1]))
    if (nslices>1):
        data_shape[-3] *= nslices
    if 'PVM_NRepetitions' in self.method_param_dict:
        nreps = self.method_param_dict['PVM_NRepetitions']
        if (nreps>0):
            data_shape[-5] = nreps
    # Movie frames multiply the outermost (repetition) dimension.
    nframes = 0
    if 'PVM_NMovieFrames' in self.method_param_dict:
        if 'PVM_MovieOnOff' in self.method_param_dict:
            if (self.method_param_dict['PVM_MovieOnOff']=='On'):
                nframes = self.method_param_dict['PVM_NMovieFrames']
    if (nframes>1):
        data_shape[-5] *= nframes
    #Do we need to verify the file size?
    #file_size = os.stat(fid_file)[6]
    #nfids = product(data_shape[:-1])
    #nropts = file_size/(2*nfids*sizeof(datatype))
    #if (data_shape[-1]!=nropts):
    #    print "Warning: inconsistency in file size and RO dimension (deriving number of RO points from file size)"
    #    data_shape[-1] = nropts
    # Keep only the non-singleton dimensions.
    self.data_shape=take(data_shape,nonzero(data_shape>1)[0])
    #for convenience, put some elements of data_shape into named variables
    self.nmice = get_dict_value(self.method_param_dict,'MICe_nmice',4)
    self.nro = data_shape[-1]
    self.npe = data_shape[-2]
    if (len(get_dict_value(self.method_param_dict,'PVM_EncMatrix',[1,1]))>2):
        self.npe2 = self.method_param_dict['PVM_EncMatrix'][2]
    else:
        self.npe2 = 1
    self.nslices = nslices
    #nD: spatial dimensionality from PVM_SpatDimEnum (defaults to 3D)
    SpatDimEnum=get_dict_value(self.method_param_dict,'PVM_SpatDimEnum','<3D>')
    SpatDimEnumDict={'<3D>':3, '<2D>':2, '<1D>':1}
    self.nD=get_dict_value(SpatDimEnumDict,SpatDimEnum,3)
    #rcvrmouse_mapping: index is rcvr, value is mouse
    #need to read this from MICe_rcvrtomouse_mapping or similar
    complete_mapping=get_dict_value(self.method_param_dict,"MICe_receiver_to_coils",array([0,0,1,1,2,2,3,3],int))
    # Keep only the entries for receivers that are switched 'On'.
    self.rcvrmouse_mapping=complete_mapping[ [i for i,onoff in enumerate(self.method_param_dict["PVM_EncActReceivers"]) if onoff=="On"] ]
    #dummies for convenience when dealing with Varian
    self.nf=self.npe
    self.ni=[self.nslices,self.npe2][self.nD==3]  # npe2 for 3D, nslices otherwise
    self.ni_perchan=self.ni
    self.nfid=1 #may need to adjust this at some point
    #blocksize may be padded with zeroes depending on setting of GO_block_size
    base_blocksize=self.nrcvrs*self.nro*2*sizeof(self.datatype)
    if ((self.acq_param_dict["GO_block_size"]=="Standard_KBlock_Format") and not (fid_name[0:11]=="rawdata.job")):
        #temporary kluge, rawdata.jobN files don't follow KBlock format as fid files do
        # Round the block size up to the next 1 KB boundary.
        self.blocksize=1024*( base_blocksize//1024 )
        if ((base_blocksize%1024)>0):
            self.blocksize+=1024
    else:
        self.blocksize=base_blocksize
    #define gradient matrix for orientation
    acqgradorient = get_dict_value(self.acq_param_dict,"ACQ_grad_matrix",None)
    acqpatientpos = get_dict_value(self.acq_param_dict,"ACQ_patient_pos",None)
    # NOTE(review): assumes ACQ_grad_matrix is present — the default None
    # would fail in reshape below; confirm all datasets carry it.
    self.gmatrix = reshape(acqgradorient[0:9],(3,3)) #need to handle multiple orientations properly here
    # Flip/swap gradient axes according to the patient position string.
    if (acqpatientpos=="Head_Supine"):
        self.gmatrix[:,0]*=-1
        self.gmatrix[:,2]*=-1
    elif (acqpatientpos=="Head_Prone"):
        self.gmatrix[:,1]*=-1
        self.gmatrix[:,2]*=-1
    elif (acqpatientpos=="Head_Left"):
        self.gmatrix[:,2]*=-1
        self.gmatrix[:,[0,1]]=self.gmatrix[:,[1,0]]
    elif (acqpatientpos=="Head_Right"):
        self.gmatrix[:,:]*=-1
        self.gmatrix[:,[0,1]]=self.gmatrix[:,[1,0]]
    elif (acqpatientpos=="Foot_Supine"):
        self.gmatrix[:,:]=self.gmatrix[:,:] #no change for Foot_Supine
    elif (acqpatientpos=="Foot_Prone"):
        self.gmatrix[:,0]*=-1
        self.gmatrix[:,1]*=-1
    elif (acqpatientpos=="Foot_Left"):
        self.gmatrix[:,0]*=-1
        self.gmatrix[:,1]*=-1
        self.gmatrix[:,[0,1]]=self.gmatrix[:,[1,0]]
    elif (acqpatientpos=="Foot_Right"):
        self.gmatrix[:,0]*=-1
        self.gmatrix[:,[0,1]]=self.gmatrix[:,[1,0]]
    #Bruker convention seems to be to run -ve --> +ve on 1st phase encode,
    # but then run +ve --> -ve on second phase encode; confusingly, this is
    # sometimes recorded in the "ACQ_gradient_amplitude" value and sometimes in
    # the ppg instead (e.g., compare FLASH and RARE) :(
    # This may necessitate a sequence specific recon to set gmatrix properly
    # on a case-by-case basis, matching the ACQ_gradient_amplitude, ppg, and petable
    # statements, which may be switched in user sequences.
    # Here, I am adding a -ve sign to the 2nd phase encode, so that the default
    # should handle the Bruker stock sequences properly
    self.gmatrix[2,:]*=-1
    print(self.gmatrix)
    #temporary handling of offsets
    default_hive_table=array([[-41,-41,0],[-41,41,0],[41,-40,0],[41,41,0]],float)
    # Project the default hive positions through gmatrix to per-mouse offsets;
    # the `if 1:` guards unconditionally overwrite any values already present.
    if 1: #(not self.method_param_dict.has_key("MICe_ReadOffset")):
        self.method_param_dict["MICe_ReadOffset"]=array([sum(self.gmatrix[0,:]*default_hive_table[j]) for j in range(4)],float)
    if 1: #(not self.method_param_dict.has_key("MICe_Phase1Offset")):
        self.method_param_dict["MICe_Phase1Offset"]=array([sum(self.gmatrix[1,:]*default_hive_table[j]) for j in range(4)],float)
    if 1: #(not self.method_param_dict.has_key("MICe_Phase2Offset")):
        self.method_param_dict["MICe_Phase2Offset"]=array([sum(self.gmatrix[2,:]*default_hive_table[j]) for j in range(4)],float)
    if 1: #(not self.method_param_dict.has_key("MICe_SliceOffset")):
        self.method_param_dict["MICe_SliceOffset"]=array([sum(self.gmatrix[2,:]*default_hive_table[j]) for j in range(4)],float)
    #finally, open fid file for reading data
    #file encoding changes from 'r' (python 2) to 'br' (python 3)
    self.fidfilehandle=open(fid_file,'br')
def parse(buffer, struct):
    """Parse *buffer* as a sequence of fixed-size *struct* records.

    The buffer length must be an exact multiple of struct.sizeof().
    Returns a list with one parsed record per chunk.
    """
    record_size = struct.sizeof()
    assert len(buffer) % record_size == 0
    return [struct.parse(piece) for piece in chunks(buffer, record_size)]
def read_struct(self, struct):
    """Read exactly one *struct* record from self.file and return it parsed."""
    record_size = struct.sizeof()
    raw = self.file.read(record_size)
    # A short read means the file ended mid-record.
    assert len(raw) == record_size
    return only(parse(raw, struct))
def getStructAt(self, addr, struct):
    """Parse a *struct* at *addr*, tagging Container results with the address
    they were read from."""
    parsed = struct.parse(self.read(addr, struct.sizeof()))
    if not isinstance(parsed, Container):
        return parsed
    parsed['address'] = addr
    return parsed