Example No. 1
  def _readDataGeneral(self,stepSlice=0,shotSlice=None,fileSlice=0,fieldName=None):
    fileSlice = tools.iterfy(fileSlice)
    stepSlice = tools.iterfy(stepSlice)
    #self.readTime(stepSlice,shotSlice,fileSlice)
    #if not self._existsInSelf("time"):
      #timeStampObj = memdata(self,"timestamp",fileSlice[0])
      #self._addToSelf("time",timeStampObj)
    outS = []
    # NB: there is an issue here, this doesn't make sense the way it is right now...
    for stepNum in stepSlice:
      # check if a memory-cached copy exists and the already read values contain what we need
      #print "TODO: add slice1 in slice2 ..., the current one does not work with None ..."
      #print "r1=range(5,100); r2=range(10,20); [idx for (idx,x) in zip(range(len(r1)),r1) if x in r2]"
      fileNum,FstepNum = self._getFileStep(stepNum)
      addr = address(fileNum,stepNum,"shotSlice")
      if (not self._existsInSelf(addr)) or (len(tools.iterDiff(self._getFromSelf(addr), shotSlice)) == 0):
        path = self._paths["data"][FstepNum][0]
        path = getPath(path)
        if shotSlice is None:
          # format the per-shot dataset path with each timestamp of this step and read it completely
          data = [h5r(self._h5s[fileNum],path % tt)[...] for tt in self.time[stepNum]]
        else:
          if isinstance(shotSlice,np.ndarray) and shotSlice.dtype == np.dtype(int):
            data = np.asarray([h5r(self._h5s[fileNum],path % self.time[stepNum][ts])[...] for ts in shotSlice])
            #tshotSlice = np.zeros([len(data)],dtype=bool)
            #tshotSlice[shotSlice]=True
            #shotSlice=tshotSlice
          #data = data[shotSlice]
      else:
        data = self._getFromSelf(address(fileNum,stepNum,"_data"))
      # store if asked to use the memory cache
      if (self._useMemoryCache):
        # save in .fileNum.stepNum._data
        self._addToSelf(address(fileNum,stepNum,"_data"),data)
        self._addToSelf(address(fileNum,stepNum,"shotSlice"),shotSlice)
      if (isinstance(data.dtype.names,tuple)):
        # structured array: expose each field as its own memdata attribute
        for fieldname in data.dtype.names:
          self._addToSelf(address(fileNum,stepNum,fieldname),data[fieldname])
          if ( not (fieldname in self.__dict__) ):
            timeStampObj = memdata(self,"timestamp",fileNum)
            dataObj = memdata(self,fieldname,fileNum,timeStampObj)
            self._addToSelf(fieldname,dataObj)
      #else:
        #timeStampObj = memdata(self,"timestamp",fileNum)
        #dataObj = memdata(self,"_data",fileNum,timeStampObj)
        #tools.addToObj(self,"_data",dataObj)
      outS.append(data)
    return outS
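
The field splitting at the end of this method relies on NumPy structured arrays exposing their field names through dtype.names; a minimal, self-contained sketch with a hypothetical dtype (not the real detector layout):

import numpy as np

# hypothetical structured dtype, roughly in the spirit of a point-detector record
dt = np.dtype([("channel", "f8", (4,)), ("sum", "f8")])
data = np.zeros(3, dtype=dt)

# dtype.names is a tuple for structured arrays (None for plain ones)
if isinstance(data.dtype.names, tuple):
    for fieldname in data.dtype.names:   # ("channel", "sum")
        field = data[fieldname]          # shapes (3, 4) and (3,) respectively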
Example No. 2
 def _readDataEVR(self,stepSlice=0,shotSlice=None,fileSlice=0):
   fileSlice = tools.iterfy(fileSlice)
   stepSlice = tools.iterfy(stepSlice)
   for fileNum in fileSlice:
     for stepNum in stepSlice:
       data= self._readDataGeneral(stepSlice=stepNum,shotSlice=shotSlice,
         fileSlice=fileSlice)
       timeStampObj = self._getFromSelf("time")
       addr = address(fileNum,stepNum,"shotSlice")
       addrCode = address(fileNum,stepNum,"code%d"%self.codes[0])
       # if not read or we are asking shots outside the range of read values...
       if ( (not self._existsInSelf(addrCode)) or (len(tools.iterDiff( self._getFromSelf(addr), shotSlice) )==0) ):
         for code in self.codes:
           nshots = len(data)
           temp = np.zeros(nshots,dtype=bool)
           for nshot in range(nshots):
             if code in data[nshot][0]["eventCode"]:
               temp[nshot] = True
           addr = address(fileNum,stepNum,"code%d"%code)
           self._addToSelf(addr,temp)
           dataObj = memdata(self,"code%d"%code,fileNum,timeStampObj)
           self._addToSelf("code%d"%code,dataObj)
Example No. 3
 def __init__(self,fhandle,name,paths,useMemoryCache=True,isauto=False,isPointDet=False):
   self._h5s = tools.iterfy(fhandle)
   self.name = name
   self._paths = paths
   self._useMemoryCache=useMemoryCache
   self._isPointDet = isPointDet
   self._numOfScanStepsFile = [1]*len(self._h5s)
   #if not isauto:
     #self.__init__heavy()
   if self._isPointDet:
     self._initPointDet()
   else:
     self._initAreaDet()
Example No. 4
 def _readDataIPM(self,stepSlice=0,shotSlice=None,fileSlice=0,fieldName=None):
   # further splits channels
   fileSlice = tools.iterfy(fileSlice)
   stepSlice = tools.iterfy(stepSlice)
   for fileNum in fileSlice:
     data= self._readDataGeneral(stepSlice=stepSlice,shotSlice=shotSlice,
       fileSlice=fileNum,fieldName=fieldName)
     timeStampObj = self._getFromSelf("time")
     for stepNum in stepSlice:
       addr = address(fileNum,stepNum,"shotSlice")
       addrChannel = address(fileNum,stepNum,"channel0")
       # if not read or we are asking shots outside the range of read values...
       if ( (not self._existsInSelf(addrChannel)) or (len(tools.iterDiff( self._getFromSelf(addr), shotSlice) )==0) ):
         # if only one step is read it does not return a list ...
         if isinstance(data,list):
           channel_data = data[stepNum]["channel"]
         else:
           channel_data = data["channel"]
         for i in range(4):
           addr = address(fileNum,stepNum,"channel%d"%i)
           self._addToSelf(addr,channel_data[:,i])
           dataObj = memdata(self,"channel%d"%i,fileNum,timeStampObj)
           self._addToSelf("channel%d"%i,dataObj)
Example No. 5
  def _readEpicsAllData(self,datapath,timepath):
    
    stepSlice = range(self.Nsteps)
    stepSlice = tools.iterfy(stepSlice)

    outD = []
    outT = []
    for stepNum in stepSlice:
      fileNum,FstepNum = self._getFileStep(stepNum)
      cpath = datapath % FstepNum  # fill in the calib-cycle number with old-style '%' formatting (works the same in Python 3)
      data = h5r(self._h5s[fileNum],cpath)
      outD.append(data['value'])
      time = data['stamp']
      time.dtype.names = ('seconds','nanoseconds')
      outT.append(time)
    return [outD,outT]
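
The cpath line fills a per-step placeholder in the dataset path with old-style '%' formatting; a small sketch using a made-up template (the real templates come from the configuration and replaceCalibCycleString):

# made-up path template with a calib-cycle placeholder
datapath = "Configure:0000/Run:0000/CalibCycle:%04d/somePV/data"

paths = [datapath % FstepNum for FstepNum in range(3)]
# -> paths end in CalibCycle:0000, CalibCycle:0001, CalibCycle:0002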
Example No. 6
 def __getitem__(self,x):
   n = self._numOfScanSteps[0]
   if isinstance(x,slice):
     return [self[ii] for ii in xrange(*x.indices(n))]
   else:
     x = tools.iterfy(x)
     if (max(x)>=self._numOfScanSteps[0]):
       raise IndexError
     if (not self._useMemoryCache):
       return self.readData(stepSlice=x)
     if self._existsInSelf("_data"):
       return self._getFromSelf("_data")[x]
     elif self._existsInSelf("value"):
       return self._getFromSelf("value")[x]
     else:
       return tools.getFromObj(self,x)
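
The slice branch of __getitem__ expands a slice into explicit step indices with slice.indices before recursing into itself; the mechanism in isolation:

n = 10                              # pretend the object has 10 scan steps
s = slice(2, 8, 3)
steps = list(range(*s.indices(n)))  # [2, 5]; __getitem__ would then fetch self[2] and self[5]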
Example No. 7
def polyFit(i0,Imat,order=3, removeOrders=[]):
  # flatten everything but the first (shot) axis so each column is fitted independently
  Imatf = Imat.reshape((len(Imat),-1))
  # Vandermonde design matrix: columns are i0**order, ..., i0**1, i0**0
  pol = np.vander(i0,order+1)
  removeOrders = tools.iterfy(removeOrders)
  removeOrders = np.sort(removeOrders)[-1::-1]
  for remo in removeOrders:
    # drop the column of the excluded order (counting from the constant term at the end)
    pol = np.delete(pol,-(remo+1),axis=1)
  lhs = copy.copy(pol)
  # scale columns to unit norm to improve the conditioning of the least-squares solve
  scale = np.sqrt((lhs*lhs).sum(axis=0))
  lhs /= scale
  comps,resid,rnk,singv = linalg.lstsq(lhs,Imatf)
  comps = (comps.T/scale).T

  # re-insert zero coefficients for the removed orders
  for remo in removeOrders:
    comps = np.insert(comps,order-remo,0,axis=0)
  return comps.reshape((order+1,)+np.shape(Imat)[1:])
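
At its core polyFit is a per-column least-squares fit against a Vandermonde matrix in i0; a tiny NumPy-only sketch of that step on synthetic data (no column removal or scaling):

import numpy as np

i0 = np.linspace(1.0, 2.0, 50)                     # synthetic normalization values
Imat = 3.0 * i0[:, None]**2 + 0.5                  # one "pixel" per column, quadratic in i0

pol = np.vander(i0, 3)                             # columns: i0**2, i0, 1
comps, resid, rnk, sv = np.linalg.lstsq(pol, Imat)
# comps[:, 0] is close to [3.0, 0.0, 0.5]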
Example No. 8
  def _readTime(self,path,stepSlice=None,shotSlice=None):
    if stepSlice is None:
      stepSlice = range(self.Nsteps)
    stepSlice = tools.iterfy(stepSlice)
    times = []
    for stepNum in stepSlice:
      fileNum,FstepNum = self._getFileStep(stepNum)
      tpath = path
      time = h5r(self._h5s[fileNum],tpath)
      try:
        if shotSlice is None:
          time = time[...]
        else:
          time = time[shotSlice]
      except:
        time = np.array([])

      #raise NotImplementedError('Use the source, luke!')
      times.append(time)
    return times
Example No. 9
  def findDatasets(self,detectors=None,exclude=None):
    """finds datasets from a cnf that contains aliases, if no aliases are defined the file is parsed and the hdf5 names are returned as names.
    
    Finds detectors in hdf5 file matching with mnemonic given in config file;
    the matching mnemonic names are as dictionaries (self.pointDet and self.areaDet)
    The 
    """
    subSelection = detectors
    if (subSelection==[]) or (subSelection is None):
      subSelection = self.cnf["pointDet"].keys() + self.cnf["areaDet"].keys()

    if exclude is not None:
      exclude = tools.iterfy(exclude)
      for tex in exclude:
        while True:
          try:
            subSelection.remove(tex)
            continue
          except:
            break
    #h = self.fileHandles[0]
    h = self.fileHandles
   
    # Getting all Detector path strings in CCs and config
    h5names = [tH5.getDataset(th) for th in h]

    

    ret = {}
    
    pointDet = self.cnf["pointDet"]
    for (mnemonic,name) in pointDet.iteritems():
      if (mnemonic.find("nops")>-1) and (mnemonic.find("*")>-1):
        continue
      # skip if not in the group we want to read
      if mnemonic not in subSelection:
        continue
      nameData = name["data"].replace("*","\S+")
      detDataset = [[x for x in th5names if (re.search(nameData,x) is not None)] for th5names in h5names]
      nameConf = name["conf"].replace("*","\S+")
      try:
        detConf = [x for x in h5confs if (re.search(nameConf,x) is not None)]
      except:
        detConf = []
      nameTime = name["timestamp"].replace("*","\S+")
      try:
        detTime = [[x for x in th5names if (re.search(nameTime,x) is not None)] for th5names in h5names]
      except:
        detTime = []
      data = [x for x in detDataset]
      time = [x for x in detTime ]

      if ( (sum([len(tdata) for tdata in data]) != 0) \
          and (sum([len(ttime) for ttime in time]) != 0) ):
        ret[mnemonic] = {}
        ret[mnemonic]["data"] = data
        ret[mnemonic]["time"] = time
        if len(detConf)>0:
          ret[mnemonic]["conf"] = detConf[0]
    self._pointDetPaths = ret
    self.pointDetNames = ret.keys()


    areaDet = self.cnf["areaDet"]
    ret = {}
    # 3D detectors need special care because data are written differently 
    # /data, /image, /waveform
    for (mnemonic,name) in areaDet.iteritems():
      mnemonic = mnemonic.split('_bak')[0]
      # skip if not in the group we want to read
      if mnemonic not in subSelection:
        continue
      name = name["data"].replace("*","\S+")
      detDataset = [[x for x in th5names if (re.search(name,x) is not None)] for th5names in h5names]

      conf = [ ]
      data = [[x for x in tdetDataset if 'tag_' in x and x[-13:]=='detector_data'] for tdetDataset in detDataset]
      if ( (len(data) != 0)):
        ret[mnemonic] = {}
        ret[mnemonic]["data"] = [data]
    self._areaDetPaths = ret
    self.areaDetNames = ret.keys()
    self._detectorsPaths = tools.dictMerge(self._pointDetPaths,self._areaDetPaths)
    self.detectorsNames = self.pointDetNames + self.areaDetNames
    # *** start scan variables *** #
    #temp = dict()
    #if (len(self.cnf["scan_step"])>0):
      #for scan_var in self.cnf["scan_step"]:
	#mne,reg = scan_var
	#reg  = reg.replace("*","\S+")
	#data = [x for x in h5names if (re.search(reg,x) is not None)]
#
	#if not data==[]:
	  #path = replaceCalibCycleString(data[0])
	  ##try:
	  #obj = scanVar(self.fileHandles,mne,path)
	 # 
	  ##tools.addToObj(self,mne,obj)
	  #temp[mne] = obj
	  #except:
	    #pass
    #self.scanVars = temp
    # *** stop scan variables *** #
    return
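
The mnemonic matching in findDatasets turns the '*' wildcard from the config into the regular expression \S+ and keeps the HDF5 names accepted by re.search; a small sketch with made-up dataset names:

import re

# made-up dataset names; the real ones come from tH5.getDataset
h5names = ["CalibCycle:0000/MyDet/tag_000001/detector_data",
           "CalibCycle:0000/MyDet/tag_000001/detector_time"]

nameData = "*detector_data".replace("*","\S+")   # config pattern with a wildcard
detDataset = [x for x in h5names if (re.search(nameData,x) is not None)]
# -> only the ".../detector_data" entry survives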
Example No. 10
  def _readPointDataGeneral(self,path,field=None,stepSlice=None,shotSlice=None):
    if stepSlice is None:
      stepSlice = range(self.Nsteps)
    stepSlice = tools.iterfy(stepSlice)

    #self.readTime(stepSlice,shotSlice,fileSlice)
    #if not self._existsInSelf("time"):
      #timeStampObj = memdata(self,"timestamp",fileSlice[0])
      ##self._addToSelf("time",timeStampObj)

    outS = []
    for stepNum in stepSlice:
      fileNum,FstepNum = self._getFileStep(stepNum)
      cpath = path
      cpath = getPath(cpath)
      data = h5r(self._h5s[fileNum],cpath)
      try:
        if shotSlice is None:
          data = data[...]
        else:
          data = data[shotSlice]
      except:
        data = np.array([])

      outS.append(data)

    # find the first step that actually contains some data
    outSind = 0
    for toutS in outS:
      if len(toutS)>0:
        break
      outSind+=1

    if outS[outSind].dtype.names:
      if field is not None:
        # strip trailing digits off the requested field name (e.g. "channel2" -> field "channel", index 2)
        index = ''
        while field not in outS[outSind].dtype.names:
          index = field[-1] + index
          field = field[:-1]
        # no trailing digits means no specific column was requested
        index = int(index) if index != '' else None
        fields = [field]
      else:
        fields = outS[outSind].dtype.names
        index = None

      pret = [[dd[tfield] if len(dd)>0 else np.array([]) for dd in outS] for tfield in fields]
      ret = []
      retfields = []
      for tret,tfield in zip(pret,fields):
        if tret[0].ndim==2:
          # 2D fields are split into one output per column, named e.g. "channel0", "channel1", ...
          noofvecs = np.shape(outS[0][tfield])[1]
          if index is not None:
            indices = [index]
          else:
            indices = range(noofvecs)
          for tindex in indices:
            strfmt = '%0' + '%dd' %(1+np.floor(np.log10(noofvecs)))
            tname = tfield + strfmt %(tindex)
            ret.append([sd[:,tindex] if np.ndim(sd)==2 else np.asarray([]) for sd in tret])
            retfields.append(tname)
        else:
          ret.append(tret)
          retfields.append(tfield)
    else:
      ret = [outS]
      retfields = ['data']
    
    return ret,retfields
Example No. 11
  def findDatasets(self,detectors=None,exclude=None):
    """finds datasets from a cnf that contains aliases, if no aliases are defined the file is parsed and the hdf5 names are returned as names.
    
    Finds detectors in hdf5 file matching with mnemonic given in config file;
    the matching mnemonic names are as dictionaries (self.pointDet and self.areaDet)
    The 
    """
    subSelection = detectors
    if (subSelection==[]) or (subSelection is None):
      subSelection = self.cnf["pointDet"].keys() + self.cnf["areaDet"].keys()

    if exclude is not None:
      exclude = tools.iterfy(exclude)
      for tex in exclude:
        while True:
          try:
            subSelection.remove(tex)
            continue
          except:
            break
    h = self.fileHandles[0]
   
    # Getting all Detector path strings in CCs and config
    try:
      # try to use only CalibCycle0
      # bad for MEC, as some calib cycles don't contain anything... look for the longest dataset for now; later, look in all of them

      base = "Configure:0000/Run:0000/"
      bases = h[base].keys()
      lens = np.array([len(h[base][key].keys()) for key in bases])
      base = base + bases[lens.argmax()] +'/'
      h5names = tH5.getDataset_hack(h[base])
      #h5names = [base+x for x in h5names]
      # find all confs
      base = "Configure:0000/"
      confs = h[base].keys()
      h5confs = []
      for c in confs:
        if (c.find("Run")==0):
          continue
        else:
          temp = tH5.getDataset(h[base][c])
          for t in temp:
            h5confs.append(base+c+"/"+t)
    except KeyError:
      h5names = tH5.getDataset(h)

    

    #raise NotImplementedError('Use the source, luke!')
    ret = {}
    ## *** start EpicsPV *** #
    ## look for epics name
    #epicsFound=False
    #if ("epics_dset" in self.cnf):
      #epicsMne = self.cnf["epics_dset"][0]
      #epicsReg = self.cnf["epics_dset"][1]
      #epicsH5Names=[x for x in h5names if (x.find(epicsReg)>-1)]
      ## common Epics path:
      #ntemp = min([len(x.split("/")) for x in epicsH5Names])
      #epicsCommon = "/".join(epicsH5Names[0].split("/")[0:ntemp])
      ## epics var
      #self._epicsPaths = {}
      #for d in h[epicsCommon]:
        #dpath = d
        #d = d.replace(':','_')
        #d = d.replace('-','_')
        #d = d.replace(' ','_')
        #d = d.replace('.','_')
        #mne = "%s.%s" % (epicsMne.split("/")[0],d)
        #self._epicsPaths[mne]={}
        #self._epicsPaths[mne]["data"] = epicsCommon.replace('CalibCycle:0000','CalibCycle:%04d')+"/"+dpath+"/data"
        #self._epicsPaths[mne]["time"] = epicsCommon.replace('CalibCycle:0000','CalibCycle:%04d')+"/"+dpath+"/time"
        #self._epicsPaths[mne]["conf"] = []
      #self._epicsNames = self._epicsPaths.keys()
    #else:
      #self._epicsNames = []
    ## *** stop EpicsPV *** #
    pointDet = self.cnf["pointDet"]
    for (mnemonic,name) in pointDet.iteritems():
      if (mnemonic.find("nops")>-1) and (mnemonic.find("*")>-1):
        continue
      mnemonic = mnemonic.split('_bak')[0]
      # skip if not in the group we want to read
      if mnemonic not in subSelection:
        continue
      nameData = name["data"].replace("*","\S+")
      detDataset = [x for x in h5names if (re.search(nameData,x) is not None)]
      nameConf = name["conf"].replace("*","\S+")
      try:
        detConf = [x for x in h5confs if (re.search(nameConf,x) is not None)]
      except:
        detConf = []
      data = [x for x in detDataset if x[-5:]=="/data" or x[-8:]=="/evrData" or x[-13:]=="/channelValue"]
      time = [x for x in detDataset if x[-5:]=="/time"]
      if ( (len(data) != 0) and (len(time) != 0) ):
        ret[mnemonic] = {}
        #ret[mnemonic]["data"] = data[0].replace('CalibCycle:0000','CalibCycle:%04d')
        #ret[mnemonic]["time"] = time[0].replace('CalibCycle:0000','CalibCycle:%04d')
        ret[mnemonic]["data"] = [replaceCalibCycleString(tdat) for tdat in data]
        ret[mnemonic]["time"] = [replaceCalibCycleString(ttim) for ttim in time]
        if len(detConf)>0:
          ret[mnemonic]["conf"] = detConf[0]
    self._pointDetPaths = ret
    self.pointDetNames = ret.keys()



    areaDet = self.cnf["areaDet"]
    ret = {}
    # 3D detectors need special care because data are written differently 
    # /data, /image, /waveform
    for (mnemonic,name) in areaDet.iteritems():
      mnemonic = mnemonic.split('_bak')[0]
      # skip if not in the group we want to read
      if mnemonic not in subSelection:
        continue
      name = name["data"].replace("*","\S+")
      name_nodata = "/".join(name.split("/")[0:-1])
      detDataset = [x for x in h5names if (re.search(name_nodata,x) is not None)]
      conf = [ ]
      data = [x for x in detDataset if (re.search(name,x) is not None)]
      time = [x for x in detDataset if x[-5:]=="/time"]
      #raise NotImplementedError('Use the source, luke!')
      if ( (len(data) != 0) and (len(time) !=0) ):
        ret[mnemonic] = {}
        ret[mnemonic]["data"] = [replaceCalibCycleString(tdat) for tdat in data]
        ret[mnemonic]["time"] = [replaceCalibCycleString(ttim) for ttim in time]
        ret[mnemonic]["conf"] = conf
    self._areaDetPaths = ret
    self.areaDetNames = ret.keys()
    self._detectorsPaths = tools.dictMerge(self._pointDetPaths,self._areaDetPaths)
    self.detectorsNames = self.pointDetNames + self.areaDetNames
    # *** start scan variables *** #
    logbook("Finding scan variables in hdf5 file ...",end="")
    temp = dict()
    if (len(self.cnf["scan_step"])>0):
      for scan_var in self.cnf["scan_step"]:
        mne,reg = scan_var
        reg  = reg.replace("*","\S+") 
        data = [x for x in h5names if (re.search(reg,x) is not None)]
        if len(data)>1:
          logbook("\nWarning: in lclsH5:findDatasets, multiple path matching regex, using only first",reg)
          logbook("Paths are:",data)
        path = replaceCalibCycleString(data[0])
        obj = scanVar(self.fileHandles,mne,path)
        temp[mne] = obj
    self.scanVars = temp
    names_to_displ = [ x.name for x in temp.values() \
      if hasattr(x,"name")]
    names_to_displ = ",".join(names_to_displ)
    logbook(" ... done, scanVar found:",names_to_displ, \
      time=False)
    # *** stop scan variables *** #
    return
Example No. 12
def polyVal(comps,i0):
  i0 = np.asarray(tools.iterfy(i0))
  # Vandermonde matrix with one column per fitted component
  pol = np.vander(i0,len(comps))
  # evaluate the components at each i0 value and restore the original per-shot shape
  return np.dot(pol,comps.reshape((len(comps),-1))).reshape((len(i0),)+np.shape(comps)[1:])
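
A hedged usage sketch tying polyFit and polyVal together on synthetic data (assuming the same imports as the snippets above: numpy as np, copy, linalg and tools):

import numpy as np

i0 = np.linspace(0.5, 1.5, 200)                       # synthetic monitor values
Imat = 2.0*i0**3 - 1.0*i0 + 0.2                       # synthetic signal, one value per shot

comps = polyFit(i0, Imat, order=3, removeOrders=[2])  # fit, forcing the quadratic term to zero
fitted = polyVal(comps, i0)                           # evaluate the fit at the same i0 values
residual = Imat - fitted                              # close to zero for this synthetic signal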