Example #1
File: saclaH5.py Project: htlemke/ixppy
  def findDetectors(self,detectors=None,exclude=None): 
    # Detectors
    # TODO
    # strategy in 2 paths:
    # (1) detectors are given as aliases --> try to read with cached datasets/addresses --> if that fails, use the dataset finder ONLY for the defined aliases.
    # (2) no detectors are given --> find all detectors for all aliases and use the dataset name where no alias exists.
    #
    if (detectors != "parse"):
      # findDatasets tries to match datasets found in files with the mnemonics given in config
      t0 = time.time()
      print "Finding data in hdf5 file ...",
      self.findDatasets(detectors=detectors,exclude=exclude)
      print " ... done (%.1f) ms" % ((time.time()-t0)*1e3)

    else:
      print "Starting to look in the file"
      # parsing look in the file for dataset ...
      # use first (data or cached) file to find detectors to use
      h = self.fileHandles[0]
      try:
        cnf = parseToCnf(h)
        self.cnf = tools.dictMerge(self.cnf,cnf)
        self.areaDet  = cnf['areaDet'].keys()
        self.pointDet = cnf['pointDet'].keys()
        self.detectors = cnf['areaDet'].keys()+cnf['pointDet'].keys()
      except KeyError:
        print "Failed to find detectors in ", h.filename
Example #2
File: saclaH5.py Project: htlemke/ixppy
  def findDatasets(self,detectors=None,exclude=None):
    """finds datasets from a cnf that contains aliases, if no aliases are defined the file is parsed and the hdf5 names are returned as names.
    
    Finds detectors in hdf5 file matching with mnemonic given in config file;
    the matching mnemonic names are as dictionaries (self.pointDet and self.areaDet)
    The 
    """
    subSelection = detectors
    if (subSelection==[]) or (subSelection is None):
      subSelection = self.cnf["pointDet"].keys() + self.cnf["areaDet"].keys()

    if exclude is not None:
      exclude = tools.iterfy(exclude)
      for tex in exclude:
        while True:
          try:
            subSelection.remove(tex)
            continue
          except:
            break
    #h = self.fileHandles[0]
    h = self.fileHandles
   
    # Getting all Detector path strings in CCs and config
    h5names = [tH5.getDataset(th) for th in h]

    

    ret = {}
    
    pointDet = self.cnf["pointDet"]
    for (mnemonic,name) in pointDet.iteritems():
      if (mnemonic.find("nops")>-1) and (mnemonic.find("*")>-1):
        continue
      # skip if not in the group we want to read
      if mnemonic not in subSelection:
        continue
      nameData = name["data"].replace("*","\S+")
      detDataset = [[x for x in th5names if (re.search(nameData,x) is not None)] for th5names in h5names]
      nameConf = name["conf"].replace("*","\S+")
      try:
        detConf    = [x for x in h5confs if (re.search(nameConf,x) is not None)]
      except:
        # h5confs may be undefined here; fall back to an empty list
        detConf=[]
      nameTime = name["timestamp"].replace("*","\S+")
      try:
        detTime    = [[x for x in th5names if (re.search(nameTime,x) is not None)] for th5names in h5names]
      except:
        detTime=[]
      data = [x for x in detDataset]
      time = [x for x in detTime ]

      if ( (sum([len(tdata) for tdata in data]) != 0) \
          and (sum([len(ttime) for ttime in time]) != 0) ):
        ret[mnemonic] = {}
        ret[mnemonic]["data"] = data
        ret[mnemonic]["time"] = time
        if len(detConf)>0:
          ret[mnemonic]["conf"] = detConf[0]
    self._pointDetPaths = ret
    self.pointDetNames = ret.keys()


    areaDet = self.cnf["areaDet"]
    ret = {}
    # 3D detectors need special care because data are written differently 
    # /data, /image, /waveform
    for (mnemonic,name) in areaDet.iteritems():
      mnemonic = mnemonic.split('_bak')[0]
      # skip if not in the group we want to read
      if mnemonic not in subSelection:
        continue
      name = name["data"].replace("*","\S+")
      detDataset = [[x for x in th5names if (re.search(name,x) is not None)] for th5names in h5names]

      conf = [ ]
      data = [[x for x in tdetDataset if 'tag_' in x and x[-13:]=='detector_data'] for tdetDataset in detDataset]
      # register only if at least one file actually contains matching data
      if (sum([len(tdata) for tdata in data]) != 0):
        ret[mnemonic] = {}
        ret[mnemonic]["data"] = [data]
    self._areaDetPaths = ret
    self.areaDetNames = ret.keys()
    self._detectorsPaths = tools.dictMerge(self._pointDetPaths,self._areaDetPaths)
    self.detectorsNames = self.pointDetNames + self.areaDetNames
    # *** start scan variables *** #
    #temp = dict()
    #if (len(self.cnf["scan_step"])>0):
      #for scan_var in self.cnf["scan_step"]:
        #mne,reg = scan_var
        #reg  = reg.replace("*","\S+")
        #data = [x for x in h5names if (re.search(reg,x) is not None)]
        #
        #if not data==[]:
          #path = replaceCalibCycleString(data[0])
          ##try:
          #obj = scanVar(self.fileHandles,mne,path)
          #
          ##tools.addToObj(self,mne,obj)
          #temp[mne] = obj
          #except:
            #pass
    #self.scanVars = temp
    # *** stop scan variables *** #
    return
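
The dataset lookup above reduces to turning each "*" wildcard in a config alias into the regex fragment \S+ and keeping every HDF5 path that re.search matches. A minimal sketch with made-up names (the alias pattern and the h5names list are illustrative, not taken from a real SACLA file):

import re

# hypothetical alias entry, as it might appear under cnf["areaDet"][...]["data"]
alias = "detector_2d_*/detector_data"
regex = alias.replace("*", r"\S+")

# hypothetical dataset paths, standing in for what tH5.getDataset returns per file
h5names = [
    "run_123/detector_2d_1/detector_data",
    "run_123/detector_2d_1/detector_info",
    "run_123/event_info/tag_number_list",
]

matches = [x for x in h5names if re.search(regex, x) is not None]
print(matches)  # ['run_123/detector_2d_1/detector_data']
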
Example #3
File: lclsH5.py Project: htlemke/ixppy
  def findDatasets(self,detectors=None,exclude=None):
    """finds datasets from a cnf that contains aliases, if no aliases are defined the file is parsed and the hdf5 names are returned as names.
    
    Finds detectors in hdf5 file matching with mnemonic given in config file;
    the matching mnemonic names are as dictionaries (self.pointDet and self.areaDet)
    The 
    """
    subSelection = detectors
    if (subSelection==[]) or (subSelection is None):
      subSelection = self.cnf["pointDet"].keys() + self.cnf["areaDet"].keys()

    if exclude is not None:
      exclude = tools.iterfy(exclude)
      for tex in exclude:
        while True:
          try:
            subSelection.remove(tex)
            continue
          except:
            break
    h = self.fileHandles[0]
   
    # Getting all Detector path strings in CCs and config
    try:
      # try to use only CalibCycle0
      # bad for MEC as some calib cycles don't contain anything... look for the longest dataset for now, later look in all

      base = "Configure:0000/Run:0000/"
      bases = h[base].keys()
      lens = np.array([len(h[base][key].keys()) for key in bases])
      base = base + bases[lens.argmax()] +'/'
      h5names = tH5.getDataset_hack(h[base])
      #h5names = [base+x for x in h5names]
      # find all confs
      base = "Configure:0000/"
      confs = h[base].keys()
      h5confs = []
      for c in confs:
        if (c.find("Run")==0):
          continue
        else:
          temp = tH5.getDataset(h[base][c])
          for t in temp:
            h5confs.append(base+c+"/"+t)
    except KeyError:
      h5names = tH5.getDataset(h)

    

    #raise NotImplementedError('Use the source, luke!')
    ret = {}
    ## *** start EpicsPV *** #
    ## look for epics name
    #epicsFound=False
    #if ("epics_dset" in self.cnf):
      #epicsMne = self.cnf["epics_dset"][0]
      #epicsReg = self.cnf["epics_dset"][1]
      #epicsH5Names=[x for x in h5names if (x.find(epicsReg)>-1)]
      ## common Epics path:
      #ntemp = min([len(x.split("/")) for x in epicsH5Names])
      #epicsCommon = "/".join(epicsH5Names[0].split("/")[0:ntemp])
      ## epics var
      #self._epicsPaths = {}
      #for d in h[epicsCommon]:
        #dpath = d
        #d = d.replace(':','_')
        #d = d.replace('-','_')
        #d = d.replace(' ','_')
        #d = d.replace('.','_')
        #mne = "%s.%s" % (epicsMne.split("/")[0],d)
        #self._epicsPaths[mne]={}
        #self._epicsPaths[mne]["data"] = epicsCommon.replace('CalibCycle:0000','CalibCycle:%04d')+"/"+dpath+"/data"
        #self._epicsPaths[mne]["time"] = epicsCommon.replace('CalibCycle:0000','CalibCycle:%04d')+"/"+dpath+"/time"
        #self._epicsPaths[mne]["conf"] = []
      #self._epicsNames = self._epicsPaths.keys()
    #else:
      #self._epicsNames = []
    ## *** stop EpicsPV *** #
    pointDet = self.cnf["pointDet"]
    for (mnemonic,name) in pointDet.iteritems():
      if (mnemonic.find("nops")>-1) and (mnemonic.find("*")>-1):
        continue
      mnemonic = mnemonic.split('_bak')[0]
      # skip if not in the group we want to read
      if mnemonic not in subSelection:
        continue
      nameData = name["data"].replace("*","\S+")
      detDataset = [x for x in h5names if (re.search(nameData,x) is not None)]
      nameConf = name["conf"].replace("*","\S+")
      try:
        detConf    = [x for x in h5confs if (re.search(nameConf,x) is not None)]
      except:
        detConf=[]
      data = [x for x in detDataset if x[-5:]=="/data" or x[-8:]=="/evrData" or x[-13:]=="/channelValue"]
      time = [x for x in detDataset if x[-5:]=="/time"]
      if ( (len(data) != 0) and (len(time) != 0) ):
        ret[mnemonic] = {}
        #ret[mnemonic]["data"] = data[0].replace('CalibCycle:0000','CalibCycle:%04d')
        #ret[mnemonic]["time"] = time[0].replace('CalibCycle:0000','CalibCycle:%04d')
        ret[mnemonic]["data"] = [replaceCalibCycleString(tdat) for tdat in data]
        ret[mnemonic]["time"] = [replaceCalibCycleString(ttim) for ttim in time]
        if len(detConf)>0:
          ret[mnemonic]["conf"] = detConf[0]
    self._pointDetPaths = ret
    self.pointDetNames = ret.keys()



    areaDet = self.cnf["areaDet"]
    ret = {}
    # 3D detectors need special care because data are written differently 
    # /data, /image, /waveform
    for (mnemonic,name) in areaDet.iteritems():
      mnemonic = mnemonic.split('_bak')[0]
      # skip if not in the group we want to read
      if mnemonic not in subSelection:
        continue
      name = name["data"].replace("*","\S+")
      name_nodata = "/".join(name.split("/")[0:-1])
      detDataset = [x for x in h5names if (re.search(name_nodata,x) is not None)]
      conf = [ ]
      data = [x for x in detDataset if (re.search(name,x) is not None)]
      time = [x for x in detDataset if x[-5:]=="/time"]
      #raise NotImplementedError('Use the source, luke!')
      if ( (len(data) != 0) and (len(time) !=0) ):
        ret[mnemonic] = {}
        ret[mnemonic]["data"] = [replaceCalibCycleString(tdat) for tdat in data]
        ret[mnemonic]["time"] = [replaceCalibCycleString(ttim) for ttim in time]
        ret[mnemonic]["conf"] = conf
    self._areaDetPaths = ret
    self.areaDetNames = ret.keys()
    self._detectorsPaths = tools.dictMerge(self._pointDetPaths,self._areaDetPaths)
    self.detectorsNames = self.pointDetNames + self.areaDetNames
    # *** start scan variables *** #
    logbook("Finding scan variables in hdf5 file ...",end="")
    temp = dict()
    if (len(self.cnf["scan_step"])>0):
      for scan_var in self.cnf["scan_step"]:
        mne,reg = scan_var
        reg  = reg.replace("*","\S+") 
        data = [x for x in h5names if (re.search(reg,x) is not None)]
        if len(data)>1:
          logbook("\nWarning: in lclsH5:findDatasets, multiple path matching regex, using only first",reg)
          logbook("Paths are:",data)
        path = replaceCalibCycleString(data[0])
        obj = scanVar(self.fileHandles,mne,path)
        temp[mne] = obj
    self.scanVars = temp
    names_to_displ = [ x.name for x in temp.values() \
      if hasattr(x,"name")]
    names_to_displ = ",".join(names_to_displ)
    logbook(" ... done, scanVar found:",names_to_displ, \
      time=False)
    # *** stop scan variables *** #
    return
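
The paths stored above are per-calib-cycle templates built via replaceCalibCycleString. Judging from the commented-out .replace() calls in the pointDet branch, it most likely swaps the concrete 'CalibCycle:0000' component for a '%04d' placeholder; a minimal sketch of that assumption (replace_calib_cycle_string is a hypothetical stand-in, not the project's function):

def replace_calib_cycle_string(path):
    # assumed behaviour, mirroring the commented-out .replace() calls above
    return path.replace("CalibCycle:0000", "CalibCycle:%04d")

path = "Configure:0000/Run:0000/CalibCycle:0000/EBeam/data"
template = replace_calib_cycle_string(path)
print(template % 7)
# Configure:0000/Run:0000/CalibCycle:0007/EBeam/data
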