Example #1
  def printGeomHkl(self,hkl,crystal=None):
    phi,phiE = self.getPhiRotationhkl(hkl,crystal)
    hklstr = '[%s]' % ', '.join(map(str, hkl))
    logbook('Reflection %s at phi = %f deg from preset orientation' % (hklstr,phi+phiE))
    logbook('  phiE = %f deg;  phi = %f deg' % (phiE,phi))

    self.getRobotAnglesHkl(hkl,crystal=crystal)
Example #2
def TTextractFilterPositions(Areadet,filtsettings=None,polysettings=None):
  if not filtsettings:
    filtsettings = Areadet.TTfiltsettings
  #if not polysettings:
    #polysettings = dict(rpts=100,cpts=20)
  pos = []
  amp = []
  fwhm = []
  if polysettings:
    poly_pos = []
    poly_cen = []
  ccN = 0
  for ds in Areadet.TTtraces:
    logbook('...extracting from cc %d'%(ccN))
    data = ds[:]
    if polysettings:
      tpos,tamp,tppos,tpcen = TTapplyFilter(data,filtsettings,polysettings=polysettings)
      poly_pos.append(tppos)
      poly_cen.append(tpcen)
    else:
      tpos,tamp,tfwhm = TTapplyFilter(data,filtsettings)
      fwhm.append(tfwhm)
    pos.append(tpos)
    amp.append(tamp)
    ccN+=1
  Areadet._add_saved_datafield('TTfiltPos',pos)
  Areadet._add_saved_datafield('TTfiltAmp',amp)
  Areadet._add_saved_datafield('TTfiltFwhm',fwhm)
  if polysettings:
    Areadet._add_saved_datafield('TTfiltPolyPos',poly_pos)
    Areadet._add_saved_datafield('TTfiltPolyCen',poly_cen)
Example #3
  def _checkNcalib(self):
    numOfScanStepsFile = []
    path = tools.commonPathPrefix(self._paths["data"])
    path = getPath(path)
    for h in self._h5s:
      n=0
      while( tH5.datasetExists(h,path % n) ):
        n+=1
      numOfScanStepsFile.append(n)
    nccs = []
    for h in self._h5s:
      n=0
      while( tH5.datasetExists(h,'/Configure:0000/Run:0000/CalibCycle:%04d/' % n) ):
        n+=1
      nccs.append(n)
    self._numOfScanStepsFile = []
    for n in range(len(self._h5s)):
      if nccs[n] > numOfScanStepsFile[n]:
        logbook("More calibcycle structures than detectors, will lead to empty detector steps...")
        self._numOfScanStepsFile.append(nccs[n])
      else:
        self._numOfScanStepsFile.append(numOfScanStepsFile[n])

    
    return self._numOfScanStepsFile
Example #4
 def getMask(self,mask="all"):
   if mask=="all":
     return self.mask
   elif mask in self._masks:
     return self._masks[mask]
   else:
     logbook("mask %s not present, returning None" % mask)
     return None
Example #5
  def getRefIntensity(self,imagemask=None):
    self.Iref = self.refDataFilter * nansum(self.data)
    fina = 'tmp_getRefIntensity_' + \
        datetime.datetime.now().isoformat() + '.ixp.h5'
    logbook(fina)
    self.Iref.setFile(fina)
    self.Iref.evaluate()
    self.Iref = self.Iref.get_memdata()[0]
    os.remove(fina)
Example #6
def readReferenceToRunNumber(searchstring,runno,comparator='le',wildcard='*'):
  comparator = operator.__dict__[comparator]
  filelist,numbers = getFileNumberlist(searchstring,wildcard=wildcard)
  nums = np.asarray(numbers)
  nums = nums[comparator(nums,runno).nonzero()[0]]
  if len(nums)==0:
    logbook("Warning: Could not find reference run, will try to read closest runnumber instead!")
    nums = np.asarray(numbers)
  refrunno = nums[np.abs(nums-runno).argmin()]
  idx = numbers.index(refrunno)
  return readDataFile(filelist[idx])
Example #7
def addScanVecToSingleShotReadings(scanv,tt):
  """ tt must be either a list of vectors or a matrix; now it
  is not needed as mem data natively supports that:
  d.timeTool.pos + d.scan.lxt"""
  logbook("this funciton is obsolete: please use d.timeTool.pos + d.scan.lxt")
  if isinstance(tt,list):
    return [scanv[i]+tt[i][:] for i in range(len(scanv))]
  elif (tt.shape[0] == len(scanv)):
    return [scanv[i]+tt[i,:] for i in range(len(scanv))]
  elif (tt.shape[1] == len(scanv)):
    return [scanv[i]+tt[:,i] for i in range(len(scanv))]
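
A minimal usage sketch with synthetic inputs (assumes numpy imported as np and a logbook function in scope, as in the snippets above):

import numpy as np

scanv = [0.0, 1.0, 2.0]                     # one scan value per calib cycle
tt = [np.array([0.1, -0.2]),                # per-shot readings, one vector per cycle
      np.array([0.0, 0.3]),
      np.array([-0.1, 0.1])]
corrected = addScanVecToSingleShotReadings(scanv, tt)
# corrected[1] -> array([ 1. ,  1.3])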
Example #8
  def _getMonitor(self): 
    if self._monitor is None:
      try:
        self._monitor = self.res[self._dsNamePrefix+'_nansum_i0']
      except KeyError:
        self.res[self._dsNamePrefix+'_nansum_raw'] = nansum(self.data)
        logbook("Extracting I0 from radial profile...")
        self.res[self._dsNamePrefix+'_nansum_raw'].evaluate()
        self.res[self._dsNamePrefix+'_nansum_i0'] = \
	    self.res[self._dsNamePrefix+'_nansum_raw'].get_memdata()[0]
        self._monitor = self.res[self._dsNamePrefix+'_nansum_i0']
    return self._monitor
Example #9
def process(d):
  if hasattr(d,'evrBool'):
    try:
      d['eventCodeBool'] = [te==1 for te in d.evrBool.data.get_memdata()]
    except:
      logbook("Post process unpacking evrBool data did not succeed for some reason!!!")

  if hasattr(d,'adc'):
    try:
      d['adcV'] = d.adc.data.get_memdata()
    except:
      logbook("Post process unpacking adc data did not succeed for some reason!!!")
Example #10
 def findDetectors(self,detectors=None,exclude=None): 
   # Detectors
   # TODO
   # strategy in 2 paths:
   # (1) detectors are given as alias --> try to read with cached datasets/addresses --> if that fails, use dataset finder ONLY for defined aliases.
   # (2) no detectors are given --> find all detectors for all aliases and get dataset name if alias not existing.
   #
   if (detectors != "parse"):
     # _findDetectors tries to match datasets found in files with mnemonic given in config
     t0 = time.time()
     logbook("Finding data in hdf5 file ...",end="")
     self.findDatasets(detectors=detectors,exclude=exclude)
     logbook(" ... done (%.1f) ms" % ((time.time()-t0)*1e3),time=False)
   else:
     logbook("Starting to look in the file")
     # parsing look in the file for dataset ...
     # use first (data or cached) file to find detectors to use
     h = self.fileHandles[0]
     try:
       cnf = parseToCnf(h)
       self.cnf = tools.dictMerge(self.cnf,cnf)
       self.areaDet  = cnf['areaDet'].keys()
       self.pointDet = cnf['pointDet'].keys()
       self.detectors = cnf['areaDet'].keys()+cnf['pointDet'].keys()
     except KeyError:
       logbook("Failed to find detectors in ", h.filename)
Example #11
def strucArrayToObj(data):
  """
  Transform a structured array as class
  x = np.zeros(3, dtype=[('x','f4'),('y',np.float32),('value','f4',(2,2))])
  A=strucArrayToObj(x)
  print A.value
  """
  c = dropObject()
  if data[0].dtype.names is not None:
    for fieldname in data[0].dtype.names:
      c._add(fieldname,data[fieldname])
  else:
    logbook("No clue on how to make an object out handle ",data)
  return c
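
A usage sketch following the docstring example; the dropObject stand-in below is an assumption (in ixppy it comes from the toolbox):

import numpy as np

class dropObject(object):                   # minimal stand-in (assumption)
  def _add(self, name, value):
    setattr(self, name, value)

x = np.zeros(3, dtype=[('x','f4'),('y',np.float32),('value','f4',(2,2))])
A = strucArrayToObj(x)
print A.value.shape                         # -> (3, 2, 2)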
Example #12
def openOrCreateFile(fname,mode="r",driver=None):
  if (os.path.isfile(fname)):
    if (mode == "r"):
      if not os.access(fname,os.R_OK):
        raise IOError("Asked to read %s but it is not possible, check permissions" % fname)
    elif (mode=="r+") or (mode=="a") or (mode=="w"):
      if not os.access(fname,os.W_OK):
        raise IOError("Asked to read/write %s but it is not possible, check permissions" % fname)
    h5handle=h5py.File(fname,mode,driver=driver)
    logbook("File %s exists already, opening in %s mode" % (fname,mode))
  else:
    logbook("File %s does not exists, creating it" % (fname))
    h5handle=h5py.File(fname,"w")
  return h5handle
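
A usage sketch; the file name is arbitrary (assumes h5py, os and a logbook function in scope):

h = openOrCreateFile('scratch.ixp.h5', mode='a')   # created on the first call,
h.close()                                          # reopened in 'a' mode afterwards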
Example #13
  def getI0Imat(self,bins=None,evaluate=False):
    digi = (self.refDataFilter*self.Iref).digitize(bins=bins)
    self.I0 = digi.scan.bincenters
    self.Imat = digi.ones()*self.data
    if evaluate:
      fina = 'tmp_getImat_' \
          + datetime.datetime.now().isoformat() + '.ixp.h5'
      logbook(fina)
      self.Imat.setFile(fina)
      self.Imat.evaluate()
      self.Imat = np.asarray(self.Imat.mean())
      os.remove(fina)
    else:
      self.Imat = np.asarray(self.Imat.mean())
      if self.dataset is not None:
        self.dataset['corrNonLin_Imat'] = self.Imat
        self.dataset['corrNonLin_I0'] = self.I0
        self.dataset.save()
Example #14
def TTteachFilter(profiles):
  from scipy.linalg import toeplitz
  nfh = tools.nfigure('Digital filter design: Select signal')
  pl.clf()
  p = profiles[2,:]
  pl.plot(p)
  siglim = np.round(tools.getSpanCoordinates('horizontal')).astype(int)
  #print 'Select step position of step'
  #stepoffs = pl.ginput(1)[0][0]-np.mean(siglim)
  #pl.axvline(stepoffs+np.mean(siglim),color='r')
  ys = p[siglim[0]:siglim[1]]
  ysacl = np.correlate(ys,ys,'same')
  sacl = ysacl

  nfh = tools.nfigure('Digital filter design: Select noise region')
  pl.clf()
  pl.plot(profiles.transpose())
  logbook("select lower limit of noise area (NB: has same width as signal range!)")
  noiselim = pl.ginput(1)
  noiselim = int(round(noiselim[0][0]))+np.array([0,np.diff(np.array(siglim))[0]])
  pl.axvspan(noiselim[0],noiselim[1],facecolor='r',alpha=0.5)
  pl.axvline(noiselim[0],color='r')
  pl.axvline(noiselim[1],color='r')
  logbook(noiselim)
  nacl = []
  for p in profiles:
    yn = p[noiselim[0]:noiselim[1]]
    ynacl = np.correlate(yn,yn,'same')
    nacl.append(ynacl)
  nacl = np.mean(np.vstack(nacl),axis=0)

  Ynacl = toeplitz(nacl,r=np.zeros(len(nacl)))
  R  = np.matrix(Ynacl).I
  Rs = np.matrix(sacl)

  weights = R*Rs.transpose()
  weights = np.array(weights).ravel()
  weights = weights-np.median(weights)

  filtsettings = dict(weights=np.array(weights),
                      #stepoffs=stepoffs,
                      noise_limits=noiselim)
  return filtsettings
Example #15
 def _checkCalibConsistency(self):
   import itertools
   # upon initialization each detector checks for Ncalib
   tocheck = list(itertools.chain( self.detectors.values(),self._scanVars))
   Nc = []
   for d in tocheck:
     Nc.append(d._numOfScanSteps)
   Nc = np.array(Nc)
   NcMin = Nc.min(axis=0)
   NcMax = Nc.max(axis=0)
   for d in tocheck:
     # print out most limiting detector
     if (list(NcMin) == list(d._numOfScanSteps)) and (list(NcMin)!=list(NcMax)):
       logbook("WARNING: Detector/Scan ",d,"is limiting the number of Calybcycle to",str(NcMin),"instead of ",str(NcMax))
     d._numOfScanSteps = list(NcMin)
   self.numOfScanSteps = list(NcMin)
   if len(NcMin) ==1:
     self.numOfScanSteps = self.numOfScanSteps[0]
Example #16
def readDataFile(fina,fieldname=None):
  name,extension = os.path.splitext(fina)
  if extension=='.m':
    if fieldname is not None:
      return loadmat(fina)[fieldname]
    else:
      return loadmat(fina)
  elif extension=='.npy':
    return np.load(fina)
  elif extension=='.h5':
    sname,sext = os.path.splitext(name)
    if sext=='.ixp':
      return ixppy.dataset(fina)
    return h5py.File(fina)
  else:
    try:
      return np.loadtxt(fina)
    except Exception as e:
      logbook("could not read that file. \nError ---->\n ")
      logbook(e)
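
A usage sketch with a hypothetical file; dispatch is purely on the extension (assumes numpy as np):

import numpy as np

np.save('ref_run_0042.npy', np.arange(10))  # hypothetical reference file
arr = readDataFile('ref_run_0042.npy')      # -> array([0, 1, ..., 9])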
Example #17
 def _isallowed(self,hkl):
   # reflection selection rules for common cubic packings
   if self.packing == 'fcc':
     # fcc: h,k,l must be unmixed (all even or all odd)
     isallowed = isodd(hkl[0])==isodd(hkl[1])==isodd(hkl[2])
   elif self.packing == 'bcc':
     # bcc: h+k+l must be even
     isallowed = not isodd(sum(hkl))
   elif self.packing == 'diamond':
     # diamond: all odd, or all even with h+k+l divisible by 4
     isallowed = (isodd(hkl[0]) and isodd(hkl[1]) and isodd(hkl[2])) \
         or (not isodd(sum(hkl)) and (sum(hkl)/4.).is_integer())
   elif self.packing == 'cubic':
     isallowed = True
   else:
     logbook("crystal structure not implemented (yet)")
     isallowed = None
   return isallowed
Example #18
def gamdel2Qfib(gamma,delta,alpha,lam):
  gamma = np.array(iterfy(gamma))
  delta = np.array(iterfy(delta))

  shpgam = np.shape(gamma)
  shpdel = np.shape(delta)
  if not shpgam==shpdel:
    logbook("gamma and delta array must have same shape!")
    return
  gamma = gamma.ravel()
  delta = delta.ravel()
  Qs =  2*np.pi/lam * np.array((-rotmat3D([0,1,0],-alpha))*np.mat([
    np.cos(delta)*np.cos(gamma)-1,
    -np.cos(delta)*np.sin(gamma),
    -np.sin(delta)]))
  Qip = np.sign(Qs[1,:])*np.sqrt(Qs[0,:]**2+Qs[1,:]**2)
  Qop = Qs[2,:]
  Qip = Qip.reshape(shpgam)
  Qop = Qop.reshape(shpgam)
  return Qip,Qop
Example #19
def corrNonlinGetPar(data,correct,order=2,data_0=0,correct_0=0,
    displayWarning=True,plot=False):
  """ Find parameters for non linear correction
    *data* should be an 1D array (use .ravel() in case) of the
    detectors that is suspected to be non linear
    *correct* is the detector that is sussposed to be linear
    *data_0" is an offset to use for the data (used only if plotting"
    *correct_0* offset of the "linear detector"""
  # poor man wrapping :D #
  try:
    data = data.ravel()
  except AttributeError:
    pass
  try:
    correct = correct.ravel()
  except AttributeError:
    pass
  p =  np.polyfit(data,correct,order)
  if order>=2 and p[-3]<0:
    logbook("corrNonlinGetPar: consistency problem, second order coefficient should \
    be > 0, please double check result (plot=True) or try inverting the data and the\
    correct arguments",level=2,func="toolsDetectors.corrNonlinGetPar")
  p[-1] = p[-1]-correct_0
  if plot:
    d = corrNonlin(data,p,data_0=data_0,correct_0=correct_0)
    plt.plot(correct,data,".",label="before correction")
    plt.plot(correct,d,".",label="after correction")
    poly_lin = np.polyfit(correct,d,1)
    xmin = min(correct.min(),0)
    xtemp = np.asarray( (xmin,correct.max()) )
    plt.plot(xtemp,np.polyval(poly_lin,xtemp),
       label="linear fit")
    plt.plot(correct,d-np.polyval(poly_lin,correct),
       ".",label="difference after-linear")
    plt.xlabel("correct")
    plt.ylabel("data")
    plt.legend()
  return p
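
A synthetic demonstration with a saturating detector, so the fitted second-order coefficient comes out positive (assumes numpy as np; the data are made up):

import numpy as np

correct = np.linspace(0., 1., 1000)         # the assumed-linear monitor
data = correct - 0.15*correct**2            # detector that saturates quadratically
data = data + 0.005*np.random.randn(data.size)  # a little noise
p = corrNonlinGetPar(data, correct, order=2)
# p holds the polynomial coefficients mapping data back onto the
# linear scale, to be passed on to corrNonlin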
Example #20
def applyFilter(data,filtsettings,plotOutput=False,polysettings=None,erfsettings=None,saveplots=False,kind="stepUp"):
  weights = np.array(filtsettings['weights']).ravel()
  #stepoffs = filtsettings['stepoffs'] 
  lf = len(weights)
  halfrange = int(round(lf/10))
  pos = []
  amp = []
  fwhm = []
  runningno = 0
  for d in data:
    f0 = np.convolve(np.array(weights).ravel(),d,'same')
    f = f0[lf/2:len(f0)-lf/2-1]
    if (kind=="stepUp"):
      mpr = f.argmax()
    else:
      mpr = f.argmin()
    # now do a parabolic fit around the max
    xd = np.arange(max(0,mpr-halfrange),min(mpr+halfrange,len(f)-1))
    yd = f[max(0,mpr-halfrange):min(mpr+halfrange,len(f)-1)]
    p2 = np.polyfit(xd,yd,2)
    tpos = -p2[1]/2./p2[0]
    tamp = np.polyval(p2,tpos)
    try:
      beloh = (f<tamp/2).nonzero()[0]-mpr
      tfwhm = abs(beloh[beloh<0][-1]-beloh[beloh>0][0])
    except IndexError:
      logbook("FWHM not applied",level=0)
      tfwhm = np.nan
    pos.append(tpos)
    amp.append(tamp)
    fwhm.append(tfwhm)
    runningno+=1
  pos  = np.asarray(pos) + lf/2.
  amp  = np.asarray(amp)
  fwhm = np.asarray(fwhm)
  returntuple = [pos,amp,fwhm]
  return tuple(returntuple)
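
A sketch with synthetic step traces and a crude matched filter standing in for the output of TTteachFilter (assumes numpy as np and Python 2 integer division, as in the snippet itself):

import numpy as np

weights = np.hstack([-np.ones(20), np.ones(20)])   # crude step filter (assumption)
traces = [np.hstack([np.zeros(100), np.ones(100)])
          + 0.05*np.random.randn(200) for n in range(5)]
pos, amp, fwhm = applyFilter(traces, dict(weights=weights))
# pos -> step positions near sample 100, one entry per trace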
Example #21
def histogramSmart(x,fac=20.,include=-1,remove=0,maxints=1000000):
  lims0 = np.percentile(x,[20,80])
  ind = (x>lims0[0])&(x<lims0[1])
  interval = np.diff(lims0)/np.round(sum(ind)/fac)
  include = iterfy(include)
  remove = iterfy(remove)
  if sum(include)>0:
    med = np.median(x)
    if len(include)==1:
      include = np.abs(include[0])
      hmn,hmx = np.percentile(x,[50-include,50+include])
    elif len(include)==2:
      hmn,hmx = np.percentile(x,[50-include[0],50+include[1]])
  elif sum(remove)>0:
    med = np.median(x)
    if len(remove)==1:
      remove = np.abs(remove[0])
      hmn,hmx = np.percentile(x,[remove,100-remove])
    elif len(remove)==2:
      hmn,hmx = np.percentile(x,[remove[0],100-remove[1]])
  else:
    hmn,hmx = (np.min(x),np.max(x))

  xd = np.diff(x[ind])
  xd = xd[xd>0]
  xdmn = np.min(xd)
  if xdmn>interval:
    interval = xdmn
    
  if (hmx-hmn)/interval>maxints:
    logbook("Warning: the assigned binwidth %g leads to more bins than assigned in maxint (%g)." %(interval,maxints),level=2)
    interval = (hmx-hmn)/maxints
    logbook("binwidth is set to %g." %(interval))
  edges = np.arange(hmn,hmx,interval)
  h,dum = np.histogram(x,bins=edges)
  return h,edges
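
A usage sketch on synthetic data (assumes numpy as np and the ixppy iterfy helper, which wraps scalars into lists, in scope):

import numpy as np

x = np.random.randn(10000)                  # synthetic shot-to-shot readings
h, edges = histogramSmart(x, remove=1)      # clip 1% outliers on each side
# h[i] counts the values falling in [edges[i], edges[i+1])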
Example #22
  def _initPointDet(self):
    if self._isPointDet and self._useMemoryCache:
      if len(self._paths['data'])>1:
        if len(self._paths['data'])>50:
          # this seems the epics case
          logbook("crazy amount of point counters in %s, force full reading initialization with..." %(self.name))
        else:
          # this might be the tt case
          for datapath,timepath in zip(self._paths['data'],self._paths['time']):
            datapath = getPath(datapath)
            timepath = getPath(timepath)
            name = getDetNamefromPath(datapath)
            dat,fields = self._readPointDataGeneral(datapath)
            times = self._readTime(timepath)
            if not fields==[] and not hasattr(self,'fields'):
              self.fields = dict()
            for field,tdat in zip(fields,dat):
              tname = name+'_'+field
              data = [tdat,times]
              self.fields[tname] = data
              #memdata(tname,data)
      else:
        # this might be the ipm case
        datapath = self._paths['data'][0]
        timepath = self._paths['time'][0]
        datapath = getPath(datapath)
        timepath = getPath(timepath)
        dat,fields = self._readPointDataGeneral(datapath)
        times = self._readTime(timepath)

        if not fields==[]:
          self.fields = dict()
          for field,tdat in zip(fields,dat):
            tname = field
            data = [tdat,times]
            self.fields[tname] = data 
Example #23
  def initDetectors(self,detectors=None):
    detectors = self.detectorsNames
    # define detectors
    self.detectors = {}

    # START POINT DETECTORS
    t0 = time.time()
    logbook("defining pointDet (with memory cache) ...",end="")
    #if (rdPointDetectorsImmediately):
      #print " (pre-reading all) ",
    for dname in self.pointDetNames:
      #TODO: here detector-dependent modules from a folder are to be used in special cases, like a plugin. Not working because of importing issues; commented out for now.
      if dname in pluginNames:
        tdclass = eval('detector_'+dname)
      else:
        tdclass = detector
      det = tdclass(self.fileHandles,dname,self._detectorsPaths[dname],useMemoryCache=True,isPointDet=True)
      self.detectors[dname] = det

      #if (rdPointDetectorsImmediately):
        #for i in range(len(self.fileHandles)):
          #det.readData(stepSlice=range(det._numOfScanSteps[i]),fileSlice=i)
      #tools.addToObj( self,dname,det )

    logbook(" ... done (%.1f) ms, %d detectors" % 
      ((time.time()-t0)*1e3,len(self.pointDetNames)),time=False)
    
    # DONE POINT DETECTORS

    # START AREA DETECTORS
    t0 = time.time()
    logbook("defining areaDet (without memory cache) ...",end="")
    for dname in self.areaDetNames:
      det = detector(self.fileHandles,dname,self._detectorsPaths[dname],useMemoryCache=False)
      self.detectors[dname] = det
      #tools.addToObj( self,dname,det )
    logbook(" ... done (%.1f) ms, %d detectors" %
      ((time.time()-t0)*1e3,len(self.areaDetNames)),time=False)
Example #24
 def __init__(self):
   logbook("This funciton is obsolete, please use: corrNonlinGetPar and corrNonlin")
   pass
Example #25
def histogram2dSmart(x,y,fac=400,include=-1,remove=0,maxints=500):
  limsx0 = matplotlib.mlab.prctile(x,p=(20,80))
  limsy0 = matplotlib.mlab.prctile(y,p=(20,80))
  indx = (x>limsx0[0])&(x<limsx0[1])
  indy = (y>limsy0[0])&(y<limsy0[1])
  intervalx = np.diff(limsx0)/np.round(sum(indx)/fac)
  intervaly = np.diff(limsy0)/np.round(sum(indy)/fac)
  include = iterfy(include)
  remove = iterfy(remove)
  if sum(include)>0:
    if len(include)==1:
      includesingle = np.abs(include[0])
      xhmn,xhmx = np.percentile(x,[50-includesingle,50+includesingle])
      yhmn,yhmx = np.percentile(y,[50-includesingle,50+includesingle])
    elif len(include)==2:
      include[0] = iterfy(include[0])
      include[1] = iterfy(include[1])
      if len(include[0])==1:
        xhmn,xhmx = np.percentile(x,[50-include[0][0],50+include[0][0]])
      elif len(include[0])==2:
        xhmn,xhmx = np.percentile(x,[50-include[0][0],50+include[0][1]])
      if len(include[1])==1:
        yhmn,yhmx = np.percentile(y,[50-include[1][0],50+include[1][0]])
      elif len(include[1])==2:
        yhmn,yhmx = np.percentile(y,[50-include[1][0],50+include[1][1]])
  elif sum(remove)>0:
    if len(remove)==1:
      removesingle = np.abs(remove[0])
      xhmn,xhmx = np.percentile(x,[removesingle,100-removesingle])
      yhmn,yhmx = np.percentile(y,[removesingle,100-removesingle])
    elif len(remove)==2:
      remove[0] = iterfy(remove[0])
      remove[1] = iterfy(remove[1])
      if len(remove[0])==1:
        xhmn,xhmx = np.percentile(x,[remove[0][0],100-remove[0][0]])
      elif len(remove[0])==2:
        xhmn,xhmx = np.percentile(x,[remove[0][0],100-remove[0][1]])
      if len(remove[1])==1:
        yhmn,yhmx = np.percentile(y,[remove[1][0],100-remove[1][0]])
      elif len(remove[1])==2:
        yhmn,yhmx = np.percentile(y,[remove[1][0],100-remove[1][1]])
  else:
    xhmn,xhmx = np.min(x),np.max(x)
    yhmn,yhmx = np.min(y),np.max(y)
  xd = np.diff(x[indx])
  xd = xd[xd>0]
  xdmn = np.min(xd)
  if xdmn>intervalx:
    intervalx = xdmn
  yd = np.diff(y[indy])
  yd = yd[yd>0]
  ydmn = np.min(yd)
  if ydmn>intervaly:
    intervaly = ydmn
  if (xhmx-xhmn)/intervalx>maxints:
    logbook("Warning: the assigned x binwidth %g leads to more bins than assigned in maxint (%g)." %(intervalx,maxints),level=2)
    intervalx = (xhmx-xhmn)/maxints
    logbook("binwidth is set to %g." %(intervalx),level=2)
  if (yhmx-yhmn)/intervaly>maxints:
    logbook("Warning: the assigned y binwidth %g leads to more bins than assigned in maxint (%g)." %(intervaly,maxints),level=2)
    intervaly = (yhmx-yhmn)/maxints
    logbook("binwidth is set to %g." %(intervaly),level=2)
  edgesx = np.arange(xhmn,xhmx,intervalx)
  edgesy = np.arange(yhmn,yhmx,intervaly)
  h,dumx,dumy = np.histogram2d(x,y,[edgesx,edgesy])
  h = h.transpose()
  return h,edgesx,edgesy
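
The same kind of sketch for the 2D version; note it relies on matplotlib.mlab.prctile, which only exists in older matplotlib releases (iterfy as above):

import numpy as np

x = np.random.randn(5000)
y = 0.5*x + np.random.randn(5000)           # correlated synthetic data
h, ex, ey = histogram2dSmart(x, y)
# h is transposed: its first axis follows y, the second follows x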
Example #26
def extractFromRunList(runlist,exp,datasetname='opal2',profileLimits=None,xrayoffCode=None,laseroffCode=None,filter=None,save=False):
  for run in runlist:
    d = ixppy.dataset((exp,run))
    logbook("TT extracting from run %d" %run)
    extractFromRun(d,datasetname=datasetname,profileLimits=profileLimits,xrayoffCode=xrayoffCode,laseroffCode=laseroffCode,filter=filter,save=save)
    logbook("done!")
Example #27
  def findDatasets(self,detectors=None,exclude=None):
    """finds datasets from a cnf that contains aliases, if no aliases are defined the file is parsed and the hdf5 names are returned as names.
    
    Finds detectors in hdf5 file matching with mnemonic given in config file;
    the matching mnemonic names are as dictionaries (self.pointDet and self.areaDet)
    The 
    """
    subSelection = detectors
    if (subSelection==[]) or (subSelection is None):
      subSelection = self.cnf["pointDet"].keys() + self.cnf["areaDet"].keys()

    if exclude is not None:
      exclude = tools.iterfy(exclude)
      for tex in exclude:
        while True:
          try:
            subSelection.remove(tex)
          except ValueError:
            break
    h = self.fileHandles[0]
   
    # Getting all Detector path strings in CCs and config
    try:
      # try to use only CalibCycle0
      # bad for MEC as some calib cycles don't contain anything... look for the longest dataset for now, later look in all

      base = "Configure:0000/Run:0000/"
      bases = h[base].keys()
      lens = np.array([len(h[base][key].keys()) for key in bases])
      base = base + bases[lens.argmax()] +'/'
      h5names = tH5.getDataset_hack(h[base])
      #h5names = [base+x for x in h5names]
      # find all confs
      base = "Configure:0000/"
      confs = h[base].keys()
      h5confs = []
      for c in confs:
        if (c.find("Run")==0):
          continue
        else:
          temp = tH5.getDataset(h[base][c])
          for t in temp:
            h5confs.append(base+c+"/"+t)
    except KeyError:
      h5names = tH5.getDataset(h)

    

    #raise NotImplementedError('Use the source, luke!')
    ret = {}
    ## *** start EpicsPV *** #
    ## look for epics name
    #epicsFound=False
    #if ("epics_dset" in self.cnf):
      #epicsMne = self.cnf["epics_dset"][0]
      #epicsReg = self.cnf["epics_dset"][1]
      #epicsH5Names=[x for x in h5names if (x.find(epicsReg)>-1)]
      ## common Epics path:
      #ntemp = min([len(x.split("/")) for x in epicsH5Names])
      #epicsCommon = "/".join(epicsH5Names[0].split("/")[0:ntemp])
      ## epics var
      #self._epicsPaths = {}
      #for d in h[epicsCommon]:
        #dpath = d
        #d = d.replace(':','_')
        #d = d.replace('-','_')
        #d = d.replace(' ','_')
        #d = d.replace('.','_')
        #mne = "%s.%s" % (epicsMne.split("/")[0],d)
        #self._epicsPaths[mne]={}
        #self._epicsPaths[mne]["data"] = epicsCommon.replace('CalibCycle:0000','CalibCycle:%04d')+"/"+dpath+"/data"
        #self._epicsPaths[mne]["time"] = epicsCommon.replace('CalibCycle:0000','CalibCycle:%04d')+"/"+dpath+"/time"
        #self._epicsPaths[mne]["conf"] = []
      #self._epicsNames = self._epicsPaths.keys()
    #else:
      #self._epicsNames = []
    ## *** stop EpicsPV *** #
    pointDet = self.cnf["pointDet"]
    for (mnemonic,name) in pointDet.iteritems():
      if (mnemonic.find("nops")>-1) and (mnemonic.find("*")>-1):
        continue
      mnemonic = mnemonic.split('_bak')[0]
      # skip if not in the group we want to read
      if mnemonic not in subSelection:
        continue
      nameData = name["data"].replace("*","\S+")
      detDataset = [x for x in h5names if (re.search(nameData,x) is not None)]
      nameConf = name["conf"].replace("*","\S+")
      try:
        detConf    = [x for x in h5confs if (re.search(nameConf,x) is not None)]
      except:
        detConf = []
      data = [x for x in detDataset if x[-5:]=="/data" or x[-8:]=="/evrData" or x[-13:]=="/channelValue"]
      time = [x for x in detDataset if x[-5:]=="/time"]
      if ( (len(data) != 0) and (len(time) != 0) ):
        ret[mnemonic] = {}
        #ret[mnemonic]["data"] = data[0].replace('CalibCycle:0000','CalibCycle:%04d')
        #ret[mnemonic]["time"] = time[0].replace('CalibCycle:0000','CalibCycle:%04d')
        ret[mnemonic]["data"] = [replaceCalibCycleString(tdat) for tdat in data]
        ret[mnemonic]["time"] = [replaceCalibCycleString(ttim) for ttim in time]
        if len(detConf)>0:
          ret[mnemonic]["conf"] = detConf[0]
    self._pointDetPaths = ret
    self.pointDetNames = ret.keys()



    areaDet = self.cnf["areaDet"]
    ret = {}
    # 3D detectors need special care because data are written differently 
    # /data, /image, /waveform
    for (mnemonic,name) in areaDet.iteritems():
      mnemonic = mnemonic.split('_bak')[0]
      # skip if not in the group we want to read
      if mnemonic not in subSelection:
        continue
      name = name["data"].replace("*","\S+")
      name_nodata = "/".join(name.split("/")[0:-1])
      detDataset = [x for x in h5names if (re.search(name_nodata,x) is not None)]
      conf = [ ]
      data = [x for x in detDataset if (re.search(name,x) is not None)]
      time = [x for x in detDataset if x[-5:]=="/time"]
      #raise NotImplementedError('Use the source, luke!')
      if ( (len(data) != 0) and (len(time) !=0) ):
        ret[mnemonic] = {}
        ret[mnemonic]["data"] = [replaceCalibCycleString(tdat) for tdat in data]
        ret[mnemonic]["time"] = [replaceCalibCycleString(ttim) for ttim in time]
        ret[mnemonic]["conf"] = conf
    self._areaDetPaths = ret
    self.areaDetNames = ret.keys()
    self._detectorsPaths = tools.dictMerge(self._pointDetPaths,self._areaDetPaths)
    self.detectorsNames = self.pointDetNames + self.areaDetNames
    # *** start scan variables *** #
    logbook("Finding scan variables in hdf5 file ...",end="")
    temp = dict()
    if (len(self.cnf["scan_step"])>0):
      for scan_var in self.cnf["scan_step"]:
        mne,reg = scan_var
        reg  = reg.replace("*","\S+") 
        data = [x for x in h5names if (re.search(reg,x) is not None)]
        if len(data)>1:
          logbook("\nWarning: in lclsH5:findDatasets, multiple path matching regex, using only first",reg)
          logbook("Paths are:",data)
        path = replaceCalibCycleString(data[0])
        obj = scanVar(self.fileHandles,mne,path)
        temp[mne] = obj
    self.scanVars = temp
    names_to_displ = [ x.name for x in temp.values() \
      if hasattr(x,"name")]
    names_to_displ = ",".join(names_to_displ)
    logbook(" ... done, scanVar found:",names_to_displ, \
      time=False)
    # *** stop scan variables *** #
    return