def OnSaveHistogram(self, evt):
    toolkit = self.toolkit
    # get histogram
    key = self.focus
    histogram = self.histograms.get(key)
    # no histogram, alert
    if histogram is None:
        toolkit.messageDialog(None, "Error", "No histogram is in focus")
        return
    # save
    filetypes = 'h5'
    filename = toolkit.savefileDialog(
        None, "Save histogram to file", filetypes)
    if filename:
        import os
        if os.path.exists(filename):
            toolkit.messageDialog(
                None, "Error", "Overwrite is not supported yet.")
            return
        import histogram.hdf as hh
        hh.dump(histogram, filename, '/', 'c')
    return

def computeFocusedSpectraForRealMonitors(E, m2sout, out):
    from mcni.utils.conversion import e2v
    v = e2v(E)
    from pyre.units.time import second
    import histogram.hdf as hh, histogram as H

    # monitor 1
    m1 = hh.load(os.path.join(m2sout, "mon1-tof.h5"), "I(tof)")
    L1 = 11.831
    t1 = L1 / v  # * second
    m1p = m1[(t1 * 0.9, t1 * 1.1)]
    m1pc = H.histogram("I(tof)", m1p.axes(), data=m1p.I, errors=m1p.E2)
    m1pc.setAttribute("title", "Monitor 1 I(tof)")
    hh.dump(m1pc, os.path.join(out, "mon1-itof-focused.h5"), "/", "c")

    # monitor 2
    m2 = hh.load(os.path.join(m2sout, "mon2-tof.h5"), "I(tof)")
    L2 = 18.5
    t2 = L2 / v  # * second
    m2p = m2[(t2 * 0.9, t2 * 1.1)]
    m2pc = H.histogram("I(tof)", m2p.axes(), data=m2p.I, errors=m2p.E2)
    m2pc.setAttribute("title", "Monitor 2 I(tof)")
    hh.dump(m2pc, os.path.join(out, "mon2-itof-focused.h5"), "/", "c")
    return

def process(path):
    basename = os.path.basename(path)
    from nslice.Run import Run
    run = Run(path)
    print("instrument=%s, Ei=%s, psi=%s" % (
        run.instrument, run.Ei, run.psi))
    from nslice.XtalOrientation import XtalOrientation
    a = b = 8.87; c = 5.2
    from math import pi
    twopi = 2 * pi
    ra, rb, rc = [twopi / a, 0, 0], [0, twopi / b, 0], [0, 0, twopi / c]
    u, v = [1, 0, 0], [0, 1, 0]
    xtal_ori = XtalOrientation(ra, rb, rc, u, v, run.psi)
    h, k, l, E = run.compute_hklE(xtal_ori)
    I, error = run.read_data()
    h.shape = k.shape = l.shape = E.shape = I.shape = error.shape = -1
    hklEIE = np.vstack((h, k, l, E, I, error))
    from nslice.slice import slice_hE
    H, edges = slice_hE(
        hklEIE,
        k=[0.95, 1.05], l=[-1, 1],
        h=(-1, 6, 0.02), E=(-5, 10, 0.1),
    )
    import histogram, histogram.hdf as hh
    axes = [
        histogram.axis('h', boundaries=edges[0]),
        histogram.axis('E', unit='meV', boundaries=edges[1]),
    ]
    h = histogram.histogram('I(h,E)', axes=axes, data=H)
    hh.dump(h, 'I_hE.h5')
    return

def run(eventdatafilename, nevents, h5filename, pixparams):
    print("eventdatafilename = %s" % eventdatafilename)
    print("nevents = %s" % nevents)
    print("output h5filename = %s" % h5filename)
    print('pixelID params = %s' % (pixparams,))
    if os.path.exists(h5filename):
        raise IOError("%s already exists" % h5filename)
    pixbegin, pixend, pixstep = pixparams
    ipixdat = 'Ipix.%s-%s-%s.dat' % (eventdatafilename, nevents, pixparams)
    cmd = 'ipix "%s" %s %s %s %s "%s" ' % (
        eventdatafilename, nevents, pixbegin, pixend, pixstep, ipixdat)
    if os.system(cmd):
        raise RuntimeError("%s failed" % cmd)
    if not os.path.exists(ipixdat):
        raise RuntimeError("%s was not created" % ipixdat)
    from arcseventdata.histogramFrom2colascii import convert
    h = convert(ipixdat, name="I(pix)", xname='pixelID', xunit='1')
    from histogram.hdf import dump
    dump(h, h5filename, '/', 'c')
    return

def run(
        ncount=1e7, nodes=5, Ei=700.,
        E_Q="Q*Q/3", S_Q="1", sigma_Q='Q/2.',
        Qmin=0, Qmax=10., Qstep=0.1,
        Emin=0, Emax=50., Estep=1.,
        mod2sample='../mod2sample',
):
    Ei_user = Ei
    Ei = computeAverageEnergy()
    if abs(Ei - Ei_user) / Ei > 0.1:
        raise ValueError(
            "nominal energy %s is too different from average energy "
            "at sample position %s" % (Ei_user, Ei))
    import os
    incident_neutrons = 'incident-neutrons'
    if not os.path.exists(incident_neutrons):
        os.link('../mod2sample/out/neutrons', 'incident-neutrons')

    # create scattering kernel file
    createScatteringKernel(
        E_Q=E_Q, S_Q=S_Q, sigma_Q=sigma_Q,
        Qmin=Qmin, Qmax=Qmax,
    )

    # run main sim
    cmd = './sssd --ncount=%s --mpirun.nodes=%s' % (ncount, nodes)
    execute(cmd)

    # reduce events to S(Q,E)
    eventsdat = 'out/events.dat'
    Qaxis = Qmin, Qmax, Qstep
    Eaxis = Emin, Emax, Estep
    Ei, toffset = getEiToffset(mod2sample)
    iqe = reduceToIQE(eventsdat, Ei, toffset, Qaxis, Eaxis)
    from histogram.hdf import dump
    dump(iqe, 'iqe.h5', '/', 'c')
    global interactive
    if interactive:
        from histogram.plotter import defaultPlotter
        defaultPlotter.plot(iqe)

    # quick analysis of S(Q,E) (without detector and sample size effects)
    cmd = ['./analyze-sqe ']
    cmd.append('--mpirun.nodes=%s' % nodes)
    cmd.append('--ncount=%s --monitor.Ei=%s' % (ncount, Ei))
    cmd.append(' --monitor.Qmin=%s --monitor.Qmax=%s' % (Qmin, Qmax))
    cmd.append(' --monitor.Emin=%s --monitor.Emax=%s' % (Emin, Emax))
    cmd = ' '.join(cmd)
    # execute(cmd)
    return

def testdump2(self):
    'dump two histograms to one hdf'
    filename = 'testdump1.h5'
    import os
    if os.path.exists(filename):
        os.remove(filename)
    from h5py import File
    fs = File(filename, 'w')
    from histogram import histogram, arange
    h = histogram(
        'h',
        [('x', arange(0, 100, 1.)),
         ('y', arange(100, 180, 1.))],
        unit='meter',
    )
    dump(h, None, '/', fs=fs)
    h2 = histogram(
        'h2',
        [('x', arange(0, 100, 1.)),
         ('y', arange(100, 180, 1.))],
        unit='meter',
    )
    dump(h2, None, '/', fs=fs)
    # load histogram
    h2c = load(filename, '/h2', fs=fs)
    print(h2c)
    self.assertTrue(os.path.exists(filename))

def _saveResult(self, res, directory):
    """save result to the given directory"""
    from histogram.hdf import dump
    import os
    p = os.path.join(directory, self._getHistogramFilename())
    dump(res, p, '/', 'c')
    return

def test2(self):
    datadir = self.datadir
    from mcvine.phonon.powderSQE.IDF import from_data_dir
    import mcvine.phonon.powderSQE.IDF as psidf
    from mccomponents.sample import phonon as mcphonon
    doshist = mcphonon.read_dos.dos_fromidf(
        os.path.join(datadir, 'DOS')).doshist
    disp = psidf.disp_from_datadir(datadir)
    IQEhist, mphhist = psidf.from_data_dir(
        datadir=datadir, disp=disp,
        N=int(1e6),
        Q_bins=np.arange(0, 14, 0.1), E_bins=np.arange(0, 90, .5),
        doshist=doshist,
        T=300., Ei=120., max_det_angle=140.,
        include_multiphonon=True,
    )
    IQEhist = IQEhist + mphhist
    hh.dump(IQEhist, 'Si-iqe-test2.h5')
    expected = hh.load(
        os.path.join(here, 'saved_results/Si-all-phonon-Ei_120-T_300-N_1e6.h5'))
    max = np.nanmax(expected.I)
    reldiff = IQEhist - expected
    reldiff.I /= max; reldiff.E2 /= max * max
    Nbigdiff = (np.abs(reldiff.I) > 0.03).sum()
    Ngood = (IQEhist.I == IQEhist.I).sum()
    Ntotal = IQEhist.size()
    self.assertTrue(Ngood * 1. / Ntotal > .65)
    self.assertTrue(Nbigdiff * 1. / Ngood < .10)
    return

def normalize(self, IQE):
    'normalize IQE'
    # only the master node needs to do normalization
    if self.mpiRank != 0:
        return

    # for debugging
    from histogram.hdf import dump
    filename = 'IQE-nosolidanglenormalization.h5'
    import os
    if os.path.exists(filename):
        os.remove(filename)
    dump(IQE, filename, '/', 'c')

    info.log('node %s: convert I(Q,E) datatype from integer to double'
             % self.mpiRank)
    from histogram import histogram
    newIQE = histogram(IQE.name(), IQE.axes())
    newIQE[(), ()] = IQE[(), ()]

    Ei = self.Ei
    pixelPositions = self.pixelPositions
    pixelSolidAngles = self.pixelSolidAngles
    from arcseventdata.normalize_iqe import normalize_iqe
    info.log(
        'node %s: normalize I(Q,E) by solid angle: '
        'Ei=%s, positions.shape=%s, solidangles.shape=%s'
        % (self.mpiRank, Ei, pixelPositions.shape, pixelSolidAngles.shape))
    import time
    t0 = time.time()
    normalize_iqe(newIQE, Ei, pixelPositions, pixelSolidAngles)
    t1 = time.time()
    info.log('node %s: normalization done: %s seconds'
             % (self.mpiRank, t1 - t0))
    return newIQE

def slice2hist(ifile, ofile):
    import histogram as H, histogram.hdf as hh
    from mantid import simpleapi as msa

    def eliminateUnitDimension(shape):
        for d in shape:
            if d > 1:
                yield d
        return

    ws = msa.Load(ifile)
    I = ws.getSignalArray()
    I.shape = tuple(eliminateUnitDimension(I.shape))
    E2 = ws.getErrorSquaredArray()
    E2.shape = I.shape
    axes = []
    for i in range(ws.getNumDims()):
        dim = ws.getDimension(i)
        if dim.getNBins() > 1:
            axis = H.axis(
                dim.getName(), unit="1",
                centers=[dim.getX(ind) for ind in range(dim.getNBins())])
            axes.append(axis)
        continue
    h = H.histogram("slice", axes, data=I, errors=E2)
    hh.dump(h, ofile)
    return

def saveSQE(Q, E, S, name):
    h = H.histogram(
        name,
        [('Q', Q, 'angstrom**-1'),
         ('E', E, 'meV')],
        S)
    hh.dump(h, '%s.h5' % (name,))
    return

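# Hypothetical usage sketch for saveSQE above (not from the original sources).
# It assumes the module-level imports `import histogram as H, histogram.hdf as hh`
# that saveSQE relies on, that Q and E are 1-D arrays of bin centers, and that S
# has shape (len(Q), len(E)).
def demo_saveSQE():
    import numpy as np
    Q = np.arange(0., 10., 0.1)     # bin centers, 1/angstrom
    E = np.arange(-50., 50., 1.)    # bin centers, meV
    S = np.zeros((Q.size, E.size))  # placeholder intensities
    saveSQE(Q, E, S, 'demo-sqe')    # writes demo-sqe.h5
    return
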
def run(scattering_rundir, nodes, ncount=None):
    # sendneutronstodetsys(scattering_rundir, nodes, ncount=ncount)
    eventsdat = 'out/events.dat'
    iqe = reduceToIQE(eventsdat)
    from histogram.hdf import dump
    dump(iqe, 'iqe.h5', '/', 'c')
    from histogram.plotter import defaultPlotter
    defaultPlotter.plot(iqe)
    return

def onGridSQE(self, gridsqe):
    sqehist = gridsqe.sqehist
    from histogram.hdf import dump
    filename = 'sqehist.h5'
    h5path = 'S(Q,E)'
    dump(sqehist, filename, '/', 'c')
    self._write('<GridSQE histogram-hdf-path="%s"/>'
                % '/'.join([filename, h5path]))
    return

def test(self):
    "wrap IQE_monitor"
    from mcstas2 import componentfactory
    category = 'monitors'
    componentname = 'IQE_monitor'
    factory = componentfactory(category, componentname)

    Qmin = 0; Qmax = 13.; nQ = 130
    Emin = -50; Emax = 50.; nE = 100
    component = factory(
        'component',
        Ei=Ei,
        Qmin=Qmin, Qmax=Qmax, nQ=nQ,
        Emin=Emin, Emax=Emax, nE=nE,
        max_angle_out_of_plane=30, min_angle_out_of_plane=-30,
        max_angle_in_plane=120, min_angle_in_plane=-30,
    )

    scatterer = makeScatterer()

    import mcni
    N = 10000
    neutrons = mcni.neutron_buffer(N)
    for i in range(N):
        neutron = mcni.neutron(r=(0, 0, 0), v=(0, 0, vi), time=0, prob=1)
        scatterer.scatter(neutron)
        neutrons[i] = neutron
        # print(neutrons[i])
        continue
    component.process(neutrons)

    hist = get_histogram(component)

    import os
    f = os.path.basename(__file__)
    filename = 'IQE-%s.h5' % f
    if os.path.exists(filename):
        os.remove(filename)
    import histogram.hdf as hh
    hh.dump(hist, filename, '/', 'c')

    if self.interactive:
        from histogram.plotter import defaultPlotter
        defaultPlotter.plot(hist)
    return

def saveHistogram(histogram, filename, overwrite=False):
    if os.path.exists(filename):
        if overwrite:
            os.remove(filename)
        else:
            raise IOError("%s already exists" % filename)
    #
    from histogram.hdf import dump
    dump(histogram, filename, '/', 'c')
    return

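# Hypothetical usage sketch for saveHistogram above (not from the original
# sources). It builds a small 1-D histogram the same way the tests in this
# collection do, and assumes `os` is imported at module level as saveHistogram
# requires.
def demo_saveHistogram():
    from histogram import histogram
    x = y = list(range(100))
    h = histogram('h', [('x', x)], data=y, errors=y)
    saveHistogram(h, 'demo-h.h5', overwrite=True)
    return
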
def test1(self):
    Itof = H.histogram(
        "itof", [('tof', np.arange(2000, 3000.), 'microsecond')])
    Itof.I[:] = 0
    Itof.I[500] = 10
    dir = 'tmp.bpptest'
    if not os.path.exists(dir):
        os.makedirs(dir)
    hh.dump(Itof, os.path.join(dir, 'itof.h5'))
    print(bpp.computeFWHM(dir))
    return

def test(self):
    from idf2histogram.Omega2 import read
    material = 'fccNi-phonondisp-from-bvk-N20'
    h = read(material)
    out = '%s-omega2.h5' % material
    import os
    if os.path.exists(out):
        os.remove(out)
    import histogram.hdf as hh
    hh.dump(h, out, '/', 'c')
    return

def test_load_slice_and_dump(self):
    'load a histogram and dump a slice of it'
    tmpfile = 'test_load_slice_and_dump.h5'
    if os.path.exists(tmpfile):
        os.remove(tmpfile)
    import histogram.hdf as hh
    h = hh.load('testload.h5', '/h')
    s = h[(10, 30), ()]
    hh.dump(s, tmpfile, '/', 'c')
    return

def _dumpData(self, dir):
    h = self.engine.histogram
    title = self.inventory.title
    h.setAttribute('title', title)
    from histogram.hdf import dump
    f = self._getHistogramFilename()
    import os
    f = os.path.join(dir, f)
    dump(h, f, '/', 'c')
    return

def test2(self):
    from idf2histogram.Polarizations import read
    material = 'fccNi-phonondisp-from-phon-N10'
    h = read(material)
    out = '%s-pols.h5' % material
    import os
    if os.path.exists(out):
        os.remove(out)
    import histogram.hdf as hh
    hh.dump(h, out, '/', 'c')
    return

def test3(self):
    datadir = os.path.join(here, '..', '..', 'data', 'graphite')
    doshist = hh.load(os.path.join(datadir, 'exp_DOS.h5'))
    from mcvine.phonon.powderSQE._calc import multiphononSQE
    IQEhist = multiphononSQE(
        T=300.,             # kelvin
        doshist=doshist,    # DOS histogram
        Q_bins=np.arange(0, 23, 0.1),
        E_bins=np.arange(0, 250, 1),
    )
    hh.dump(IQEhist, 'graphite-multiephonon-T_300.h5')
    return

def test_slice_and_dump(self):
    'slice a histogram and dump the slice'
    tmpfile = 'test_slice_and_dump.h5'
    if os.path.exists(tmpfile):
        os.remove(tmpfile)
    from histogram import histogram
    x = y = list(range(100))
    h = histogram('h', [('x', x)], data=y, errors=y)
    s = h[(3, 10)]
    import histogram.hdf as hh
    hh.dump(s, tmpfile, '/', 'c')
    return

def run(
        ncount=1e7, nodes=5, Ei=700.,
        E_Q="Q*Q/3", S_Q="1", sigma_Q='Q/2.',
        Qmin=0, Qmax=10., Qstep=0.1,
        Emin=0, Emax=50., Estep=1.,
):
    # create scattering kernel file
    createScatteringKernel(
        E_Q=E_Q, S_Q=S_Q, sigma_Q=sigma_Q,
        Qmin=Qmin, Qmax=Qmax,
    )

    # run main sim
    cmd = './sssd --source.energy=%s --ncount=%s --mpirun.nodes=%s' % (
        Ei, ncount, nodes)
    execute(cmd)

    # reduce events to S(Q,E)
    eventsdat = 'out/events.dat'
    Qaxis = Qmin, Qmax, Qstep
    Eaxis = Emin, Emax, Estep
    toffset = 0
    iqe = reduceToIQE(eventsdat, Ei, toffset, Qaxis, Eaxis)
    from histogram.hdf import dump
    dump(iqe, 'iqe.h5', '/', 'c')
    global interactive
    if interactive:
        from histogram.plotter import defaultPlotter
        defaultPlotter.plot(iqe)

    # quick analysis of S(Q,E) (without detector and sample size effects)
    cmd = ['./analyze-sqe ']
    cmd.append('--mpirun.nodes=%s' % nodes)
    cmd.append('--ncount=%s --monitor.Ei=%s' % (ncount, Ei))
    cmd.append(' --monitor.Qmin=%s --monitor.Qmax=%s' % (Qmin, Qmax))
    cmd.append(' --monitor.Emin=%s --monitor.Emax=%s' % (Emin, Emax))
    cmd = ' '.join(cmd)
    # execute(cmd)
    return

def main():
    from sim_params import instrument, tofparams, eventsdat, Idpt_filename as filename
    from mccomponents.detector.reduction_utils import readevents
    events = readevents(eventsdat)
    Idpt = events2Idpt(events, instrument, tofparams)
    # save to file
    import os
    if os.path.exists(filename):
        os.remove(filename)
    import histogram.hdf as hh
    hh.dump(Idpt, filename, '/', 'c')
    return

def getSqeHistogramFromMantidWS(reduced, outfile, qaxis=None, eaxis=None):
    from mantid import simpleapi as msa

    # if eaxis is not specified, use the data in the reduced workspace
    if eaxis is None:
        Edim = reduced.getXDimension()
        emin = Edim.getMinimum()
        emax = Edim.getMaximum()
        de = Edim.getX(1) - Edim.getX(0)
        eaxis = emin, de, emax

    qmin, dq, qmax = qaxis
    nq = int(round((qmax - qmin) / dq))
    emin, de, emax = eaxis
    ne = int(round((emax - emin) / de))
    md = msa.ConvertToMD(
        InputWorkspace=reduced,
        QDimensions='|Q|',
        dEAnalysisMode='Direct',
        MinValues="%s,%s" % (qmin, emin),
        MaxValues="%s,%s" % (qmax, emax),
    )
    binned = msa.BinMD(
        InputWorkspace=md,
        AxisAligned=1,
        AlignedDim0="|Q|,%s,%s,%s" % (qmin, qmax, nq),
        AlignedDim1="DeltaE,%s,%s,%s" % (emin, emax, ne),
    )

    # convert to histogram
    import histogram as H, histogram.hdf as hh
    data = binned.getSignalArray().copy()
    err2 = binned.getErrorSquaredArray().copy()
    nev = binned.getNumEventsArray()
    data /= nev
    err2 /= (nev * nev)
    import numpy as np
    qaxis = H.axis('Q', boundaries=np.arange(qmin, qmax + dq / 2., dq),
                   unit='1./angstrom')
    eaxis = H.axis('E', boundaries=np.arange(emin, emax + de / 2., de),
                   unit='meV')
    hist = H.histogram('IQE', (qaxis, eaxis), data=data, errors=err2)
    if outfile.endswith('.nxs'):
        import warnings
        warnings.warn(
            "reduce function no longer writes iqe.nxs nexus file. "
            "it only writes iqe.h5 histogram file")
        outfile = outfile[:-4] + '.h5'
    hh.dump(hist, outfile)
    return

def test1(self):
    "multiphonon.forward.dos2sqe"
    from dos import loadDOS
    E, g = loadDOS()
    Eaxis = H.axis('E', unit='meV', centers=E)
    doshist = H.histogram('DOS', [Eaxis], g)
    dE = E[1] - E[0]
    iqe = hh.load(os.path.join(datadir, 'V-iqe.h5'))
    from multiphonon.sqe import interp
    newiqe = interp(iqe, newE=np.arange(iqe.energy[0], 80., dE))
    hh.dump(newiqe, 'V-iqe-interped.h5')
    from multiphonon.forward import dos2sqe
    sqe = dos2sqe(doshist, 0.01, newiqe, 300, 50.94, 120.)
    return

def run(
        ncount=1e7, nodes=5, Ei=700.,
        E_Q="Q*Q/3", S_Q="1",
        Qmin=0, Qmax=10., Qstep=0.1,
        Emin=0, Emax=50., Estep=1.,
        mod2sample='../mod2sample',
):
    Ei_user = Ei
    Ei = computeAverageEnergy()
    if abs(Ei - Ei_user) / Ei > 0.1:
        raise ValueError(
            "nominal energy %s is too different from average energy "
            "at sample position %s" % (Ei_user, Ei))
    import os
    incident_neutrons = 'incident-neutrons'
    if not os.path.exists(incident_neutrons):
        os.link('../mod2sample/out/neutrons', 'incident-neutrons')

    # create scattering kernel file
    createScatteringKernel(
        E_Q=E_Q, S_Q=S_Q,
        Qmin=Qmin, Qmax=Qmax,
    )

    # run main sim
    cmd = './sssd --ncount=%s --mpirun.nodes=%s' % (ncount, nodes)
    execute(cmd)

    # reduce events to S(Q,E)
    eventsdat = 'out/events.dat'
    Qaxis = Qmin, Qmax, Qstep
    Eaxis = Emin, Emax, Estep
    Ei, toffset = getEiToffset(mod2sample)
    iqe = reduceToIQE(eventsdat, Ei, toffset, Qaxis, Eaxis)
    from histogram.hdf import dump
    dump(iqe, 'iqe.h5', '/', 'c')
    global interactive
    if interactive:
        from histogram.plotter import defaultPlotter
        defaultPlotter.plot(iqe)

    # quick analysis of S(Q,E) (without detector and sample size effects)
    cmd = ['./analyze-sqe ']
    cmd.append('--mpirun.nodes=%s' % nodes)
    cmd.append('--ncount=%s --monitor.Ei=%s' % (ncount, Ei))
    cmd.append(' --monitor.Qmin=%s --monitor.Qmax=%s' % (Qmin, Qmax))
    cmd.append(' --monitor.Emin=%s --monitor.Emax=%s' % (Emin, Emax))
    cmd = ' '.join(cmd)
    execute(cmd)
    return

def test2a(self):
    "sqe2dos: V exp"
    iqehist = hh.load(os.path.join(datadir, "V-iqe.h5"))
    from multiphonon.sqe import interp
    newiqe = interp(iqehist, newE=np.arange(-15, 80, 1.))
    hh.dump(newiqe, 'V-iqe-interped.h5')
    iterdos = sqe2dos.sqe2dos(
        newiqe, T=300, Ecutoff=55.,
        elastic_E_cutoff=(-12., 6.7), M=50.94,
        C_ms=.2, Ei=120., workdir='work-V')
    with warnings.catch_warnings(record=True) as ws:
        warnings.simplefilter('always')
        for i, dos in enumerate(iterdos):
            # print dos
            # plot
            if interactive:
                # print '*' * 70
                pylab.plot(dos.E, dos.I, label='%d' % i)
            pass
        # check warnings
        for w in ws:
            assert 'Scaling factor' not in str(w)
    path = os.path.join(here, 'expected_results', 'sqe2dos-test2a-final-dos.h5')
    # hh.dump(dos, path)
    expected = hh.load(path)
    self.assertTrue(np.allclose(dos.I, expected.I))
    self.assertTrue(np.allclose(dos.E2, expected.E2))
    if interactive:
        pylab.figure()
        pylab.errorbar(dos.E, dos.I + dos.I.max() / 5., dos.E2**.5, label='new')
        pylab.errorbar(expected.E, expected.I, expected.E2**.5, label='expected')
        pylab.legend()
        pylab.show()
    return

def run(Q=20, E_Q=None, dQ=0.15, dE=60, iqe=None):
    """
    Q: center of Q
    E_Q: E(Q) function
    dQ: (Q-dQ, Q+dQ) is the Q range to sum over
    dE: (-dE, dE) is the E range the result is on
    iqe: I(Q,E) histogram
    """
    Qcenter = Q
    Qarr = iqe.Q
    Earr = iqe.E
    qstep = Qarr[1] - Qarr[0]
    estep = Earr[1] - Earr[0]
    # q values to sum over
    Qs = np.arange(Q - dQ, Q + dQ, qstep)
    # e values of result histogram
    Es = np.arange(-dE, dE, estep)
    # intensities
    intensities = np.zeros(len(Es))
    #
    for Q in Qs:
        # slice
        slice = iqe[Q, ()]
        # center E from E(Q) function
        Ecenter = E_Q(Q)
        # slice in the region of interest
        Emin = Ecenter - dE
        Emax = Ecenter + dE
        subslice = slice[(Emin, Emax)]
        subslice = subslice.I[:len(Es)]
        #
        intensities += subslice
        continue
    xaxis = H.axis('dE', Es, unit='meV')
    h = H.histogram('I(dE)', [xaxis], data=intensities)
    hh.dump(h, 'I_dE-Q=%s.h5' % Qcenter)
    return

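# Hypothetical usage sketch for the Q-cut run() above (not from the original
# sources). It assumes an I(Q,E) histogram file such as the 'iqe.h5' files dumped
# elsewhere in this collection, and uses a made-up dispersion E(Q) only to
# illustrate the expected callable.
def demo_run_qcut():
    import histogram.hdf as hh
    iqe = hh.load('iqe.h5')       # placeholder path
    E_Q = lambda Q: 0.3 * Q * Q   # placeholder dispersion, meV
    run(Q=5., E_Q=E_Q, dQ=0.15, dE=20, iqe=iqe)   # writes I_dE-Q=5.0.h5
    return
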
def testdump0a(self):
    """histogram.hdf: dump with compression"""
    from histogram import histogram, arange
    h = histogram(
        'h',
        [('x', arange(0, 100, 1.)),
         ('y', arange(100, 180, 1.))],
        unit='meter',
    )
    filename = 'test0-compressed.h5'
    import os
    if os.path.exists(filename):
        os.remove(filename)
    dump(h, filename, '/', mode='c', compression=6)
    return

def run(scan=None, poolsize=None, output=None, **volume_opts):
    scan = load_mod(scan)['scan']

    def worker(work_q, H, sa):
        while not work_q.empty():
            f = work_q.get()
            print(f)
            one(f, H, sa, scan, volume_opts)
            continue
        return

    shape, edges = scan.volumeOutputDims(**volume_opts)
    size = shape[0] * shape[1] * shape[2]
    from multiprocessing import Process, Queue, Array
    shared_H = Array('d', size)
    shared_sa = Array('d', size)
    work_q = Queue()
    for path in scan.paths:
        work_q.put(path)
    processes = []
    for w in range(poolsize):
        p = Process(target=worker, args=(work_q, shared_H, shared_sa))
        p.start()
        processes.append(p)
        continue
    for p in processes:
        p.join()
    import histogram
    axes = [
        histogram.axis(volume_opts['x'], boundaries=edges[0]),
        histogram.axis(volume_opts['y'], boundaries=edges[1]),
        histogram.axis(volume_opts['z'], boundaries=edges[2]),
    ]
    H = mparr2nparr(shared_H)
    sa = mparr2nparr(shared_sa)
    H.shape = sa.shape = shape
    h = histogram.histogram('I(%(x)s,%(y)s,%(z)s)' % volume_opts,
                            axes=axes, data=H / sa)
    import histogram.hdf as hh
    hh.dump(h, output)
    return

def testdump_and_load2(self):
    'dump and load in the same process'
    tmpfile = 'test_dump_load2.h5'
    if os.path.exists(tmpfile):
        os.remove(tmpfile)
    from h5py import File
    fs = File(tmpfile, 'w')
    from histogram import histogram
    x = y = list(range(10))
    h = histogram('h', [('x', x)], data=y, errors=y)
    import histogram.hdf as hh
    hh.dump(h, tmpfile, '/', fs=fs)
    h = load(tmpfile, 'h')
    # print(h[1])
    self.assertVectorAlmostEqual(h[1], (1, 1))
    return

def testdump1(self):
    """histogram.hdf: dump with fs specified"""
    filename = 'test1.h5'
    if os.path.exists(filename):
        os.remove(filename)
    from h5py import File
    fs = File(filename, 'w')
    from histogram import histogram, arange
    h = histogram(
        'h',
        [('x', arange(0, 100, 1.)),
         ('y', arange(100, 180, 1.))],
        unit='meter',
    )
    # fs will take over
    dump(h, 'abc', '/', mode='w', fs=fs)
    self.assertTrue(os.path.exists(filename))
    return

def test2a(self):
    datadir = os.path.join(here, '..', '..', 'data', 'graphite')
    doshist = hh.load(os.path.join(datadir, 'exp_DOS.h5'))
    from mcvine.phonon.powderSQE.use_phonopy import from_FORCE_CONSTANTS
    N = int(1e5)
    IQEhist = from_FORCE_CONSTANTS(
        datadir,
        Ei=30.,            # meV
        T=300.,            # kelvin
        doshist=doshist,   # DOS histogram
        supercell=(6, 6, 1),
        Q_bins=np.arange(0, 4, 0.04), E_bins=np.arange(0, 30, .2),
        workdir='_tmp.test2a',
        N=N,
        include_multiphonon=False,
        max_det_angle=60.,
    )
    hh.dump(IQEhist, 'graphite-single-phonon-Ei_30-T_300.h5')  # save for inspection
    expectedIQEhist = hh.load(
        os.path.join(
            here, 'saved_results/graphite-single-phonon-Ei_30-T_300-N_3e6.h5'))
    expected = expectedIQEhist.I
    # scale it to sth that is easy to get "errorbar"
    N = 3e6
    scale = N / np.nansum(expected)
    expected *= scale
    max = np.nanmax(expected)
    this = IQEhist.I
    this *= scale
    if plot:
        plt.figure(figsize=(6, 3))
        plt.subplot(1, 2, 1)
        mpsqe.plot(IQEhist)
        plt.clim(0, max / 50)
        plt.subplot(1, 2, 2)
        mpsqe.plot(expectedIQEhist)
        plt.clim(0, max / 50)
        plt.tight_layout()
        plt.show()
    return

def test2b(self):
    iqehist = hh.load(os.path.join(datadir, "Al-iqe.h5"))
    from multiphonon.sqe import interp
    newiqe = interp(iqehist, newE=np.arange(-40, 70, 1.0))
    hh.dump(newiqe, "Al-iqe-interped.h5")
    iterdos = sqe2dos.sqe2dos(
        newiqe, T=300, Ecutoff=50.0,
        elastic_E_cutoff=(-10.0, 7), M=26.98,
        C_ms=0.2, Ei=80.0, workdir="work-Al",
    )
    for i, dos in enumerate(iterdos):
        # print dos
        # plot
        if interactive:
            # print '*' * 70
            pylab.plot(dos.E, dos.I, label="%d" % i)
    if interactive:
        pylab.legend()
        pylab.show()
    return

def getDOS(sample_nxs, mt_nxs=None, mt_fraction=0.9,
           Emin=-100, Emax=100, dE=1.,
           Qmin=0, Qmax=15., dQ=0.1,
           T=300, Ecutoff=50.,
           elastic_E_cutoff=(-20., 7), M=50.94,
           C_ms=0.3, Ei=116.446, workdir='work',
           iqe_nxs="iqe.nxs", iqe_h5="iqe.h5"):
    # prepare paths
    if not os.path.exists(workdir):
        os.makedirs(workdir)
    if not os.path.isabs(iqe_nxs):
        iqe_nxs = os.path.abspath(os.path.join(workdir, iqe_nxs))
    if not os.path.isabs(iqe_h5):
        iqe_h5 = os.path.abspath(os.path.join(workdir, iqe_h5))
    # reduce
    Eaxis = Emin, Emax, dE
    Qaxis = Qmin, Qmax, dQ
    raw2iqe(sample_nxs, iqe_nxs, iqe_h5, Eaxis, Qaxis)
    iqehist = hh.load(iqe_h5)
    if mt_nxs is not None:
        _tomtpath = lambda p: os.path.join(
            os.path.dirname(p), 'mt-' + os.path.basename(p))
        mtiqe_nxs = _tomtpath(iqe_nxs)
        mtiqe_h5 = _tomtpath(iqe_h5)
        raw2iqe(mt_nxs, mtiqe_nxs, mtiqe_h5, Eaxis, Qaxis)
        iqehist -= hh.load(mtiqe_h5) * (mt_fraction, 0)
    # to DOS
    # interpolate data
    from .sqe import interp
    # probably don't need this line
    newiqe = interp(iqehist, newE=np.arange(Emin, Emax, dE))
    # save interpolated data
    hh.dump(newiqe, 'iqe-interped.h5')
    # create processing engine
    from .backward import sqe2dos
    print("iterative computation of DOS...")
    iterdos = sqe2dos.sqe2dos(
        newiqe, T=T, Ecutoff=Ecutoff,
        elastic_E_cutoff=elastic_E_cutoff, M=M,
        C_ms=C_ms, Ei=Ei, workdir='work')
    doslist = list(iterdos)
    print("done.")
    return doslist

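# Hypothetical usage sketch for getDOS above (not from the original sources). The
# nexus file names are placeholders and the keyword values simply echo the
# defaults. Each element of the returned list is the DOS histogram from one
# iteration of the sqe2dos loop.
def demo_getDOS():
    doslist = getDOS('sample.nxs', mt_nxs='mt.nxs', mt_fraction=0.9,
                     Ei=116.446, T=300, workdir='work')
    import histogram.hdf as hh
    hh.dump(doslist[-1], 'final-dos.h5')  # keep the last iteration
    return
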
def _fixEaxis(iqe_h5_path, Eaxis):
    """when iqe is obtained from a nxs or nxspe file where the tof axis has
    already been converted to E, the reduced data may not have the desired
    E axis. this method fixes that by interpolation
    """
    h = hh.load(iqe_h5_path)
    eaxis = h.axes()[1]
    centers = eaxis.binCenters()
    emin, emax, de = Eaxis
    centers1 = np.arange(emin, emax, de)
    if centers.size == centers1.size and np.allclose(centers, centers1):
        return
    # save a copy of the original histogram
    import shutil
    shutil.copyfile(iqe_h5_path, iqe_h5_path + '.bkup-wrongEaxis')
    from .sqe import interp
    h1 = interp(h, centers1)
    hh.dump(h1, iqe_h5_path)
    return

def extract_iqe(mantid_nxs, histogram):
    "extract iqe from a mantid-saved h5 file and save to a histogram"
    import h5py, numpy as np
    inpath, outpath = mantid_nxs, histogram
    f = h5py.File(inpath)
    w = f['mantid_workspace_1']['workspace']
    e = np.array(w['axis1'])
    de = e[1] - e[0]
    ee = (e + de / 2.)[:-1]
    q = np.array(w['axis2'])
    dq = q[1] - q[0]
    qq = (q + dq / 2.)[:-1]
    I = np.array(np.array(w['values']))
    # I[I!=I] = 0
    E2 = np.array(np.array(w['errors'])**2)
    import histogram as H
    iqe = H.histogram(
        'iqe',
        [('Q', qq, 'angstrom**-1'),
         ('energy', ee, 'meV')],
        data=I, errors=E2)
    import histogram.hdf as hh
    hh.dump(iqe, outpath)
    return

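# Hypothetical usage sketch for extract_iqe above (not from the original sources);
# the file names are placeholders for a Mantid-saved nexus file and the desired
# output. The round trip is checked by loading the dumped histogram back.
def demo_extract_iqe():
    extract_iqe('reduced-mantid.nxs', 'iqe-from-mantid.h5')
    import histogram.hdf as hh
    iqe = hh.load('iqe-from-mantid.h5')
    print(iqe.Q.shape, iqe.energy.shape, iqe.I.shape)
    return
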
def run(monitordatafilename, h5filename, tof_params=None, nevents=None):
    print("monitordatafilename = %s" % monitordatafilename)
    print("nevents = %s" % nevents)
    print("output h5filename = %s" % h5filename)
    print('tof_params (unit: us) = %s' % (tof_params,))
    if os.path.exists(h5filename):
        raise IOError("%s already exists" % h5filename)
    from arcseventdata.monitorData import readHistogram
    h = readHistogram(monitordatafilename)
    from histogram.hdf import dump
    dump(h, h5filename, '/', 'c')
    return

def process(path):
    basename, ext = os.path.splitext(os.path.basename(path))
    from nslice.Run import Run
    run = Run(path)
    print("instrument=%s, Ei=%s, psi=%s" % (run.instrument, run.Ei, run.psi))
    from nslice.XtalOrientation import XtalOrientation
    a = b = 8.87
    c = 5.2
    from math import pi
    twopi = 2 * pi
    ra, rb, rc = [twopi / a, 0, 0], [0, twopi / b, 0], [0, 0, twopi / c]
    u, v = [1, 0, 0], [0, 1, 0]
    xtal_ori = XtalOrientation(ra, rb, rc, u, v, run.psi)
    h, k, l, E = run.compute_hklE(xtal_ori)
    I, error = run.read_data()
    h.shape = k.shape = l.shape = E.shape = I.shape = error.shape = -1
    hklEIE = np.vstack((h, k, l, E, I, error))
    from nslice.slice import slice
    H, edges = slice(
        hklEIE,
        x='h', y='k', u='l', v='E',
        E=[-2, 2], l=[-5, 5],
        h=(-5, 5, 0.02), k=(-5, 5, 0.02),
    )
    import histogram, histogram.hdf as hh
    axes = [
        histogram.axis('h', boundaries=edges[0]),
        histogram.axis('k', boundaries=edges[1]),
    ]
    h = histogram.histogram('I(h,k)', axes=axes, data=H)
    hh.dump(h, '%s-I_hk.h5' % basename)
    return

def test(self):
    datadir = self.datadir
    doshist = hh.load(os.path.join(datadir, 'exp_DOS.h5'))
    import mcvine.phonon.powderSQE.IDF as psidf
    disp = psidf.disp_from_datadir(datadir)
    IQEhist, mphhist = psidf.from_data_dir(
        datadir=datadir, disp=disp,
        N=int(1e6),
        Q_bins=np.arange(0, 23, 0.1), E_bins=np.arange(0, 250, 1.),
        doshist=doshist,
        T=300., Ei=300., max_det_angle=140.,
        include_multiphonon=True,
    )
    IQEhist += mphhist
    hh.dump(IQEhist, 'graphite-allphonon-Ei_300-T_300-IDF.h5')
    expected = hh.load(
        os.path.join(
            here, 'saved_results/graphite-allphonon-Ei_300-T_300-IDF.h5'))
    max = np.nanmax(expected.I)
    reldiff = IQEhist - expected
    reldiff.I /= max
    reldiff.E2 /= max * max
    Nbigdiff = (np.abs(reldiff.I) > 0.03).sum()
    Ngood = (IQEhist.I == IQEhist.I).sum()
    Ntotal = IQEhist.size()
    self.assertTrue(Ngood * 1. / Ntotal > .65)
    self.assertTrue(Nbigdiff * 1. / Ngood < .10)
    if plot:
        plt.figure(figsize=(6, 3))
        max = np.nanmax(IQEhist.I)
        median = np.nanmedian(IQEhist.I[IQEhist.I > 0])
        mpsqe.plot(IQEhist)
        plt.clim(0, median * 3)
        # plt.subplot(1,2,1); mpsqe.plot(IQEhist); plt.clim(0, max/50)
        # plt.subplot(1,2,2); mpsqe.plot(expectedIQEhist); plt.clim(0, max/50)
        plt.show()
    return

def run(pixelPositionsFilename, h5filename,
        npacks=115, ndetsperpack=8, npixelsperdet=128):
    print("pixel-positions-filename=%s" % pixelPositionsFilename)
    print("output h5filename = %s" % h5filename)
    if os.path.exists(h5filename):
        raise IOError("%s already exists" % h5filename)
    s = open(pixelPositionsFilename).read()
    from numpy import fromstring
    positions = fromstring(s, 'd')
    import arcseventdata
    phi_p, psi_p = arcseventdata.pixelpositions2angles(
        positions, npacks, ndetsperpack, npixelsperdet)
    from histogram.hdf import dump
    dump(phi_p, h5filename, '/', 'c')
    dump(psi_p, h5filename, '/', 'w')
    return

def computeDirtyDOS(sqe, dos, M, T, workdir):
    """dirty dos calculation is a procedure that quickly "corrects" sqe
    using the one-phonon Q multiplier. After correction, the sqe should look
    mostly Q-independent, and the sum over the Q axis gives a very rough
    estimate of the DOS. This is mostly for double-checking the calculations.
    """
    if not os.path.exists(workdir):
        os.makedirs(workdir)
    from ..forward.phonon import computeSNQ, DWExp, kelvin2mev, gamma0
    beta = 1. / (T * kelvin2mev)
    E = dos.E; Q = sqe.Q; g = dos.I
    dE = E[1] - E[0]
    DW2 = DWExp(Q, M, E, g, beta, dE)
    sq = computeSNQ(DW2, 1)
    sqe1 = sqe.copy()
    sqe1.I /= sq[:, np.newaxis]
    sqe1.E2 /= sq[:, np.newaxis] * sq[:, np.newaxis]
    hh.dump(sqe1, os.path.join(workdir, 'corrected-sqe.h5'))
    # compute a sum to obtain S(E)
    Qdiff = Q[-1] - Q[0]
    # take the middle part. 1/6 is kind of arbitrary
    se1 = sqe1[(Q[0] + Qdiff / 6., Q[-1] - Qdiff / 6.), (E[0], None)].sum('Q')
    hh.dump(se1, os.path.join(workdir, 'se.h5'))
    assert np.allclose(se1.E, E)
    #
    g0 = gamma0(E, g, beta, dE)
    fE = (1 - np.exp(-se1.E * beta)) * se1.E * g0
    ddos = se1.copy()
    ddos.I *= fE
    ddos.E2 *= fE * fE
    hh.dump(ddos, os.path.join(workdir, 'ddos.h5'))
    return
