def merge_and_normalize(filename, outputs_dir, overwrite_datafiles=True):
    """Merge per-node neutron storage files and normalize the merged file.

    Example: merge_and_normalize('scattered-neutrons', 'out')

    Parameters:
      filename: name of the neutron storage file written in each
        per-node subdirectory of outputs_dir.
      outputs_dir: directory holding one subdirectory per compute node.
      overwrite_datafiles: if True, remove a pre-existing merged file
        before merging.

    Returns (None, None) when no storage files were found; otherwise None.
    Raises AssertionError if the number of storage files does not match
    the number of mc-sample files, and RuntimeError if the merged file
    contains no events (normalization would divide by zero).
    """
    # find all output files
    from mcni.components.outputs import n_mcsamples_files, mcs_sum
    import glob, os
    pattern = os.path.join(outputs_dir, "*", filename)
    nsfiles = glob.glob(pattern)
    n_mcsamples = n_mcsamples_files(outputs_dir)
    assert len(nsfiles) == n_mcsamples, (
        "neutron storage files %s does not match #mcsample files %s" % (
            len(nsfiles), n_mcsamples,
        )
    )
    if not nsfiles:
        return None, None

    # output path; optionally clear a stale merged file
    out = os.path.join(outputs_dir, filename)
    if overwrite_datafiles and os.path.exists(out):
        os.remove(out)

    # merge all per-node files into one
    from mcni.neutron_storage import merge
    merge(nsfiles, out)

    # number of neutron events totally in the merged neutron file
    from mcni.neutron_storage.idf_usenumpy import count
    nevts = count(out)
    if not nevts:
        # guard: an empty merged file would raise ZeroDivisionError below
        raise RuntimeError("no neutron events in %s; cannot normalize" % out)

    # load number_of_mc_samples
    mcs = mcs_sum(outputs_dir)

    # normalization factor. this is a bit tricky!!!
    # force true division so integer counts do not truncate the factor
    nfactor = 1.0 * mcs / nevts

    # normalize
    from mcni.neutron_storage import normalize
    normalize(out, nfactor)
    return
def _merge_and_normalize(self):
    """Merge this component's per-node neutron storage files and
    normalize the merged file by the total monte-carlo sample count.

    Delegates to mcni.neutron_storage.merge_and_normalize, which
    implements the logic this method previously inlined (the original
    XXX note asked for exactly this rewrite).  Returns whatever the
    delegate returns: (None, None) when no storage files were found,
    None otherwise — matching the previous inline behavior.
    """
    from mcni.neutron_storage import merge_and_normalize
    return merge_and_normalize(
        self.path,                              # per-node storage filename
        self.simulation_context.outputdir,      # root of per-node output dirs
        self.overwrite_datafiles,
    )
def _merge_and_normalize(self):
    """Merge this component's per-node neutron storage files and
    normalize the merged file by the total monte-carlo sample count.

    Returns (None, None) when no storage files were found; otherwise None.
    Raises RuntimeError if the merged file contains no events, since
    normalization would otherwise divide by zero.
    """
    # XXX: should rewrite using mcni.neutron_storage.merge_and_normalize
    outdir = self.simulation_context.outputdir
    # find all output files
    from mcni.components.outputs import n_mcsamples_files, mcs_sum
    import glob, os
    filename = self.path
    pattern = os.path.join(outdir, '*', filename)
    nsfiles = glob.glob(pattern)
    n_mcsamples = n_mcsamples_files(outdir)
    assert len(nsfiles) == n_mcsamples, \
        "neutron storage files %s does not match #mcsample files %s" % (
            len(nsfiles), n_mcsamples)
    if not nsfiles:
        return None, None

    # output path; optionally clear a stale merged file
    out = os.path.join(outdir, self.path)
    if self.overwrite_datafiles and os.path.exists(out):
        os.remove(out)

    # merge all per-node files into one
    from mcni.neutron_storage import merge
    merge(nsfiles, out)

    # number of neutron events totally in the merged neutron file
    from mcni.neutron_storage.idf_usenumpy import count
    nevts = count(out)
    if not nevts:
        # guard: an empty merged file would raise ZeroDivisionError below
        raise RuntimeError("no neutron events in %s; cannot normalize" % out)

    # load number_of_mc_samples
    mcs = mcs_sum(outdir)

    # normalization factor. this is a bit tricky!!!
    # force true division so integer counts do not truncate the factor
    nfactor = 1.0 * mcs / nevts

    # normalize
    from mcni.neutron_storage import normalize
    normalize(out, nfactor)
    return
def test(self):
    'neutron_storage.merge'
    old_path = 'test-merge-old'
    new_path = 'test-merge-new'
    newpacketsize = 10  # NOTE(review): unused here; kept for parity with original
    # start from a clean slate: drop leftovers from previous runs
    for leftover in (old_path, new_path):
        if os.path.exists(leftover):
            os.remove(leftover)
    from mcni.neutron_storage import storage, merge
    import mcni
    # build a 7-neutron buffer with one distinctive entry at index 5
    buf = mcni.neutron_buffer(7)
    buf[5] = mcni.neutron(v=(8, 9, 10))
    # write the buffer three times into the old storage
    writer = storage(old_path, 'w')
    for _ in range(3):
        writer.write(buf)
    # important!!! deleting the storage object flushes it to disk
    del writer
    # merge the single old storage into the new one
    merge([old_path], new_path)
    # read the merged storage back and verify contents
    reader = storage(new_path, 'r')
    merged = reader.read()
    self.assertEqual(len(merged), 7 * 3)
    self.assertAlmostEqual(merged[5].state.velocity[0], 8)
    return
def merge_and_normalize(filename, outputs_dir, overwrite_datafiles=True):
    """Merge per-node neutron storage files and normalize the merged file.

    Example: merge_and_normalize('scattered-neutrons', 'out')

    Parameters:
      filename: name of the neutron storage file written in each
        per-node subdirectory of outputs_dir.
      outputs_dir: directory holding one subdirectory per compute node.
      overwrite_datafiles: if True, remove a pre-existing merged file
        before merging.

    Returns (None, None) when no storage files were found; otherwise None.
    Raises AssertionError if the number of storage files does not match
    the number of mc-sample files, and RuntimeError if the merged file
    contains no events (normalization would divide by zero).
    """
    # find all output files
    from mcni.components.outputs import n_mcsamples_files, mcs_sum
    import glob, os
    pattern = os.path.join(outputs_dir, '*', filename)
    nsfiles = glob.glob(pattern)
    n_mcsamples = n_mcsamples_files(outputs_dir)
    assert len(nsfiles) == n_mcsamples, \
        "neutron storage files %s does not match #mcsample files %s" % (
            len(nsfiles), n_mcsamples)
    if not nsfiles:
        return None, None

    # output path; optionally clear a stale merged file
    out = os.path.join(outputs_dir, filename)
    if overwrite_datafiles and os.path.exists(out):
        os.remove(out)

    # merge all per-node files into one
    from mcni.neutron_storage import merge
    merge(nsfiles, out)

    # number of neutron events totally in the merged neutron file
    from mcni.neutron_storage.idf_usenumpy import count
    nevts = count(out)
    if not nevts:
        # guard: an empty merged file would raise ZeroDivisionError below
        raise RuntimeError("no neutron events in %s; cannot normalize" % out)

    # load number_of_mc_samples
    mcs = mcs_sum(outputs_dir)

    # normalization factor. this is a bit tricky!!!
    # force true division so integer counts do not truncate the factor
    nfactor = 1.0 * mcs / nevts

    # normalize
    from mcni.neutron_storage import normalize
    normalize(out, nfactor)
    return
def merge(*args, **kwds):
    """Lazily import and forward to the real merge implementation.

    The import is deferred to call time so the submodule is only loaded
    when merging is actually requested.  All arguments pass through
    unchanged; the implementation's return value is now forwarded
    (the original wrapper silently discarded it).
    """
    from .merge import merge as _merge
    return _merge(*args, **kwds)
def merge(*args, **kwds):
    """Thin facade: lazily load the merge submodule and invoke its
    merge() with the given arguments."""
    from .merge import merge as _impl
    _impl(*args, **kwds)
    return