Example #1
    def test2a(self):
        'normalize: large buffer'
        import mcni
        import os
        # 'mns' is assumed to be mcni.neutron_storage, imported at module level
        # in the original test module (e.g. "from mcni import neutron_storage as mns")
        # create a dummy input: 3 million neutrons with probability (the last
        # column of the numpy representation) set to 1
        neutrons = mcni.neutron_buffer(int(3e6))
        narr = neutrons.to_npyarr()
        narr[:, -1] = 1
        neutrons.from_npyarr(narr)
        out = 'tmp-nst-test2a.ns'
        mns.dump(neutrons, out)
        del neutrons

        # try normalize out-of-place
        out2 = 'tmp-nst-test2a-normalized.ns'
        if os.path.exists(out2): os.remove(out2)
        mns.normalize(out, 10., out2)
        neutrons2 = mns.load(out2)
        # and see if it is done correctly
        narr = neutrons2.to_npyarr()
        self.assertTrue((narr[:, -1] == .1).all())
        del neutrons2, narr

        # try normalize in-place
        mns.normalize(out, 10.)
        neutrons2 = mns.load(out)
        # and see if it is done correctly
        narr = neutrons2.to_npyarr()
        self.assertTrue((narr[:, -1] == .1).all())
        return
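As the assertions show, normalize(out, 10.) divides every stored probability by the given factor: the probabilities set to 1 come back as 0.1. This test goes through the numpy representation (to_npyarr/from_npyarr), in which the last column of the array holds each neutron's probability, which is why the check reads narr[:, -1].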
Example #2
    def test2(self):
        'normalize'
        import mcni
        import os
        # 'mns' is assumed to be mcni.neutron_storage, imported at module level
        # create a dummy input: 10 neutrons, each with probability 1
        neutrons = mcni.neutron_buffer(10)
        for n in neutrons:
            n.probability = 1
            continue
        out = 'tmp-nst-test2.ns'
        mns.dump(neutrons, out)

        # try normalize out-of-place
        out2 = 'tmp-nst-test2-normalized.ns'
        if os.path.exists(out2): os.remove(out2)
        mns.normalize(out, 10., out2)
        neutrons2 = mns.load(out2)
        # and see if it is done correctly
        for n in neutrons2:
            self.assertAlmostEqual(n.probability, .1)
            continue

        # try normalize in-place
        mns.normalize(out, 10.)
        neutrons2 = mns.load(out)
        # and see if it is done correctly
        for n in neutrons2:
            self.assertAlmostEqual(n.probability, .1)
            continue
        return
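The two tests above exercise the same dump/normalize/load round trip, once through the numpy array path and once per neutron. The sketch below pulls that flow out of the unittest harness; it assumes the 'mns' alias stands for mcni.neutron_storage and that dump, load and normalize are exposed there, as the tests suggest.

import os
import mcni
from mcni import neutron_storage as mns   # assumption: this is the 'mns' used above

# a small buffer of neutrons, all with probability (last column) 1
neutrons = mcni.neutron_buffer(100)
arr = neutrons.to_npyarr()
arr[:, -1] = 1.
neutrons.from_npyarr(arr)

# write the buffer to a neutron storage file
path = 'demo-neutrons.ns'
if os.path.exists(path): os.remove(path)
mns.dump(neutrons, path)

# out-of-place normalization: divide every probability by 100
normalized = 'demo-neutrons-normalized.ns'
if os.path.exists(normalized): os.remove(normalized)
mns.normalize(path, 100., normalized)

# read the result back and check: 1/100 = 0.01
arr2 = mns.load(normalized).to_npyarr()
assert (arr2[:, -1] == 0.01).all()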
Example #5
def merge_and_normalize(filename, outputs_dir, overwrite_datafiles=True):
    """merge_and_normalize('scattered-neutrons', 'out')
    """
    # find all output files
    from mcni.components.outputs import n_mcsamples_files, mcs_sum
    import glob, os

    pattern = os.path.join(outputs_dir, "*", filename)
    nsfiles = glob.glob(pattern)
    n_mcsamples = n_mcsamples_files(outputs_dir)
    assert len(nsfiles) == n_mcsamples, "number of neutron storage files (%s) does not match number of mc-sample files (%s)" % (
        len(nsfiles),
        n_mcsamples,
    )
    if not nsfiles:
        return None, None

    # output
    out = os.path.join(outputs_dir, filename)
    if overwrite_datafiles:
        if os.path.exists(out):
            os.remove(out)
    # merge
    from mcni.neutron_storage import merge

    merge(nsfiles, out)

    # total number of neutron events in the merged neutron file
    from mcni.neutron_storage.idf_usenumpy import count

    nevts = count(out)

    # load number_of_mc_samples
    mcs = mcs_sum(outputs_dir)

    # normalization factor: normalize() below divides every stored probability by this value
    nfactor = mcs / nevts

    # normalize
    from mcni.neutron_storage import normalize

    normalize(out, nfactor)
    return
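For reference, a usage sketch of the helper above, matching its docstring. The directory layout is an assumption inferred from the glob pattern: each (parallel) run writes its own copy of the neutron file into a subdirectory of the outputs directory, next to the mc-samples bookkeeping files that n_mcsamples_files and mcs_sum read.

# hypothetical layout (subdirectory names are illustrative only):
#   out/rank0/scattered-neutrons
#   out/rank1/scattered-neutrons
#   ...
# merges the per-run files into out/scattered-neutrons and divides every
# stored probability by (total MC samples) / (number of stored events)
merge_and_normalize('scattered-neutrons', 'out')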
Example #6
    def _merge_and_normalize(self):
        # XXX: should rewrite using mcni.neutron_storage.merge_and_normalize
        outdir = self.simulation_context.outputdir

        # find all output files
        from mcni.components.outputs import n_mcsamples_files, mcs_sum
        import glob, os
        filename = self.path
        pattern = os.path.join(outdir, '*', filename)
        nsfiles = glob.glob(pattern)
        n_mcsamples = n_mcsamples_files(outdir)
        assert len(nsfiles) == n_mcsamples, \
            "number of neutron storage files (%s) does not match number of mc-sample files (%s)" % (
            len(nsfiles), n_mcsamples)
        if not nsfiles:
            return None, None

        # output
        out = os.path.join(outdir, self.path)
        if self.overwrite_datafiles:
            if os.path.exists(out):
                os.remove(out)
        # merge
        from mcni.neutron_storage import merge
        merge(nsfiles, out)

        # total number of neutron events in the merged neutron file
        from mcni.neutron_storage.idf_usenumpy import count
        nevts = count(out)

        # load number_of_mc_samples
        mcs = mcs_sum(outdir)

        # normalization factor: normalize() below divides every stored probability by this value
        nfactor = mcs / nevts

        # normalize
        from mcni.neutron_storage import normalize
        normalize(out, nfactor)
        return
Example #8
def merge_and_normalize(filename, outputs_dir, overwrite_datafiles=True):
    """merge_and_normalize('scattered-neutrons', 'out')
    """
    # find all output files
    from mcni.components.outputs import n_mcsamples_files, mcs_sum
    import glob, os
    pattern = os.path.join(outputs_dir, '*', filename)
    nsfiles = glob.glob(pattern)
    n_mcsamples = n_mcsamples_files(outputs_dir)
    assert len(nsfiles) == n_mcsamples, \
        "number of neutron storage files (%s) does not match number of mc-sample files (%s)" % (
        len(nsfiles), n_mcsamples)
    if not nsfiles:
        return None, None

    # output
    out = os.path.join(outputs_dir, filename)
    if overwrite_datafiles:
        if os.path.exists(out):
            os.remove(out)
    # merge
    from mcni.neutron_storage import merge
    merge(nsfiles, out)

    # total number of neutron events in the merged neutron file
    from mcni.neutron_storage.idf_usenumpy import count
    nevts = count(out)

    # load number_of_mc_samples
    mcs = mcs_sum(outputs_dir)

    # normalization factor: normalize() below divides every stored probability by this value
    nfactor = mcs / nevts

    # normalize
    from mcni.neutron_storage import normalize
    normalize(out, nfactor)
    return
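A quick check on the factor: if mcs_sum reports 1e8 Monte Carlo samples and count(out) finds 2e6 stored events, nfactor = 50, and normalize(out, 50.) divides every stored probability by 50; this is the same behavior the tests at the top exercise, where normalize(out, 10.) turned a probability of 1 into 0.1.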