def main():
    # get list of nuclides
    data.atomic_mass('U235')
    nucs = set(data.atomic_mass_map.keys())
    for nuc in data.atomic_mass_map:
        nucm = nuc + 1
        if nucname.anum(nuc) == 0 or data.decay_const(nucm) < 1e-16 or \
           data.decay_const(nuc) == data.decay_const(nucm):
            continue
        nucs.add(nucm)
    nucs = [nuc for nuc in nucs if nucname.anum(nuc) > 0]
    # and not np.isnan(data.decay_const(nuc)) and
    # nuc < 200000000]
    nucs.sort()
    # get symbols
    symbols = {}
    channels = {}
    for nuc in nucs:
        add_child_decays(nuc, symbols)
        add_child_xss(nuc, channels, nucs)
    # print symbols
    ns = []
    for nuc in nucs:
        try:
            nname = nucname.name(nuc)
        except RuntimeError:
            continue
        ns.append(nname)
    nucs = ns
    d = {'symbols': symbols, 'nucs': nucs, 'channels': channels}
    s = json.dumps(d, indent=4, sort_keys=True)
    print(s)
def _build_matrix(N):
    """This function builds burnup matrix, A. Decay only."""
    A = np.zeros((len(N), len(N)))
    # convert N to id form
    N_id = []
    for i in range(len(N)):
        if isinstance(N[i], str):
            ID = nucname.id(N[i])
        else:
            ID = N[i]
        N_id.append(ID)
    sds = SimpleDataSource()
    # Decay
    for i in range(len(N)):
        A[i, i] -= decay_const(N_id[i])
        # Find decay parents
        for k in range(len(N)):
            if N_id[i] in decay_children(N_id[k]):
                A[i, k] += branch_ratio(N_id[k], N_id[i]) * decay_const(N_id[k])
    return A
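A minimal sketch of how a matrix like this can be used: build A for a short Sr-90 decay chain and advance an initial composition with the matrix exponential. The chain, the time span, and the imports (which mirror the module-level names the snippet assumes) are illustrative assumptions, not part of the original script.

import numpy as np
from scipy.linalg import expm
from pyne import nucname
from pyne.data import decay_const, decay_children, branch_ratio
from pyne.xs.data_source import SimpleDataSource  # needed by _build_matrix

# hypothetical chain: Sr-90 -> Y-90 -> Zr-90 (stable)
chain = ['Sr90', 'Y90', 'Zr90']
A = _build_matrix(chain)

N0 = np.array([1.0, 0.0, 0.0])   # start with pure Sr-90
t = 10 * 3.156e7                 # roughly 10 years, in seconds
Nt = expm(A * t).dot(N0)         # N(t) = exp(A t) N0
for nuc, n in zip(chain, Nt):
    print(nuc, n)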
def decayheat(c):
    """Lists decay heats at 0, 10, 100, 1000, 10000 years summed over all
    nuclides and facilities.

    Args:
        c: connection cursor to sqlite database.
    """
    # Conversion of time from months to seconds
    MCONV = 3.16e7 / 12
    # Conversion of years to seconds
    YCONV = 3.16e7
    # Retrieve list of decay heats of each nuclide
    alldecayheats = query(c)

    dict_decayheat = {}
    sim_time = alldecayheats[-1][0]
    # Get only one nuclide per entry by adding decay heats
    for time_step, nuc, dh in alldecayheats:
        sec = (sim_time - time_step) * MCONV
        # decay the heat to the end of the simulation: q(t) = q0 * exp(-lambda*t)
        q_i = dh * math.exp(-sec * data.decay_const(nuc))
        if nuc in dict_decayheat:
            dict_decayheat[nuc] += q_i
        else:
            dict_decayheat[nuc] = q_i
    # Put back into list of tuples & sort by nuclide
    decayheat = sorted(dict_decayheat.items())
    # calculate decay heats of each nuc 0, 10, 100, 1000, 10000 yrs after sim
    decayheats = []
    for nuc, dh0 in decayheat:
        t = 10
        dhs = (dh0,)
        while t <= 10000:
            sec = t * YCONV
            dh = dh0 * math.exp(-sec * data.decay_const(nuc))
            dhs += (dh,)
            t = 10 * t
        row = (nuc,) + tuple(dhs)
        decayheats.append(row)
    # Write to csv file
    fname = 'decayheat.csv'
    with open(fname, 'w') as out:
        csv_out = csv.writer(out)
        csv_out.writerow(['nuclide',
                          'decay heat at 0 yrs [MW]',
                          'decay heat at 10 yrs [MW]',
                          'decay heat at 100 yrs [MW]',
                          'decay heat at 1000 yrs [MW]',
                          'decay heat at 10000 yrs [MW]'])
        for row in decayheats:
            csv_out.writerow(row)
    print('file saved as ' + fname + '!')
def activity(c):
    """Lists activities of all nuclides at 10, 100, 1000, 10000 yrs from the
    end of the simulation.

    Args:
        c: connection cursor to sqlite database.
    """
    activities = query(c)
    # Conversion of time from months to seconds
    MCONV = 3.16e7 / 12
    # Conversion of years to seconds
    YCONV = 3.16e7

    dict_acts = {}
    sim_time = activities[-1][0]
    # Get only one nuclide per entry, add activities @ end of sim
    for time_step, nuc, mass, act in activities:
        sec = (sim_time - time_step) * MCONV
        acts = act * math.exp(-sec * data.decay_const(nuc))
        if nuc in dict_acts:
            dict_acts[nuc] += acts
        else:
            dict_acts[nuc] = acts
    # Put back into list of tuples & sort by nuclide
    act_endsim = sorted(dict_acts.items())
    # calculate activities 10, 100, 1000, 10000 yrs later
    acts = []
    for nuc, act0 in act_endsim:
        t = 10
        nuc_acts = (act0,)
        while t <= 10000:
            sec = t * YCONV
            nuc_act = act0 * math.exp(-sec * data.decay_const(nuc))
            nuc_acts += (nuc_act,)
            t = 10 * t
        row = (nuc,) + tuple(nuc_acts)
        acts.append(row)
    # Write to csv file
    fname = 'activity.csv'
    with open(fname, 'w') as out:
        csv_out = csv.writer(out)
        csv_out.writerow(['nuclide',
                          'act at 0 yrs [Bq]',
                          'act at 10 yrs [Bq]',
                          'act at 100 yrs [Bq]',
                          'act at 1000 yrs [Bq]',
                          'act at 10000 yrs [Bq]'])
        for row in acts:
            csv_out.writerow(row)
    print('file saved as ' + fname + '!')
def get_activity_Ci(isodict=moldict, valtype="mol"):
    activity_Ci = {}
    ci_dec_per_sec = 3.7E10  # conversion factor: 1 curie = 3.7E10 decays/sec
    if valtype == "mol":
        for iso, mols in isodict.items():
            dec_per_sec = mols * constants.N_A * data.decay_const(nucname.id(iso))
            activity_Ci[nucname.name(iso)] = dec_per_sec / ci_dec_per_sec
    elif valtype == "mass":
        for iso, mass in isodict.items():
            # atoms = mass [g] / molar mass [g/mol] * N_A (assumes mass given in grams)
            dec_per_sec = (mass / data.atomic_mass(nucname.id(iso))
                           * constants.N_A * data.decay_const(nucname.id(iso)))
            activity_Ci[nucname.name(iso)] = dec_per_sec / ci_dec_per_sec
    sorted_a = sorted(activity_Ci.items(), key=operator.itemgetter(0))
    print(sorted_a)
    return activity_Ci
def query(c):
    """Lists activities of all nuclides with respect to time for all
    facilities.

    Args:
        c: connection cursor to sqlite database.
    """
    # SQL query returns a table with the nuclides and their masses at each timestep
    sql = ("SELECT resources.TimeCreated, compositions.NucId,"
           "compositions.MassFrac*resources.Quantity ")
    sql += ("FROM resources "
            "INNER JOIN compositions ON resources.QualId = compositions.QualId "
            "GROUP BY resources.TimeCreated, compositions.NucId "
            "ORDER BY resources.TimeCreated;")
    cur = c.execute(sql)
    results = cur.fetchall()
    # Gives avogadro's number with a kg to g conversion
    CONV = 1000 * 6.022e23
    activities = []
    # Calculates activities (/s) of each nuclide at each timestep
    for time_step, nuc, mass in results:
        act = CONV * mass * data.decay_const(nuc) / data.atomic_mass(nuc)
        row = (time_step, nuc, mass, act)
        activities.append(row)
    return activities
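The cursor these query helpers expect comes straight from Python's sqlite3 module. A small driver sketch, assuming a Cyclus output database named cyclus.sqlite (the filename is hypothetical):

import sqlite3

conn = sqlite3.connect('cyclus.sqlite')   # hypothetical Cyclus output file
c = conn.cursor()
rows = query(c)                           # (time_step, nuc, mass, act) tuples
for time_step, nuc, mass, act in rows[:5]:
    print(time_step, nuc, mass, act)
conn.close()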
def query(c):
    """Lists decay heats of all nuclides with respect to time for all
    facilities.

    Args:
        c: connection cursor to sqlite database.
    """
    # gives avogadro's number with a kg to g conversion
    ACT_CONV = 1000 * 6.022e23
    # converts from MeV/s to MW
    Q_CONV = 1.602e-19
    # SQL query returns a table with the nuclides (and their masses) transacted from reactor
    sql = ("SELECT resources.TimeCreated, compositions.NucId,"
           "compositions.MassFrac*resources.Quantity ")
    sql += ("FROM resources "
            "INNER JOIN compositions ON resources.QualId = compositions.QualId "
            "INNER JOIN transactions ON resources.TimeCreated = transactions.Time "
            "WHERE transactions.SenderId=13 "
            "GROUP BY resources.TimeCreated, compositions.NucId "
            "ORDER BY resources.TimeCreated;")
    cur = c.execute(sql)
    results = cur.fetchall()

    alldecayheats = []
    # Calculates decay heat (MW) at each timestep
    for time_step, nuc, mass in results:
        act = ACT_CONV * mass * data.decay_const(nuc) / data.atomic_mass(nuc)
        dh = Q_CONV * act * data.q_val(nuc)
        row = (time_step, nuc, dh)
        alldecayheats.append(row)
    return alldecayheats
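The activity-times-Q-value recipe above can be checked by hand for a single nuclide with pyne.data; the 1 kg of Cs-137 used here is just an illustrative input, not something the original script computes.

from pyne import data

mass = 1.0  # kg of Cs-137, illustrative
act = 1000 * 6.022e23 * mass * data.decay_const('Cs137') / data.atomic_mass('Cs137')
dh = 1.602e-19 * act * data.q_val('Cs137')  # MeV/s -> MW
print('activity [Bq]:', act)
print('decay heat [MW]:', dh)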
def query(c):
    """Lists activities of all nuclides with respect to time for all
    facilities.

    Args:
        c: connection cursor to sqlite database.
    """
    # SQL query returns a table with the nuclides and their masses at each timestep
    sql = ("SELECT Resources.TimeCreated, Compositions.NucID,"
           "Compositions.MassFrac*Resources.Quantity ")
    sql += ("FROM Resources "
            "INNER JOIN Compositions ON Resources.StateID = Compositions.StateID "
            "GROUP BY Resources.TimeCreated, Compositions.NucID "
            "ORDER BY Resources.TimeCreated;")
    cur = c.execute(sql)
    results = cur.fetchall()
    # Gives avogadro's number with a kg to g conversion
    CONV = 1000 * 6.022e23
    activities = []
    # Calculates activities (/s) of each nuclide at each timestep
    for time_step, nuc, mass in results:
        act = CONV * mass * data.decay_const(nuc) / data.atomic_mass(nuc)
        row = (time_step, nuc, act)
        activities.append(row)
    return activities
def test_tm171_decay():
    "Tests if decay is properly implemented"
    t_sim = 1.2119E+8  # Run for 3.843 years (approx 2 half lives)
    lamb = data.decay_const('TM171')
    exp = np.exp(-1 * lamb * t_sim)
    inp = Material({'TM171': 1.0}, mass=1.0)
    obs = tm.transmute(inp, t=t_sim, phi=0.0, tol=1e-7)
    assert_equal(exp, obs['TM171'])
def load_default_nucs():
    with tb.open_file(nuc_data) as f:
        ll = f.root.decay.level_list
        stable = ll.read_where('(nuc_id%10000 == 0) & (nuc_id != 0)')
        metastable = ll.read_where('metastable > 0')
        nucs = set(int(nuc) for nuc in stable['nuc_id'])
        nucs |= set(int(nuc) for nuc in metastable['nuc_id'])
    nucs = sorted(nuc for nuc in nucs if not np.isnan(decay_const(nuc, False)))
    return nucs
def genchains(chains, sf=False):
    chain = chains[-1]
    children = all_children(chain[-1])
    # filters spontaneous fission
    if not sf:
        children = {c for c in children
                    if (0.0 == fpyield(chain[-1], c)) and (c not in chain)}
    if decay_const(chain[-1]) != 0:
        for child in children:
            if child not in chain:
                chains.append(chain + (child,))
                chains = genchains(chains, sf=sf)
    return chains
def genchains(chains, sf=False):
    chain = chains[-1]
    children = decay_children(chain[-1])
    # filters spontaneous fission
    if not sf:
        children = {c for c in children
                    if (0.0 == fpyield(chain[-1], c)) and (c not in chain)}
    if decay_const(chain[-1]) != 0:
        for child in children:
            if child not in chain:
                chains.append(chain + (child,))
                chains = genchains(chains, sf=sf)
    return chains
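A small way to exercise genchains, assuming the pyne.data helpers it relies on (decay_children, decay_const, fpyield) are imported at module level as in the generator script; the Rn-220 starting nuclide is just an illustrative choice.

from pyne import nucname
from pyne.data import decay_children, decay_const, fpyield

# enumerate decay chains (and subchains) starting from Rn-220
chains = genchains([(nucname.id('Rn220'),)])
for chain in chains:
    print(' -> '.join(nucname.name(n) for n in chain))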
def _get_destruction(self, nuc, decay=True):
    """Computes the destruction rate of the nuclide.

    Parameters
    ----------
    nuc : int
        Name of the nuclide in question.
    decay : bool
        True if the decay constant should be added to the returned value.
        False if only destruction from neutron reactions should be considered.

    Returns
    -------
    d : float
        Destruction rate of the nuclide.
    """
    xscache = self.xscache
    sig_a = sigma_a(nuc, xs_cache=xscache)
    d = utils.from_barns(sig_a[0], 'cm2') * xscache['phi_g'][0]
    if decay and not np.isnan(data.decay_const(nuc)):
        d += data.decay_const(nuc)
    return d
def calc_decay_depletion(isotopes, DATA):
    # Create dictionary to keep track of daughters
    daughter_flags = dict()
    for iso in isotopes:
        daughter_flags[iso] = False
    dec_depletion = np.zeros([len(isotopes) + 2, len(isotopes) + 2])
    # Decay from j to i
    for i, iso_i in enumerate(isotopes):
        for j, iso_j in enumerate(isotopes):
            # Off-diagonal terms
            if j != i:
                ratio = data.branch_ratio(iso_j, iso_i)
                if ratio > 0:
                    decay_const = data.decay_const(iso_j)
                    dec_depletion[i, j] -= ratio * decay_const
                    daughter_flags[iso_j] = True
            # Diagonal terms: add the decay constant here
            else:
                dec_depletion[i, j] += data.decay_const(iso_j)
    # Route decay of nuclides with no tracked daughter into the sink row
    for i, iso in enumerate(isotopes):
        if not daughter_flags[iso] and dec_depletion[i, i] != 0:
            print("Warning: no decay daughter for ", iso)
            dec_depletion[-2, i] += -dec_depletion[i, i]
    return dec_depletion
def _build_matrix(N):
    """This function builds burnup matrix, A. Decay only."""
    A = np.zeros((len(N), len(N)))
    # convert N to id form (here `id` is presumably pyne.nucname.id imported
    # at module level, not the Python builtin)
    N_id = []
    for i in range(len(N)):
        ID = id(N[i])
        N_id.append(ID)
    sds = SimpleDataSource()
    # Decay
    for i in range(len(N)):
        A[i, i] -= decay_const(N_id[i])
        # Find decay parents
        for k in range(len(N)):
            if N_id[i] in decay_children(N_id[k]):
                A[k, i] += decay_const(N_id[k])
    return A
def activity(series):
    """Activity metric returns the instantaneous activity of a nuclide
    in a material (material mass * decay constant / atomic mass)
    indexed by the SimId, QualId, ResourceId, ObjId, TimeCreated, and NucId.
    """
    tools.raise_no_pyne('Activity could not be computed', HAVE_PYNE)
    mass = series[0]
    act = []
    for (simid, qual, res, obj, time, nuc), m in mass.iteritems():
        val = (1000 * data.N_A * m * data.decay_const(nuc)
               / data.atomic_mass(nuc))
        act.append(val)
    act = pd.Series(act, index=mass.index)
    act.name = 'Activity'
    rtn = act.reset_index()
    return rtn
def add_child_decays(nuc, symbols):
    try:
        childname = nucname.name(nuc)
    except RuntimeError:
        return
    for rx in DECAY_RXS:
        try:
            parent = rxname.parent(nuc, rx, b'decay')
        except RuntimeError:
            continue
        if data.branch_ratio(parent, nuc) < 1e-16:
            continue
        parname = nucname.name(parent)
        symbols['lambda_' + parname] = data.decay_const(parent)
        gamma = 'gamma_{0}_{1}_{2}'.format(parname, childname, rx)
        symbols[gamma] = data.branch_ratio(parent, nuc)
def k_a(chain, short=1e-8):
    # gather data
    hl = np.array([half_life(n, False) for n in chain])
    a = -1.0 / hl
    dc = np.array(list(map(lambda nuc: decay_const(nuc, False), chain)))
    if np.isnan(dc).any():
        # NaNs are bad, mmmkay. Nones mean we should skip
        return None, None
    ends_stable = (dc[-1] < 1e-16)  # check if last nuclide is a stable species
    # compute cij -> ci in prep for k
    cij = dc[:, np.newaxis] / (dc[:, np.newaxis] - dc)
    if ends_stable:
        cij[-1] = -1.0 / dc  # adjustment for stable end nuclide
    mask = np.ones(len(chain), dtype=bool)
    cij[mask, mask] = 1.0  # identity is ignored, set to unity
    ci = cij.prod(axis=0)
    # compute k
    if ends_stable:
        k = dc * ci
        k[-1] = 1.0
    else:
        k = (dc / dc[-1]) * ci
    if np.isinf(k).any():
        # if this happens then something went very wrong, skip
        return None, None
    # compute and apply branch ratios
    gamma = np.prod([all_branch_ratio(p, c)
                     for p, c in zip(chain[:-1], chain[1:])])
    if gamma == 0.0 or np.isnan(gamma):
        return None, None
    k *= gamma
    # half-life filter, makes compiling faster by pre-ignoring negligible species
    # in this chain. They'll still be picked up in their own chains.
    if ends_stable:
        mask = (hl[:-1] / hl[:-1].sum()) > short
        mask = np.append(mask, True)
    else:
        mask = (hl / hl.sum()) > short
    if mask.sum() < 2:
        mask = np.ones(len(chain), dtype=bool)
    return k[mask], a[mask]
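The (k, a) pairs encode a Bateman-style solution: with a_i = -1/t_half,i, the atom fraction of the chain-end nuclide (relative to one initial atom of the chain head) at time t is the sum of k_i * 2^(a_i * t). A quick numerical check of that reading, assuming the module-level helpers used by k_a (half_life, decay_const, all_branch_ratio) are available as in the generator script, and using a hypothetical two-step chain:

import numpy as np
from pyne import nucname

chain = (nucname.id('Sr90'), nucname.id('Y90'))  # Sr-90 -> Y-90, illustrative
k, a = k_a(chain)
if k is not None:
    t = 3.156e8  # roughly 10 years, in seconds
    # fraction of the original Sr-90 atoms now present as Y-90
    frac = float((k * np.exp2(a * t)).sum())
    print(frac)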
def enddecayheat(c):
    """Lists decay heat at end of simulation.

    Args:
        c: connection cursor to sqlite database.
    """
    # Conversion of time from months to seconds
    CONV = 3.16e7 / 12
    # Retrieve list of decay heats of each nuclide
    alldecayheats = query(c)

    end_heat = 0
    sim_time = alldecayheats[-1][0]
    # Sum decayed heats for each time-step/nuclide
    for time_step, nuc, dh in alldecayheats:
        t = (sim_time - time_step) * CONV
        # decay the heat to the end of the simulation: q(t) = q0 * exp(-lambda*t)
        q_i = dh * math.exp(-t * data.decay_const(nuc))
        end_heat += q_i
    return end_heat
def inventories_activity(evaler, facilities=(), nucs=()):
    """Get a simple time series of the activity of the inventory in the
    selected facilities, applying the nuclide selection when required.

    Parameters
    ----------
    evaler : evaler
    facilities : list of facilities to select
    nucs : list of nuclides to select
    """
    if len(nucs) != 0:
        nucs = format_nucs(nucs)
    df = inventories(evaler, facilities, nucs)
    for i, row in df.iterrows():
        val = 1000 * data.N_A * row['Quantity'] * \
            data.decay_const(int(row['NucId']))
        df.set_value(i, 'Activity', val)
    return df
def gencase(nuc, idx, b, short=1e-8, sf=False):
    case = ['}} case {0}: {{'.format(nuc)]
    dc = decay_const(nuc, False)
    if dc == 0.0:
        # stable nuclide
        case.append(CHAIN_STMT.format(idx[nuc], 'it->second'))
    else:
        chains = genchains([(nuc,)], sf=sf)
        print(len(chains), len(set(chains)), nuc)
        cse = {}  # common sub-expression exponents to eliminate
        bt = 0
        for c in chains:
            if c[-1] not in idx:
                continue
            cexpr, b, bt = chainexpr(c, cse, b, bt, short=short)
            if cexpr is None:
                continue
            case.append(CHAIN_STMT.format(idx[c[-1]], cexpr))
        bstmts = [' ' + B_STMT.format(exp=exp, b=bval)
                  for exp, bval in sorted(cse.items(), key=lambda x: x[1])]
        case = case[:1] + bstmts + case[1:]
    case.append(BREAK)
    return case, b
def activity(c):
    """Lists activities of all nuclides at a given time (in years) from the
    end of the sim.

    Args:
        c: connection cursor to sqlite database.
    """
    activities = query(c)
    # Conversion of time from months to seconds
    MCONV = 3.16e7 / 12
    # Conversion of years to seconds
    YCONV = 3.16e7

    dict_acts = {}
    t = float(input("Enter a time in years: "))
    sim_time = activities[-1][0]
    time = sim_time * MCONV + t * YCONV
    # Get only one nuclide per entry, add activities
    for time_step, nuc, act in activities:
        sec = time - time_step * MCONV
        act = act * math.exp(-sec * data.decay_const(nuc))
        if nuc in dict_acts:
            dict_acts[nuc] += act
        else:
            dict_acts[nuc] = act
    # Put back into list of tuples & sort by nuclide
    acts = sorted(dict_acts.items())
    return acts
from pyne import data, nucname
import numpy as np

print(data.decay_const('U-235'))
print(data.decay_const('922350'))
print(data.decay_const('922350000'))
print(data.branch_ratio('932390000', '942390000', use_metastable=False))
print(data.decay_children('932390000'))
print(data.decay_const('942420000'))
print(data.decay_children('942420000'))
print(np.log(2) / data.decay_const('922340000') / 3.15e7)

print('-------Np-239 to Pu-239 test--------')
print(data.decay_const('932390'))
print(data.decay_children('932390'))
print(data.decay_const('932390'))
print(data.decay_children('932390'))
print(data.branch_ratio(932390, 942390))

print('-------U-240 decay test--------')
print(np.log(2) / data.decay_const('922400') / 3600)
print(data.branch_ratio('922400', '932400', use_metastable=False))

print('-----U234 Capture Test-----')
print(float('922350') - float('922340') == 10)

print('-----Mass Test-----')
print(nucname.anum('922350'))

print('-----Name Test-----')
def get_decay_constant(self):
    return OrderedDict(
        (nuc_name, data.decay_const(nuc_id))
        for nuc_id, nuc_name in zip(self.get_all_children(),
                                    self.get_all_children_nuc_name()))
def test_decay_const():
    assert_equal(data.decay_const('H1'), 0.0)
    assert_equal(data.decay_const(922351), np.log(2.0) / 1560.0)
def test_decay_const():
    assert_equal(data.decay_const('H1'), 0.0)
    assert_equal(data.decay_const(922350001), np.log(2.0) / 1560.0)
            else:
                term = "0"
        else:
            b = ensure_cse(a_i, b, cse)
            term = kbexpr(k_i, b_from_a(cse, a_i))
        # multiply by t if needed
        if t_term_i:
            term += "*t"
        terms.append(term)
    terms = " + ".join(terms)
    return CHAIN_EXPR.format(terms), b, bt


def gencase(nuc, idx, b, short=1e-16, small=1e-16, sf=False, debug=False):
    case = ["}} case {0}: {{".format(nuc)]
    dc = decay_const(nuc, False)
    if dc == 0.0:
        # stable nuclide
        case.append(CHAIN_STMT.format(idx[nuc], "it->second"))
    else:
        chains = genchains([(nuc,)], sf=sf)
        print("{} has {} chains".format(nucname.name(nuc), len(set(chains))))
        cse = {}  # common sub-expression exponents to eliminate
        bt = 0
        for c in chains:
            if c[-1] not in idx:
                continue
            cexpr, b, bt = chainexpr(c, cse, b, bt, short=short, small=small)
            if cexpr is None:
                continue
            if debug:
def rel_activity(c):
    """Lists activity of spent fuel from all facilities relative to natural U
    0, 10, 100, 1000, 10000 years after the end of the simulation.

    Args:
        c: connection cursor to sqlite database.
    """
    activities = activity.query(c)
    # Conversion of time from months to seconds
    MCONV = 3.16e7 / 12
    # Conversion of years to seconds
    YCONV = 3.16e7

    dict_acts = {}
    tot_mass = 0.0
    sim_time = activities[-1][0]
    # Get list of one activity per nuclide wrt end of sim & sum total mass
    for time_step, nuc, mass, act in activities:
        tot_mass += mass
        sec = (sim_time - time_step) * MCONV
        acts = act * math.exp(-sec * data.decay_const(nuc))
        if nuc in dict_acts:
            dict_acts[nuc] += acts
        else:
            dict_acts[nuc] = acts
    # Put back into list of tuples & sort by nuclide
    act_endsim = sorted(dict_acts.items())
    # calculate natural uranium activity
    CONV_235 = 0.007 * 1000 * 6.022e23
    CONV_238 = 0.993 * 1000 * 6.022e23
    actU235 = CONV_235 * tot_mass * data.decay_const('U235') / data.atomic_mass('U235')
    actU238 = CONV_238 * tot_mass * data.decay_const('U238') / data.atomic_mass('U238')
    act_U = actU235 + actU238
    # calculate relative activities to nat U after 0, 10, 100, 1000, 10000 yrs
    rel_acts = []
    for nuc, act0 in act_endsim:
        t = 10
        nuc_acts = (act0,)
        while t <= 10000:
            sec = t * YCONV
            nuc_act = act0 * math.exp(-sec * data.decay_const(nuc))
            nuc_acts += (nuc_act,)
            t = 10 * t
        rel = []
        for i in nuc_acts:
            frac = i / act_U
            rel.append(frac)
        row = (nuc,) + tuple(rel)
        rel_acts.append(row)
    # Write to csv file
    fname = 'relative_activity.csv'
    with open(fname, 'w') as out:
        csv_out = csv.writer(out)
        csv_out.writerow(['nuclide',
                          'rel_act at 0 yrs',
                          'rel_act at 10 yrs',
                          'rel_act at 100 yrs',
                          'rel_act at 1000 yrs',
                          'rel_act at 10000 yrs'])
        for row in rel_acts:
            csv_out.writerow(row)
    print('file saved as ' + fname + '!')
def _traversal(self, nuc, A, out, depth=0):
    """Nuclide transmutation traversal method.

    This method will traverse the reaction tree recursively, using a DFS
    algorithm. On termination, the method will return all number densities
    after a given time that are a result of the starting nuclide.

    Parameters
    ----------
    nuc : int
        ID of the active nuclide for the traversal.
    A : NumPy 2-dimensional array
        Current state of the coupled equation matrix.
    out : dict
        A dictionary containing the final recorded number densities for each
        nuclide. Keys are nuclide names in integer id form. Values are number
        densities for the coupled nuclide in float format. This is modified
        in place.
    depth : int
        Current depth of traversal (root at 0). Should never be provided by
        user.
    """
    t = self.t
    tol = self.tol
    phi = self.xscache['phi_g'][0]
    temp = self.temp
    xscache = self.xscache
    if self.log is not None:
        self._log_tree(depth, nuc, 1.0)
    prod = {}
    # decay info
    lam = data.decay_const(nuc)
    decay_branches = {} if lam == 0 else self._decay_branches(nuc)
    for decay_child, branch_ratio in decay_branches.items():
        prod[decay_child] = lam * branch_ratio
    # reaction daughters
    for rx in self.rxs:
        try:
            child = rxname.child(nuc, rx)
        except RuntimeError:
            continue
        child_xs = xscache[nuc, rx, temp][0]
        rr = utils.from_barns(child_xs, 'cm2') * phi  # reaction rate
        prod[child] = rr + prod.get(child, 0.0)
    # Cycle production dictionary
    for child in prod:
        # Grow matrix
        d = self._get_destruction(child)
        B = self._grow_matrix(A, prod[child], d)
        # Create initial density vector
        n = B.shape[0]
        N0 = np.zeros((n, 1), dtype=float)
        N0[0] = 1.0
        # Compute matrix exponential and dot with density vector
        eB = linalg.expm(B * t)
        N_final = np.dot(eB, N0)  # <-- DENSE
        # N_final = eB.dot(N0)    # <-- SPARSE
        if self.log is not None:
            self._log_tree(depth + 1, child, N_final[-1])
        # Check against tolerance and continue traversal
        if N_final[-1] > tol:
            self._traversal(child, B, out, depth=depth + 1)
        # On recursion exit or truncation, write data from this nuclide
        outval = N_final[-1, 0] + out.get(child, 0.0)
        if 0.0 < outval:
            out[child] = outval
def _create_decay_matrix(nucs):
    nnucs = len(nucs)
    nucsrange = np.arange(nnucs)
    A = np.zeros((nnucs, nnucs), dtype=float)
    A[nucsrange, nucsrange] = [-data.decay_const(nuc) for nuc in nucs]
    return A
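Because this matrix is purely diagonal (no daughter in-growth), exp(A t) simply applies exp(-lambda t) to each nuclide independently. A minimal check, with an illustrative nuclide list and time span:

import numpy as np
from scipy.linalg import expm
from pyne import data, nucname

nucs = [nucname.id(n) for n in ('H3', 'Co60', 'Cs137')]  # illustrative
A = _create_decay_matrix(nucs)

t = 3.156e7                       # roughly one year, in seconds
N0 = np.ones(len(nucs))           # one unit of each nuclide
Nt = expm(A * t).dot(N0)
analytic = np.exp(-np.array([data.decay_const(n) for n in nucs]) * t)
print(np.allclose(Nt, analytic))  # the two agree for a diagonal matrix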