def calculate_transition_probabilities(self):
    """
    Updating the Macro Atom computations.

    Scales the raw macro-atom transition probabilities from the atomic
    data by the Sobolev escape probabilities (beta_sobolev) and, for
    upward transitions (``transition_type == 1``), additionally by the
    blue-wing mean intensity (``j_blues``) and the stimulated-emission
    factor of the parent line.  The result is normalized per
    macro-atom block.

    Returns
    -------
    transition_probabilities : array-like
        Normalized transition probabilities, one entry per row of
        ``self.atom_data.macro_atom_data``.
    """
    macro_atom_data = self.atom_data.macro_atom_data
    # Map each macro-atom transition onto the tau_sobolev of its line.
    lines_idx = macro_atom_data['lines_idx'].values
    macro_tau_sobolevs = self.tau_sobolevs[lines_idx.astype(int)]

    beta_sobolevs = np.zeros_like(macro_tau_sobolevs)
    macro_atom.calculate_beta_sobolev(macro_tau_sobolevs, beta_sobolevs)

    transition_probabilities = (
        macro_atom_data['transition_probability'] * beta_sobolevs)

    # Upward transitions also depend on the local radiation field and on
    # stimulated emission.  Evaluate the boolean mask and the filtered
    # line indices once instead of three times.
    transition_up_mask = (macro_atom_data['transition_type'] == 1).__array__()
    up_lines_idx = lines_idx[transition_up_mask]
    j_blues = self.j_blues[up_lines_idx]
    macro_stimulated_emission = self.stimulated_emission_factor[up_lines_idx]
    transition_probabilities[transition_up_mask] *= (
        j_blues * macro_stimulated_emission)

    # Normalizing the probabilities: append the end-of-data index so the
    # last macro-atom block has an upper bound.
    # TODO speedup possibility: store block references with 0 and the
    # final index already included.
    block_references = np.hstack(
        (self.atom_data.macro_atom_references['block_references'],
         len(macro_atom_data)))
    macro_atom.normalize_transition_probabilities(
        transition_probabilities, block_references)

    return transition_probabilities
def calculate_transition_probabilities(self):
    """
    Updating the Macro Atom computations.

    Recomputes the macro-atom transition probabilities from the current
    Sobolev optical depths, radiation-field estimators and stimulated
    emission factors, then normalizes them within each macro-atom block.
    """
    ma_data = self.atom_data.macro_atom_data
    line_indices = ma_data['lines_idx'].values

    # Escape probabilities for the lines backing each transition.
    tau_for_macro = self.tau_sobolevs[line_indices.astype(int)]
    escape_probs = np.zeros_like(tau_for_macro)
    macro_atom.calculate_beta_sobolev(tau_for_macro, escape_probs)

    probabilities = ma_data['transition_probability'] * escape_probs

    # Upward (type 1) transitions get scaled by j_blue and the
    # stimulated-emission factor of their parent line.
    up_mask = (ma_data['transition_type'] == 1).__array__()
    up_j_blues = self.j_blues[line_indices[up_mask]]
    up_stim_emission = self.stimulated_emission_factor[line_indices[up_mask]]
    probabilities[up_mask] *= up_j_blues * up_stim_emission

    # Normalizing the probabilities
    # TODO speedup possibility save the new blockreferences with 0 and last block
    block_refs = np.hstack(
        (self.atom_data.macro_atom_references['block_references'],
         len(ma_data)))
    macro_atom.normalize_transition_probabilities(probabilities, block_refs)

    return probabilities
def calculate_nlte_level_populations(self):
    """
    Calculating the NLTE level populations for specific ions.

    For every species in ``self.nlte_species`` the radiative
    (A_ul, B_ul, B_lu) and collisional rates are assembled into a rate
    matrix (columns = source level); the diagonal balances the total
    outflow of each level and the first row is replaced by the
    normalization constraint.  Solving the resulting linear system
    yields relative level populations, which are scaled by the ion
    population and written back into ``self.level_populations``.
    """
    # The beta_sobolevs array is cached on the instance so it is only
    # allocated once; its values are refreshed on every call.
    if not hasattr(self, 'beta_sobolevs'):
        self.beta_sobolevs = np.zeros_like(self.atom_data.lines['nu'].values)
    macro_atom.calculate_beta_sobolev(self.tau_sobolevs, self.beta_sobolevs)

    if self.nlte_options.get('coronal_approximation', False):
        # Coronal approximation: fully escaping lines, no radiation field.
        beta_sobolevs = np.ones_like(self.beta_sobolevs)
        j_blues = np.zeros_like(self.j_blues)
    else:
        beta_sobolevs = self.beta_sobolevs
        j_blues = self.j_blues

    if self.nlte_options.get('classical_nebular', False):
        # Route the debug message through the module logger instead of a
        # bare Python-2 print, matching the logging used below.
        logger.info('setting classical nebular = True')
        # NOTE(review): outside the coronal approximation this writes
        # through to the cached self.beta_sobolevs -- confirm intended.
        beta_sobolevs[:] = 1.0

    for species in self.nlte_species:
        logger.info('Calculating rates for species %s', species)
        number_of_levels = self.level_populations.ix[species].size
        level_populations = self.level_populations.ix[species].values

        nlte_data = self.atom_data.nlte_data
        lnl = nlte_data.lines_level_number_lower[species]
        lnu = nlte_data.lines_level_number_upper[species]
        lines_index = nlte_data.lines_idx[species]
        A_uls = nlte_data.A_uls[species]
        B_uls = nlte_data.B_uls[species]
        B_lus = nlte_data.B_lus[species]

        # Flat indices into the (levels x levels) rate matrices:
        # r_lu_index -> element [lnu, lnl], r_ul_index -> element [lnl, lnu].
        r_lu_index = lnu * number_of_levels + lnl
        r_ul_index = lnl * number_of_levels + lnu

        # Radiative de-excitation: A_ul attenuated by the escape probability.
        r_ul_matrix = np.zeros((number_of_levels, number_of_levels),
                               dtype=np.float64)
        r_ul_matrix.ravel()[r_ul_index] = A_uls
        r_ul_matrix.ravel()[r_ul_index] *= beta_sobolevs[lines_index]

        # Stimulated-emission correction 1 - (n_u B_ul)/(n_l B_lu),
        # clipped at zero so excitation rates never go negative.
        stimulated_emission_matrix = np.zeros_like(r_ul_matrix)
        stimulated_emission_matrix.ravel()[r_lu_index] = 1 - (
            (level_populations[lnu] * B_uls) /
            (level_populations[lnl] * B_lus))
        stimulated_emission_matrix[stimulated_emission_matrix < 0.] = 0.0

        # Radiative excitation driven by the local radiation field.
        r_lu_matrix = np.zeros_like(r_ul_matrix)
        r_lu_matrix.ravel()[r_lu_index] = (
            B_lus * j_blues[lines_index] * beta_sobolevs[lines_index])
        r_lu_matrix *= stimulated_emission_matrix

        collision_matrix = (nlte_data.get_collision_matrix(
            species, self.t_electron) * self.electron_density)

        rates_matrix = r_lu_matrix + r_ul_matrix + collision_matrix

        # Each diagonal element balances the total outflow of its level.
        for i in xrange(number_of_levels):
            rates_matrix[i, i] = -np.sum(rates_matrix[:, i])

        # Replace the (linearly dependent) first equation with the
        # normalization constraint: relative populations sum to one.
        rates_matrix[0] = 1.0
        x = np.zeros(rates_matrix.shape[0])
        x[0] = 1.0

        relative_level_populations = np.linalg.solve(rates_matrix, x)
        self.level_populations.ix[species] = (
            relative_level_populations * self.ion_populations.ix[species])

    return
def calculate_nlte_level_populations(self): """ Calculating the NLTE level populations for specific ions """ if not hasattr(self, 'beta_sobolevs'): self.beta_sobolevs = np.zeros_like(self.atom_data.lines['nu'].values) macro_atom.calculate_beta_sobolev(self.tau_sobolevs, self.beta_sobolevs) if self.nlte_options.get('coronal_approximation', False): beta_sobolevs = np.ones_like(self.beta_sobolevs) j_blues = np.zeros_like(self.j_blues) else: beta_sobolevs = self.beta_sobolevs j_blues = self.j_blues if self.nlte_options.get('classical_nebular', False): print "setting classical nebular = True" beta_sobolevs[:] = 1.0 for species in self.nlte_species: logger.info('Calculating rates for species %s', species) number_of_levels = self.level_populations.ix[species].size level_populations = self.level_populations.ix[species].values lnl = self.atom_data.nlte_data.lines_level_number_lower[species] lnu = self.atom_data.nlte_data.lines_level_number_upper[species] lines_index = self.atom_data.nlte_data.lines_idx[species] A_uls = self.atom_data.nlte_data.A_uls[species] B_uls = self.atom_data.nlte_data.B_uls[species] B_lus = self.atom_data.nlte_data.B_lus[species] r_lu_index = lnu * number_of_levels + lnl r_ul_index = lnl * number_of_levels + lnu r_ul_matrix = np.zeros((number_of_levels, number_of_levels), dtype=np.float64) r_ul_matrix.ravel()[r_ul_index] = A_uls r_ul_matrix.ravel()[r_ul_index] *= beta_sobolevs[lines_index] stimulated_emission_matrix = np.zeros_like(r_ul_matrix) stimulated_emission_matrix.ravel()[r_lu_index] = 1 - ((level_populations[lnu] * B_uls) / ( level_populations[lnl] * B_lus)) stimulated_emission_matrix[stimulated_emission_matrix < 0.] 
= 0.0 r_lu_matrix = np.zeros_like(r_ul_matrix) r_lu_matrix.ravel()[r_lu_index] = B_lus * j_blues[lines_index] * beta_sobolevs[lines_index] r_lu_matrix *= stimulated_emission_matrix collision_matrix = self.atom_data.nlte_data.get_collision_matrix(species, self.t_electron) * self.electron_density rates_matrix = r_lu_matrix + r_ul_matrix + collision_matrix for i in xrange(number_of_levels): rates_matrix[i, i] = -np.sum(rates_matrix[:, i]) rates_matrix[0] = 1.0 x = np.zeros(rates_matrix.shape[0]) x[0] = 1.0 relative_level_populations = np.linalg.solve(rates_matrix, x) self.level_populations.ix[species] = relative_level_populations * self.ion_populations.ix[species] return