def treat_cycles(self):
    """Find and treat cycles in a TSD diagram.

    Returns:
        (list): The unique tree TSDs associated to a non-tree TSD.

    """
    graphs = [self.graph]
    tree_graphs = []
    cycles_left = True
    while cycles_left:
        # Disentangle one cycle in each graph and discard the parent graph
        for gr_index, graph in reversed_enumerate(graphs):
            graphs += disentangle_cycle(graph, find_cycle(graph))
            del graphs[gr_index]
        cycles_left = False
        # Set aside the graphs that are now trees, keep looping on the others
        for graph_indx, graph in reversed_enumerate(graphs):
            if nx.is_arborescence(graph):
                tree_graphs.append(graph)
                del graphs[graph_indx]
            else:
                cycles_left = True
    # Remove duplicates among the recovered tree TSDs
    tree_graphs_uniq = []
    for t_graph in tree_graphs:
        for t_graph_uniq in tree_graphs_uniq:
            if nx.edges(t_graph) == nx.edges(t_graph_uniq):
                break
        # If the TSD is a new one
        else:
            tree_graphs_uniq.append(t_graph)
    return tree_graphs_uniq

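
# A minimal usage sketch of the nx.is_arborescence() check on which
# treat_cycles() relies to decide whether any cycle is left. The toy graphs
# and the helper name below are illustrative only, not actual TSDs.
def _sketch_arborescence_check():
    """Show when a directed graph counts as a tree (arborescence)."""
    import networkx as nx
    # A rooted tree: every node but the root has exactly one incoming edge
    tree = nx.DiGraph([(0, 1), (0, 2)])
    assert nx.is_arborescence(tree)
    # Giving node 2 a second parent creates a cycle in the TSD sense
    non_tree = nx.DiGraph([(0, 1), (0, 2), (1, 2)])
    assert not nx.is_arborescence(non_tree)
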
def order_diagrams(diagrams):
    """Order the MBPT diagrams and return the number of diags for each type.

    Args:
        diagrams (list): The unordered MbptDiagrams.

    Returns:
        (tuple): First element are the ordered MbptDiagrams. Second element
            is the number of diagrams for each excitation level type. Third
            element is a dict with, for each excitation level, the tag of the
            first diagram of that type (-1 if there is none).

    """
    singles = []
    doubles = []
    triples = []
    quadruples = []
    quintuples_and_higher = []

    # Bin the diagrams by excitation level
    for i_diag, diag in reversed_enumerate(diagrams):
        if diag.excitation_level == 1:
            singles.append(diag)
        elif diag.excitation_level == 2:
            doubles.append(diag)
        elif diag.excitation_level == 3:
            triples.append(diag)
        elif diag.excitation_level == 4:
            quadruples.append(diag)
        elif diag.excitation_level >= 5:
            quintuples_and_higher.append(diag)
        else:
            print("Zero or negative excitation level!\n")
            exit()
        del diagrams[i_diag]
    diagrams = singles + doubles + triples + quadruples \
        + quintuples_and_higher

    # Re-tag the diagrams according to their new position
    for ind, diagram in enumerate(diagrams):
        diagram.tags[0] = ind

    attribute_conjugate(diagrams)

    diags_nb_per_type = {
        'nb_diags': len(diagrams),
        'singles': len(singles),
        'doubles': len(doubles),
        'triples': len(triples),
        'quadruples': len(quadruples),
        'quintuples+': len(quintuples_and_higher)
    }

    section_flags = {
        'singles': singles[0].tags[0] if singles else -1,
        'doubles': doubles[0].tags[0] if doubles else -1,
        'triples': triples[0].tags[0] if triples else -1,
        'quadruples': quadruples[0].tags[0] if quadruples else -1,
        'quintuples+': quintuples_and_higher[0].tags[0]
                       if quintuples_and_higher else -1
    }

    return diagrams, diags_nb_per_type, section_flags

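
# A minimal sketch of the bin-then-concatenate ordering used above, written
# with a hypothetical stand-in class; real MbptDiagram objects carry more
# state (conjugate partners in particular) than shown here.
def _sketch_excitation_ordering():
    """Bin stand-in diagrams by excitation level and re-tag them in order."""
    class FakeDiag(object):
        def __init__(self, excitation_level):
            self.excitation_level = excitation_level
            self.tags = [-1]

    diags = [FakeDiag(2), FakeDiag(1), FakeDiag(3), FakeDiag(1)]
    bins = {level: [] for level in (1, 2, 3)}
    for diag in diags:
        bins[diag.excitation_level].append(diag)
    ordered = bins[1] + bins[2] + bins[3]
    # Tags are rewritten to match the new positions, as in order_diagrams()
    for ind, diag in enumerate(ordered):
        diag.tags[0] = ind
    assert [diag.excitation_level for diag in ordered] == [1, 1, 2, 3]
    assert [diag.tags[0] for diag in ordered] == [0, 1, 2, 3]
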
def treat_tsds(diagrams_time):
    """Order TSDs, produce their expressions, return also number of trees.

    Args:
        diagrams_time (list): All the associated TSDs.

    Returns:
        (tuple): List of TSDs, number of tree TSDs.

    """
    tree_tsds = []
    for i_diag, diag in reversed_enumerate(diagrams_time):
        if diag.is_tree:
            tree_tsds.append(diag)
            del diagrams_time[i_diag]

    adg.diag.topologically_distinct_diagrams(tree_tsds)
    adg.diag.topologically_distinct_diagrams(diagrams_time)

    diagrams_time = tree_tsds + diagrams_time

    for index, t_diag in enumerate(diagrams_time):
        t_diag.tags.insert(0, index)
        # Non-tree TSDs are rewritten as sums over their equivalent trees
        if not t_diag.is_tree:
            t_diag.equivalent_trees = t_diag.treat_cycles()
            t_diag.expr = " + ".join(
                "\\frac{1}{%s}" % adg.tsd.tree_time_structure_den(graph)
                for graph in t_diag.equivalent_trees)

    return diagrams_time, len(tree_tsds)

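
# A minimal sketch of how the time-structure expression is assembled above;
# the denominator strings are placeholders, not actual output of
# adg.tsd.tree_time_structure_den().
def _sketch_tsd_expression():
    """Join per-tree fractions into a single LaTeX expression."""
    denominators = ["a_1 (a_1 + a_2)", "a_2 (a_1 + a_2)"]
    expr = " + ".join("\\frac{1}{%s}" % den for den in denominators)
    assert expr == "\\frac{1}{a_1 (a_1 + a_2)} + \\frac{1}{a_2 (a_1 + a_2)}"
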
def order_and_remove_topologically_equiv(matrices, max_vertex):
    """Order the matrices in sub-lists and remove topologically equivalent ones.

    Args:
        matrices (list): The adjacency matrices to be checked.
        max_vertex (int): The maximum vertex which has been filled.

    Returns:
        (list): The ordered topologically unique matrices.

    """
    # Group the matrices using the sorted content of their first row as key
    matrices_dict = {}
    for idx, matrix in reversed_enumerate(matrices):
        row0 = "".join("%i" % elem for elem in np.sort(matrix[0, :]).tolist())
        if row0 in matrices_dict:
            matrices_dict[row0].append(matrix)
        else:
            matrices_dict[row0] = [matrix]
        del matrices[idx]

    # Matrices in different groups differ in their first row and cannot be
    # topologically equivalent, so each group is checked separately
    for row_key in sorted(matrices_dict.keys()):
        matrices_dict[row_key] = \
            check_topologically_equivalent(matrices_dict[row_key], max_vertex)

    matrices = []
    for matrices_list in list(matrices_dict.values()):
        matrices += matrices_list
    return matrices

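
# A minimal sketch reusing the matrices of the check_topologically_equivalent()
# doctest below: both share the same sorted first row, so they land in the same
# sub-list and the topologically equivalent one is removed.
def _sketch_order_and_remove():
    """Keep a single representative out of two equivalent matrices."""
    import numpy
    mats = [numpy.array([[0, 2, 0, 0], [2, 0, 0, 1],
                         [0, 0, 0, 0], [0, 0, 0, 0]]),
            numpy.array([[0, 0, 2, 0], [0, 0, 0, 0],
                         [2, 0, 0, 1], [0, 0, 0, 0]])]
    uniques = order_and_remove_topologically_equiv(mats, 2)
    assert len(uniques) == 1
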
def topologically_distinct_diagrams(diagrams):
    """Return a list of diagrams all topologically distinct.

    Args:
        diagrams (list): The Diagrams of interest.

    Returns:
        (list): Topologically unique diagrams.

    """
    import adg.tsd
    iso = nx.algorithms.isomorphism
    op_nm = iso.categorical_node_match('operator', False)
    anom_em = iso.categorical_multiedge_match('anomalous', False)
    for i_diag, diag in reversed_enumerate(diagrams):
        graph = diag.graph
        diag_io_degrees = diag.io_degrees
        for i_comp_diag, comp_diag in reversed_enumerate(diagrams[:i_diag]):
            if diag_io_degrees == comp_diag.io_degrees:
                # Check anomalous character of props for PBMBPT
                if isinstance(diag, adg.pbmbpt.ProjectedBmbptDiagram):
                    doubled_graph = create_checkable_diagram(graph)
                    doubled_comp_diag = create_checkable_diagram(
                        comp_diag.graph)
                    matcher = iso.DiGraphMatcher(doubled_graph,
                                                 doubled_comp_diag,
                                                 node_match=op_nm,
                                                 edge_match=anom_em)
                # Check for topologically equivalent diags considering vertex
                # properties but not edge attributes, i.e. anomalous character
                else:
                    matcher = iso.DiGraphMatcher(graph, comp_diag.graph,
                                                 node_match=op_nm)
                if matcher.is_isomorphic():
                    # Store the set of permutations to recover the original TSD
                    if isinstance(diag, adg.tsd.TimeStructureDiagram):
                        diag.perms.update(
                            update_permutations(comp_diag.perms,
                                                comp_diag.tags[0],
                                                matcher.mapping))
                    diag.tags += comp_diag.tags
                    del diagrams[i_comp_diag]
                    break
    return diagrams

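
# A minimal sketch of the networkx isomorphism machinery used above: the two
# toy digraphs are isomorphic as bare graphs, but matching on the 'operator'
# node attribute breaks the equivalence. The graphs are illustrative only.
def _sketch_node_match_isomorphism():
    """Compare digraphs with and without the 'operator' node attribute."""
    import networkx as nx
    iso = nx.algorithms.isomorphism
    op_nm = iso.categorical_node_match('operator', False)

    graph_a = nx.DiGraph([(0, 1), (1, 2)])
    graph_b = nx.DiGraph([(0, 1), (1, 2)])
    # Mark a different vertex as the operator one in each graph
    nx.set_node_attributes(graph_a, {0: True}, 'operator')
    nx.set_node_attributes(graph_b, {1: True}, 'operator')

    assert iso.DiGraphMatcher(graph_a, graph_b).is_isomorphic()
    assert not iso.DiGraphMatcher(graph_a, graph_b,
                                  node_match=op_nm).is_isomorphic()
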
def order_diagrams(diagrams, order):
    """Order the BIMSRG diagrams and return the number of diags for each type.

    Args:
        diagrams (list): The unordered BimsrgDiagrams.
        order (int): The order of the B-IMSRG truncation.

    Returns:
        (tuple): First element are the ordered BimsrgDiagrams. Second element
            is the number of diagrams for each type. Third element is flags
            for the output processing.

    """
    # Bin the diagrams by half their maximal vertex degree
    diags_per_order = {n: [] for n in range(1, order + 1)}
    for i_diag, diag in reversed_enumerate(diagrams):
        diags_per_order[diag.max_degree // 2].append(diag)
        del diagrams[i_diag]

    diags_nb_per_type = {
        n: len(diags_per_order[n]) for n in range(1, order + 1)
    }

    # Within each order, sort by external in/out degree, then A.B character
    diagrams = []
    for n in range(1, order + 1):
        diagrams += sorted(diags_per_order[n],
                           key=lambda diag: (diag.ext_io_degree,
                                             not diag.is_AB))

    diags_nb_per_type['nb_diags'] = len(diagrams)

    section_flags = {1: 0}

    for ind, diagram in enumerate(diagrams):
        diagram.tags[0] = ind
        # Flag the diagrams starting a new external operator structure
        if ind == 0:
            section_flags['new_op_struct'] = [ind]
        elif diagram.ext_io_degree != diagrams[ind - 1].ext_io_degree:
            section_flags['new_op_struct'].append(ind)

    for n in range(2, order + 1):
        index = sum(len(diags_per_order[i]) for i in range(1, n))
        section_flags[n] = diagrams[index].tags[0] \
            if diags_per_order[n] else -1

    return diagrams, diags_nb_per_type, section_flags

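
# A minimal sketch of the sort key used above, with hypothetical stand-in
# objects: within a given external in/out degree, diagrams with is_AB set come
# first, since False sorts before True.
def _sketch_bimsrg_sort_key():
    """Order stand-in diagrams by external degree, then by A.B character."""
    import collections
    FakeDiag = collections.namedtuple('FakeDiag', ['ext_io_degree', 'is_AB'])
    diags = [FakeDiag((2, 2), False), FakeDiag((1, 1), False),
             FakeDiag((1, 1), True)]
    ordered = sorted(diags, key=lambda diag: (diag.ext_io_degree,
                                              not diag.is_AB))
    assert ordered == [FakeDiag((1, 1), True), FakeDiag((1, 1), False),
                       FakeDiag((2, 2), False)]
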
def check_topologically_equivalent(matrices, max_vertex):
    """Exclude matrices that would spawn topologically equivalent graphs.

    Args:
        matrices (list): Adjacency matrices to be checked.
        max_vertex (int): The maximum vertex which has been filled.

    Returns:
        (list): The topologically unique matrices.

    >>> import numpy
    >>> mats = [numpy.array([[0, 2, 0, 0], [2, 0, 0, 1],
    ...                      [0, 0, 0, 0], [0, 0, 0, 0]]),
    ...         numpy.array([[0, 0, 2, 0], [0, 0, 0, 0],
    ...                      [2, 0, 0, 1], [0, 0, 0, 0]])]
    >>>
    >>> mats = check_topologically_equivalent(mats, 2)
    >>> mats  # doctest: +NORMALIZE_WHITESPACE
    [array([[0, 2, 0, 0], [2, 0, 0, 1], [0, 0, 0, 0], [0, 0, 0, 0]])]
    >>>
    >>> mats = check_topologically_equivalent([], 2)
    >>> mats  # doctest: +NORMALIZE_WHITESPACE
    []

    """
    if not matrices:
        return []
    vertices = list(range(matrices[0].shape[0]))
    # Permutations of the already-filled vertices, keeping vertex 0 fixed
    permutations = [[0] + list(k) + vertices[max_vertex + 1:]
                    for k
                    in itertools.permutations(vertices[1:max_vertex + 1])]
    for ind_mat1, mat1 in reversed_enumerate(matrices[:-1]):
        mat1_1plus_sorted = np.sort(mat1[1:max_vertex + 1, :].flat)
        for ind_mat2 in range(len(matrices) - 1, ind_mat1, -1):
            mat2 = matrices[ind_mat2]
            # Basic check to avoid needless permutations
            if not (mat1_1plus_sorted
                    - np.sort(mat2[1:max_vertex + 1, :].flat)).any():
                # Test for all possible permutations
                for reordering in permutations:
                    if not (mat1 - mat2[:, reordering][reordering, :]).any():
                        del matrices[ind_mat2]
                        break
                else:
                    continue
                # Exit the comparison loop once a duplicate has been removed
                break
    return matrices

def check_unconnected_spawn(matrices, max_filled_vertex):
    """Exclude some matrices that would spawn unconnected diagrams.

    Do several permutations among the rows and columns corresponding to
    already-filled vertices, and check if one obtains a block-diagonal
    organisation, where the off-diagonal blocks connecting the already-filled
    and yet-unfilled parts of the matrix would be empty. In that case, remove
    the matrix.

    Args:
        matrices (list): The adjacency matrices to be checked.
        max_filled_vertex (int): The furthest vertex until which the matrices
            have been filled.

    >>> import numpy
    >>> mats = [numpy.array([[0, 2, 0], [2, 0, 0], [0, 0, 0]]),
    ...         numpy.array([[0, 2, 1], [2, 0, 1], [0, 0, 0]])]
    >>>
    >>> check_unconnected_spawn(mats, 1)
    >>> mats  # doctest: +NORMALIZE_WHITESPACE
    [array([[0, 2, 1], [2, 0, 1], [0, 0, 0]])]

    """
    vertices = list(range(matrices[0].shape[0]))
    permutations = [
        [0] + list(k) + vertices[max_filled_vertex + 1:]
        for k in itertools.permutations(vertices[1:max_filled_vertex + 1])
    ]
    for ind_mat, matrix in reversed_enumerate(matrices):
        # Test for all possible permutations
        for reordering in permutations:
            mat = matrix[:, reordering][reordering, :]
            # Check for non-zero elements in off-diagonal blocks
            if not mat[:, vertices[:max_filled_vertex + 1]
                       ][vertices[max_filled_vertex + 1:], :].any() \
                    and not mat[vertices[:max_filled_vertex + 1],
                                :][:, vertices[max_filled_vertex + 1:]].any():
                del matrices[ind_mat]
                break

def check_vertex_degree(matrices, three_body_use, nbody_max_observable,
                        canonical_only, vertex_id):
    """Check the degree of a specific vertex in a set of matrices.

    Args:
        matrices (list): Adjacency matrices.
        three_body_use (bool): ``True`` if one uses three-body forces.
        nbody_max_observable (int): Maximum body number for the observable.
        canonical_only (bool): ``True`` if one draws only canonical diagrams.
        vertex_id (int): The position of the studied vertex.

    >>> import numpy
    >>> test_matrices = [numpy.array([[0, 1, 2], [1, 0, 1], [0, 2, 0]]),
    ...                  numpy.array([[2, 0, 2], [1, 2, 3], [1, 0, 0]]),
    ...                  numpy.array([[0, 1, 3], [2, 0, 8], [2, 1, 0]])]
    >>> check_vertex_degree(test_matrices, True, 3, False, 0)
    >>> test_matrices  # doctest: +NORMALIZE_WHITESPACE
    [array([[0, 1, 2], [1, 0, 1], [0, 2, 0]]),
     array([[2, 0, 2], [1, 2, 3], [1, 0, 0]])]
    >>> check_vertex_degree(test_matrices, False, 2, False, 0)
    >>> test_matrices  # doctest: +NORMALIZE_WHITESPACE
    [array([[0, 1, 2], [1, 0, 1], [0, 2, 0]])]

    """
    # Degrees allowed for interaction vertices (vertex 0 is checked separately)
    authorized_deg = [4]
    if three_body_use:
        authorized_deg.append(6)
    if not canonical_only or vertex_id == 0:
        authorized_deg.append(2)
    authorized_deg = tuple(authorized_deg)

    for i_mat, matrix in reversed_enumerate(matrices):
        # Sum over all in- and out-going edges attached to the vertex
        vertex_degree = sum(matrix[index][vertex_id] + matrix[vertex_id][index]
                            for index in range(matrix.shape[0]))
        # The diagonal element enters the sum twice, so remove one occurrence
        vertex_degree -= matrix[vertex_id][vertex_id]

        if (vertex_id != 0 and vertex_degree not in authorized_deg) \
                or (vertex_id == 0
                    and vertex_degree > 2*nbody_max_observable):
            del matrices[i_mat]

def remove_disconnected_matrices(matrices):
    """Remove matrices corresponding to disconnected diagrams.

    Args:
        matrices (list): List of adjacency matrices.

    """
    vertices = list(range(matrices[0].shape[0]))
    permutations = [[0] + list(k)
                    for k in itertools.permutations(vertices[1:])]
    for idx, matrix in reversed_enumerate(matrices):
        for reordering in permutations:
            mat = matrix[:, reordering][reordering, :]
            for vert in vertices[1:]:
                # Check if the permuted matrix is block-diagonal
                if not mat[vertices[:vert], :][:, vertices[vert:]].any() \
                        and not mat[:, vertices[:vert]
                                    ][vertices[vert:], :].any():
                    del matrices[idx]
                    break
            else:
                continue
            break

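
# A minimal sketch of the connectivity filter above: the first toy matrix has
# an isolated third vertex and is removed in place, while the connected second
# one survives. The matrices are illustrative only.
def _sketch_remove_disconnected():
    """Drop an adjacency matrix whose graph splits into two blocks."""
    import numpy
    mats = [numpy.array([[0, 2, 0], [2, 0, 0], [0, 0, 0]]),  # vertex 2 isolated
            numpy.array([[0, 1, 1], [1, 0, 1], [0, 1, 0]])]  # connected
    remove_disconnected_matrices(mats)
    assert len(mats) == 1
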
def generate_diagrams(commands, id_generator):
    """Return a list with diagrams of the appropriate type.

    Args:
        commands (Namespace): Flags for the run management.
        id_generator (UniqueID): A unique ID number generator.

    Returns:
        (list): All the diagrams of the appropriate class and order.

    """
    if commands.theory == "MBPT":
        diagrams = adg.mbpt.diagrams_generation(commands.order)
    elif commands.theory in ("BMBPT", "PBMBPT"):
        diagrams = adg.bmbpt.diagrams_generation(commands.order,
                                                 commands.with_3NF,
                                                 commands.nbody_observable,
                                                 commands.canonical)
    elif commands.theory == "BIMSRG":
        diagrams, switch_flag = adg.bimsrg.diagrams_generation(commands.order)
    else:
        print("Invalid theory! Exiting program.")
        exit()
    print("Number of matrices produced: ", len(diagrams))

    # Turn the adjacency matrices into multigraphs
    diags = [nx.from_numpy_array(diagram,
                                 create_using=nx.MultiDiGraph(),
                                 parallel_edges=True) for diagram in diagrams]

    # Discard disconnected graphs for MBPT
    if commands.theory == "MBPT":
        for i_diag, diag in reversed_enumerate(diags):
            if nx.number_weakly_connected_components(diag) != 1:
                del diags[i_diag]

    adg.diag.label_vertices(diags, commands.theory,
                            switch_flag if commands.theory == "BIMSRG" else -1)

    # Wrap the graphs into the diagram class matching the theory
    if commands.theory in ("BMBPT", "PBMBPT"):
        diagrams = [adg.bmbpt.BmbptFeynmanDiagram(graph, id_generator.get())
                    for graph in diags]
    elif commands.theory == "MBPT":
        diagrams = [adg.mbpt.MbptDiagram(graph, id_generator.get())
                    for graph in diags]
    elif commands.theory == "BIMSRG":
        diagrams = [adg.bimsrg.BimsrgDiagram(graph, id_generator.get())
                    for graph in diags]

    if commands.theory == "PBMBPT":
        # Spawn the anomalous diagrams from each standard BMBPT diagram
        for idx, diagram in reversed_enumerate(diagrams):
            new_graphs = adg.pbmbpt.generate_anomalous_diags(
                diagram, 3 if commands.with_3NF else 2)
            new_diags = [adg.pbmbpt.ProjectedBmbptDiagram(diag,
                                                          id_generator.get(),
                                                          idx, spawn_idx)
                         for spawn_idx, diag in enumerate(new_graphs)]
            del diagrams[idx]
            adg.pbmbpt.filter_new_diagrams(new_diags, diagrams)
            diagrams += new_diags
    return diagrams

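
# A minimal sketch of the adjacency-matrix-to-graph conversion used above:
# with parallel_edges=True and a MultiDiGraph, an entry of 2 spawns two
# parallel edges between the corresponding vertices. The matrix is
# illustrative only.
def _sketch_matrix_to_multigraph():
    """Turn a small adjacency matrix into a multigraph with parallel edges."""
    import networkx as nx
    import numpy
    adjacency = numpy.array([[0, 2], [0, 0]])
    graph = nx.from_numpy_array(adjacency, create_using=nx.MultiDiGraph(),
                                parallel_edges=True)
    assert graph.number_of_edges() == 2
    assert list(graph.edges()) == [(0, 1), (0, 1)]
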
def order_diagrams(diagrams):
    """Order the BMBPT diagrams and return number of diags for each type.

    Args:
        diagrams (list): Possibly redundant BmbptFeynmanDiagrams.

    Returns:
        (tuple): First element is the list of topologically unique, ordered
            diagrams. Second element is a dict with the number of diagrams
            for each major type. Third element is a dict with the identifiers
            of diagrams starting each output file section.

    """
    diagrams_2_hf = []
    diagrams_2_ehf = []
    diagrams_2_not_hf = []
    diagrams_3_hf = []
    diagrams_3_ehf = []
    diagrams_3_not_hf = []

    # Bin the diagrams by two- or three-body character and HF type
    for i_diag, diag in reversed_enumerate(diagrams):
        if diag.two_or_three_body == 2:
            if diag.hf_type == "HF":
                diagrams_2_hf.append(diag)
            elif diag.hf_type == "EHF":
                diagrams_2_ehf.append(diag)
            elif diag.hf_type == "noHF":
                diagrams_2_not_hf.append(diag)
        elif diag.two_or_three_body == 3:
            if diag.hf_type == "HF":
                diagrams_3_hf.append(diag)
            elif diag.hf_type == "EHF":
                diagrams_3_ehf.append(diag)
            elif diag.hf_type == "noHF":
                diagrams_3_not_hf.append(diag)
        del diagrams[i_diag]

    diagrams = diagrams_2_hf + diagrams_2_ehf + diagrams_2_not_hf \
        + diagrams_3_hf + diagrams_3_ehf + diagrams_3_not_hf

    diags_nb_per_type = {
        'nb_2_hf': len(diagrams_2_hf),
        'nb_2_ehf': len(diagrams_2_ehf),
        'nb_2_not_hf': len(diagrams_2_not_hf),
        'nb_3_hf': len(diagrams_3_hf),
        'nb_3_ehf': len(diagrams_3_ehf),
        'nb_3_not_hf': len(diagrams_3_not_hf),
        'nb_diags': len(diagrams),
        'nb_2': (len(diagrams_2_hf) + len(diagrams_2_ehf)
                 + len(diagrams_2_not_hf)),
        'nb_3': (len(diagrams_3_hf) + len(diagrams_3_ehf)
                 + len(diagrams_3_not_hf))
    }

    section_flags = {
        'two_body_hf': diagrams_2_hf[0].unique_id if diagrams_2_hf else -1,
        'two_body_ehf': diagrams_2_ehf[0].unique_id if diagrams_2_ehf else -1,
        'two_body_not_hf':
            diagrams_2_not_hf[0].unique_id if diagrams_2_not_hf else -1,
        'three_body_hf': diagrams_3_hf[0].unique_id if diagrams_3_hf else -1,
        'three_body_ehf':
            diagrams_3_ehf[0].unique_id if diagrams_3_ehf else -1,
        'three_body_not_hf':
            diagrams_3_not_hf[0].unique_id if diagrams_3_not_hf else -1
    }

    return diagrams, diags_nb_per_type, section_flags
