    def _generate_graph(self):
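        """
        Builds the base adjacency graph from the scene matches and maskings,
        condenses it by category, cleans up weak or irrelevant edges, splits the
        result into fixture components and assigns block and width attributes.
        """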
        self.create_masking_and_matches()
        combined_maskings = pd.concat(
            [self.masking_data, self.pos_masking_data])
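        # POS tags are assigned stacking_layer 0; tags on stacking_layer < 1 whose
        # probe match is a menu-board POS item are moved to layer 2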
        self.matches_df.loc[self.matches_df['category'] == 'POS',
                            'stacking_layer'] = 0
        self.matches_df.loc[(self.matches_df['stacking_layer'] < 1) &
                            (self.matches_df['probe_match_fk'].
                             isin(self.check_and_add_menu_board_pos_item())),
                            'stacking_layer'] = 2
        self._identify_incorrect_tags_in_pos_areas()
        filtered_mpis = self.matches_df.loc[~self.matches_df['category'].
                                            isin(['General'])]

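        # build the base adjacency graph from the masking data, carrying category,
        # menu-board-area and probe-group attributes on the nodes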
        self.base_adj_graph = AdjacencyGraphBuilder.initiate_graph_by_dataframe(
            filtered_mpis,
            combined_maskings,
            ['category', 'in_menu_board_area', 'probe_group_id'],
            use_masking_only=True,
            minimal_overlap_ratio=0.4)
        self.base_adj_graph = self.remove_edges_between_probe_groups(
            self.base_adj_graph)
        self.base_adj_graph = self.remove_pos_to_pos_edges(self.base_adj_graph)
        self.conversion_data = self.calculate_width_conversion()

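        # condense the product-level graph into category blocks and clean up the condensed graph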
        self.condensed_adj_graph = AdjacencyGraphBuilder.condense_graph_by_level(
            'category', self.base_adj_graph)
        self.condensed_adj_graph = self.condensed_adj_graph.to_undirected()
        self._severe_connections_across_weakly_related_bays()
        self._pair_signage_with_fixture_block()
        self._filter_pos_edges_to_closest()
        self._consume_fully_encapsulated_nodes()
        self._remove_nodes_with_less_facings_than(
            self.minimum_facings_for_block)
        self._calculate_widths_of_flip_signs()

        # create the component graph (split the graph into separate components by removing horizontal connections)
        horizontal_edges = self._filter_horizontal_edges_between_blocks(
            self.condensed_adj_graph)
        if horizontal_edges:
            self.adj_component_graph = self._remove_edges_from_graph(
                horizontal_edges, self.condensed_adj_graph)
        else:
            self.adj_component_graph = self.condensed_adj_graph.copy()
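        # number the fixtures and blocks, then propagate width, header and product-above-header attributes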
        self._assign_fixture_and_block_numbers_to_product_clusters()
        self._assign_block_numbers_to_pos_items()
        self._assign_width_values_to_all_nodes()
        self._assign_no_header_attribute()
        self._assign_product_above_header_attribute()

        # self.create_graph_image()

        return
    def _create_adjacency_graph_per_scene(self, scene_fk):
        """This method creates the graph for the case count calculation."""
        filtered_matches = self._prepare_matches_for_graph_creation(scene_fk)
        if filtered_matches.empty:
            return None
        maskings = AdjacencyGraphBuilder._load_maskings(self.project_name, scene_fk)
        add_node_attr = ['display_in_scene_fk', 'display_rect_x', 'display_rect_y', 'display_name', 'display_brand']
        adj_g = AdjacencyGraphBuilder.initiate_graph_by_dataframe(filtered_matches,
                                                                  maskings, add_node_attr, use_masking_only=True)
        adj_g = AdjacencyGraphBuilder.condense_graph_by_level('display_in_scene_fk', adj_g)
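        # keep only upward edges between the condensed display nodes, then drop edges flagged as redundant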
        filtered_adj_g = adj_g.edge_subgraph(self._filter_edges_by_degree(adj_g, requested_direction='UP'))
        filtered_adj_g = adj_g.edge_subgraph(self._filter_redundant_edges(filtered_adj_g))
        return filtered_adj_g
    def _create_adjacency_graph(self, scene_fk, population, sequence_params,
                                graph_key):
        """
        This method creates the Adjacency graph for the relevant scene.
        :param scene_fk: The relevant scene_fk.
        :param sequence_params: A dictionary with the relevant params of the sequence.
        :param graph_key: The relevant sequence entity.
        :return: Filtered adjacency graph.
        """
        report_label = self.report_label.replace(" ", "")
        before_file_name = "before_{}_{}.html".format(self.custom_entity_fk,
                                                      report_label)
        after_file_name = "after_{}_{}.html".format(self.custom_entity_fk,
                                                    report_label)
        filtered_matches = self._filter_graph_data(scene_fk, population,
                                                   sequence_params)

        if filtered_matches.empty:
            return 0

        masking_df = AdjacencyGraphBuilder._load_maskings(
            self.data_provider.project_name, scene_fk)

        allowed_product_filters = sequence_params[
            AdditionalAttr.ALLOWED_PRODUCTS_FILTERS]
        graph_attr = self._get_additional_attribute_for_graph(
            graph_key, allowed_product_filters)

        kwargs = {
            'minimal_overlap_ratio': sequence_params[AdditionalAttr.ADJACENCY_OVERLAP_RATIO],
            # AdditionalAttr.USE_MASKING_ONLY: True
        }

        adj_g = AdjacencyGraphBuilder.initiate_graph_by_dataframe(
            filtered_matches, masking_df, graph_attr, **kwargs)

        if self.is_debug:
            self.plot_adj_graph(adj_g, self.project_name, scene_fk,
                                before_file_name)

        self.adjacency_graph = adj_g

        adj_g = self._filter_adjacency_graph(adj_g, graph_key, sequence_params)

        if self.is_debug:
            self.plot_adj_graph(adj_g, self.project_name, scene_fk,
                                after_file_name)

        return adj_g
    def _severe_connections_across_weakly_related_bays(self):
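        """
        Removes base-graph edges that connect a weakly populated ("orphaned") bay
        to the rest of a condensed category node, then rebuilds the condensed graph.
        """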
        edges_to_remove = []
        # for every condensed node, find all edges that span across bays
        for node, node_data in self.condensed_adj_graph.nodes(data=True):
            bay_numbers = node_data['bay_number'].values
            if len(bay_numbers) < 2:
                continue

            node_matches = self.matches_df[
                self.matches_df['scene_match_fk'].isin(node_data['members'])]
            bay_groups = node_matches.groupby(
                ['bay_number', 'shelf_number'],
                as_index=False)['scene_match_fk'].count()
            # ignore (bay, shelf) groups with only a single facing, since a lone tag is probably just a tag placement error
            bay_groups = bay_groups[bay_groups['scene_match_fk'] > 1]
            bay_groups = bay_groups.groupby(
                ['bay_number'], as_index=False)['shelf_number'].count()
            max_shelves = bay_groups['shelf_number'].max()
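            # a bay is considered orphaned if it populates at most half as many shelves as the best-populated bay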
            bay_groups['orphaned'] = \
                bay_groups['shelf_number'].apply(lambda x: x <= max_shelves * 0.5)

            if len(bay_groups[bay_groups['orphaned']]) > 1:
                Log.error(
                    "Unable to sever bay connections for the {} category; too many bays would be orphaned"
                    .format(node_data['category'].value))
                continue
            elif len(bay_groups[bay_groups['orphaned']]) == 0:
                continue

            orphaned_bay = bay_groups[
                bay_groups['orphaned']]['bay_number'].iloc[0]

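            # remove product-level edges that cross from the orphaned bay into a different bay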
            sub_graph = self.base_adj_graph.subgraph(node_data['members'])
            for edge in sub_graph.edges():
                node1_bay = sub_graph.nodes[edge[0]]['bay_number'].value
                node2_bay = sub_graph.nodes[edge[1]]['bay_number'].value
                if node1_bay == node2_bay:
                    continue
                elif node1_bay == orphaned_bay or node2_bay == orphaned_bay:
                    edges_to_remove.append(edge)

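        # rebuild the base graph without the severed edges and re-condense it by category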
        if edges_to_remove:
            edges_to_keep = [
                edge for edge in self.base_adj_graph.edges()
                if edge not in edges_to_remove and (
                    edge[1], edge[0]) not in edges_to_remove
            ]

            self.base_adj_graph = self.base_adj_graph.edge_subgraph(
                edges_to_keep)

            self.condensed_adj_graph = AdjacencyGraphBuilder.condense_graph_by_level(
                'category', self.base_adj_graph)
            self.condensed_adj_graph = self.condensed_adj_graph.to_undirected()
        return
    def _filter_adjacency_graph(self, adj_g, graph_key, sequence_params):
        """
        The method filters the relevant nodes for the sequence out of the graph.
        :param graph_key: The relevant sequence entity.
        :return: Filtered adjacency graph.
        """
        if graph_key != CalcConst.PRODUCT_FK or sequence_params[
                AdditionalAttr.REPEATING_OCCURRENCES]:
            adj_g = AdjacencyGraphBuilder.condense_graph_by_level(
                ColumnNames.GRAPH_KEY, adj_g)
            self.condensed_graph = adj_g
        return adj_g
    def _filter_adjacency_graph(self, adj_g, graph_key, sequence_params):
        """
        The method filters the relevant nodes for the sequence out of the graph.
        :param graph_key: The relevant sequence entity.
        :return: Filtered adjacency graph.
        """
        use_degrees = False
        direction = sequence_params[AdditionalAttr.DIRECTION]
        include_stacking = sequence_params[AdditionalAttr.INCLUDE_STACKING]
        if graph_key != CalcConst.PRODUCT_FK or sequence_params[AdditionalAttr.REPEATING_OCCURRENCES]:
            if direction in ['RIGHT', 'LEFT']:
                adj_g = AdjacencyGraphBuilder.condense_graph_by_level(ColumnNames.GRAPH_KEY, adj_g)
            use_degrees = True
        adj_g = self._filter_graph_by_edge_direction(adj_g, direction, include_stacking, use_degrees)
        return adj_g
    @staticmethod
    def get_merged_node_attributes_from_nodes(selected_nodes, graph):
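        """
        Merges the NodeAttribute fields of the selected nodes into a single
        attribute dictionary and sums their facings.
        """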
        filtered_nodes = [
            n for i, n in graph.nodes(data=True) if i in selected_nodes
        ]

        attributes_list = [
            attr for attr, value in graph.nodes[selected_nodes[0]].items()
            if isinstance(value, NodeAttribute)
        ]

        node_attributes = {}
        for attribute_name in attributes_list:
            node_attributes[
                attribute_name] = AdjacencyGraphBuilder._chain_attribute(
                    attribute_name, filtered_nodes)

        # Total facings across all the selected nodes.
        total_facings = sum(n['facings'] for n in filtered_nodes)
        node_attributes.update({'facings': total_facings})

        return node_attributes
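
    # Hypothetical usage sketch (the call path and node ids below are assumptions,
    # not taken from this module):
    #   merged = AdjacencyGraphBuilder.get_merged_node_attributes_from_nodes(
    #       selected_nodes=[0, 1], graph=condensed_adj_graph)
    #   merged['facings']  # total facings across the merged nodes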