Example #1
    def __init__(self, data_provider, output):
        self.output = output
        self.data_provider = data_provider
        self.common = Common(self.data_provider)
        self.project_name = self.data_provider.project_name
        self.session_uid = self.data_provider.session_uid
        self.products = self.data_provider[Data.PRODUCTS]
        self.all_products = self.data_provider[Data.ALL_PRODUCTS]
        self.match_product_in_scene = self.data_provider[Data.MATCHES]
        self.visit_date = self.data_provider[Data.VISIT_DATE]
        self.current_date = datetime.now()
        self.session_info = self.data_provider[Data.SESSION_INFO]
        self.scene_info = self.data_provider[Data.SCENES_INFO]
        self.store_info = self.data_provider[Data.STORE_INFO]
        self.store_id = self.data_provider[Data.STORE_FK]
        own_manufacturer = self.data_provider[Data.OWN_MANUFACTURER]
        self.own_manufacturer_id = int(
            own_manufacturer[own_manufacturer['param_name'] == 'manufacturer_id']['param_value'].tolist()[0])
        self.scif = self.data_provider[Data.SCENE_ITEM_FACTS]
        self.rds_conn = PSProjectConnector(self.project_name, DbUsers.CalculationEng)
        self.toolbox = GENERALToolBox(data_provider)
        self.assortment = Assortment(self.data_provider, self.output, common=self.common)
        self.kpi_static_data = self.common.get_kpi_static_data()
        self.kpi_results_queries = []

        self.template_path = self.get_template_path()
        self.template_data = self.get_template_data()
        self.sos_store_policies = self.get_sos_store_policies(self.visit_date.strftime('%Y-%m-%d'))
        self.result_values = self.get_result_values()

        self.scores = pd.DataFrame()
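Most of the examples on this page only show the toolbox constructor. As a rough illustration of how the objects wired together above are typically consumed later on (a hedged sketch, not code from the original project; the 'in_store' column and the rounding follow Examples #20 and #22 below, everything else is assumed):

    # Hedged sketch: `toolbox` stands for an instance built by a constructor like
    # the one above. calculate_lvl3_assortment() is the same call used in Example
    # #20 and returns one row per assorted product with an 'in_store' flag.
    lvl3_result = toolbox.assortment.calculate_lvl3_assortment()
    if not lvl3_result.empty:
        numerator = len(lvl3_result[lvl3_result['in_store'] == 1])
        denominator = len(lvl3_result)
        distribution_rate = round(numerator / float(denominator), 4)
        oos_rate = 1 - distribution_rate  # OOS/distribution split as in Example #22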
Example #2
 def __init__(self, data_provider, output):
     self.output = output
     self.data_provider = data_provider
     self.ps_data_provider = PsDataProvider(data_provider)
     self.kpi_result_values = self.ps_data_provider.get_result_values()
     self.common_v2 = Common(self.data_provider)
     self.store_id = self.data_provider[Data.STORE_FK]
     self.scif = self.data_provider[Data.SCENE_ITEM_FACTS]
     self.assortment = Assortment(self.data_provider)
     self.own_manufacturer_fk = int(
         self.data_provider.own_manufacturer.param_value.values[0])
     self.kpi_static_data = self.common_v2.kpi_static_data[[
         'pk', StaticKpis.TYPE
     ]]
     # self.custom_entity = self.ps_data_provider.get_custom_entities(Consts.PRODUCT_GROUP)
     self.visit_date = self.data_provider[Data.VISIT_DATE]
     self.project_name = self.data_provider.project_name
     self.rds_conn = PSProjectConnector(self.project_name,
                                        DbUsers.CalculationEng)
     self.external_targets = self._retrieve_completeness_external_targets()
     self.products_trax_cat = self._get_products_with_trax_categories()
     self.ass_groups_present = {
         Consts.DISTR_SNACKS: 0,
         Consts.DISTR_SABRA: 0
     }
Example #3
 def __init__(self, data_provider, output):
     self.k_engine = BaseCalculationsScript(data_provider, output)
     self.output = output
     self.data_provider = data_provider
     self.project_name = self.data_provider.project_name
     self.session_uid = self.data_provider.session_uid
     self.products = self.data_provider[Data.PRODUCTS]
     self.all_products = self.data_provider[Data.ALL_PRODUCTS]
     self.match_product_in_scene = self.data_provider[Data.MATCHES]
     self.visit_date = self.data_provider[Data.VISIT_DATE]
     self.session_info = self.data_provider[Data.SESSION_INFO]
     self.scene_info = self.data_provider[Data.SCENES_INFO]
     self.store_id = self.data_provider[Data.STORE_FK]
     self.store_info = self.data_provider[Data.STORE_INFO]
     self.store_type = self.store_info['store_type'].iloc[0]
     self.scif = self.data_provider[Data.SCENE_ITEM_FACTS]
     self.rds_conn = PSProjectConnector(self.project_name, DbUsers.CalculationEng)
     self.general_tools = CCKHGENERALToolBox(self.data_provider, self.output)
     self.template = CCKHTemplateConsts()
     self.kpi_static_data = self.get_kpi_static_data()
     self.kpi_results_queries = []
     self.commonV2 = CommonV2(self.data_provider)
     self.kpi_new_static_data = self.commonV2.get_new_kpi_static_data()
     self.manufacturer = int(self.data_provider.own_manufacturer.param_value.values[0])
     self.ps_data_provider = PsDataProvider(self.data_provider, self.output)
     self.external_targets = self.ps_data_provider.get_kpi_external_targets()
     self.assortment = Assortment(self.data_provider, self.output)
     self.templates_info = self.external_targets[self.external_targets[CCKHTemplateConsts.TEMPLATE_OPERATION] ==
                                                 CCKHTemplateConsts.BASIC_SHEET]
     self.visibility_info = self.external_targets[self.external_targets[CCKHTemplateConsts.TEMPLATE_OPERATION]
                                                  == CCKHTemplateConsts.VISIBILITY_SHEET]
     self.cooler_info = self.external_targets[self.external_targets[CCKHTemplateConsts.TEMPLATE_OPERATION]
                                              == CCKHTemplateConsts.COOLER_SHEET]
Example #4
 def __init__(self, data_provider, output):
     self.output = output
     self.data_provider = data_provider
     self.project_name = self.data_provider.project_name
     self.session_uid = self.data_provider.session_uid
     self.session_id = self.data_provider.session_id
     self.products = self.data_provider[Data.PRODUCTS]
     self.all_products = self.data_provider[Data.ALL_PRODUCTS]
     self.match_product_in_scene = self.data_provider[Data.MATCHES]
     self.visit_date = self.data_provider[Data.VISIT_DATE]
     self.session_info = self.data_provider[Data.SESSION_INFO]
     self.scene_info = self.data_provider[Data.SCENES_INFO]
     self.store_id = self.data_provider[Data.STORE_FK]
     self.scif = self.data_provider[Data.SCENE_ITEM_FACTS]
     self.rds_conn = PSProjectConnector(self.project_name,
                                        DbUsers.CalculationEng)
     # self.tools = RISPARKWINEDEGENERALToolBox(
     #     self.data_provider, self.output, rds_conn=self.rds_conn)
     # self.New_kpi_static_data = self.get_new_kpi_static_data()
     self.kpi_results_new_tables_queries = []
     # self.store_assortment = PSAssortmentDataProvider(self.data_provider).execute()
     self.store_info = self.data_provider[Data.STORE_INFO]
     self.current_date = datetime.now()
     self.assortment = Assortment(self.data_provider, self.output)
     self.store_assortment = self.assortment.store_assortment
     self.common = Common(self.data_provider)
Example #5
    def __init__(self, data_provider, output, common):
        self.output = output
        self.data_provider = data_provider
        self.common = common
        self.project_name = self.data_provider.project_name
        self.session_uid = self.data_provider.session_uid
        self.session_id = self.data_provider.session_id
        self.session_info = self.data_provider[Data.SESSION_INFO]
        self.scene_info = self.data_provider[Data.SCENES_INFO]
        self.scene_results = self.data_provider[Data.SCENE_KPI_RESULTS]
        self.templates = self.data_provider[Data.TEMPLATES]
        self.products = self.data_provider[Data.PRODUCTS]
        self.all_products = self.data_provider[Data.ALL_PRODUCTS]
        self.match_product_in_scene = self.data_provider[Data.MATCHES]
        self.visit_date = self.data_provider[Data.VISIT_DATE]
        self.store_info = self.data_provider[Data.STORE_INFO]
        self.store_id = self.data_provider[Data.STORE_FK]
        self.scif = self.data_provider[Data.SCENE_ITEM_FACTS]
        self.scif['store_fk'] = self.store_id
        self.scif = self.scif[~(self.scif['product_type'].isin([Const.IRRELEVANT, Const.EMPTY]))]
        self.scif = self.scif[self.scif['facings'] > 0]
        self.rds_conn = PSProjectConnector(self.project_name, DbUsers.CalculationEng)
        self.kpis = self.load_kpis()
        self.results_values = self.load_results_values()
        self.kpi_static_data = self.common.get_kpi_static_data()
        self.manufacturer_fk = int(self.data_provider[Data.OWN_MANUFACTURER].iloc[0, 1])

        self.kpi_results = []

        self.assortment = Assortment(self.data_provider, self.output)
Example #6
    def __init__(self, data_provider, output):
        self.output = output
        self.data_provider = data_provider
        self.common = Common(self.data_provider)
        self.common_v2 = CommonV2(self.data_provider)
        self.project_name = self.data_provider.project_name
        self.session_uid = self.data_provider.session_uid
        self.products = self.data_provider[Data.PRODUCTS]
        self.all_products = self.data_provider[Data.ALL_PRODUCTS]
        self.match_product_in_scene = self.data_provider[Data.MATCHES]
        self.visit_date = self.data_provider[Data.VISIT_DATE]
        self.session_info = self.data_provider[Data.SESSION_INFO]
        self.scene_info = self.data_provider[Data.SCENES_INFO]
        self.store_id = self.data_provider[Data.STORE_FK]
        self.scif = self.data_provider[Data.SCENE_ITEM_FACTS]
        self.template_info = self.data_provider.all_templates
        self.rds_conn = ProjectConnector(self.project_name,
                                         DbUsers.CalculationEng)
        self.ps_data_provider = PsDataProvider(self.data_provider)
        self.thresholds_and_results = {}
        self.result_df = []
        self.writing_to_db_time = datetime.timedelta(0)
        self.kpi_results_queries = []
        self.potential_products = {}
        self.shelf_square_boundaries = {}
        self.average_shelf_values = {}
        self.kpi_static_data = self.common.get_kpi_static_data()
        self.kpi_results_queries = []
        self.all_template_data = parse_template(TEMPLATE_PATH, "KPI")
        self.spacing_template_data = parse_template(TEMPLATE_PATH, "Spacing")
        self.fixture_width_template = pd.read_excel(FIXTURE_WIDTH_TEMPLATE,
                                                    "Fixture Width",
                                                    dtype=pd.Int64Dtype())
        self.facings_to_feet_template = pd.read_excel(FIXTURE_WIDTH_TEMPLATE,
                                                      "Conversion Table",
                                                      dtype=pd.Int64Dtype())
        self.header_positions_template = pd.read_excel(FIXTURE_WIDTH_TEMPLATE,
                                                       "Header Positions")
        self.flip_sign_positions_template = pd.read_excel(
            FIXTURE_WIDTH_TEMPLATE, "Flip Sign Positions")
        self.custom_entity_data = self.ps_data_provider.get_custom_entities(
            1005)
        self.ignore_stacking = False
        self.facings_field = 'facings' if not self.ignore_stacking else 'facings_ign_stack'
        self.INCLUDE_FILTER = 1
        self.assortment = Assortment(self.data_provider,
                                     output=self.output,
                                     ps_data_provider=self.ps_data_provider)
        self.store_assortment = self.assortment.get_lvl3_relevant_ass()

        self.kpi_new_static_data = self.common.get_new_kpi_static_data()
        try:
            self.mpis = self.match_product_in_scene.merge(self.products, on='product_fk', suffixes=['', '_p']) \
                        .merge(self.scene_info, on='scene_fk', suffixes=['', '_s']) \
                          .merge(self.template_info, on='template_fk', suffixes=['', '_t'])
        except KeyError:
            Log.warning('MPIS cannot be generated!')
            return
        self.adp = AltriaDataProvider(self.data_provider)
Example #7
 def __init__(self, data_provider, output):
     GlobalSessionToolBox.__init__(self, data_provider, output)
     self.assortment = Assortment(data_provider)
     self.ps_data = PsDataProvider(data_provider)
     self.display_in_scene = data_provider.match_display_in_scene
     self.static_display = data_provider.static_display
     self.manufacturer_fk = int(self.manufacturer_fk)
     self._add_display_data_to_scif()
     self._add_client_name_and_sub_brand_data()
Example #8
 def __init__(self, data_provider, output):
     GlobalSessionToolBox.__init__(self, data_provider, output)
     self.own_manufacturer_fk = int(self.data_provider.own_manufacturer.param_value.values[0])
     self.parser = Parser
     self.all_products = self.data_provider[Data.ALL_PRODUCTS]
     self.assortment = Assortment(self.data_provider, self.output)
     self.ps_data = PsDataProvider(self.data_provider, self.output)
     self.kpi_external_targets = self.ps_data.get_kpi_external_targets(key_fields=Consts.KEY_FIELDS,
                                                                       data_fields=Consts.DATA_FIELDS)
Example #9
    def __init__(self, output, data_provider):
        super(PepsicoUtil, self).__init__(data_provider)
        self.output = output
        self.common = Common(self.data_provider)
        # self.common_v1 = CommonV1(self.data_provider)
        self.project_name = self.data_provider.project_name
        self.session_uid = self.data_provider.session_uid
        self.products = self.data_provider[Data.PRODUCTS]
        self.all_products = self.data_provider[Data.ALL_PRODUCTS]
        self.match_product_in_scene = self.data_provider[Data.MATCHES]
        self.visit_date = self.data_provider[Data.VISIT_DATE]
        self.session_info = self.data_provider[Data.SESSION_INFO]
        self.scene_info = self.data_provider[Data.SCENES_INFO]
        self.store_id = self.data_provider[Data.STORE_FK] if self.data_provider[Data.STORE_FK] is not None \
                                                            else self.session_info['store_fk'].values[0]
        self.scif = self.data_provider[Data.SCENE_ITEM_FACTS]
        self.rds_conn = PSProjectConnector(self.project_name, DbUsers.CalculationEng)
        self.display_scene = self.get_match_display_in_scene()
        self.kpi_static_data = self.common.get_kpi_static_data()
        self.kpi_results_queries = []

        self.probe_groups = self.get_probe_group()
        self.match_product_in_scene = self.match_product_in_scene.merge(self.probe_groups, on='probe_match_fk',
                                                                        how='left')

        self.toolbox = GENERALToolBox(self.data_provider)
        self.commontools = PEPSICOUKCommonToolBox(self.data_provider, self.rds_conn)

        self.all_templates = self.commontools.all_templates
        self.custom_entities = self.commontools.custom_entities
        self.on_display_products = self.commontools.on_display_products
        self.exclusion_template = self.commontools.exclusion_template
        self.filtered_scif = self.commontools.filtered_scif.copy()
        self.filtered_matches = self.commontools.filtered_matches.copy()
        self.filtered_matches = self.filtered_matches.merge(self.probe_groups, on='probe_match_fk', how='left')

        self.filtered_scif_secondary = self.commontools.filtered_scif_secondary.copy()
        self.filtered_matches_secondary = self.commontools.filtered_matches_secondary.copy()

        self.scene_bay_shelf_product = self.commontools.scene_bay_shelf_product
        self.ps_data = PsDataProvider(self.data_provider, self.output)
        self.full_store_info = self.commontools.full_store_info.copy()
        self.external_targets = self.commontools.external_targets
        self.assortment = Assortment(self.commontools.data_provider, self.output)
        self.lvl3_ass_result = self.get_lvl3_relevant_assortment_result()
        self.own_manuf_fk = self.all_products[self.all_products['manufacturer_name'] == self.PEPSICO]['manufacturer_fk'].values[0]

        self.scene_kpi_results = self.get_results_of_scene_level_kpis()
        self.kpi_results_check = pd.DataFrame(columns=['kpi_fk', 'numerator', 'denominator', 'result', 'score',
                                                       'context'])
        self.sos_vs_target_targets = self.construct_sos_vs_target_base_df()

        self.all_targets_unpacked = self.commontools.all_targets_unpacked.copy()
        self.block_results = pd.DataFrame(columns=['Group Name', 'Score'])
        self.hero_type_custom_entity_df = self.get_hero_type_custom_entity_df()
Example #10
 def __init__(self, data_provider, output):
     self.output = output
     self.data_provider = data_provider
     self.common_v2 = Common(self.data_provider)
     self.all_products = self.data_provider[Data.ALL_PRODUCTS]
     self.store_id = self.data_provider[Data.STORE_FK]
     self.scif = self.data_provider[Data.SCENE_ITEM_FACTS]
     self.assortment = Assortment(self.data_provider, self.output)
     self.own_manufacturer_fk = int(self.data_provider.own_manufacturer.param_value.values[0])
     self.db_handler = DBHandler(self.data_provider.project_name, self.data_provider.session_uid)
     self.previous_oos_results = self.db_handler.get_last_session_oos_results()
     self.kpi_result_types = self.db_handler.get_kpi_result_value()
     self.oos_store_results = list()
     self.initial_scif = self.scif.copy()
Example #11
    def __init__(self, data_provider, output):
        self.output = output
        self.data_provider = data_provider
        self.project_name = self.data_provider.project_name
        self.commonV2 = CommonV2(self.data_provider)
        self.common = Common(self.data_provider)
        self.assortment = Assortment(self.data_provider, self.output)

        self.own_manuf_fk = int(
            self.data_provider.own_manufacturer.param_value.values[0])
        self.store_info = self.data_provider[Data.STORE_INFO]
        self.new_kpi_static_data = self.commonV2.get_new_kpi_static_data()
        self.all_products_include_deleted = self.data_provider[
            Data.ALL_PRODUCTS_INCLUDING_DELETED]
        self.visit_date = self.data_provider[Data.VISIT_DATE]
Example #12
    def __init__(self, data_provider, common, output, template_path):
        self.output = output
        self.data_provider = data_provider
        self.common = common
        self.project_name = self.data_provider.project_name
        self.session_uid = self.data_provider.session_uid
        self.session_id = self.data_provider.session_id
        self.session_info = self.data_provider[Data.SESSION_INFO]
        self.scene_info = self.data_provider[Data.SCENES_INFO]
        self.scene_results = self.data_provider[Data.SCENE_KPI_RESULTS]
        self.templates = self.data_provider[Data.TEMPLATES]
        self.products = self.data_provider[Data.PRODUCTS]
        self.all_products = self.data_provider[Data.ALL_PRODUCTS]
        self.match_product_in_scene = self.data_provider[Data.MATCHES]
        self.mpis = self.make_mpis()
        self.visit_date = self.data_provider[Data.VISIT_DATE]
        self.store_info = self.data_provider[Data.STORE_INFO]
        self.store_id = self.data_provider[Data.STORE_FK]
        self.jump_shelves = pd.read_excel(template_path).T.to_dict('index')
        self.scif = self.generate_scif()
        self.scif['store_fk'] = self.store_id
        self.scif = self.scif[~(self.scif['product_type'] == Const.IRRELEVANT)]
        self.rds_conn = PSProjectConnector(self.project_name,
                                           DbUsers.CalculationEng)
        self.kpis = self.load_kpis()
        self.kpi_static_data = self.common.get_kpi_static_data()
        self.dist_oos = self.dist_kpi_to_oos_kpi()
        self.manufacturer_fk = int(
            self.data_provider[Data.OWN_MANUFACTURER].iloc[0, 1])
        self.gp_manufacturer = self.get_gp_manufacturer()
        self.gp_categories = self.get_gp_categories()
        self.gp_brands = self.get_gp_brands()
        self.all_man = self.scif[['manufacturer_name', 'manufacturer_fk']].set_index('manufacturer_name') \
                                                                          ['manufacturer_fk'].to_dict()
        self.all_brands = self.scif[[
            'brand_name', 'brand_fk'
        ]].set_index('brand_name')['brand_fk'].to_dict()
        self.man_fk_filter = {
            'manufacturer_name': list(self.gp_manufacturer.keys())
        }
        self.cat_filter = {'category': list(self.gp_categories.keys())}
        self.brand_filter = {'brand_name': list(self.gp_brands.keys())}
        self.all_brands_filter = {'brand_name': list(self.all_brands.keys())}
        self.all_man_filter = {'manufacturer_name': list(self.all_man.keys())}
        self.kpi_results = []

        self.assortment = Assortment(self.data_provider, self.output)
        self.assortment.scif = self.scif
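Note that this example also points the Assortment at the toolbox's own generated scif (self.assortment.scif = self.scif), presumably so the assortment calculation runs against the same filtered scene item facts the other KPIs use rather than the raw data-provider frame.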
Example #13
 def __init__(self, data_provider, output, common):
     self.output = output
     self.data_provider = data_provider
     self.common = common
     self.project_name = self.data_provider.project_name
     self.session_uid = self.data_provider.session_uid
     self.products = self.data_provider[Data.PRODUCTS]
     self.all_products = self.data_provider[Data.ALL_PRODUCTS]
     self.match_product_in_scene = self.data_provider[Data.MATCHES]
     self.visit_date = self.data_provider[Data.VISIT_DATE]
     self.session_info = self.data_provider[Data.SESSION_INFO]
     self.scene_info = self.data_provider[Data.SCENES_INFO]
     self.store_id = self.data_provider[Data.STORE_FK]
     self.scif = self.data_provider[Data.SCENE_ITEM_FACTS]
     self.rds_conn = PSProjectConnector(self.project_name,
                                        DbUsers.CalculationEng)
     self.kpi_static_data = self.common.get_kpi_static_data()
     self.kpi_results_queries = []
     self.manufacturer_fk = self.products['manufacturer_fk'][
         self.products['manufacturer_name'] ==
         'MONDELEZ INTERNATIONAL INC'].iloc[0]
     self.store_assortment = pd.DataFrame()
     self.assortment = Assortment(self.data_provider, common=self.common)
     self.store_number = self.get_store_number()
     self.ps_data_provider = PsDataProvider(self.data_provider, self.output)
     self.custom_entities = self.ps_data_provider.get_custom_entities(
         Const.PPG_ENTITY_TYPE_FK)
Example #14
 def __init__(self, data_provider, output):
     self.output = output
     self.data_provider = data_provider
     self.common_v2 = CommonV2(self.data_provider)
     self.common_v1 = CommonV1(self.data_provider)
     self.data_provider.common_v2 = self.common_v2
     self.data_provider.common_v1 = self.common_v1
     self.project_name = self.data_provider.project_name
     self.session_uid = self.data_provider.session_uid
     self.products = self.data_provider[Data.PRODUCTS]
     self.all_products = self.data_provider[Data.ALL_PRODUCTS]
     self.match_product_in_scene = self.data_provider[Data.MATCHES]
     self.visit_date = self.data_provider[Data.VISIT_DATE]
     self.session_info = self.data_provider[Data.SESSION_INFO]
     self.scene_info = self.data_provider[Data.SCENES_INFO]
     self.store_id = self.data_provider[Data.STORE_FK]
     self.scif = self.data_provider[Data.SCENE_ITEM_FACTS]
     self.rds_conn = PSProjectConnector(self.project_name,
                                        DbUsers.CalculationEng)
     self.channel = self.get_store_channel(self.store_id)
     self.kpi_static_data = self.common_v2.get_kpi_static_data()
     self.data_provider.kpi_sheets = {}
     self.kpi_sheets = self.data_provider.kpi_sheets
     self.old_kpi_static_data = self.common_v1.get_kpi_static_data()
     for name in SHEETS_NAME:
         parsed_template = ParseTemplates.parse_template(TEMPLATE_PATH,
                                                         sheet_name=name)
         self.kpi_sheets[name] = parsed_template[parsed_template['Channel']
                                                 == self.channel]
     self.data_provider.sos = SOS(self.data_provider, output=None)
     self.data_provider.assortment = Assortment(self.data_provider,
                                                output=None)
Example #15
 def main_calculation(self):
     """
     This function calculates the KPI results.
     """
     self.calculate_share_of_shelf()
     self.calculate_count_of_display()
     Assortment(self.data_provider, self.output,
                common=self.common).main_assortment_calculation()
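This is the only example here that builds the Assortment inline and delegates the whole flow to main_assortment_calculation(). Most of the other examples keep the instance on the toolbox and drive the aggregation themselves; roughly (a hedged sketch based on Examples #16 and #20, not code from this project):

    # Hedged sketch of the more common wiring seen elsewhere on this page.
    self.assortment = Assortment(self.data_provider, self.output)    # in __init__
    lvl3_result = self.assortment.calculate_lvl3_assortment()        # in main_calculation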
Example #16
 def __init__(self, data_provider, output):
     self.output = output
     self.data_provider = data_provider
     self.common = Common(self.data_provider)
     self.project_name = self.data_provider.project_name
     self.session_uid = self.data_provider.session_uid
     self.products = self.data_provider[Data.PRODUCTS]
     self.all_products = self.data_provider[Data.ALL_PRODUCTS]
     self.match_product_in_scene = self.data_provider[Data.MATCHES]
     self.visit_date = self.data_provider[Data.VISIT_DATE]
     self.session_info = self.data_provider[Data.SESSION_INFO]
     self.scene_info = self.data_provider[Data.SCENES_INFO]
     self.store_id = self.data_provider[Data.STORE_FK]
     self.scif = self.data_provider[Data.SCENE_ITEM_FACTS]
     self.rds_conn = PSProjectConnector(self.project_name,
                                        DbUsers.CalculationEng)
     self.kpi_static_data = self.common.get_new_kpi_static_data()
     self.kpi_results_queries = []
     self.assortment = Assortment(self.data_provider, self.output)
Example #17
 def __init__(self, output, data_provider):
     super(StraussfritolayilUtil, self).__init__(data_provider)
     self.output = output
     self.common = Common(self.data_provider)
     self.project_name = self.data_provider.project_name
     self.session_uid = self.data_provider.session_uid
     self.ps_data = PsDataProvider(self.data_provider, self.output)
     self.products = self.data_provider[Data.PRODUCTS]
     self.all_products = self.data_provider[Data.ALL_PRODUCTS]
     self.scif = self.data_provider[Data.SCENE_ITEM_FACTS]
     self.brand_mix_df = self.get_brand_mix_df()
     self.add_sub_brand_to_scif()
     self.add_brand_mix_to_scif()
     self.match_probe_in_scene = self.ps_data.get_product_special_attribute_data(self.session_uid)
     self.match_product_in_scene = self.data_provider[Data.MATCHES]
     if not self.match_product_in_scene.empty:
         self.match_product_in_scene = self.match_product_in_scene.merge(self.scif[Consts.RELEVENT_FIELDS],
                                                                         on=["scene_fk", "product_fk"], how="left")
         self.filter_scif_and_mpis_to_contain_only_primary_shelf()
     else:
         unique_fields = [ele for ele in Consts.RELEVENT_FIELDS if ele not in ["product_fk", "scene_fk"]]
         self.match_product_in_scene = pd.concat([self.match_product_in_scene,
                                                  pd.DataFrame(columns=unique_fields)], axis=1)
     self.match_product_in_scene_wo_hangers = self.exclude_special_attribute_products(df=self.match_product_in_scene)
     self.visit_date = self.data_provider[Data.VISIT_DATE]
     self.session_info = self.data_provider[Data.SESSION_INFO]
     self.scene_info = self.data_provider[Data.SCENES_INFO]
     self.store_info = self.data_provider[Data.STORE_INFO]
     self.additional_attribute_2 = self.store_info[Consts.ADDITIONAL_ATTRIBUTE_2].values[0]
     self.additional_attribute_3 = self.store_info[Consts.ADDITIONAL_ATTRIBUTE_3].values[0]
     self.additional_attribute_4 = self.store_info[Consts.ADDITIONAL_ATTRIBUTE_4].values[0]
     self.store_id = self.store_info['store_fk'].values[0] if self.store_info['store_fk'] is not None else 0
     self.rds_conn = PSProjectConnector(self.project_name, DbUsers.CalculationEng)
     self.toolbox = GENERALToolBox(self.data_provider)
     self.kpi_external_targets = self.ps_data.get_kpi_external_targets(key_fields=Consts.KEY_FIELDS,
                                                                       data_fields=Consts.DATA_FIELDS)
     self.filter_external_targets()
     self.assortment = Assortment(self.data_provider, self.output)
     self.lvl3_assortment = self.set_updated_assortment()
     self.own_manuf_fk = int(self.data_provider.own_manufacturer.param_value.values[0])
     self.own_manufacturer_matches_wo_hangers = self.match_product_in_scene_wo_hangers[
         self.match_product_in_scene_wo_hangers['manufacturer_fk'] == self.own_manuf_fk]
Example #18
 def __init__(self, data_provider, output):
     self.output = output
     self.data_provider = data_provider
     self.common = Common(self.data_provider)
     self.common_v1 = CommonV1(self.data_provider)
     self.project_name = self.data_provider.project_name
     self.session_uid = self.data_provider.session_uid
     self.products = self.data_provider[Data.PRODUCTS]
     self.all_products = self.data_provider[Data.ALL_PRODUCTS]
     self.match_product_in_scene = self.data_provider[Data.MATCHES]
     self.visit_date = self.data_provider[Data.VISIT_DATE]
     self.session_info = self.data_provider[Data.SESSION_INFO]
     self.scene_info = self.data_provider[Data.SCENES_INFO]
     self.store_id = self.data_provider[Data.STORE_FK]
     self.store_info = self.data_provider[Data.STORE_INFO]
     self.visit_type = self.store_info[
         Const.ADDITIONAL_ATTRIBUTE_2].values[0]
     self.all_templates = self.data_provider[Data.ALL_TEMPLATES]
     self.scif = self.data_provider[Data.SCENE_ITEM_FACTS]
     self.scif = self.scif.loc[~(self.scif[Const.PRODUCT_TYPE]
                                 == Const.IRRELEVANT)]  # Vitaly's request
     self.rds_conn = PSProjectConnector(self.project_name,
                                        DbUsers.CalculationEng)
     self.kpi_static_data = self.common.get_kpi_static_data()
     self.kpi_results_queries = []
     self.k_engine = BaseCalculationsGroup(data_provider, output)
     self.toolbox = GENERALToolBox(data_provider)
     self.assortment = Assortment(self.data_provider,
                                  self.output,
                                  common=self.common_v1)
     if not self.scif.empty:
         self.pepsico_fk = self.get_relevant_pk_by_name(
             Const.MANUFACTURER, Const.PEPSICO)
         self.categories_to_calculate = self.get_relevant_categories_for_session(
         )
         self.main_shelves = self.get_main_shelves()
Example #19
    def __init__(self, data_provider, common, output):
        self.output = output
        self.data_provider = data_provider
        self.common = common
        self.project_name = self.data_provider.project_name
        self.session_uid = self.data_provider.session_uid
        self.session_info = self.data_provider[Data.SESSION_INFO]
        self.scene_info = self.data_provider[Data.SCENES_INFO]
        self.scene = self.scene_info.loc[0, 'scene_fk']
        self.templates = self.data_provider[Data.TEMPLATES]
        self.products = self.data_provider[Data.PRODUCTS]
        self.all_products = self.data_provider[Data.ALL_PRODUCTS]
        self.match_product_in_scene = self.data_provider[Data.MATCHES]

        self.visit_date = self.data_provider[Data.VISIT_DATE]
        self.store_id = self.data_provider[Data.STORE_FK]
        self.store_info = self.data_provider[Data.STORE_INFO]
        self.scif = self.data_provider[Data.SCENE_ITEM_FACTS]
        # self.scif = self.scif[~(self.scif['product_type'] == 'Irrelevant')]
        self.rds_conn = PSProjectConnector(self.project_name,
                                           DbUsers.CalculationEng)
        self.kpi_static_data = self.common.get_kpi_static_data()
        self.mdis = self.get_match_display_in_scene()
        self.mpis = self.get_mpis()
        self.manufacturer_fk = self.products['manufacturer_fk'][
            self.products['manufacturer_name'] ==
            'MONDELEZ INTERNATIONAL INC'].iloc[0]
        self.static_task_area_location = self.get_store_task_area()
        # self.vtw_points_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'Data',
        #                                 "VTW_POINTS_SCORE.xlsx")

        self.dmi_template = os.path.join(
            os.path.dirname(os.path.realpath(__file__)), '..', 'Data',
            "MondelezDMI_KPITemplatev7.1.xlsx")
        self.points_template = pd.read_excel(self.dmi_template,
                                             sheetname='VTW_POINTS')
        self.goldzone_template = pd.read_excel(self.dmi_template,
                                               sheetname='GOLD_ZONE')

        self.assortment = Assortment(self.data_provider, common=self.common)
        # self.ps_data_provider = PsDataProvider(self.data_provider, self.output)
        self.store_areas = self.get_store_area_df()
Example #20
class ToolBox(GlobalSessionToolBox):
    def __init__(self, data_provider, output):
        GlobalSessionToolBox.__init__(self, data_provider, output)
        self.assortment = Assortment(self.data_provider, self.output)

    def main_calculation(self):
        self.calculate_assortment()

    @kpi_runtime()
    def calculate_assortment(self):
        lvl3_result = self.assortment.calculate_lvl3_assortment()
        if lvl3_result.empty:
            return
        kpi_fks = lvl3_result[Consts.KPI_FK_LVL_2].unique()
        for kpi_fk in kpi_fks:
            df = lvl3_result[lvl3_result[Consts.KPI_FK_LVL_2] == kpi_fk]
            sku_kpi_fk = df[Consts.KPI_FK_LVL_3].values[0]
            assortment_group_fk = df[
                AssortmentGroupConsts.ASSORTMENT_GROUP_FK].values[0]
            assortment_fk = df[AssortmentProductConsts.ASSORTMENT_FK].values[0]
            df.apply(lambda row: self.write_to_db(
                fk=sku_kpi_fk,
                numerator_id=row[ProductsConsts.PRODUCT_FK],
                denominator_id=assortment_fk,
                result=row[Consts.IN_STORE],
                score=row[Consts.IN_STORE],
                identifier_parent=kpi_fk,
                should_enter=True),
                     axis=1)
            denominator = len(df)
            numerator = len(df[df[Consts.IN_STORE] == 1])
            result = numerator / float(denominator)
            self.write_to_db(fk=kpi_fk,
                             numerator_id=assortment_group_fk,
                             denominator_id=self.store_id,
                             result=result,
                             score=result,
                             numerator_result=numerator,
                             denominator_result=denominator,
                             identifier_result=kpi_fk)
Example #21
class CSTOREToolBox:

    def __init__(self, data_provider, output, common):
        self.output = output
        self.data_provider = data_provider
        self.common = common
        self.project_name = self.data_provider.project_name
        self.session_uid = self.data_provider.session_uid
        self.session_id = self.data_provider.session_id
        self.session_info = self.data_provider[Data.SESSION_INFO]
        self.scene_info = self.data_provider[Data.SCENES_INFO]
        self.scene_results = self.data_provider[Data.SCENE_KPI_RESULTS]
        self.templates = self.data_provider[Data.TEMPLATES]
        self.products = self.data_provider[Data.PRODUCTS]
        self.all_products = self.data_provider[Data.ALL_PRODUCTS]
        self.match_product_in_scene = self.data_provider[Data.MATCHES]
        self.visit_date = self.data_provider[Data.VISIT_DATE]
        self.store_info = self.data_provider[Data.STORE_INFO]
        self.store_id = self.data_provider[Data.STORE_FK]
        self.scif = self.data_provider[Data.SCENE_ITEM_FACTS]
        self.scif['store_fk'] = self.store_id
        self.scif = self.scif[~(self.scif['product_type'].isin([Const.IRRELEVANT, Const.EMPTY]))]
        self.scif = self.scif[self.scif['facings'] > 0]
        self.rds_conn = PSProjectConnector(self.project_name, DbUsers.CalculationEng)
        self.kpis = self.load_kpis()
        self.results_values = self.load_results_values()
        self.kpi_static_data = self.common.get_kpi_static_data()
        self.manufacturer_fk = int(self.data_provider[Data.OWN_MANUFACTURER].iloc[0, 1])

        self.kpi_results = []

        self.assortment = Assortment(self.data_provider, self.output)


    def main_calculation(self, *args, **kwargs):
        """
        This function calculates the KPI results.
        """
        # if not self.filter_df(self.scif, self.brand_filter).empty:
        self.calculate_displays()
        self.calculate_assortment()

        for result in self.kpi_results:
            self.write_to_db(**result)
        return

    def calculate_facings_sos(self):
        self.safety_func('Facings SOS', self.calculate_sos, [Const.FACING_SOS_KPI, {}])

    def safety_func(self, group, func, args):
        try:
            func(*args)
            Log.info('{} KPIs Calculated'.format(group))
        except Exception as e:
            Log.error('ERROR {} KPIs Failed to Calculate'.format(group))
            Log.error(e)

    def calculate_displays(self):
        relevant_kpis = self.kpis[self.kpis[Const.KPI_FAMILY] == Const.DISPLAY]
        relevant_kpis['num_types'] = self.name_to_col_name(relevant_kpis[Const.NUMERATOR])
        df_base = self.scif[self.scif['template_name'] == 'Displays - Mondelez Brands Only']
        df_base = df_base[df_base['manufacturer_fk'] == self.manufacturer_fk]
        df_base['numerator_result'], df_base['result'] = 1, 1
        if not df_base.empty:
            for i, kpi in relevant_kpis.iterrows():
                parent = relevant_kpis[relevant_kpis['type'] == Const.SOS_HIERARCHY[kpi['type']]] #  Note, parent is df, kpi is a series
                df = df_base.copy()
                df = self.update_and_rename_df(df, kpi, parent)
                df = self.transform_new_col(df, 'numerator_id', 'numerator_result')
                df.drop('scene_id', axis=1, inplace=True)
                df.drop_duplicates(inplace=True)
                df['result'] = df['numerator_result']
                if parent.empty:
                    df['denominator_id'] = self.store_id
                    df.drop('ident_parent', axis=1, inplace=True)
                self.update_results(df)
        else:
            df = pd.DataFrame([self.store_id], columns=['denominator_id'])
            df['numerator_id'] = self.manufacturer_fk
            df['numerator_result'], df['result'] = 0, 0
            df['kpi_name'] = [key for key, val in Const.SOS_HIERARCHY.items() if val == 'ihavenoparent'][0]
            self.update_results(df)

    def update_results(self, df):
        results = [val for key, val in df.to_dict('index').items()]
        self.kpi_results += results

    def update_and_rename_df(self, df, kpi, parent):
        df['ident_result'] = ['{}_{}'.format(row[kpi['num_types']], kpi['type'])
                              for i, row in df.iterrows()]
        df['ident_parent'] = ['{}_{}_{}'.format(row[kpi['num_types']], 'Parent', kpi['type'])
                              for i, row in df.iterrows()]
        parent_col = ['ident_parent']
        if not parent.empty:
            df['ident_parent'] = ['{}_{}'.format(row[parent['num_types'].iloc[0]], parent['type'].iloc[0])
                                  for i, row in df.iterrows()]  #parent is a df, hence the iloc
        df = df[['scene_id', 'numerator_result', 'result', kpi['num_types'], 'ident_result'] + parent_col]
        df.drop_duplicates(inplace=True)
        df.rename(columns={kpi['num_types']: 'numerator_id'}, inplace=True)
        df['kpi_name'] = kpi['type']
        return df

    def name_to_col_name(self, col):
        return ['{}_fk'.format(num) for num in col]

    def transform_new_col(self, df, group_col, sum_col):
        df[sum_col] = df.groupby(group_col)[sum_col].transform('sum')
        return df

    def grouper(self, filter, df):
        return self.filter_df(df, filter).groupby(filter.keys()[0])

    def calculate_assortment(self):
        lvl3_results = self.assortment.calculate_lvl3_assortment()
        if not lvl3_results.empty:
            for kpi in lvl3_results['kpi_fk_lvl3'].unique():
                lvl3_result = lvl3_results[lvl3_results['kpi_fk_lvl3']==kpi]
                lvl3_result['target'] = 1
                lvlx_result = pd.DataFrame()
                lvl1_result = pd.DataFrame()

                # For Dist, they have assortments, but want the results by category
                # and since there is only one policy per store (barring new which is
                # handled elsewhere) we will just pretend that category_fk is the
                # level 2 assortment group.  God rest the soul of whoever needs
                # to implement additional policies.
                if kpi == 4000:
                    lvl3_result = lvl3_result.set_index('product_fk').join(self.all_products.set_index('product_fk')
                                                                           ['category_fk']).reset_index()\
                                                                            .drop_duplicates()
                    lvl3_result = lvl3_result.rename(columns={'assortment_group_fk': 'ass_grp_fk',
                                                              'category_fk': 'assortment_group_fk'})
                    lvlx_result = self.assortment_additional(lvl3_result)

                lvl2_result = self.assortment.calculate_lvl2_assortment(lvl3_result)
                if not lvl2_result['kpi_fk_lvl1'].any():
                    # lvl3_result['assortment_group_fk'] = self.manufacturer_fk
                    lvl2_result['assortment_group_fk'] = self.manufacturer_fk
                    lvl2_result['assortment_super_group_fk'] = self.store_id
                else:
                    lvl1_result = self.assortment.calculate_lvl1_assortment(lvl2_result)
                    lvl1_result['total'] = lvl2_result['total'].sum()
                    lvl1_result['passes'] = lvl2_result['passes'].sum()
                    lvl1_result['num_id'] = self.manufacturer_fk
                    lvl1_result['den_id'] = self.store_id

                lvl3_result['in_store'] = lvl3_result['in_store'].apply(lambda x:
                                                                        self.results_values[Const.RESULTS_TYPE_DICT[x]])

                self.parse_assortment_results(lvl3_result, 'kpi_fk_lvl3', 'product_fk', 'in_store', 'assortment_group_fk',
                                              'target', None, 'assortment_group_fk')
                self.parse_assortment_results(lvlx_result, 'kpi_fk_lvl3', 'product_fk', 'in_store', 'assortment_group_fk',
                                              'target', None, 'assortment_group_fk')
                self.parse_assortment_results(lvl2_result, 'kpi_fk_lvl2', 'assortment_group_fk', 'passes', 'assortment_fk',
                                              'total', 'assortment_group_fk', 'assortment_super_group_fk')
                self.parse_assortment_results(lvl1_result, 'kpi_fk_lvl1', 'num_id', 'passes',
                                              'den_id', 'total', 'assortment_super_group_fk', None)

            Log.info('Assortment KPIs Calculated')

    def parse_assortment_results(self, df, kpi_col, num_id_col, num_col, den_id_col, den_col, self_id, parent):
        for i, row in df.iterrows():
            kpi_res = {'kpi_fk': row[kpi_col],
                       'numerator_id': row[num_id_col],
                       'numerator_result': row[num_col],
                       'denominator_id': row[den_id_col],
                       'denominator_result': row[den_col],
                       'score': self.safe_divide(row[num_col], row[den_col]),
                       'result': self.safe_divide(row[num_col], row[den_col]),
                       'ident_result': row[self_id] if self_id else None,
                       'ident_parent': row[parent] if parent else None}
            self.kpi_results.append(kpi_res)

    def assortment_additional(self, lvl3_result):
        assort = set(lvl3_result['product_fk'])
        additional_sku_df = self.scif[~self.scif['product_fk'].isin(assort)]
        additional_sku_df = additional_sku_df[additional_sku_df['manufacturer_fk'] == self.manufacturer_fk]
        additional_sku_df = additional_sku_df[additional_sku_df['facings'] != 0][['product_fk', 'category_fk']]
        additional_sku_df['kpi_fk_lvl3'] = lvl3_result['kpi_fk_lvl3'].values[0]
        additional_sku_df['in_store'] = self.results_values['Additional']
        additional_sku_df['target'] = 0
        additional_sku_df = additional_sku_df.rename(columns={'category_fk': 'assortment_group_fk'})
        return additional_sku_df

    def safe_divide(self, num, den):
        res = num
        if num <= den:
            res = round((float(num) / den) * 100, 2) if num and den else 0
            res = '{:.2f}'.format(res)
        return res
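    # Added note (not in the original source): safe_divide returns a formatted
    # percentage *string* (e.g. '37.50') when num <= den, and returns the raw
    # numerator unchanged when num > den.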

    @staticmethod
    def filter_df(df, filters, exclude=0):
        cols = set(df.columns)
        for key, val in filters.items():
            if key not in cols:
                return pd.DataFrame()
            if not isinstance(val, list):
                val = [val]
            if exclude:
                df = df[~df[key].isin(val)]
            else:
                df = df[df[key].isin(val)]
        return df

    def load_kpis(self):
        return pd.read_sql_query(Const.KPI_QUERY, self.rds_conn.db)

    def load_results_values(self):
        return pd.read_sql_query(Const.RESULT_TYPE_QUERY, self.rds_conn.db).set_index('value')['pk'].to_dict()


    def write_to_db(self, kpi_name=None, score=0, result=None, target=None, numerator_result=None, scene_result_fk=None,
                    denominator_result=None, numerator_id=999, denominator_id=999, ident_result=None, ident_parent=None,
                    kpi_fk=None):
        """
        Writes a single KPI result to the DB via the common writer.
        :param kpi_name: str - KPI type, used to resolve kpi_fk when kpi_fk is not passed
        :param kpi_fk: int - KPI pk; takes precedence over kpi_name
        :param score: float
        :param result: str or float
        """
        if not kpi_fk and kpi_name:
            kpi_fk = self.common.get_kpi_fk_by_kpi_type(kpi_name)
        self.common.write_to_db_result(fk=kpi_fk, score=score, result=result, should_enter=True, target=target,
                                       numerator_result=numerator_result, denominator_result=denominator_result,
                                       numerator_id=numerator_id, denominator_id=denominator_id,
                                       identifier_result=ident_result, identifier_parent=ident_parent,
                                       scene_result_fk=scene_result_fk)
Example #22
class ToolBox(GlobalSessionToolBox):
    def __init__(self, data_provider, output):
        GlobalSessionToolBox.__init__(self, data_provider, output)
        self.own_manufacturer_fk = int(self.data_provider.own_manufacturer.param_value.values[0])
        self.parser = Parser
        self.all_products = self.data_provider[Data.ALL_PRODUCTS]
        self.assortment = Assortment(self.data_provider, self.output)
        self.ps_data = PsDataProvider(self.data_provider, self.output)
        self.kpi_external_targets = self.ps_data.get_kpi_external_targets(key_fields=Consts.KEY_FIELDS,
                                                                          data_fields=Consts.DATA_FIELDS)
    def main_calculation(self):
        self.calculate_score_sos()
        self.calculate_oos_and_distribution(assortment_type="Core")
        self.calculate_oos_and_distribution(assortment_type="Launch")
        self.calculate_oos_and_distribution(assortment_type="Focus")
        self.calculate_hierarchy_sos(calculation_type='FACINGS')
        self.calculate_hierarchy_sos(calculation_type='LINEAR')

    @kpi_runtime()
    def calculate_oos_and_distribution(self, assortment_type):
        dis_numerator = total_facings = 0
        oos_store_kpi_fk = self.common.get_kpi_fk_by_kpi_type(kpi_type=assortment_type + Consts.OOS)
        oos_sku_kpi_fk = self.common.get_kpi_fk_by_kpi_type(kpi_type=assortment_type + Consts.OOS_SKU)
        dis_store_kpi_fk = self.common.get_kpi_fk_by_kpi_type(kpi_type=assortment_type + Consts.DISTRIBUTION)
        dis_cat_kpi_fk = self.common.get_kpi_fk_by_kpi_type(kpi_type=assortment_type + Consts.DISTRIBUTION_CAT)
        dis_sku_kpi_fk = self.common.get_kpi_fk_by_kpi_type(kpi_type=assortment_type + Consts.DISTRIBUTION_SKU)
        assortment_df = self.assortment.get_lvl3_relevant_ass()
        assortment_df = assortment_df[assortment_df['kpi_fk_lvl3'] == dis_sku_kpi_fk]
        product_fks = assortment_df['product_fk'].tolist()
        categories = list(set(self.all_products[self.all_products['product_fk'].isin(product_fks)]['category_fk']))
        categories_dict = dict.fromkeys(categories, (0, 0))

        # sku level distribution
        for sku in product_fks:
            # 2 for distributed and 1 for oos
            category_fk = self.all_products[self.all_products['product_fk'] == sku]['category_fk'].values[0]
            product_df = self.scif[self.scif['product_fk'] == sku]
            if product_df.empty:
                categories_dict[category_fk] = map(sum, zip(categories_dict[category_fk], [0, 1]))
                result = 1
                facings = 0
                # Saving OOS only if product wasn't in store
                self.common.write_to_db_result(fk=oos_sku_kpi_fk, numerator_id=sku, denominator_id=category_fk,
                                               result=result, numerator_result=result, denominator_result=result,
                                               score=facings, identifier_parent=assortment_type + "_OOS",
                                               should_enter=True)
            else:
                categories_dict[category_fk] = map(sum, zip(categories_dict[category_fk], [1, 1]))
                result = 2
                facings = product_df['facings'].values[0]
                dis_numerator += 1
                total_facings += facings
            self.common.write_to_db_result(fk=dis_sku_kpi_fk, numerator_id=sku, denominator_id=category_fk,
                                           result=result, numerator_result=result, denominator_result=result,
                                           score=facings, should_enter=True,
                                           identifier_parent=assortment_type + "_DIS_CAT_{}".format(str(category_fk)))

        # category level distribution
        for category_fk in categories_dict.keys():
            cat_numerator, cat_denominator = categories_dict[category_fk]
            cat_result = self.get_result(cat_numerator, cat_denominator)
            self.common.write_to_db_result(fk=dis_cat_kpi_fk, numerator_id=category_fk,
                                           denominator_id=self.store_id, result=cat_result, should_enter=True,
                                           numerator_result=cat_numerator, denominator_result=cat_denominator,
                                           score=cat_result, identifier_parent=assortment_type + "_DIS",
                                           identifier_result=assortment_type + "_DIS_CAT_{}".format(str(category_fk)))

        # store level oos and distribution
        denominator = len(product_fks)
        dis_result = self.get_result(dis_numerator, denominator)
        oos_result = 1 - dis_result
        oos_numerator = denominator - dis_numerator
        self.common.write_to_db_result(fk=oos_store_kpi_fk, numerator_id=self.own_manufacturer_fk,
                                       denominator_id=self.store_id, result=oos_result, numerator_result=oos_numerator,
                                       denominator_result=denominator, score=total_facings,
                                       identifier_result=assortment_type + "_OOS")
        self.common.write_to_db_result(fk=dis_store_kpi_fk, numerator_id=self.own_manufacturer_fk,
                                       denominator_id=self.store_id, result=dis_result, numerator_result=dis_numerator,
                                       denominator_result=denominator, score=total_facings,
                                       identifier_result=assortment_type + "_DIS")

    def get_kpi_fks(self, kpis_list):
        for kpi in kpis_list:
            self.common.get_kpi_fk_by_kpi_type(kpi_type=kpi)

    @staticmethod
    def calculate_sos_res(numerator, denominator):
        if denominator == 0:
            return 0, 0, 0
        result = round(numerator / float(denominator), 3)
        return result, numerator, denominator

    @kpi_runtime()
    def calculate_score_sos(self):
        relevant_template = self.kpi_external_targets[self.kpi_external_targets[ExternalTargetsConsts.OPERATION_TYPE]
                                                      == Consts.SOS_KPIS]
        relevant_rows = relevant_template.copy()
        lsos_score_kpi_fk = self.common.get_kpi_fk_by_kpi_type(Consts.LSOS_SCORE_KPI)
        store_denominator = len(relevant_rows)
        store_numerator = 0
        for i, kpi_row in relevant_template.iterrows():
            kpi_fk, num_type, num_value, deno_type, deno_value, target, target_range = kpi_row[Consts.RELEVANT_FIELDS]
            numerator_filters, denominator_filters = self.get_num_and_den_filters(num_type, num_value, deno_type,
                                                                                  deno_value)
            # Only straussil SKUs
            numerator_filters['manufacturer_fk'] = self.own_manufacturer_fk
            denominator_filters['manufacturer_fk'] = self.own_manufacturer_fk
            numerator_df = self.parser.filter_df(conditions=numerator_filters, data_frame_to_filter=self.scif)
            denominator_df = self.parser.filter_df(conditions=denominator_filters, data_frame_to_filter=self.scif)
            numerator_result = numerator_df['gross_len_ign_stack'].sum()
            denominator_result = denominator_df['gross_len_ign_stack'].sum()
            lsos_result = self.get_result(numerator_result, denominator_result)
            score = 1 if ((target - target_range) <= lsos_result <= (target + target_range)) else 0
            store_numerator += score
            self.common.write_to_db_result(fk=kpi_fk, numerator_id=self.own_manufacturer_fk,
                                           denominator_id=self.store_id, should_enter=True, target=target,
                                           numerator_result=numerator_result, denominator_result=denominator_result,
                                           result=lsos_result, score=score, identifier_parent='LSOS_SCORE',
                                           weight=target_range)
        store_result = self.get_result(store_numerator, store_denominator)
        self.common.write_to_db_result(fk=lsos_score_kpi_fk, numerator_id=self.own_manufacturer_fk,
                                       denominator_id=self.store_id, should_enter=True, target=store_denominator,
                                       numerator_result=store_numerator, denominator_result=store_denominator,
                                       result=store_numerator, score=store_result, identifier_result='LSOS_SCORE')

    @staticmethod
    def get_num_and_den_filters(numerator_type, numerator_value, denominator_type, denominator_value):
        if type(numerator_value) != list:
            numerator_value = [numerator_value]
        if type(denominator_value) != list:
            denominator_value = [denominator_value]
        numerator_filters = {numerator_type: numerator_value}
        denominator_filters = {denominator_type: denominator_value}
        return numerator_filters, denominator_filters

    @staticmethod
    def get_result(numerator, denominator):
        if denominator == 0:
            return 0
        else:
            return round(numerator / float(denominator), 4)
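    # Illustrative values for the two helpers above (added, not in the original):
    #   get_result(3, 4)        -> 0.75
    #   get_result(1, 0)        -> 0        (zero-denominator guard)
    #   calculate_sos_res(1, 3) -> (0.333, 1, 3)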

    def calculate_hierarchy_sos(self, calculation_type):
        brand_kpi_fk = self.common.get_kpi_fk_by_kpi_type(kpi_type=(calculation_type + Consts.SOS_BY_BRAND))
        brand_category_kpi_fk = self.common.get_kpi_fk_by_kpi_type(kpi_type=(calculation_type +
                                                                             Consts.SOS_BY_CAT_BRAND))
        sku_kpi_fk = self.common.get_kpi_fk_by_kpi_type(kpi_type=(calculation_type + Consts.SOS_BY_CAT_BRAND_SKU))
        calculation_param = "facings_ign_stack" if calculation_type == 'FACINGS' else "gross_len_ign_stack"
        sos_df = self.parser.filter_df(conditions={'rlv_sos_sc': 1, 'product_type': ['SKU', 'Empty']},
                                       data_frame_to_filter=self.scif)
        # brand level sos
        session_brands = set(sos_df['brand_fk'])
        brand_den = sos_df[calculation_param].sum()
        for brand_fk in session_brands:
            filters = {'brand_fk': brand_fk}
            brand_df = self.parser.filter_df(conditions=filters, data_frame_to_filter=sos_df)
            if brand_df.empty:
                continue
            manufacturer_fk = brand_df['manufacturer_fk'].values[0]
            brand_num = brand_df[calculation_param].sum()
            if brand_num == 0:
                continue
            brand_res, brand_num, brand_den = self.calculate_sos_res(brand_num, brand_den)
            self.common.write_to_db_result(fk=brand_kpi_fk, numerator_id=brand_fk,
                                           denominator_id=manufacturer_fk,
                                           result=brand_res, numerator_result=brand_num, denominator_result=brand_den,
                                           score=brand_res,
                                           identifier_result="{}_SOS_brand_{}".format(calculation_type, str(brand_fk)))
            # brand-category level sos
            brand_categories = set(self.parser.filter_df(conditions=filters,
                                                         data_frame_to_filter=sos_df)['category_fk'])
            for category_fk in brand_categories:
                cat_den = self.parser.filter_df(conditions={'category_fk': category_fk},
                                                data_frame_to_filter=sos_df)[calculation_param].sum()
                filters['category_fk'] = category_fk
                category_df = self.parser.filter_df(conditions=filters, data_frame_to_filter=sos_df)
                cat_num = category_df[calculation_param].sum()
                if cat_num == 0:
                    continue
                cat_res, cat_num, cat_den = self.calculate_sos_res(cat_num, cat_den)
                self.common.write_to_db_result(fk=brand_category_kpi_fk, numerator_id=brand_fk,
                                               context_id=manufacturer_fk,
                                               denominator_id=category_fk, result=cat_res, numerator_result=cat_num,
                                               should_enter=True, denominator_result=cat_den, score=cat_res,
                                               identifier_parent="{}_SOS_brand_{}".format(calculation_type,
                                                                                          str(brand_fk)),
                                               identifier_result="{}_SOS_cat_{}_brand_{}".format(calculation_type,
                                                                                                 str(category_fk),
                                                                                                 str(brand_fk)))
                product_fks = set(self.parser.filter_df(conditions=filters, data_frame_to_filter=sos_df)['product_fk'])
                for sku in product_fks:
                    filters['product_fk'] = sku
                    product_df = self.parser.filter_df(conditions=filters, data_frame_to_filter=sos_df)
                    sku_num = product_df[calculation_param].sum()
                    if sku_num == 0:
                        continue
                    sku_result, sku_num, sku_den = self.calculate_sos_res(sku_num, cat_num)
                    self.common.write_to_db_result(fk=sku_kpi_fk, numerator_id=sku, denominator_id=brand_fk,
                                                   result=sku_result, numerator_result=sku_num, should_enter=True,
                                                   denominator_result=sku_den, score=sku_result,
                                                   context_id=category_fk, weight=manufacturer_fk,
                                                   identifier_parent="{}_SOS_cat_{}_brand_{}".format(calculation_type,
                                                                                                     str(category_fk),
                                                                                                     str(brand_fk)))
                del filters['product_fk']
            del filters['category_fk']
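
# A minimal, self-contained sketch of the hierarchy-SOS ratio logic above, written with plain
# pandas instead of self.parser / self.scif. The toy frame, its column values and the resulting
# shares are hypothetical and only illustrate the arithmetic, not this project's data.
import pandas as pd

toy_scif = pd.DataFrame([
    # brand_fk, category_fk, product_fk, gross_len_ign_stack
    (1, 10, 100, 300.0),
    (1, 10, 101, 100.0),
    (1, 20, 102, 200.0),
    (2, 10, 103, 400.0),
], columns=['brand_fk', 'category_fk', 'product_fk', 'gross_len_ign_stack'])

# Brand-level SOS: each brand's linear length over the total session length.
total_len = toy_scif['gross_len_ign_stack'].sum()
brand_sos = (toy_scif.groupby('brand_fk')['gross_len_ign_stack'].sum() / total_len).round(4)
print(brand_sos)  # brand 1 -> 0.6, brand 2 -> 0.4

# Brand-in-category SOS: each brand's length within a category over that category's total length.
brand_cat = toy_scif.groupby(['brand_fk', 'category_fk'], as_index=False)['gross_len_ign_stack'].sum()
brand_cat['category_total'] = brand_cat.groupby('category_fk')['gross_len_ign_stack'].transform('sum')
brand_cat['sos'] = (brand_cat['gross_len_ign_stack'] / brand_cat['category_total']).round(4)
print(brand_cat)  # (1, 10) -> 0.5, (1, 20) -> 1.0, (2, 10) -> 0.5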
Ejemplo n.º 23
0
class CCKHToolBox(CCKHConsts):
    LEVEL1 = 1
    LEVEL2 = 2
    LEVEL3 = 3

    def __init__(self, data_provider, output):
        self.k_engine = BaseCalculationsScript(data_provider, output)
        self.output = output
        self.data_provider = data_provider
        self.project_name = self.data_provider.project_name
        self.session_uid = self.data_provider.session_uid
        self.products = self.data_provider[Data.PRODUCTS]
        self.all_products = self.data_provider[Data.ALL_PRODUCTS]
        self.match_product_in_scene = self.data_provider[Data.MATCHES]
        self.visit_date = self.data_provider[Data.VISIT_DATE]
        self.session_info = self.data_provider[Data.SESSION_INFO]
        self.scene_info = self.data_provider[Data.SCENES_INFO]
        self.store_id = self.data_provider[Data.STORE_FK]
        self.store_info = self.data_provider[Data.STORE_INFO]
        self.store_type = self.store_info['store_type'].iloc[0]
        self.scif = self.data_provider[Data.SCENE_ITEM_FACTS]
        self.rds_conn = PSProjectConnector(self.project_name, DbUsers.CalculationEng)
        self.general_tools = CCKHGENERALToolBox(self.data_provider, self.output)
        self.template = CCKHTemplateConsts()
        self.kpi_static_data = self.get_kpi_static_data()
        self.kpi_results_queries = []
        self.commonV2 = CommonV2(self.data_provider)
        self.kpi_new_static_data = self.commonV2.get_new_kpi_static_data()
        self.manufacturer = int(self.data_provider.own_manufacturer.param_value.values[0])
        self.ps_data_provider = PsDataProvider(self.data_provider, self.output)
        self.external_targets = self.ps_data_provider.get_kpi_external_targets()
        self.assortment = Assortment(self.data_provider, self.output)
        self.templates_info = self.external_targets[self.external_targets[CCKHTemplateConsts.TEMPLATE_OPERATION] ==
                                                    CCKHTemplateConsts.BASIC_SHEET]
        self.visibility_info = self.external_targets[self.external_targets[CCKHTemplateConsts.TEMPLATE_OPERATION]
                                                     == CCKHTemplateConsts.VISIBILITY_SHEET]
        self.cooler_info = self.external_targets[self.external_targets[CCKHTemplateConsts.TEMPLATE_OPERATION]
                                                 == CCKHTemplateConsts.COOLER_SHEET]

    def get_kpi_static_data(self):
        """
        This function extracts the static KPI data and saves it into one global data frame.
        The data is taken from static.kpi / static.atomic_kpi / static.kpi_set.
        """
        query = CCKHQueries.get_all_kpi_data()
        kpi_static_data = pd.read_sql_query(query, self.rds_conn.db)
        return kpi_static_data

    def calculate_red_score(self):
        """
        This function calculates the KPI results.
        """
        scores_dict = {}
        results_list_new_db = []
        # assortments based calculations for availability
        availability_kpi_dict, availability_score_dict = self.get_availability_kpi_data()
        results_list_new_db.extend(availability_kpi_dict)
        scores_dict.update(availability_score_dict)
        # external target based calculations
        final_main_child = self.templates_info[self.templates_info['Tested KPI Group'] == self.RED_SCORE].iloc[0]
        all_kpi_dict, all_score_dict = self.get_all_kpi_data()
        results_list_new_db.extend(all_kpi_dict)
        scores_dict.update(all_score_dict)
        # aggregation to calculate red score
        max_points = sum([score[0] for score in scores_dict.values()])
        actual_points = sum([score[1] for score in scores_dict.values()])
        red_score = 0 if max_points == 0 else round((actual_points / float(max_points)) * 100, 2)
        set_fk = self.kpi_static_data['kpi_set_fk'].values[0]
        self.write_to_db_result(set_fk, (actual_points, max_points, red_score), level=self.LEVEL1)
        results_list_new_db.append(self.get_new_kpi_dict(self.get_new_kpi_fk(final_main_child), red_score,
                                                         red_score, actual_points, max_points,
                                                         target=max_points,
                                                         weight=actual_points,
                                                         identifier_result=self.RED_SCORE,
                                                         identifier_parent=CCKHConsts.WEB_HIERARCHY))
        results_list_new_db.append(self.get_new_kpi_dict(self.get_new_kpi_by_name(self.RED_SCORE), red_score,
                                                         red_score, actual_points, max_points,
                                                         target=max_points, weight=actual_points,
                                                         identifier_result=CCKHConsts.WEB_HIERARCHY))
        self.commonV2.save_json_to_new_tables(results_list_new_db)
        self.commonV2.commit_results_data()
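        # Worked example (hypothetical points, not from a real session):
        #   scores_dict = {'Availability': (10, 7.5), 'Cooler Space': (5, 5)}  # (max_points, actual_points)
        #   max_points = 15, actual_points = 12.5 -> red_score = round((12.5 / 15.0) * 100, 2) = 83.33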

    def get_availability_kpi_data(self):
        availability_results_list = []
        scores_dict = {}
        availability_assortment_df = self.assortment.calculate_lvl3_assortment()
        if availability_assortment_df.empty:
            Log.info("Availability KPI: session: {} does not have relevant assortments.".format(self.session_uid))
            return [], {}
        availability_kpi = self.kpi_new_static_data[self.kpi_new_static_data['type'].str.encode(
            HelperConsts.UTF8) == self.template.AVAILABILITY_KPI_TYPE.encode(HelperConsts.UTF8)].iloc[0]
        availability_new_kpi_fk = availability_kpi.pk
        scores = []
        # no need to call validate_kpi_run here: the Availability weight is always 1; that validation was added for the other KPIs
        kpi_availability_group = availability_assortment_df.groupby('kpi_fk_lvl2')
        for kpi_fk_lvl2, availability_kpi_df in kpi_availability_group:
            score, result, threshold = self.calculate_availability(availability_kpi_df)
            numerator, denominator, result_new_db = result, threshold, result
            if score is not False:
                if score is None:
                    points = 0
                else:
                    points = 1
                    scores.append((points, score))
                atomic_kpi_name = self.kpi_new_static_data[self.kpi_new_static_data['pk'] ==
                                                           kpi_fk_lvl2].iloc[0].type
                Log.info('Save availability atomic kpi: {}'.format(atomic_kpi_name))
                atomic_kpi = self.kpi_static_data[(self.kpi_static_data['kpi_name'].str.encode(HelperConsts.UTF8) ==
                                                   self.template.AVAILABILITY_KPI_TYPE.encode(HelperConsts.UTF8)) &
                                                  (self.kpi_static_data['atomic_kpi_name'] == atomic_kpi_name)]
                atomic_fk = atomic_kpi.iloc[0].atomic_kpi_fk
                self.write_to_db_result(atomic_fk, (score, result, threshold, points), level=self.LEVEL3)
                child_name = atomic_kpi.atomic_kpi_name.iloc[0]
                child_kpi_fk = self.get_new_kpi_by_name(child_name)  # kpi fk from new tables
                Log.info('Save availability for {} ID: {}'.format(child_name, child_kpi_fk))
                availability_results_list.append(self.get_new_kpi_dict(child_kpi_fk, result_new_db, score,
                                                                       numerator, denominator,
                                                                       weight=points, target=denominator,
                                                                       identifier_parent={
                                                                           'kpi_fk': availability_new_kpi_fk},
                                                                       ))
        max_points = sum([score[0] for score in scores])
        actual_points = sum([score[0] * score[1] for score in scores])
        percentage = 0 if max_points == 0 else round(
            (actual_points / float(max_points)) * 100, 2)

        kpi_fk = self.kpi_static_data[self.kpi_static_data['kpi_name'].str.encode(HelperConsts.UTF8) ==
                                      self.template.AVAILABILITY_KPI_TYPE.encode(HelperConsts.UTF8)][
            'kpi_fk'].values[0]
        self.write_to_db_result(kpi_fk, (actual_points, max_points,
                                         percentage), level=self.LEVEL2)
        scores_dict[self.template.AVAILABILITY_KPI_TYPE] = (max_points, actual_points)
        availability_results_list.append(self.get_new_kpi_dict(availability_new_kpi_fk, percentage, percentage,
                                                               actual_points, max_points,
                                                               target=max_points,
                                                               weight=actual_points,
                                                               identifier_result={
                                                                   'kpi_fk': availability_new_kpi_fk},
                                                               identifier_parent=self.RED_SCORE))
        return availability_results_list, scores_dict

    def get_all_kpi_data(self):
        results_list_new_db = []
        scores_dict = {}
        if self.templates_info.empty:
            Log.info("All KPI: session: {} doesnt have relevant external targets".format(self.session_uid))
            return [], {}
        main_children = self.templates_info[self.templates_info[self.template.KPI_GROUP] == self.RED_SCORE]
        for c in xrange(0, len(main_children)):
            main_child = main_children.iloc[c]
            main_child_kpi_fk = self.get_new_kpi_fk(main_child)  # kpi fk from new tables
            main_kpi_identifier = self.commonV2.get_dictionary(kpi_fk=main_child_kpi_fk)
            if self.validate_store_type(main_child):
                children = self.templates_info[self.templates_info
                                               [self.template.KPI_GROUP].str.encode(HelperConsts.UTF8) ==
                                               main_child[self.template.KPI_NAME].encode(HelperConsts.UTF8)]
                scores = []
                for i in xrange(len(children)):
                    child = children.iloc[i]
                    numerator, denominator, result_new_db, numerator_id = 0, 0, 0, None
                    kpi_weight = self.validate_kpi_run(child)
                    if kpi_weight is not False:
                        kpi_type = child[self.template.KPI_TYPE]
                        result = threshold = None
                        if kpi_type == self.SURVEY:
                            score, result, threshold, survey_answer_fk = self.check_survey(child)
                            threshold = None
                            numerator, denominator, result_new_db = 1, 1, score * 100
                            numerator_id = survey_answer_fk
                        elif kpi_type == self.SHARE_OF_SHELF:
                            score, result, threshold, result_new_db, numerator, denominator = \
                                self.calculate_share_of_shelf(child)
                        elif kpi_type == self.NUMBER_OF_SCENES:
                            scene_types = self.get_scene_types(child)
                            result = self.general_tools.calculate_number_of_scenes(
                                **{SCENE_TYPE_FIELD: scene_types})
                            numerator, denominator, result_new_db = result, 1, result
                            score = 1 if result >= 1 else 0
                        else:
                            Log.warning("KPI of type '{}' is not supported via assortments".format(kpi_type))
                            continue
                        if score is not False:
                            if score is None:
                                points = 0
                            else:
                                points = float(child[self.template.WEIGHT]
                                               ) if kpi_weight is True else kpi_weight
                                scores.append((points, score))
                            atomic_fk = self.get_atomic_fk(main_child, child)
                            self.write_to_db_result(
                                atomic_fk, (score, result, threshold, points), level=self.LEVEL3)
                            identifier_parent = main_kpi_identifier
                            child_name = '{}-{}'.format(child[self.template.TRANSLATION], 'Atomic') \
                                if main_child[self.template.KPI_NAME] == child[self.template.KPI_NAME] else child[
                                self.template.TRANSLATION]
                            child.set_value(self.template.TRANSLATION, child_name)
                            child_kpi_fk = self.get_new_kpi_fk(child)  # kpi fk from new tables
                            results_list_new_db.append(self.get_new_kpi_dict(child_kpi_fk, result_new_db, score,
                                                                             numerator, denominator,
                                                                             weight=points, target=denominator,
                                                                             identifier_parent=identifier_parent,
                                                                             numerator_id=numerator_id))
                max_points = sum([score[0] for score in scores])
                actual_points = sum([score[0] * score[1] for score in scores])
                percentage = 0 if max_points == 0 else round(
                    (actual_points / float(max_points)) * 100, 2)

                kpi_name = main_child[self.template.TRANSLATION]
                kpi_fk = self.kpi_static_data[self.kpi_static_data['kpi_name'].str.encode(HelperConsts.UTF8) ==
                                              kpi_name.encode(HelperConsts.UTF8)]['kpi_fk'].values[0]
                self.write_to_db_result(kpi_fk, (actual_points, max_points,
                                                 percentage), level=self.LEVEL2)
                scores_dict[kpi_name] = (max_points, actual_points)
                results_list_new_db.append(self.get_new_kpi_dict(main_child_kpi_fk, percentage, percentage,
                                                                 actual_points, max_points,
                                                                 target=max_points,
                                                                 weight=actual_points,
                                                                 identifier_result=main_kpi_identifier,
                                                                 identifier_parent=self.RED_SCORE))
        return results_list_new_db, scores_dict

    def validate_store_type(self, params):
        """
        This function checks whether a KPI is relevant for calculation, based on the session's store type.
        """
        validation = False
        stores = params[self.template.STORE_TYPE]
        if not stores:
            validation = True
        elif isinstance(stores, (str, unicode)):
            if stores.upper() == self.template.ALL or self.store_type in stores.split(self.template.SEPARATOR):
                validation = True
        elif isinstance(stores, list):
            if self.store_type in stores:
                validation = True
        return validation
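        # Usage sketch (hypothetical template values; assumes the template's ALL keyword is 'ALL'
        # and SEPARATOR is a comma):
        #   stores = 'ALL'                       -> True for every store type
        #   stores = 'Supermarket,Convenience'   -> True only if self.store_type is one of them
        #   stores = ['Horeca']                  -> True only if self.store_type == 'Horeca'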

    def validate_kpi_run(self, params):
        """
        This function checks whether an Atomic KPI needs to be calculated, based on the customized template.
        """
        weight = params[self.template.WEIGHT]
        if str(weight).isdigit():
            validation = True
        else:
            kpi_group = params[self.template.KPI_GROUP]
            if kpi_group == 'Visibility':
                custom_template = self.visibility_info
            elif kpi_group in ('Ambient Space', 'Cooler Space'):
                custom_template = self.cooler_info
            else:
                return False
            condition = (custom_template[self.template.KPI_NAME] == params[self.template.KPI_NAME])
            if self.template.KPI_GROUP in custom_template.keys() and kpi_group != 'Visibility':
                condition &= (custom_template[self.template.KPI_GROUP]
                              == params[self.template.KPI_GROUP])
            kpi_data = custom_template[condition]
            if kpi_data.empty:
                return False
            try:
                weight = \
                    kpi_data[
                        kpi_data['store_type'].str.encode(HelperConsts.UTF8) == self.store_type.encode(
                            HelperConsts.UTF8)][
                        'Target'].values[0]
                validation = float(weight)
            except ValueError:
                validation = False
            except IndexError:
                Log.warning("{kpi}: No matching external targets for this session: {sess}".format(
                    kpi=kpi_group,
                    sess=self.session_uid))
                validation = False
        return validation
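        # Return semantics: True  -> run the atomic with the weight taken from the template,
        #                    float -> run it with this store-specific target/weight from the external targets,
        #                    False -> skip the atomic for this session.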

    def get_atomic_fk(self, pillar, params):
        """
        This function gets an Atomic KPI's FK out of the template data.
        """
        atomic_name = params[self.template.TRANSLATION]
        kpi_name = pillar[self.template.TRANSLATION]
        atomic_fk = self.kpi_static_data[(self.kpi_static_data['kpi_name'].str.encode(HelperConsts.UTF8) ==
                                          kpi_name.encode(HelperConsts.UTF8)) & (
                                                 self.kpi_static_data['atomic_kpi_name'].str.encode(
                                                     HelperConsts.UTF8) == atomic_name.encode(HelperConsts.UTF8))][
            'atomic_kpi_fk']
        if atomic_fk.empty:
            return None
        return atomic_fk.values[0]

    def get_new_kpi_fk(self, params):
        """
        This function gets a KPI's FK from the new KPI table ('static.kpi_level_2') out of the template data.
        """
        kpi_name = params[self.template.TRANSLATION]
        return self.get_new_kpi_by_name(kpi_name)

    def get_new_kpi_by_name(self, kpi_name):
        kpi_fk = self.kpi_new_static_data[self.kpi_new_static_data['type'].str.encode(HelperConsts.UTF8) ==
                                          kpi_name.encode(HelperConsts.UTF8)]['pk']
        if kpi_fk.empty:
            return None
        return kpi_fk.values[0]

    def get_scene_types(self, params):
        """
        This function extracts the relevant scene types (==additional_attribute_1) from the template.
        """
        scene_types = params[self.template.SCENE_TYPE]
        if not scene_types or (isinstance(scene_types, (str, unicode)) and scene_types.upper() == self.template.ALL):
            return None
        return scene_types

    def calculate_availability(self, availability_kpi_df):
        """
        This function calculates Availability-typed Atomics from the assortment data and returns the score, the total facings count and the target.
        """
        all_targets = availability_kpi_df.target.unique()
        if len(all_targets) == 0:
            return False, False, False
        target = float(all_targets[0])
        total_facings_count = availability_kpi_df.facings.sum()
        score = 1 if total_facings_count >= target else 0
        return score, total_facings_count, target
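        # Worked example (hypothetical assortment rows): target = 3.0 and facings = [2, 2]
        #   -> total_facings_count = 4 >= 3.0 -> score = 1; with facings = [1] the score would be 0.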

    def check_survey(self, params):
        """
        This function calculates Survey-typed Atomics and returns the score, the given answer, the target answer and the survey answer FK.
        """
        survey_id = int(float(params[self.template.SURVEY_ID]))
        target_answer = params[self.template.SURVEY_ANSWER]
        survey_answer, survey_answer_fk = self.general_tools.get_survey_answer(survey_data=('question_fk', survey_id))
        score = 1 if survey_answer == target_answer else 0
        return score, survey_answer, target_answer, survey_answer_fk

    def calculate_share_of_shelf(self, params):
        """
        This function calculates Facings Share of Shelf typed Atomics and returns the score, result string, target and the raw numerator/denominator results.
        """
        if params[self.template.SOS_NUMERATOR].startswith('~'):
            sos_filters = {params[self.template.SOS_ENTITY]: (params[self.template.SOS_NUMERATOR][1:],
                                                              self.general_tools.EXCLUDE_FILTER)}
        else:
            sos_filters = {params[self.template.SOS_ENTITY]: params[self.template.SOS_NUMERATOR]}
        general_filters = {}
        scene_types = self.get_scene_types(params)
        if isinstance(scene_types, (str, unicode)):
            scene_types = scene_types.split(self.template.SEPARATOR)
        if scene_types:
            general_filters[SCENE_TYPE_FIELD] = scene_types
        products_to_exclude = params[self.template.PRODUCT_TYPES_TO_EXCLUDE]
        if products_to_exclude:
            general_filters['product_type'] = (products_to_exclude.split(self.template.SEPARATOR),
                                               self.general_tools.EXCLUDE_FILTER)
        numerator_result = self.general_tools.calculate_availability(
            **dict(sos_filters, **general_filters))
        denominator_result = self.general_tools.calculate_availability(**general_filters)
        if denominator_result == 0:
            result = 0
        else:
            result = round((numerator_result / float(denominator_result)) * 100, 2)
        if params[self.template.TARGET]:
            target = float(params[self.template.TARGET]) * 100
            score = 1 if result >= target else 0
        else:
            score = target = None
        result_string = '{0}% ({1}/{2})'.format(result, int(numerator_result), int(denominator_result))
        return score, result_string, target, result, numerator_result, denominator_result
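        # Worked example (hypothetical counts): numerator_result = 30, denominator_result = 120
        #   -> result = 25.0, result_string = '25.0% (30/120)'; a template target of 0.2 becomes
        #   target = 20.0, so score = 1 (25.0 >= 20.0).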

    def write_to_db_result(self, fk, score, level):
        """
        This function creates the result data frame of every KPI (atomic KPI/KPI/KPI set),
        and appends the insert SQL query into the queries' list, later to be written to the DB.
        """
        attributes = self.create_attributes_dict(fk, score, level)
        if level == self.LEVEL1:
            table = KPS_RESULT
        elif level == self.LEVEL2:
            table = KPK_RESULT
        elif level == self.LEVEL3:
            table = KPI_RESULT
        else:
            return
        query = insert(attributes, table)
        self.kpi_results_queries.append(query)

    def create_attributes_dict(self, fk, score, level):
        """
        This function creates a data frame with all attributes needed for saving in KPI results tables.

        """
        if level == self.LEVEL1:
            score_2, score_3, score_1 = score
            kpi_set_name = self.kpi_static_data[self.kpi_static_data['kpi_set_fk']
                                                == fk]['kpi_set_name'].values[0]
            attributes = pd.DataFrame([(kpi_set_name, self.session_uid, self.store_id, self.visit_date.isoformat(),
                                        format(score_1, '.2f'), score_2, score_3, fk)],
                                      columns=['kps_name', 'session_uid', 'store_fk', 'visit_date', 'score_1',
                                               'score_2', 'score_3', 'kpi_set_fk'])
        elif level == self.LEVEL2:
            score_2, score_3, score = score
            kpi_name = self.kpi_static_data[self.kpi_static_data['kpi_fk']
                                            == fk]['kpi_name'].values[0]
            attributes = pd.DataFrame([(self.session_uid, self.store_id, self.visit_date.isoformat(),
                                        fk, kpi_name, score, score_2, score_3)],
                                      columns=['session_uid', 'store_fk', 'visit_date', 'kpi_fk', 'kpk_name',
                                               'score', 'score_2', 'score_3'])
        elif level == self.LEVEL3:
            score, result, threshold, weight = score
            data = self.kpi_static_data[self.kpi_static_data['atomic_kpi_fk'] == fk]
            atomic_kpi_name = data['atomic_kpi_name'].values[0]
            kpi_fk = data['kpi_fk'].values[0]
            kpi_set_name = self.kpi_static_data[self.kpi_static_data['atomic_kpi_fk']
                                                == fk]['kpi_set_name'].values[0]
            attributes = pd.DataFrame([(atomic_kpi_name, self.session_uid, kpi_set_name, self.store_id,
                                        self.visit_date.isoformat(), datetime.utcnow().isoformat(),
                                        score, kpi_fk, fk, threshold, result, weight)],
                                      columns=['display_text', 'session_uid', 'kps_name', 'store_fk', 'visit_date',
                                               'calculation_time', 'score', 'kpi_fk', 'atomic_kpi_fk', 'threshold',
                                               'result', 'kpi_weight'])
        else:
            attributes = pd.DataFrame()
        return attributes.to_dict()

    @log_runtime('Saving to DB')
    def commit_results_data(self):
        """
        This function writes all KPI results to the DB, and commits the changes.
        """
        cur = self.rds_conn.db.cursor()
        delete_queries = CCKHQueries.get_delete_session_results_query(self.session_uid)
        for query in delete_queries:
            cur.execute(query)
        for query in self.kpi_results_queries:
            cur.execute(query)
        self.rds_conn.db.commit()

    def get_new_kpi_dict(self, kpi_fk, result, score, numerator_result, denominator_result,
                         score_after_action=0, weight=None, target=None, identifier_parent=None,
                         identifier_result=None, numerator_id=None):

        """
        This function takes all the KPI info, adds the relevant numerator_id and denominator_id, and returns a
        dictionary with the passed data.
             :param kpi_fk: pk of kpi
             :param result
             :param score
             :param numerator_result
             :param denominator_result
             :param weight
             :param target
             :param identifier_parent
             :param identifier_result
             :param numerator_id
             :param score_after_action

             :returns dict in format of db result
        """
        numerator_id = self.manufacturer if numerator_id is None else numerator_id
        denominator_id = self.store_id
        return {'fk': kpi_fk,
                SessionResultsConsts.NUMERATOR_ID: numerator_id,
                SessionResultsConsts.DENOMINATOR_ID: denominator_id,
                SessionResultsConsts.DENOMINATOR_RESULT: denominator_result,
                SessionResultsConsts.NUMERATOR_RESULT: numerator_result,
                SessionResultsConsts.RESULT: result, SessionResultsConsts.SCORE: score,
                SessionResultsConsts.TARGET: target, SessionResultsConsts.WEIGHT: weight,
                'identifier_parent': identifier_parent, 'identifier_result': identifier_result,
                'score_after_actions': score_after_action, 'should_enter': True,
                }
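        # Example of the dictionary this helper returns (hypothetical values; the literal key strings
        # come from SessionResultsConsts):
        #   {'fk': 123, NUMERATOR_ID: <own manufacturer fk>, DENOMINATOR_ID: <store fk>,
        #    NUMERATOR_RESULT: 7, DENOMINATOR_RESULT: 10, RESULT: 70.0, SCORE: 70.0, TARGET: 10,
        #    WEIGHT: 7, 'identifier_parent': <parent identifier>, 'identifier_result': None,
        #    'score_after_actions': 0, 'should_enter': True}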
Ejemplo n.º 24
0
 def __init__(self, data_provider, output):
     GlobalSessionToolBox.__init__(self, data_provider, output)
     self.assortment = Assortment(self.data_provider, self.output)
Ejemplo n.º 25
0
class ALTRIAUS_SANDToolBox:
    LEVEL1 = 1
    LEVEL2 = 2
    LEVEL3 = 3

    def __init__(self, data_provider, output):
        self.output = output
        self.data_provider = data_provider
        self.common = Common(self.data_provider)
        self.common_v2 = CommonV2(self.data_provider)
        self.project_name = self.data_provider.project_name
        self.session_uid = self.data_provider.session_uid
        self.products = self.data_provider[Data.PRODUCTS]
        self.all_products = self.data_provider[Data.ALL_PRODUCTS]
        self.match_product_in_scene = self.data_provider[Data.MATCHES]
        self.visit_date = self.data_provider[Data.VISIT_DATE]
        self.session_info = self.data_provider[Data.SESSION_INFO]
        self.scene_info = self.data_provider[Data.SCENES_INFO]
        self.store_id = self.data_provider[Data.STORE_FK]
        self.scif = self.data_provider[Data.SCENE_ITEM_FACTS]
        self.template_info = self.data_provider.all_templates
        self.rds_conn = ProjectConnector(self.project_name,
                                         DbUsers.CalculationEng)
        self.ps_data_provider = PsDataProvider(self.data_provider)
        self.thresholds_and_results = {}
        self.result_df = []
        self.writing_to_db_time = datetime.timedelta(0)
        self.kpi_results_queries = []
        self.potential_products = {}
        self.shelf_square_boundaries = {}
        self.average_shelf_values = {}
        self.kpi_static_data = self.common.get_kpi_static_data()
        self.kpi_results_queries = []
        self.all_template_data = parse_template(TEMPLATE_PATH, "KPI")
        self.spacing_template_data = parse_template(TEMPLATE_PATH, "Spacing")
        self.fixture_width_template = pd.read_excel(FIXTURE_WIDTH_TEMPLATE,
                                                    "Fixture Width",
                                                    dtype=pd.Int64Dtype())
        self.facings_to_feet_template = pd.read_excel(FIXTURE_WIDTH_TEMPLATE,
                                                      "Conversion Table",
                                                      dtype=pd.Int64Dtype())
        self.header_positions_template = pd.read_excel(FIXTURE_WIDTH_TEMPLATE,
                                                       "Header Positions")
        self.flip_sign_positions_template = pd.read_excel(
            FIXTURE_WIDTH_TEMPLATE, "Flip Sign Positions")
        self.custom_entity_data = self.ps_data_provider.get_custom_entities(
            1005)
        self.ignore_stacking = False
        self.facings_field = 'facings' if not self.ignore_stacking else 'facings_ign_stack'
        self.INCLUDE_FILTER = 1
        self.assortment = Assortment(self.data_provider,
                                     output=self.output,
                                     ps_data_provider=self.ps_data_provider)
        self.store_assortment = self.assortment.get_lvl3_relevant_ass()

        self.kpi_new_static_data = self.common.get_new_kpi_static_data()
        try:
            self.mpis = self.match_product_in_scene.merge(self.products, on='product_fk', suffixes=['', '_p']) \
                        .merge(self.scene_info, on='scene_fk', suffixes=['', '_s']) \
                          .merge(self.template_info, on='template_fk', suffixes=['', '_t'])
        except KeyError:
            Log.warning('MPIS cannot be generated!')
            return
        self.adp = AltriaDataProvider(self.data_provider)

    def main_calculation(self, *args, **kwargs):
        """
               This function calculates the KPI results.
               """
        self.calculate_signage_locations_and_widths('Cigarettes')
        self.calculate_signage_locations_and_widths('Smokeless')
        self.calculate_register_type()
        self.calculate_age_verification()
        self.calculate_juul_availability()
        self.calculate_assortment()
        self.calculate_vapor_kpis()

        kpi_set_fk = 2
        set_name = \
            self.kpi_static_data.loc[self.kpi_static_data['kpi_set_fk'] == kpi_set_fk]['kpi_set_name'].values[0]
        template_data = self.all_template_data.loc[
            self.all_template_data['KPI Level 1 Name'] == set_name]

        try:
            if set_name and not set(
                    template_data['Scene Types to Include'].values[0].encode(
                    ).split(', ')) & set(
                        self.scif['template_name'].unique().tolist()):
                Log.info('Category {} was not captured'.format(
                    template_data['category'].values[0]))
                return
        except Exception as e:
            Log.info(
                'KPI Set {} is not defined in the template'.format(set_name))

        for i, row in template_data.iterrows():
            try:
                kpi_name = row['KPI Level 2 Name']
                if kpi_name in KPI_LEVEL_2_cat_space:
                    # scene_type = [s for s in row['Scene_Type'].encode().split(', ')]
                    kpi_type = row['KPI Type']
                    scene_type = row['scene_type']

                    if row['Param1'] in ('Category', 'sub_category'):
                        category = row['Value1']

                        if kpi_type == 'category_space':
                            kpi_set_fk = \
                            self.kpi_new_static_data.loc[self.kpi_new_static_data['type'] == kpi_type]['pk'].values[0]
                            self.calculate_category_space(
                                kpi_set_fk,
                                kpi_name,
                                category,
                                scene_types=scene_type)

            except Exception as e:
                Log.info('KPI {} calculation failed due to {}'.format(
                    kpi_name.encode('utf-8'), e))
                continue
        return

    def calculate_vapor_kpis(self):
        category = 'Vapor'
        relevant_scif = self.scif[self.scif['template_name'] ==
                                  'JUUL Merchandising']
        if relevant_scif.empty:
            Log.info('No products found for {} category'.format(category))
            return

        relevant_scif = relevant_scif[
            (relevant_scif['category'].isin([category, 'POS']))
            & (relevant_scif['brand_name'] == 'Juul')]
        if relevant_scif.empty:
            return
        relevant_product_pks = relevant_scif[
            relevant_scif['product_type'] ==
            'SKU']['product_fk'].unique().tolist()
        relevant_scene_id = self.get_most_frequent_scene(relevant_scif)
        product_mpis = self.mpis[
            (self.mpis['product_fk'].isin(relevant_product_pks))
            & (self.mpis['scene_fk'] == relevant_scene_id)]

        if product_mpis.empty:
            Log.info('No products found for {} category'.format(category))
            return

        self.calculate_total_shelves(product_mpis, category, product_mpis)

        longest_shelf = \
            product_mpis[product_mpis['shelf_number'] ==
                         self.get_longest_shelf_number(product_mpis,
                                                       max_shelves_from_top=999)].sort_values(by='rect_x',
                                                                                              ascending=True)

        if longest_shelf.empty or longest_shelf.isnull().all().all():
            Log.warning(
                'The {} category items are in a non-standard location. The {} category will not be calculated.'
                .format(category, category))
            return

        relevant_pos = pd.DataFrame()
        self.calculate_fixture_width(relevant_pos, longest_shelf, category)
        return

    def calculate_assortment(self):
        if self.scif.empty or self.store_assortment.empty:
            Log.warning(
                'Unable to calculate assortment: SCIF or store assortment is empty'
            )
            return

        grouped_scif = self.scif.groupby('product_fk',
                                         as_index=False)['facings'].sum()
        assortment_with_facings = \
            pd.merge(self.store_assortment, grouped_scif, how='left', on='product_fk')
        assortment_with_facings.loc[:, 'facings'] = assortment_with_facings[
            'facings'].fillna(0)

        for product in assortment_with_facings.itertuples():
            score = 1 if product.facings > 0 else 0
            self.common_v2.write_to_db_result(
                product.kpi_fk_lvl3,
                numerator_id=product.product_fk,
                denominator_id=product.assortment_fk,
                numerator_result=product.facings,
                result=product.facings,
                score=score)

        number_of_skus_present = len(
            assortment_with_facings[assortment_with_facings['facings'] > 0])
        score = 1 if number_of_skus_present > 0 else 0
        kpi_fk = assortment_with_facings['kpi_fk_lvl2'].iloc[0]
        assortment_group_fk = assortment_with_facings[
            'assortment_group_fk'].iloc[0]
        self.common_v2.write_to_db_result(
            kpi_fk,
            numerator_id=assortment_group_fk,
            numerator_result=number_of_skus_present,
            denominator_result=len(assortment_with_facings),
            result=number_of_skus_present,
            score=score)
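        # Worked example (hypothetical assortment): 12 SKUs in the store assortment, 9 of them with
        # facings > 0 in SCIF -> each present SKU gets score = 1 at SKU level, and the group-level row
        # is written with numerator_result = 9, denominator_result = 12, score = 1.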

    def calculate_register_type(self):
        relevant_scif = self.scif[
            (self.scif['product_type'].isin(['POS', 'Other']))
            & (self.scif['category'] == 'POS Machinery')]
        if relevant_scif.empty:
            result = 0
            product_fk = 0
        else:
            result = 1
            product_fk = relevant_scif['product_fk'].iloc[0]

        kpi_fk = self.common_v2.get_kpi_fk_by_kpi_type('Register Type')
        self.common_v2.write_to_db_result(kpi_fk,
                                          numerator_id=product_fk,
                                          denominator_id=self.store_id,
                                          result=result)

    def calculate_age_verification(self):
        relevant_scif = self.scif[self.scif['brand_name'].isin(
            ['Age Verification'])]
        if relevant_scif.empty:
            result = 0
            product_fk = 0
        else:
            result = 1
            product_fk = relevant_scif['product_fk'].iloc[0]

        kpi_fk = self.common_v2.get_kpi_fk_by_kpi_type('Age Verification')
        self.common_v2.write_to_db_result(kpi_fk,
                                          numerator_id=product_fk,
                                          denominator_id=self.store_id,
                                          result=result)

    def calculate_juul_availability(self):
        relevant_scif = self.scif[(self.scif['brand_name'].isin(['Juul']))
                                  & (self.scif['product_type'].isin(['POS']))]
        juul_pos = relevant_scif['product_fk'].unique().tolist()

        kpi_fk = self.common_v2.get_kpi_fk_by_kpi_type('Juul POS Availability')
        if not juul_pos:
            return

        result = 1
        for product_fk in juul_pos:
            self.common_v2.write_to_db_result(kpi_fk,
                                              numerator_id=product_fk,
                                              denominator_id=self.store_id,
                                              result=result)

    def calculate_category_space(self,
                                 kpi_set_fk,
                                 kpi_name,
                                 category,
                                 scene_types=None):
        template = self.all_template_data.loc[
            (self.all_template_data['KPI Level 2 Name'] == kpi_name)
            & (self.all_template_data['Value1'] == category)]
        kpi_template = template.loc[template['KPI Level 2 Name'] == kpi_name]
        if kpi_template.empty:
            return None
        kpi_template = kpi_template.iloc[0]
        values_to_check = []

        filters = {
            'template_name': scene_types,
            'category': kpi_template['Value1']
        }

        if kpi_template['Value1'] in CATEGORIES:
            category_att = 'category'

        if kpi_template['Value1']:
            values_to_check = self.all_products.loc[
                self.all_products[category_att] ==
                kpi_template['Value1']][category_att].unique().tolist()

        for primary_filter in values_to_check:
            filters[kpi_template['Param1']] = primary_filter

            new_kpi_name = self.kpi_name_builder(kpi_name, **filters)

            result = self.calculate_category_space_length(
                new_kpi_name, **filters)
            filters['Category'] = kpi_template['KPI Level 2 Name']
            score = result
            numerator_id = self.products['category_fk'][
                self.products['category'] == kpi_template['Value1']].iloc[0]
            self.common_v2.write_to_db_result(kpi_set_fk,
                                              numerator_id=numerator_id,
                                              numerator_result=999,
                                              result=score,
                                              score=score)

    def calculate_category_space_length(self,
                                        kpi_name,
                                        threshold=0.5,
                                        retailer=None,
                                        exclude_pl=False,
                                        **filters):
        """
        :param threshold: The ratio for a bay to be counted as part of a category.
        :param filters: These are the parameters which the data frame is filtered by.
        :return: The total shelf length (in linear feet, per the spacing template) that the relevant facings occupy.
        """

        try:
            filtered_scif = self.scif[self.get_filter_condition(
                self.scif, **filters)]
            space_length = 0
            bay_values = []
            for scene in filtered_scif['scene_fk'].unique().tolist():
                scene_matches = self.mpis[self.mpis['scene_fk'] == scene]
                scene_filters = filters
                scene_filters['scene_fk'] = scene
                for bay in scene_matches['bay_number'].unique().tolist():
                    bay_total_linear = scene_matches.loc[
                        (scene_matches['bay_number'] == bay)
                        & (scene_matches['stacking_layer'] == 1) &
                        (scene_matches['status']
                         == 1)]['width_mm_advance'].sum()
                    scene_filters['bay_number'] = bay

                    tested_group_linear = scene_matches[
                        self.get_filter_condition(scene_matches,
                                                  **scene_filters)]

                    tested_group_linear_value = tested_group_linear[
                        'width_mm_advance'].sum()

                    if tested_group_linear_value:
                        bay_ratio = tested_group_linear_value / float(
                            bay_total_linear)
                    else:
                        bay_ratio = 0

                    if bay_ratio >= threshold:
                        category = filters['category']
                        max_facing = scene_matches.loc[
                            (scene_matches['bay_number'] == bay)
                            & (scene_matches['stacking_layer'] == 1
                               )]['facing_sequence_number'].max()
                        shelf_length = self.spacing_template_data.query(
                            'Category == "' + category + '" & Low <= "' +
                            str(max_facing) + '" & High >= "' +
                            str(max_facing) + '"')
                        shelf_length = int(shelf_length['Size'].iloc[-1])
                        bay_values.append(shelf_length)
                        space_length += shelf_length
        except Exception as e:
            Log.info('Linear Feet calculation failed due to {}'.format(e))
            space_length = 0

        return space_length
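        # Worked example (hypothetical bay, default threshold = 0.5): bay_total_linear = 1200 mm and
        # tested_group_linear_value = 800 mm -> bay_ratio ~= 0.67 >= 0.5, so the bay counts toward the
        # category and the shelf length mapped from the spacing template is added to space_length.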

    def get_filter_condition(self, df, **filters):
        """
        :param df: The data frame to be filtered.
        :param filters: These are the parameters which the data frame is filtered by.
                       Every parameter would be a tuple of the value and an include/exclude flag.
                       INPUT EXAMPLE (1):   manufacturer_name = ('Diageo', DIAGEOAUPNGROGENERALToolBox.INCLUDE_FILTER)
                       INPUT EXAMPLE (2):   manufacturer_name = 'Diageo'
        :return: a filtered Scene Item Facts data frame.
        """
        if not filters:
            return df['pk'].apply(bool)
        if self.facings_field in df.keys():
            filter_condition = (df[self.facings_field] > 0)
        else:
            filter_condition = None
        for field in filters.keys():
            if field in df.keys():
                if isinstance(filters[field], tuple):
                    value, exclude_or_include = filters[field]
                else:
                    value, exclude_or_include = filters[
                        field], self.INCLUDE_FILTER
                if not value:
                    continue
                if not isinstance(value, list):
                    value = [value]
                if exclude_or_include == self.INCLUDE_FILTER:
                    condition = (df[field].isin(value))
                elif exclude_or_include == self.EXCLUDE_FILTER:
                    condition = (~df[field].isin(value))
                elif exclude_or_include == self.CONTAIN_FILTER:
                    condition = (df[field].str.contains(value[0], regex=False))
                    for v in value[1:]:
                        condition |= df[field].str.contains(v, regex=False)
                else:
                    continue
                if filter_condition is None:
                    filter_condition = condition
                else:
                    filter_condition &= condition
            else:
                Log.warning('field {} is not in the Data Frame'.format(field))

        return filter_condition
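        # Usage sketch (hypothetical filter values; EXCLUDE_FILTER is assumed to be defined alongside
        # INCLUDE_FILTER on this class):
        #   condition = self.get_filter_condition(self.scif, category='Cigarettes',
        #                                         product_type=(['Empty', 'Other'], self.EXCLUDE_FILTER))
        #   filtered_scif = self.scif[condition]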

    def kpi_name_builder(self, kpi_name, **filters):
        """
        This function builds kpi name according to naming convention
        """
        for filter in filters.keys():
            if filter == 'template_name':
                continue
            kpi_name = kpi_name.replace('{' + filter + '}',
                                        str(filters[filter]))
            kpi_name = kpi_name.replace("'", "\'")
        return kpi_name
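        # Usage sketch (hypothetical names): a template KPI name such as 'Category Space - {category}'
        # with filters = {'category': 'Cigarettes', 'template_name': 'Tobacco Merchandising Space'}
        # becomes 'Category Space - Cigarettes'; 'template_name' is deliberately skipped.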

    def calculate_signage_locations_and_widths(self, category):
        excluded_types = ['Other', 'Irrelevant', 'Empty']
        relevant_scif = self.scif[self.scif['template_name'] ==
                                  'Tobacco Merchandising Space']
        if relevant_scif.empty:
            Log.info('No products found for {} category'.format(category))
            return
        # need to include scene_id from previous relevant_scif
        # also need to split this up into different categories, i.e. smokeless, cigarettes
        # need to figure out how to deal with POS from smokeless being included with cigarette MPIS

        # get relevant SKUs from the cigarettes category
        relevant_scif = relevant_scif[relevant_scif['category'].isin(
            [category, 'POS'])]
        relevant_product_pks = relevant_scif[
            relevant_scif['product_type'] ==
            'SKU']['product_fk'].unique().tolist()
        relevant_pos_pks = \
            relevant_scif[(relevant_scif['product_type'] == 'POS') &
                          ~(relevant_scif['brand_name'] == 'Age Verification') &
                          ~(relevant_scif['product_name'] == 'General POS Other')]['product_fk'].unique().tolist()
        other_product_and_pos_pks = \
            relevant_scif[relevant_scif['product_type'].isin(excluded_types)]['product_fk'].tolist()
        relevant_scene_id = self.get_most_frequent_scene(relevant_scif)
        product_mpis = self.mpis[
            (self.mpis['product_fk'].isin(relevant_product_pks))
            & (self.mpis['scene_fk'] == relevant_scene_id)]

        if product_mpis.empty:
            Log.info('No products found for {} category'.format(category))
            return

        self.calculate_total_shelves(product_mpis, category)

        longest_shelf = \
            product_mpis[product_mpis['shelf_number'] ==
                         self.get_longest_shelf_number(product_mpis)].sort_values(by='rect_x', ascending=True)

        if longest_shelf.empty or longest_shelf.isnull().all().all():
            Log.warning(
                'The {} category items are in a non-standard location. The {} category will not be calculated.'
                .format(category, category))
            return

        # demarcation_line = longest_shelf['rect_y'].median() old method, had bugs due to longest shelf being lower
        demarcation_line = product_mpis['rect_y'].min()

        exclusion_line = -9999
        excluded_mpis = self.mpis[
            ~(self.mpis['product_fk'].isin(relevant_pos_pks +
                                           relevant_product_pks +
                                           other_product_and_pos_pks))
            & (self.mpis['rect_x'] < longest_shelf['rect_x'].max()) &
            (self.mpis['rect_x'] > longest_shelf['rect_x'].min()) &
            (self.mpis['scene_fk'] == relevant_scene_id) &
            (self.mpis['rect_y'] < demarcation_line)]
        # we need this line for when SCIF and MPIS don't match
        excluded_mpis = excluded_mpis[~excluded_mpis['product_type'].
                                      isin(excluded_types)]

        if not excluded_mpis.empty:
            exclusion_line = excluded_mpis['rect_y'].max()

        # we need to get POS stuff that falls within the x-range of the longest shelf (which is limited by category)
        # we also need to account for the fact that the images suck, so we're going to add/subtract 5% of the
        # max/min values to allow for POS items that fall slightly out of the shelf length range
        correction_factor = 0.05
        correction_value = (longest_shelf['rect_x'].max() -
                            longest_shelf['rect_x'].min()) * correction_factor
        pos_mpis = self.mpis[
            (self.mpis['product_fk'].isin(relevant_pos_pks))
            & (self.mpis['rect_x'] <
               (longest_shelf['rect_x'].max() + correction_value)) &
            (self.mpis['rect_x'] >
             (longest_shelf['rect_x'].min() - correction_value)) &
            (self.mpis['scene_fk'] == relevant_scene_id)]
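        # e.g. (hypothetical coordinates) a longest shelf spanning rect_x 1000-3000 gives
        # correction_value = 0.05 * 2000 = 100, so POS tags with rect_x between 900 and 3100 are
        # still attributed to this fixture despite imperfect images.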

        # debug flag displays the polygon_mask graph; DO NOT SET TO TRUE WHEN DEPLOYING
        relevant_pos = self.adp.get_products_contained_in_displays(
            pos_mpis, y_axis_threshold=35, debug=False)

        if relevant_pos.empty:
            Log.warning(
                'No polygon mask was generated for {} category - cannot compute KPIs'
                .format(category))
            # we need to attempt to calculate fixture width, even if there's no polygon mask
            self.calculate_fixture_width(relevant_pos, longest_shelf, category)
            return

        relevant_pos = relevant_pos[[
            'product_fk', 'product_name', 'left_bound', 'right_bound',
            'center_x', 'center_y'
        ]]
        relevant_pos = relevant_pos.reindex(
            columns=relevant_pos.columns.tolist() +
            ['type', 'width', 'position'])
        relevant_pos['width'] = \
            relevant_pos.apply(lambda row: self.get_length_of_pos(row, longest_shelf, category), axis=1)
        relevant_pos['type'] = \
            relevant_pos['center_y'].apply(lambda x: 'Header' if exclusion_line < x < demarcation_line else 'Flip Sign')
        relevant_pos = relevant_pos.sort_values(['center_x'], ascending=True)
        relevant_pos = self.remove_duplicate_pos_tags(relevant_pos)
        # generate header positions
        if category == 'Cigarettes':
            number_of_headers = len(
                relevant_pos[relevant_pos['type'] == 'Header'])
            if number_of_headers > len(
                    self.header_positions_template['Cigarettes Positions'].
                    dropna()):
                Log.warning(
                    'Number of Headers for Cigarettes is greater than max number defined in template!'
                )
            elif number_of_headers > 0:
                header_position_list = [
                    position.strip()
                    for position in self.header_positions_template[
                        self.header_positions_template['Number of Headers'] ==
                        number_of_headers]
                    ['Cigarettes Positions'].iloc[0].split(',')
                ]
                relevant_pos.loc[relevant_pos['type'] == 'Header',
                                 ['position']] = header_position_list
        elif category == 'Smokeless':
            relevant_pos = self.check_menu_scene_recognition(relevant_pos)
            number_of_headers = len(
                relevant_pos[relevant_pos['type'] == 'Header'])
            if number_of_headers > len(
                    self.header_positions_template['Smokeless Positions'].
                    dropna()):
                Log.warning(
                    'Number of Headers for Smokeless is greater than max number defined in template!'
                )
            elif number_of_headers > 0:
                header_position_list = [
                    position.strip()
                    for position in self.header_positions_template[
                        self.header_positions_template['Number of Headers'] ==
                        number_of_headers]
                    ['Smokeless Positions'].iloc[0].split(',')
                ]
                relevant_pos.loc[relevant_pos['type'] == 'Header',
                                 ['position']] = header_position_list

            relevant_pos = self.get_menu_board_items(relevant_pos,
                                                     longest_shelf, pos_mpis)
        # generate flip-sign positions
        if category == 'Cigarettes':
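            # the template row whose 'Fixture Width (facings)' value is closest to the number of
            # facings on the longest shelf is selected via (series - value).abs().argsort()[:1];
            # dropna(axis=1) then keeps only the location columns defined for that width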
            relevant_template = \
                self.fixture_width_template.loc[(self.fixture_width_template['Fixture Width (facings)']
                                                 - len(longest_shelf)).abs().argsort()[:1]].dropna(axis=1)
            locations = relevant_template.columns[2:].tolist()
            right_bound = 0
            longest_shelf_copy = longest_shelf.copy()
            for location in locations:
                if right_bound > 0:
                    left_bound = right_bound + 1
                else:
                    left_bound = longest_shelf_copy.iloc[:relevant_template[
                        location].iloc[0]]['rect_x'].min()
                right_bound = longest_shelf_copy.iloc[:relevant_template[
                    location].iloc[0]]['rect_x'].max()
                if locations[-1] == location:
                    right_bound = right_bound + abs(right_bound * 0.05)
                flip_sign_pos = relevant_pos[
                    (relevant_pos['type'] == 'Flip Sign')
                    & (relevant_pos['center_x'] > left_bound) &
                    (relevant_pos['center_x'] < right_bound)]
                if flip_sign_pos.empty:
                    # add 'NO Flip Sign' product_fk
                    relevant_pos.loc[len(relevant_pos), ['position', 'product_fk', 'type']] = \
                        [location, NO_FLIP_SIGN_PK, 'Flip Sign']
                else:
                    relevant_pos.loc[flip_sign_pos.index,
                                     ['position']] = location
                longest_shelf_copy.drop(
                    longest_shelf_copy.iloc[:relevant_template[location].
                                            iloc[0]].index,
                    inplace=True)
        elif category == 'Smokeless':
            # if there are no flip signs found, there are no positions to assign
            number_of_flip_signs = len(
                relevant_pos[relevant_pos['type'] == 'Flip Sign'])
            if number_of_flip_signs > self.flip_sign_positions_template[
                    'Number of Flip Signs'].max():
                Log.warning(
                    'Number of Flip Signs for Smokeless is greater than max number defined in template!'
                )
            elif number_of_flip_signs > 0:
                flip_sign_position_list = [
                    position.strip()
                    for position in self.flip_sign_positions_template[
                        self.
                        flip_sign_positions_template['Number of Flip Signs'] ==
                        number_of_flip_signs]['Position'].iloc[0].split(',')
                ]
                relevant_pos.loc[relevant_pos['type'] == 'Flip Sign',
                                 ['position']] = flip_sign_position_list

            # store empty flip sign values
            for location in ['Secondary', 'Tertiary']:
                if location not in relevant_pos[
                        relevant_pos['type'] ==
                        'Flip Sign']['position'].tolist():
                    relevant_pos.loc[len(relevant_pos), ['position', 'product_fk', 'type']] = \
                        [location, NO_FLIP_SIGN_PK, 'Flip Sign']

        relevant_pos = relevant_pos.reindex(
            columns=relevant_pos.columns.tolist() + ['denominator_id'])

        # this is a bandaid fix that should be removed ->  'F7011A7C-1BB6-4007-826D-2B674BD99DAE'
        # removes POS items that were 'extra', i.e. more than max value in template
        # only affects smokeless
        relevant_pos.dropna(subset=['position'], inplace=True)

        relevant_pos.loc[:,
                         ['denominator_id']] = relevant_pos['position'].apply(
                             self.get_custom_entity_pk)

        for row in relevant_pos.itertuples():
            kpi_fk = self.common_v2.get_kpi_fk_by_kpi_name(row.type)
            self.common_v2.write_to_db_result(
                kpi_fk,
                numerator_id=row.product_fk,
                denominator_id=row.denominator_id,
                result=row.width,
                score=row.width)

        self.calculate_fixture_width(relevant_pos, longest_shelf, category)
        return

    def calculate_total_shelves(self,
                                longest_shelf,
                                category,
                                product_mpis=None):
        category_fk = self.get_category_fk_by_name(category)
        if product_mpis is None:
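            # fall back to all tags within the x-range of the longest shelf, in the scene that
            # appears most often for that shelf (mode of scene_fk, with NaNs treated as 0)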
            product_mpis = self.mpis[
                (self.mpis['rect_x'] > longest_shelf['rect_x'].min())
                & (self.mpis['rect_x'] < longest_shelf['rect_x'].max()) &
                (self.mpis['scene_fk']
                 == longest_shelf['scene_fk'].fillna(0).mode().iloc[0])]
        total_shelves = len(product_mpis['shelf_number'].unique())

        kpi_fk = self.common_v2.get_kpi_fk_by_kpi_name('Total Shelves')
        self.common_v2.write_to_db_result(kpi_fk,
                                          numerator_id=category_fk,
                                          denominator_id=self.store_id,
                                          result=total_shelves)

    def calculate_fixture_width(self, relevant_pos, longest_shelf, category):
        correction_factor = 1 if category == 'Smokeless' else 2
        longest_shelf = longest_shelf[longest_shelf['stacking_layer'] == 1]
        category_fk = self.get_category_fk_by_name(category)
        # this is needed to remove intentionally duplicated 'Menu Board' POS 'Headers'
        relevant_pos = relevant_pos.drop_duplicates(subset=['position'])
        # try:
        #     width = relevant_pos[relevant_pos['type'] == 'Header']['width'].sum()
        # except KeyError:
        #     # needed for when 'width' doesn't exist
        #     width = 0

        # if relevant_pos.empty or width == 0:
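        # illustrative example (assumed template values): with 23 facings on the longest shelf, a
        # Cigarettes correction factor of 2 and roughly 5 facings per foot in the template, the
        # fixture width comes out as round(25 / 5.0) = 5 feet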
        width = round(
            (len(longest_shelf) + correction_factor) /
            float(self.facings_to_feet_template[category + ' Facings'].iloc[0])
        )

        kpi_fk = self.common_v2.get_kpi_fk_by_kpi_name('Fixture Width')
        self.common_v2.write_to_db_result(kpi_fk,
                                          numerator_id=category_fk,
                                          denominator_id=self.store_id,
                                          result=width)

    def get_category_fk_by_name(self, category_name):
        return self.all_products[self.all_products['category'] ==
                                 category_name]['category_fk'].iloc[0]

    def check_menu_scene_recognition(self, relevant_pos):
        mdis = self.adp.get_match_display_in_scene()
        mdis = mdis[mdis['display_name'] == 'Menu POS']

        if mdis.empty:
            return relevant_pos

        dummy_sku_for_menu_pk = 9282  # 'Other (Smokeless Tobacco)'
        dummy_sku_for_menu_name = 'Menu POS (Scene Recognized Item)'
        location_type = 'Header'
        width = 1
        center_x = mdis['x'].iloc[0]
        center_y = mdis['y'].iloc[0]
        relevant_pos.loc[len(relevant_pos), ['product_fk', 'product_name', 'center_x', 'center_y', 'type', 'width']] = \
            [dummy_sku_for_menu_pk, dummy_sku_for_menu_name, center_x, center_y, location_type, width]
        relevant_pos = relevant_pos.sort_values(
            ['center_x'], ascending=True).reset_index(drop=True)
        return relevant_pos

    def get_menu_board_items(self, relevant_pos, longest_shelf, pos_mpis):
        # get the placeholder item
        menu_board_dummy = relevant_pos[relevant_pos['product_name'] ==
                                        'Menu POS (Scene Recognized Item)']

        # if no placeholder, this function isn't relevant
        if menu_board_dummy.empty:
            return relevant_pos

        center_x = menu_board_dummy['center_x'].iloc[0]
        center_y = menu_board_dummy['center_y'].iloc[0]
        position = menu_board_dummy['position'].iloc[0]

        demarcation_line = longest_shelf['rect_y'].min()
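        # mirror the top-shelf line around the menu tag's center_y: the search band extends as far
        # above the tag as the shelf line sits below it (in image coordinates y grows downward)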
        upper_demarcation_line = center_y - (demarcation_line - center_y)

        distance_in_facings = 2

        try:
            left_bound = longest_shelf[
                longest_shelf['rect_x'] < center_x].sort_values(
                    by=['rect_x'],
                    ascending=False)['rect_x'].iloc[int(distance_in_facings) -
                                                    1]
        except IndexError:
            # if there are no POS items found to the left of the 'Menu POS' scene recognition tag, use the tag itself
            # in theory this should never happen
            left_bound = center_x

        try:
            right_bound = longest_shelf[
                longest_shelf['rect_x'] > center_x].sort_values(
                    by=['rect_x'],
                    ascending=True)['rect_x'].iloc[int(distance_in_facings) -
                                                   1]
        except IndexError:
            # if there are no POS items found to the right of the 'Menu POS' scene recognition tag, use the tag itself
            # this is more likely to happen for the right bound than the left bound
            right_bound = center_x

        pos_mpis = pos_mpis[(pos_mpis['rect_x'] > left_bound)
                            & (pos_mpis['rect_x'] < right_bound) &
                            (pos_mpis['rect_y'] > upper_demarcation_line) &
                            (pos_mpis['rect_y'] < demarcation_line)]

        if pos_mpis.empty:
            return relevant_pos

        # remove the placeholder item
        relevant_pos = relevant_pos[~(relevant_pos['product_name'] ==
                                      'Menu POS (Scene Recognized Item)')]

        location_type = 'Header'
        width = 1

        for row in pos_mpis.itertuples():
            relevant_pos.loc[len(relevant_pos), ['product_fk', 'product_name', 'center_x',
                                                 'center_y', 'type', 'width', 'position']] = \
                [row.product_fk, row.product_name, row.rect_x, row.rect_y, location_type, width, position]

        return relevant_pos

    @staticmethod
    def remove_duplicate_pos_tags(relevant_pos_df):
        duplicate_results = \
            relevant_pos_df[relevant_pos_df.duplicated(subset=['left_bound', 'right_bound'], keep=False)]

        duplicate_results_without_other = duplicate_results[
            ~duplicate_results['product_name'].str.contains('Other')]

        results_without_duplicates = \
            relevant_pos_df[~relevant_pos_df.duplicated(subset=['left_bound', 'right_bound'], keep=False)]

        if duplicate_results_without_other.empty:
            return relevant_pos_df[~relevant_pos_df.duplicated(
                subset=['left_bound', 'right_bound'], keep='first')]
        else:
            results = pd.concat(
                [duplicate_results_without_other, results_without_duplicates])
            # we need to sort_index to fix the sort order to reflect center_x values
            return results.drop_duplicates(
                subset=['left_bound', 'right_bound']).sort_index()

    def get_custom_entity_pk(self, name):
        return self.custom_entity_data[self.custom_entity_data['name'] ==
                                       name]['pk'].iloc[0]

    def get_length_of_pos(self, row, longest_shelf, category):
        width_in_facings = len(
            longest_shelf[(longest_shelf['rect_x'] > row['left_bound']) &
                          (longest_shelf['rect_x'] < row['right_bound'])]) + 2
        category_facings = category + ' Facings'
        return self.facings_to_feet_template.loc[(
            self.facings_to_feet_template[category_facings] -
            width_in_facings).abs().argsort()[:1]]['POS Width (ft)'].iloc[0]

    @staticmethod
    def get_longest_shelf_number(relevant_mpis, max_shelves_from_top=3):
        # returns the shelf_number of the longest shelf
        try:
            longest_shelf = \
                relevant_mpis[relevant_mpis['shelf_number'] <= max_shelves_from_top].groupby('shelf_number').agg(
                    {'scene_match_fk': 'count'})['scene_match_fk'].idxmax()
        except ValueError:
            longest_shelf = pd.DataFrame()

        return longest_shelf

    @staticmethod
    def get_most_frequent_scene(relevant_scif):
        try:
            relevant_scene_id = relevant_scif['scene_id'].fillna(
                0).mode().iloc[0]
        except IndexError:
            relevant_scene_id = 0
        return relevant_scene_id

    def commit(self):
        self.common_v2.commit_results_data()
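Several of the lookups in the toolbox above (get_length_of_pos, the fixture-width fallback and the flip-sign template selection) rely on the same pandas idiom: subtract the target value from a numeric template column, take the absolute difference, argsort it and keep the first row, which selects the row closest to the target. Below is a minimal, self-contained sketch of that idiom; the column names mirror the ones used above, but the table values and the facings count are made up for illustration.

import pandas as pd

# illustrative facings-to-feet table; the values are invented, only the column names
# mirror the template used by get_length_of_pos above
facings_to_feet_template = pd.DataFrame({
    'Cigarettes Facings': [5, 10, 15, 20, 25],
    'POS Width (ft)': [1, 2, 3, 4, 5],
})

width_in_facings = 13  # e.g. facings counted between a POS tag's bounds, plus 2

# nearest-match lookup: pick the row whose 'Cigarettes Facings' is closest to width_in_facings
closest_row = facings_to_feet_template.loc[
    (facings_to_feet_template['Cigarettes Facings'] - width_in_facings).abs().argsort()[:1]]

print(closest_row['POS Width (ft)'].iloc[0])  # -> 3, i.e. a 15-facing-wide POS maps to 3 feet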
Example No. 26
0
class PEPSICORUToolBox:
    def __init__(self, data_provider, output):
        self.output = output
        self.data_provider = data_provider
        self.common = Common(self.data_provider)
        self.common_v1 = CommonV1(self.data_provider)
        self.project_name = self.data_provider.project_name
        self.session_uid = self.data_provider.session_uid
        self.products = self.data_provider[Data.PRODUCTS]
        self.all_products = self.data_provider[Data.ALL_PRODUCTS]
        self.match_product_in_scene = self.data_provider[Data.MATCHES]
        self.visit_date = self.data_provider[Data.VISIT_DATE]
        self.session_info = self.data_provider[Data.SESSION_INFO]
        self.scene_info = self.data_provider[Data.SCENES_INFO]
        self.store_id = self.data_provider[Data.STORE_FK]
        self.store_info = self.data_provider[Data.STORE_INFO]
        self.visit_type = self.store_info[
            Const.ADDITIONAL_ATTRIBUTE_2].values[0]
        self.all_templates = self.data_provider[Data.ALL_TEMPLATES]
        self.scif = self.data_provider[Data.SCENE_ITEM_FACTS]
        self.scif = self.scif.loc[~(self.scif[Const.PRODUCT_TYPE]
                                    == Const.IRRELEVANT)]  # Vitaly's request
        self.rds_conn = PSProjectConnector(self.project_name,
                                           DbUsers.CalculationEng)
        self.kpi_static_data = self.common.get_kpi_static_data()
        self.kpi_results_queries = []
        self.k_engine = BaseCalculationsGroup(data_provider, output)
        self.toolbox = GENERALToolBox(data_provider)
        self.assortment = Assortment(self.data_provider,
                                     self.output,
                                     common=self.common_v1)
        if not self.scif.empty:
            self.pepsico_fk = self.get_relevant_pk_by_name(
                Const.MANUFACTURER, Const.PEPSICO)
            self.categories_to_calculate = self.get_relevant_categories_for_session(
            )
            self.main_shelves = self.get_main_shelves()

    def main_calculation(self):
        """
        This function calculates the KPI results.
        """
        self.calculate_share_of_shelf()
        self.calculate_count_of_displays()
        self.calculate_assortment()

    def get_main_shelves(self):
        """
        This function returns a list with the main shelves of this session
        """
        main_shelves_template_groups = [
            group
            for group in self.scif[Const.TEMPLATE_GROUP].unique().tolist()
            if Const.MAIN_SHELF in group.upper()
        ]
        main_shelves = self.scif[self.scif[Const.TEMPLATE_GROUP].isin(
            main_shelves_template_groups)][
                Const.TEMPLATE_NAME].unique().tolist()
        return main_shelves

    def get_main_shelf_by_category(self, current_category):
        """
        This function gets a category and returns the relevant scene types (main shelves) for the SOS.
        :param current_category: One of the product's categories. E.g: Snacks.
        :return: The relevant scene types for the current category
        """
        main_shelves_for_category = []
        for main_shelf in self.main_shelves:
            if current_category.upper() in main_shelf.upper():
                main_shelves_for_category.append(main_shelf)
        return main_shelves_for_category

    @staticmethod
    def get_category_from_template_name(template_name):
        """
        This function gets a template name (scene_type) and returns its relevant category.
        :param template_name: The scene type.
        :return: category name
        """
        if Const.SNACKS.upper() in template_name.upper():
            return Const.SNACKS
        elif Const.BEVERAGES.upper() in template_name.upper():
            return Const.BEVERAGES
        elif Const.JUICES.upper() in template_name.upper():
            return Const.JUICES
        else:
            Log.warning(
                "Couldn't find a matching category for template name = {}".
                format(template_name))
            return None

    def get_relevant_categories_for_session(self):
        """
        This function returns a list of the relevant categories according to the store type.
        The parameter additional_attribute_2 defines the visit type for each store.
        We have 3 types: Visit LRB (Beverages and Juices), Visit Snack and Visit (= All of them).
        The function intersects the categories found in SCIF with the categories defined by the store type.
        :return: List of the relevant categories
        """
        categories_in_scif = self.scif[Const.CATEGORY].unique().tolist()
        if None in categories_in_scif:
            categories_in_scif.remove(None)
        if not categories_in_scif:
            Log.warning("No categories at scene item facts!")
            return []

        store_type = self.store_info[Const.ADDITIONAL_ATTRIBUTE_2].values[0]
        if not store_type:
            Log.warning(
                "Invalid additional_attribute_2 for store id = {}".format(
                    self.store_id))
            return []
        if Const.SNACKS.upper() in store_type.upper():
            relevant_categories = [Const.SNACKS]
        elif Const.LRB.upper() in store_type.upper():
            relevant_categories = [Const.JUICES, Const.BEVERAGES]
        else:
            relevant_categories = [Const.SNACKS, Const.JUICES, Const.BEVERAGES]
        categories_for_session = list(
            set(relevant_categories).intersection(set(categories_in_scif)))
        if not categories_for_session:
            Log.warning(
                "There aren't matching categories in scif for this store.")
        return categories_for_session

    def get_relevant_sub_categories_for_category(self, category):
        """
        This function returns a list of the relevant sub categories for the given category, based on the
        PepsiCo products on the main shelves in the session
        :param category: The relevant category
        :return: List of the relevant sub categories for this category
        """
        filtered_scif = self.scif.loc[
            (self.scif[Const.CATEGORY] == category)
            & (self.scif[Const.MANUFACTURER_NAME] == Const.PEPSICO) &
            (self.scif[Const.TEMPLATE_NAME].isin(self.main_shelves))]
        sub_categories = filtered_scif[Const.SUB_CATEGORY].unique().tolist()
        if None in sub_categories:
            sub_categories.remove(None)
        if not sub_categories:
            Log.warning("No relevant sub categories for category = {}".format(
                category))
        return sub_categories

    def get_relevant_brands_for_sub_category(self, sub_category):
        """
        This function returns a list of the relevant brands for the given sub category, based on the
        PepsiCo products on the main shelves in the session
        :param sub_category: The relevant sub category
        :return: List of the relevant brands for this category
        """
        filtered_scif = self.scif.loc[
            (self.scif[Const.SUB_CATEGORY] == sub_category)
            & (self.scif[Const.MANUFACTURER_NAME] == Const.PEPSICO) &
            (self.scif[Const.TEMPLATE_NAME].isin(self.main_shelves))]
        brands_list = filtered_scif[Const.BRAND_NAME].unique().tolist()
        if None in brands_list:
            brands_list.remove(None)
        if not brands_list:
            Log.warning("No relevant brands for sub category = {}".format(
                sub_category))
        return brands_list

    def get_relevant_pk_by_name(self, filter_by, filter_param):
        """
        This function gets a filter name and returns the relevant pk.
        If filter_by contains 'category', it is used as the field name directly because SCIF has no 'category_name' column
        :param filter_by: filter by name E.g: 'category', 'brand'.
        :param filter_param: The param to filter by. E.g: if filter_by = 'category', filter_param could be 'Snack'
        :return: The relevant pk
        """
        pk_field = filter_by + Const.FK
        field_name = filter_by + Const.NAME if Const.CATEGORY not in filter_by else filter_by
        return self.scif.loc[self.scif[field_name] ==
                             filter_param][pk_field].values[0]

    def get_target_for_count_of_displays(self):
        """
        This function reads the project's template and returns the targets for all of the levels.
        It iterates over the relevant row and aggregates the results per level.
        :return: has_targets (bool), store_target (int), category_targets (dict), scene_targets (dict),
        and the scene_type list from the template.
        """
        targets = pd.read_excel(TEMPLATE_PATH).fillna(0)
        store_number_1 = self.store_info['store_number_1'].values[0]
        if not store_number_1:
            Log.warning("No valid store number 1 for store_fk = {}".format(
                self.store_id))
            return
        targets = targets.loc[targets[Const.STORE_NUMBER_1] == store_number_1]
        has_targets = False
        store_target, category_targets, scene_targets, scene_types_from_template = None, None, None, None
        if not targets.empty:
            has_targets = True
            targets = targets.drop([Const.STORE_NAME, Const.STORE_NUMBER_1],
                                   axis=1)
            target_row = (targets.iloc[0])[(
                targets.iloc[0]) > 0]  # Takes only the ones with target > 0
            scene_types_from_template = targets.columns.tolist()
            scene_types_with_targets = target_row.keys().tolist()
            # Targets by store:
            store_target = target_row.sum()
            # Targets by category and scenes:
            category_targets = {key: 0 for key in self.categories_to_calculate}
            scene_targets = {key: 0 for key in scene_types_from_template}
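            # a scene type's target is added to every category whose name appears in the scene
            # type name, so the category target is the sum of its scene-type targets and the
            # store target (above) is the sum of all per-scene targets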
            for scene_type in scene_types_with_targets:
                for category in category_targets:
                    if category.upper() in scene_type.upper():
                        # category_targets[category] += 1
                        category_targets[category] += target_row[scene_type]
                scene_targets[scene_type] = target_row[scene_type]
        return has_targets, store_target, category_targets, scene_targets, scene_types_from_template

    def get_relevant_scene_types_from_list(self, scene_types_from_template):
        """
        There's a gap between the actual scene type name and the name in the template because of the visit type,
        so this function returns a dictionary that bridges it.
        :param scene_types_from_template: Scene type list from the template
        :return: A dictionary where the keys are the names from the templates and values are the actual names
        """
        scene_types_dict = dict.fromkeys(scene_types_from_template)
        relevant_templates_for_visit_type = self.all_templates.loc[(
            self.all_templates[Const.ADDITIONAL_ATTRIBUTE_2] == self.visit_type
        ) & (~self.all_templates[Const.TEMPLATE_NAME].isin(self.main_shelves)
             )][Const.TEMPLATE_NAME].unique().tolist()
        for scene_type in scene_types_from_template:
            for template in relevant_templates_for_visit_type:
                if scene_type.upper() in template.upper():
                    scene_types_dict[scene_type] = template
        # Remove irrelevant scene types from the dictionary
        for key in scene_types_dict.keys():
            if not scene_types_dict[key]:
                del scene_types_dict[key]
        return scene_types_dict

    def calculate_count_of_displays(self):
        """
        This function will calculate the Count of # of Pepsi Displays KPI
        :return:
        """
        # Note: store_target is an integer, scene_type_list is a list and the rest are dictionaries
        has_target, store_target, category_targets, scene_targets, scene_type_list = self.get_target_for_count_of_displays(
        )
        if not store_target:
            Log.warning(
                "No targets were defined for this store (pk = {})".format(
                    self.store_id))
            return
        # Filtering out the main shelves
        if has_target:
            relevant_scenes_dict = self.get_relevant_scene_types_from_list(
                scene_type_list)
            relevant_template_name_list = relevant_scenes_dict.values()
            filtered_scif = self.scif.loc[self.scif[Const.TEMPLATE_NAME].isin(
                relevant_template_name_list)]

            display_count_store_level_fk = self.common.get_kpi_fk_by_kpi_type(
                Const.DISPLAY_COUNT_STORE_LEVEL)
            scene_types_in_store = len(filtered_scif[Const.SCENE_FK].unique())
            identifier_parent_store_level = self.common.get_dictionary(
                kpi_fk=display_count_store_level_fk)
            count_store_level = 0

            # Calculate count of display - category_level
            display_count_category_level_fk = self.common.get_kpi_fk_by_kpi_type(
                Const.DISPLAY_COUNT_CATEGORY_LEVEL)
            for category in self.categories_to_calculate:
                current_category_target = category_targets[category]
                if not current_category_target:
                    continue
                category_fk = self.get_relevant_pk_by_name(
                    Const.CATEGORY, category)
                relevant_scenes = [
                    scene_type for scene_type in relevant_template_name_list
                    if category.upper() in scene_type.upper()
                ]
                filtered_scif_by_cat = filtered_scif.loc[filtered_scif[
                    Const.TEMPLATE_NAME].isin(relevant_scenes)]
                display_count_category_level_identifier = self.common.get_dictionary(
                    kpi_fk=display_count_category_level_fk, category=category)
                # scene_types_in_cate = 0
                # result_cat_level = 0
                # if not filtered_scif_by_cat.empty:
                #     scene_types_in_cate = len(filtered_scif_by_cat[Const.SCENE_FK].unique())
                #     result_cat_level = 1.0 if scene_types_in_cate >= current_category_target else scene_types_in_cate / float(
                #         current_category_target)
                # self.common.write_to_db_result(fk=display_count_category_level_fk, numerator_id=self.pepsico_fk,
                #                                numerator_result=scene_types_in_cate,
                #                                denominator_id=category_fk, denominator_result=current_category_target,
                #                                identifier_result=display_count_category_level_identifier,
                #                                identifier_parent=identifier_parent_store_level,
                #                                result=result_cat_level, should_enter=True)
                scene_count_in_cate = 0
                result_cat_level = 0
                if not filtered_scif_by_cat.empty:
                    actual_scene_names_in_cate = filtered_scif_by_cat[
                        Const.TEMPLATE_NAME].unique().tolist()
                    reverse_scene_dict = {}
                    for scene_type, actual_scene_name in relevant_scenes_dict.iteritems(
                    ):
                        for sc in actual_scene_names_in_cate:
                            if actual_scene_name == sc:
                                reverse_scene_dict[
                                    actual_scene_name] = scene_type
                    df = filtered_scif_by_cat[[
                        Const.TEMPLATE_NAME, 'scene_id'
                    ]].drop_duplicates()
                    df['scene_type'] = df[Const.TEMPLATE_NAME].apply(
                        lambda x: reverse_scene_dict.get(x))
                    by_scene_count_in_cat = df.groupby(['scene_type']).count()
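                    # each scene type contributes at most its own target to the category count,
                    # so an excess of one display type cannot offset a shortfall in another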
                    for i, row in by_scene_count_in_cat.iterrows():
                        scene_count_in_cate += scene_targets[i] if row[Const.TEMPLATE_NAME]>=scene_targets[i] \
                            else row[Const.TEMPLATE_NAME]
                    result_cat_level = 1.0 if scene_count_in_cate >= current_category_target else scene_count_in_cate / float(
                        current_category_target)
                self.common.write_to_db_result(
                    fk=display_count_category_level_fk,
                    numerator_id=self.pepsico_fk,
                    numerator_result=scene_count_in_cate,
                    denominator_id=category_fk,
                    denominator_result=current_category_target,
                    identifier_result=display_count_category_level_identifier,
                    identifier_parent=identifier_parent_store_level,
                    result=result_cat_level,
                    should_enter=True)

            # Calculate count of display - scene_level
            display_count_scene_level_fk = self.common.get_kpi_fk_by_kpi_type(
                Const.DISPLAY_COUNT_SCENE_LEVEL)
            for scene_type in relevant_scenes_dict.keys():
                scene_type_target = scene_targets[scene_type]
                if not scene_type_target:
                    continue
                actual_scene_name = relevant_scenes_dict[scene_type]
                relevant_category = self.get_category_from_template_name(
                    actual_scene_name)
                relevant_category_fk = self.get_relevant_pk_by_name(
                    Const.CATEGORY, relevant_category)
                scene_type_score = len(
                    filtered_scif[filtered_scif[Const.TEMPLATE_NAME] ==
                                  actual_scene_name][Const.SCENE_FK].unique())

                result_scene_level = 1.0 if scene_type_score >= scene_type_target else scene_type_score / float(
                    scene_type_target)
                scene_type_fk = self.all_templates.loc[self.all_templates[
                    Const.TEMPLATE_NAME] == actual_scene_name][
                        Const.TEMPLATE_FK].values[0]
                parent_identifier = self.common.get_dictionary(
                    kpi_fk=display_count_category_level_fk,
                    category=relevant_category)
                self.common.write_to_db_result(
                    fk=display_count_scene_level_fk,
                    numerator_id=self.pepsico_fk,
                    numerator_result=scene_type_score,
                    denominator_id=relevant_category_fk,
                    denominator_result=scene_type_target,
                    identifier_parent=parent_identifier,
                    context_id=scene_type_fk,
                    result=result_scene_level,
                    should_enter=True)
                count_store_level += scene_type_target if scene_type_score >= scene_type_target else scene_type_score

            # Calculate count of display - store_level
            result_store_level = 1.0 if count_store_level >= store_target else count_store_level / float(
                store_target)
            self.common.write_to_db_result(
                fk=display_count_store_level_fk,
                numerator_id=self.pepsico_fk,
                numerator_result=count_store_level,
                denominator_id=self.store_id,
                denominator_result=store_target,
                identifier_result=identifier_parent_store_level,
                result=result_store_level,
                should_enter=True)

    # def calculate_count_of_displays_old(self):
    #     """
    #     This function will calculate the Count of # of Pepsi Displays KPI
    #     :return:
    #     """
    #     # Notice! store_target is Integer, scene_type_list is a list and the rest are dictionaries
    #     store_target, category_targets, scene_targets, scene_type_list = self.get_target_for_count_of_displays()
    #     if not store_target:
    #         Log.warning("No targets were defined for this store (pk = {})".format(self.store_id))
    #         return
    #     # Filtering out the main shelves
    #     relevant_scenes_dict = self.get_relevant_scene_types_from_list(scene_type_list)
    #     relevant_template_name_list = relevant_scenes_dict.values()
    #     filtered_scif = self.scif.loc[self.scif[Const.TEMPLATE_NAME].isin(relevant_template_name_list)]
    #
    #     # Calculate count of display - store_level
    #     display_count_store_level_fk = self.common.get_kpi_fk_by_kpi_type(Const.DISPLAY_COUNT_STORE_LEVEL)
    #     scene_types_in_store = len(filtered_scif[Const.SCENE_FK].unique())
    #     result_store_level = 100 if scene_types_in_store >= store_target else scene_types_in_store / float(store_target)
    #     # self.common.write_to_db_result(fk=display_count_store_level_fk, numerator_id=self.pepsico_fk,
    #     #                                numerator_result=scene_types_in_store,
    #     #                                denominator_id=self.store_id, denominator_result=store_target,
    #     #                                identifier_result=display_count_store_level_fk,
    #     #                                result=result_store_level, should_enter=True)
    #
    #     identifier_parent_store_level = self.common.get_dictionary(kpi_fk=display_count_store_level_fk)
    #     self.common.write_to_db_result(fk=display_count_store_level_fk, numerator_id=self.pepsico_fk,
    #                                    numerator_result=scene_types_in_store,
    #                                    denominator_id=self.store_id, denominator_result=store_target,
    #                                    identifier_result=identifier_parent_store_level,
    #                                    result=result_store_level, should_enter=True)
    #
    #     # Calculate count of display - category_level
    #     display_count_category_level_fk = self.common.get_kpi_fk_by_kpi_type(Const.DISPLAY_COUNT_CATEGORY_LEVEL)
    #     for category in self.categories_to_calculate:
    #         current_category_target = category_targets[category]
    #         if not current_category_target:
    #             continue
    #         category_fk = self.get_relevant_pk_by_name(Const.CATEGORY, category)
    #         relevant_scenes = [scene_type for scene_type in relevant_template_name_list if
    #                            category.upper() in scene_type.upper()]
    #         filtered_scif_by_cat = filtered_scif.loc[filtered_scif[Const.TEMPLATE_NAME].isin(relevant_scenes)]
    #         if filtered_scif_by_cat.empty:
    #             continue
    #         scene_types_in_cate = len(filtered_scif_by_cat[Const.SCENE_FK].unique())
    #         result_cat_level = 100 if scene_types_in_cate >= current_category_target else scene_types_in_cate / float(
    #             current_category_target)
    #         display_count_category_level_identifier = self.common.get_dictionary(kpi_fk=display_count_category_level_fk,
    #                                                                              category=category)
    #         # self.common.write_to_db_result(fk=display_count_store_level_fk, numerator_id=self.pepsico_fk,
    #         #                                numerator_result=scene_types_in_cate,
    #         #                                denominator_id=category_fk, denominator_result=current_category_target,
    #         #                                identifier_result=display_count_category_level_identifier,
    #         #                                identifier_parent=display_count_category_level_fk,
    #         #                                result=result_cat_level, should_enter=True)
    #         self.common.write_to_db_result(fk=display_count_store_level_fk, numerator_id=self.pepsico_fk,
    #                                        numerator_result=scene_types_in_cate,
    #                                        denominator_id=category_fk, denominator_result=current_category_target,
    #                                        identifier_result=display_count_category_level_identifier,
    #                                        identifier_parent=identifier_parent_store_level,
    #                                        result=result_cat_level, should_enter=True)
    #
    #
    #     # Calculate count of display - scene_level
    #     display_count_scene_level_fk = self.common.get_kpi_fk_by_kpi_type(Const.DISPLAY_COUNT_SCENE_LEVEL)
    #     for scene_type in relevant_scenes_dict.keys():
    #         scene_type_target = scene_targets[scene_type]
    #         if not scene_type_target:
    #             continue
    #         actual_scene_name = relevant_scenes_dict[scene_type]
    #         relevant_category = self.get_category_from_template_name(actual_scene_name)
    #         relevant_category_fk = self.get_relevant_pk_by_name(Const.CATEGORY, relevant_category)
    #         scene_type_score = len(
    #             filtered_scif[filtered_scif[Const.TEMPLATE_NAME] == actual_scene_name][Const.SCENE_FK].unique())
    #
    #         result_scene_level = 100 if scene_type_score >= scene_type_target else scene_types_in_store / float(
    #             scene_type_target)
    #         scene_type_fk = self.all_templates.loc[self.all_templates[Const.TEMPLATE_NAME] == actual_scene_name][
    #             Const.TEMPLATE_FK].values[0]
    #         display_count_scene_level_identifier = self.common.get_dictionary(kpi_fk=display_count_category_level_fk,
    #                                                                           category=relevant_category)
    #         # parent_identifier = self.common.get_dictionary(kpi_fk=display_count_category_level_fk,
    #         #                                                category=relevant_category)
    #         # self.common.write_to_db_result(fk=display_count_scene_level_fk, numerator_id=self.pepsico_fk,
    #         #                                numerator_result=scene_types_in_store,
    #         #                                denominator_id=relevant_category_fk, denominator_result=scene_type_target,
    #         #                                identifier_result=display_count_scene_level_identifier,
    #         #                                identifier_parent=parent_identifier, context_id=scene_type_fk,
    #         #                                result=result_scene_level, should_enter=True)
    #
    #         self.common.write_to_db_result(fk=display_count_scene_level_fk, numerator_id=self.pepsico_fk,
    #                                        numerator_result=scene_types_in_store,
    #                                        denominator_id=relevant_category_fk, denominator_result=scene_type_target,
    #                                        identifier_result=display_count_scene_level_identifier,
    #                                        identifier_parent=identifier_parent_store_level, context_id=scene_type_fk,
    #                                        result=result_scene_level, should_enter=True)

    def calculate_assortment(self):
        lvl3_result = self.assortment.calculate_lvl3_assortment()
        # lvl3_result = self.get_lvl3_assortment_result_main_shelf()
        self.category_assortment_calculation(lvl3_result)
        self.store_assortment_calculation(lvl3_result)

    def get_lvl3_assortment_result_main_shelf(self):
        assortment_result = self.assortment.get_lvl3_relevant_ass()
        if not self.main_shelves and not assortment_result.empty:
            assortment_result.drop(assortment_result.index[0:], inplace=True)
        if assortment_result.empty:
            return assortment_result
        filters = {Const.TEMPLATE_NAME: self.main_shelves}
        filtered_scif = self.scif[self.toolbox.get_filter_condition(
            self.scif, **filters)]
        products_in_session = filtered_scif.loc[
            filtered_scif['facings'] > 0]['product_fk'].values
        assortment_result.loc[
            assortment_result['product_fk'].isin(products_in_session),
            'in_store'] = 1
        return assortment_result

    @log_runtime('Share of shelf pepsicoRU')
    def calculate_share_of_shelf(self):
        """
        The function filters only the relevant scene (type = Main Shelf in category) and calculates the linear SOS and
        the facing SOS for each level (Manufacturer, Category, Sub-Category, Brand).
        The identifier for every KPI is the current kpi_fk plus the relevant attribute for its level,
        e.g. sub_category_fk for level 3 or brand_fk for level 4.
        :return:
        """
        # Get all of the KPI fk in advance
        facings_stores_kpi_fk = self.common.get_kpi_fk_by_kpi_type(
            Const.FACINGS_MANUFACTURER_SOS)
        facings_cat_kpi_fk = self.common.get_kpi_fk_by_kpi_type(
            Const.FACINGS_CATEGORY_SOS)
        facings_sub_cat_kpi_fk = self.common.get_kpi_fk_by_kpi_type(
            Const.FACINGS_SUB_CATEGORY_SOS)
        facings_brand_kpi_fk = self.common.get_kpi_fk_by_kpi_type(
            Const.FACINGS_BRAND_SOS)

        linear_store_kpi_fk = self.common.get_kpi_fk_by_kpi_type(
            Const.LINEAR_MANUFACTURER_SOS)
        linear_cat_kpi_fk = self.common.get_kpi_fk_by_kpi_type(
            Const.LINEAR_CATEGORY_SOS)
        linear_sub_cat_kpi_fk = self.common.get_kpi_fk_by_kpi_type(
            Const.LINEAR_SUB_CATEGORY_SOS)
        linear_brand_kpi_fk = self.common.get_kpi_fk_by_kpi_type(
            Const.LINEAR_BRAND_SOS)

        filter_man_param = {Const.MANUFACTURER_NAME: Const.PEPSICO}
        general_filters = {Const.TEMPLATE_NAME: self.main_shelves}
        facings_level_1_identifier = self.common.get_dictionary(
            kpi_fk=facings_stores_kpi_fk)
        linear_level_1_identifier = self.common.get_dictionary(
            kpi_fk=linear_store_kpi_fk)
        num_facings = denom_facings = num_linear = denom_linear = result_facings = result_linear = 0

        if self.main_shelves:
            num_facings, denom_facings, num_linear, denom_linear = self.calculate_sos(
                sos_filters=filter_man_param, **general_filters)
            result_facings = num_facings / float(
                denom_facings) if denom_facings else 0
            result_linear = num_linear / float(
                denom_linear) if denom_linear else 0

        # Facings level 1
        self.common.write_to_db_result(
            fk=facings_stores_kpi_fk,
            numerator_id=self.pepsico_fk,
            identifier_result=facings_level_1_identifier,
            numerator_result=num_facings,
            denominator_id=self.store_id,
            denominator_result=denom_facings,
            result=result_facings,
            should_enter=True)
        # Linear level 1
        self.common.write_to_db_result(
            fk=linear_store_kpi_fk,
            numerator_id=self.pepsico_fk,
            identifier_result=linear_level_1_identifier,
            numerator_result=num_linear * 100,
            denominator_id=self.store_id,
            denominator_result=denom_linear * 100,
            result=result_linear,
            should_enter=True)

        for category in self.categories_to_calculate:
            current_category_fk = self.get_relevant_pk_by_name(
                Const.CATEGORY, category)
            main_shelves_for_category = self.get_main_shelf_by_category(
                category)
            if main_shelves_for_category:
                filter_params = {
                    Const.CATEGORY: category,
                    Const.TEMPLATE_NAME: main_shelves_for_category
                }
                facings_cat_identifier = self.common.get_dictionary(
                    kpi_fk=facings_cat_kpi_fk, category_fk=current_category_fk)
                linear_cat_identifier = self.common.get_dictionary(
                    kpi_fk=linear_cat_kpi_fk, category_fk=current_category_fk)
                num_facings, denom_facings, num_linear, denom_linear = self.calculate_sos(
                    sos_filters=filter_man_param, **filter_params)

                result_facings = num_facings / float(
                    denom_facings) if denom_facings else 0
                result_linear = num_linear / float(
                    denom_linear) if denom_linear else 0

                # Facings level 2
                self.common.write_to_db_result(
                    fk=facings_cat_kpi_fk,
                    numerator_id=self.pepsico_fk,
                    numerator_result=num_facings,
                    denominator_id=current_category_fk,
                    denominator_result=denom_facings,
                    identifier_result=facings_cat_identifier,
                    identifier_parent=facings_level_1_identifier,
                    result=result_facings,
                    should_enter=True)
                # Linear level 2
                self.common.write_to_db_result(
                    fk=linear_cat_kpi_fk,
                    numerator_id=self.pepsico_fk,
                    numerator_result=num_linear * 100,
                    denominator_id=current_category_fk,
                    denominator_result=denom_linear * 100,
                    identifier_result=linear_cat_identifier,
                    identifier_parent=linear_level_1_identifier,
                    result=result_linear,
                    should_enter=True)

                for sub_cat in self.get_relevant_sub_categories_for_category(
                        category):
                    current_sub_category_fk = self.get_relevant_pk_by_name(
                        Const.SUB_CATEGORY, sub_cat)
                    filter_sub_cat_param = {
                        Const.SUB_CATEGORY: sub_cat,
                        Const.CATEGORY: category,
                        Const.TEMPLATE_NAME: main_shelves_for_category
                    }
                    facings_sub_cat_identifier = self.common.get_dictionary(
                        kpi_fk=facings_sub_cat_kpi_fk,
                        sub_category_fk=current_sub_category_fk)
                    linear_sub_cat_identifier = self.common.get_dictionary(
                        kpi_fk=linear_sub_cat_kpi_fk,
                        sub_category_fk=current_sub_category_fk)
                    num_facings, denom_facings, num_linear, denom_linear = self.calculate_sos(
                        sos_filters=filter_man_param, **filter_sub_cat_param)

                    if denom_facings and denom_linear:
                        # Facings level 3
                        self.common.write_to_db_result(
                            fk=facings_sub_cat_kpi_fk,
                            numerator_id=self.pepsico_fk,
                            numerator_result=num_facings,
                            denominator_id=current_sub_category_fk,
                            denominator_result=denom_facings,
                            identifier_result=facings_sub_cat_identifier,
                            identifier_parent=facings_cat_identifier,
                            result=num_facings / float(denom_facings),
                            should_enter=True)
                        # Linear level 3
                        self.common.write_to_db_result(
                            fk=linear_sub_cat_kpi_fk,
                            numerator_id=self.pepsico_fk,
                            numerator_result=num_linear * 100,
                            denominator_id=current_sub_category_fk,
                            denominator_result=denom_linear * 100,
                            identifier_result=linear_sub_cat_identifier,
                            identifier_parent=linear_cat_identifier,
                            result=num_linear / float(denom_linear),
                            should_enter=True)

                        for brand_name in self.get_relevant_brands_for_sub_category(
                                sub_cat):
                            current_brand_fk = self.get_relevant_pk_by_name(
                                Const.BRAND, brand_name)
                            filter_sos_brand = {
                                Const.BRAND_NAME: brand_name,
                                Const.SUB_CATEGORY: sub_cat,
                                Const.MANUFACTURER_NAME: Const.PEPSICO
                            }
                            filter_general_brand_param = {
                                Const.SUB_CATEGORY: sub_cat,
                                Const.CATEGORY: category,
                                Const.TEMPLATE_NAME: main_shelves_for_category
                            }
                            facings_brand_identifier = self.common.get_dictionary(
                                kpi_fk=facings_brand_kpi_fk,
                                brand_fk=current_brand_fk)
                            linear_brand_identifier = self.common.get_dictionary(
                                kpi_fk=linear_brand_kpi_fk,
                                brand_fk=current_brand_fk)
                            num_facings, denom_facings, num_linear, denom_linear = self.calculate_sos(
                                sos_filters=filter_sos_brand,
                                **filter_general_brand_param)

                            if denom_facings and denom_linear:
                                # Facings level 4
                                self.common.write_to_db_result(
                                    fk=facings_brand_kpi_fk,
                                    numerator_id=current_brand_fk,
                                    numerator_result=num_facings,
                                    denominator_id=current_sub_category_fk,
                                    denominator_result=denom_facings,
                                    identifier_result=facings_brand_identifier,
                                    identifier_parent=
                                    facings_sub_cat_identifier,
                                    result=num_facings / float(denom_facings),
                                    should_enter=True)
                                # Linear level 4
                                self.common.write_to_db_result(
                                    fk=linear_brand_kpi_fk,
                                    numerator_id=current_brand_fk,
                                    numerator_result=num_linear * 100,
                                    denominator_id=current_sub_category_fk,
                                    denominator_result=denom_linear * 100,
                                    identifier_result=linear_brand_identifier,
                                    identifier_parent=linear_sub_cat_identifier,
                                    result=num_linear / float(denom_linear),
                                    should_enter=True)

    # Utils functions with a slight change from the SDK factory:
    def calculate_sos(self,
                      sos_filters,
                      include_empty=Const.EXCLUDE_EMPTY,
                      **general_filters):
        """
        :param sos_filters: These are the parameters on which the SOS is calculated (out of the general DF).
        :param include_empty: This dictates whether Empty-typed SKUs are included in the calculation.
        :param general_filters: These are the parameters which the general data frame is filtered by.
        :return: The numerator facings, denominator facings, numerator linear and denominator linear.
        """
        if include_empty == Const.EXCLUDE_EMPTY:
            general_filters[Const.PRODUCT_TYPE] = (Const.EMPTY,
                                                   Const.EXCLUDE_FILTER)
        numerator_facings, numerator_linear = self.calculate_share_space(
            **dict(sos_filters, **general_filters))
        denominator_facings, denominator_linear = self.calculate_share_space(
            **general_filters)
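        # net_len_ign_stack is presumably reported in millimetres, so the linear values are
        # divided by 1000.0 to return them in metres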
        return numerator_facings, denominator_facings, numerator_linear / 1000.0, denominator_linear / 1000.0

    def calculate_share_space(self, **filters):
        """
        :param filters: These are the parameters which the data frame is filtered by.
        :return: The total number of facings and the shelf width (in mm) according to the filters.
        """
        filtered_scif = self.scif[self.toolbox.get_filter_condition(
            self.scif, **filters)]
        sum_of_facings = filtered_scif['facings'].sum()
        space_length = filtered_scif['net_len_ign_stack'].sum()
        return sum_of_facings, space_length

    def category_assortment_calculation(self, lvl3_result):
        """
        This function calculates the category assortment on two levels:
        level 3 is assortment SKU
        level 2 is assortment groups
        """
        osa_product_level_fk = self.common.get_kpi_fk_by_kpi_type(
            Const.OSA_SKU_LEVEL)
        oos_product_level_fk = self.common.get_kpi_fk_by_kpi_type(
            Const.OOS_SKU_LEVEL)
        osa_category_level_kpi_fk = self.common.get_kpi_fk_by_kpi_type(
            Const.OSA_CATEGORY_LEVEL)
        oos_category_level_kpi_fk = self.common.get_kpi_fk_by_kpi_type(
            Const.OOS_CATEGORY_LEVEL)

        if not lvl3_result.empty:
            cat_df = self.all_products[['product_fk', 'category_fk']]
            lvl3_with_cat = lvl3_result.merge(cat_df,
                                              on='product_fk',
                                              how='left')
            lvl3_with_cat = lvl3_with_cat[
                lvl3_with_cat['category_fk'].notnull()]

            for result in lvl3_with_cat.itertuples():
                if result.in_store == 1:
                    score = Const.DISTRIBUTION
                else:
                    score = Const.OOS
                # Distribution
                self.common_v1.write_to_db_result_new_tables(
                    fk=osa_product_level_fk,
                    numerator_id=result.product_fk,
                    numerator_result=score,
                    result=score,
                    denominator_id=result.category_fk,
                    denominator_result=1,
                    score=score,
                    score_after_actions=score)
                if score == Const.OOS:
                    # OOS
                    self.common_v1.write_to_db_result_new_tables(
                        oos_product_level_fk,
                        numerator_id=result.product_fk,
                        numerator_result=score,
                        result=score,
                        denominator_id=result.category_fk,
                        denominator_result=1,
                        score=score,
                        score_after_actions=score)
            category_fk_list = lvl3_with_cat['category_fk'].unique()
            for cat in category_fk_list:
                lvl3_result_cat = lvl3_with_cat[lvl3_with_cat["category_fk"] ==
                                                cat]
                lvl2_result = self.assortment.calculate_lvl2_assortment(
                    lvl3_result_cat)
                for result in lvl2_result.itertuples():
                    denominator_res = result.total
                    res = np.divide(float(result.passes),
                                    float(denominator_res))
                    # Distribution
                    self.common_v1.write_to_db_result_new_tables(
                        fk=osa_category_level_kpi_fk,
                        numerator_id=self.pepsico_fk,
                        numerator_result=result.passes,
                        denominator_id=cat,
                        denominator_result=denominator_res,
                        result=res,
                        score=res,
                        score_after_actions=res)

                    # OOS
                    self.common_v1.write_to_db_result_new_tables(
                        fk=oos_category_level_kpi_fk,
                        numerator_id=self.pepsico_fk,
                        numerator_result=denominator_res - result.passes,
                        denominator_id=cat,
                        denominator_result=denominator_res,
                        result=1 - res,
                        score=(1 - res),
                        score_after_actions=1 - res)
                self.assortment.LVL2_HEADERS.extend(['passes', 'total'])
        return
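
    # Illustrative sketch (not in the original code): the category roll-up above boils
    # down to a pass rate and its complement, which feed the Distribution and OOS
    # writes respectively. 'passes' and 'total' come from the level-2 assortment frame.
    @staticmethod
    def _example_distribution_and_oos_rates(passes, total):
        distribution_rate = float(passes) / float(total)   # share of assorted SKUs found
        return distribution_rate, 1 - distribution_rate    # (Distribution, OOS)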

    def store_assortment_calculation(self, lvl3_result):
        """
        This function calculates the store-level assortment (Distribution and OOS) KPI results.
        """
        dist_store_level_kpi_fk = self.common.get_kpi_fk_by_kpi_type(
            Const.OSA_STORE_LEVEL)
        oos_store_level_kpi_fk = self.common.get_kpi_fk_by_kpi_type(
            Const.OOS_STORE_LEVEL)
        # for result in lvl3_result.itertuples():
        #     if result.in_store == 1:
        #         score = Const.DISTRIBUTION
        #     else:
        #         score = Const.OOS
        #     # Distribution
        #     self.common_v1.write_to_db_result_new_tables(fk=?????, numerator_id=result.product_fk,
        #                                                 numerator_result=score,
        #                                                 result=score, denominator_id=self.store_id,
        #                                                 denominator_result=1, score=score)
        #     if score == Const.OOS:
        #         # OOS
        #         self.common_v1.write_to_db_result_new_tables(fk=?????, numerator_id=result.product_fk,
        #                                                     numerator_result=score,
        #                                                     result=score, denominator_id=self.store_id,
        #                                                     denominator_result=1, score=score,
        #                                                     score_after_actions=score)

        if not lvl3_result.empty:
            lvl2_result = self.assortment.calculate_lvl2_assortment(
                lvl3_result)
            for result in lvl2_result.itertuples():
                denominator_res = result.total
                if not pd.isnull(result.target) and not pd.isnull(
                        result.group_target_date
                ) and result.group_target_date <= self.visit_date:
                    denominator_res = result.target
                res = np.divide(float(result.passes), float(denominator_res))
                # Distribution
                self.common_v1.write_to_db_result_new_tables(
                    fk=dist_store_level_kpi_fk,
                    numerator_id=self.pepsico_fk,
                    denominator_id=self.store_id,
                    numerator_result=result.passes,
                    denominator_result=denominator_res,
                    result=res,
                    score=res,
                    score_after_actions=res)

                # OOS
                self.common_v1.write_to_db_result_new_tables(
                    fk=oos_store_level_kpi_fk,
                    numerator_id=self.pepsico_fk,
                    numerator_result=denominator_res - result.passes,
                    denominator_id=self.store_id,
                    denominator_result=denominator_res,
                    result=1 - res,
                    score=1 - res,
                    score_after_actions=1 - res)
        return
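
A quick illustration of the denominator rule in store_assortment_calculation above: the assortment target replaces the group total once the group target date has been reached. This is a minimal, self-contained sketch; the row values below are hypothetical and not taken from any real assortment.

import pandas as pd
from datetime import date

# hypothetical level-2 row, mirroring the fields used in the loop above
row = {'total': 20, 'passes': 15, 'target': 18, 'group_target_date': date(2019, 1, 1)}
visit_date = date(2019, 6, 1)

denominator = row['total']
if not pd.isnull(row['target']) and not pd.isnull(row['group_target_date']) \
        and row['group_target_date'] <= visit_date:
    denominator = row['target']                       # 18 replaces 20

distribution = float(row['passes']) / denominator     # 15 / 18 ~= 0.833
oos = 1 - distribution                                # ~= 0.167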
Ejemplo n.º 27
0
class PERFETTICNToolBox:
    LEVEL1 = 1
    LEVEL2 = 2
    LEVEL3 = 3

    def __init__(self, data_provider, output):
        self.output = output
        self.data_provider = data_provider
        self.common = Common(self.data_provider)
        self.project_name = self.data_provider.project_name
        self.session_uid = self.data_provider.session_uid
        self.products = self.data_provider[Data.PRODUCTS]
        self.all_products = self.data_provider[Data.ALL_PRODUCTS]
        self.match_product_in_scene = self.data_provider[Data.MATCHES]
        self.visit_date = self.data_provider[Data.VISIT_DATE]
        self.session_info = self.data_provider[Data.SESSION_INFO]
        self.scene_info = self.data_provider[Data.SCENES_INFO]
        self.store_id = self.data_provider[Data.STORE_FK]
        self.scif = self.data_provider[Data.SCENE_ITEM_FACTS]
        self.rds_conn = PSProjectConnector(self.project_name,
                                           DbUsers.CalculationEng)
        self.kpi_static_data = self.common.get_new_kpi_static_data()
        self.kpi_results_queries = []
        self.store_info = self.data_provider[Data.STORE_INFO]
        self.assortment = Assortment(self.data_provider, self.output)
        self.template = self.data_provider.all_templates

    def main_calculation(self, *args, **kwargs):

        self.display_count()
        self.assortment_calculation()
        self.common.commit_results_data_to_new_tables()

    def display_count(self):
        """This function calculates how many displays find from types secondary shelf
        """
        num_brands = {}
        display_info = self.scif['template_fk']
        display_fks = display_info.unique()
        template_seco = self.template[
            self.template['included_in_secondary_shelf_report'] ==
            'Y']['template_fk']
        display_fks = list(
            filter(lambda x: x in template_seco.values, display_fks))
        count_fk = self.kpi_static_data[self.kpi_static_data['client_name'] ==
                                        'COUNT OF DISPLAY']['pk'].iloc[0]
        for value in display_fks:
            num_brands[value] = display_info[display_info == value].count()
            score = num_brands[value]
            self.common.write_to_db_result_new_tables(count_fk, value, None,
                                                      score, score, score,
                                                      score)

        return
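
    # Illustrative alternative (an assumption, not the original implementation): the
    # per-template display counts computed above can also be derived in one pass with
    # pandas value_counts, restricted to secondary-shelf templates.
    def _example_display_counts(self):
        secondary = self.template[
            self.template['included_in_secondary_shelf_report'] == 'Y']['template_fk']
        counts = self.scif['template_fk'].value_counts()
        return counts[counts.index.isin(secondary.values)].to_dict()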

    def assortment_calculation(self):
        """
        This function calculates 3 levels of assortment:
        level 3 is assortment SKU
        level 2 is assortment groups
        level 1 is how many groups passed out of all groups
        """
        lvl3_result = self.assortment.calculate_lvl3_assortment()

        for result in lvl3_result.itertuples():
            score = result.in_store
            if score >= 1:
                score = 100
            self.common.write_to_db_result_new_tables(
                result.kpi_fk_lvl3, result.product_fk, result.in_store, score,
                result.assortment_group_fk, 1, score)
        if not lvl3_result.empty:
            lvl2_result = self.assortment.calculate_lvl2_assortment(
                lvl3_result)
            for result in lvl2_result.itertuples():
                denominator_res = result.total
                res = np.divide(float(result.passes), float(denominator_res))
                if result.passes >= 1:
                    score = 100
                else:
                    score = 0
                self.common.write_to_db_result_new_tables(
                    result.kpi_fk_lvl2, result.assortment_group_fk,
                    result.passes, (res * 100),
                    result.assortment_super_group_fk, denominator_res, score)

            if not lvl2_result.empty:
                lvl1_result = self.assortment.calculate_lvl1_assortment(
                    lvl2_result)
                for result in lvl1_result.itertuples():
                    denominator_res = result.total
                    res = np.divide(float(result.passes),
                                    float(denominator_res))
                    # NB: res is a non-negative ratio, so this check always passes
                    # and the level-1 score is always 100 as written
                    if res >= 0:
                        score = 100
                    else:
                        score = 0
                    self.common.write_to_db_result_new_tables(
                        fk=result.kpi_fk_lvl1,
                        numerator_id=result.assortment_super_group_fk,
                        numerator_result=result.passes,
                        denominator_result=denominator_res,
                        result=(res * 100),
                        score=score)
        return
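
The level-2 loop above reports the group result as passes over total and turns it into a pass/fail score. A stand-alone illustration with made-up numbers follows; the aggregation from level-3 rows is shown only to give the frame a plausible shape, since the real Assortment library computes passes and total itself.

import numpy as np
import pandas as pd

# hypothetical level-3 rows for a single assortment group (in_store is 0/1 per SKU)
lvl3 = pd.DataFrame({'assortment_group_fk': [7, 7, 7, 7],
                     'in_store': [1, 0, 1, 1]})
passes = lvl3['in_store'].sum()                                # 3
total = len(lvl3)                                              # 4
group_result = np.divide(float(passes), float(total)) * 100    # 75.0
group_score = 100 if passes >= 1 else 0                        # pass/fail rule used above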
Ejemplo n.º 28
0
class MOLSONCOORSHR_SANDToolBox:

    def __init__(self, data_provider, output):
        self.output = output
        self.data_provider = data_provider
        self.common = Common(self.data_provider)
        self.project_name = self.data_provider.project_name
        self.session_uid = self.data_provider.session_uid
        self.products = self.data_provider[Data.PRODUCTS]
        self.all_products = self.data_provider[Data.ALL_PRODUCTS]
        self.match_product_in_scene = self.data_provider[Data.MATCHES]
        self.visit_date = self.data_provider[Data.VISIT_DATE]
        self.current_date = datetime.now()
        self.session_info = self.data_provider[Data.SESSION_INFO]
        self.scene_info = self.data_provider[Data.SCENES_INFO]
        self.store_info = self.data_provider[Data.STORE_INFO]
        self.store_id = self.data_provider[Data.STORE_FK]
        self.own_manufacturer_id = int(self.data_provider[Data.OWN_MANUFACTURER][self.data_provider[Data.OWN_MANUFACTURER]['param_name'] == 'manufacturer_id']['param_value'].tolist()[0])
        self.scif = self.data_provider[Data.SCENE_ITEM_FACTS]
        self.rds_conn = PSProjectConnector(self.project_name, DbUsers.CalculationEng)
        self.toolbox = GENERALToolBox(data_provider)
        self.assortment = Assortment(self.data_provider, self.output, common=self.common)
        self.kpi_static_data = self.common.get_kpi_static_data()
        self.kpi_results_queries = []

        self.template_path = self.get_template_path()
        self.template_data = self.get_template_data()
        self.sos_store_policies = self.get_sos_store_policies(self.visit_date.strftime('%Y-%m-%d'))
        self.result_values = self.get_result_values()

        self.scores = pd.DataFrame()

    def get_sos_store_policies(self, visit_date):
        query = MOLSONCOORSHR_SANDQueries.get_sos_store_policies(visit_date)
        store_policies = pd.read_sql_query(query, self.rds_conn.db)
        return store_policies

    def get_result_values(self):
        query = MOLSONCOORSHR_SANDQueries.get_result_values()
        result_values = pd.read_sql_query(query, self.rds_conn.db)
        return result_values

    def main_calculation(self):
        """
        This function starts the KPI results calculation.
        """
        if not self.template_data or KPIS_TEMPLATE_SHEET not in self.template_data:
            Log.error('KPIs template sheet is empty or not found')
            return

        self.kpis_calculation()
        self.common.commit_results_data()

    def kpis_calculation(self, kpi_group=''):
        """
        This is a recursive function.
        It calculates the KPIs at every level of the hierarchy, cascading from the highest level down to the lowest.
        """
        total_score = total_potential_score = total_calculated = 0
        kpis = self.template_data['KPIs'][self.template_data['KPIs']['KPI Group'] == kpi_group]
        for index, kpi in kpis.iterrows():

            child_kpi_group = kpi['Child KPI Group']
            kpi_type = kpi['KPI Type'].lower()
            score_function = kpi['Score Function'].lower()

            if not child_kpi_group:
                if kpi_type in [LINEAR_SOS_VS_TARGET, FACINGS_SOS_VS_TARGET]:
                    score, potential_score, calculated = self.calculate_sos_vs_target(kpi)
                elif kpi_type in [FACINGS_VS_TARGET, DISTRIBUTION]:
                    score, potential_score, calculated = self.calculate_assortment_vs_target(kpi)
                else:
                    Log.error("KPI of type '{}' is not supported".format(kpi_type))
                    score = potential_score = calculated = 0
            else:
                score, potential_score, calculated = self.kpis_calculation(child_kpi_group)

            if score_function in [WEIGHTED_SCORE, SUM_OF_SCORES]:
                total_score += score
                total_potential_score += potential_score
                total_calculated += calculated
            # other score functions do not contribute to the parent totals

            if child_kpi_group and calculated:
                # the same DB write applies to 'Store Score' and to any other group-level KPI
                kpi_fk = self.common.get_kpi_fk_by_kpi_type(kpi['KPI name Eng'])
                parent_fk = self.common.get_kpi_fk_by_kpi_type(kpi['KPI Group']) if kpi['KPI Group'] else 0
                numerator_id = self.own_manufacturer_id
                denominator_id = self.store_id
                identifier_result = self.common.get_dictionary(kpi_fk=kpi_fk)
                identifier_parent = self.common.get_dictionary(kpi_fk=parent_fk)
                self.common.write_to_db_result(fk=kpi_fk,
                                               numerator_id=numerator_id,
                                               numerator_result=0,
                                               denominator_id=denominator_id,
                                               denominator_result=0,
                                               result=score,
                                               score=score,
                                               weight=potential_score,
                                               target=potential_score,
                                               identifier_result=identifier_result,
                                               identifier_parent=identifier_parent,
                                               should_enter=True
                                               )

        return total_score, total_potential_score, total_calculated
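
    # Illustrative sketch (not part of the original class): kpis_calculation above is a
    # depth-first roll-up - leaf KPIs produce weighted scores and each parent simply
    # sums its children. A minimal stand-alone version over nested dicts, where every
    # node carries a pre-weighted 'score' and an optional 'children' list:
    @staticmethod
    def _example_score_rollup(node):
        children = node.get('children') or []
        if not children:
            return node.get('score', 0)
        return sum(MOLSONCOORSHR_SANDToolBox._example_score_rollup(child)
                   for child in children)
    # e.g. _example_score_rollup({'children': [{'score': 20}, {'score': 35}]}) == 55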

    @kpi_runtime()
    def calculate_assortment_vs_target(self, kpi):
        """
        The function filters only the relevant scenes by Location Type and calculates the Assortment scores
        according to rules set in the target.
        :return: The score, the potential score and a flag indicating whether the KPI was calculated.
        """
        lvl3_result = self.calculate_assortment_vs_target_lvl3(kpi)
        for row in lvl3_result.itertuples():
            numerator_id = row.product_fk
            numerator_result = row.distributed if kpi['KPI Type'] == 'Distribution' else row.facings
            denominator_id = self.store_id
            denominator_result = row.target
            # denominator_result_after_actions = 0 if row.target < row.facings else row.target - row.facings
            if kpi['KPI Type'] == 'Distribution':
                if row.result_distributed:
                    result = self.result_values[(self.result_values['result_type'] == 'PRESENCE') &
                                                (self.result_values['result_value'] == 'DISTRIBUTED')]['result_value_fk'].tolist()[0]
                    score = 100
                else:
                    result = self.result_values[(self.result_values['result_type'] == 'PRESENCE') &
                                                (self.result_values['result_value'] == 'OOS')]['result_value_fk'].tolist()[0]
                    score = 0
            else:
                result = row.result_facings
                score = round(result*100, 0)
            identifier_details = self.common.get_dictionary(kpi_fk=row.kpi_fk_lvl3)
            identifier_kpi = self.common.get_dictionary(kpi_fk=row.kpi_fk_lvl2)
            self.common.write_to_db_result(fk=row.kpi_fk_lvl3,
                                           numerator_id=numerator_id,
                                           numerator_result=numerator_result,
                                           denominator_id=denominator_id,
                                           denominator_result=denominator_result,
                                           # denominator_result_after_actions=denominator_result_after_actions,
                                           result=result,
                                           score=score,
                                           identifier_result=identifier_details,
                                           identifier_parent=identifier_kpi,
                                           should_enter=True
                                           )

        score = potential_score = 0
        if not lvl3_result.empty:
            lvl2_result = self.calculate_assortment_vs_target_lvl2(lvl3_result)
            for row in lvl2_result.itertuples():
                numerator_id = self.own_manufacturer_id
                numerator_result = row.distributed if kpi['KPI Type'] == 'Distribution' else row.facings
                denominator_id = self.store_id
                denominator_result = row.target
                result = row.result_distributed if kpi['KPI Type'] == 'Distribution' else row.result_facings
                score += self.score_function(result*100, kpi)
                potential_score += round(float(kpi['Weight'])*100, 0)
                identifier_kpi = self.common.get_dictionary(kpi_fk=row.kpi_fk_lvl2)
                identifier_parent = self.common.get_dictionary(kpi_fk=self.common.get_kpi_fk_by_kpi_type(kpi['KPI Group']))
                self.common.write_to_db_result(fk=row.kpi_fk_lvl2,
                                               numerator_id=numerator_id,
                                               numerator_result=numerator_result,
                                               denominator_id=denominator_id,
                                               denominator_result=denominator_result,
                                               result=score,
                                               score=score,
                                               weight=potential_score,
                                               target=potential_score,
                                               identifier_result=identifier_kpi,
                                               identifier_parent=identifier_parent,
                                               should_enter=True
                                               )
        calculated = 1 if not lvl3_result.empty else 0

        return score, potential_score, calculated

    def calculate_assortment_vs_target_lvl3(self, kpi):
        location_types = kpi['Location Type'].split(', ')
        kpi_fk_lvl3 = self.common.get_kpi_fk_by_kpi_type(kpi['KPI name Eng'] + ' - SKU')
        kpi_fk_lvl2 = self.common.get_kpi_fk_by_kpi_type(kpi['KPI name Eng'])

        assortment_result = self.assortment.get_lvl3_relevant_ass()
        if assortment_result.empty:
            return assortment_result
        assortment_result = assortment_result[(assortment_result['kpi_fk_lvl3'] == kpi_fk_lvl3) &
                                              (assortment_result['kpi_fk_lvl2'] == kpi_fk_lvl2)]
        if assortment_result.empty:
            return assortment_result

        assortment_result['target'] = assortment_result.apply(lambda x: json.loads(x['additional_attributes']).get('Target'), axis=1)
        assortment_result['target'] = assortment_result['target'].fillna(0)
        assortment_result = assortment_result[assortment_result['target'] > 0]

        assortment_result['weight'] = assortment_result.apply(lambda x: json.loads(x['additional_attributes']).get('Weight'), axis=1)
        assortment_result['weight'] = assortment_result['weight'].fillna(0)
        assortment_total_weights = assortment_result[['assortment_fk', 'weight']].groupby('assortment_fk').agg({'weight': 'sum'}).reset_index()
        assortment_result = assortment_result.merge(assortment_total_weights, how='left', left_on='assortment_fk', right_on='assortment_fk', suffixes=['', '_total'])

        facings = 'facings_ign_stack' if kpi['Ignore Stacking'] else 'facings'

        products_in_session = self.scif[(self.scif[facings] > 0) & (self.scif['location_type'].isin(location_types))][['product_fk', 'facings']]\
            .groupby('product_fk').agg({'facings': 'sum'}).reset_index()
        lvl3_result = assortment_result.merge(products_in_session, how='left', left_on='product_fk', right_on='product_fk')
        lvl3_result['facings'] = lvl3_result['facings'].fillna(0)
        lvl3_result['distributed'] = lvl3_result.apply(lambda x: 1 if x['facings'] else 0, axis=1)

        lvl3_result['result_facings'] = lvl3_result.apply(lambda x: self.assortment_vs_target_result(x, 'facings'), axis=1)
        lvl3_result['result_distributed'] = lvl3_result.apply(lambda x: self.assortment_vs_target_result(x, 'distributed'), axis=1)

        return lvl3_result

    def calculate_assortment_vs_target_lvl2(self, lvl3_result):
        lvl2_result = lvl3_result.groupby(['kpi_fk_lvl2', self.assortment.ASSORTMENT_FK, self.assortment.ASSORTMENT_GROUP_FK])\
            .agg({'facings': 'sum', 'distributed': 'sum', 'target': 'sum', 'result_facings': 'sum', 'result_distributed': 'sum'}).reset_index()
        return lvl2_result

    @staticmethod
    def assortment_vs_target_result(x, y):
        if x[y] < x['target']:
            return round(x[y] / float(x['target']) * float(x['weight']) / x['weight_total'], 5)
        else:
            return round(1 * float(x['weight']) / x['weight_total'], 5)
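
    # Worked illustration (not part of the original class): assortment_vs_target_result
    # pro-rates a SKU's weight by how close it is to target and caps the contribution
    # at weight / weight_total once the target is met.
    @classmethod
    def _example_assortment_vs_target_result(cls):
        row = {'facings': 3, 'target': 5, 'weight': 2.0, 'weight_total': 10.0}
        partial = cls.assortment_vs_target_result(row, 'facings')   # 3/5 * 2/10 = 0.12
        row['facings'] = 7
        capped = cls.assortment_vs_target_result(row, 'facings')    # 2/10 = 0.2
        return partial, capped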

    @kpi_runtime()
    def calculate_sos_vs_target(self, kpi):
        """
        The function filters only the relevant scenes by Location Type and calculates the linear SOS and
        the facing SOS according to Manufacturer and Category set in the target.
        :return: The score, the potential score and a flag indicating whether the store policy was matched.
        """
        location_type = kpi['Location Type']
        kpi_fk = self.common.get_kpi_fk_by_kpi_type(SOS_MANUFACTURER_CATEGORY + ('_' + location_type if location_type else ''))

        sos_store_policies = self.sos_store_policies[self.sos_store_policies['kpi_fk'] == str(kpi_fk)]

        sos_store_policy = None
        store_policy_passed = 0
        for index, policy in sos_store_policies.iterrows():
            sos_store_policy = policy
            store_policy = json.loads(policy['store_policy'])
            store_policy_passed = 1
            for key in store_policy.keys():
                if key in self.store_info.columns.tolist():
                    if self.store_info[key][0] in store_policy[key]:
                        continue
                    else:
                        store_policy_passed = 0
                        break
                else:
                    Log.error("Store Policy attribute is not found: '{}'").format(key)
                    store_policy_passed = 0
                    break
            if store_policy_passed:
                break

        score = potential_score = 0
        if store_policy_passed:

            general_filters = {LOCATION_TYPE: location_type}
            sos_policy = json.loads(sos_store_policy['sos_policy'])
            numerator_sos_filters = {MANUFACTURER_NAME: sos_policy[NUMERATOR][MANUFACTURER], CATEGORY: sos_policy[DENOMINATOR][CATEGORY]}
            denominator_sos_filters = {CATEGORY: sos_policy[DENOMINATOR][CATEGORY]}

            numerator_id = self.all_products.loc[self.all_products[MANUFACTURER_NAME] == sos_policy[NUMERATOR][MANUFACTURER]][MANUFACTURER + '_fk'].values[0]
            denominator_id = self.all_products.loc[self.all_products[CATEGORY] == sos_policy[DENOMINATOR][CATEGORY]][CATEGORY + '_fk'].values[0]

            ignore_stacking = kpi['Ignore Stacking'] if kpi['Ignore Stacking'] else 0

            numer_facings, numer_linear = self.calculate_share_space(ignore_stacking=ignore_stacking, **dict(numerator_sos_filters, **general_filters))
            denom_facings, denom_linear = self.calculate_share_space(ignore_stacking=ignore_stacking, **dict(denominator_sos_filters, **general_filters))

            if kpi['KPI Type'].lower() == LINEAR_SOS_VS_TARGET:
                numerator_result = round(numer_linear, 0)
                denominator_result = round(denom_linear, 0)
                result = numer_linear / float(denom_linear) if denom_linear else 0
            elif kpi['KPI Type'].lower() == FACINGS_SOS_VS_TARGET:
                numerator_result = numer_facings
                denominator_result = denom_facings
                result = numer_facings / float(denom_facings) if denom_facings else 0
            else:
                Log.error("KPI Type is invalid: '{}'").format(kpi['KPI Type'])
                numerator_result = denominator_result = result = 0

            if sos_store_policy['target']:
                sos_target = round(float(sos_store_policy['target'])*100, 0)
            else:
                Log.error("SOS target is not set for Store ID {}").format(self.store_id)
                sos_target = 0

            result_vs_target = result/(float(sos_target)/100)*100 if sos_target else 0
            score = self.score_function(result_vs_target, kpi)
            potential_score = round(float(kpi['Weight'])*100, 0)

            identifier_kpi = self.common.get_dictionary(kpi_fk=kpi_fk)
            identifier_parent = self.common.get_dictionary(kpi_fk=self.common.get_kpi_fk_by_kpi_type(kpi['KPI Group']))
            self.common.write_to_db_result(fk=kpi_fk,
                                           numerator_id=numerator_id,
                                           numerator_result=numerator_result,
                                           denominator_id=denominator_id,
                                           denominator_result=denominator_result,
                                           result=result,
                                           score=score,
                                           weight=potential_score,
                                           target=sos_target,
                                           identifier_result=identifier_kpi,
                                           identifier_parent=identifier_parent,
                                           should_enter=True
                                           )

        else:
            Log.warning("Store Policy is not found for Store ID {}".format(self.store_id))

        return score, potential_score, store_policy_passed

    def calculate_share_space(self, ignore_stacking=1, **filters):
        """
        :param filters: These are the parameters which the data frame is filtered by.
        :param ignore_stacking: 1 is to ignore stacking.
        :return: The total number of facings and the shelf width (in mm) according to the filters.
        """
        filtered_scif = self.scif[self.toolbox.get_filter_condition(self.scif, **filters)]
        if ignore_stacking:
            sum_of_facings = filtered_scif['facings_ign_stack'].sum()
            space_length = filtered_scif['net_len_ign_stack'].sum()
        else:
            sum_of_facings = filtered_scif['facings'].sum()
            space_length = filtered_scif['net_len_ign_stack'].sum()

        return sum_of_facings, space_length

    @staticmethod
    def get_template_path():
        return os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'Data', KPIS_TEMPLATE_NAME)

    def get_template_data(self):
        template_data = {}
        try:
            sheet_names = pd.ExcelFile(self.template_path).sheet_names
            for sheet in sheet_names:
                template_data[sheet] = parse_template(self.template_path, sheet, lower_headers_row_index=0)
        except IOError as e:
            Log.error('Template {} does not exist. {}'.format(KPIS_TEMPLATE_NAME, repr(e)))
        return template_data

    @staticmethod
    def score_function(score, kpi):
        weight = float(kpi['Weight']) if kpi['Weight'] else 1
        score_function = kpi['Score Function'].lower()
        l_threshold = float(kpi['Lower Threshold'])*100 if kpi['Lower Threshold'] else 0
        h_threshold = float(kpi['Higher Threshold'])*100 if kpi['Higher Threshold'] else 100

        if score < l_threshold:
            score = 0
        elif score >= h_threshold:
            score = 100

        if score_function in [WEIGHTED_SCORE]:
            score = round(score*weight, 0)
        else:
            score = round(score, 0)

        return score
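
score_function above clamps a raw percentage against the template's lower and higher thresholds and then applies the KPI weight. Below is a stand-alone sketch of the same thresholding; the threshold and weight values are illustrative assumptions, not taken from a real KPI template.

def threshold_score(raw, lower=30.0, higher=80.0, weight=0.25):
    """Clamp a raw percentage against thresholds, then weight it (illustration only)."""
    if raw < lower:
        raw = 0        # below the lower threshold the KPI scores nothing
    elif raw >= higher:
        raw = 100      # at or above the higher threshold it scores in full
    return round(raw * weight, 0)

# threshold_score(65.0) -> 16.0, threshold_score(85.0) -> 25.0, threshold_score(20.0) -> 0.0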
Ejemplo n.º 29
0
class PepsicoUtil(UnifiedKPISingleton):

    LEVEL1 = 1
    LEVEL2 = 2
    LEVEL3 = 3

    EXCLUSION_TEMPLATE_PATH = os.path.join(
        os.path.dirname(os.path.realpath(__file__)), '..', 'Data',
        'Inclusion_Exclusion_Template_Rollout.xlsx')
    ADDITIONAL_DISPLAY = 'additional display'
    INCLUDE_EMPTY = True
    EXCLUDE_EMPTY = False
    OPERATION_TYPES = []

    SOS_VS_TARGET = 'SOS vs Target'
    HERO_SKU_SPACE_TO_SALES_INDEX = 'Hero SKU Space to Sales Index'
    HERO_SKU_SOS_VS_TARGET = 'Hero SKU SOS vs Target'
    LINEAR_SOS_INDEX = 'Linear SOS Index'
    PEPSICO = 'PEPSICO'
    SHELF_PLACEMENT = 'Shelf Placement'
    HERO_SKU_PLACEMENT_TOP = 'Hero SKU Placement by shelf numbers_Top'
    HERO_PLACEMENT = 'Hero Placement'
    HERO_SKU_STACKING = 'Hero SKU Stacking'
    HERO_SKU_PRICE = 'Hero SKU Price'
    HERO_SKU_PROMO_PRICE = 'Hero SKU Promo Price'
    BRAND_FULL_BAY_KPIS = ['Brand Full Bay_90', 'Brand Full Bay']
    BRAND_FULL_BAY = 'Brand Full Bay'
    HERO_PREFIX = 'Hero SKU'
    ALL = 'ALL'
    HERO_SKU_OOS_SKU = 'Hero SKU OOS - SKU'
    HERO_SKU_OOS = 'Hero SKU OOS'
    HERO_SKU_AVAILABILITY = 'Hero SKU Availability'
    BRAND_SPACE_TO_SALES_INDEX = 'Brand Space to Sales Index'
    BRAND_SPACE_SOS_VS_TARGET = 'Brand Space SOS vs Target'
    SUB_BRAND_SPACE_TO_SALES_INDEX = 'Sub Brand Space to Sales Index'
    SUB_BRAND_SPACE_SOS_VS_TARGET = 'Sub Brand Space SOS vs Target'
    PEPSICO_SEGMENT_SPACE_TO_SALES_INDEX = 'PepsiCo Segment Space to Sales Index'
    PEPSICO_SEGMENT_SOS_VS_TARGET = 'PepsiCo Segment SOS vs Target'
    PEPSICO_SUB_SEGMENT_SPACE_TO_SALES_INDEX = 'PepsiCo Sub Segment Space to Sales Index'
    PEPSICO_SUB_SEGMENT_SOS_VS_TARGET = 'PepsiCo Sub Segment SOS vs Target'

    PLACEMENT_BY_SHELF_NUMBERS_TOP = 'Placement by shelf numbers_Top'
    TOTAL_LINEAR_SPACE = 'Total Linear Space'
    NUMBER_OF_FACINGS = 'Number of Facings'
    NUMBER_OF_BAYS = 'Number of bays'
    NUMBER_OF_SHELVES = 'Number of shelves'
    PRODUCT_BLOCKING = 'Product Blocking'
    PRODUCT_BLOCKING_ADJACENCY = 'Product Blocking Adjacency'
    SHELF_PLACEMENT_VERTICAL_LEFT = 'Shelf Placement Vertical_Left'
    SHELF_PLACEMENT_VERTICAL_CENTER = 'Shelf Placement Vertical_Center'
    SHELF_PLACEMENT_VERTICAL_RIGHT = 'Shelf Placement Vertical_Right'
    NUMBER_OF_SHELVES_TEMPL_COLUMN = 'No of Shelves in Fixture (per bay) (key)'
    RELEVANT_SHELVES_TEMPL_COLUMN = 'Shelves From Bottom To Include (data)'
    SHELF_PLC_TARGETS_COLUMNS = [
        'kpi_operation_type_fk', 'operation_type', 'kpi_level_2_fk', 'type',
        NUMBER_OF_SHELVES_TEMPL_COLUMN, RELEVANT_SHELVES_TEMPL_COLUMN,
        'KPI Parent'
    ]
    SHELF_PLC_TARGET_COL_RENAME = {
        'kpi_operation_type_fk_x': 'kpi_operation_type_fk',
        'operation_type_x': 'operation_type',
        'kpi_level_2_fk_x': 'kpi_level_2_fk',
        'type_x': 'type',
        NUMBER_OF_SHELVES_TEMPL_COLUMN + '_x': NUMBER_OF_SHELVES_TEMPL_COLUMN,
        RELEVANT_SHELVES_TEMPL_COLUMN + '_x': RELEVANT_SHELVES_TEMPL_COLUMN,
        'KPI Parent_x': 'KPI Parent'
    }
    HERO_SKU_AVAILABILITY_SKU = 'Hero SKU Availability - SKU'
    HERO_SKU_PLACEMENT_BY_SHELF_NUMBERS = 'Hero SKU Placement by shelf numbers'

    HERO_SKU_AVAILABILITY_BY_HERO_TYPE = 'Hero SKU Availability by Hero Type'
    SHARE_OF_ASSORTMENT_BY_HERO_TYPE = 'Share of Assortment by Hero Type'
    HERO_SKU_LABEL = 'Hero SKU'
    HERO_TYPE = 'hero_type'
    HERO_SKU_SOS_OF_CAT_BY_HERO_TYPE = 'Hero SKU SOS of Category by Hero Type'
    CATEGORY_FULL_BAY = 'Category Full Bay'
    CSN = 'CSN'
    PRICE = 'Price'
    PROMO_PRICE = 'Promo Price'
    LINEAR_SPACE_PER_PRODUCT = 'Linear Space Per Product'
    FACINGS_PER_PRODUCT = 'Facings per Product'
    PRICE_SCENE = 'Price Scene'
    PROMO_PRICE_SCENE = 'Promo Price Scene'
    HERO_SKU_SOS = 'Hero SKU SOS'
    BRAND_SOS = 'Brand SOS'
    SUB_BRAND_SOS = 'Sub Brand SOS'
    PEPSICO_SEGMENT_SOS = 'PepsiCo Segment SOS'
    BRAND_SOS_OF_SEGMENT = 'Brand SOS of Segment'
    BINS_NOT_RECOGNIZED = 'Bins_not_recognized'

    def __init__(self, output, data_provider):
        super(PepsicoUtil, self).__init__(data_provider)
        self.output = output
        self.common = Common(self.data_provider)
        # self.common_v1 = CommonV1(self.data_provider)
        self.project_name = self.data_provider.project_name
        self.session_uid = self.data_provider.session_uid
        self.products = self.data_provider[Data.PRODUCTS]
        self.all_products = self.data_provider[Data.ALL_PRODUCTS]
        self.match_product_in_scene = self.data_provider[Data.MATCHES]
        self.visit_date = self.data_provider[Data.VISIT_DATE]
        self.session_info = self.data_provider[Data.SESSION_INFO]
        self.scene_info = self.data_provider[Data.SCENES_INFO]
        self.store_id = self.data_provider[Data.STORE_FK] if self.data_provider[Data.STORE_FK] is not None \
                                                            else self.session_info['store_fk'].values[0]
        self.scif = self.data_provider[Data.SCENE_ITEM_FACTS]
        self.rds_conn = PSProjectConnector(self.project_name,
                                           DbUsers.CalculationEng)
        self.display_scene = self.get_match_display_in_scene()
        self.kpi_static_data = self.common.get_kpi_static_data()
        self.kpi_results_queries = []

        self.probe_groups = self.get_probe_group()
        self.match_product_in_scene = self.match_product_in_scene.merge(
            self.probe_groups, on='probe_match_fk', how='left')

        self.toolbox = GENERALToolBox(self.data_provider)
        self.commontools = PEPSICOUKCommonToolBox(self.data_provider,
                                                  self.rds_conn)

        self.all_templates = self.commontools.all_templates
        self.custom_entities = self.commontools.custom_entities
        self.on_display_products = self.commontools.on_display_products
        self.exclusion_template = self.commontools.exclusion_template
        self.filtered_scif = self.commontools.filtered_scif.copy()
        self.filtered_matches = self.commontools.filtered_matches.copy()
        self.filtered_matches = self.filtered_matches.merge(
            self.probe_groups, on='probe_match_fk', how='left')

        self.filtered_scif_secondary = self.commontools.filtered_scif_secondary.copy()
        self.filtered_matches_secondary = self.commontools.filtered_matches_secondary.copy()

        self.scene_bay_shelf_product = self.commontools.scene_bay_shelf_product
        self.ps_data = PsDataProvider(self.data_provider, self.output)
        self.full_store_info = self.commontools.full_store_info.copy()
        self.external_targets = self.commontools.external_targets
        self.assortment = Assortment(self.commontools.data_provider,
                                     self.output)
        self.lvl3_ass_result = self.get_lvl3_relevant_assortment_result()
        self.own_manuf_fk = self.all_products[
            self.all_products['manufacturer_name'] ==
            self.PEPSICO]['manufacturer_fk'].values[0]

        self.scene_kpi_results = self.get_results_of_scene_level_kpis()
        self.kpi_results_check = pd.DataFrame(columns=[
            'kpi_fk', 'numerator', 'denominator', 'result', 'score', 'context'
        ])
        self.sos_vs_target_targets = self.construct_sos_vs_target_base_df()

        self.all_targets_unpacked = self.commontools.all_targets_unpacked.copy()
        self.block_results = pd.DataFrame(columns=['Group Name', 'Score'])
        self.hero_type_custom_entity_df = self.get_hero_type_custom_entity_df()

    def get_match_display_in_scene(self):
        query = PEPSICOUK_Queries.get_match_display(self.session_uid)
        match_display = pd.read_sql_query(query, self.rds_conn.db)
        return match_display

    def get_probe_group(self):
        query = PEPSICOUK_Queries.get_probe_group(self.session_uid)
        probe_group = pd.read_sql_query(query, self.rds_conn.db)
        return probe_group

    @staticmethod
    def get_full_bay_and_positional_filters(parameters):
        filters = {parameters['Parameter 1']: parameters['Value 1']}
        if parameters['Parameter 2']:
            filters.update({parameters['Parameter 2']: parameters['Value 2']})
        if parameters['Parameter 3']:
            filters.update({parameters['Parameter 3']: parameters['Value 3']})
        return filters

    # @staticmethod
    # def get_stack_data(row):
    #     is_stack = False
    #     sequences_list = row['all_sequences'][0:-1].split(',')
    #     count_sequences = collections.Counter(sequences_list)
    #     repeating_items = [c > 1 for c in count_sequences.values()]
    #     if repeating_items:
    #         if any(repeating_items):
    #             is_stack = True
    #     return is_stack

    @staticmethod
    def split_and_strip(value):
        return map(lambda x: x.strip(), str(value).split(','))

    def construct_sos_vs_target_base_df(self):
        sos_targets = self.get_relevant_sos_vs_target_kpi_targets()
        sos_targets = sos_targets.drop_duplicates(subset=[
            'kpi_operation_type_fk', 'kpi_level_2_fk', 'numerator_value',
            'denominator_value', 'type'
        ],
                                                  keep='first')
        sos_targets = sos_targets.drop(
            ['key_json', 'data_json', 'start_date', 'end_date'], axis=1)
        if not sos_targets.empty:
            sos_targets['numerator_id'] = sos_targets.apply(
                self.retrieve_relevant_item_pks,
                axis=1,
                args=('numerator_type', 'numerator_value'))
            sos_targets['denominator_id'] = sos_targets.apply(
                self.retrieve_relevant_item_pks,
                axis=1,
                args=('denominator_type', 'denominator_value'))
            sos_targets['identifier_parent'] = sos_targets['KPI Parent'].apply(
                lambda x: self.common.get_dictionary(kpi_fk=int(float(x))))
        return sos_targets

    def get_relevant_sos_vs_target_kpi_targets(self, brand_vs_brand=False):
        sos_vs_target_kpis = self.external_targets[
            self.external_targets['operation_type'] == self.SOS_VS_TARGET]
        sos_vs_target_kpis = sos_vs_target_kpis.drop_duplicates(subset=[
            'operation_type', 'kpi_level_2_fk', 'key_json', 'data_json'
        ])
        relevant_targets_df = pd.DataFrame(
            columns=sos_vs_target_kpis.columns.values.tolist())
        if not sos_vs_target_kpis.empty:
            policies_df = self.commontools.unpack_external_targets_json_fields_to_df(
                sos_vs_target_kpis, field_name='key_json')
            policy_columns = policies_df.columns.values.tolist()
            del policy_columns[policy_columns.index('pk')]
            store_dict = self.full_store_info.to_dict('records')[0]
            for column in policy_columns:
                store_att_value = store_dict.get(column)
                policies_df = policies_df[policies_df[column].isin(
                    [store_att_value, self.ALL])]
            kpi_targets_pks = policies_df['pk'].values.tolist()
            relevant_targets_df = sos_vs_target_kpis[
                sos_vs_target_kpis['pk'].isin(kpi_targets_pks)]
            # relevant_targets_df = relevant_targets_df.merge(policies_df, on='pk', how='left')
            data_json_df = self.commontools.unpack_external_targets_json_fields_to_df(
                relevant_targets_df, 'data_json')
            relevant_targets_df = relevant_targets_df.merge(data_json_df,
                                                            on='pk',
                                                            how='left')

            kpi_data = self.kpi_static_data[['pk', 'type']].drop_duplicates()
            kpi_data.rename(columns={'pk': 'kpi_level_2_fk'}, inplace=True)
            relevant_targets_df = relevant_targets_df.merge(
                kpi_data,
                left_on='kpi_level_2_fk',
                right_on='kpi_level_2_fk',
                how='left')
            linear_sos_fk = self.common.get_kpi_fk_by_kpi_type(
                self.LINEAR_SOS_INDEX)
            if brand_vs_brand:
                relevant_targets_df = relevant_targets_df[
                    relevant_targets_df['KPI Parent'] == linear_sos_fk]
            else:
                relevant_targets_df = relevant_targets_df[~(
                    relevant_targets_df['KPI Parent'] == linear_sos_fk)]
        return relevant_targets_df

    def retrieve_relevant_item_pks(self, row, type_field_name,
                                   value_field_name):
        try:
            if row[type_field_name].endswith("_fk"):
                item_id = row[value_field_name]
            else:
                # print row[type_field_name], ' :', row[value_field_name]
                item_id = self.custom_entities[
                    self.custom_entities['name'] ==
                    row[value_field_name]]['pk'].values[0]
        except KeyError as e:
            Log.error('No id found for field {}. Error: {}'.format(
                row[type_field_name], e))
            item_id = None
        return item_id

    def calculate_sos(self, sos_filters, **general_filters):
        numerator_linear = self.calculate_share_space(
            **dict(sos_filters, **general_filters))
        denominator_linear = self.calculate_share_space(**general_filters)
        return float(numerator_linear), float(denominator_linear)
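
    # Illustrative sketch (not in the original util): turning the two linear lengths
    # returned by calculate_sos into a share-of-shelf ratio, guarding against an
    # empty denominator.
    def _example_linear_sos(self, sos_filters, **general_filters):
        numerator, denominator = self.calculate_sos(sos_filters, **general_filters)
        return numerator / denominator if denominator else 0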

    def calculate_share_space(self, **filters):
        filtered_scif = self.filtered_scif[self.toolbox.get_filter_condition(
            self.filtered_scif, **filters)]
        space_length = filtered_scif['updated_gross_length'].sum()
        return space_length

    def add_kpi_result_to_kpi_results_df(self, result_list):
        self.kpi_results_check.loc[len(self.kpi_results_check)] = result_list

    def get_results_of_scene_level_kpis(self):
        scene_kpi_results = pd.DataFrame()
        if not self.scene_info.empty:
            scene_kpi_results = self.ps_data.get_scene_results(
                self.scene_info['scene_fk'].drop_duplicates().values)
        return scene_kpi_results

    def get_store_data_by_store_id(self):
        store_id = self.store_id if self.store_id else self.session_info[
            'store_fk'].values[0]
        query = PEPSICOUK_Queries.get_store_data_by_store_id(store_id)
        query_result = pd.read_sql_query(query, self.rds_conn.db)
        return query_result

    def get_facings_scene_bay_shelf_product(self):
        self.filtered_matches['count'] = 1
        aggregate_df = self.filtered_matches.groupby(
            ['scene_fk', 'bay_number', 'shelf_number', 'product_fk'],
            as_index=False).agg({'count': np.sum})
        return aggregate_df

    def get_lvl3_relevant_assortment_result(self):
        assortment_result = self.assortment.get_lvl3_relevant_ass()
        # if assortment_result.empty:
        #     return assortment_result
        # products_in_session = self.filtered_scif.loc[self.filtered_scif['facings'] > 0]['product_fk'].values
        # assortment_result.loc[assortment_result['product_fk'].isin(products_in_session), 'in_store'] = 1
        return assortment_result

    @staticmethod
    def get_block_and_adjacency_filters(target_series):
        filters = {target_series['Parameter 1']: target_series['Value 1']}
        if target_series['Parameter 2']:
            filters.update(
                {target_series['Parameter 2']: target_series['Value 2']})

        if target_series['Parameter 3']:
            filters.update(
                {target_series['Parameter 3']: target_series['Value 3']})
        return filters

    @staticmethod
    def get_block_filters(target_series):
        if isinstance(target_series['Value 1'], list):
            filters = {target_series['Parameter 1']: target_series['Value 1']}
        else:
            filters = {
                target_series['Parameter 1']: [target_series['Value 1']]
            }

        if target_series['Parameter 2']:
            if isinstance(target_series['Value 2'], list):
                filters.update(
                    {target_series['Parameter 2']: target_series['Value 2']})
            else:
                filters.update(
                    {target_series['Parameter 2']: [target_series['Value 2']]})

        if target_series['Parameter 3']:
            if isinstance(target_series['Value 3'], list):
                filters.update(
                    {target_series['Parameter 3']: target_series['Value 3']})
            else:
                filters.update(
                    {target_series['Parameter 3']: [target_series['Value 3']]})
        return filters
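
    # Illustrative alternative (an assumption, not the original code): the same
    # scalar-to-list normalisation as get_block_filters, written once over the
    # optional parameter slots.
    @staticmethod
    def _example_block_filters(target_series):
        def as_list(value):
            return value if isinstance(value, list) else [value]

        filters = {target_series['Parameter 1']: as_list(target_series['Value 1'])}
        for i in (2, 3):
            if target_series['Parameter %s' % i]:
                filters[target_series['Parameter %s' % i]] = as_list(target_series['Value %s' % i])
        return filters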

    def reset_filtered_scif_and_matches_to_exclusion_all_state(self):
        self.filtered_scif = self.commontools.filtered_scif.copy()
        self.filtered_matches = self.commontools.filtered_matches.copy()

    def reset_secondary_filtered_scif_and_matches_to_exclusion_all_state(self):
        self.filtered_scif_secondary = self.commontools.filtered_scif_secondary.copy()
        self.filtered_matches_secondary = self.commontools.filtered_matches_secondary.copy()

    def get_available_hero_sku_list(self, dependencies_df):
        hero_list = dependencies_df[
            (dependencies_df['kpi_type'] == self.HERO_SKU_AVAILABILITY_SKU)
            & (dependencies_df['numerator_result'] == 1
               )]['numerator_id'].unique().tolist()
        return hero_list

    def get_unavailable_hero_sku_list(self, dependencies_df):
        hero_list = dependencies_df[
            (dependencies_df['kpi_type'] == self.HERO_SKU_AVAILABILITY_SKU)
            & (dependencies_df['numerator_result'] == 0
               )]['numerator_id'].unique().tolist()
        return hero_list

    def get_hero_type_custom_entity_df(self):
        hero_type_df = self.custom_entities[self.custom_entities['entity_type'] ==
                                            self.HERO_TYPE].copy()
        hero_type_df.rename(columns={'pk': 'entity_fk'}, inplace=True)
        return hero_type_df
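
get_available_hero_sku_list and get_unavailable_hero_sku_list above simply split the 'Hero SKU Availability - SKU' dependency results on numerator_result. A self-contained illustration with hypothetical product ids:

import pandas as pd

# hypothetical dependency results; column names mirror those used above
dependencies_df = pd.DataFrame({
    'kpi_type': ['Hero SKU Availability - SKU'] * 3,
    'numerator_id': [101, 102, 103],
    'numerator_result': [1, 0, 1],
})
available = dependencies_df[
    (dependencies_df['kpi_type'] == 'Hero SKU Availability - SKU') &
    (dependencies_df['numerator_result'] == 1)]['numerator_id'].unique().tolist()
# available == [101, 103]; the rows with numerator_result == 0 form the unavailable list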
Ejemplo n.º 30
0
class HEINEKENTWToolBox:
    LEVEL1 = 1
    LEVEL2 = 2
    LEVEL3 = 3

    DIST_STORE_LVL1 = 1014
    OOS_STORE_LVL1 = 1015
    DIST_STORE_LVL2 = 1016
    OOS_STORE_LVL2 = 1017

    DIST_CATEGORY_LVL1 = 1018
    OOS_CATEGORY_LVL1 = 1019
    DIST_CATEGORY_LVL2 = 1020
    OOS_CATEGORY_LVL2 = 1021

    DISTRIBUTION = 4
    OOS = 5

    MANUFACTURER_FK = 175  # Heineken manufacturer

    def __init__(self, data_provider, output):
        self.output = output
        self.data_provider = data_provider
        self.common = Common(self.data_provider)
        self.project_name = self.data_provider.project_name
        self.session_uid = self.data_provider.session_uid
        self.products = self.data_provider[Data.PRODUCTS]
        self.all_products = self.data_provider[Data.ALL_PRODUCTS]
        self.match_product_in_scene = self.data_provider[Data.MATCHES]
        self.visit_date = self.data_provider[Data.VISIT_DATE]
        self.session_info = self.data_provider[Data.SESSION_INFO]
        self.scene_info = self.data_provider[Data.SCENES_INFO]
        self.store_id = self.data_provider[Data.STORE_FK]
        self.scif = self.data_provider[Data.SCENE_ITEM_FACTS]
        self.rds_conn = PSProjectConnector(self.project_name,
                                           DbUsers.CalculationEng)
        self.kpi_static_data = self.common.get_new_kpi_static_data()
        self.kpi_results_queries = []
        self.assortment = Assortment(self.data_provider, self.output)

    def main_calculation(self, *args, **kwargs):
        """
        This function runs the category-level and store-level assortment calculations and commits the results.
        """
        lvl3_result = self.assortment.calculate_lvl3_assortment()
        self.category_assortment_calculation(lvl3_result)
        self.store_assortment_calculation(lvl3_result)
        self.common.commit_results_data_to_new_tables()

        # self.common.commit_results_data_to_new_tables()

    def category_assortment_calculation(self, lvl3_result):
        """
        This function calculates two levels of assortment per category:
        level 3 is assortment SKU
        level 2 is assortment groups
        """
        if not lvl3_result.empty:
            # cat_df = self.scif[['product_fk', 'category_fk']]
            cat_df = self.all_products[['product_fk', 'category_fk']]
            lvl3_with_cat = lvl3_result.merge(cat_df,
                                              on='product_fk',
                                              how='left')
            lvl3_with_cat = lvl3_with_cat[
                lvl3_with_cat['category_fk'].notnull()]

            for result in lvl3_with_cat.itertuples():
                if result.in_store == 1:
                    score = self.DISTRIBUTION
                else:
                    score = self.OOS

                # Distribution
                self.common.write_to_db_result_new_tables(
                    fk=self.DIST_CATEGORY_LVL1,
                    numerator_id=result.product_fk,
                    numerator_result=score,
                    result=score,
                    denominator_id=result.category_fk,
                    denominator_result=1,
                    score=score,
                    score_after_actions=score)
                if score == self.OOS:
                    # OOS
                    self.common.write_to_db_result_new_tables(
                        fk=self.OOS_CATEGORY_LVL1,
                        numerator_id=result.product_fk,
                        numerator_result=score,
                        result=score,
                        denominator_id=result.category_fk,
                        denominator_result=1,
                        score=score,
                        score_after_actions=score)

            category_list = lvl3_with_cat['category_fk'].unique()
            for cat in category_list:
                lvl3_result_cat = lvl3_with_cat[lvl3_with_cat["category_fk"] ==
                                                cat]
                lvl2_result = self.assortment.calculate_lvl2_assortment(
                    lvl3_result_cat)
                for result in lvl2_result.itertuples():
                    denominator_res = result.total
                    res = np.divide(float(result.passes),
                                    float(denominator_res))
                    # Distribution
                    self.common.write_to_db_result_new_tables(
                        fk=self.DIST_CATEGORY_LVL2,
                        numerator_id=self.MANUFACTURER_FK,
                        numerator_result=result.passes,
                        denominator_id=cat,
                        denominator_result=denominator_res,
                        result=res,
                        score=res,
                        score_after_actions=res)

                    # OOS
                    self.common.write_to_db_result_new_tables(
                        fk=self.OOS_CATEGORY_LVL2,
                        numerator_id=self.MANUFACTURER_FK,
                        numerator_result=denominator_res - result.passes,
                        denominator_id=cat,
                        denominator_result=denominator_res,
                        result=1 - res,
                        score=(1 - res),
                        score_after_actions=1 - res)
        return

    def store_assortment_calculation(self, lvl3_result):
        """
        This function calculates the store-level assortment (Distribution and OOS) KPI results.
        """

        for result in lvl3_result.itertuples():
            if result.in_store == 1:
                score = self.DISTRIBUTION
            else:
                score = self.OOS

            # Distribution
            self.common.write_to_db_result_new_tables(
                fk=self.DIST_STORE_LVL1,
                numerator_id=result.product_fk,
                numerator_result=score,
                result=score,
                denominator_id=self.store_id,
                denominator_result=1,
                score=score)
            if score == self.OOS:
                # OOS
                self.common.write_to_db_result_new_tables(
                    fk=self.OOS_STORE_LVL1,
                    numerator_id=result.product_fk,
                    numerator_result=score,
                    result=score,
                    denominator_id=self.store_id,
                    denominator_result=1,
                    score=score,
                    score_after_actions=score)

        if not lvl3_result.empty:
            lvl2_result = self.assortment.calculate_lvl2_assortment(
                lvl3_result)
            for result in lvl2_result.itertuples():
                denominator_res = result.total
                if not pd.isnull(result.target) and not pd.isnull(
                        result.group_target_date
                ) and result.group_target_date <= self.assortment.current_date:
                    denominator_res = result.target
                res = np.divide(float(result.passes), float(denominator_res))
                # Distribution
                self.common.write_to_db_result_new_tables(
                    fk=self.DIST_STORE_LVL2,
                    numerator_id=self.MANUFACTURER_FK,
                    denominator_id=self.store_id,
                    numerator_result=result.passes,
                    denominator_result=denominator_res,
                    result=res,
                    score=res,
                    score_after_actions=res)

                # OOS
                self.common.write_to_db_result_new_tables(
                    fk=self.OOS_STORE_LVL2,
                    numerator_id=self.MANUFACTURER_FK,
                    numerator_result=denominator_res - result.passes,
                    denominator_id=self.store_id,
                    denominator_result=denominator_res,
                    result=1 - res,
                    score=1 - res,
                    score_after_actions=1 - res)
        return
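
As a closing illustration: the level-1 writes above map in_store onto the result codes DISTRIBUTION (4) and OOS (5), and only the OOS rows receive the additional OOS-level write. A minimal sketch with hypothetical products:

import pandas as pd

DISTRIBUTION, OOS = 4, 5      # result codes used by the toolbox above

# hypothetical level-3 rows; in_store is 1 when the assorted SKU was found in the visit
lvl3_result = pd.DataFrame({'product_fk': [11, 12, 13], 'in_store': [1, 0, 1]})
lvl3_result['result_code'] = lvl3_result['in_store'].apply(
    lambda in_store: DISTRIBUTION if in_store == 1 else OOS)

# rows flagged OOS are the ones that also receive an OOS_STORE_LVL1 record
oos_products = lvl3_result[lvl3_result['result_code'] == OOS]['product_fk'].tolist()  # [12]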