def write_to_db_result(self, df=None, level=None, kps_name_temp=None):
        """
        This function writes KPI results to old tables

        """
        if level == 'level3':
            df['atomic_kpi_fk'] = self.kpi_fetcher.get_atomic_kpi_fk(
                df['name'][0])
            df['kpi_fk'] = df['kpi_fk'][0]
            df_dict = df.to_dict()
            df_dict.pop('name', None)
            query = insert(df_dict, KPI_RESULT)
            self.kpi_results_queries.append(query)
        elif level == 'level2':
            kpi_name = df['kpk_name'][0].encode('utf-8')
            df['kpi_fk'] = self.kpi_fetcher.get_kpi_fk(kpi_name)
            df_dict = df.to_dict()
            # df_dict.pop("kpk_name", None)
            query = insert(df_dict, KPK_RESULT)
            self.kpi_results_queries.append(query)
        elif level == 'level1':
            df['kpi_set_fk'] = self.kpi_fetcher.get_kpi_set_fk()
            df_dict = df.to_dict()
            query = insert(df_dict, KPS_RESULT)
            self.kpi_results_queries.append(query)
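Every example in this collection leans on a shared insert(attributes, table) helper that is not shown here. A minimal sketch of what it is assumed to do, flattening a column-to-values mapping (either a plain dict of lists or a DataFrame.to_dict()) into a single-row INSERT statement; this is an illustration, not the library's actual implementation:

def insert(attributes, table):
    # Assumed behaviour: build one INSERT statement from the first row of a
    # column -> values mapping. DataFrame.to_dict() gives {column: {row: value}},
    # while the hand-built dicts give {column: [value]}.
    columns, values = [], []
    for column, column_values in attributes.items():
        first_value = (list(column_values.values())[0]
                       if isinstance(column_values, dict) else column_values[0])
        columns.append('`{}`'.format(column))
        values.append('NULL' if first_value is None else "'{}'".format(first_value))
    return 'INSERT INTO {} ({}) VALUES ({});'.format(
        table, ', '.join(columns), ', '.join(values))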
Example #2
 def write_to_db_result(self,
                        fk,
                        level,
                        score,
                        set_type=Const.SOVI,
                        **kwargs):
     """
     This function creates the result data frame of every KPI (atomic KPI/KPI/KPI set),
     and appends the insert SQL query into the queries' list, later to be written to the DB.
     """
     if kwargs:
         kwargs['score'] = score
         attributes = self.create_attributes_dict(fk=fk,
                                                  level=level,
                                                  set_type=set_type,
                                                  **kwargs)
     else:
         attributes = self.create_attributes_dict(fk=fk,
                                                  score=score,
                                                  set_type=set_type,
                                                  level=level)
     if level == self.common_db.LEVEL1:
         table = self.common_db.KPS_RESULT
     elif level == self.common_db.LEVEL2:
         table = self.common_db.KPK_RESULT
     elif level == self.common_db.LEVEL3:
         table = self.common_db.KPI_RESULT
     else:
         return
     query = insert(attributes, table)
     if set_type == Const.SOVI:
         self.common_db.kpi_results_queries.append(query)
     else:
         self.common_db_integ.kpi_results_queries.append(query)
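create_attributes_dict is not shown above. A hedged sketch of the kind of mapping it is assumed to return for a level-3 row, in the same {column: {0: value}} shape that DataFrame.to_dict() produces elsewhere in these examples; the column names beyond the fk and score are assumptions:

import datetime as dt

def create_level_3_attributes_sketch(fk, score, session_uid, **kwargs):
    # Hypothetical stand-in for create_attributes_dict(level=LEVEL3, ...):
    # one row, keyed as {column: {0: value}} so insert() can consume it directly.
    attributes = {'atomic_kpi_fk': {0: fk},
                  'score': {0: score},
                  'session_uid': {0: session_uid},
                  'calculation_time': {0: dt.datetime.utcnow().isoformat()}}
    for key, value in kwargs.items():  # e.g. result / threshold forwarded via **kwargs
        attributes[key] = {0: value}
    return attributes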
Example #3
 def write_to_db_result(self,
                        level,
                        kpi_set_fk,
                        atomic_name=None,
                        result=None,
                        display_text=None,
                        score=None):
     """
     This function creates the result data frame of every KPI (atomic KPI/KPI/KPI set),
     and appends the insert SQL query into the queries' list, later to be written to the DB.
     """
     attributes = self.create_attributes_dict(level,
                                              kpi_set_fk,
                                              result,
                                              atomic_name,
                                              display_text,
                                              score=score)
     if level == self.LEVEL1:
         table = KPS_RESULT
     elif level == self.LEVEL3:
         table = KPI_RESULT
     else:
         return
     query = insert(attributes, table)
     self.kpi_results_queries.append(query)
 def write_to_db_result(self,
                        kpi_set_fk,
                        result,
                        level,
                        score=None,
                        threshold=None,
                        kpi_name=None,
                        kpi_fk=None):
     """
     This function creates the result data frame of every KPI (atomic KPI/KPI/KPI set),
     and appends the insert SQL query into the queries' list, later to be written to the DB.
     """
     attributes = self.create_attributes_dict(kpi_set_fk,
                                              result=result,
                                              level=level,
                                              score=score,
                                              threshold=threshold,
                                              kpi_name=kpi_name,
                                              kpi_fk=kpi_fk)
     if level == self.LEVEL1:
         table = KPS_RESULT
     elif level == self.LEVEL2:
         table = KPK_RESULT
     elif level == self.LEVEL3:
         table = KPI_RESULT
     else:
         return
     query = insert(attributes, table)
     self.kpi_results_queries.append(query)
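For orientation, the LEVEL1/LEVEL2/LEVEL3 constants checked in these branches follow the usual set / KPI / atomic-KPI hierarchy, and KPS_RESULT / KPK_RESULT / KPI_RESULT name the matching results tables. The concrete values below are assumptions for illustration only:

# Assumed values, mirroring the set / kpi / atomic-kpi convention:
LEVEL1, LEVEL2, LEVEL3 = 1, 2, 3
KPS_RESULT = 'report.kps_results'   # level 1: KPI-set scores (assumed table name)
KPK_RESULT = 'report.kpk_results'   # level 2: KPI scores (assumed table name)
KPI_RESULT = 'report.kpi_results'   # level 3: atomic-KPI results (assumed table name)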
Example #5
 def write_to_db_level_2_result(self, kpi_name, set_name, score, weight):
     kpi_fk = self._get_kpi_fk(kpi_name, set_name)
     attributes = self._create_level_2_attributes_dict(fk=kpi_fk,
                                                       score=score,
                                                       weight=weight)
     query = insert(attributes, KPK_RESULT)
     self._kpk_results_queries.append(query)
Example #6
 def write_to_db_level_3_result(self, atomic_kpi_name, kpi_name,
                                kpi_set_name, score, threshold, result):
     atomic_kpi_fk = self._get_atomic_fk(atomic_kpi_name=atomic_kpi_name,
                                         kpi_name=kpi_name,
                                         kpi_set_name=kpi_set_name)
     attributes = self._create_level_3_attributes_dict(
         atomic_kpi_fk, score, threshold, result)
     query = insert(attributes, KPI_RESULT)
     self._kpi_results_queries.append(query)
 def write_to_db_result_new_tables(self, fk, numerator_id, numerator_result, result, denominator_id=None,
                                   denominator_result=None, score=None):
     """
         This function creates the result data frame of new rables KPI,
         and appends the insert SQL query into the queries' list, later to be written to the DB.
         """
     table = KPI_NEW_TABLE
     attributes = self.create_attributes_dict_new_tables(fk, numerator_id, numerator_result, denominator_id,
                                                         denominator_result, result, score)
     query = insert(attributes, table)
     self.kpi_results_queries.append(query)
Example #8
 def get_activation_query(self, store_fk, product_fk, date):
     # if date in self.stores_min_dates[store_fk]:
     attributes = pd.DataFrame(
         [(store_fk, product_fk, str(date), None)],
         columns=['store_fk', 'product_fk', 'start_date', 'is_current'])
     # else:
     #     end_date = datetime.strptime(date, "%Y-%m-%d") + timedelta(7)
     #     attributes = pd.DataFrame([(store_fk, product_fk, str(date), None, str(end_date))],
     #                               columns=['store_fk', 'product_fk', 'start_date', 'is_current', 'end_date'])
     query = insert(attributes.to_dict(), TOP_SKU_TABLE)
     return query
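The pd.DataFrame(...).to_dict() idiom above (and in several later examples) produces the nested {column: {row_index: value}} mapping that insert() consumes. A quick illustration with made-up fks:

import pandas as pd

attributes = pd.DataFrame([(12, 345, '2020-01-01', None)],
                          columns=['store_fk', 'product_fk', 'start_date', 'is_current'])
print(attributes.to_dict())
# {'store_fk': {0: 12}, 'product_fk': {0: 345}, 'start_date': {0: '2020-01-01'}, 'is_current': {0: None}}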
Example #9
 def insert_into_kpi_lvl_2(self):
     if self.remove_duplicates:
         self.template_data = self.template_data.drop_duplicates(
             subset=['type'], keep='first')
     for i, row in self.template_data.iterrows():
         attributes = self.create_attributes_dict(row)
         query = insert(attributes, Consts.STATIC_KPI_LVL_2)
         self.insert_queries.append(query)
     merged_queries = self.merge_insert_queries()
     # print merged_queries
     self.commit_to_db(merged_queries)
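merge_insert_queries is not shown above. A hedged sketch of what it is assumed to do, grouping single-row INSERT statements that share the same prefix and splicing their VALUES tuples together so fewer statements hit the DB; the helper name comes from the call above, the body is an assumption:

from collections import defaultdict

def merge_insert_queries(insert_queries):
    # Group by everything up to VALUES, then concatenate the value tuples.
    grouped = defaultdict(list)
    for query in insert_queries:
        prefix, values = query.split('VALUES', 1)
        grouped[prefix.strip()].append(values.strip().rstrip(';'))
    return ['{} VALUES {};'.format(prefix, ', '.join(values_list))
            for prefix, values_list in grouped.items()]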
 def write_session_data(self):
     created_time = datetime.utcnow().isoformat()
     attributes = pd.DataFrame([(self.object, self.object_fk, self.status, self.reason, self.comment, self.user,
                                 created_time, self.session_uid)],
                               columns=['object', 'object_fk', 'status', 'reason', 'comment', 'user',
                                        'created_time', 'session_uid'])
     query = insert(attributes.to_dict(), DB_TABLE)
     cur = self.aws_connector.db.cursor()
     cur.execute("delete from {} where object_fk = '{}' and object = '{}'".format(DB_TABLE, self.object_fk, OBJECT))
     cur.execute(query)
     self.aws_connector.db.commit()
Example #11
 def get_custom_scif_query(session_fk, scene_fk, product_fk, in_assortment,
                           distributed):
     in_assortment = 1 if in_assortment else 0
     out_of_stock = 1 if not distributed else 0
     attributes = pd.DataFrame(
         [(session_fk, scene_fk, product_fk, in_assortment, out_of_stock)],
         columns=[
             'session_fk', 'scene_fk', 'product_fk', 'in_assortment_osa',
             'oos_osa'
         ])
     query = insert(attributes.to_dict(), CUSTOM_SCIF_TABLE)
     return query
 def _write_level_1_to_static(self, level_1_record):
     attributes = {'name': [level_1_record['set_name']]}
     query = insert(attributes, 'static.kpi_set')
     self.cur.execute(query)
     new_set_fk = self.cur.lastrowid
     record_to_add_to_static = pd.DataFrame.from_records([{
         'kpi_set_name': level_1_record['set_name'],
         'kpi_set_fk': new_set_fk
     }])
     self.current_kpi_set = self.current_kpi_set.append(
         record_to_add_to_static, ignore_index=True)
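The _write_level_*_to_static methods here and further down rely on the insert-then-read-back pattern of MySQL-style cursors: after an INSERT, cursor.lastrowid holds the auto-increment pk of the new row, which is cached locally so child records can reference it without an extra SELECT. A minimal sketch of that step, assuming a DB-API cursor:

def execute_and_get_fk(cursor, query):
    # Run the INSERT and return the auto-increment pk it generated
    # (MySQLdb / PyMySQL cursors expose this as lastrowid).
    cursor.execute(query)
    return cursor.lastrowid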
 def write_to_db_result(self, df=None, level=None, kps_name_temp=None):
     if level == 'level3':
         atomic_kpi_fk = self.kpi_static_data[self.kpi_static_data['kpi_fk'] ==
                                              df['kpi_fk'][0]]['atomic_kpi_fk'].values[0]
         df['atomic_kpi_fk'] = atomic_kpi_fk
         df['kpi_fk'] = df['kpi_fk'][0]
         df_dict = df.to_dict()
         query = insert(df_dict, KPI_RESULT)
         return query
     elif level == 'level2':
         kpi_fk = self.kpi_static_data[(self.kpi_static_data['kpi_set_name'] == kps_name_temp) &
                                       (self.kpi_static_data['kpi_name'] == df['kpk_name'][0])]['kpi_fk'].values[0]
         df['kpi_fk'] = kpi_fk
         df_dict = df.to_dict()
         query = insert(df_dict, KPK_RESULT)
         return query
     elif level == 'level1':
         kpi_set_fk = self.kpi_static_data[self.kpi_static_data['kpi_set_name'] ==
                                           kps_name_temp]['kpi_set_fk'].values[0]
         df['kpi_set_fk'] = kpi_set_fk
         df_dict = df.to_dict()
         query = insert(df_dict, KPS_RESULT)
         return query
 def write_to_db_result(self, df=None, level=None, kps_name_temp=None):
     # temp = kps_name_temp.encode('utf-8')
     # kps_name = str(temp)
     if level == 'level3':
         # query = Queries.get_kpi_results_data().format(df['kpi_fk'][0])
         # level3 = pd.read_sql_query(query, self.rds_conn.db)
         atomic_kpi_fk = self.kpi_static_data[
             self.kpi_static_data['kpi_fk'] == df['kpi_fk']
             [0]]['atomic_kpi_fk'].values[0]
         df['atomic_kpi_fk'] = atomic_kpi_fk
         df['kpi_fk'] = df['kpi_fk'][0]
         df_dict = df.to_dict()
         query = insert(df_dict, KPI_RESULT)
         return query
     elif level == 'level2':
         # temp = df['kpk_name'][0].encode('utf-8')
         # kpi_name = str(temp)
         # query = Queries.get_kpk_results_data().format(kpi_name, kps_name)
         # level2 = pd.read_sql_query(query, self.rds_conn.db)
         kpi_fk = self.kpi_static_data[
             (self.kpi_static_data['kpi_set_name'] == kps_name_temp)
             & (self.kpi_static_data['kpi_name'] == df['kpk_name'][0]
                )]['kpi_fk'].values[0]
         df['kpi_fk'] = kpi_fk
         df_dict = df.to_dict()
         query = insert(df_dict, KPK_RESULT)
         return query
     elif level == 'level1':
         # query = Queries.get_kps_results_data().format(kps_name)
         # level1 = pd.read_sql_query(query, self.rds_conn.db)
         kpi_set_fk = self.kpi_static_data[
             self.kpi_static_data['kpi_set_name'] ==
             kps_name_temp]['kpi_set_fk'].values[0]
         df['kpi_set_fk'] = kpi_set_fk
         df_dict = df.to_dict()
         query = insert(df_dict, KPS_RESULT)
         return query
    def write_to_db_result(self):
        """
        This function writes KPI results to old tables

        """
        query = self.kpi_fetcher.get_pk_to_delete(self.session_fk)
        self.rds_conn = PSProjectConnector(self.project_name, DbUsers.CalculationEng)
        pk = pd.read_sql_query(query, self.rds_conn.db)
        if not pk.empty:
            pk_to_delete = tuple(pk['pk'].unique().tolist())
            delete_query = self.kpi_fetcher.get_delete_session_results(pk_to_delete)
            self.delete_results_data(delete_query)
        df_dict = self.results.to_dict()
        query = insert(df_dict, CARREFOUR_INVENTORY)
        self.insert_results_data(query)
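The delete_results_data and insert_results_data helpers used above are not shown. A minimal sketch of the delete-then-insert flow they are assumed to wrap, given a live DB-API connection; the bodies here are assumptions:

def delete_results_data(connection, delete_query):
    # Clear any results already written for this session before re-inserting.
    cursor = connection.cursor()
    cursor.execute(delete_query)
    connection.commit()


def insert_results_data(connection, insert_query):
    cursor = connection.cursor()
    cursor.execute(insert_query)
    connection.commit()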
 def get_insert_queries_hierarchy(self, result_entity,
                                  scene_session_hierarchy):
     relevant_df = self.kpi_results[self.kpi_results[self.SHOULD_ENTER] ==
                                    True]
     queries = []
     table = self.HIERARCHY_SESSION_TABLE
     if result_entity == self.SCENE:
         table = self.HIERARCHY_SCENE_TABLE
     for i, line in relevant_df.iterrows():
         result_fk = line[self.SESSION_RESULT_FK]
         parent_result_fk = line[self.PARENT_FK]
         scene_kpi_results_fk = line[self.SCENE_RESULT_FK]
         switch = line[self.SWITCH]
         if result_entity == self.SCENE:
             attributes = pd.DataFrame([(result_fk, parent_result_fk)],
                                       columns=[
                                           'scene_kpi_results_fk',
                                           'scene_kpi_results_parent_fk'
                                       ]).to_dict()
         elif scene_session_hierarchy:
             attributes = pd.DataFrame(
                 [(parent_result_fk, scene_kpi_results_fk)],
                 columns=[
                     'session_kpi_results_parent_fk', 'scene_kpi_results_fk'
                 ]).to_dict()
         elif result_entity == self.SESSION:
             if not switch:
                 attributes = pd.DataFrame(
                     [(result_fk, parent_result_fk, scene_kpi_results_fk)],
                     columns=[
                         'session_kpi_results_fk',
                         'session_kpi_results_parent_fk',
                         'scene_kpi_results_fk'
                     ]).to_dict()
             else:
                 attributes = pd.DataFrame(
                     [(parent_result_fk, scene_kpi_results_fk, None)],
                     columns=[
                         'session_kpi_results_parent_fk',
                         'scene_kpi_results_fk', 'session_kpi_results_fk'
                     ]).to_dict()
         else:
             Log.error(
                 'Cannot Calculate results per {}'.format(result_entity))
             return
         query = insert(attributes, table)
         queries.append(query)
     return queries
 def write_to_db_result(self, fk, score, level):
     """
     This function creates the result data frame of every KPI (atomic KPI/KPI/KPI set),
     and appends the insert SQL query into the queries' list, later to be written to the DB.
     """
     attributes = self.create_attributes_dict(fk, score, level)
     if level == self.LEVEL1:
         table = KPS_RESULT
     elif level == self.LEVEL2:
         table = KPK_RESULT
     elif level == self.LEVEL3:
         table = KPI_RESULT
     else:
         return
     query = insert(attributes, table)
     self.kpi_results_queries.append(query)
    def get_custom_query(self,
                         scene_fk,
                         product_fk,
                         in_assortment_OSA=0,
                         oos_osa=0,
                         mha_in_assortment=0,
                         mha_oos=0,
                         length_mm_custom=0):
        attributes = pd.DataFrame(
            [(self.session_fk, scene_fk, product_fk, in_assortment_OSA,
              oos_osa, mha_in_assortment, mha_oos, length_mm_custom)],
            columns=[
                'session_fk', 'scene_fk', 'product_fk', 'in_assortment_OSA',
                'oos_osa', 'mha_in_assortment', 'mha_oos', 'length_mm_custom'
            ])

        query = insert(attributes.to_dict(), self.PSERVICE_CUSTOM_SCIF)
        self.custom_scif_queries.append(query)
Example #19
 def make_insert_queries_hierarchy(self, table, df):
     df = df[df[self.SHOULD_ENTER] == True]
     df.loc[df[self.SCENE_RESULT_FK].notnull(),
            self.SESSION_RESULT_FK] = None
     queries = []
     subset = [self.SESSION_RESULT_FK, self.PARENT_FK, self.SCENE_RESULT_FK]
     col_names = [
         'session_kpi_results_fk', 'session_kpi_results_parent_fk',
         'scene_kpi_results_fk'
     ]
     if table == self.HIERARCHY_SCENE_TABLE:
         subset = [self.SESSION_RESULT_FK, self.PARENT_FK]
         col_names = ['scene_kpi_results_fk', 'scene_kpi_results_parent_fk']
     for i, row in df.iterrows():
         query = insert(
             pd.DataFrame(row[subset].values.reshape(1, len(subset)),
                          columns=col_names).to_dict(), table)
         queries.append(query)
     return queries
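An illustration of the row-to-attributes conversion performed in the loop above, with made-up result fks; the lower-case series keys stand in for the class constants SESSION_RESULT_FK, PARENT_FK and SCENE_RESULT_FK:

import pandas as pd

row = pd.Series({'session_result_fk': 11, 'parent_fk': 7, 'scene_result_fk': None})
subset = ['session_result_fk', 'parent_fk', 'scene_result_fk']
col_names = ['session_kpi_results_fk', 'session_kpi_results_parent_fk', 'scene_kpi_results_fk']
print(pd.DataFrame(row[subset].values.reshape(1, len(subset)), columns=col_names).to_dict())
# {'session_kpi_results_fk': {0: 11}, 'session_kpi_results_parent_fk': {0: 7}, 'scene_kpi_results_fk': {0: None}}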
Example #20
 def write_gaps_to_db(self):
     """
     This function translates KPI gaps into SQL queries, later to be inserted into the DB.
     """
     for gap_category in self.gaps:
         priorities = list(range(1, 6))  # list() so .pop() also works on Python 3
         for gap in sorted(self.gaps[gap_category].keys()):
             if not priorities:
                 break
             kpi_name = self.gaps[gap_category][gap]
             translation_data = self.gap_translations[
                 self.gap_translations['KPI Name'] == kpi_name]
             if not translation_data.empty:
                 kpi_name = translation_data['Gap Text'].iloc[0]
             attributes = pd.DataFrame(
                 [(self.session_fk, gap_category, kpi_name,
                   priorities.pop(0))],
                 columns=['session_fk', 'gap_category', 'name', 'priority'])
             query = insert(attributes.to_dict(), CUSTOM_GAPS_TABLE)
             self.gaps_queries.append(query)
 def _write_level_2_to_static(self, level_2_record):
     set_name = level_2_record['set_name']
     kpi_name = level_2_record['kpi_name']
     set_data = self.current_kpi_set[self.current_kpi_set['kpi_set_name'] ==
                                     set_name]
     set_fk = set_data.iloc[0]['kpi_set_fk']
     attributes = {'display_text': [kpi_name], 'kpi_set_fk': [set_fk]}
     query = insert(attributes, 'static.kpi')
     self.cur.execute(query)
     new_kpi_fk = self.cur.lastrowid
     record_to_add_to_static = pd.DataFrame.from_records([{
         'kpi_set_name': set_name,
         'kpi_name': kpi_name,
         'kpi_fk': new_kpi_fk,
         'kpi_set_fk': set_fk
     }])
     self.current_kpi = self.current_kpi.append(record_to_add_to_static,
                                                ignore_index=True)
    def _write_level_3_to_static(self, level_2_record):
        set_name = level_2_record['set_name']
        kpi_name = level_2_record['kpi_name']
        atomic_kpi_name = level_2_record['atomic_name']
        kpi_fk_cond = ((self.current_kpi['kpi_set_name'] == set_name) &
                       (self.current_kpi['kpi_name'] == kpi_name))
        kpi_fk = self.current_kpi.loc[kpi_fk_cond, 'kpi_fk'].iloc[0]

        attributes = {
            'name': [atomic_kpi_name],
            'description': [atomic_kpi_name],
            'display_text': [atomic_kpi_name],
            'kpi_fk': [kpi_fk]
        }
        query = insert(attributes, 'static.atomic_kpi')
        self.cur.execute(query)


# if __name__ == '__main__':
#     Config.init()
#     LoggerInitializer.init('TREX')
#     project = 'ripetcareuk-prod'
#     update = UpdateStaticData(project)
#     update.update_static_data()
    def write_to_db_result(self,
                           fk=None,
                           numerator_id=0,
                           numerator_result=0,
                           result=0,
                           denominator_id=0,
                           denominator_result=0,
                           score=0,
                           score_after_actions=0,
                           denominator_result_after_actions=None,
                           numerator_result_after_actions=0,
                           weight=None,
                           kpi_level_2_target_fk=None,
                           context_id=None,
                           parent_fk=None,
                           target=None,
                           identifier_parent=None,
                           identifier_result=None,
                           should_enter=False,
                           by_scene=False,
                           scene_result_fk=None,
                           only_hierarchy=False):
        """
            This function creates the result data frame of new tables KPI,
            and appends the insert SQL query into the queries' list, later to be written to the DB.


            only_heirchey: in case you need to connection between session and existing scence results
        """
        table = self.KPI_SESSION_RESULTS_TABLE
        if by_scene:
            table = self.KPI_SCENE_RESULTS_TABLE
        attributes = self.create_attributes_dict(
            by_scene,
            kpi_fk=fk,
            numerator_id=numerator_id,
            numerator_result=numerator_result,
            denominator_id=denominator_id,
            denominator_result=denominator_result,
            result=result,
            score=score,
            score_after_actions=score_after_actions,
            denominator_result_after_actions=denominator_result_after_actions,
            weight=weight,
            kpi_level_2_target_fk=kpi_level_2_target_fk,
            context_id=context_id,
            parent_fk=parent_fk,
            target=target,
            numerator_result_after_actions=numerator_result_after_actions)
        query = insert(attributes, table)
        # The fictive_fk condition is added so as not to interfere with code that still uses fictive_fk
        # (we can migrate the relevant projects and then remove this condition).
        if only_hierarchy and by_scene:
            # raise FunctionUsageError('only_hierarchy and by_scene arguments cannot both be set to True')
            Log.error('only_hierarchy and by_scene arguments cannot both be set to True')
            return

        if (only_hierarchy and not by_scene) or fk == self.FICTIVE_FK:
            query = ''
            new_result = {
                self.SESSION_RESULT_FK: None,
                self.SHOULD_ENTER: should_enter,
                self.IDENTIFIER_PARENT: identifier_parent,
                self.SCENE_RESULT_FK: scene_result_fk,
                self.QUERY: query,
                self.IDENTIFIER_RESULT: identifier_result
            }
        else:
            new_result = {
                self.SESSION_RESULT_FK: self.current_pk,
                self.SHOULD_ENTER: should_enter,
                self.IDENTIFIER_PARENT: identifier_parent,
                self.SCENE_RESULT_FK: scene_result_fk,
                self.QUERY: query,
                self.IDENTIFIER_RESULT: identifier_result
            }
            self.current_pk += 1
        # self.current_pk += 1
        self.kpi_results = self.kpi_results.append(new_result,
                                                   ignore_index=True)
 def insert_attributes_to_db(self, attrs, table):
     query = insert(attrs, table)
     self.common.kpi_results_queries.append(query)
Example #25
 def write_to_db_level_1_result(self, set_name, score):
     set_fk = self._get_kpi_set_fk(set_name=set_name)
     attributes = self._create_level_1_attributes_dict(fk=set_fk,
                                                       score=score)
     query = insert(attributes, KPS_RESULT)
     self._kpi_results_queries.append(query)
 def write_to_db_result(self,
                        fk,
                        numerator_id=0,
                        numerator_result=0,
                        result=0,
                        denominator_id=0,
                        denominator_result=0,
                        score=0,
                        score_after_actions=0,
                        denominator_result_after_actions=None,
                        numerator_result_after_actions=0,
                        weight=None,
                        kpi_level_2_target_fk=None,
                        context_id=None,
                        parent_fk=None,
                        target=None,
                        identifier_parent=None,
                        identifier_result=None,
                        should_enter=False,
                        by_scene=False,
                        scene_result_fk=None,
                        switch=None):
     """
         This function creates the result data frame of new tables KPI,
         and appends the insert SQL query into the queries' list, later to be written to the DB.
     """
     table = self.KPI_SESSION_RESULTS_TABLE
     if by_scene:
         table = self.KPI_SCENE_RESULTS_TABLE
     attributes = self.create_attributes_dict(
         by_scene,
         kpi_fk=fk,
         numerator_id=numerator_id,
         numerator_result=numerator_result,
         denominator_id=denominator_id,
         denominator_result=denominator_result,
         result=result,
         score=score,
         score_after_actions=score_after_actions,
         denominator_result_after_actions=denominator_result_after_actions,
         weight=weight,
         kpi_level_2_target_fk=kpi_level_2_target_fk,
         context_id=context_id,
         parent_fk=parent_fk,
         target=target,
         numerator_result_after_actions=numerator_result_after_actions)
     query = insert(attributes, table)
     if fk == self.FICTIVE_FK:
         query = ''
     new_result = {
         self.SESSION_RESULT_FK: self.current_pk,
         self.SHOULD_ENTER: should_enter,
         self.IDENTIFIER_PARENT: identifier_parent,
         self.SCENE_RESULT_FK: scene_result_fk,
         self.QUERY: query,
         self.IDENTIFIER_RESULT: identifier_result,
         self.SWITCH: switch
     }
     self.current_pk += 1
     self.kpi_results = self.kpi_results.append(new_result,
                                                ignore_index=True)
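All of these write_to_db_result variants only queue queries in memory. A hedged sketch of the commit step they build toward, executing every queued INSERT over one connection; the function name and transaction handling are assumptions:

def commit_results_data(connection, kpi_results_queries):
    cursor = connection.cursor()
    for query in kpi_results_queries:
        if query:  # fictive / hierarchy-only results queue an empty string
            cursor.execute(query)
    connection.commit()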
 def get_activation_query(store_fk, product_fk, date):
     attributes = pd.DataFrame(
         [(store_fk, product_fk, str(date), 1)],
         columns=['store_fk', 'product_fk', 'start_date', 'is_current'])
     query = insert(attributes.to_dict(), STORE_ASSORTMENT_TABLE)
     return query
    def sync_external_targets(self, kpi_group_name, kpi_type):
        # print("Syncing => {} - {}".format(kpi_group_name, kpi_type))
        current_eans = self.population_filter_to_ean_codes(
            self.targets_from_template[kpi_group_name])
        # print(current_eans.shape)

        insert_queries = []
        insert_queries_after_patch = []
        for idx, row in current_eans.iterrows():
            kpi_fk = self.common.get_kpi_fk_by_kpi_type(kpi_type)
            product_group_name = row["Product_Group_Name"]
            rel_ext_targets = self.ext_targets[
                (self.ext_targets['Config Name'].str.encode('utf8') ==
                 product_group_name.encode('utf8'))
                & (self.ext_targets['kpi_type'] == kpi_type)]
            if rel_ext_targets.empty:
                if self.is_old_visit:
                    # print("Ignoring this line item, since its a recalc")
                    continue
                # print("Product group not found in ext_targets")
                # print("Inserting valid product_groups into external_targets")
                kpi_level_2_fk = kpi_fk
                kpi_operation_type_fk = 2
                start_date = str(datetime.now().date())
                key_json = {
                    "Config Name": row['Product_Group_Name'].replace("'", "\\'").encode('utf-8'),
                }
                data_json = {
                    "Population": row['Population_Filter'],
                    "Config Name": row['Product_Group_Name'].replace("'", "\\'").encode('utf-8'),
                    "entities": row['entities'].tolist()
                }
                # check if we need to re-insert / insert
                new_record = {
                    "kpi_operation_type_fk": {0: kpi_operation_type_fk},
                    "kpi_level_2_fk": {0: kpi_level_2_fk},
                    "start_date": {0: start_date},
                    # "key_json": {0: key_json},
                    # "data_json": {0: data_json}
                    "key_json": {0: json.dumps(key_json).encode('ascii').decode('unicode-escape')},
                    "data_json": {0: json.dumps(data_json).encode('ascii').decode('unicode-escape')}
                }
                insert_queries.append(
                    insert(new_record, "static.kpi_external_targets"))
            else:
                # check if the entities in the product_group changed recently.
                if rel_ext_targets.shape[0] > 1:
                    print("More than one records...")
                else:
                    if self.is_old_visit:
                        print("use the current eans stored in db")
                        # rel_ext_targets['entities'].iloc[0]
                        continue
                    # print("Only one record.")
                    # print("If the entities matching with curr_eancodes")
                    # relv_current_eans = block_current_eans[
                    #     (current_eans['Product_Group_Name'].str.encode('utf8') == product_group_name.encode('utf8'))
                    #     &
                    #     (current_eans['KPI_NAME'] == KPI_NAME)]['entities'].iloc[0]
                    relv_current_eans = row['entities']
                    relv_target_eans = rel_ext_targets['entities'].iloc[0]
                    if len(set(relv_target_eans) -
                           set(relv_current_eans)) == 0:
                        pass
                        # print("Same")
                        # print("Use this pk")
                    else:
                        # if the visit is a new visit, then apply this
                        # self.new_session
                        # if not, use old
                        # print("There are diff in entities. So end the current pk and save the new one.")
                        ext_target_pk_to_end = rel_ext_targets.pk.iloc[0]
                        # print("PK to update {}".format(ext_target_pk_to_end))
                        # update the end date for this pk
                        end_date = str(
                            (datetime.now() - timedelta(days=1)).date())
                        to_update = {"end_date": {0: end_date}}
                        update_query = self.get_table_update_query(
                            to_update, "static.kpi_external_targets",
                            "pk = {}".format(ext_target_pk_to_end))
                        self.commit_to_db([update_query])
                        # insert the new record to external_target with relv_current_eans.
                        kpi_level_2_fk = kpi_fk
                        kpi_operation_type_fk = 2
                        start_date = str(datetime.now().date())
                        key_json = {
                            "Config Name": row['Product_Group_Name'].replace("'", "\\'").encode('utf-8'),
                        }
                        data_json = {
                            "Population": row['Population_Filter'],
                            "Config Name": row['Product_Group_Name'].replace("'", "\\'").encode('utf-8'),
                            "entities": row['entities'].tolist()
                        }
                        # check if we need to re-insert / insert
                        new_record = {
                            "kpi_operation_type_fk": {0: kpi_operation_type_fk},
                            "kpi_level_2_fk": {0: kpi_level_2_fk},
                            "start_date": {0: start_date},
                            # "key_json": {0: key_json},
                            # "data_json": {0: data_json}
                            "key_json": {0: json.dumps(key_json).encode('ascii').decode('unicode-escape')},
                            "data_json": {0: json.dumps(data_json).encode('ascii').decode('unicode-escape')}
                        }
                        insert_queries_after_patch.append(
                            insert(new_record, "static.kpi_external_targets"))

        if len(insert_queries) > 0:
            # print("call insert_statement check")
            self.commit_to_db(insert_queries)

        if len(insert_queries_after_patch) > 0:
            # print("call insert_statement after updating old ones")
            self.commit_to_db(insert_queries_after_patch)
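get_table_update_query, used above to end-date the superseded external target, is not shown. A hedged sketch that builds an UPDATE statement from the same {column: {0: value}} mapping insert() consumes; the signature matches the call above, the body is an assumption:

def get_table_update_query(to_update, table, condition):
    # e.g. get_table_update_query({'end_date': {0: '2020-01-01'}}, 'static.kpi_external_targets', 'pk = 42')
    assignments = ', '.join("{} = '{}'".format(column, list(values.values())[0])
                            for column, values in to_update.items())
    return 'UPDATE {} SET {} WHERE {};'.format(table, assignments, condition)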
 def generate_insert_queries(self, kpi_pks):
     for pk in kpi_pks:
         attributes = self.create_attributes_dict(pk)
         query = insert(attributes, Consts.STATIC_KPI_VIEW_CONFIG)
         self.insert_queries.append(query)