def __init__(self, project, template_path):
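        # sheet_name=None loads every worksheet into a dict of {sheet_name: DataFrame}.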
        self.template = pd.read_excel(template_path, sheet_name=None)
        self.indexed_template = {
            key: df.set_index(Const.KPI_NAME)
            for key, df in self.template.items()
            if Const.KPI_NAME in df.columns
        }
        self.rds_conn = ProjectConnector(project, DbUsers.CalcAdmin)
        self.cur = self.rds_conn.db.cursor()

        self.kpi2_attribs = self.get_table_attributes(self.KPI)
        self.kpi2_table = self.read_table(self.KPI)
        self.kpi2_set = set(self.kpi2_table['type'])
        self.kpi_pk_set = set(self.kpi2_table['pk'])

        self.family_attribs = self.get_table_attributes(self.FAMILY_TABLE)

        self.entity_table = self.read_table(self.ENTITY_TABLE)
        self.family_table = self.read_table(self.FAMILY_TABLE)
        self.calc_stage_table = self.read_table(self.CALC_STAGE_TABLE)

        self.entity_set = set(self.entity_table['name'])
        self.family_set = {x.upper() for x in self.family_table['name']}
        self.calc_stage_set_pk = set(self.calc_stage_table['pk'])

        self.family_dict = self.family_table.set_index('name')['pk'].to_dict()
        self.entity_dict = self.entity_table.set_index('name')['pk'].to_dict()

        self.pk_max_family = max(self.family_table['pk'])

        self.errors = []

        self.main()
    def __init__(self, project, template_path):
        self.entity_pairs = self.parse_all(template_path)
        self.rds_conn = ProjectConnector(project, DbUsers.CalcAdmin)
        self.cur = self.rds_conn.db.cursor()

        self.custom_entity_attribs = self.get_table_attributes(
            self.CUSTOM_ENTITY_TABLE)
        self.entity_type_attribs = self.get_table_attributes(
            self.ENTITY_TYPE_TABLE)

        self.custom_entity_table = self.read_table(self.CUSTOM_ENTITY_TABLE)
        self.entity_type_table = self.read_table(self.ENTITY_TYPE_TABLE)

        self.custom_entity_set = set(self.custom_entity_table['name'])
        self.entity_type_set = set(self.entity_type_table['name'])

        self.custom_entity_pk = max(
            [int(i) for i in self.custom_entity_table['pk']])
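        # entity_type pks of 999 and above are excluded from the running max (presumably a reserved range).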
        self.entity_type_pk = max(
            [int(i) for i in self.entity_type_table['pk'] if int(i) < 999])

        self.entity_type_dict = self.entity_type_table.set_index(
            'name')['pk'].to_dict()

        self.update_db()
Example 3
    def __init__(self,
                 arg_sd=None,
                 arg_ed=None,
                 arg_to=None,
                 pivoted='pivoted'):
        self.log_prefix = BUCKET_FOLDER + '_' + PROJECT
        Log.init(self.log_prefix)
        Log.info(self.log_prefix + ' Opening SQL connector')
        self.project_name = PROJECT
        self.rds_conn = ProjectConnector(self.project_name,
                                         DbUsers.CalculationEng)

        self.pivoted = (pivoted == 'pivoted')

        self.current_date = datetime.datetime.utcnow().date()

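        # Default the reporting window to yesterday; arg_sd / arg_ed override it below.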
        self.start_date = self.current_date - datetime.timedelta(1)
        self.end_date = self.current_date - datetime.timedelta(1)

        self.start_date = arg_sd if arg_sd else self.start_date.strftime(
            '%Y-%m-%d')
        self.end_date = arg_ed if arg_ed else self.end_date.strftime(
            '%Y-%m-%d')

        self.report_name = FILE_NAME + '_' + str(self.start_date) + '_' + str(self.end_date) \
                           + ('_pivoted' if self.pivoted else '') \
                           + '_' + str(int(time.mktime(datetime.datetime.now().timetuple()))) \
                           + '.xlsx'

        self.receivers = arg_to.split(',') if arg_to else None
    def __init__(self, data_provider, output):
        self.output = output
        self.data_provider = data_provider
        self.common = Common(self.data_provider)
        self.common_v2 = CommonV2(self.data_provider)
        self.project_name = self.data_provider.project_name
        self.session_uid = self.data_provider.session_uid
        self.products = self.data_provider[Data.PRODUCTS]
        self.all_products = self.data_provider[Data.ALL_PRODUCTS]
        self.match_product_in_scene = self.data_provider[Data.MATCHES]
        self.visit_date = self.data_provider[Data.VISIT_DATE]
        self.session_info = self.data_provider[Data.SESSION_INFO]
        self.scene_info = self.data_provider[Data.SCENES_INFO]
        self.store_id = self.data_provider[Data.STORE_FK]
        self.scif = self.data_provider[Data.SCENE_ITEM_FACTS]
        self.template_info = self.data_provider.all_templates
        self.rds_conn = ProjectConnector(self.project_name,
                                         DbUsers.CalculationEng)
        self.ps_data_provider = PsDataProvider(self.data_provider)
        self.thresholds_and_results = {}
        self.result_df = []
        self.writing_to_db_time = datetime.timedelta(0)
        self.kpi_results_queries = []
        self.potential_products = {}
        self.shelf_square_boundaries = {}
        self.average_shelf_values = {}
        self.kpi_static_data = self.common.get_kpi_static_data()
        self.all_template_data = parse_template(TEMPLATE_PATH, "KPI")
        self.spacing_template_data = parse_template(TEMPLATE_PATH, "Spacing")
        self.fixture_width_template = pd.read_excel(FIXTURE_WIDTH_TEMPLATE,
                                                    "Fixture Width",
                                                    dtype=pd.Int64Dtype())
        self.facings_to_feet_template = pd.read_excel(FIXTURE_WIDTH_TEMPLATE,
                                                      "Conversion Table",
                                                      dtype=pd.Int64Dtype())
        self.header_positions_template = pd.read_excel(FIXTURE_WIDTH_TEMPLATE,
                                                       "Header Positions")
        self.flip_sign_positions_template = pd.read_excel(
            FIXTURE_WIDTH_TEMPLATE, "Flip Sign Positions")
        self.custom_entity_data = self.ps_data_provider.get_custom_entities(
            1005)
        self.ignore_stacking = False
        self.facings_field = 'facings' if not self.ignore_stacking else 'facings_ign_stack'
        self.INCLUDE_FILTER = 1
        self.assortment = Assortment(self.data_provider,
                                     output=self.output,
                                     ps_data_provider=self.ps_data_provider)
        self.store_assortment = self.assortment.get_lvl3_relevant_ass()

        self.kpi_new_static_data = self.common.get_new_kpi_static_data()
        try:
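            # Join scene matches with product, scene and template metadata into a single MPIS frame.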
            self.mpis = self.match_product_in_scene.merge(self.products, on='product_fk', suffixes=['', '_p']) \
                        .merge(self.scene_info, on='scene_fk', suffixes=['', '_s']) \
                        .merge(self.template_info, on='template_fk', suffixes=['', '_t'])
        except KeyError:
            Log.warning('MPIS cannot be generated!')
            return
        self.adp = AltriaDataProvider(self.data_provider)
    def check_db_values(self, xl_column_name, xl_column_type, db_column_name,
                        table_name):
        Log.info(
            "DB Values Check column_name:{} - Started".format(xl_column_name))
        result = True
        lst_rows = []
        dbcon = ProjectConnector(self.project_name, DbUsers.CalculationEng)
        for row_num, row_data in self.hierarchy_template.iterrows():
            try:
                if isinstance(row_data[xl_column_name], (str, unicode)):
                    values = row_data[xl_column_name].strip()
                else:
                    values = row_data[xl_column_name]

                if xl_column_type == "list":
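                    # Build an IN (...) clause from the comma-separated cell values.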
                    values = tuple(
                        ["{}".format(v.strip()) for v in values.split(",")])
                    query = "SELECT {db_column_name} FROM {table_name} WHERE {db_column_name} IN {values}"

                else:
                    values = u"{}".format(values)
                    query = u"SELECT {db_column_name} FROM {table_name} WHERE "
                    query += u" {db_column_name}='{values}'"

                query = query.format(db_column_name=db_column_name,
                                     table_name=table_name,
                                     values=values)
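                # Drop the trailing comma a one-element tuple renders with, e.g. ('x',) -> ('x'), so the SQL stays valid.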
                query = query.strip().replace(",)", ")")

                df_result = pd.read_sql(query, dbcon.db)
                dict_result = dict()
                if df_result.empty:
                    dict_result['column_name'] = xl_column_name
                    dict_result['values'] = values
                    dict_result['table_name'] = table_name
                    Log.error(dict_result)
                    lst_rows.append(dict_result)
                    if is_debug:
                        Log.info(dict_result)
                    result = False
                else:
                    if xl_column_type == "list":
                        if len(df_result) != len(values):
                            dict_result['column_name'] = xl_column_name
                            dict_result['values'] = values
                            dict_result['table_name'] = table_name
                            Log.error(dict_result)
                            lst_rows.append(dict_result)
                            if is_debug:
                                Log.info(dict_result)
                            result = False
            except Exception as ex:
                result = False
                Log.error("{} {}".format(ex.message, ex.args))
        self.create_excel_log(lst_rows, "db_values_check")
        Log.info("DB Values Check column_name:{} - Completed".format(
            xl_column_name))
        return result
 def load_db_category_kpi(self):
     dbcon = ProjectConnector(self.project_name, DbUsers.CalculationEng)
     query = self.get_db_category_kpi_query()
     df_result = pd.read_sql(query, dbcon.db)
     if df_result.empty:
         Log.error("Category KPI mapping missing in DB")
         return pd.DataFrame()
     else:
         return df_result
    def __init__(self, project, template_path):
        self.template_results = self.load_template_results(template_path)
        self.rds_conn = ProjectConnector(project, DbUsers.CalcAdmin)
        self.cur = self.rds_conn.db.cursor()
        self.result_value_attribs = self.get_table_attributes(
            'static.kpi_result_value')
        self.sql_results = self.result_values_query()
        self.sql_types = self.result_types_query()
        self.ps_type_pk = None
        self.ps_result_pk = max(self.sql_results['pk'])

        self.update_db()
    def check_db_values(self):
        result = True
        dbcon = ProjectConnector(self.project_name, DbUsers.CalculationEng)
        for check in self.data_mapping:
            if "table_name" in check.keys():
                xl_column_name = check['xl_column_name']
                xl_column_type = check['xl_column_type']
                db_column_name = check['db_column_name']
                table_name = check['table_name']
                for row_num, row_data in self.template.iterrows():
                    try:
                        values = row_data[xl_column_name].strip()
                        if xl_column_type == "list":
                            values = tuple([
                                "{}".format(v.strip())
                                for v in values.split(",")
                            ])
                            query = "SELECT {db_column_name} FROM {table_name} WHERE {db_column_name} IN {values}"

                        else:
                            values = "{}".format(values)
                            query = "SELECT {db_column_name} FROM {table_name} WHERE {db_column_name}='{values}'"

                        query = query.format(db_column_name=db_column_name,
                                             table_name=table_name,
                                             values=values)
                        query = query.strip().replace(",)", ")")

                        df_result = pd.read_sql(query, dbcon.db)
                        if df_result.empty:
                            Log.error(
                                "{xl_column_name}={values} not in table {table_name}"
                                .format(xl_column_name=xl_column_name,
                                        values=values,
                                        table_name=table_name))
                            result = False
                        else:
                            if xl_column_type == "list":
                                if len(df_result) != len(values):
                                    message = "{xl_column_name}={values}".format(
                                        xl_column_name=xl_column_name,
                                        values=values)
                                    message += " one or more values not found in table {table_name}".format(
                                        table_name=table_name)
                                    Log.error(message)
                                    result = False
                    except Exception as ex:
                        result = False
                        Log.error("{} {}".format(ex.message, ex.args))
        return result
 def load_all_active_products(self):
     result = pd.DataFrame()
     query = self.get_product_group_query()
     dbcon = ProjectConnector(self.project_name, DbUsers.CalculationEng)
     try:
         df_result = pd.read_sql(query, dbcon.db)
         if df_result.empty:
             Log.error("Product group data missing in DB")
             return result
         else:
             result = df_result
     except Exception as ex:
         Log.error("Error: {}".format(ex))
     return result
Example 10
 def __init__(self, data_provider, output):
     self.output = output
     self.data_provider = data_provider
     self.common = Common(self.data_provider)
     self.common_v2 = CommonV2(self.data_provider)
     self.project_name = self.data_provider.project_name
     self.session_uid = self.data_provider.session_uid
     self.products = self.data_provider[Data.PRODUCTS]
     self.all_products = self.data_provider[Data.ALL_PRODUCTS]
     self.match_product_in_scene = self.data_provider[Data.MATCHES]
     self.visit_date = self.data_provider[Data.VISIT_DATE]
     self.session_info = self.data_provider[Data.SESSION_INFO]
     self.scene_info = self.data_provider[Data.SCENES_INFO]
     self.store_id = self.data_provider[Data.STORE_FK]
     self.scif = self.data_provider[Data.SCENE_ITEM_FACTS]
     self.template_info = self.data_provider.all_templates
     self.rds_conn = ProjectConnector(self.project_name, DbUsers.CalculationEng)
     self.ps_data_provider = PsDataProvider(self.data_provider)
     self.match_product_in_probe_state_reporting = self.ps_data_provider.get_match_product_in_probe_state_reporting()
     self.kpi_results_queries = []
     self.fixture_width_template = pd.read_excel(FIXTURE_WIDTH_TEMPLATE, "Fixture Width", dtype=pd.Int64Dtype())
     self.facings_to_feet_template = pd.read_excel(FIXTURE_WIDTH_TEMPLATE, "Conversion Table", dtype=pd.Int64Dtype())
     self.header_positions_template = pd.read_excel(FIXTURE_WIDTH_TEMPLATE, "Header Positions")
     self.flip_sign_positions_template = pd.read_excel(FIXTURE_WIDTH_TEMPLATE, "Flip Sign Positions")
     self.custom_entity_data = self.ps_data_provider.get_custom_entities(1005)
     self.ignore_stacking = False
     self.facings_field = 'facings' if not self.ignore_stacking else 'facings_ign_stack'
     self.kpi_new_static_data = self.common.get_new_kpi_static_data()
     try:
         self.mpis = self.match_product_in_scene.merge(self.products, on='product_fk', suffixes=['', '_p']) \
                      .merge(self.scene_info, on='scene_fk', suffixes=['', '_s']) \
                      .merge(self.template_info, on='template_fk', suffixes=['', '_t'])
     except KeyError:
         Log.warning('MPIS cannot be generated!')
         return
     self.adp = AltriaDataProvider(self.data_provider)
     self.active_kpis = self._get_active_kpis()
     self.external_targets = self.ps_data_provider.get_kpi_external_targets()
     self.survey_dot_com_collected_this_session = self._get_survey_dot_com_collected_value()
     self._add_smart_attributes_to_mpis()
     self.scene_graphs = {}
     self.excessive_flipsigns = False
     self.incorrect_tags_in_pos_areas = []
    def __init__(self, project, path):
        self.rds_conn = ProjectConnector(project, DbUsers.CalcAdmin)
        self.cur = self.rds_conn.db.cursor()

        self.kpi2_table = self.read_table(self.KPI)
        self.kpi2_set = set(self.kpi2_table['type'])
        self.kpi_pk_set = set(self.kpi2_table['pk'])

        self.entity_table = self.read_table(self.ENTITY_TABLE)
        self.family_table = self.read_table(self.FAMILY_TABLE)
        self.calc_stage_table = self.read_table(self.CALC_STAGE_TABLE)

        self.entity_set = set(self.entity_table['name'])
        self.family_set = {x.upper() for x in self.family_table['name']}
        self.calc_stage_set_pk = set(self.calc_stage_table['pk'])

        self.family_dict = self.family_table.set_index('name')['pk'].to_dict()
        self.entity_dict = self.entity_table.set_index('name')['pk'].to_dict()

        self.template = pd.read_excel(path, sheet_name='KPIs')
Example 12
 def __init__(self, data_provider, output):
     self.output = output
     self.data_provider = data_provider
     self.common = Common(self.data_provider)
     self.templates_path = os.path.join(
         os.path.dirname(os.path.realpath(__file__)), '..', 'Data')
     self.excel_file_path = os.path.join(self.templates_path,
                                         'Template.xlsx')
     self.project_name = self.data_provider.project_name
     self.session_uid = self.data_provider.session_uid
     self.products = self.data_provider[Data.PRODUCTS]
     self.all_products = self.data_provider[Data.ALL_PRODUCTS]
     self.match_product_in_scene = self.data_provider[Data.MATCHES]
     self.visit_date = self.data_provider[Data.VISIT_DATE]
     self.session_info = self.data_provider[Data.SESSION_INFO]
     self.scene_info = self.data_provider[Data.SCENES_INFO]
     self.store_id = self.data_provider[Data.STORE_FK]
     self.scif = self.data_provider[Data.SCENE_ITEM_FACTS]
     self.rds_conn = ProjectConnector(self.project_name,
                                      DbUsers.CalculationEng)
     self.kpi_static_data = self.common.get_kpi_static_data()
     self.kpi_results_queries = []
Example 13
 def __init__(self, data_provider, output):
     self.output = output
     self.data_provider = data_provider
     self.common = Common(self.data_provider)
     self.project_name = self.data_provider.project_name
     self.session_uid = self.data_provider.session_uid
     self.products = self.data_provider[Data.PRODUCTS]
     self.all_products = self.data_provider[Data.ALL_PRODUCTS]
     self.match_product_in_scene = self.data_provider[Data.MATCHES]
     self.templates = self.data_provider.all_templates
     self.visit_date = self.data_provider[Data.VISIT_DATE]
     self.session_info = self.data_provider[Data.SESSION_INFO]
     self.scene_info = self.data_provider[Data.SCENES_INFO]
     self.store_id = self.data_provider[Data.STORE_FK]
     self.scif = self.data_provider[Data.SCENE_ITEM_FACTS]
     self.rds_conn = ProjectConnector(self.project_name,
                                      DbUsers.CalculationEng)
     self.kpi_static_data = self.common.get_kpi_static_data()
     self.kpi_sub_brand_data = pd.read_sql_query(self.get_sub_brand_data(),
                                                 self.rds_conn.db)
     self.kpi_results_queries = []
     self.Presence_template = parse_template(TEMPLATE_PATH, "Presence")
     self.BaseMeasure_template = parse_template(TEMPLATE_PATH,
                                                "Base Measurement")
     self.Anchor_template = parse_template(TEMPLATE_PATH, "Anchor")
     self.Blocking_template = parse_template(TEMPLATE_PATH, "Blocking")
     self.Adjaceny_template = parse_template(TEMPLATE_PATH, "Adjacency")
     self.Eye_Level_template = parse_template(TEMPLATE_PATH, "Eye Level")
     self.eye_level_definition = parse_template(TEMPLATE_PATH, "Shelves")
     self.ignore_stacking = False
     self.facings_field = 'facings' if not self.ignore_stacking else 'facings_ign_stack'
     self.availability = Availability(self.data_provider)
     self.blocking_calc = Block(self.data_provider)
     self.mpis = self.match_product_in_scene.merge(self.products, on='product_fk', suffixes=['', '_p']) \
         .merge(self.scene_info, on='scene_fk', suffixes=['', '_s']) \
         .merge(self.templates, on='template_fk', suffixes=['', '_t'])
    def check_entity_values(self, entity_name_key, entity_value_key):
        result = True
        dbcon = ProjectConnector(self.project_name, DbUsers.CalculationEng)

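        # Map each entity type in the template to its lookup table and the column used for validation.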
        if entity_name_key == "others":
            entity = {'others': ('static_new.product', 'ean_code')}
        else:
            entity = {
                'product': ('static_new.product', 'ean_code'),
                'brand': ('static_new.brand', 'name'),
                'category': ('static_new.category', 'name'),
                'sub_category': ('static_new.sub_category', 'name')
            }

        for row_num, row_data in self.template.iterrows():
            try:
                if entity_name_key != "others":
                    xl_entity_type = row_data[entity_name_key]
                else:
                    xl_entity_type = "others"

                if pd.isnull(row_data[entity_value_key]):
                    continue

                xl_entity_values = row_data[entity_value_key].strip()

                if xl_entity_type not in entity.keys():
                    Log.error(
                        "{} not an valid entity, allowed entities={}".format(
                            xl_entity_type, entity.keys()))
                    result = False
                    continue

                entity_table_name, entity_column_name = entity[xl_entity_type]
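                # Split the cell into individual values and flag any duplicates before querying.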
                values = tuple([
                    "{}".format(v.strip()) for v in xl_entity_values.split(",")
                ])
                values_count = Counter(values)
                for value in values_count.items():
                    if value[1] > 1:
                        Log.error(
                            "row_number:{} Duplicate(s) values={} found in {}".
                            format(row_num + 2, entity_name_key, value))
                        result = False

                query = "SELECT {db_column_name} FROM {table_name} WHERE {db_column_name} IN {values}"
                query = query.format(db_column_name=entity_column_name,
                                     table_name=entity_table_name,
                                     values=values)
                query = query.strip().replace(",)", ")")
                df_result = pd.read_sql(query, dbcon.db)
                if df_result.empty:
                    message = "row_num:{row_num} {xl_column_name}={values} ".format(
                        row_num=row_num + 3,
                        xl_column_name=entity_column_name,
                        values=values)
                    message += " not in table {table_name}".format(
                        table_name=entity_table_name)
                    Log.error(message)
                    result = False
                else:
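                    # Values present in the template but absent from the query result are reported as missing.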
                    entity_values_rt = set(
                        df_result[entity_column_name].unique())
                    entity_values_ck = set(values)
                    missing_values = list(entity_values_ck - entity_values_rt)
                    if len(missing_values) != 0:
                        message = "row_number:{} {} =>{} missing values={}:".format(
                            row_num + 3, entity_name_key, entity_column_name,
                            missing_values)
                        Log.error(message)
                        result = False
            except Exception as ex:
                result = False
                Log.error("{} {}".format(ex.message, ex.args))
        return result
    def check_entity_values(self, entity_name_key, entity_value_key):
        Log.info("Entity Values Check - Started")
        result = True
        dbcon = ProjectConnector(self.project_name, DbUsers.CalculationEng)

        if entity_name_key == "others":
            entity = {'others': ('static_new.product', 'ean_code')}
        else:
            entity = {
                'product': ('static_new.product', 'ean_code'),
                'brand': ('static_new.brand', 'name'),
                'category': ('static_new.category', 'name'),
                'sub_category': ('static_new.sub_category', 'name')
            }
        lst_exceptions = []
        for row_num, row_data in self.hierarchy_template.iterrows():
            try:
                if entity_name_key != "others":
                    xl_entity_type = row_data[entity_name_key]
                else:
                    xl_entity_type = "others"

                if pd.isnull(row_data[entity_value_key]):
                    continue

                xl_entity_values = row_data[entity_value_key].strip()

                if xl_entity_type not in entity.keys():
                    dict_exception = dict()
                    dict_exception['exception'] = 'invalid_entity {}'.format(
                        xl_entity_type)
                    dict_exception['message'] = 'valid entities {}'.format(
                        entity.keys())
                    lst_exceptions.append(dict_exception)
                    if is_debug:
                        print(dict_exception)
                    result = False
                    continue

                entity_table_name, entity_column_name = entity[xl_entity_type]
                values = tuple([
                    "{}".format(v.strip()) for v in xl_entity_values.split(",")
                ])
                values_count = Counter(values)
                for value in values_count.items():
                    if value[1] > 1:
                        dict_exception = dict()
                        dict_exception['exception'] = "Duplicate values"
                        dict_exception[
                            'message'] = "row_number:{} Duplicate(s) values={} found in {}".format(
                                row_num + 2, entity_name_key, value)
                        lst_exceptions.append(dict_exception)
                        if is_debug:
                            print(dict_exception)
                        result = False

                query = "SELECT {db_column_name} FROM {table_name} WHERE {db_column_name} IN {values}"
                query = query.format(db_column_name=entity_column_name,
                                     table_name=entity_table_name,
                                     values=values)
                query = query.strip().replace(",)", ")")
                df_result = pd.read_sql(query, dbcon.db)
                if df_result.empty:
                    dict_exception = dict()
                    dict_exception['exception'] = "missing column name"
                    message = "row_num:{row_num} {xl_column_name}={values}".format(
                        row_num=row_num + 3,
                        xl_column_name=entity_column_name,
                        values=values)
                    message += " not in table {table_name}".format(
                        table_name=entity_table_name)
                    dict_exception['message'] = message
                    lst_exceptions.append(dict_exception)
                    if is_debug:
                        Log.error(dict_exception)
                    result = False
                else:
                    entity_values_rt = set(
                        df_result[entity_column_name].unique())
                    entity_values_ck = set(values)
                    missing_values = list(entity_values_ck - entity_values_rt)
                    if len(missing_values) != 0:
                        dict_exception = dict()
                        dict_exception['exception'] = "missing values"
                        message = "row_number:{} {} =>{} missing values={}:".format(
                            row_num + 3, entity_name_key, entity_column_name,
                            missing_values)
                        dict_exception['message'] = message
                        lst_exceptions.append(dict_exception)
                        if is_debug:
                            Log.error(dict_exception)
                        result = False
            except Exception as ex:
                result = False
                Log.error("{} {}".format(ex.message, ex.args))
        self.create_excel_log(lst_exceptions, "entity_values_check")
        Log.info("Entity Values Check - Completed")
        return result