Example #1
    def add_new_kpi_to_static_tables(self, set_fk, new_kpi_list):
        """
        :param set_fk: The relevant KPI set FK.
        :param new_kpi_list: A list with the parameters of every new KPI.
        This function adds new KPIs to the DB ('static' tables) - both to level 2 (kpi) and level 3 (atomic_kpi).
        """
        session = OrmSession(self.project_name, writable=True)
        with session.begin(subtransactions=True):
            for kpi in new_kpi_list:
                level2_query = """
                               INSERT INTO static.kpi (kpi_set_fk, display_text)
                               VALUES ('{0}', '{1}');""".format(
                    set_fk, kpi.get(KPI_NAME))
                result = session.execute(level2_query)
                kpi_fk = result.lastrowid
                level3_query = """
                               INSERT INTO static.atomic_kpi (kpi_fk, name, description, display_text,
                                                              presentation_order, display)
                               VALUES ('{0}', '{1}', '{2}', '{3}', '{4}', '{5}');""".format(
                    kpi_fk, kpi.get(KPI_NAME), kpi.get(KPI_NAME),
                    kpi.get(KPI_NAME), 1, 'Y')

                session.execute(level3_query)
        session.close()
        return
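
A minimal usage sketch (not part of the original example): it assumes KPI_NAME is a module-level constant holding the parameter key and that the method lives on a helper class constructed with a project name; both names below are hypothetical.

KPI_NAME = 'kpi_name'  # assumed key constant used by the method above
new_kpis = [{KPI_NAME: 'Share of Shelf'}, {KPI_NAME: 'Planogram Compliance'}]
helper = StaticDataHelper('projectname')  # hypothetical owning class
helper.add_new_kpi_to_static_tables(set_fk=12, new_kpi_list=new_kpis)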
Example #2
    def add_kpi_sets_to_static(self, set_names):
        """
        This function is to be run at the beginning of a project - it adds the constant KPI set data to the DB.
        """
        session = OrmSession(self.project_name, writable=True)
        with session.begin(subtransactions=True):
            for set_name in set_names:
                level1_query = """
                               INSERT INTO static.kpi_set (name, missing_kpi_score, enable, normalize_weight,
                                                           expose_to_api, is_in_weekly_report)
                               VALUES ('{0}', '{1}', '{2}', '{3}', '{4}', '{5}');""".format(set_name, 'Bad', 'Y',
                                                                                            'N', 'N', 'N')
                session.execute(level1_query)
        session.close()
        return
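
A minimal usage sketch, under the same assumption about the hypothetical owning helper class:

helper = StaticDataHelper('projectname')  # hypothetical owning class
helper.add_kpi_sets_to_static(['Hypermarket', 'Supermarket', 'Convenience'])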
Example #3
def get_stitching_data(project, scene_ids, session=None):
    if not session:
        session = OrmSession(project)
    query = SceneDBHelper().get_stitching_data_query(scene_ids)
    res = session.execute(query)
    stitching_data = pd.DataFrame(list(res), columns=res.keys())
    return stitching_data
Example #4
def get_masks_archive(project, athena_query_probes, session=None):
    if not session:
        session = OrmSession(project)
    Log.info("Accessing cloud archive to extract masks for {} probes".format(
        len(athena_query_probes)))
    probes_list_str = ','.join([str(val) for val in athena_query_probes])
    processing_info_query = """
                            select distinct
                                DATE(completion_time) as date,
                                LAST_DAY(completion_time) = DATE(completion_time) as is_last_day_of_month
                            from
                                probedata.user_processing_info
                            where probe_fk in ({})
                        """.format(probes_list_str)
    res = session.execute(processing_info_query)
    partitions_df = pd.DataFrame(list(res), columns=res.keys())
    partitions_df["date"] = pd.to_datetime(partitions_df["date"])

    # handle the case of end of month probe upload
    sub_df = partitions_df[partitions_df["is_last_day_of_month"] == 1].copy()
    sub_df["date"] = sub_df["date"].apply(
        lambda d: d + datetime.timedelta(days=7))
    partitions_df = partitions_df.append(sub_df)

    partitions_df["month"] = partitions_df["date"].dt.month
    partitions_df["year"] = partitions_df["date"].dt.year
    partition_str = "(" + " or ".join(partitions_df[[
        "month", "year"
    ]].drop_duplicates().apply(
        lambda row: "(month={} and year={})".format(row["month"], row["year"]),
        axis=1)) + ")"

    athena_query = """SELECT 
                            _id, creation_time, pipeline_id, probe_id, project_name, response, version
                        FROM 
                            mongo_archives_prod.masking_engine_response_view 
                        where 1=1
                        and project_name = '{}'
                        and probe_id in ({})
                        and {};""".format(project, probes_list_str,
                                          partition_str)
    with Log.Timer('BigDataFactory masking extraction execution time',
                   Severities.INFO,
                   extra={'project_name': project}):
        athena_connector = BigDataFactory.get_big_data_connector(
            region='us-east-1', schema_name='mongo_archives_prod')
        athena_connector.connect()
        cursor = athena_connector.execute_query(athena_query)

    columns = [col[0] for col in cursor.description]
    probe_masks_athena = pd.DataFrame.from_records(
        cursor, columns=columns).drop_duplicates()
    probe_masks_athena["response"] = probe_masks_athena["response"].apply(
        jsonify_response)
    return probe_masks_athena
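
The month/year partition filter is the least obvious step above; a standalone sketch of how it expands on toy data (pd.concat is used here in place of DataFrame.append, which is removed in pandas 2.x):

import datetime
import pandas as pd

# One probe processed mid-month, one on the last day of its month.
partitions_df = pd.DataFrame({
    "date": pd.to_datetime(["2021-03-15", "2021-03-31"]),
    "is_last_day_of_month": [0, 1],
})

# End-of-month uploads may land in the next month's partition, so add a copy shifted by a week.
sub_df = partitions_df[partitions_df["is_last_day_of_month"] == 1].copy()
sub_df["date"] = sub_df["date"].apply(lambda d: d + datetime.timedelta(days=7))
partitions_df = pd.concat([partitions_df, sub_df], ignore_index=True)

partitions_df["month"] = partitions_df["date"].dt.month
partitions_df["year"] = partitions_df["date"].dt.year
partition_str = "(" + " or ".join(
    partitions_df[["month", "year"]].drop_duplicates().apply(
        lambda row: "(month={} and year={})".format(row["month"], row["year"]), axis=1)
) + ")"
print(partition_str)  # ((month=3 and year=2021) or (month=4 and year=2021))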
Example #5
def get_masks_from_rds(project, probe_list, session=None):
    if not session:
        session = OrmSession(project)

    probe_list_str = "and mpip.probe_fk in (" + ",".join(
        [str(val) for val in probe_list]) + ")"
    query = """
    select
        1 as _id,
        mpip.creation_time as creation_time,
        'just' as pipeline_id,
        mpip.probe_fk as probe_id,
        '{project}' as project_name,
        JSON_OBJECT("probe_tag_id",
        JSON_ARRAYAGG(mpipm.match_product_in_probe_fk),
        "x1",
        JSON_ARRAYAGG(mpipm.mask_left),
        "x2",
        JSON_ARRAYAGG(mpipm.mask_right),
        "y1",
        JSON_ARRAYAGG(mpipm.mask_top),
        "y2",
        JSON_ARRAYAGG(mpipm.mask_bottom),
        "x",
        JSON_ARRAYAGG(ROUND(mpip.rect_x + mpip.rect_width*0.5, 0)),
        "y",
        JSON_ARRAYAGG(ROUND(mpip.rect_y + mpip.rect_height*0.5, 0))) as response,
        'MaskingEngine_SQL' as version
    from
        probedata.match_product_in_probe_masks mpipm
    inner join probedata.match_product_in_probe mpip on
        mpip.pk = mpipm.match_product_in_probe_fk
    where
        1=1
        {probe_list_str}
    GROUP by
        mpip.probe_fk
    """.format(**{
        "project": project,
        "probe_list_str": probe_list_str
    })

    res = session.execute(query)
    masks_from_rds = pd.DataFrame(list(res), columns=res.keys())
    masks_from_rds["response"] = masks_from_rds["response"].apply(
        jsonify_response)

    return masks_from_rds
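
jsonify_response is referenced but not shown in any of these examples; a plausible minimal sketch, assuming its only job is to turn the JSON text produced by JSON_OBJECT(...) (or returned from the archive) into a Python dict - the name comes from the code above, the body is an assumption:

import json

def jsonify_response(response):
    # Assumed behaviour: parse JSON strings, pass through values that are already parsed.
    if isinstance(response, (dict, list)):
        return response
    return json.loads(response)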
Example #6
def get_masks_from_rds_flat(project, probe_list, session=None):
    if not session:
        session = OrmSession(project)

    probe_list_str = "AND mpip.probe_fk IN (" + ",".join(
        [str(val) for val in probe_list]) + ")"
    query = """
    SELECT
        mpip.creation_time AS creation_time,
        mpip.probe_fk AS probe_id,
        mpipm.match_product_in_probe_fk AS probe_tag_id,
        mpipm.mask_left AS x1,
        mpipm.mask_right AS x2,
        mpipm.mask_top AS y1,
        mpipm.mask_bottom AS y2,
        ROUND(mpip.rect_x + mpip.rect_width*0.5, 0) AS x,
        ROUND(mpip.rect_y + mpip.rect_height*0.5, 0) AS y
    FROM
        probedata.match_product_in_probe_masks AS mpipm
    INNER JOIN probedata.match_product_in_probe AS mpip 
        ON mpip.pk = mpipm.match_product_in_probe_fk
    WHERE
        1=1
        {probe_list_str}
    """.format(**{
        "project": project,
        "probe_list_str": probe_list_str
    })

    res = session.execute(query)
    masks_from_rds = pd.DataFrame(list(res), columns=res.keys())
    masks_from_rds[['x1', 'x2', 'y1',
                    'y2']] = masks_from_rds[['x1', 'x2', 'y1',
                                             'y2']].astype(float)
    masks_from_rds[['x', 'y']] = masks_from_rds[['x', 'y']].astype(int)

    return masks_from_rds
Example #7
    def insert_new_kpis_old(self, project, kpi_list=None):
        """
        This function inserts KPI metadata to static tables
        """
        session = OrmSession(project, writable=True)
        try:
            voting_process_pk_dic = {}
            with session.begin(subtransactions=True):
                for kpi in kpi_list.values()[0]:
                    if kpi.get('To include in first calculation?') == 4:
                        Log.info('Trying to write KPI {}'.format(
                            kpi.get('KPI name Eng')))
                        #         # kpi_level_1_hierarchy = pd.DataFrame(data=[('Canteen', None, None, 'WEIGHTED_AVERAGE',
                        #         #                                             1, '2016-11-28', None, None)],
                        #         #                                      columns=['name', 'short_name', 'eng_name', 'operator',
                        #         #                                               'version', 'valid_from', 'valid_until', 'delete_date'])
                        #         # self.output.add_kpi_hierarchy(Keys.KPI_LEVEL_1, kpi_level_1_hierarchy)
                        #         if kpi.get('level') == 2:
                        #             kpi_level_2_hierarchy = pd.DataFrame(data=[
                        #                 (1, kpi.get('KPI Name ENG'), None, None, None, None, kpi.get('weight'), 1, '2016-12-25', None, None)],
                        #                                          columns=['kpi_level_1_fk', 'name', 'short_name', 'eng_name', 'operator',
                        #                                                   'score_func', 'original_weight', 'version', 'valid_from', 'valid_until',
                        #                                                   'delete_date'])
                        #             self.output.add_kpi_hierarchy(Keys.KPI_LEVEL_2, kpi_level_2_hierarchy)
                        #         elif kpi.get('level') == 3:
                        #             kpi_level_3_hierarchy = pd.DataFrame(data=[(1, kpi.get('KPI Name ENG'), None, None, None,
                        #                                                        None, kpi.get('weight'), 1, '2016-12-25', None, None)],
                        #                                                  columns=['kpi_level_2_fk', 'name', 'short_name', 'eng_name', 'operator',
                        #                                                           'score_func', 'original_weight', 'version', 'valid_from',
                        #                                                           'valid_until', 'delete_date'])
                        #             self.output.add_kpi_hierarchy(Keys.KPI_LEVEL_3, kpi_level_3_hierarchy)
                        #     else:
                        #         Log.info('No KPIs to insert')
                        #     self.data_provider.export_kpis_hierarchy(self.output)

                        # insert_trans = """
                        #                 INSERT INTO static.kpi_level_1 (name,
                        #                operator, version, valid_from)
                        #                VALUES ('{0}', '{1}', '{2}', '{3}');""".format('test',  'WEIGHTED_AVERAGE', 1,
                        #                                                         '2016-11-28')
                        # insert_trans_level1 = """
                        #             INSERT INTO static.kpi_set (name,
                        #            missing_kpi_score, enable, normalize_weight, expose_to_api, is_in_weekly_report)
                        #            VALUES ('{0}', '{1}', '{2}', '{3}', '{4}', '{5}');""".format('Hypermarket', 'Bad',
                        #                                                          'Y', 'N', 'N', 'N')
                        # Log.get_logger().debug(insert_trans_level1)
                        # result = session.execute(insert_trans_level1)
                        insert_trans_level2 = """
                                        INSERT INTO static.kpi (kpi_set_fk,
                                       logical_operator, weight, display_text)
                                       VALUES ('{0}', '{1}', '{2}', '{3}');""".format(
                            34, kpi.get('Logical Operator'),
                            kpi.get('KPI Weight'), kpi.get('KPI name Eng'))
                        # # #
                        # # # #     # insert_trans = """
                        # # # #     #                 UPDATE static.kpi_level_1 SET short_name=null, eng_name=null, valid_until=null, delete_date=null
                        # # # #     #                 WHERE pk=1;"""
                        # Log.get_logger().debug(insert_trans_level2)
                        result = session.execute(insert_trans_level2)
                        kpi_fk = result.lastrowid
                        insert_trans_level3 = """
                                        INSERT INTO static.atomic_kpi (kpi_fk,
                                       name, description, display_text, presentation_order, display)
                                       VALUES ('{0}', '{1}', '{2}', '{3}', '{4}', '{5}');""".format(
                            kpi_fk, kpi.get('KPI name Eng'),
                            kpi.get('KPI name Eng'), kpi.get('KPI name Eng'),
                            1, 'Y')
                        Log.get_logger().debug(insert_trans_level3)
                        result = session.execute(insert_trans_level3)
                        # voting_process_pk = result.lastrowid
                        # voting_process_pk_dic[kpi] = voting_process_pk
                        # Log.info('KPI level 1 was inserted to the DB')
                        # Log.info('Inserted voting process {} in project {} SQL DB'.format(voting_process_pk, project))
                        # voting_session_fk = self.insert_production_session(voting_process_pk, kpi, session)
                        # self.insert_production_tag(voting_process_pk, voting_session_fk, kpi, session)

            session.close()
            # return voting_process_pk_dic
            return
        except Exception as e:
            Log.error(
                'Caught exception while inserting new voting process to SQL: {}'
                .format(str(e)))
            return -1
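
A minimal usage sketch (the template keys and the '== 4' inclusion flag are taken from the code above; the owning class name is hypothetical):

kpi_list = {
    'Sheet1': [{
        'To include in first calculation?': 4,
        'KPI name Eng': 'Share of Shelf',
        'Logical Operator': 'OR',
        'KPI Weight': 10,
    }]
}
uploader = KpiUploader()  # hypothetical owning class
uploader.insert_new_kpis_old('projectname', kpi_list=kpi_list)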