示例#1
0
    def update_targets(self):
        """
        Update the target HGI values for this algorithm in the database.

        Builds a TARGET_HGI_UPDATE statement from the request payload and
        the current epoch-millisecond timestamp, executes it on the
        Cassandra session, and returns a success JSON response.

        :return: JsonResponse -- success message, an assertion-error
                 payload, or a 500 response on unexpected failure.
        """
        try:
            # Fail fast with a 500 payload when the DB connection is absent.
            assert self._db_connection, {
                STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR,
                MESSAGE_KEY: DB_ERROR
            }
            # Epoch time in milliseconds, stored as a string in the row.
            last_modified_date = str(round(time.time() * 1000))

            try:
                query = TARGET_HGI_UPDATE.format(
                    NAME, TABLE_NAME, self._request_payload[PARAM_VALUE],
                    last_modified_date, FLAG, self.algorithm_name)

                self._csql_session.execute(query)
            except Exception as e:
                # Fixed: the original concatenated a literal "%s" into the
                # message instead of interpolating the exception text.
                log_error("Exception due to : " + str(e))
                # NOTE(review): asert_res() is used elsewhere for assertion
                # errors only; kept here to preserve the response shape.
                return asert_res(e)

            return JsonResponse(sucess_message, safe=False)

        except AssertionError as e:
            log_error("Assertion error due to : " + str(e))
            return asert_res(e)
        except Exception as e:
            log_error("Exception due to : " + str(e))
            return json_InternalServerError
 def get_hgi_values(self):
     """
     Return the target HGI list, the HGI offset and the last-updated
     target for this algorithm.

     The offset is read from the 'HgiDelta' field of the JSON payload
     stored in the first 'value' row, when any rows exist.

     :return: JsonResponse with keys "target_hgi", "offset" and
              "last_updated_target".
     """
     try:
         # Fail fast with a 500 payload when the DB connection is absent.
         assert self._db_connection, {
             STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR,
             MESSAGE_KEY: DB_ERROR
         }
         df_data = pd.DataFrame(
             self._csql_session.execute(
                 TARGET_HGI_LIST.format(NAME, TABLE_NAME,
                                        self.algorithm_name)))
         dict_data = {
             "target_hgi": ListofTargetHGI,
             "offset": None,
             "last_updated_target":
             self.get_updated_data_by_algorithm_name(self.algorithm_name)
         }
         if not df_data.empty:
             df = pd.DataFrame.from_dict(json.loads(df_data['value'][0]))
             # The loop leaves the offset from the LAST row; preserved as-is.
             for _, row in df.iterrows():
                 dict_data["offset"] = int(row['HgiDelta'])
         return JsonResponse(dict_data, safe=False)
     except AssertionError as e:
         # Fixed: "%s" was concatenated, not interpolated, in the log text.
         log_error("Assertion error due to : " + str(e))
         return asert_res(e)
     except Exception as e:
         log_error("Exception due to : " + str(e))
         return json_InternalServerError
示例#3
0
    def __get_user_query(self):
        """
        Fetch details for the requesting (logged-in) user.

        Verifies the login id exists, loads the user row, recomputes the
        'Days Left for password to expire' column from the configured
        password-expiry period, and returns the record as JSON. An unknown
        login id yields an empty list.

        :return: JsonResponse with the user record (or []), an
                 assertion-error payload, or a 500 response on failure.
        """
        try:
            self._psql_session.execute(CHECK_AUTHENTICATION_QUERY.format(self.loggedin_userid_details[LOGIN_ID]))
            result_set = self._psql_session.fetchall()
            user = []
            if result_set:
                self._psql_session.execute(SINGLE_USER_GET_DETAILS.format(self.loggedin_userid_details[LOGIN_ID]))
                user_data = pd.DataFrame(self._psql_session.fetchall())
                # Days already elapsed toward expiry for this user
                # (fixed local-name typo: was "expiry_peroid").
                expiry_period = user_data['Days Left for password to expire'].iloc[0]
                user_data.drop(columns=['Days Left for password to expire'], inplace=True)
                self._psql_session.execute(PASSWORD_RESET_EXPIRY_CHECK.format(PASSWORD_EXPIRY_PERIOD))
                password = pd.DataFrame(self._psql_session.fetchall())
                if not password.empty:
                    expiry = password['value'].iloc[0]
                else:
                    expiry = None

                # NOTE(review): when no expiry setting row exists, expiry is
                # None and this subtraction raises TypeError, which the
                # generic handler turns into a 500 -- confirm intended.
                user_data['Days Left for password to expire'] = expiry - expiry_period
                user = yaml.safe_load(user_data.to_json(orient=RECORDS))
                return JsonResponse(user, safe=False)
            return JsonResponse(user, safe=False)

        except AssertionError as e:
            # Fixed: "%s" was concatenated, not interpolated, in the log text.
            log_error("Assertion error due to : " + str(e))
            return asert_res(e)

        except Exception as e:
            log_error("Exception due to : " + str(e))
            return json_InternalServerError
示例#4
0
    def forgot_password(self):
        """
        Reset the requesting user's password.

        Asserts the DB connection is alive and delegates to the private
        __forgot_user_password_query helper.

        :return: JsonResponse from the delegated query, an assertion-error
                 payload, or a 500 response on unexpected failure.
        """
        try:
            assert self._db_connection, {STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR, MESSAGE_KEY: DB_ERROR}

            return self.__forgot_user_password_query()

        # Fixed: the original routed every exception (with a bare
        # log_info()) to asert_res(); split handlers to match the
        # sibling update_user_count and log the actual error.
        except AssertionError as e:
            log_error("Assertion error due to : " + str(e))
            return asert_res(e)
        except Exception as e:
            log_error("Exception due to : " + str(e))
            return json_InternalServerError
    def clear_notifications(self):
        """
        Clear the caller's selected notifications.

        Verifies the login id, checks the caller's role has the
        'Notification Clear Functionality' permission, then marks each
        (tag, timestamp) pair in the request payload as cleared for this
        user.

        :return: JsonResponse -- success message, 400 for an unknown login
                 id, 403 when the permission is missing, or a 500 response
                 on failure.
        """
        try:
            assert self._db_connection, {
                STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR,
                MESSAGE_KEY: DB_ERROR
            }
            self._psql_session.execute(
                CHECK_AUTHENTICATION_QUERY.format(
                    self.loggedin_userid_details[LOGIN_ID]))
            user = pd.DataFrame(self._psql_session.fetchall())
            if not self._psql_session.rowcount:
                return JsonResponse(
                    {MESSAGE_KEY: "LOGIN ID NOT REGISTER WITH US"},
                    status=HTTP_400_BAD_REQUEST)
            self._psql_session.execute(
                GET_PERMISSION.format(user['user_type'].iloc[0]))
            permission = pd.DataFrame(self._psql_session.fetchall())
            if not permission.empty:
                permissions = list(permission["feature"])
            else:
                permissions = []
            # Guard clause: reject early when the role lacks the feature.
            if 'Notification Clear Functionality' not in permissions:
                return JsonResponse({MESSAGE_KEY: "FORBIDDEN ERROR"},
                                    status=HTTP_403_FORBIDDEN)
            for each in self._request_payload:
                try:
                    clear_notification = CLEAR_NOTIFICATIONS.format(
                        TAG_NAME, TIMESTAMP, USER_ID, each[TAG_NAME],
                        each[TIMESTAMP],
                        self.loggedin_userid_details[LOGIN_ID])
                    self._psql_session.execute(clear_notification)

                except Exception as e:
                    # Fixed: separator added so the log text is readable.
                    log_error("Exception occurred due to : " + str(e))
                    return json_InternalServerError

            return JsonResponse(
                {"MESSAGE": "NOTIFICATIONS SUCCESSFULLY CLEARED!"},
                safe=False)
        except AssertionError as e:
            log_error("Assertion error occurred due to : " + str(e))
            return asert_res(e)

        except Exception as e:
            log_error("Exception occurred due to : " + str(e))
            return json_InternalServerError
    def update_user_count(self):
        """
        Update the stored count of users.

        Asserts the DB connection is alive and delegates to the private
        __update_user_count_query helper.

        :return: JsonResponse from the delegated query, an assertion-error
                 payload, or a 500 response on unexpected failure.
        """
        try:
            assert self._db_connection, {STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR, MESSAGE_KEY: DB_ERROR}

            return self.__update_user_count_query()

        except AssertionError as e:
            # Fixed: "%s" was concatenated, not interpolated, in the log text.
            log_error("Assertion error due to : " + str(e))
            return asert_res(e)
        except Exception as e:
            log_error("Exception due to : " + str(e))
            return json_InternalServerError
示例#7
0
    def update_access(self):
        """
        Update the per-role feature access matrix and the global
        notification / password settings from the request payload.

        Payload keys other than the three setting constants are treated as
        UI sections whose entries carry per-role access flags; the three
        setting keys each update one scalar configuration value.

        :return: JsonResponse -- success message, a per-item error payload
                 on a failed update, an assertion-error payload, or a 500
                 response on unexpected failure.
        """
        try:
            assert self._db_connection, {
                STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR,
                MESSAGE_KEY: DB_ERROR
            }
            try:
                # Getting the features and access list of users and updating.
                for each in self._request_payload:
                    if each not in [
                            PASSWORD_EXPIRY_PERIOD,
                            MAXIMUM_VALUES_NOTIFICATION_DOWNLOAD_TIME_PERIOD,
                            DEFAULT_NOTIFICATION_VIEW
                    ]:
                        # Section key: update each feature's role flags.
                        for each1 in self._request_payload[each]:
                            try:
                                access_update = ACCESS_UPDATE.format(
                                    SUPER_ADMIN, each1[SUPER_ADMIN], ADMIN,
                                    each1[ADMIN], NON_ADMIN, each1[NON_ADMIN],
                                    SECTION, each, FEATURE, each1[FEATURE])
                                self._psql_session.execute(access_update)
                            except Exception as e:
                                # Fixed: "%s" was concatenated, not
                                # interpolated, in the log text.
                                log_error("Exception due to : " + str(e))
                                return JsonResponse(
                                    {MESSAGE_KEY: "error due {}".format(e)})
                    # Scalar settings: one NOTIFICATION_UPDATE each.
                    # (Simplified: the original wrapped this check in a
                    # redundant membership test over the same three keys.)
                    if DEFAULT_NOTIFICATION_VIEW == each:
                        try:
                            access_update = NOTIFICATION_UPDATE.format(
                                VALUE,
                                int(self._request_payload[
                                    DEFAULT_NOTIFICATION_VIEW]), SETTING,
                                DEFAULT_NOTIFICATION_VIEW)
                            self._psql_session.execute(access_update)
                        except Exception as e:
                            log_error("Exception due to : " + str(e))
                            return JsonResponse({
                                MESSAGE_KEY:
                                "error occur during updating the {}".format(
                                    DEFAULT_NOTIFICATION_VIEW)
                            })
                    if MAXIMUM_VALUES_NOTIFICATION_DOWNLOAD_TIME_PERIOD == each:
                        try:
                            access_update = NOTIFICATION_UPDATE.format(
                                VALUE,
                                int(self._request_payload[
                                    MAXIMUM_VALUES_NOTIFICATION_DOWNLOAD_TIME_PERIOD]
                                    ), SETTING,
                                MAXIMUM_VALUES_NOTIFICATION_DOWNLOAD_TIME_PERIOD
                            )
                            self._psql_session.execute(access_update)
                        except Exception as e:
                            log_error("Exception due to : " + str(e))
                            return JsonResponse({
                                MESSAGE_KEY:
                                "error occur during updating the {}".format(
                                    MAXIMUM_VALUES_NOTIFICATION_DOWNLOAD_TIME_PERIOD
                                )
                            })
                    if PASSWORD_EXPIRY_PERIOD == each:
                        try:
                            access_update = NOTIFICATION_UPDATE.format(
                                VALUE,
                                int(self.
                                    _request_payload[PASSWORD_EXPIRY_PERIOD]),
                                SETTING, PASSWORD_EXPIRY_PERIOD)
                            self._psql_session.execute(access_update)
                        except Exception as e:
                            log_error("Exception due to : " + str(e))
                            return JsonResponse({
                                MESSAGE_KEY:
                                "error occur during updating the {}".format(
                                    PASSWORD_EXPIRY_PERIOD)
                            })

            except Exception as e:
                log_error("Exception due to : " + str(e))
                return asert_res(e)

            return JsonResponse(sucess_message, safe=False)

        except AssertionError as e:
            log_error("Assertion error due to : " + str(e))
            return asert_res(e)

        except Exception as e:
            log_error("Exception due to : " + str(e))
            return json_InternalServerError
示例#8
0
    def handle_login(self):
        """
        Authenticate the requesting user and issue a JWT.

        Steps: look the user up (environment-specific query), enforce the
        global parallel-session limit (plus the standard-user limit for
        'Non Admin' users), check password expiry, verify the password
        hash, collect the role's permissions grouped by section, and
        return a signed token valid for 24 hours.

        :return: JsonResponse with {TOKEN_KEY: jwt} on success, otherwise a
                 401/403 error payload or a 500 response.
        """
        try:
            assert self._db_connection, {
                STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR,
                MESSAGE_KEY: DB_ERROR
            }
            session_count = None
            active_sessions = 0
            standard_user_count = 0
            active_standard = 0
            # ENV toggles between the dev and prod authentication queries.
            if ENV:
                self._psql_session.execute(
                    USER_AUTHETICATION_QUERY.format(
                        self._request_payload[LOGIN_ID]))
            else:
                self._psql_session.execute(
                    USER_PROD_AUTHETICATION_QUERY.format(
                        self._request_payload[LOGIN_ID]))

            result_set = self._psql_session.fetchone()
            if result_set:
                obj = HashingSalting()
                self._psql_session.execute(GET_PARALLEL_SESSION_COUNT)
                s_count = pd.DataFrame(self._psql_session.fetchall())
                if not s_count.empty:
                    session_count = s_count['limit_value'].iloc[0]
                self._psql_session.execute(GET_ACTIVE_SESSIONS_COUNT)
                active_count = pd.DataFrame(self._psql_session.fetchall())
                if not active_count.empty:
                    active_sessions = active_count['value'].iloc[0]
                # NOTE(review): when no limit row exists, session_count stays
                # None and this comparison raises TypeError, surfacing as a
                # 500 via the generic handler -- confirm intended.
                if active_sessions < session_count:
                    user_type = result_set['user_type']
                    if user_type == 'Non Admin':
                        # Standard users have their own parallel limit.
                        self._psql_session.execute(GET_STANDARD_USER_COUNT)
                        all_operator = pd.DataFrame(
                            self._psql_session.fetchall())
                        if not all_operator.empty:
                            standard_user_count = all_operator[
                                'limit_value'].iloc[0].item()
                        else:
                            standard_user_count = None
                        self._psql_session.execute(
                            GET_ACTIVE_STANDARD_USER_COUNT)
                        active_operator = pd.DataFrame(
                            self._psql_session.fetchall())
                        if not active_operator.empty:
                            active_standard = active_operator['value'].iloc[0]
                        else:
                            active_standard = None
                        # NOTE(review): same None-comparison hazard as above
                        # when either count row is missing.
                        if active_standard < standard_user_count:
                            obj.active_parallel_standard_sessions_increase()
                        else:
                            return JsonResponse(
                                {
                                    MESSAGE_KEY:
                                    'Could not login as the maximum number of parallel user logins have '
                                    'been exceeded'
                                },
                                status=HTTP_403_FORBIDDEN)

                    if user_type == 'Super Admin':
                        obj.active_parallel_sessions_increase()
                    if user_type == 'Admin':
                        obj.active_parallel_admin_sessions_increase()

                    self._psql_session.execute(
                        PASSWORD_RESET_EXPIRY_CHECK.format(
                            PASSWORD_EXPIRY_PERIOD))
                    password = pd.DataFrame(self._psql_session.fetchall())
                    if not password.empty:
                        expiry = password['value'].iloc[0]
                    else:
                        expiry = None
                    self._psql_session.execute(
                        USER_PASSWORD_EXPIRY_CHECK.format(
                            self._request_payload[LOGIN_ID]))
                    expiry_value = pd.DataFrame(self._psql_session.fetchall())
                    if not expiry_value.empty:
                        user_pwd_expiry = expiry_value['value'].iloc[0]
                    else:
                        user_pwd_expiry = None
                    if user_pwd_expiry <= expiry:
                        if obj.check_password(
                                self._request_payload[USERPASSWORD_KEY],
                                result_set[SALT_KEY],
                                result_set[USERPASSWORD_KEY]):
                            # Correct password but deactivated account.
                            if not result_set['status']:
                                return JsonResponse(
                                    {MESSAGE_KEY: STATUS_VALUE},
                                    status=HTTP_401_UNAUTHORIZED)
                            self._psql_session.execute(
                                PERMISSION_QUERY_1.format(
                                    result_set['user_type']))
                            permissions = pd.DataFrame(
                                self._psql_session.fetchall())
                            role = str(result_set['user_type'])
                            # Map each UI section to its feature list.
                            dict_data = {}
                            if not permissions.empty:
                                data_now = permissions.groupby('section')
                                for name, group in data_now:
                                    dict_data[name] = list(group['feature'])
                            # Token expires 24 hours (86400 s) from now.
                            jwt_token = TokenManagement().add_jwt({
                                LOGGEDINUSERID_KEY:
                                result_set[USERID_KEY],
                                LOGIN_ID:
                                result_set[LOGIN_ID],
                                USERNAME_KEY:
                                result_set['name'],
                                'role':
                                role,
                                'permissions':
                                dict_data,
                                'exp':
                                datetime.datetime.utcnow() +
                                datetime.timedelta(seconds=86400)
                            })

                            return JsonResponse({TOKEN_KEY: jwt_token})

                        return JsonResponse({MESSAGE_KEY: PASSWORD_WRONG},
                                            status=HTTP_401_UNAUTHORIZED)
                    return JsonResponse(
                        {
                            MESSAGE_KEY:
                            "PASSWORD EXPIRED! PLEASE CONTACT YOUR SUPER ADMIN"
                        },
                        status=HTTP_403_FORBIDDEN)
                return JsonResponse(
                    {
                        MESSAGE_KEY:
                        'Could not login as the maximum number of parallel user logins have been exceeded'
                    },
                    status=HTTP_403_FORBIDDEN)

            return JsonResponse({MESSAGE_KEY: USERNAME_NOT_REGISTERED},
                                status=HTTP_401_UNAUTHORIZED)

        except AssertionError as e:
            # Fixed: "%s" was concatenated, not interpolated, in the log text.
            log_error("Assertion error due to : " + str(e))
            return asert_res(e)
        except Exception as e:
            log_error("Exception due to : " + str(e))
            return json_InternalServerError
示例#9
0
    def get_access_list(self):
        """
        Build the full feature-access matrix plus the global notification /
        password settings.

        Reads the access list (one record set per UI section) and three
        scalar settings, returning a single JSON object keyed by section
        and setting name. Absent sections map to [] and absent settings to
        None.

        :return: JsonResponse with the combined access/settings dictionary,
                 an assertion-error payload, or a 500 response on failure.
        """
        try:
            assert self._db_connection, {
                STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR,
                MESSAGE_KEY: DB_ERROR
            }

            def section_records(frame, section, exact=False):
                # Rows of *frame* for one section, minus the 'section'
                # column, as plain JSON records. exact=True matches the
                # name exactly -- needed for the "(Spall)" sections, whose
                # names contain regex metacharacters.
                if exact:
                    subset = frame.loc[frame['section'] == section]
                else:
                    # NOTE(review): substring match means e.g.
                    # 'Furnace: H3901A' also picks up its "(Spall)" rows,
                    # exactly as the original implementation did.
                    subset = frame[frame['section'].str.contains(section)]
                # Non-inplace drop avoids pandas' SettingWithCopyWarning on
                # the filtered slice while producing identical records.
                return yaml.safe_load(
                    subset.drop(columns=['section']).to_json(orient=RECORDS))

            benchmark = []
            furncae_A = []
            furncae_B = []
            hgi = []
            left_pane = []
            notifications = []
            outage = []
            pdi = []
            settings = []
            system_overview = []
            furnace_a_spall = []
            furnace_b_spall = []
            default_n_view = None
            maximum_n_d_period = None
            password_expiry = None
            self._psql_session.execute(GET_ACCESS_LIST)
            access_list = pd.DataFrame(self._psql_session.fetchall())
            if not access_list.empty:
                keys = set(access_list['section'])
                if 'Settings' in keys:
                    settings = section_records(access_list, 'Settings')
                if 'HGI' in keys:
                    hgi = section_records(access_list, 'HGI')
                if 'Furnace: H3901B' in keys:
                    furncae_B = section_records(access_list,
                                                'Furnace: H3901B')
                if 'Furnace: H3901A' in keys:
                    furncae_A = section_records(access_list,
                                                'Furnace: H3901A')
                if 'Benchmarking' in keys:
                    benchmark = section_records(access_list, 'Benchmarking')
                if 'Left Pane' in keys:
                    left_pane = section_records(access_list, 'Left Pane')
                if 'Notifications' in keys:
                    notifications = section_records(access_list,
                                                    'Notifications')
                if 'Outage' in keys:
                    outage = section_records(access_list, 'Outage')
                if 'PDI' in keys:
                    pdi = section_records(access_list, 'PDI')
                if 'System Overview' in keys:
                    system_overview = section_records(access_list,
                                                      'System Overview')
                if 'Furnace: H3901A (Spall)' in keys:
                    furnace_a_spall = section_records(
                        access_list, 'Furnace: H3901A (Spall)', exact=True)
                if 'Furnace: H3901B (Spall)' in keys:
                    furnace_b_spall = section_records(
                        access_list, 'Furnace: H3901B (Spall)', exact=True)

            self._psql_session.execute(PERIOD_OF_TIME)
            time_period = pd.DataFrame(self._psql_session.fetchall())
            if not time_period.empty:

                def setting_value(name):
                    # First matching row's value for one scalar setting.
                    return int(time_period[time_period['setting'].str.
                                           contains(name)]['value'].iloc[0])

                keys = set(time_period['setting'])
                if 'Default Notification View Timer Period' in keys:
                    default_n_view = setting_value(
                        'Default Notification View Timer Period')
                if 'Maximum Values Notification Download Time Period' in keys:
                    maximum_n_d_period = setting_value(
                        'Maximum Values Notification Download Time Period')
                if 'Password Expiry Period' in keys:
                    password_expiry = setting_value('Password Expiry Period')

            old_dict = {
                "Benchmarking": benchmark,
                "Furnace: H3901A": furncae_A,
                "Furnace: H3901B": furncae_B,
                "Furnace: H3901A (Spall)": furnace_a_spall,
                "Furnace: H3901B (Spall)": furnace_b_spall,
                "HGI": hgi,
                "Left Pane": left_pane,
                "Notifications": notifications,
                "Outage": outage,
                "PDI": pdi,
                "Settings": settings,
                "System Overview": system_overview,
                "Default Notification View Timer Period": default_n_view,
                "Maximum Values Notification Download Time Period":
                maximum_n_d_period,
                "Password Expiry Period": password_expiry
            }
            return JsonResponse(old_dict, safe=False)
        except AssertionError as e:
            log_error("Assertion error occurred due to : " + str(e))
            return asert_res(e)

        except Exception as e:
            log_error("Exception occurred due to : " + str(e))
            return json_InternalServerError
示例#10
0
    def get_outage(self):
        """
        This will return the graph data for the outage module
        :return: Json Response
        """
        try:
            assert self._db_connection, {
                STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR,
                MESSAGE_KEY: DB_ERROR
            }

            if self.equipment == COKE_DRUM_VALUE and self.module == OUTAGE_VALUE:
                """
                This will return the graph data for the selected outage module
                """
                query_params = {
                    TAG_NAME_REQUEST: self.query_params.GET[TAG_NAME_REQUEST],
                    START_DATE_REQUEST:
                    self.query_params.GET[START_DATE_REQUEST],
                    END_DATE_REQUEST: self.query_params.GET[END_DATE_REQUEST]
                }
                MODULE_LEVEL_MULTILINE_TAG = tuple(
                    LIST_OF_OUTAGE_MODULE_LEVEL_MULTILINE_TAGS_GRAPH)
                if MULTILINE_REQUEST in self.query_params.GET:
                    """
                    This will return the graph data for the actual and predicted tags for the selected outage module 
                    """
                    query_params[MULTILINE_REQUEST] = self.query_params.GET[
                        MULTILINE_REQUEST]

                if query_params:
                    if START_DATE_REQUEST not in query_params or not query_params[START_DATE_REQUEST] and \
                            MULTILINE_REQUEST not in query_params:
                        graph_data = django_search_query_all(
                            DETAILED_OUTAGE_GRAPH_NULL_START_DATE.format(
                                self.module, query_params[TAG_NAME_REQUEST],
                                query_params[END_DATE_REQUEST]))
                    elif query_params[
                            START_DATE_REQUEST] and MULTILINE_REQUEST not in query_params:
                        graph_data = django_search_query_all(
                            DETAILED_OUTAGE_GRAPH.format(
                                self.module, query_params[TAG_NAME_REQUEST],
                                query_params[START_DATE_REQUEST],
                                query_params[END_DATE_REQUEST]))
                    elif query_params[START_DATE_REQUEST] and query_params[
                            MULTILINE_REQUEST]:
                        if query_params[
                                TAG_NAME_REQUEST] in LIST_OF_OUTAGE_MODULE_LEVEL_MULTILINE_TAGS_GRAPH:
                            graph_data = django_search_query_all(
                                DETAILED_OUTAGE_MODULE_MULTILINE_GRAPH.format(
                                    self.module, MODULE_LEVEL_MULTILINE_TAG,
                                    query_params[START_DATE_REQUEST],
                                    query_params[END_DATE_REQUEST]))

                        else:
                            graph_data = django_search_query_all(
                                DETAILED_OUTAGE_GRAPH.format(
                                    self.module,
                                    query_params[TAG_NAME_REQUEST],
                                    query_params[START_DATE_REQUEST],
                                    query_params[END_DATE_REQUEST]))

                    df_data = pd.DataFrame(graph_data)
                    min_max = django_search_query_all(
                        MIN_MAX_DATA.format(self.module,
                                            query_params[TAG_NAME_REQUEST]))
                    df_min_max_data = pd.DataFrame(min_max)
                    graph = []

                    if not df_data.empty:
                        df_data = df_data.where(
                            pd.notnull(df_data) == True, None)
                        df_data.sort_values(TIMESTAMP_KEY,
                                            ascending=True,
                                            inplace=True)
                        df_unit = df_data[UNIT].iloc[0]
                        df_description = df_data[DESCRIPTION].iloc[0]
                        df_timestamp = list(
                            dict.fromkeys(list(df_data[TIMESTAMP_KEY])))

                        if query_params[
                                TAG_NAME_REQUEST] in LIST_OF_OUTAGE_MODULE_LEVEL_MULTILINE_TAGS_GRAPH:
                            df_result = df_data.groupby(TAG_NAME_REQUEST)
                            actual_north_data = []
                            predicted_north_data = []
                            actual_south_data = []
                            predicted_south_data = []
                            if len(df_result) == 2:
                                df_description = \
                                    df_data[df_data[TAG_NAME_REQUEST] == query_params[TAG_NAME_REQUEST]][
                                        DESCRIPTION].iloc[0]
                                df_north_actual = df_result.get_group(
                                    OUTAGE_MODULE_LEVEL_ACTUAL_TAG)
                                actual_north_data = list(
                                    df_north_actual['north_drum_tag_value'])
                                df_north_predicted = df_result.get_group(
                                    OUTAGE_MODULE_LEVEL_PREDICTED_TAG)
                                predicted_north_data = list(
                                    df_north_predicted['north_drum_tag_value'])
                                df_south_actual = df_result.get_group(
                                    OUTAGE_MODULE_LEVEL_ACTUAL_TAG)
                                actual_south_data = list(
                                    df_south_actual['south_drum_tag_value'])
                                df_south_predicted = df_result.get_group(
                                    OUTAGE_MODULE_LEVEL_PREDICTED_TAG)
                                predicted_south_data = list(
                                    df_south_predicted['south_drum_tag_value'])
                            elif len(df_result) == 1:

                                if df_result[
                                        TAG_NAME_REQUEST] == OUTAGE_MODULE_LEVEL_ACTUAL_TAG:
                                    df_description = \
                                        df_data[df_data[TAG_NAME_REQUEST] == OUTAGE_MODULE_LEVEL_ACTUAL_TAG][
                                            DESCRIPTION].iloc[0]
                                    df_north_actual = df_result.get_group(
                                        OUTAGE_MODULE_LEVEL_ACTUAL_TAG)
                                    actual_north_data = list(
                                        df_north_actual['north_drum_tag_value']
                                    )
                                    df_south_actual = df_result.get_group(
                                        OUTAGE_MODULE_LEVEL_ACTUAL_TAG)
                                    actual_south_data = list(
                                        df_south_actual['south_drum_tag_value']
                                    )

                                elif df_result[
                                        TAG_NAME_REQUEST] != OUTAGE_MODULE_LEVEL_ACTUAL_TAG:
                                    df_description = \
                                        df_data[df_data[TAG_NAME_REQUEST] == OUTAGE_MODULE_LEVEL_PREDICTED_TAG][
                                            DESCRIPTION].iloc[0]
                                    df_north_predicted = df_result.get_group(
                                        OUTAGE_MODULE_LEVEL_PREDICTED_TAG)
                                    predicted_north_data = list(
                                        df_north_predicted[
                                            'north_drum_tag_value'])
                                    df_south_predicted = df_result.get_group(
                                        OUTAGE_MODULE_LEVEL_PREDICTED_TAG)
                                    predicted_south_data = list(
                                        df_south_predicted[
                                            'south_drum_tag_value'])

                            temp = {
                                "north_actual": actual_north_data,
                                "north_predicted": predicted_north_data,
                                "south_actual": actual_south_data,
                                "south_predicted": predicted_south_data,
                                "x_axis": df_timestamp,
                                "unit": df_unit,
                                "description": df_description
                            }

                        else:
                            temp = {
                                "y_axis": list(df_data[TAG_VALUE]),
                                "x_axis": df_timestamp,
                                "unit": df_unit,
                                "description": df_description
                            }
                        if not df_min_max_data.empty:
                            temp["min_data"] = df_min_max_data[MIN_VALUE].iloc[
                                0]
                            temp["max_data"] = df_min_max_data[MAX_VALUE].iloc[
                                0]
                        else:
                            temp["min_data"] = None
                            temp["max_data"] = None
                        graph.append(temp)

                    return graph

        except AssertionError as e:
            log_error("Exception due to : %s" + str(e))
            return asert_res(e)
        except Exception as e:
            log_error("Exception due to : %s" + str(e))
            return json_InternalServerError
    def update_benchmarking(self):
        """
        Update the benchmarking configuration (target, match and noise tags)
        in the database.

        Target and noise tags are validated against the limit stored in the
        database (the ``condition`` column, e.g. ``">123"`` / ``">=123"``);
        tags whose requested min/max/value fall outside the allowed range are
        collected and reported back in an HTTP 400 response instead of being
        updated. Match tags carry no stored limit and are updated directly.

        :return: JsonResponse -- success message, a 400 listing the rejected
                 tag descriptions, or an internal-server-error response.
        """
        try:
            assert self._db_connection, {
                STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR,
                MESSAGE_KEY: DB_ERROR
            }
            try:
                # Descriptions of tags that failed range validation.
                rejected = []

                if TARGETS in self._request_payload:
                    for each in self._request_payload[TARGETS]:
                        target_min = float(each[MIN])
                        target_max = float(each[MAX])
                        target_description = each[DESCRIPTION]
                        target_tag = each[TAG_NAME_REQUEST]
                        target_value = float(each[TARGET_VALUE])
                        target_active = each[IS_ACTIVE]
                        self._psql_session.execute(
                            LBT_TAG_DATA_VALIDATION.format(target_tag))
                        df = pd.DataFrame(self._psql_session.fetchall())
                        # Strip the single-character operator (e.g. ">")
                        # before converting the stored limit to float.
                        df['condition'] = df['condition'].str[1:]
                        df['condition'] = df['condition'].astype(float)
                        df_target = df.loc[(
                            df[TAG_NAME_REQUEST] == target_tag
                        )]['condition'].iloc[0]
                        # Equivalent to the original compound check:
                        # limit <= min <= value <= max.
                        if df_target <= target_min <= target_value <= target_max:
                            update_target_tags_query = UPDATING_TARGET_TAGS.format(
                                target_min, target_max, target_value,
                                target_active,
                                self._request_payload[PERFORMANCE_TAG],
                                target_description, target_tag)
                            self._psql_session.execute(
                                update_target_tags_query)
                        else:
                            rejected.append(target_description)
                if MATCH_TAGS in self._request_payload:
                    # Match tags have no stored limit, so no range
                    # validation is performed for them.
                    for each in self._request_payload[MATCH_TAGS]:
                        match_min = float(each[MIN])
                        match_max = float(each[MAX])
                        match_description = each[DESCRIPTION]
                        match_tag = each[TAG_NAME_REQUEST]
                        match_active = each[IS_ACTIVE]
                        update_match_tags_query = UPDATING_MATCH_TAGS.format(
                            match_min, match_max, match_active,
                            self._request_payload[PERFORMANCE_TAG],
                            match_description, match_tag)
                        self._psql_session.execute(update_match_tags_query)
                if NOISE_TAGS in self._request_payload:
                    for each in self._request_payload[NOISE_TAGS]:
                        noise_min = float(each[MIN])
                        noise_max = float(each[MAX])
                        noise_description = each[DESCRIPTION]
                        noise_tag = each[TAG_NAME_REQUEST]
                        noise_active = each[IS_ACTIVE]
                        self._psql_session.execute(
                            LBT_NOISE_TAG_DATA_VALIDATION.format(noise_tag))
                        df = pd.DataFrame(self._psql_session.fetchall())
                        # Noise limits carry a two-character operator
                        # (e.g. ">="), hence the [2:] slice.
                        df['condition'] = df['condition'].str[2:]
                        df['condition'] = df['condition'].astype(float)
                        df_noise = df.loc[(df[TAG_NAME_REQUEST] ==
                                           noise_tag)]['condition'].iloc[0]
                        # Equivalent to the original compound check:
                        # limit <= min <= max.
                        if df_noise <= noise_min <= noise_max:
                            update_noise_tags_query = UPDATING_NOISE_TAGS.format(
                                noise_min, noise_max, noise_active,
                                self._request_payload[PERFORMANCE_TAG],
                                noise_description, noise_tag)
                            self._psql_session.execute(
                                update_noise_tags_query)
                        else:
                            rejected.append(noise_description)
                # Clear every performance flag, then enable the selected one.
                self._psql_session.execute(SETTING_PERF_TAG_TO_FALSE)
                if PERFORMANCE_TAG in self._request_payload:
                    update_performance_tags_query = UPDATING_PERFORMANCE_TAGS.format(
                        self._request_payload[PERFORMANCE_TAG])
                    self._psql_session.execute(update_performance_tags_query)
            except Exception as e:
                log_error("Exception due to : %s" + str(e))
                return asert_res(e)
            if len(rejected) == 1:
                tag = ','.join(rejected)
                return JsonResponse(
                    {
                        MESSAGE_KEY:
                        "Set Max and Min values within the range for the tag:"
                        + tag
                    },
                    status=HTTP_400_BAD_REQUEST)
            elif len(rejected) > 1:
                tags = ','.join(rejected)
                return JsonResponse(
                    {
                        MESSAGE_KEY:
                        "Set Max and Min values within the range for the tags:"
                        + tags
                    },
                    status=HTTP_400_BAD_REQUEST)
            else:
                return JsonResponse(sucess_message, safe=False)

        except AssertionError as e:
            log_error("Assertion error due to : %s" + str(e))
            return asert_res(e)
        except Exception as e:
            log_error("Exception due to : %s" + str(e))
            return json_InternalServerError
# ---- Example #12 (scraped-source marker "示例#12") ----
    def get_notifications(self, request):
        """
        Build and download the notifications report as a PDF.

        Authenticates the logged-in user, checks the download permission,
        validates the requested date range against the configured download
        period, then gathers notifications and alerts for every module
        (overhead PDI, outage, HGI, TMT furnaces A/B, benchmarking) and
        renders them through the 'invoice.html' template via render_to_pdf.

        :param request: HTTP request; a ``download`` query parameter switches
            the Content-Disposition from inline to attachment.
        :return: HttpResponse with the PDF on success, otherwise a
            JsonResponse / HttpResponse describing the failure.
        """
        try:
            assert self._db_connection, {
                STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR,
                MESSAGE_KEY: DB_ERROR
            }

            def _fetch_frame(query):
                """Execute *query* and return the fetched rows as a DataFrame."""
                self._psql_session.execute(query)
                return pd.DataFrame(self._psql_session.fetchall())

            def _notification_columns(df):
                """Restrict a non-empty notification frame to the report columns."""
                if not df.empty:
                    return df[['Date Time', 'Category', 'Notification']]
                return df

            def _format_alerts(df):
                """Collapse raw alert rows to one row per (time, message),
                folding the affected tag names into the notification text."""
                if df.empty:
                    return df
                df['Date Time'] = df['Date Time'].dt.tz_convert(None)
                df['Date Time'] = df['Date Time'].dt.strftime(
                    '%d/%b/%Y %H:%M')
                alert = df[['tag_name', 'Date Time', 'Notification']]
                grouped = alert.groupby(
                    ['Date Time', 'Notification'])['tag_name'].apply(
                        ', '.join).reset_index()
                grouped['Notification'] = grouped['Notification'].str.cat(
                    grouped['tag_name'], sep=" - ")
                grouped['Category'] = 'Alert'
                return grouped[['Date Time', 'Category', 'Notification']]

            def _styled(frames):
                """Concatenate *frames* and widen the Notification column
                for the PDF layout (styling is lazy until rendered)."""
                combined = pd.concat(frames)
                return combined.style.set_properties(
                    subset=['Notification'], **{'width': '400px'})

            dict_data = {}

            user = _fetch_frame(
                CHECK_AUTHENTICATION_QUERY.format(
                    self.loggedin_userid_details[LOGIN_ID]))
            if not self._psql_session.rowcount:
                return JsonResponse(
                    {MESSAGE_KEY: "LOGIN ID NOT REGISTER WITH US"},
                    status=HTTP_400_BAD_REQUEST)
            permission = _fetch_frame(
                GET_PERMISSION.format(user['user_type'].iloc[0]))
            if not permission.empty:
                permissions = list(permission["feature"])
            else:
                permissions = []
            if 'Download Notifications for Selected Dates' not in permissions:
                return JsonResponse({MESSAGE_KEY: "FORBIDDEN ERROR"},
                                    status=HTTP_403_FORBIDDEN)

            download_period = _fetch_frame(DOWNLOAD_NOTIFICATION_PERIOD)
            if not download_period.empty:
                download_time_period = int(download_period['value'].iloc[0])
            else:
                # NOTE(review): with no configured period the comparison
                # below raises TypeError, which the outer handler turns
                # into an internal-server-error response (original
                # behaviour preserved).
                download_time_period = None

            # NOTE(review): when self.query_params is falsy the original
            # code fell off the end and returned None; preserved.
            if self.query_params:
                query_params = {
                    START_DATE_REQUEST:
                    self.query_params.GET[START_DATE_REQUEST],
                    END_DATE_REQUEST:
                    self.query_params.GET[END_DATE_REQUEST]
                }
                start = query_params[START_DATE_REQUEST]
                end = query_params[END_DATE_REQUEST]
                d0 = np.datetime64(start).astype('int64')
                d1 = np.datetime64(end).astype('int64')
                # Number of days between start date and end date
                # (datetime64 epoch values are in milliseconds).
                delta = (d1 - d0) / (24 * 3600000)

                if delta > download_time_period:
                    return JsonResponse(
                        {
                            MESSAGE_KEY:
                            "The days to download exceeds the default download time period"
                        },
                        safe=False)

                tm = t.time()
                LAST_MODIFIED_DATE = pd.to_datetime(
                    tm, unit='s').strftime('%d/%b/%Y %H:%M')
                start_date = to_datetime(start,
                                         format='%Y-%m-%dT%H:%M:%S.%fZ')
                converted_start_date = pd.to_datetime(
                    start_date).strftime('%d-%b-%Y %H:%M:%S')
                end_date = to_datetime(end, format='%Y-%m-%dT%H:%M:%S.%fZ')
                converted_end_date = pd.to_datetime(end_date).strftime(
                    '%d-%b-%Y %H:%M:%S')
                dict_data["current_time"] = LAST_MODIFIED_DATE
                dict_data["duration"] = str(
                    converted_start_date) + " to " + str(converted_end_date)

                # Overhead PDI section.
                overhead_notifications = _notification_columns(
                    _fetch_frame(
                        DOWNLOAD_NOTIFICATIONS_LIST.format(
                            OVER_HEAD_PDI_TABLE, OVER_HEAD_MODULE, start,
                            end)))
                overhead_alerts = _format_alerts(
                    _fetch_frame(
                        DOWNLOAD_NOTIFICATION_ERROR_DETAILS.format(
                            OVER_HEAD_MODULE, start, end)))
                pdi_dataFrame = _styled(
                    [overhead_notifications, overhead_alerts])
                if not overhead_notifications.empty or not overhead_alerts.empty:
                    # ``render`` is deliberately left uncalled: Django's
                    # template engine invokes callables while rendering
                    # 'invoice.html'.
                    dict_data["overhead_pdi"] = pdi_dataFrame.render

                # Outage section.
                outage_notifications = _notification_columns(
                    _fetch_frame(
                        DOWNLOAD_NOTIFICATIONS_LIST.format(
                            OUTGAE_TABLE, OUTAGE_MODULE, start, end)))
                outage_alerts = _format_alerts(
                    _fetch_frame(
                        DOWNLOAD_NOTIFICATION_ERROR_DETAILS.format(
                            OUTAGE_MODULE, start, end)))
                outage_dataFrame = _styled(
                    [outage_notifications, outage_alerts])
                if not outage_notifications.empty or not outage_alerts.empty:
                    dict_data["outage"] = outage_dataFrame.render

                # HGI section.
                hgi_notifications = _notification_columns(
                    _fetch_frame(
                        DOWNLOAD_NOTIFICATIONS_LIST.format(
                            HGI_TABLE, HGI_MODULE, start, end)))
                hgi_alerts = _format_alerts(
                    _fetch_frame(
                        DOWNLOAD_NOTIFICATION_ERROR_DETAILS.format(
                            HGI_MODULE, start, end)))
                hgi_dataFrame = _styled([hgi_notifications, hgi_alerts])
                if not hgi_notifications.empty or not hgi_alerts.empty:
                    dict_data["hgi"] = hgi_dataFrame.render

                # TMT furnace A section (regular + spall notifications).
                tmt_furnace_A_notifications = _notification_columns(
                    _fetch_frame(
                        DOWNLOAD_NOTIFICATIONS_LIST_TMT.format(
                            TMT_RESULT_TABLE, TMT_FURNACE_A_MODULE, start,
                            end)))
                tmt_furnace_spall_A_notifications = _notification_columns(
                    _fetch_frame(
                        DOWNLOAD_NOTIFICATIONS_LIST_TMT.format(
                            TMT_SPALL_RESULT, TMT_FURNACE_A_SPALL_MODULE,
                            start, end)))
                tmt_furnace_A_alerts = _format_alerts(
                    _fetch_frame(
                        DOWNLOAD_NOTIFICATION_ERROR_DETAILS_TMT.format(
                            ERROR_TMT_A, start, end)))
                tmt_A_dataFrame = _styled([
                    tmt_furnace_A_notifications,
                    tmt_furnace_spall_A_notifications, tmt_furnace_A_alerts
                ])
                if (not tmt_furnace_A_notifications.empty
                        or not tmt_furnace_spall_A_notifications.empty
                        or not tmt_furnace_A_alerts.empty):
                    dict_data["furnace_tmt_A"] = tmt_A_dataFrame.render

                # TMT furnace B section (regular + spall notifications).
                tmt_furnace_B_notifications = _notification_columns(
                    _fetch_frame(
                        DOWNLOAD_NOTIFICATIONS_LIST_TMT.format(
                            TMT_RESULT_TABLE, TMT_FURNACE_B_MODULE, start,
                            end)))
                tmt_furnace_spall_B_notifications = _notification_columns(
                    _fetch_frame(
                        DOWNLOAD_NOTIFICATIONS_LIST_TMT.format(
                            TMT_SPALL_RESULT, TMT_FURNACE_B_SPALL_MODULE,
                            start, end)))
                tmt_furnace_B_alerts = _format_alerts(
                    _fetch_frame(
                        DOWNLOAD_NOTIFICATION_ERROR_DETAILS_TMT.format(
                            ERROR_TMT_B, start, end)))
                tmt_B_dataFrame = _styled([
                    tmt_furnace_B_notifications,
                    tmt_furnace_spall_B_notifications, tmt_furnace_B_alerts
                ])
                if (not tmt_furnace_B_notifications.empty
                        or not tmt_furnace_spall_B_notifications.empty
                        or not tmt_furnace_B_alerts.empty):
                    dict_data["furnace_tmt_B"] = tmt_B_dataFrame.render

                # Benchmarking section: alerts only, styled (and added to
                # the context) only when rows exist.
                benchmark_alerts = _format_alerts(
                    _fetch_frame(
                        DOWNLOAD_BENCH_MARK_ERROR.format(start, end)))
                if not benchmark_alerts.empty:
                    benchmark_dataFrame = benchmark_alerts.style.set_properties(
                        subset=['Notification'], **{'width': '400px'})
                    dict_data["benchmarking"] = benchmark_dataFrame.render

                # Logo images resolved relative to this source file.
                SITE_ROOT = os.path.dirname(os.path.realpath(__file__))
                image_1 = "/../..//templates//p66logo.png"
                image_2 = "/../..//templates//ingenero_logo.png"
                dict_data["image_1"] = SITE_ROOT + image_1
                dict_data["image_2"] = SITE_ROOT + image_2

                pdf = render_to_pdf('invoice.html', dict_data)
                if pdf:
                    response = HttpResponse(pdf,
                                            content_type='application/pdf')
                    filename = "Notifications.pdf"
                    content = "inline; filename=%s" % filename
                    download = request.GET.get("download")
                    if download:
                        content = "attachment; filename=%s" % filename
                    response['Content-Disposition'] = content
                    return response
                return HttpResponse("Not found")
        except AssertionError as e:
            log_error("Exception occurred due to" + str(e))
            return asert_res(e)

        except Exception as e:
            log_error("Exception occurred due to" + str(e))
            return json_InternalServerError
    def get_notifications(self):
        """
        Build the notification feed for the logged-in user.

        For each module (Coke Drum: Overhead PDI / Outage / HGI, TMT furnaces
        H3901A and H3901B, Benchmarking) this collects the regular
        notifications plus error alerts (tagged ``type == 'Alert'``) over the
        configured default view period, and returns them grouped per module
        together with the maximum allowed download time period.

        :return: JsonResponse keyed per module on success; 400 when the login
                 id is unknown; 403 when the user lacks the
                 'Notification Menu' permission; 500 on DB/connection errors.
        """
        try:
            assert self._db_connection, {
                STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR,
                MESSAGE_KEY: DB_ERROR
            }
            system_overview = []
            Benchmarking = []

            def fetch_df(query):
                """Execute *query* on the shared cursor and return the rows
                as a DataFrame (empty frame when no rows)."""
                self._psql_session.execute(query)
                return pd.DataFrame(self._psql_session.fetchall())

            def merge_alerts(notifications, alerts):
                """Tag error rows as alerts and append them to the feed.

                NOTE: uses pd.concat — DataFrame.append is deprecated since
                pandas 1.4 and removed in 2.0.
                """
                if alerts.empty:
                    return notifications
                alerts['type'] = 'Alert'
                return pd.concat([notifications, alerts], ignore_index=True)

            login_id = self.loggedin_userid_details[LOGIN_ID]

            # Authentication / authorization gate.
            user = fetch_df(CHECK_AUTHENTICATION_QUERY.format(login_id))
            if not self._psql_session.rowcount:
                return JsonResponse(
                    {MESSAGE_KEY: "LOGIN ID NOT REGISTER WITH US"},
                    status=HTTP_400_BAD_REQUEST)
            permission = fetch_df(
                GET_PERMISSION.format(user['user_type'].iloc[0]))
            permissions = (list(permission["feature"])
                           if not permission.empty else [])
            if 'Notification Menu' not in permissions:
                return JsonResponse({MESSAGE_KEY: "FORBIDDEN ERROR"},
                                    status=HTTP_403_FORBIDDEN)

            # Configured time windows (None when the setting row is missing).
            download_period = fetch_df(DOWNLOAD_NOTIFICATION_PERIOD)
            download_time_period = (int(download_period['value'].iloc[0])
                                    if not download_period.empty else None)
            notification_view = fetch_df(DEFAULT_NOTIFICATION_VIEW)
            notification_view_time = (int(notification_view['value'].iloc[0])
                                      if not notification_view.empty else None)

            # Coke Drum: Overhead PDI.
            overhead_notifications = fetch_df(
                NOTIFICATIONS_LIST.format(
                    OVER_HEAD_PDI_TABLE, OVER_HEAD_MODULE,
                    OVER_HEAD_PDI_TABLE, notification_view_time, login_id))
            overhead_alerts = fetch_df(
                NOTIFICATION_ERROR_DETAILS.format(
                    OVER_HEAD_MODULE, OVER_HEAD_PDI_TABLE,
                    notification_view_time, login_id))
            pdi_overhead_notifications = merge_alerts(
                overhead_notifications,
                overhead_alerts).to_dict(orient=RECORDS)

            # Coke Drum: Outage.
            outage_notifications = fetch_df(
                NOTIFICATIONS_LIST.format(
                    OUTGAE_TABLE, OUTAGE_MODULE, OUTGAE_TABLE,
                    notification_view_time, login_id))
            outage_alerts = fetch_df(
                NOTIFICATION_ERROR_DETAILS.format(
                    OUTAGE_MODULE, OUTGAE_TABLE, notification_view_time,
                    login_id))
            coke_drum_outage_notifications = merge_alerts(
                outage_notifications, outage_alerts).to_dict(orient=RECORDS)

            # Coke Drum: HGI.
            hgi_notifications = fetch_df(
                NOTIFICATIONS_LIST.format(
                    HGI_TABLE, HGI_MODULE, HGI_TABLE, notification_view_time,
                    login_id))
            hgi_alerts = fetch_df(
                NOTIFICATION_ERROR_DETAILS.format(
                    HGI_MODULE, HGI_TABLE, notification_view_time, login_id))
            coke_drum_hgi_notifications = merge_alerts(
                hgi_notifications, hgi_alerts).to_dict(orient=RECORDS)

            # Furnace H3901A: regular + spall results, then error alerts.
            tmt_furnace_A_notifications = pd.concat([
                fetch_df(NOTIFICATIONS_LIST_TMT.format(
                    TMT_RESULT_TABLE, TMT_FURNACE_A_MODULE, TMT_RESULT_TABLE,
                    notification_view_time, login_id)),
                fetch_df(NOTIFICATIONS_LIST_TMT.format(
                    TMT_SPALL_RESULT, TMT_FURNACE_A_SPALL_MODULE,
                    TMT_SPALL_RESULT, notification_view_time, login_id))
            ], ignore_index=True)
            tmt_furnace_A_alerts = pd.concat([
                fetch_df(NOTIFICATION_ERROR_DETAILS_TMT.format(
                    ERROR_TMT_A, TMT_RESULT_TABLE, notification_view_time,
                    login_id)),
                fetch_df(NOTIFICATION_ERROR_DETAILS_TMT.format(
                    ERROR_TMT_A, TMT_SPALL_RESULT, notification_view_time,
                    login_id))
            ], ignore_index=True)
            furnace_H3901A = merge_alerts(
                tmt_furnace_A_notifications,
                tmt_furnace_A_alerts).to_dict(orient=RECORDS)

            # Furnace H3901B: same pattern with the B module/error constants.
            tmt_furnace_B_notifications = pd.concat([
                fetch_df(NOTIFICATIONS_LIST_TMT.format(
                    TMT_RESULT_TABLE, TMT_FURNACE_B_MODULE, TMT_RESULT_TABLE,
                    notification_view_time, login_id)),
                fetch_df(NOTIFICATIONS_LIST_TMT.format(
                    TMT_SPALL_RESULT, TMT_FURNACE_B_SPALL_MODULE,
                    TMT_SPALL_RESULT, notification_view_time, login_id))
            ], ignore_index=True)
            tmt_furnace_B_alerts = pd.concat([
                fetch_df(NOTIFICATION_ERROR_DETAILS_TMT.format(
                    ERROR_TMT_B, TMT_RESULT_TABLE, notification_view_time,
                    login_id)),
                fetch_df(NOTIFICATION_ERROR_DETAILS_TMT.format(
                    ERROR_TMT_B, TMT_SPALL_RESULT, notification_view_time,
                    login_id))
            ], ignore_index=True)
            furnace_H3901B = merge_alerts(
                tmt_furnace_B_notifications,
                tmt_furnace_B_alerts).to_dict(orient=RECORDS)

            # Benchmarking only has error alerts, no regular notifications.
            bench_mark_alerts = fetch_df(
                BENCH_MARK_ERROR.format(notification_view_time, login_id))
            if not bench_mark_alerts.empty:
                bench_mark_alerts['type'] = 'Alert'
                Benchmarking = bench_mark_alerts.to_dict(orient=RECORDS)

            old_dict = {
                "Coke Drum :Overhead PDI":
                pdi_overhead_notifications,
                "Coke Drum :Outage":
                coke_drum_outage_notifications,
                "Coke Drum :HGI":
                coke_drum_hgi_notifications,
                "Furnace : H3901A Pass 3 & 4":
                furnace_H3901A,
                "Furnace : H3901B Pass 1 & 2":
                furnace_H3901B,
                "Benchmarking":
                Benchmarking,
                "System Overview":
                system_overview,
                "Maximum Values Notification Download Time Period":
                download_time_period,
            }
            return JsonResponse(old_dict, safe=False)
        except AssertionError as e:
            log_error("Exception occurred due to" + str(e))
            return asert_res(e)

        except Exception as e:
            log_error("Exception occurred due to" + str(e))
            return json_InternalServerError
    # Example #14 (scraper artifact: original marker read "示例#14 / 0")
    def get_user_values(self):
        """
        Return the user-management configuration and the full user list.

        Reads the configured limits (admin-user limit, current admin count,
        standard-user limit, parallel-session limit) and all registered
        users from the database. Each value falls back to None when its
        query fails or returns no rows.

        :return: JsonResponse with keys 'users', 'admin_count',
                 'total_admin', 'standard' and 'parallel_sessions';
                 an assertion/500 response on connection errors.
        """
        try:
            assert self._db_connection, {
                STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR,
                MESSAGE_KEY: DB_ERROR
            }

            def fetch_limit(query, column):
                """Run *query* and return the first *column* value as a
                plain Python int, or None on failure / no rows."""
                try:
                    self._psql_session.execute(query)
                    df = pd.DataFrame(self._psql_session.fetchall())
                    if not df.empty:
                        # .item() unboxes the numpy scalar so the value is
                        # JSON-serializable.
                        return df[column].iloc[0].item()
                except Exception as e:
                    # Fixed: original logged '"... : %s" + str(e)', which
                    # concatenated a literal '%s' instead of interpolating.
                    log_error("Exception due to : " + str(e))
                return None

            # Query order preserved from the original implementation.
            temp = {
                "users": [],
                "admin_count": fetch_limit(GET_ADMIN_USER, 'limit_value'),
                "total_admin": fetch_limit(GET_ADMIN_COUNT, 'count'),
                "standard": fetch_limit(GET_STANDARD_USER_COUNT,
                                        'limit_value'),
                "parallel_sessions": fetch_limit(GET_PARALLEL_SESSION,
                                                 'limit_value')
            }

            try:
                self._psql_session.execute(GET_ALL_USER)
                df = pd.DataFrame(self._psql_session.fetchall())
                # to_json + safe_load round-trip converts NaN/numpy types
                # into plain JSON-compatible Python values.
                temp['users'] = yaml.safe_load(df.to_json(orient=RECORDS))
            except Exception as e:
                log_error("Exception due to : " + str(e))
            return JsonResponse(temp, safe=False)

        except AssertionError as e:
            log_error("Exception due to : " + str(e))
            return asert_res(e)
        except Exception as e:
            log_error("Exception due to : " + str(e))
            return json_InternalServerError