def update_case(self):
        """
        Update which case is active for the selected console and equipment
        (dynamic benchmarking overview).  Based on the query-constant names,
        this presumably clears the flag for every case of the
        console/equipment pair, then sets it for ``self.case`` — confirm
        against the query definitions.

        :return: JsonResponse with a success message, or an error payload
                 carrying the matching HTTP status on failure.
        """

        try:
            # Fail fast with a 500 payload when the DB connection is missing;
            # the asserted dict is consumed by the AssertionError handler.
            assert self._db_connection, {
                STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR,
                MESSAGE_KEY: DB_ERROR}

            # NOTE(review): queries are built with str.format — safe only if
            # console/equipment/case are trusted values; confirm they are not
            # user-controlled, otherwise parameterize.
            self._psql_session.execute(MAKE_FALSE_POSTGRES_QUERY.format(self.console, self.equipment))

            self._psql_session.execute(MAKE_TRUE_POSTGRES_QUERY.format(self.console, self.equipment, self.case))

            return JsonResponse({MESSAGE_KEY: UPDATED_SUCCESSFULLY},
                                safe=False)

        except AssertionError as e:
            # e.args[0] is the dict asserted above; reuse its status/message.
            log_error("Exception due to : %s" + str(e))
            return JsonResponse({MESSAGE_KEY: e.args[0][MESSAGE_KEY]},
                                status=e.args[0][STATUS_KEY])

        except Exception as e:
            log_error(traceback.format_exc())
            return JsonResponse({MESSAGE_KEY: EXCEPTION_CAUSE.format(
                traceback.format_exc())},
                status=HTTP_500_INTERNAL_SERVER_ERROR)
    # Example #2  (stray snippet-separator from the paste; commented out so the module parses)
    def get_furnaces(self):
        """
        Fetch the non-furnace equipment names and return them as a JSON
        array.

        :return: JsonResponse containing the fetched records (an empty list
                 when the query fails), or an error payload with the
                 matching HTTP status when the DB connection is missing or
                 an unexpected error occurs.
        """
        try:
            assert self._db_connection, {
                STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR,
                MESSAGE_KEY: DB_ERROR
            }

            self.sync_time = None

            temp = []

            try:
                self._psql_session.execute(NON_FURNACES_NAME)
                df = pd.DataFrame(self._psql_session.fetchall())
                temp = yaml.safe_load(df.to_json(orient=RECORDS))

            except Exception as e:
                # Best effort: log and fall through with an empty payload.
                # Bug fix: the old '... %s' + str(e) concatenation left a
                # literal "%s" in the log message; interpolate instead.
                log_error('Exception due to get_furnaces Function: {}'.format(e))
            return JsonResponse(temp, safe=False)

        except AssertionError as e:
            # e.args[0] is the dict asserted above; reuse its status/message.
            log_error('Exception due to get_furnaces Function: {}'.format(e))
            return JsonResponse({MESSAGE_KEY: e.args[0][MESSAGE_KEY]},
                                status=e.args[0][STATUS_KEY])

        except Exception as e:
            log_error(traceback.format_exc())
            return JsonResponse(
                {MESSAGE_KEY: EXCEPTION_CAUSE.format(traceback.format_exc())},
                status=HTTP_500_INTERNAL_SERVER_ERROR)
    # Example #3  (stray snippet-separator from the paste; commented out so the module parses)
    def add_user(self):
        """
        Create a new user record after verifying the caller's authorisation.

        :return: Json payload — the result of the private add-user query on
                 success, a 401 payload for an unauthorised caller, or an
                 error payload with the matching HTTP status.
        """
        try:
            # The asserted dict feeds the AssertionError handler below.
            assert self._db_connection, {
                STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR,
                MESSAGE_KEY: DB_ERROR
            }

            # Guard clause: reject unauthorised callers up front.
            if not self.__is_user_not_authorised():
                return self.__add_user_query()

            return JsonResponse({MESSAGE_KEY: NOT_AUTHORISED},
                                status=HTTP_401_UNAUTHORIZED)

        except AssertionError as err:
            log_error("Exception due to : %s" + str(err))
            details = err.args[0]
            return JsonResponse({MESSAGE_KEY: details[MESSAGE_KEY]},
                                status=details[STATUS_KEY])

        except Exception:
            trace = traceback.format_exc()
            log_error(trace)
            return JsonResponse({MESSAGE_KEY: EXCEPTION_CAUSE.format(trace)},
                                status=HTTP_500_INTERNAL_SERVER_ERROR)
    def update_price_input(self):
        """
        Persist the price-input rows from the request payload.

        Each entry of ``self._request_payload`` is expected to carry the
        keys ``price``, ``density LB/Gal``, ``density LB/Bbl``,
        ``price_unit`` and ``tag_name``.

        :return: JsonResponse with a success message, or an error payload
                 with the matching HTTP status.
        """
        try:
            assert self._db_connection, {
                STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR,
                MESSAGE_KEY: DB_ERROR
            }

            try:
                # NOTE(review): query built with str.format — confirm the
                # payload values are trusted, otherwise parameterize.
                for row in self._request_payload:
                    self._psql_session.execute(
                        UPDATE_PRICE_INPUT.format(row['price'],
                                                  row["density LB/Gal"],
                                                  row["density LB/Bbl"],
                                                  row["price_unit"],
                                                  row["tag_name"]))
            except Exception as e:
                # Deliberate best effort: a failed update is logged but the
                # success response below is still returned (preserved
                # behaviour).  Bug fix: interpolate the exception instead of
                # concatenating onto a literal '%s'.
                log_error('Exception occurs due to: {}'.format(e))

            return JsonResponse({MESSAGE_KEY: UPDATED_SUCCESSFULLY})
        except AssertionError as e:
            log_error('Exception in update_price_input api Function: {}'.format(e))
            return JsonResponse({MESSAGE_KEY: e.args[0][MESSAGE_KEY]},
                                status=e.args[0][STATUS_KEY])
        except Exception as e:
            log_error(traceback.format_exc())
            return JsonResponse(
                {MESSAGE_KEY: EXCEPTION_CAUSE.format(traceback.format_exc())},
                status=HTTP_500_INTERNAL_SERVER_ERROR)
    # Example #5  (stray snippet-separator from the paste; commented out so the module parses)
    def get_cases_values(self):
        """
        Return the cases configured for the selected console and equipment.

        :return: JsonResponse with the case records, an empty list when no
                 rows exist, or an error payload with the matching HTTP
                 status.
        """
        try:
            assert self._db_connection, {
                STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR,
                MESSAGE_KEY: DB_ERROR
            }

            # NOTE(review): str.format query building — confirm console and
            # equipment are trusted values.
            self._psql_session.execute(
                CASES_POSTGRES_QUERY.format(self.console, self.equipment))

            df = pd.DataFrame(self._psql_session.fetchall())

            # Idiomatic emptiness check (was `df.shape[0]`).
            if not df.empty:
                return JsonResponse(yaml.safe_load(df.to_json(orient=RECORDS)),
                                    safe=False)
            return JsonResponse([], safe=False)

        except AssertionError as e:
            # Bug fix: interpolate the exception instead of concatenating
            # onto a literal '%s'.
            log_error("Exception due to : {}".format(e))
            return JsonResponse({MESSAGE_KEY: e.args[0][MESSAGE_KEY]},
                                status=e.args[0][STATUS_KEY])

        except Exception as e:
            log_error(traceback.format_exc())
            return JsonResponse(
                {MESSAGE_KEY: EXCEPTION_CAUSE.format(traceback.format_exc())},
                status=HTTP_500_INTERNAL_SERVER_ERROR)
    def get_values(self):
        """
        Fetch the furnace-name configuration records for the requested type.

        :return: JsonResponse with the records — an empty list when no query
                 params are supplied or the query fails — or an error
                 payload with the matching HTTP status.
        """
        try:
            assert self._db_connection, {
                STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR,
                MESSAGE_KEY: DB_ERROR
            }
            temp = []

            if self.query_params:
                try:
                    self._psql_session.execute(
                        FEEDS_CONFIG_FURNACE_NAME.format(
                            self.query_params[TYPE_REQUEST]))
                    df = pd.DataFrame(self._psql_session.fetchall())
                    temp = yaml.safe_load(df.to_json(orient=RECORDS))
                except Exception as e:
                    # Bug fix: interpolate the exception instead of
                    # concatenating onto a literal '%s'.
                    log_error('Exception due to get_values Function: {}'.format(e))
            # Bug fix: previously the response was only returned inside the
            # `if` branch, so the view implicitly returned None when no
            # query params were supplied; always respond.
            return JsonResponse(temp, safe=False)

        except AssertionError as e:
            log_error('Exception due to get_values Function: {}'.format(e))
            return JsonResponse({MESSAGE_KEY: e.args[0][MESSAGE_KEY]},
                                status=e.args[0][STATUS_KEY])

        except Exception as e:
            log_error(traceback.format_exc())
            return JsonResponse(
                {MESSAGE_KEY: EXCEPTION_CAUSE.format(traceback.format_exc())},
                status=HTTP_500_INTERNAL_SERVER_ERROR)
    def get_algorithms_details(self):
        """
        Collect the algorithm status and the list of algorithm names from
        the database and return them together in one JSON response.

        :return: Json Responses
        """
        try:
            assert self._db_connection, {
                STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR,
                MESSAGE_KEY: DB_ERROR
            }

            # Gather both halves of the payload, then respond.
            status = self.get_algorithm_status()
            names = self.get_algorithm_list()

            return JsonResponse(
                {
                    "algorithm_status": status,
                    "algorithm_name": names
                },
                safe=False)

        except AssertionError as err:
            log_error("Exception due to : %s", err)
            details = err.args[0]
            return JsonResponse({MESSAGE_KEY: details[MESSAGE_KEY]},
                                status=details[STATUS_KEY])
        except Exception:
            trace = traceback.format_exc()
            log_error(trace)
            return JsonResponse({MESSAGE_KEY: EXCEPTION_CAUSE.format(trace)},
                                status=HTTP_500_INTERNAL_SERVER_ERROR)
    # Example #8  (stray snippet-separator from the paste; commented out so the module parses)
    def get_algorithms_by_name_and_type(self, algorithm_name):

        """
        Look up the parameter records and the file records stored for the
        given algorithm name and return them together as one JSON response.

        :param algorithm_name: name of the algorithm to look up.
        :return: Json Responses
        """
        try:
            assert self._db_connection, {
                STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR,
                MESSAGE_KEY: DB_ERROR}

            # Same evaluation order as before: params first, then files.
            params = self.get_param_data_by_algorithm_name(algorithm_name)
            files = self.get_file_data_by_algorithm_name(algorithm_name)

            return JsonResponse({PARAMS: params, FILES: files}, safe=False)

        except AssertionError as err:
            log_error("Exception due to : %s", err)
            details = err.args[0]
            return JsonResponse({MESSAGE_KEY: details[MESSAGE_KEY]},
                                status=details[STATUS_KEY])
        except Exception:
            trace = traceback.format_exc()
            log_error(trace)
            return JsonResponse({MESSAGE_KEY: EXCEPTION_CAUSE.format(trace)},
                                status=HTTP_500_INTERNAL_SERVER_ERROR)
    def get_furnaces(self):
        """
        Assemble the furnace payload: fetch the base dict data, pass it to
        the external/performance tag helper (which presumably augments it
        in place — confirm), and return it as JSON.

        :return: Json Response
        """
        try:
            assert self._db_connection, {
                STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR,
                MESSAGE_KEY: DB_ERROR
            }

            payload = self.get_dict_data_values()
            self.get_ext_and_perf_tags(payload)
            return JsonResponse(payload, safe=False)

        except AssertionError as err:
            log_error('Exception due to : %s' + str(err))
            details = err.args[0]
            return JsonResponse({MESSAGE_KEY: details[MESSAGE_KEY]},
                                status=details[STATUS_KEY])

        except Exception:
            trace = traceback.format_exc()
            log_error(trace)
            return JsonResponse({MESSAGE_KEY: EXCEPTION_CAUSE.format(trace)},
                                status=HTTP_500_INTERNAL_SERVER_ERROR)
    # Example #10  (stray snippet-separator from the paste; commented out so the module parses)
    def get_price_input__data(self, parameter=None):
        """
        Fetch the price-input JSON object from the database.

        :param parameter: unused; kept for backward-compatible interface.
        :return: JsonResponse with a single-element list containing the
                 row's ``json_build_object`` value, or an empty list when
                 no row exists or the query fails.
        """
        try:
            assert self._db_connection, {
                STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR,
                MESSAGE_KEY: DB_ERROR
            }

            result = []
            try:
                self._psql_session.execute(get_price_input_data)
                df = pd.DataFrame(self._psql_session.fetchall())
                if not df.empty:
                    result.append(df['json_build_object'][0])
            except Exception as e:
                # Best effort: log and answer with an empty list.
                # Bug fix: interpolate the exception instead of
                # concatenating onto a literal '%s'.
                log_error('Exception occurs due to: {}'.format(e))

            return JsonResponse(result, safe=False)
        except AssertionError as e:
            log_error('Exception in get_price_input api Function: {}'.format(e))
            return JsonResponse({MESSAGE_KEY: e.args[0][MESSAGE_KEY]},
                                status=e.args[0][STATUS_KEY])
        except Exception as e:
            log_error(traceback.format_exc())
            return JsonResponse(
                {MESSAGE_KEY: EXCEPTION_CAUSE.format(traceback.format_exc())},
                status=HTTP_500_INTERNAL_SERVER_ERROR)
    # Example #11  (stray snippet-separator from the paste; commented out so the module parses)
    def delete_user(self):
        """
        Delete the user details via the private delete-user query helper.

        :return: Json payload
        """
        try:
            # Consistency fix: sibling methods (add_user,
            # change_password_user) assert the DB connection before running
            # their query; without this assert the AssertionError handler
            # below was dead code and a missing connection surfaced as a
            # generic 500 from the helper instead.
            assert self._db_connection, {
                STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR,
                MESSAGE_KEY: DB_ERROR
            }

            return self.__delete_user_query()

        except AssertionError as e:
            # Bug fix: interpolate the exception instead of concatenating
            # onto a literal '%s'.
            log_error("Exception due to : {}".format(e))
            return JsonResponse({MESSAGE_KEY: e.args[0][MESSAGE_KEY]},
                                status=e.args[0][STATUS_KEY])

        except Exception as e:
            log_error(traceback.format_exc())
            return JsonResponse(
                {MESSAGE_KEY: EXCEPTION_CAUSE.format(traceback.format_exc())},
                status=HTTP_500_INTERNAL_SERVER_ERROR)
    def update_algorithms(self):
        """
        Update the stored parameter values for ``self.algo_name`` from the
        request payload (Cassandra session) and report success.

        :return: Json Response — success message, or an error payload with
                 the matching HTTP status.
        """
        try:
            assert self._db_connection, {
                STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR,
                MESSAGE_KEY: DB_ERROR
            }
            # Epoch milliseconds used as the modification stamp.
            last_modified = str(round(time.time() * 1000))
            for algo_data in self.request_payload:
                try:
                    query = UPDATE_PARAM_DATA.format(
                        NAME, TABLE_NAME, algo_data["param_value"],
                        algo_data["unit"], algo_data["description"],
                        algo_data["algo_tag"], last_modified, FLAG,
                        self.algo_name, algo_data["file_param_name"])
                    self._csql_session.execute(query)
                    log_info("updated--------" +
                             '---------Algorithm name-------' +
                             str(self.algo_name) + '----Param name-----' +
                             str(algo_data["file_param_name"]) +
                             '------update param value-----' +
                             str(algo_data["param_value"]))
                except Exception as e:
                    # Bug fix: a generic exception does not carry a
                    # {MESSAGE_KEY/STATUS_KEY} dict in e.args[0], so the old
                    # handler raised a secondary TypeError while building
                    # the response.  Return a plain 500 payload instead.
                    log_error("Exception due to : {}".format(e))
                    return JsonResponse(
                        {MESSAGE_KEY: EXCEPTION_CAUSE.format(str(e))},
                        status=HTTP_500_INTERNAL_SERVER_ERROR)

            return JsonResponse({MESSAGE_KEY: UPDATED_SUCCESSFULLY},
                                safe=False)

        except AssertionError as e:
            log_error("Exception due to : %s", e)
            return JsonResponse({MESSAGE_KEY: e.args[0][MESSAGE_KEY]},
                                status=e.args[0][STATUS_KEY])
        except Exception as e:
            log_error(traceback.format_exc())
            return JsonResponse(
                {MESSAGE_KEY: EXCEPTION_CAUSE.format(traceback.format_exc())},
                status=HTTP_500_INTERNAL_SERVER_ERROR)
    def update_algorithms(self):
        """
        Toggle the active/deactive status of the configuration modules
        listed in the request payload.

        NOTE(review): this redefinition shadows the earlier
        ``update_algorithms`` in the same class — confirm which one is
        intended and rename one of them.

        :return: Json Response — success message, or an error payload with
                 the matching HTTP status.
        """
        try:
            assert self._db_connection, {
                STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR,
                MESSAGE_KEY: DB_ERROR
            }
            last_modified = datetime.now()
            for algo_data in self.request_payload:
                try:
                    # Truthy status -> activate query, otherwise deactivate.
                    if algo_data["status"]:
                        query = UPDATE_CONFIGURATION_ACTIVE_STATUS_DATA.format(
                            algo_data["status"], last_modified,
                            algo_data["module"])
                    else:
                        query = UPDATE_CONFIGURATION_DEACTIVE_STATUS_DATA.format(
                            algo_data["status"], last_modified,
                            algo_data["module"])

                    self._psql_session.execute(query)
                except Exception as e:
                    # Bug fix: a generic exception does not carry a
                    # {MESSAGE_KEY/STATUS_KEY} dict in e.args[0], so the old
                    # handler raised a secondary TypeError while building
                    # the response.  Return a plain 500 payload instead.
                    log_error("Exception due to : {}".format(e))
                    return JsonResponse(
                        {MESSAGE_KEY: EXCEPTION_CAUSE.format(str(e))},
                        status=HTTP_500_INTERNAL_SERVER_ERROR)

            return JsonResponse({MESSAGE_KEY: UPDATED_SUCCESSFULLY},
                                safe=False)

        except AssertionError as e:
            log_error("Exception due to : %s", e)
            return JsonResponse({MESSAGE_KEY: e.args[0][MESSAGE_KEY]},
                                status=e.args[0][STATUS_KEY])
        except Exception as e:
            log_error(traceback.format_exc())
            return JsonResponse(
                {MESSAGE_KEY: EXCEPTION_CAUSE.format(traceback.format_exc())},
                status=HTTP_500_INTERNAL_SERVER_ERROR)
    def change_password_user(self):
        """
        Change the stored password for the user by delegating to the
        private change-password query helper.

        :return: Json payload
        """
        try:
            # The asserted dict feeds the AssertionError handler below.
            assert self._db_connection, {
                STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR,
                MESSAGE_KEY: DB_ERROR
            }
            return self.__change_user_password_query()

        except AssertionError as err:
            log_error("Exception due to : %s" + str(err))
            details = err.args[0]
            return JsonResponse({MESSAGE_KEY: details[MESSAGE_KEY]},
                                status=details[STATUS_KEY])

        except Exception:
            trace = traceback.format_exc()
            log_error(trace)
            return JsonResponse({MESSAGE_KEY: EXCEPTION_CAUSE.format(trace)},
                                status=HTTP_500_INTERNAL_SERVER_ERROR)
    # Example #15  (stray snippet-separator from the paste; commented out so the module parses)
    def update_external_targets(self):
        """
        Update the external targets by delegating to the private update
        query helper.

        :return: Json payload
        """
        try:
            # The asserted dict feeds the AssertionError handler below.
            assert self._db_connection, {
                STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR,
                MESSAGE_KEY: DB_ERROR
            }
            return self.__update_external_query()

        except AssertionError as err:
            log_error("Exception due to update_external_targets Function: %s",
                      err)
            details = err.args[0]
            return JsonResponse({MESSAGE_KEY: details[MESSAGE_KEY]},
                                status=details[STATUS_KEY])

        except Exception:
            trace = traceback.format_exc()
            log_error(trace)
            return JsonResponse({MESSAGE_KEY: EXCEPTION_CAUSE.format(trace)},
                                status=HTTP_500_INTERNAL_SERVER_ERROR)
    # Example #16  (stray snippet-separator from the paste; commented out so the module parses)
    def get_furnace_data(self):
        """
        Build the module-level payload for the furnace equipment.

        Queries the latest TMT rows and tag rows for
        ``self.equipment``/``self.module``, derives the alarm rows (tags
        whose name contains ALARM, plus non-null 'Reco' tags), and attaches
        error details sharing the latest create_ts.

        :return: dict payload when ``self.equipment == FURNACE_VALUE``
                 (whether or not data exists), a JsonResponse for an
                 unregistered equipment, or an error JsonResponse.
                 NOTE(review): when the inner try block raises, the error is
                 logged and the function implicitly returns None — confirm
                 callers tolerate a missing response.
        """
        try:
            # Fail fast with a 500 payload when the DB connection is absent.
            assert self._db_connection, {
                STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR,
                MESSAGE_KEY: DB_ERROR
            }
            # Skeleton of the response; the lists are filled in below.
            module_level_data = []
            error_details = []
            furnace_details = []
            latest_timestamp = None
            dict_data = {
                TIMESTAMP_KEY: latest_timestamp,
                "data": module_level_data,
                "errors": error_details,
                "furnaces": furnace_details,
                "alarms": []
            }
            if self.equipment == FURNACE_VALUE:
                # NOTE(review): the commented-out code below is the old
                # spall-status branch (cf. get_furnace_spall_data); kept
                # verbatim pending confirmation it is safe to delete.
                # if self.module == PASS_A_VALUE:
                #     tag_name = FURNACE_A_CHECK_STATUS_TAGS
                #     online_tag = PASS_A_ONLINE_STATUS_TAG
                #     spall_tag = PASS_A_SPALL_STATUS_TAG
                # else:
                #     tag_name = FURNACE_B_CHECK_STATUS_TAGS
                #     online_tag = PASS_B_ONLINE_STATUS_TAG
                #     spall_tag = PASS_B_SPALL_STATUS_TAG
                try:
                    # self._psql_session.execute(FURNACE_TMT_SPALL_CHECK.format(self.module, tuple(tag_name)))
                    # df_spall_check = pd.DataFrame(self._psql_session.fetchall())
                    # df_spall_check.set_index(TAG_NAME_REQUEST, inplace=True)
                    # df_spall_transposed = df_spall_check.T
                    # if df_spall_transposed[online_tag].iloc[0] == ZERO and df_spall_transposed[spall_tag].iloc[
                    #     0] == ONE:
                    #     """
                    #     This will return the module level data for the furnace module' spall effectiveness status
                    #     """
                    #     dict_data["is_spall"] = True
                    #     self._psql_session.execute(MODULE_LEVEL_SPALL_DATA.format(self.equipment, self.module))
                    #     df = pd.DataFrame(self._psql_session.fetchall())
                    #     self._psql_session.execute(MODULE_LEVEL_SPALL_TAGS_DATA.format(self.equipment, self.module))
                    #     df_tag = pd.DataFrame(self._psql_session.fetchall())
                    # else:
                    """
                    This will return the module level data for the furnace module
                    """
                    # dict_data["is_spall"] = False
                    # Fetch the TMT rows and the TMT tag rows for this
                    # equipment/module.
                    self._psql_session.execute(
                        MODULE_LEVEL_TMT_DATA.format(self.equipment,
                                                     self.module))
                    df = pd.DataFrame(self._psql_session.fetchall())
                    self._psql_session.execute(
                        MODULE_LEVEL_TMT_TAGS_DATA.format(
                            self.equipment, self.module))
                    df_tag = pd.DataFrame(self._psql_session.fetchall())
                    if not df.empty:
                        # Replace NaN with None so the JSON payload carries
                        # nulls rather than NaN.
                        df = df.where(pd.notnull(df) == True, None)
                        dict_data[TIMESTAMP_KEY] = df[CREATE_TS].iloc[0]
                        df_data = df.drop(columns=CREATE_TS)
                        dict_data["data"] = dict_data["data"] + yaml.safe_load(
                            df_data.to_json(orient=RECORDS))
                        # Alarms: tags whose name contains ALARM, plus any
                        # 'Reco' tags that carry a non-null value.
                        alarm_tags = df_data[
                            df_data[TAG_NAME_REQUEST].str.contains(ALARM)]
                        # if dict_data["is_spall"] == False:
                        reco_tags = df_data[
                            df_data[TAG_NAME_REQUEST].str.contains('Reco')]
                        # reco_tags.loc[reco_tags['tag_value'].isnull(), 'condition'] = ' - '
                        reco_tags = (reco_tags[reco_tags.tag_value.notnull()])
                        if not reco_tags.empty:
                            alarm_tags = pd.concat([alarm_tags, reco_tags])
                        if len(alarm_tags) != 0:
                            # if dict_data["is_spall"] == True:
                            #     alarm_tags = alarm_tags[alarm_tags.condition != " - "]
                            if DEBUG == ZERO:
                                print("Sorry for spall Added new files")
                        else:
                            # No alarms found: fall back to the plain ALARM
                            # filter (yields the same empty selection).
                            alarm_tags = df_data[
                                df_data[TAG_NAME_REQUEST].str.contains(ALARM)]

                        dict_data[
                            "alarms"] = dict_data["alarms"] + yaml.safe_load(
                                alarm_tags.to_json(orient=RECORDS))
                        # Error details for the same latest timestamp.
                        self._psql_session.execute(
                            ERROR_DATA.format(self.equipment, self.module,
                                              df[CREATE_TS].iloc[0]))
                        df_error = pd.DataFrame(self._psql_session.fetchall())
                        if not df_error.empty:
                            df_error = df_error.where(
                                pd.notnull(df_error) == True, None)
                            df_error = df_error.drop_duplicates()
                            dict_data["errors"] = dict_data[
                                "errors"] + yaml.safe_load(
                                    df_error.to_json(orient=RECORDS))
                        else:
                            if DEBUG == ONE:
                                print("Currently no error details!")
                        # Furnace tag rows (without their create_ts column).
                        if not df_tag.empty:
                            df_tag = df_tag.where(
                                pd.notnull(df_tag) == True, None)
                            df_tag = df_tag.drop(columns=CREATE_TS)
                            dict_data["furnaces"] = dict_data[
                                "furnaces"] + yaml.safe_load(
                                    df_tag.to_json(orient=RECORDS))
                        else:
                            if DEBUG == ONE:
                                print(
                                    "Currently no tag details for spall efficiency!"
                                )
                    else:
                        # No TMT rows: return the empty skeleton as-is.
                        return dict_data
                    return dict_data
                except Exception as e:
                    # NOTE(review): swallows the error and implicitly
                    # returns None — see docstring.
                    log_error("Exception due to : %s" + str(e))
            else:
                return JsonResponse(
                    "This equipment is not registered with us!", safe=False)
        except AssertionError as e:
            # e.args[0] is the dict asserted above; reuse its status/message.
            log_error("Assertion error due to : %s" + str(e))
            return JsonResponse({MESSAGE_KEY: e.args[0][MESSAGE_KEY]},
                                status=e.args[0][STATUS_KEY])

        except Exception as e:
            log_error("Exception due to : %s" + str(e))
            return JsonResponse(
                {MESSAGE_KEY: EXCEPTION_CAUSE.format(traceback.format_exc())},
                status=HTTP_500_INTERNAL_SERVER_ERROR)
    # Example #17  (stray snippet-separator from the paste; commented out so the module parses)
    def get_furnace_spall_data(self):
        """
        This will return the module level data for the furnace module
        :return: Json response
        """
        try:
            assert self._db_connection, {
                STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR,
                MESSAGE_KEY: DB_ERROR
            }
            module_level_data = []
            error_details = []
            furnace_details = []
            latest_timestamp = None
            rst = None
            dict_data = {
                TIMESTAMP_KEY: latest_timestamp,
                "data": module_level_data,
                "errors": error_details,
                "furnaces": furnace_details,
                "alarms": []
            }
            if self.module == 'H3901A: Pass 3 & 4 (Spall)':
                rst = PASS_A_VALUE
            if self.module == 'H3901B: Pass 1 & 2 (Spall)':
                rst = PASS_B_VALUE
            try:
                if self.module == 'H3901A: Pass 3 & 4 (Spall)':
                    tag_name = FURNACE_A_CHECK_STATUS_TAGS
                    online_tag = PASS_A_ONLINE_STATUS_TAG
                    spall_tag = PASS_A_SPALL_STATUS_TAG
                else:
                    tag_name = FURNACE_B_CHECK_STATUS_TAGS
                    online_tag = PASS_B_ONLINE_STATUS_TAG
                    spall_tag = PASS_B_SPALL_STATUS_TAG
                self._psql_session.execute(
                    FURNACE_TMT_SPALL_CHECK.format(rst, tuple(tag_name)))
                df_spall_check = pd.DataFrame(self._psql_session.fetchall())
                df_spall_check.set_index(TAG_NAME_REQUEST, inplace=True)
                df_spall_transposed = df_spall_check.T
                if df_spall_transposed[online_tag].iloc[
                        0] == ZERO and df_spall_transposed[spall_tag].iloc[
                            0] == ONE:
                    """
                    This will return the module level data for the furnace module' spall effectiveness status
                    """
                    dict_data["is_spall"] = True
                else:
                    """
                    This will return the module level data for the furnace module
                    """
                    dict_data["is_spall"] = False
                self._psql_session.execute(
                    MODULE_LEVEL_SPALL_DATA.format(self.equipment,
                                                   self.module))
                df = pd.DataFrame(self._psql_session.fetchall())
                self._psql_session.execute(
                    MODULE_LEVEL_SPALL_TAGS_DATA.format(
                        self.equipment, self.module))
                df_tag = pd.DataFrame(self._psql_session.fetchall())
                self._psql_session.execute(
                    "select max(create_ts) as create_ts  from tmt_result t left join equipment_master e on "
                    "t.equipment_id=e.id "
                    "where equipment_name = '{}' and module_name ='{}' and create_ts = (SELECT max(create_ts) from "
                    "tmt_result)".format(self.equipment, rst))
                df_time = pd.DataFrame(self._psql_session.fetchall())
                self._psql_session.execute(
                    SPALL_TAGS_DATA.format(self.equipment, self.module))
                df_spall_tag = pd.DataFrame(self._psql_session.fetchall())
                self._psql_session.execute(
                    MODULE_LEVEL_SPALL_TAGS.format(self.module))
                df_module_tag = pd.DataFrame(self._psql_session.fetchall())
                if df.empty:
                    dict_data[TIMESTAMP_KEY] = df_time[CREATE_TS].iloc[0]
                    if not df_spall_tag.empty:
                        df_spall_tag['tag_type'], df_spall_tag[
                            'tag_value'], df_spall_tag[
                                'condition'] = None, None, None
                        dict_data["data"] = dict_data["data"] + yaml.safe_load(
                            df_spall_tag.to_json(orient=RECORDS))
                    if not df_module_tag.empty:
                        df_module_tag['tag_value'], df_module_tag[
                            'tag_type'], df_module_tag[
                                'condition'] = None, None, None
                        dict_data["furnaces"] = dict_data[
                            "furnaces"] + yaml.safe_load(
                                df_module_tag.to_json(orient=RECORDS))
                if not df.empty:
                    df = df.where(pd.notnull(df) == True, None)
                    dict_data[TIMESTAMP_KEY] = df[CREATE_TS].iloc[0]
                    df_data = df.drop(columns=CREATE_TS)
                    if not df_spall_tag.empty:
                        df_data = pd.merge(df_data,
                                           df_spall_tag,
                                           on=[
                                               'tag_name', 'equipment_name',
                                               'module_name', 'equipment_id'
                                           ],
                                           how='right')
                        df_data.drop(['unit_x', 'description_x'],
                                     axis=1,
                                     inplace=True)
                        df_data.rename(
                            {
                                'unit_y': 'unit',
                                'description_y': 'description'
                            },
                            axis=1,
                            inplace=True)
                    dict_data["data"] = dict_data["data"] + yaml.safe_load(
                        df_data.to_json(orient=RECORDS))
                    alarm_tags = df_data[
                        df_data[TAG_NAME_REQUEST].str.contains(ALARM)]
                    if len(alarm_tags) != 0:
                        alarm_tags = alarm_tags[alarm_tags.condition != " - "]
                        alarm_tags = alarm_tags[
                            alarm_tags['condition'].notna()]
                    else:
                        alarm_tags = df_data[
                            df_data[TAG_NAME_REQUEST].str.contains(ALARM)]
                    dict_data["alarms"] = dict_data["alarms"] + yaml.safe_load(
                        alarm_tags.to_json(orient=RECORDS))
                    self._psql_session.execute(
                        ERROR_DATA.format(self.equipment, self.module,
                                          df[CREATE_TS].iloc[0]))
                    df_error = pd.DataFrame(self._psql_session.fetchall())
                    if not df_error.empty:
                        df_error = df_error.where(
                            pd.notnull(df_error) == True, None)
                        df_error = df_error.drop_duplicates()
                        dict_data[
                            "errors"] = dict_data["errors"] + yaml.safe_load(
                                df_error.to_json(orient=RECORDS))
                    else:
                        if DEBUG == ONE:
                            print("Currently no error details!")
                    if not df_tag.empty:
                        df_tag = df_tag.where(pd.notnull(df_tag) == True, None)
                        df_tag = df_tag.drop(columns=CREATE_TS)
                        if not df_module_tag.empty:
                            df_tag = pd.merge(df_tag,
                                              df_module_tag,
                                              on=[
                                                  'tag_name', 'equipment_name',
                                                  'module_name'
                                              ],
                                              how='right')
                            df_tag.drop(['unit_x', 'description_x'],
                                        axis=1,
                                        inplace=True)
                            df_tag.rename(
                                {
                                    'unit_y': 'unit',
                                    'description_y': 'description'
                                },
                                axis=1,
                                inplace=True)
                        dict_data["furnaces"] = dict_data[
                            "furnaces"] + yaml.safe_load(
                                df_tag.to_json(orient=RECORDS))
                    else:
                        if DEBUG == ONE:
                            print(
                                "Currently no tag details for spall efficiency!"
                            )
                else:
                    return dict_data
                return dict_data

            except Exception as e:
                log_error("Exception due to : %s" + str(e))
        except AssertionError as e:
            log_error("Assertion error due to : %s" + str(e))
            return JsonResponse({MESSAGE_KEY: e.args[0][MESSAGE_KEY]},
                                status=e.args[0][STATUS_KEY])

        except Exception as e:
            log_error("Exception due to : %s" + str(e))
            return JsonResponse(
                {MESSAGE_KEY: EXCEPTION_CAUSE.format(traceback.format_exc())},
                status=HTTP_500_INTERNAL_SERVER_ERROR)
    def get_names_values(self):
        """
        This will return the names for every unit with console and equipments.

        Queries the master table (scoped by ``self.get_value``: 0 = all,
        1-4 = one console master query) and groups rows into
        ``[{unit_name, consoles: [{console_name, equipments: [...]}]}]``.

        :return: Json Response with the nested structure (empty list when the
                 query returns no rows), or an error payload on failure.
        """
        try:
            assert self._db_connection, {
                STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR,
                MESSAGE_KEY: DB_ERROR
            }

            # Pick the master query matching the requested console scope.
            # NOTE(review): get_value outside 0-4 executes no query before the
            # fetchall() below — confirm callers never pass other values.
            if self.get_value == 0:
                self._psql_session.execute(MASTER_TABLE_QUERY)
            elif self.get_value == 1:
                self._psql_session.execute(
                    MASTER_TABLE_QUERY_OPERATOR.format(HOT_CONSOLE_1_VALUE))
            elif self.get_value == 2:
                self._psql_session.execute(
                    MASTER_TABLE_QUERY_OPERATOR.format(HOT_CONSOLE_2_VALUE))
            elif self.get_value == 3:
                self._psql_session.execute(
                    MASTER_TABLE_QUERY_OPERATOR.format(COLD_CONSOLE_1_VALUE))
            elif self.get_value == 4:
                self._psql_session.execute(
                    MASTER_TABLE_QUERY_OPERATOR.format(COLD_CONSOLE_2_VALUE))

            df = pd.DataFrame(self._psql_session.fetchall())
            if df.shape[0]:
                unit_name = df["unit_name"].unique()
                console_name = df["console_name"].unique()
                final_val = []
                for unit in unit_name:
                    console_val = []
                    for console in console_name:
                        # Equipment rows belonging to this console/unit pair.
                        equipment_val = {
                            "console_name": console,
                            "equipments": df[[
                                "equipment_tag_name", "equipment_name",
                                "equipment_id"
                            ]][(df["console_name"] == console)
                               & (df["unit_name"] == unit)].to_dict(orient=RECORDS)
                        }
                        console_val.append(equipment_val)

                    # BUG FIX: build a fresh dict per unit. The original reused
                    # a single dict created before the loop, so every entry in
                    # final_val aliased the same object and all units reported
                    # the data of the last unit processed.
                    final_val.append({
                        "unit_name": unit,
                        "consoles": console_val
                    })

                return JsonResponse(final_val, safe=False)
            return JsonResponse([], safe=False)

        except AssertionError as e:
            log_error("Exception due to : %s", e)
            return JsonResponse({MESSAGE_KEY: e.args[0][MESSAGE_KEY]},
                                status=e.args[0][STATUS_KEY])

        except Exception as e:
            log_error(traceback.format_exc())
            return JsonResponse(
                {MESSAGE_KEY: EXCEPTION_CAUSE.format(traceback.format_exc())},
                status=HTTP_500_INTERNAL_SERVER_ERROR)
    def get_values(self):
        """
        This will return the hgi multi line graph data.

        Builds one response entry per HGI series (predicted/actual for north
        and south drums, an optional secondary tag, plus the shared x-axis
        timestamps); each series dict carries data, unit, description and
        min/max values.

        :return: Json Response — a single-element list wrapping the series
                 dicts. Implicitly returns ``None`` when the equipment/module
                 pair is not COKE_DRUM_VALUE / HGI_VALUE.
        """
        try:
            assert self._db_connection, {
                STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR,
                MESSAGE_KEY: DB_ERROR}
            graph = []
            max_data = None
            min_data = None
            # One shared placeholder dict: several response keys below alias
            # this same object when df_data is empty (aliased, not copied).
            empty_dict = {"data": [],
                          "description": None,
                          "unit": None,
                          "min_data": None,
                          "max_data": None
                          }
            dict1 = {}
            dict2 = {}
            dict3 = {}
            dict4 = {}
            dict5 = {}
            dict6 = {}
            dict7 = {"data": []}
            if self.equipment == COKE_DRUM_VALUE and self.module == HGI_VALUE:
                query_params = {
                    START_DATE_REQUEST: self.query_params.GET[START_DATE_REQUEST],
                    END_DATE_REQUEST: self.query_params.GET[END_DATE_REQUEST]
                }

                # NOTE(review): if either date is blank, df_data/min_max are
                # never bound and the later references raise NameError, which
                # is answered by the generic 500 handler — confirm intended.
                if TAG_NAME_REQUEST not in self.query_params.GET:
                    """
                    This will return the hgi multi line graph data without the secondary tag
                    """
                    multi_line_tags = tuple(HGI_MULTI_LINE_TAGS)
                    if query_params[START_DATE_REQUEST] and query_params[END_DATE_REQUEST]:
                        multiline_data = django_search_query_all(HGI_MULTI_LINE_GRAPH.format(
                            self.module,
                            multi_line_tags,
                            query_params[START_DATE_REQUEST],
                            query_params[END_DATE_REQUEST]))
                        df_data = pd.DataFrame(multiline_data)

                        min_max = django_search_query_all(HGI_MIN_MAX_DATA.format(
                            self.module,
                            multi_line_tags))

                elif TAG_NAME_REQUEST in self.query_params.GET:
                    query_params[TAG_NAME_REQUEST] = self.query_params.GET[TAG_NAME_REQUEST]
                    if query_params[START_DATE_REQUEST] and query_params[END_DATE_REQUEST] and query_params[
                        TAG_NAME_REQUEST]:
                        """
                        This will return the hgi multi line graph data with the primary tag
                        """
                        if query_params[TAG_NAME_REQUEST] in HGI_SECONDARY_TAGS:
                            # Mutates the module-level HGI_MULTI_LINE_TAGS
                            # list; the pop() below is meant to restore it
                            # (not re-entrant / thread-safe).
                            HGI_MULTI_LINE_TAGS.append(query_params["tag_name"])
                        else:
                            if DEBUG == 1:
                                print("Sorry tag is not there")
                        tags = tuple(HGI_MULTI_LINE_TAGS)
                        multiline_data = django_search_query_all(HGI_MULTI_LINE_GRAPH.format(
                            self.module,
                            tags,
                            query_params[START_DATE_REQUEST],
                            query_params[END_DATE_REQUEST]))
                        df_data = pd.DataFrame(multiline_data)
                        min_max = django_search_query_all(HGI_MIN_MAX_DATA.format(
                            self.module,
                            tags))
                        # NOTE(review): pop() runs even when the append above
                        # was skipped, dropping a baseline tag — confirm.
                        HGI_MULTI_LINE_TAGS.pop()

                # Select the skeleton of the response: real per-series dicts
                # when data exists, the shared empty placeholder otherwise.
                if TAG_NAME_REQUEST in self.query_params.GET:
                    if not df_data.empty:
                        query_params[TAG_NAME_REQUEST] = self.query_params.GET[TAG_NAME_REQUEST]
                        final_dict = {
                            HGI_PRED: dict1,
                            NORTH_HGI_PRED: dict2,
                            NORTH_HGI_ACTUAL: dict3,
                            SOUTH_HGI_PRED: dict4,
                            SOUTH_HGI_ACTUAL: dict5,
                            query_params[TAG_NAME_REQUEST]: dict6,
                            "x-axis": dict7,
                        }
                    else:
                        final_dict = {
                            HGI_PRED: empty_dict,
                            NORTH_HGI_PRED: empty_dict,
                            NORTH_HGI_ACTUAL: empty_dict,
                            SOUTH_HGI_PRED: empty_dict,
                            SOUTH_HGI_ACTUAL: empty_dict,
                            query_params[TAG_NAME_REQUEST]: empty_dict,
                            "x-axis": dict7,
                        }
                else:
                    if not df_data.empty:
                        final_dict = {
                            HGI_PRED: dict1,
                            NORTH_HGI_PRED: dict2,
                            NORTH_HGI_ACTUAL: dict3,
                            SOUTH_HGI_PRED: dict4,
                            SOUTH_HGI_ACTUAL: dict5,
                            "tags_list": HGI_SECONDARY_TAGS,
                            "x-axis": dict7,
                        }
                    else:
                        final_dict = {
                            HGI_PRED: empty_dict,
                            NORTH_HGI_PRED: empty_dict,
                            NORTH_HGI_ACTUAL: empty_dict,
                            SOUTH_HGI_PRED: empty_dict,
                            SOUTH_HGI_ACTUAL: empty_dict,
                            "tags_list": HGI_SECONDARY_TAGS,
                            "x-axis": dict7
                        }
                df_min_max_data = pd.DataFrame(min_max)
                if not df_data.empty:
                    # Normalize NaN -> None, then group tag values by tag name.
                    df_data = df_data.where(pd.notnull(df_data) == True, None)
                    df_data.sort_values(TIMESTAMP_KEY, ascending=True, inplace=True)
                    data_now = df_data.groupby(TAG_NAME_REQUEST)
                    df_time = df_data[TIMESTAMP_KEY].unique()
                    old_dict = {}
                    for name, group in data_now:
                        old_dict[name] = list(group[TAG_VALUE])
                    keys = []
                    for key in old_dict.keys():
                        keys.append(key)
                    # NOTE(review): min_data/max_data keep their previous
                    # values whenever df_min_max_data is empty, so a later
                    # series can inherit an earlier series' min/max — confirm.
                    if HGI_PRED in keys:
                        unit = df_data[df_data[TAG_NAME_REQUEST].str.contains(HGI_PRED)][UNIT].iloc[0]
                        description = df_data[df_data[TAG_NAME_REQUEST].str.contains(HGI_PRED)][DESCRIPTION].iloc[0]
                        if not df_min_max_data.empty:
                            min_data = \
                                df_min_max_data[df_min_max_data[TAG_NAME_REQUEST].str.contains(HGI_PRED)][
                                    MIN_VALUE].iloc[
                                    0]
                            max_data = \
                                df_min_max_data[df_min_max_data[TAG_NAME_REQUEST].str.contains(HGI_PRED)][
                                    MAX_VALUE].iloc[
                                    0]
                        dict1["data"] = old_dict[HGI_PRED]
                        dict1["unit"] = unit
                        dict1["description"] = description
                        dict1["min_data"] = min_data
                        dict1["max_data"] = max_data
                    elif HGI_PRED not in keys:
                        dict1["data"] = []
                        dict1["unit"] = None
                        dict1["description"] = None
                        dict1["min_data"] = None
                        dict1["max_data"] = None
                    if NORTH_HGI_PRED in keys:
                        unit = df_data[df_data[TAG_NAME_REQUEST].str.contains(NORTH_HGI_PRED)][UNIT].iloc[0]
                        description = df_data[df_data[TAG_NAME_REQUEST].str.contains(NORTH_HGI_PRED)][DESCRIPTION].iloc[
                            0]
                        if not df_min_max_data.empty:
                            min_data = \
                                df_min_max_data[df_min_max_data[TAG_NAME_REQUEST].str.contains(NORTH_HGI_PRED)][
                                    MIN_VALUE].iloc[0]
                            max_data = \
                                df_min_max_data[df_min_max_data[TAG_NAME_REQUEST].str.contains(NORTH_HGI_PRED)][
                                    MAX_VALUE].iloc[0]
                        dict2["data"] = old_dict[NORTH_HGI_PRED]
                        dict2["unit"] = unit
                        dict2["description"] = description
                        dict2["min_data"] = min_data
                        dict2["max_data"] = max_data
                    elif NORTH_HGI_PRED not in keys:
                        dict2["data"] = []
                        dict2["unit"] = None
                        dict2["description"] = None
                        dict2["min_data"] = None
                        dict2["max_data"] = None
                    if NORTH_HGI_ACTUAL in keys:
                        unit = df_data[df_data[TAG_NAME_REQUEST].str.contains(NORTH_HGI_ACTUAL)][UNIT].iloc[0]
                        description = \
                            df_data[df_data[TAG_NAME_REQUEST].str.contains(NORTH_HGI_ACTUAL)][DESCRIPTION].iloc[
                                0]
                        if not df_min_max_data.empty:
                            min_data = \
                                df_min_max_data[df_min_max_data[TAG_NAME_REQUEST].str.contains(NORTH_HGI_ACTUAL)][
                                    MIN_VALUE].iloc[
                                    0]
                            max_data = \
                                df_min_max_data[df_min_max_data[TAG_NAME_REQUEST].str.contains(NORTH_HGI_ACTUAL)][
                                    MAX_VALUE].iloc[
                                    0]
                        dict3["data"] = old_dict[NORTH_HGI_ACTUAL]
                        dict3["unit"] = unit
                        dict3["description"] = description
                        dict3["min_data"] = min_data
                        dict3["max_data"] = max_data
                    elif NORTH_HGI_ACTUAL not in keys:
                        dict3["data"] = []
                        dict3["unit"] = None
                        dict3["description"] = None
                        dict3["min_data"] = None
                        dict3["max_data"] = None
                    if SOUTH_HGI_PRED in keys:
                        unit = df_data[df_data[TAG_NAME_REQUEST].str.contains(SOUTH_HGI_PRED)][UNIT].iloc[0]
                        description = df_data[df_data[TAG_NAME_REQUEST].str.contains(SOUTH_HGI_PRED)][DESCRIPTION].iloc[
                            0]
                        if not df_min_max_data.empty:
                            min_data = \
                                df_min_max_data[df_min_max_data[TAG_NAME_REQUEST].str.contains(SOUTH_HGI_PRED)][
                                    MIN_VALUE].iloc[
                                    0]
                            max_data = \
                                df_min_max_data[df_min_max_data[TAG_NAME_REQUEST].str.contains(SOUTH_HGI_PRED)][
                                    MAX_VALUE].iloc[
                                    0]
                        dict4["data"] = old_dict[SOUTH_HGI_PRED]
                        dict4["unit"] = unit
                        dict4["description"] = description
                        dict4["min_data"] = min_data
                        dict4["max_data"] = max_data
                    elif SOUTH_HGI_PRED not in keys:
                        dict4["data"] = []
                        dict4["unit"] = None
                        dict4["description"] = None
                        dict4["min_data"] = None
                        dict4["max_data"] = None
                    if SOUTH_HGI_ACTUAL in keys:
                        unit = df_data[df_data[TAG_NAME_REQUEST].str.contains(SOUTH_HGI_ACTUAL)][UNIT].iloc[0]
                        description = \
                            df_data[df_data[TAG_NAME_REQUEST].str.contains(SOUTH_HGI_ACTUAL)][DESCRIPTION].iloc[
                                0]
                        if not df_min_max_data.empty:
                            min_data = \
                                df_min_max_data[df_min_max_data[TAG_NAME_REQUEST].str.contains(SOUTH_HGI_ACTUAL)][
                                    MIN_VALUE].iloc[
                                    0]
                            max_data = \
                                df_min_max_data[df_min_max_data[TAG_NAME_REQUEST].str.contains(SOUTH_HGI_ACTUAL)][
                                    MAX_VALUE].iloc[
                                    0]
                        dict5["data"] = old_dict[SOUTH_HGI_ACTUAL]
                        dict5["unit"] = unit
                        dict5["description"] = description
                        dict5["min_data"] = min_data
                        dict5["max_data"] = max_data
                    elif SOUTH_HGI_ACTUAL not in keys:
                        dict5["data"] = []
                        dict5["unit"] = None
                        dict5["description"] = None
                        dict5["min_data"] = None
                        dict5["max_data"] = None
                    # Optional secondary tag requested by the caller.
                    if TAG_NAME_REQUEST in self.query_params.GET:
                        if query_params[TAG_NAME_REQUEST] in keys:
                            unit = \
                                df_data[df_data[TAG_NAME_REQUEST].str.contains(query_params[TAG_NAME_REQUEST])][
                                    UNIT].iloc[0]
                            description = \
                                df_data[df_data[TAG_NAME_REQUEST].str.contains(query_params[TAG_NAME_REQUEST])][
                                    DESCRIPTION].iloc[0]
                            if not df_min_max_data.empty:
                                min_tags = list(df_min_max_data[TAG_NAME_REQUEST])
                                if query_params[TAG_NAME_REQUEST] in min_tags:
                                    min_data = \
                                        df_min_max_data[
                                            df_min_max_data[TAG_NAME_REQUEST].str.contains(
                                                query_params[TAG_NAME_REQUEST])][
                                            MIN_VALUE].iloc[0]
                                    max_data = \
                                        df_min_max_data[
                                            df_min_max_data[TAG_NAME_REQUEST].str.contains(
                                                query_params[TAG_NAME_REQUEST])][
                                            MAX_VALUE].iloc[0]
                                else:
                                    min_data = None
                                    max_data = None

                            dict6["data"] = old_dict[query_params[TAG_NAME_REQUEST]]
                            dict6["unit"] = unit
                            dict6["description"] = description
                            dict6["min_data"] = min_data
                            dict6["max_data"] = max_data
                        else:
                            dict6["data"] = []
                            dict6["unit"] = None
                            dict6["description"] = None
                            dict6["min_data"] = None
                            dict6["max_data"] = None
                    else:
                        if DEBUG == 1:
                            print("sorry")
                    # Shared x-axis: unique, sorted-by-appearance timestamps.
                    dict7["data"] = list(df_time)

                graph.append(final_dict)
                return JsonResponse(graph, safe=False)

        except AssertionError as e:
            log_error("Assertion error due to : %s" + str(e))
            return JsonResponse({MESSAGE_KEY: e.args[0][MESSAGE_KEY]},
                                status=e.args[0][STATUS_KEY])

        except Exception as e:
            log_error("Exception due to : %s" + str(e))
            return JsonResponse({MESSAGE_KEY: EXCEPTION_CAUSE.format(
                traceback.format_exc())},
                status=HTTP_500_INTERNAL_SERVER_ERROR)
# Example #20
    def get_values(self):
        """
        This will return the revenue loss data for the overhead pdi module value.

        Runs the detailed revenue query (open-ended variant when the start
        date is missing/blank), extracts the PDI slice as the revenue graph,
        then runs the cost query for the updated-price records.

        :return: Json Response - a single-element list with
                 {unit, y_axis, x_axis, updated_price}, or an error payload.
                 Implicitly None when the equipment is not COKE_DRUM_VALUE or
                 query_params is empty (preserved original behavior).
        """
        try:
            assert self._db_connection, {
                STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR,
                MESSAGE_KEY: DB_ERROR
            }

            if self.equipment == COKE_DRUM_VALUE:
                if self.query_params:
                    # A missing or blank start date selects the open-ended
                    # ("null start date") variants of both queries below;
                    # computed once instead of duplicating the test.
                    start_date_missing = (
                        START_DATE_REQUEST not in self.query_params
                        or not self.query_params[START_DATE_REQUEST])

                    if start_date_missing:
                        self._psql_session.execute(
                            DETAILED_REVENUE_GRAPH_NULL_START_DATE.format(
                                self.module,
                                self.query_params[TAG_NAME_REQUEST],
                                self.query_params[END_DATE_REQUEST]))
                    else:
                        self._psql_session.execute(
                            DETAILED_REVENUE_GRAPH.format(
                                self.module,
                                self.query_params[TAG_NAME_REQUEST],
                                self.query_params[START_DATE_REQUEST],
                                self.query_params[END_DATE_REQUEST]))

                    df_data = pd.DataFrame(self._psql_session.fetchall())
                    graph = []
                    # ROBUSTNESS FIX: pre-populate the payload so every
                    # response carries the same keys. The original omitted
                    # unit/y_axis/x_axis whenever df_data was non-empty but
                    # the PDI slice below was empty.
                    temp = {"unit": None, "y_axis": [], "x_axis": []}
                    if not df_data.empty:
                        # Normalize NaN -> None and order by timestamp.
                        df_data = df_data.where(
                            pd.notnull(df_data) == True, None)
                        df_data.sort_values(TIMESTAMP_KEY,
                                            ascending=True,
                                            inplace=True)
                        df_temp = df_data[df_data[MODULE_NAME] == PDI_VALUE]

                        if not df_temp.empty:
                            temp["unit"] = df_temp[UNIT].iloc[0]
                            temp["y_axis"] = list(df_temp[TAG_VALUE])
                            temp["x_axis"] = list(df_temp[TIMESTAMP_KEY])

                    if start_date_missing:
                        self._psql_session.execute(
                            DETAILED_COST_GRAPH_NULL_START_DATE.format(
                                self.module,
                                self.query_params[END_DATE_REQUEST]))
                    else:
                        self._psql_session.execute(
                            DETAILED_COST_GRAPH.format(
                                self.module,
                                self.query_params[START_DATE_REQUEST],
                                self.query_params[END_DATE_REQUEST]))
                    df_cost_data = pd.DataFrame(self._psql_session.fetchall())
                    if not df_cost_data.empty:
                        df_cost_data = df_cost_data.where(
                            pd.notnull(df_cost_data) == True, None)
                        df_cost_data.sort_values(MODIFIED_TIME,
                                                 ascending=True,
                                                 inplace=True)
                        temp["updated_price"] = df_cost_data.to_dict(
                            orient=RECORDS)
                    else:
                        temp["updated_price"] = []
                    graph.append(temp)

                    return JsonResponse(graph, safe=False)

        except AssertionError as e:
            log_error("Assertion error due to : %s" + str(e))
            return JsonResponse({MESSAGE_KEY: e.args[0][MESSAGE_KEY]},
                                status=e.args[0][STATUS_KEY])

        except Exception as e:
            log_error("Exception due to : %s" + str(e))
            return JsonResponse(
                {MESSAGE_KEY: EXCEPTION_CAUSE.format(traceback.format_exc())},
                status=HTTP_500_INTERNAL_SERVER_ERROR)
    def update_equip(self, body, equipment, feed_name, is_active):
        """
        This function will update external targets.

        :param body: payload keyed by case name; each entry holds
            'performance_tags' and 'external_targets' entries — TODO confirm
            exact schema against callers
        :param equipment: sequence of equipment ids; ``int(equipment[0]) < 15``
            takes the multi-equipment path and ``> 14`` the single-equipment
            path (presumably furnace vs non-furnace — verify)
        :param feed_name: feed name substituted into the LBT update query
        :param is_active: string flag; 'true' enables the first path
        :return: 0 on completion; JsonResponse on assertion failure, limit
            violation (404) or unexpected error (500)
        """
        try:
            assert self._db_connection, {
                STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR,
                MESSAGE_KEY: DB_ERROR
            }
            conn = pg_connection()
            if conn:
                cursor = conn.cursor()
                try_now = list(body.keys())
                count = 0
                counter = 0
                performance_case_name = body[
                    try_now[count]]['performance_tags']

                if is_active == 'true' and int(equipment[0]) < 15:
                    try:
                        try:
                            for each_equipment in equipment:
                                try:
                                    # SECURITY: SQL built via str.format from
                                    # request data — injection risk; prefer
                                    # parameterized queries.
                                    for each in json.loads(
                                            json.dumps(body[try_now[counter]]
                                                       ['external_targets'])):
                                        if float(each["min"]) <= float(
                                                each['target']):
                                            update_external_lbt = SET_UPDATE_EXTERNAL_TAGS_LBT.format(
                                                ('Between' + ' ' +
                                                 each["min"] + ' and ' +
                                                 each["max"]), each['target'],
                                                each['is_active'], is_active,
                                                each_equipment, feed_name,
                                                each['parameter'])
                                            cursor.execute(update_external_lbt)
                                        else:
                                            pass
                                    counter += 1
                                except Exception as e:
                                    # NOTE(review): per-equipment errors are
                                    # swallowed silently, and counter is not
                                    # advanced on failure — confirm intended.
                                    pass
                            try:
                                conn.commit()
                            except Exception as commit_err:
                                log_error(commit_err)

                        except Exception as e:
                            log_error("The Exception is" + str(e))
                        if int(equipment[0]) < 15:
                            for each_equipment in equipment:
                                update_perf_lbt = update_perf(
                                    body[try_now[count]]['performance_tags'],
                                    each_equipment, is_active)
                                cursor.execute(update_perf_lbt)
                                count += 1
                        else:
                            pass
                    except Exception as err:
                        # NOTE(review): failures of this whole branch are
                        # silently ignored.
                        pass
                elif int(equipment[0]) > 14:
                    try:
                        for each in json.loads(
                                json.dumps(
                                    body[try_now[count]]['external_targets'])):
                            if float(each["min"]) <= float(each['target']):
                                # SECURITY: same str.format SQL injection risk
                                # as above.
                                update_external_lbt = UPDATED_QUERY_FOR_EXTERANL_TARGETS_NON_FURNACE.format(
                                    ('Between' + ' ' + each["min"] + ' and ' +
                                     each["max"]), each['target'],
                                    each['is_active'], equipment[0],
                                    each['parameter'])

                                cursor.execute(update_external_lbt)
                            else:
                                # NOTE(review): returning here skips commit and
                                # leaves conn/cursor unclosed — confirm.
                                return JsonResponse({MESSAGE_KEY: SET_LIMIT},
                                                    status=404)

                        if int(equipment[0]) > 14:
                            update_perf_lbt = update_perf(
                                body[try_now[0]]['performance_tags'],
                                equipment[0], is_active)
                            cursor.execute(update_perf_lbt)
                        else:
                            pass

                        # Refresh the case/equipment mapping for this id.
                        case_equip = UPDATE_CASE_EQUIPMENT_MAPPING.format(
                            equipment[0])
                        cursor.execute(case_equip)
                        insert_case = INSERT_CASE_EQUIP_MAPPING.format(
                            equipment[0], performance_case_name)
                        cursor.execute(insert_case)
                    except Exception as err:
                        log_error("The Exception is" + str(err))
                else:
                    print('The Function Done')
                try:
                    conn.commit()

                except Exception as commit_err:
                    log_error(commit_err)

                if conn:
                    cursor.close()
                    conn.close()
            return 0

        except AssertionError as e:
            log_error('Exception due to update_equip Function: %s' + str(e))
            return JsonResponse({MESSAGE_KEY: e.args[0][MESSAGE_KEY]},
                                status=e.args[0][STATUS_KEY])

        except Exception as e:
            log_error(traceback.format_exc())
            return JsonResponse(
                {MESSAGE_KEY: EXCEPTION_CAUSE.format(traceback.format_exc())},
                status=HTTP_500_INTERNAL_SERVER_ERROR)
# Example #22
    def download_file(self):
        """
        Download a zip archive containing the selected files (csv/xml/joblib)
        stored for the selected algorithm.

        Reads the comma-separated ``files`` query parameter, fetches the
        matching rows from Cassandra, re-creates each file inside a zip named
        ``<algorithm_name>.zip`` and streams it back.
        :return: HttpResponse with the zip attachment, or a JsonResponse
                 describing the error
        """
        try:
            assert self._db_connection, {
                STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR,
                MESSAGE_KEY: DB_ERROR
            }

            files = self.query_params["files"].split(",")

            # A one-element Python tuple renders as "('x',)", which is not
            # valid CQL, hence the dedicated single-file query.
            if len(files) == 1:
                query = SINGLE_FILE_DOWNLOAD_QUERY.format(
                    NAME, TABLE_NAME, self.algorithm_name, files[0])
            else:
                query = MULTIPLE_FILES_DOWNLOAD_QUERY.format(
                    NAME, TABLE_NAME, self.algorithm_name, tuple(files))

            result_set_list = list(self._csql_session.execute(query))

            # Every requested name must resolve to exactly one row.
            if len(result_set_list) != len(files):
                return JsonResponse("Please enter the correct file names",
                                    safe=False)

            df_data = pd.DataFrame(result_set_list)

            file_to_download = self.algorithm_name + ".zip"
            # Context manager guarantees the archive is finalized and the
            # handle released even if serializing one of the rows raises.
            with zipfile.ZipFile(file_to_download, "w") as zip_object:
                # Each DB row is one file entry; rebuild it inside the zip
                # in its original format based on the stored file extension.
                for _, row in df_data.iterrows():
                    extension = os.path.splitext(row['file_param_name'])[1]
                    if extension == ".csv":
                        df = pd.read_json(row['value'])
                        zip_object.writestr(row['file_param_name'],
                                            df.to_csv(index=False))
                    elif extension == ".xml":
                        obj = json.loads(row['value'])
                        xml = dicttoxml.dicttoxml(obj, root=False,
                                                  attr_type=False)
                        dom = parseString(xml)
                        zip_object.writestr(row['file_param_name'],
                                            dom.toprettyxml())
                    elif extension == ".joblib":
                        # Content was HTML-escaped on upload; unescape back
                        # to the original text before archiving.
                        value = unescape(row['value'])
                        zip_object.writestr(row['file_param_name'], value)

            with open(file_to_download, 'rb') as fh:
                response = HttpResponse(fh.read(),
                                        content_type="application/zip")
                # RFC 6266: the header parameter is "filename", not
                # "file_name" (the latter is ignored by browsers).
                response['Content-Disposition'] = (
                    'attachment; filename=' +
                    os.path.basename(file_to_download))
                return response

        except AssertionError as e:
            log_error("Exception due to : %s", e)
            return JsonResponse({MESSAGE_KEY: e.args[0][MESSAGE_KEY]},
                                status=e.args[0][STATUS_KEY])
        except Exception as e:
            log_error(traceback.format_exc())
            return JsonResponse(
                {MESSAGE_KEY: EXCEPTION_CAUSE.format(traceback.format_exc())},
                status=HTTP_500_INTERNAL_SERVER_ERROR)
Example #23
0
    def upload_file(self):
        """
        Upload the selected csv/xml/joblib files into the Cassandra table
        for the given algorithm, storing each file's content as JSON text.

        Validates that the algorithm and all given file names already exist,
        then batch-inserts one row per uploaded file.
        :return: JsonResponse with a success or error message
        """
        try:
            assert self._db_connection, {
                STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR,
                MESSAGE_KEY: DB_ERROR
            }

            # Fail fast: without files the existence query below would be
            # built from an empty name list and the batch would be empty.
            if not self.files:
                return JsonResponse({MESSAGE_KEY: "No files to upload"},
                                    status=HTTP_500_INTERNAL_SERVER_ERROR)

            file_names_list = self.file_names
            # NOTE(review): queries are assembled via string formatting;
            # consider parameterized statements if inputs are untrusted.
            select_query = SELECT_ALGORITHM_NAME_QUERY.format(
                NAME, TABLE_NAME, self.algo_name,
                ",".join("'" + name + "'" for name in file_names_list))
            result_set = self._csql_session.execute(select_query)
            if result_set[0]['count'] == 0 or result_set[0]['count'] < len(
                    file_names_list):
                return JsonResponse(
                    {MESSAGE_KEY:
                     "Please give the existing algorithm or file name"},
                    status=HTTP_500_INTERNAL_SERVER_ERROR)

            batch = BatchStatement()
            for file in self.files:

                if file.name not in self.file_names:
                    return JsonResponse(
                        {MESSAGE_KEY:
                         "Uploaded file name(" + file.name +
                         ") not found in given file name list"},
                        status=HTTP_500_INTERNAL_SERVER_ERROR)

                description = None
                if file.name in self.description:
                    description = self.description[file.name]
                # Millisecond epoch timestamp recorded per row.
                last_modified_date = str(round(time.time() * 1000))

                extension = os.path.splitext(file.name)[1]
                json_data = ""
                # Special case: this particular csv ships with a
                # non-standard encoding.
                if self.algo_name == 'last_10_tmt' and file.name == 'features.csv':
                    file_data = pandas.read_csv(file,
                                                encoding='unicode escape')
                    json_data = file_data.to_json()
                elif extension == ".csv":
                    file_data = pandas.read_csv(file, encoding='ISO-8859-1')
                    json_data = file_data.to_json()
                elif extension == ".xml":
                    file_data = et.parse(file)
                    xml_str = ElementTree.tostring(file_data.getroot(),
                                                   encoding='unicode')
                    json_data = json.dumps(xmltodict.parse(xml_str))
                elif extension == ".joblib":
                    # Escape so the text survives being embedded in the
                    # CQL string literal below.
                    json_datas = joblib.load(file)
                    json_data = escape(str(json_datas))

                # Insert query into the Cassandra table.
                insert_query = FILE_UPLOAD_QUERY.format(
                    NAME, TABLE_NAME, self.algo_name, file.name, description,
                    "textAsBlob('" + json_data + "')", last_modified_date,
                    FLAG)

                batch.add(SimpleStatement(insert_query))

            self._csql_session.execute(batch, timeout=200.0)
            return JsonResponse({MESSAGE_KEY: UPLOADED_SUCCESSFULLY},
                                safe=False)

        except AssertionError as e:
            log_error("Exception due to : %s", e)
            return JsonResponse({MESSAGE_KEY: e.args[0][MESSAGE_KEY]},
                                status=e.args[0][STATUS_KEY])
        except Exception as e:
            log_error(traceback.format_exc())
            return JsonResponse(
                {MESSAGE_KEY: EXCEPTION_CAUSE.format(traceback.format_exc())},
                status=HTTP_500_INTERNAL_SERVER_ERROR)
Example #24
0
    def get_values(self):
        """
        Build the outage multiline graph payload for the coke drum outage
        module: Coke Height, Foam Height, Current Outage, an optional
        user-selected primary tag, the shared x-axis timestamps and the
        online-drum series.
        :return: JsonResponse wrapping a single graph dict in a list
        """
        try:
            assert self._db_connection, {
                STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR,
                MESSAGE_KEY: DB_ERROR
            }
            graph = []
            # Placeholder used for every series when no data came back.
            empty_dict = {
                "data": [],
                "description": None,
                "unit": None,
                "min_data": None,
                "max_data": None
            }
            dict1 = {}  # Coke Height series
            dict2 = {}  # Foam Height series
            dict3 = {}  # Current Outage series
            dict4 = {}  # optional primary-tag series
            dict5 = {"data": []}  # shared x-axis timestamps
            if self.equipment == COKE_DRUM_VALUE and self.module == OUTAGE_VALUE:
                query_params = {
                    START_DATE_REQUEST:
                    self.query_params.GET[START_DATE_REQUEST],
                    END_DATE_REQUEST: self.query_params.GET[END_DATE_REQUEST]
                }
                # Defaults guard against NameError when neither branch below
                # runs (e.g. an empty date range in the request).
                df_data = pd.DataFrame()
                min_max = []

                if PRIMARY_TAG not in self.query_params.GET:
                    # Multiline graph data without a primary tag.
                    multiline_tags = tuple(LIST_OF_OUTAGE_MULTILINE_TAGS)
                    if query_params[START_DATE_REQUEST] and query_params[
                            END_DATE_REQUEST]:
                        multi_line = django_search_query_all(
                            OUTAGE_MULTILINE.format(
                                self.module, multiline_tags,
                                query_params[START_DATE_REQUEST],
                                query_params[END_DATE_REQUEST]))
                        df_data = pd.DataFrame(multi_line)
                        min_max = django_search_query_all(
                            OUTAGE_MIN_MAX_DATA.format(self.module,
                                                       multiline_tags))
                else:
                    query_params[PRIMARY_TAG] = self.query_params.GET[
                        PRIMARY_TAG]
                    if query_params[START_DATE_REQUEST] and query_params[
                            END_DATE_REQUEST] and query_params[PRIMARY_TAG]:
                        # Multiline graph data including the primary tag.
                        # The shared module-level tag list is temporarily
                        # extended and restored via pop() afterwards.
                        LIST_OF_OUTAGE_MULTILINE_TAGS.append(
                            query_params[PRIMARY_TAG])
                        tags = tuple(LIST_OF_OUTAGE_MULTILINE_TAGS)
                        multi_line = django_search_query_all(
                            OUTAGE_MULTILINE.format(
                                self.module, tags,
                                query_params[START_DATE_REQUEST],
                                query_params[END_DATE_REQUEST]))
                        df_data = pd.DataFrame(multi_line)
                        min_max = django_search_query_all(
                            OUTAGE_MIN_MAX_DATA.format(self.module, tags))
                        LIST_OF_OUTAGE_MULTILINE_TAGS.pop()

                has_data = not df_data.empty
                if PRIMARY_TAG in self.query_params.GET:
                    query_params[PRIMARY_TAG] = self.query_params.GET[
                        PRIMARY_TAG]
                    final_dict = {
                        "Coke Height": dict1 if has_data else empty_dict,
                        "Foam Height": dict2 if has_data else empty_dict,
                        "Current Outage": dict3 if has_data else empty_dict,
                        query_params[PRIMARY_TAG]:
                        dict4 if has_data else empty_dict,
                        "x-axis": dict5,
                        "online-drum": []
                    }
                else:
                    final_dict = {
                        "Coke Height": dict1 if has_data else empty_dict,
                        "Foam Height": dict2 if has_data else empty_dict,
                        "Current Outage": dict3 if has_data else empty_dict,
                        "x-axis": dict5,
                        "tags_list": LIST_OF_OUTAGE_PRIMARY_TAGS,
                        "online-drum": []
                    }
                df_min_max_data = pd.DataFrame(min_max)

                if has_data:
                    df_data = df_data.where(pd.notnull(df_data) == True, None)
                    df_data.sort_values(TIMESTAMP_KEY,
                                        ascending=True,
                                        inplace=True)
                    data_now = df_data.groupby(TAG_NAME_REQUEST)
                    df_time = df_data[TIMESTAMP_KEY].unique()
                    # online-drum falls back through the three base series.
                    # The "current" lookup previously reused FOAM_HEIGHT_TAG
                    # (copy-paste); it now groups on OUTAGE_TREND_TAG.
                    data_online_coke = data_now.get_group(COKE_HEIGHT_TAG)
                    data_online_foam = data_now.get_group(FOAM_HEIGHT_TAG)
                    data_online_current = data_now.get_group(OUTAGE_TREND_TAG)
                    if not data_online_coke.empty:
                        final_dict["online-drum"] = list(
                            data_online_coke[DRUM_ONLINE])
                    elif not data_online_foam.empty:
                        final_dict["online-drum"] = list(
                            data_online_foam[DRUM_ONLINE])
                    elif not data_online_current.empty:
                        final_dict["online-drum"] = list(
                            data_online_current[DRUM_ONLINE])

                    # tag name -> list of values for that tag's series
                    old_dict = {
                        name: list(group[TAG_VALUE])
                        for name, group in data_now
                    }

                    def _fill(target, tag_key, contains_key):
                        # Populate one series dict with data, unit,
                        # description and min/max for *tag_key*, or blank it
                        # out when the tag is absent from the result set.
                        if tag_key in old_dict:
                            rows = df_data[df_data[TAG_NAME_REQUEST].str.
                                           contains(contains_key)]
                            mm = df_min_max_data[
                                df_min_max_data[TAG_NAME_REQUEST].str.
                                contains(contains_key)]
                            target["data"] = old_dict[tag_key]
                            target["unit"] = rows[UNIT].iloc[0]
                            target["description"] = rows[DESCRIPTION].iloc[0]
                            target["min_data"] = mm[MIN_VALUE].iloc[0]
                            target["max_data"] = mm[MAX_VALUE].iloc[0]
                        else:
                            target["data"] = None
                            target["unit"] = None
                            target["description"] = None
                            target["min_data"] = None
                            target["max_data"] = None

                    _fill(dict1, COKE_HEIGHT_TAG, COKE_HEIGHT)
                    _fill(dict2, FOAM_HEIGHT_TAG, FOAM_HEIGHT)
                    _fill(dict3, OUTAGE_TREND_TAG, CURRENT_OUTAGE)

                    if PRIMARY_TAG in self.query_params.GET:
                        primary = query_params[PRIMARY_TAG]
                        if primary in old_dict:
                            # The primary tag's group supersedes the
                            # fallback online-drum series when present.
                            data_online_primary = data_now.get_group(primary)
                            if not data_online_primary.empty:
                                final_dict["online-drum"] = list(
                                    data_online_primary[DRUM_ONLINE])
                        _fill(dict4, primary, primary)
                    dict5["data"] = list(df_time)

                graph.append(final_dict)
                return JsonResponse(graph, safe=False)

        except AssertionError as e:
            log_error("Assertion error due to : %s" + str(e))
            return JsonResponse({MESSAGE_KEY: e.args[0][MESSAGE_KEY]},
                                status=e.args[0][STATUS_KEY])

        except Exception as e:
            log_error("Exception due to : %s" + str(e))
            return JsonResponse(
                {MESSAGE_KEY: EXCEPTION_CAUSE.format(traceback.format_exc())},
                status=HTTP_500_INTERNAL_SERVER_ERROR)
Example #25
0
    def get_values(self):
        """
        Fetch the dynamic-benchmarking configuration for the console and
        equipment: the performance tag (case name), targets, match tags,
        noise tags and the full performance-tag list.
        :return: JsonResponse with the configuration dict
        """
        try:
            assert self._db_connection, {
                STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR,
                MESSAGE_KEY: DB_ERROR
            }

            self._psql_session.execute(CONFIGURATON_SINGLE_PERF)
            perf = pd.DataFrame(self._psql_session.fetchall())
            if not perf.empty:
                # to_string() prefixes the row index; strip digits to keep
                # only the case name itself.
                value = perf['case_name'].to_string()
                result = ''.join([i for i in value if not i.isdigit()]).strip()
            else:
                result = []

            dict_data = {
                "Targets": [],
                "Performance_tag": result,
                "Match_tags": [],
                "Noise_Tags": [],
                "Performance_tags_list": []
            }

            # Each section is fetched independently; a failure in one leaves
            # its key at the empty default instead of failing the response.
            for query, key in (
                (CONFIGURATION_TARGETS_TAGS, "Targets"),
                (CONFIGURATION_MATCH_TAGS, "Match_tags"),
                (CONFIGURATION_PERF_TAGS_LIST, "Performance_tags_list"),
                (CONFIGURATION_NOISE_TAGS, "Noise_Tags"),
            ):
                try:
                    self._psql_session.execute(query)
                    df = pd.DataFrame(self._psql_session.fetchall())

                    if not df.empty:
                        df = df.where(pd.notnull(df) == True, None)
                        dict_data[key] = yaml.safe_load(
                            df.to_json(orient=RECORDS))

                except Exception as e:
                    log_error(e)

            return JsonResponse(dict_data, safe=False)

        except AssertionError as e:
            log_error(e)
            return JsonResponse({MESSAGE_KEY: e.args[0][MESSAGE_KEY]},
                                status=e.args[0][STATUS_KEY])

        except Exception as e:
            log_error(traceback.format_exc())
            return JsonResponse(
                {MESSAGE_KEY: EXCEPTION_CAUSE.format(traceback.format_exc())},
                status=HTTP_500_INTERNAL_SERVER_ERROR)
    def get_externl_targets(self):
        """
        Fetch the latest external-target results grouped per console, each
        console carrying its equipment rows, and record the sync timestamp.
        :return: JsonResponse - a list wrapping the per-console groupings,
                 an empty list when no rows match, or a notice string when
                 no timestamp row exists yet
        """
        try:
            assert self._db_connection, {
                STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR,
                MESSAGE_KEY: DB_ERROR
            }

            self._psql_session.execute(
                "select timestamp from color_coding_graph limit 1 ")

            timestamp_record = self._psql_session.fetchone()
            if not timestamp_record:
                # safe=False is required: the payload is a plain string and
                # Django's JsonResponse raises TypeError for non-dict
                # payloads without it.
                return JsonResponse(
                    "No data available right now ! We'll be sending the empty response soon! ",
                    safe=False)

            self.sync_time = timestamp_record[TIMESTAMP_KEY]

            self._psql_session.execute(GET_EXTERNAL_TARGET_RESULT)

            df = pd.DataFrame(self._psql_session.fetchall())

            if df.shape[0]:
                df = df.where(pd.notnull(df) == True, None)
                # Columns exposed per equipment row in the response.
                columns = [
                    "equipment_tag_name", "min", "max", "target", "actual",
                    "feed_type", "comment", "description", "status",
                    "alert_flag", "tag", "equipment_id"
                ]
                console_val = []
                for console in df["console_name"].unique():
                    console_val.append({
                        "console_name": console,
                        "equipments": df[columns][
                            df["console_name"] == console].to_dict(
                                orient=RECORDS)
                    })

                # The response is a single-element list wrapping the
                # per-console groupings (kept for client compatibility).
                return JsonResponse([console_val], safe=False)
            else:
                return JsonResponse([], safe=False)

        except AssertionError as e:
            log_error('Exception due to get_externl_targets Function: %s' +
                      str(e))
            return JsonResponse({MESSAGE_KEY: e.args[0][MESSAGE_KEY]},
                                status=e.args[0][STATUS_KEY])

        except Exception as e:
            log_error(traceback.format_exc())
            return JsonResponse(
                {MESSAGE_KEY: EXCEPTION_CAUSE.format(traceback.format_exc())},
                status=HTTP_500_INTERNAL_SERVER_ERROR)
Example #27
0
    def get_furnaces(self):
        """
        Fetch external targets and performance tags for the selected
        equipment (single id or comma-separated list), keyed by equipment
        tag name.
        :return: JsonResponse mapping equipment_tag_name to its
                 external_targets and performance_tags
        """
        try:
            assert self._db_connection, {
                STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR,
                MESSAGE_KEY: DB_ERROR
            }
            if self.query_params:
                equipment = self.query_params[EQUIPMENT].split(",")
                # Build a CQL/SQL-friendly id list: a one-element Python
                # tuple would render as "(x,)", so format it manually.
                if len(equipment) == 1:
                    equipment_param = '(' + str(equipment[0]) + ')'
                else:
                    equipment_param = tuple(equipment)
                perform_list_all = []
                try:
                    # Ids below 15 are furnace-type equipment; others use
                    # the non-furnace external-targets query.
                    if self.query_params[IS_ACTIVE] == "true" and int(
                            equipment[0]) < 15:
                        self._psql_session.execute(
                            MULTIPLE_CONFIG_EQUIPMENT.format(
                                self.query_params[IS_ACTIVE], equipment_param,
                                self.query_params[FEED_NAME]))
                    elif int(equipment[0]) > 14:
                        self._psql_session.execute(
                            NON_FURNACE_EXTERNAL_TARGETS.format(
                                equipment_param))
                    else:
                        pass

                    df = pd.DataFrame(self._psql_session.fetchall())

                    dt = df.groupby('equipment_tag_name').apply(
                        lambda x: x.to_json(orient='records'))

                    df.sort_values('parameter', ascending=True, inplace=True)
                    obj = {}
                    for each_data in dt:
                        # All rows of a group share the same tag name, so
                        # one parse and one entry per group is enough.
                        records = json.loads(each_data)
                        obj[records[0]['equipment_tag_name']] = {
                            'external_targets': records,
                            'performance_tags': None
                        }

                    try:
                        self._psql_session.execute(
                            MULTIPLE_CONFIG_CASE_NAME_PERFORMACE_TAGS.format(
                                equipment_param))

                    except Exception as e:
                        log_error(
                            'Exception due to get_furnaces Function: %s' +
                            str(e))

                    perf_list = json.loads(
                        json.dumps(self._psql_session.fetchall()))

                    try:
                        self._psql_session.execute(
                            ALL_PERF_TAGS_FOR_NON_FURNACES.format(
                                equipment_param))

                    except Exception as e:
                        log_error(
                            'Exception due to get_furnaces Function: %s' +
                            str(e))
                    perform_list = json.loads(
                        json.dumps(self._psql_session.fetchall()))

                    for each_perform in perform_list:
                        perform_list_all.append(each_perform['result'])

                    for each_data in perf_list:
                        try:
                            obj[each_data["equipment_tag_name"]][
                                "performance_tags"] = each_data['case_name']
                        except Exception:
                            # Equipment without an external-targets entry is
                            # silently skipped, as before.
                            pass
                    for each_data in perform_list_all:
                        try:
                            obj[each_data["equipment_tag_name"]][
                                "performance_tags_list"] = each_data[
                                    'parameter']
                        except Exception:
                            pass
                    return JsonResponse(obj, safe=False, status=200)

                except Exception as e:
                    log_error('Exception due to get_furnaces Function: %s' +
                              str(e))
                    return JsonResponse({"message": str(e)}, safe=False)

        except AssertionError as e:
            log_error('Exception due to get_furnaces Function: %s' + str(e))
            return JsonResponse({MESSAGE_KEY: e.args[0][MESSAGE_KEY]},
                                status=e.args[0][STATUS_KEY])

        except Exception as e:
            log_error(traceback.format_exc())
            return JsonResponse(
                {MESSAGE_KEY: EXCEPTION_CAUSE.format(traceback.format_exc())},
                status=HTTP_500_INTERNAL_SERVER_ERROR)
    def _sanky_overview(self, query, log_label):
        """
        Fetch one overview row (feed slate, ambient condition, timestamp,
        historical best) for a Sankey header section.

        :param query: SQL string to execute.
        :param log_label: label used in the error log on failure.
        :return: dict with the four overview fields, or {} when the query
                 returns no rows or raises (best-effort, error is logged).
        """
        overview = {}
        try:
            self._psql_session.execute(query)
            df = pd.DataFrame(self._psql_session.fetchall())
            if not df.empty:
                # NOTE: "hystorical_best" spelling matches the DB column and
                # the existing API contract -- do not "fix" it here without
                # updating every consumer.
                overview = {
                    "feed_slate": df.feed_slate.iloc[0],
                    "ambient_condition": df.ambient_condition.iloc[0],
                    "timestamp": df.timestamp.iloc[0],
                    "hystorical_best": df.hystorical_best.iloc[0],
                }
        except Exception as e:
            log_error('Exception in {}: {}'.format(log_label, e))
        return overview

    def _sanky_config_message(self, query, target, log_label):
        """
        Attach the config message to *target* in place.

        Sets target["config_message"] to the first tag_value_txt row, or None
        when the query returns no rows. On failure the key is left unset and
        the error is logged (matching the legacy per-section behaviour).
        """
        try:
            self._psql_session.execute(query)
            df = pd.DataFrame(self._psql_session.fetchall())
            target["config_message"] = (df.tag_value_txt.iloc[0]
                                        if not df.empty else None)
        except Exception as e:
            log_error('Exception in {}: {}'.format(log_label, e))

    def _sanky_records(self, query, target, label, log_label):
        """
        Store the query result under target[label] as a list of row dicts.

        Empty result -> []. On failure the key is left unset and the error is
        logged (matching the legacy per-section best-effort behaviour).
        """
        try:
            self._psql_session.execute(query)
            df = pd.DataFrame(self._psql_session.fetchall())
            if not df.empty:
                target[label] = yaml.safe_load(df.to_json(orient="records"))
            else:
                target[label] = []
        except Exception as e:
            log_error('Exception in {}: {}'.format(log_label, e))

    def get_sanky_output(self):
        """
        Build the Sankey diagram payload: overview headers (feed slate,
        ambient condition, timestamp, historical best, config message) plus
        the Total Margin / Total Raw Material / Product / Energy breakdowns
        for both the maximum-production and minimum-specific-energy cases.

        Each section is fetched best-effort: a failing query only logs and
        leaves that section missing/empty; the rest of the payload is still
        returned.
        :return: Json Response
        """
        try:
            assert self._db_connection, {
                STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR,
                MESSAGE_KEY: DB_ERROR
            }

            dict_data = {}
            try:
                dict_data["max_production_overview"] = self._sanky_overview(
                    max_production_overview, "max_production_overview_data")
                self._sanky_config_message(
                    max_production_config_msg_data,
                    dict_data["max_production_overview"],
                    "max_production_overview_data_config_message")

                dict_data["min_spec_energy_overview"] = self._sanky_overview(
                    min_sp_energy_overview, "min_spec_energy_overview")
                self._sanky_config_message(
                    min_sp_energy_config_msg_data,
                    dict_data["min_spec_energy_overview"],
                    "min_spec_energy_overview_data_config_message")

                max_section = dict_data["max_production_data"] = {}
                self._sanky_records(max_production_total_margin, max_section,
                                    "Total Margin",
                                    "max_production_data total_margin")
                self._sanky_records(max_production_total_raw_material,
                                    max_section, "Total Raw Material",
                                    "max_production_data total_raw_material")
                self._sanky_records(max_production_product_data, max_section,
                                    "Product",
                                    "max_production_data product data")
                self._sanky_records(max_production_energy_data, max_section,
                                    "Energy",
                                    "max_production_data energy data")

                min_section = dict_data["min_spec_energy_data"] = {}
                self._sanky_records(min_sp_energy_total_margin, min_section,
                                    "Total Margin",
                                    "min_spec_energy_data total_margin")
                self._sanky_records(min_sp_energy_total_raw_material,
                                    min_section, "Total Raw Material",
                                    "min_spec_energy_data total_raw_material")
                self._sanky_records(min_sp_energy_product_data, min_section,
                                    "Product",
                                    "min_spec_energy_data product data")
                self._sanky_records(min_sp_energy_energy_data, min_section,
                                    "Energy",
                                    "min_spec_energy_data energy data")
            except Exception as e:
                log_error('Exception due to get_sanky_output Function: {}'
                          .format(e))
            return JsonResponse(dict_data, safe=False)

        except AssertionError as e:
            log_error('Exception in get_sanky_output Function: {}'.format(e))
            return JsonResponse({MESSAGE_KEY: e.args[0][MESSAGE_KEY]},
                                status=e.args[0][STATUS_KEY])

        except Exception as e:
            log_error(traceback.format_exc())
            return JsonResponse(
                {MESSAGE_KEY: EXCEPTION_CAUSE.format(traceback.format_exc())},
                status=HTTP_500_INTERNAL_SERVER_ERROR)
    def get_values(self):
        """
        Build the furnace run-length multi-line graph payload.

        Reads start/end dates from the request query string, fetches the
        per-tag time series for the selected furnace module, and attaches
        each tag's static min/max limits. Tags that returned no data are
        padded with empty placeholder entries so the client always sees the
        full tag set.

        NOTE(review): when ``self.equipment`` is not ``FURNACE_VALUE`` or
        either date parameter is falsy, the method falls through and returns
        ``None`` -- presumably the caller guards against that; confirm.
        :return: Json Response (list containing one dict of per-tag series)
        """
        try:
            assert self._db_connection, {
                STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR,
                MESSAGE_KEY: DB_ERROR
            }
            graph = []
            tag_left = None
            dict_data = {}
            if self.equipment == FURNACE_VALUE:
                # Raises KeyError (-> generic 500 handler) if either date
                # parameter is absent from the query string.
                query_params = {
                    START_DATE_REQUEST:
                    self.query_params.GET[START_DATE_REQUEST],
                    END_DATE_REQUEST: self.query_params.GET[END_DATE_REQUEST]
                }
                multi_line_tags = None

                # Tag set depends on which furnace module was requested.
                if self.module == FURNACE_A_VALUE:
                    multi_line_tags = tuple(FURNACE_A_RUN_LENGTH_TAGS)

                elif self.module == FURNACE_B_VALUE:
                    multi_line_tags = tuple(FURNACE_B_RUN_LENGTH_TAGS)
                """
                This will return the furnace runlength graph data
                """

                if query_params[START_DATE_REQUEST] and query_params[
                        END_DATE_REQUEST]:
                    # Two queries: the windowed series data, then the static
                    # per-tag min/max limits for the same tag set.
                    self._psql_session.execute(
                        FURNACE_MULTI_LINE_GRAPH.format(
                            self.module, multi_line_tags,
                            query_params[START_DATE_REQUEST],
                            query_params[END_DATE_REQUEST],
                            query_params[START_DATE_REQUEST]))
                    df_data = pd.DataFrame(self._psql_session.fetchall())
                    self._psql_session.execute(
                        FURNACE_MIN_MAX_DATA.format(self.module,
                                                    multi_line_tags))
                    df_min_max_data = pd.DataFrame(
                        self._psql_session.fetchall())

                    if not df_data.empty:
                        # Replace NaN with None so the JSON output is clean.
                        df_data = df_data.where(
                            pd.notnull(df_data) == True, None)
                        # Shared x-axis: the distinct timestamps, in the
                        # order pandas yields them.
                        df_time = list(df_data['timestamp'].unique())
                        df = df_data.groupby("tag_name")

                        if not df_min_max_data.empty:
                            # One series entry per tag; min/max attached only
                            # when the limits query knows the tag.
                            for name, group in df:
                                if name in list(df_min_max_data["tag_name"]):
                                    group['tag_value'] = group[
                                        'tag_value'].astype(float).round()
                                    # Rounding can reintroduce NaN; normalize
                                    # to None again before serializing.
                                    group = group.where(
                                        pd.notnull(group) == True, None)
                                    dict_data[name] = {
                                        "data":
                                        group[TAG_VALUE].to_list(),
                                        "description":
                                        group['description'].iloc[0],
                                        "unit":
                                        group['unit'].iloc[0],
                                        "min_data":
                                        df_min_max_data[
                                            df_min_max_data["tag_name"] ==
                                            name][MIN_VALUE].iloc[0],
                                        "max_data":
                                        df_min_max_data[
                                            df_min_max_data["tag_name"] ==
                                            name][MAX_VALUE].iloc[0]
                                    }
                                else:
                                    # No limits row for this tag: raw values,
                                    # null bounds.
                                    dict_data[name] = {
                                        "data": group[TAG_VALUE].to_list(),
                                        "description":
                                        group['description'].iloc[0],
                                        "unit": group['unit'].iloc[0],
                                        "min_data": None,
                                        "max_data": None
                                    }

                        dict_data["x-axis"] = {"data": df_time}

                        # Pad tags that produced no data so the client gets
                        # the complete configured tag set.
                        # if TAG_NAME_REQUEST not in self.query_params.GET:
                        if self.module == FURNACE_A_VALUE:
                            tag_left = list(FURNACE_A_RUN_LENGTH_TAGS -
                                            dict_data.keys())
                        else:
                            tag_left = list(FURNACE_B_RUN_LENGTH_TAGS -
                                            dict_data.keys())

                        for each in tag_left:
                            dict_data[each] = {
                                "data": [],
                                "description": None,
                                "unit": None,
                                "min_data": None,
                                "max_data": None
                            }

                    else:
                        # No series data at all: emit an empty placeholder
                        # for every configured tag plus an empty x-axis.
                        # if TAG_NAME_REQUEST not in self.query_params.GET:
                        if self.module == FURNACE_A_VALUE:
                            tag_left = FURNACE_A_RUN_LENGTH_TAGS
                        else:
                            tag_left = FURNACE_B_RUN_LENGTH_TAGS

                        for each in tag_left:
                            dict_data[each] = {
                                "data": [],
                                "description": None,
                                "unit": None,
                                "min_data": None,
                                "max_data": None
                            }
                        dict_data["x-axis"] = {"data": []}
                    graph.append(dict_data)
                    return JsonResponse(graph, safe=False)

        except AssertionError as e:
            log_error("Assertion error due to : %s" + str(e))
            return JsonResponse({MESSAGE_KEY: e.args[0][MESSAGE_KEY]},
                                status=e.args[0][STATUS_KEY])

        except Exception as e:
            log_error("Exception due to : %s" + str(e))
            return JsonResponse(
                {MESSAGE_KEY: EXCEPTION_CAUSE.format(traceback.format_exc())},
                status=HTTP_500_INTERNAL_SERVER_ERROR)
    def get_names_values(self):
        """
        Return the left-pane equipment/module tree, filtered down to the
        modules the logged-in user is allowed to see.

        The user needs 'Equipment' in the Left Pane permission bucket to get
        any tree at all; then each module is dropped unless its bucket
        contains 'Left Pane : Module Access'.
        :return: Json Response (list with one unit dict, or [] when the user
                 has no access / no rows survive filtering)
        """
        try:
            assert self._db_connection, {
                STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR,
                MESSAGE_KEY: DB_ERROR
            }
            self._psql_session.execute(MASTER_TABLE_QUERY)
            df = pd.DataFrame(self._psql_session.fetchall())
            # Re-order the master rows into the fixed display order.
            # NOTE(review): this hard-codes exactly seven rows -- it breaks
            # if the master table grows or shrinks; confirm before touching.
            df = pd.DataFrame(df, index=[0, 1, 2, 3, 5, 4, 6])

            permissions = self.loggedin_userid_details['permissions']
            # Resolve every permission bucket eagerly so a missing key raises
            # KeyError immediately (same failure mode as the original eager
            # lookups), then map each bucket to the module row it controls.
            module_access = [
                (permissions['HGI'], 'HGI'),
                (permissions['Furnace: H3901A'], 'H3901A: Pass 3 & 4'),
                (permissions['Furnace: H3901A (Spall)'],
                 'H3901A: Pass 3 & 4 (Spall)'),
                (permissions['Furnace: H3901B (Spall)'],
                 'H3901B: Pass 1 & 2 (Spall)'),
                (permissions['Furnace: H3901B'], 'H3901B: Pass 1 & 2'),
                (permissions['Outage'], 'Outage'),
                (permissions['PDI'], 'Overhead PDI'),
            ]
            if 'Equipment' not in permissions['Left Pane']:
                return JsonResponse([], safe=False)
            # Drop every module this user has no access to.
            for granted, module_name in module_access:
                if 'Left Pane : Module Access' not in granted:
                    df.drop(df.loc[df['module_name'] == module_name].index,
                            inplace=True)

            if df.shape[0]:
                # Group the surviving rows by equipment for the client tree.
                equipment_val = []
                for equipment in df[EQUIPMENT_NAME].unique():
                    equipment_val.append({
                        "equipment_name":
                        equipment,
                        "modules":
                        df[[MODULE_NAME,
                            ID]][(df[EQUIPMENT_NAME] == equipment)].to_dict(
                                orient=RECORDS)
                    })

                final_val = [{
                    "unit_name": EQUIPMENT,
                    "equipments": equipment_val
                }]
                return JsonResponse(final_val, safe=False)
            return JsonResponse([], safe=False)

        except AssertionError as e:
            log_error("Assertion error due to : {}".format(e))
            return JsonResponse({MESSAGE_KEY: e.args[0][MESSAGE_KEY]},
                                status=e.args[0][STATUS_KEY])

        except Exception as e:
            log_error("Exception due to : {}".format(e))
            return JsonResponse(
                {MESSAGE_KEY: EXCEPTION_CAUSE.format(traceback.format_exc())},
                status=HTTP_500_INTERNAL_SERVER_ERROR)