class ExtensionService(SSE.ConnectorServicer):
    """
    An SSE plugin that extracts tables from a PDF file using a tabula
    extraction template and returns them to Qlik as rows of duals.
    """

    def __init__(self, funcdef_file):
        """
        Class initializer.

        :param funcdef_file: path to a function definition JSON file
        """
        self._function_definitions = funcdef_file
        self.ScriptEval = ScriptEval()
        # The log directory must exist before the logging config is loaded,
        # since handlers configured there write into it.
        os.makedirs('logs', exist_ok=True)
        log_file = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                'logger.config')
        logging.config.fileConfig(log_file)
        logging.info('Logging enabled')

    @property
    def function_definitions(self):
        """
        :return: path to the JSON file with function definitions
        """
        return self._function_definitions

    @property
    def functions(self):
        """
        :return: mapping of function id to implementing method name
        """
        return {0: '_getPDFTable'}

    @staticmethod
    def _get_function_id(context):
        """
        Retrieve the function id from the gRPC request header.

        :param context: the gRPC call context
        :return: the function id
        """
        metadata = dict(context.invocation_metadata())
        header = SSE.FunctionRequestHeader()
        header.ParseFromString(metadata['qlik-functionrequestheader-bin'])
        return header.functionId

    """
    Implementation of added functions.
    """

    @staticmethod
    def _getPDFTable(request, context):
        """
        Extract tables from a PDF using a tabula template and stream them
        back as bundled rows. Each extracted table is tagged with a 1-based
        'tableID' column so Qlik can tell the tables apart.

        :param request: iterable sequence of bundled rows; every row carries
                        (extraction method, PDF path, template path) as the
                        first three string duals — identical on each row
        :param context: the gRPC call context (unused)
        :return: generator yielding one SSE.BundledRows with the table data
        """
        # The three parameters repeat on every row; capture them once.
        extraction_method = None
        path = None
        template = None
        for request_rows in request:
            for row in request_rows.rows:
                if extraction_method is None:
                    extraction_method = [d.strData for d in row.duals][0]
                if path is None:
                    path = [d.strData for d in row.duals][1]
                if template is None:
                    template = [d.strData for d in row.duals][2]

        # Read the PDF with the template; 'stream' selects tabula's stream
        # mode, anything else falls back to lattice mode.
        if extraction_method == 'stream':
            df_list = read_pdf_with_template(path, template, stream=True)
        else:
            df_list = read_pdf_with_template(path, template, lattice=True)

        # Concatenate all extracted tables, tagging each with its table id.
        final_df = pd.DataFrame()
        for count, df in enumerate(df_list, start=1):
            df['tableID'] = str(count)
            final_df = pd.concat([final_df, df], axis=0, ignore_index=True)

        # Convert each column into a list of SSE duals (string or numeric).
        dualsList = []
        for col in final_df.columns:
            tmpList = final_df[col].tolist()
            dualsList.append([
                SSE.Dual(strData=d) if isinstance(d, str) else SSE.Dual(numData=d)
                for d in tmpList
            ])

        # Transpose the per-column duals into response rows. Row count is
        # taken from the DataFrame itself rather than from the last column's
        # temporary list, which is undefined when the extraction produced no
        # columns (the original raised UnboundLocalError in that case).
        response_rows = []
        for i in range(len(final_df.index)):
            duals = [dualsList[z][i] for z in range(len(dualsList))]
            response_rows.append(SSE.Row(duals=iter(duals)))

        yield SSE.BundledRows(rows=response_rows)

    def GetCapabilities(self, request, context):
        """
        Get capabilities.
        Note that either request or context is used in the implementation of
        this method, but still added as parameters. The reason is that gRPC
        always sends both when making a function call and therefore we must
        include them to avoid error messages regarding too many parameters
        provided from the client.

        :param request: the request, not used in this method.
        :param context: the context, not used in this method.
        :return: the capabilities.
        """
        logging.info('GetCapabilities')
        # NOTE(review): pluginIdentifier 'Sentiment' looks copy-pasted from
        # another plugin; confirm whether it should name this PDF plugin.
        capabilities = SSE.Capabilities(allowScript=True,
                                        pluginIdentifier='Sentiment',
                                        pluginVersion='v1.1.0')
        # Add the user-defined function definitions to the message.
        with open(self.function_definitions) as json_file:
            for definition in json.load(json_file)['Functions']:
                function = capabilities.functions.add()
                function.name = definition['Name']
                function.functionId = definition['Id']
                function.functionType = definition['Type']
                function.returnType = definition['ReturnType']
                # Retrieve name and type of each parameter
                for param_name, param_type in sorted(
                        definition['Params'].items()):
                    function.params.add(name=param_name, dataType=param_type)
                logging.info('Adding to capabilities: {}({})'.format(
                    function.name, [p.name for p in function.params]))
        return capabilities

    def ExecuteFunction(self, request_iterator, context):
        """
        Execute function call.

        :param request_iterator: an iterable sequence of Row.
        :param context: the context.
        :return: an iterable sequence of Row.
        """
        # Retrieve function id and dispatch to the mapped implementation.
        func_id = self._get_function_id(context)
        logging.info('ExecuteFunction (functionId: {})'.format(func_id))
        return getattr(self, self.functions[func_id])(request_iterator,
                                                      context)

    def EvaluateScript(self, request, context):
        """
        This plugin provides functionality only for script calls with no
        parameters and tensor script calls.

        :param request: the script evaluation request
        :param context: the gRPC call context
        :return: result rows from the script evaluator
        :raises grpc.RpcError: for unsupported function types
        """
        # Parse header for script request
        metadata = dict(context.invocation_metadata())
        header = SSE.ScriptRequestHeader()
        header.ParseFromString(metadata['qlik-scriptrequestheader-bin'])
        # Retrieve function type
        func_type = self.ScriptEval.get_func_type(header)
        # Verify function type
        if (func_type == FunctionType.Aggregation) or (func_type == FunctionType.Tensor):
            return self.ScriptEval.EvaluateScript(header, request, context,
                                                  func_type)
        else:
            # Other function types are not supported: report the error both
            # through the context (for the client) and by raising plugin-side.
            msg = 'Function type {} is not supported in this plugin.'.format(
                func_type.name)
            context.set_code(grpc.StatusCode.UNIMPLEMENTED)
            context.set_details(msg)
            raise grpc.RpcError(grpc.StatusCode.UNIMPLEMENTED, msg)

    """
    Implementation of the Server connecting to gRPC.
    """

    def Serve(self, port, pem_dir):
        """
        Sets up the gRPC Server, secure when certificates are supplied,
        insecure otherwise, and blocks until interrupted.

        :param port: port to listen on.
        :param pem_dir: Directory including certificates
        :return: None
        """
        server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
        SSE.add_ConnectorServicer_to_server(self, server)
        if pem_dir:
            # Secure connection: mutual TLS using the provided PEM files.
            with open(os.path.join(pem_dir, 'sse_server_key.pem'), 'rb') as f:
                private_key = f.read()
            with open(os.path.join(pem_dir, 'sse_server_cert.pem'), 'rb') as f:
                cert_chain = f.read()
            with open(os.path.join(pem_dir, 'root_cert.pem'), 'rb') as f:
                root_cert = f.read()
            credentials = grpc.ssl_server_credentials(
                [(private_key, cert_chain)], root_cert, True)
            server.add_secure_port('[::]:{}'.format(port), credentials)
            logging.info(
                '*** Running server in secure mode on port: {} ***'.format(
                    port))
        else:
            # Insecure connection
            server.add_insecure_port('[::]:{}'.format(port))
            logging.info(
                '*** Running server in insecure mode on port: {} ***'.format(
                    port))
        # Start gRPC server and sleep until interrupted.
        server.start()
        try:
            while True:
                time.sleep(_ONE_DAY_IN_SECONDS)
        except KeyboardInterrupt:
            server.stop(0)
class ExtensionService(SSE.ConnectorServicer):
    """
    An SSE plugin that translates text values via the googletrans Translator.
    """

    def __init__(self, funcdef_file):
        """
        Class initializer.

        :param funcdef_file: path to a function definition JSON file
        """
        self._function_definitions = funcdef_file
        self.ScriptEval = ScriptEval()
        # The log directory must exist before the logging config is loaded,
        # since handlers configured there write into it.
        os.makedirs('logs', exist_ok=True)
        log_file = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                'logger.config')
        logging.config.fileConfig(log_file)
        logging.info('Logging enabled')

    @property
    def function_definitions(self):
        """
        :return: path to the JSON file with function definitions
        """
        return self._function_definitions

    @property
    def functions(self):
        """
        :return: mapping of function id to implementing method name
        """
        return {0: '_translate', 1: '_translateScript'}

    @staticmethod
    def _get_function_id(context):
        """
        Retrieve the function id from the gRPC request header.

        :param context: the gRPC call context
        :return: the function id
        """
        metadata = dict(context.invocation_metadata())
        header = SSE.FunctionRequestHeader()
        header.ParseFromString(metadata['qlik-functionrequestheader-bin'])
        return header.functionId

    """
    Implementation of added functions.
    """

    @staticmethod
    def _translate(request, context):
        """
        Translate a batch of text values with googletrans.

        Each incoming row carries (text, source language, destination
        language) as string duals; src/dest are taken from the last row seen
        and applied to the whole batch.

        :param request: iterable sequence of bundled rows
        :param context: the gRPC call context (unused)
        :return: generator yielding one BundledRows with the translations
        """
        translationsList = []
        src = None
        dest = None
        for request_rows in request:
            for row in request_rows.rows:
                # Extract the string duals once per row (the original
                # rebuilt this list for every field).
                strData = [d.strData for d in row.duals]
                src = strData[1]
                dest = strData[2]
                translationsList.append(strData[0])

        try:
            translator = Translator()
            translations = translator.translate(translationsList,
                                                src=src, dest=dest)
        except Exception:
            # The original swallowed this with a bare 'except: pass' and then
            # crashed with a NameError on 'translations', hiding the real
            # cause. Log the underlying error and surface it to the client.
            logging.exception('Translation failed')
            raise

        resultList = [i.text for i in translations]
        logging.info('Records translated: ' + str(len(resultList)))
        # Create an iterable of dual with the result
        duals = iter([[SSE.Dual(strData=d)] for d in resultList])
        # Yield the row data as bundled rows
        yield SSE.BundledRows(rows=[SSE.Row(duals=d) for d in duals])

    @staticmethod
    def _translateScript(request, context):
        """
        Translate a batch of text values, keyed by a numeric id, and return
        an (id, translation) table suitable for a Qlik load script call.

        Each incoming row carries (text, id, source language, destination
        language); src/dest are taken from the last row seen and applied to
        the whole batch.

        :param request: iterable sequence of bundled rows
        :param context: the gRPC call context; receives the table header
        :return: generator yielding one BundledRows with (id, translation)
        """
        idNumList = []
        translationsList = []
        src = None
        dest = None
        for request_rows in request:
            for row in request_rows.rows:
                # Extract both dual views once per row.
                strData = [d.strData for d in row.duals]
                numData = [d.numData for d in row.duals]
                idNumList.append(numData[1])
                src = strData[2]
                dest = strData[3]
                translationsList.append(strData[0])

        try:
            translator = Translator()
            translations = translator.translate(translationsList,
                                                src=src, dest=dest)
        except Exception:
            # See _translate: fail loudly instead of masking the error with
            # a bare 'except: pass' followed by a NameError.
            logging.exception('Translation failed')
            raise

        resultList = [i.text for i in translations]
        logging.info('Records translated: ' + str(len(resultList)))

        # Build one duals column for the ids and one for the translations,
        # then transpose them into response rows.
        dualsList = []
        dualsList.append([SSE.Dual(numData=d) for d in idNumList])
        dualsList.append([SSE.Dual(strData=d) for d in resultList])
        response_rows = []
        for i in range(len(idNumList)):
            duals = [dualsList[z][i] for z in range(len(dualsList))]
            response_rows.append(SSE.Row(duals=iter(duals)))

        # Set and send Table header describing the (numeric, string) result.
        table = SSE.TableDescription(name='Translations')
        table.fields.add(dataType=SSE.NUMERIC)
        table.fields.add(dataType=SSE.STRING)
        md = (('qlik-tabledescription-bin', table.SerializeToString()), )
        context.send_initial_metadata(md)

        yield SSE.BundledRows(rows=response_rows)

    """
    Implementation of rpc functions.
    """

    def GetCapabilities(self, request, context):
        """
        Get capabilities.
        Note that either request or context is used in the implementation of
        this method, but still added as parameters. The reason is that gRPC
        always sends both when making a function call and therefore we must
        include them to avoid error messages regarding too many parameters
        provided from the client.

        :param request: the request, not used in this method.
        :param context: the context, not used in this method.
        :return: the capabilities.
        """
        logging.info('GetCapabilities')
        capabilities = SSE.Capabilities(allowScript=True,
                                        pluginIdentifier='Translation',
                                        pluginVersion='v1.1.0')
        # Add the user-defined function definitions to the message.
        with open(self.function_definitions) as json_file:
            for definition in json.load(json_file)['Functions']:
                function = capabilities.functions.add()
                function.name = definition['Name']
                function.functionId = definition['Id']
                function.functionType = definition['Type']
                function.returnType = definition['ReturnType']
                # Retrieve name and type of each parameter
                for param_name, param_type in sorted(
                        definition['Params'].items()):
                    function.params.add(name=param_name, dataType=param_type)
                logging.info('Adding to capabilities: {}({})'.format(
                    function.name, [p.name for p in function.params]))
        return capabilities

    def ExecuteFunction(self, request_iterator, context):
        """
        Execute function call.

        :param request_iterator: an iterable sequence of Row.
        :param context: the context.
        :return: an iterable sequence of Row.
        """
        # Retrieve function id and dispatch to the mapped implementation.
        func_id = self._get_function_id(context)
        logging.info('ExecuteFunction (functionId: {})'.format(func_id))
        return getattr(self, self.functions[func_id])(request_iterator,
                                                      context)

    def EvaluateScript(self, request, context):
        """
        This plugin provides functionality only for script calls with no
        parameters and tensor script calls.

        :param request: the script evaluation request
        :param context: the gRPC call context
        :return: result rows from the script evaluator
        :raises grpc.RpcError: for unsupported function types
        """
        # Parse header for script request
        metadata = dict(context.invocation_metadata())
        header = SSE.ScriptRequestHeader()
        header.ParseFromString(metadata['qlik-scriptrequestheader-bin'])
        # Retrieve function type
        func_type = self.ScriptEval.get_func_type(header)
        # Verify function type
        if (func_type == FunctionType.Aggregation) or (func_type == FunctionType.Tensor):
            return self.ScriptEval.EvaluateScript(header, request, context,
                                                  func_type)
        else:
            # Other function types are not supported: report the error both
            # through the context (for the client) and by raising plugin-side.
            msg = 'Function type {} is not supported in this plugin.'.format(
                func_type.name)
            context.set_code(grpc.StatusCode.UNIMPLEMENTED)
            context.set_details(msg)
            raise grpc.RpcError(grpc.StatusCode.UNIMPLEMENTED, msg)

    """
    Implementation of the Server connecting to gRPC.
    """

    def Serve(self, port, pem_dir):
        """
        Sets up the gRPC Server, secure when certificates are supplied,
        insecure otherwise, and blocks until interrupted.

        :param port: port to listen on.
        :param pem_dir: Directory including certificates
        :return: None
        """
        server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
        SSE.add_ConnectorServicer_to_server(self, server)
        if pem_dir:
            # Secure connection: mutual TLS using the provided PEM files.
            with open(os.path.join(pem_dir, 'sse_server_key.pem'), 'rb') as f:
                private_key = f.read()
            with open(os.path.join(pem_dir, 'sse_server_cert.pem'), 'rb') as f:
                cert_chain = f.read()
            with open(os.path.join(pem_dir, 'root_cert.pem'), 'rb') as f:
                root_cert = f.read()
            credentials = grpc.ssl_server_credentials(
                [(private_key, cert_chain)], root_cert, True)
            server.add_secure_port('[::]:{}'.format(port), credentials)
            logging.info(
                '*** Running server in secure mode on port: {} ***'.format(
                    port))
        else:
            # Insecure connection
            server.add_insecure_port('[::]:{}'.format(port))
            logging.info(
                '*** Running server in insecure mode on port: {} ***'.format(
                    port))
        # Start gRPC server and sleep until interrupted.
        server.start()
        try:
            while True:
                time.sleep(_ONE_DAY_IN_SECONDS)
        except KeyboardInterrupt:
            server.stop(0)
class ExtensionService(SSE.ConnectorServicer):
    """
    A simple SSE-plugin created for the HelloWorld example.
    """

    def __init__(self, funcdef_file):
        """
        Class initializer.

        :param funcdef_file: a function definition JSON file
        """
        self._function_definitions = funcdef_file
        self.ScriptEval = ScriptEval()
        os.makedirs('logs', exist_ok=True)
        config_path = os.path.join(
            os.path.dirname(os.path.abspath(__file__)), 'logger.config')
        logging.config.fileConfig(config_path)
        logging.info('Logging enabled')

    @property
    def function_definitions(self):
        """
        :return: json file with function definitions
        """
        return self._function_definitions

    @property
    def functions(self):
        """
        :return: Mapping of function id and implementation
        """
        return {
            0: '_hello_world',
            1: '_hello_world_aggr',
            2: '_cache',
            3: '_no_cache',
            4: '_echo_table',
        }

    @staticmethod
    def _get_function_id(context):
        """
        Retrieve function id from header.

        :param context: context
        :return: function id
        """
        header = SSE.FunctionRequestHeader()
        raw = dict(context.invocation_metadata())['qlik-functionrequestheader-bin']
        header.ParseFromString(raw)
        return header.functionId

    """
    Implementation of added functions.
    """

    @staticmethod
    def _hello_world(request, context):
        """
        Mirrors the input and sends back the same data.

        :param request: iterable sequence of bundled rows
        :return: the same iterable sequence as received
        """
        yield from request

    @staticmethod
    def _hello_world_aggr(request, context):
        """
        Aggregates the parameters to a single comma separated string.

        :param request: iterable sequence of bundled rows
        :return: string
        """
        # Collect the single string parameter from every row in every bundle.
        values = [
            row.duals[0].strData
            for request_rows in request
            for row in request_rows.rows
        ]
        joined = ', '.join(values)
        yield SSE.BundledRows(
            rows=[SSE.Row(duals=iter([SSE.Dual(strData=joined)]))])

    @staticmethod
    def _cache(request, context):
        """
        Cache enabled. Add the datetime stamp to the end of each string value.

        :param request: iterable sequence of bundled rows
        :param context: not used.
        :return: string
        """
        for request_rows in request:
            for row in request_rows.rows:
                # Suffix the incoming value with the current timestamp.
                stamped = row.duals[0].strData + ' ' + datetime.now().isoformat()
                yield SSE.BundledRows(
                    rows=[SSE.Row(duals=iter([SSE.Dual(strData=stamped)]))])

    @staticmethod
    def _no_cache(request, context):
        """
        Cache disabled. Add the datetime stamp to the end of each string value.

        :param request: iterable sequence of bundled rows
        :param context: used for disabling the cache in the header.
        :return: string
        """
        # Tell Qlik not to cache results from this function.
        context.send_initial_metadata((('qlik-cache', 'no-store'),))
        for request_rows in request:
            for row in request_rows.rows:
                stamped = row.duals[0].strData + ' ' + datetime.now().isoformat()
                yield SSE.BundledRows(
                    rows=[SSE.Row(duals=iter([SSE.Dual(strData=stamped)]))])

    @staticmethod
    def _echo_table(request, context):
        """
        Echo the input table.

        :param request: iterable sequence of bundled rows
        :param context: not used.
        :return: the same rows as received
        """
        for request_rows in request:
            yield SSE.BundledRows(rows=list(request_rows.rows))

    """
    Implementation of rpc functions.
    """

    def GetCapabilities(self, request, context):
        """
        Get capabilities.
        Note that either request or context is used in the implementation of
        this method, but still added as parameters. The reason is that gRPC
        always sends both when making a function call and therefore we must
        include them to avoid error messages regarding too many parameters
        provided from the client.

        :param request: the request, not used in this method.
        :param context: the context, not used in this method.
        :return: the capabilities.
        """
        logging.info('GetCapabilities')
        capabilities = SSE.Capabilities(allowScript=True,
                                        pluginIdentifier='Hello World - Qlik',
                                        pluginVersion='v1.1.0')
        # Load the user-defined function definitions, then register each one.
        with open(self.function_definitions) as json_file:
            definitions = json.load(json_file)['Functions']
        for definition in definitions:
            function = capabilities.functions.add()
            function.name = definition['Name']
            function.functionId = definition['Id']
            function.functionType = definition['Type']
            function.returnType = definition['ReturnType']
            for param_name, param_type in sorted(definition['Params'].items()):
                function.params.add(name=param_name, dataType=param_type)
            logging.info('Adding to capabilities: {}({})'.format(
                function.name, [p.name for p in function.params]))
        return capabilities

    def ExecuteFunction(self, request_iterator, context):
        """
        Execute function call.

        :param request_iterator: an iterable sequence of Row.
        :param context: the context.
        :return: an iterable sequence of Row.
        """
        function_id = self._get_function_id(context)
        logging.info('ExecuteFunction (functionId: {})'.format(function_id))
        implementation = getattr(self, self.functions[function_id])
        return implementation(request_iterator, context)

    def EvaluateScript(self, request, context):
        """
        This plugin provides functionality only for script calls with no
        parameters and tensor script calls.

        :param request: the script evaluation request
        :param context: the gRPC call context
        :return: result rows from the script evaluator
        :raises grpc.RpcError: for unsupported function types
        """
        header = SSE.ScriptRequestHeader()
        raw = dict(context.invocation_metadata())['qlik-scriptrequestheader-bin']
        header.ParseFromString(raw)
        func_type = self.ScriptEval.get_func_type(header)
        # Only aggregation and tensor script calls are supported.
        if func_type in (FunctionType.Aggregation, FunctionType.Tensor):
            return self.ScriptEval.EvaluateScript(header, request, context,
                                                  func_type)
        # Report the unsupported type to the client and raise plugin-side.
        msg = 'Function type {} is not supported in this plugin.'.format(func_type.name)
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details(msg)
        raise grpc.RpcError(grpc.StatusCode.UNIMPLEMENTED, msg)

    """
    Implementation of the Server connecting to gRPC.
    """

    def Serve(self, port, pem_dir):
        """
        Sets up the gRPC Server with insecure connection on port

        :param port: port to listen on.
        :param pem_dir: Directory including certificates
        :return: None
        """
        server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
        SSE.add_ConnectorServicer_to_server(self, server)

        if pem_dir:
            # Secure connection: load each PEM file from the cert directory.
            def load_pem(filename):
                with open(os.path.join(pem_dir, filename), 'rb') as pem_file:
                    return pem_file.read()

            credentials = grpc.ssl_server_credentials(
                [(load_pem('sse_server_key.pem'), load_pem('sse_server_cert.pem'))],
                load_pem('root_cert.pem'), True)
            server.add_secure_port('[::]:{}'.format(port), credentials)
            logging.info('*** Running server in secure mode on port: {} ***'.format(port))
        else:
            # Insecure connection
            server.add_insecure_port('[::]:{}'.format(port))
            logging.info('*** Running server in insecure mode on port: {} ***'.format(port))

        server.start()
        try:
            while True:
                time.sleep(_ONE_DAY_IN_SECONDS)
        except KeyboardInterrupt:
            server.stop(0)
class ExtensionService(SSE.ConnectorServicer): """ A simple SSE-plugin created for the HelloWorld example. """ def __init__(self, funcdef_file): """ Class initializer. :param funcdef_file: a function definition JSON file """ self._function_definitions = funcdef_file self.ScriptEval = ScriptEval() os.makedirs('logs', exist_ok=True) log_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'logger.config') logging.config.fileConfig(log_file) logging.info('Logging enabled') @property def function_definitions(self): """ :return: json file with function definitions """ return self._function_definitions @property def functions(self): """ :return: Mapping of function id and implementation """ return {0: '_prophet', 1: '_prophetScript'} @staticmethod def _get_function_id(context): """ Retrieve function id from header. :param context: context :return: function id """ metadata = dict(context.invocation_metadata()) header = SSE.FunctionRequestHeader() header.ParseFromString(metadata['qlik-functionrequestheader-bin']) return header.functionId """ Implementation of added functions. """ @staticmethod def _prophetScript(request, context): """ Mirrors the input and sends back the same data. 
:param request: iterable sequence of bundled rows :return: the same iterable sequence as received """ # instantiate a list for measure data dateStampList = [] figuresList = [] forecastPeriods = None forecastType = None m = None yhat = None changePoint = None minFloor = None maxCap = None for request_rows in request: # iterate over each request row (contains rows, duals, numData) # pull duals from each row, and the numData from duals for row in request_rows.rows: # the first numData contains the date stamps dateStamps = [d.numData for d in row.duals][0] pythonDate = datetime.fromordinal( datetime(1900, 1, 1).toordinal() + int(dateStamps) - 2) dateStampList.append(pythonDate) # the second numData contains the figures figures = int([d.numData for d in row.duals][1]) figuresList.append(figures) # this is redundant and is the same in every row if not forecastPeriods: forecastPeriods = int([d.numData for d in row.duals][2]) if not forecastType: forecastType = [d.strData for d in row.duals][3] if not yhat: yhat = [d.strData for d in row.duals][6] if not changePoint: changePoint = int([d.numData for d in row.duals][7]) if not minFloor: minFloor = int([d.numData for d in row.duals][8]) if not maxCap: maxCap = int([d.numData for d in row.duals][9]) # create data frame dataFrame = pd.DataFrame({'ds': dateStampList, 'y': figuresList}) print(dataFrame) if forecastType == 'hourly': # fit data to prophet m = Prophet(changepoint_prior_scale=changePoint) m.fit(dataFrame) #create future dataframe future = m.make_future_dataframe(periods=forecastPeriods, freq='H') if forecastType == 'daily': # fit data to prophet m = Prophet(changepoint_prior_scale=changePoint) m.fit(dataFrame) #create future dataframe future = m.make_future_dataframe(periods=forecastPeriods) if forecastType == 'monthly': # fit data to prophet m = Prophet(weekly_seasonality=False, changepoint_prior_scale=changePoint) m.add_seasonality(name='monthly', period=30.5, fourier_order=5) m.fit(dataFrame) #create future 
dataframe future = m.make_future_dataframe(periods=forecastPeriods, freq='MS') #create forecast and create a list if not m: # fit data to prophet m = Prophet(weekly_seasonality=False, changepoint_prior_scale=changePoint) m.add_seasonality(name='monthly', period=30.5, fourier_order=5) m.fit(dataFrame) forecast = m.predict(future) forecastList = forecast[yhat].values.tolist() dateList = pd.to_datetime(forecast['ds'].values.tolist()) #convert forecast results to ints resultsList = [] for val in forecastList: try: resultsList.append(int(val)) except: resultsList.append(0) finalDateList = [] for ds in dateList: try: finalDateList.append(str(ds)) except: finalDateList.append(0) # Create an iterable of dual with the result dualsList = [] dualsList.append([SSE.Dual(numData=d) for d in resultsList]) dualsList.append([SSE.Dual(strData=d) for d in finalDateList]) #create response rows response_rows = [] for i in range(len(resultsList)): duals = [dualsList[z][i] for z in range(len(dualsList))] response_rows.append(SSE.Row(duals=iter(duals))) #set and send table header table = SSE.TableDescription(name='ProphetForecast') table.fields.add(dataType=SSE.NUMERIC) table.fields.add(dataType=SSE.STRING) md = (('qlik-tabledescription-bin', table.SerializeToString()), ) context.send_initial_metadata(md) yield SSE.BundledRows(rows=response_rows) @staticmethod def _prophet(request, context): """ Mirrors the input and sends back the same data. 
:param request: iterable sequence of bundled rows :return: the same iterable sequence as received """ # instantiate a list for measure data dateStampList = [] figuresList = [] forecastPeriods = None outliers = None forecastType = None adjustments = None forecastReturnType = None changePoint = None fourierOrder = None m = None for request_rows in request: # iterate over each request row (contains rows, duals, numData) # pull duals from each row, and the numData from duals for row in request_rows.rows: # this is redundant and is the same in every row if not adjustments: adjustments = [d.strData for d in row.duals][0] if not changePoint: tmpChangePoint = [d.numData for d in row.duals][1] if math.isnan(tmpChangePoint): changePoint = 0.05 else: changePoint = tmpChangePoint # the first numData contains the date stamps dateStamp = [d.numData for d in row.duals][2] try: pythonDate = datetime.fromordinal( datetime(1900, 1, 1).toordinal() + int(dateStamp) - 2) dateStampList.append(pythonDate) except ValueError: dateStampList.append(dateStamp) # the second numData contains the figures figures = int([d.numData for d in row.duals][3]) figuresList.append(figures) if not forecastType: forecastType = [d.strData for d in row.duals][4] if not forecastPeriods: forecastPeriods = int([d.numData for d in row.duals][5]) if not forecastReturnType: forecastReturnType = [d.strData for d in row.duals][6] if not fourierOrder: tmpFourierOrder = [d.numData for d in row.duals][7] if math.isnan(tmpFourierOrder): fourierOrder = 5 else: fourierOrder = int(tmpFourierOrder) if not outliers: outliers = [d.strData for d in row.duals][8] # create data frame dataFrame = pd.DataFrame({'ds': dateStampList, 'y': figuresList}) print(dataFrame) # Store the original indexes for re-ordering output later index = dataFrame.copy() # remove null values from df dataFrame = dataFrame.dropna() # Sort the Request Data Frame based on dates, as Qlik may send unordered data dataFrame = dataFrame.sort_values('ds') # drop 
extra periods from data frame dataFrame = dataFrame.reset_index() dataFrame.drop(dataFrame.tail(forecastPeriods).index, inplace=True) # remove outliers if len(outliers) > 2: outliersList = outliers.split(",") for outlier in outliersList: dataFrame.loc[dataFrame['ds'] == outlier, 'y'] = None if forecastType == 'hourly': # fit data to prophet m = Prophet(changepoint_prior_scale=changePoint) m.fit(dataFrame) #create future dataframe future = m.make_future_dataframe(periods=forecastPeriods, freq='H') if forecastType == 'daily': # fit data to prophet m = Prophet(changepoint_prior_scale=changePoint) m.fit(dataFrame) #create future dataframe future = m.make_future_dataframe(periods=forecastPeriods) if forecastType == 'monthly': # fit data to prophet m = Prophet(weekly_seasonality=False, changepoint_prior_scale=changePoint) m.add_seasonality(name='yearly', period=365.25, fourier_order=fourierOrder) m.fit(dataFrame) #create future dataframe future = m.make_future_dataframe(periods=forecastPeriods, freq='MS') if not m: # fit data to prophet m = Prophet(seasonality_mode='multiplicative').fit(dataFrame) #m.add_seasonality(name='monthly', period=365.25, fourier_order=5) #m.fit(dataFrame) #create forecast forecast = m.predict(future) #loop through adjustments for each time period and change yhat try: adjJson = json.loads(adjustments) for index in range(len(forecast)): for item in adjJson: dt = datetime.strptime(item['firstField'], '%Y-%m-%d') if dt == forecast.at[index, 'ds']: adjustmentString = item["adjustment"].replace( "m", "000000").replace("M", "000000").replace( "k", "0000").replace("K", "0000") if "%" in adjustmentString: adjustmentPercent = float( adjustmentString.replace("%", "")) / 100 + 1 forecast.at[index, forecastReturnType] = float( forecast.at[index, forecastReturnType] ) * adjustmentPercent else: forecast.at[index, forecastReturnType] = float( forecast.at[index, forecastReturnType] ) + float(adjustmentString) except: print('No adjustments!') #drop index column 
from data frame dataFrame.drop(columns=['index'], inplace=True) # keep only the needed columns from the forecast forecast = forecast[['ds', forecastReturnType]] print(forecast) # merge two dataframes forecast = pd.merge(index, forecast, how='outer', on='ds', left_index=False, right_index=False, sort=False) forecast['result'] = forecast.apply(lambda row: row[forecastReturnType] if row['y'] == 0 else row['y'], axis=1) forecastList = forecast['result'].values.tolist() #convert forecast results to ints resultsList = [] for i, val in enumerate(forecastList): try: resultsList.append(int(val)) except: resultsList.append(0) # Create an iterable of dual with the result duals = iter([[SSE.Dual(numData=d)] for d in resultsList]) # Yield the row data as bundled rows yield SSE.BundledRows(rows=[SSE.Row(duals=d) for d in duals]) def GetCapabilities(self, request, context): """ Get capabilities. Note that either request or context is used in the implementation of this method, but still added as parameters. The reason is that gRPC always sends both when making a function call and therefore we must include them to avoid error messages regarding too many parameters provided from the client. :param request: the request, not used in this method. :param context: the context, not used in this method. :return: the capabilities. 
""" logging.info('GetCapabilities') # Create an instance of the Capabilities grpc message # Enable(or disable) script evaluation # Set values for pluginIdentifier and pluginVersion capabilities = SSE.Capabilities(allowScript=True, pluginIdentifier='Prophet', pluginVersion='v1.1.0') # If user defined functions supported, add the definitions to the message with open(self.function_definitions) as json_file: # Iterate over each function definition and add data to the capabilities grpc message for definition in json.load(json_file)['Functions']: function = capabilities.functions.add() function.name = definition['Name'] function.functionId = definition['Id'] function.functionType = definition['Type'] function.returnType = definition['ReturnType'] # Retrieve name and type of each parameter for param_name, param_type in sorted( definition['Params'].items()): function.params.add(name=param_name, dataType=param_type) logging.info('Adding to capabilities: {}({})'.format( function.name, [p.name for p in function.params])) return capabilities def ExecuteFunction(self, request_iterator, context): """ Execute function call. :param request_iterator: an iterable sequence of Row. :param context: the context. :return: an iterable sequence of Row. """ # Retrieve function id func_id = self._get_function_id(context) # Call corresponding function logging.info('ExecuteFunction (functionId: {})'.format(func_id)) return getattr(self, self.functions[func_id])(request_iterator, context) def EvaluateScript(self, request, context): """ This plugin provides functionality only for script calls with no parameters and tensor script calls. 
:param request: :param context: :return: """ # Parse header for script request metadata = dict(context.invocation_metadata()) header = SSE.ScriptRequestHeader() header.ParseFromString(metadata['qlik-scriptrequestheader-bin']) # Retrieve function type func_type = self.ScriptEval.get_func_type(header) # Verify function type if (func_type == FunctionType.Aggregation) or (func_type == FunctionType.Tensor): return self.ScriptEval.EvaluateScript(header, request, context, func_type) else: # This plugin does not support other function types than aggregation and tensor. # Make sure the error handling, including logging, works as intended in the client msg = 'Function type {} is not supported in this plugin.'.format( func_type.name) context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details(msg) # Raise error on the plugin-side raise grpc.RpcError(grpc.StatusCode.UNIMPLEMENTED, msg) """ Implementation of the Server connecting to gRPC. """ def Serve(self, port, pem_dir): """ Sets up the gRPC Server with insecure connection on port :param port: port to listen on. 
:param pem_dir: Directory including certificates :return: None """ # Create gRPC server server = grpc.server(futures.ThreadPoolExecutor(max_workers=10)) SSE.add_ConnectorServicer_to_server(self, server) if pem_dir: # Secure connection with open(os.path.join(pem_dir, 'sse_server_key.pem'), 'rb') as f: private_key = f.read() with open(os.path.join(pem_dir, 'sse_server_cert.pem'), 'rb') as f: cert_chain = f.read() with open(os.path.join(pem_dir, 'root_cert.pem'), 'rb') as f: root_cert = f.read() credentials = grpc.ssl_server_credentials( [(private_key, cert_chain)], root_cert, True) server.add_secure_port('[::]:{}'.format(port), credentials) logging.info( '*** Running server in secure mode on port: {} ***'.format( port)) else: # Insecure connection server.add_insecure_port('[::]:{}'.format(port)) logging.info( '*** Running server in insecure mode on port: {} ***'.format( port)) # Start gRPC server server.start() try: while True: time.sleep(_ONE_DAY_IN_SECONDS) except KeyboardInterrupt: server.stop(0)
class ExtensionService(SSE.ConnectorServicer):
    """
    SSE-plugin with support for full script functionality.

    This variant exposes no user defined functions; it only enables script
    evaluation (``allowScript=True``) and delegates every script request to
    :class:`ScriptEval`.
    """

    def __init__(self):
        """
        Class initializer.

        Sets up script evaluation support and configures logging from the
        'logger.config' file located next to this module.
        (Fixed: the previous docstring documented a ``funcdef_file``
        parameter that this initializer does not take.)
        """
        self.ScriptEval = ScriptEval()
        # The 'logs' directory must exist before the file handlers declared
        # in logger.config try to open their log files.
        os.makedirs('logs', exist_ok=True)
        log_file = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                'logger.config')
        logging.config.fileConfig(log_file)
        logging.info('Logging enabled')

    """
    Implementation of rpc functions.
    """

    def GetCapabilities(self, request, context):
        """
        Get capabilities.
        Note that either request or context is used in the implementation of
        this method, but still added as parameters. The reason is that gRPC
        always sends both when making a function call and therefore we must
        include them to avoid error messages regarding too many parameters
        provided from the client.
        :param request: the request, not used in this method.
        :param context: the context, not used in this method.
        :return: the capabilities.
        """
        logging.info('GetCapabilities')

        # Script evaluation enabled; no user defined functions are added,
        # so only identifier/version metadata is returned.
        capabilities = SSE.Capabilities(
            allowScript=True,
            pluginIdentifier='Full Script Support using Pandas- Qlik',
            pluginVersion='v1.0.0')
        return capabilities

    def EvaluateScript(self, request, context):
        """
        Evaluate a script request.

        This plugin supports full script functionality, that is, all function
        types and all data types.
        :param request: an iterable sequence of bundled rows.
        :param context: the gRPC context; carries the script request header.
        :return: an iterable sequence of bundled rows produced by ScriptEval.
        """
        # Parse header for script request
        metadata = dict(context.invocation_metadata())
        header = SSE.ScriptRequestHeader()
        header.ParseFromString(metadata['qlik-scriptrequestheader-bin'])
        return self.ScriptEval.EvaluateScript(header, request, context)

    """
    Implementation of the Server connecting to gRPC.
    """

    def Serve(self, port, pem_dir):
        """
        Set up and run the gRPC server.

        A secure (TLS, mutual-auth) connection is used when a certificate
        directory is supplied; otherwise the server listens on an insecure
        port. Blocks until interrupted.
        :param port: port to listen on.
        :param pem_dir: directory including certificates, or a falsy value
                        for an insecure connection.
        :return: None
        """
        # Create gRPC server
        server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
        SSE.add_ConnectorServicer_to_server(self, server)

        if pem_dir:
            # Secure connection: load server key/cert plus the root cert used
            # to verify client certificates (mutual TLS; last arg True
            # requires client auth).
            with open(os.path.join(pem_dir, 'sse_server_key.pem'), 'rb') as f:
                private_key = f.read()
            with open(os.path.join(pem_dir, 'sse_server_cert.pem'), 'rb') as f:
                cert_chain = f.read()
            with open(os.path.join(pem_dir, 'root_cert.pem'), 'rb') as f:
                root_cert = f.read()
            credentials = grpc.ssl_server_credentials(
                [(private_key, cert_chain)], root_cert, True)
            server.add_secure_port('[::]:{}'.format(port), credentials)
            logging.info(
                '*** Running server in secure mode on port: {} ***'.format(
                    port))
        else:
            # Insecure connection
            server.add_insecure_port('[::]:{}'.format(port))
            logging.info(
                '*** Running server in insecure mode on port: {} ***'.format(
                    port))

        # Start gRPC server; grpc serves on worker threads, so keep the main
        # thread alive until Ctrl-C.
        server.start()
        try:
            while True:
                time.sleep(_ONE_DAY_IN_SECONDS)
        except KeyboardInterrupt:
            server.stop(0)
class ExtensionService(SSE.ConnectorServicer):
    """
    An SSE-plugin exposing two user defined functions: mean-imputation of
    missing values (``_impute``) and a sum over all received numerics
    (``_SomaLinha``).
    (Fixed: the previous docstring described this as the "HelloWorld
    example", which it is not.)
    """

    def __init__(self, funcdef_file):
        """
        Class initializer.
        :param funcdef_file: a function definition JSON file
        """
        self._function_definitions = funcdef_file
        self.ScriptEval = ScriptEval()
        # The 'logs' directory must exist before the file handlers declared
        # in logger.config try to open their log files.
        os.makedirs('logs', exist_ok=True)
        log_file = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                'logger.config')
        logging.config.fileConfig(log_file)
        logging.info('Logging enabled')

    @property
    def function_definitions(self):
        """
        :return: json file with function definitions
        """
        return self._function_definitions

    @property
    def functions(self):
        """
        :return: Mapping of function id and implementation
        """
        return {
            0: '_impute',
            1: '_SomaLinha',
        }

    @staticmethod
    def _get_function_id(context):
        """
        Retrieve function id from header.
        :param context: context
        :return: function id
        """
        metadata = dict(context.invocation_metadata())
        header = SSE.FunctionRequestHeader()
        header.ParseFromString(metadata['qlik-functionrequestheader-bin'])
        return header.functionId

    """
    Implementation of added functions.
    """

    @staticmethod
    def _SomaLinha(request):
        """
        Sum all numeric values received (three per row) and yield the total
        as a single row.
        :param request: iterable sequence of bundled rows, three numeric
                        duals per row
        :return: yields one bundled row holding the overall sum
        """
        params = []
        for request_rows in request:
            for row in request_rows.rows:
                p1 = [d.numData for d in row.duals][0]
                p2 = [d.numData for d in row.duals][1]
                p3 = [d.numData for d in row.duals][2]
                params.append(p1)
                params.append(p2)
                params.append(p3)
        result = sum(params)
        print('Variaveis: ', params, ' Soma é: ', result)
        duals = iter([SSE.Dual(numData=result)])
        yield SSE.BundledRows(rows=[SSE.Row(duals=duals)])

    @staticmethod
    def _impute(request):
        """
        Mean-impute missing 'High' values and append a per-row sum column.

        Builds a DataFrame from the incoming (Age, High, Weight) rows, runs
        sklearn's SimpleImputer (mean strategy) over it, adds a 'sum' column,
        and yields the result as one bundled-rows message of four numeric
        duals per row.
        :param request: iterable sequence of bundled rows, three numeric
                        duals per row
        :return: yields the imputed rows plus the per-row sum
        """
        df = pd.DataFrame(columns=['Age', 'High', "Weight"])
        # Iterate over bundled rows
        i = -1
        for request_rows in request:
            # Iterate over rows
            for row in request_rows.rows:
                i += 1
                Age = [d.numData for d in row.duals][0]
                High = [d.numData for d in row.duals][1]
                Weight = [d.numData for d in row.duals][2]
                df.loc[i] = [Age, High, Weight]
        # the print statement just to check values in cmd
        print(df)
        # BUGFIX: the original chained assignment
        # `df.High[df['High'].isnull()] = np.nan` assigns through a temporary
        # (pandas SettingWithCopyWarning) and may silently not modify df.
        # Use .loc so missing entries are guaranteed to be np.nan, which is
        # the marker SimpleImputer looks for.
        df.loc[df['High'].isnull(), 'High'] = np.nan
        # use simple imputer with mean strategy
        imp = SimpleImputer(missing_values=np.nan, strategy='mean')
        df = imp.fit_transform(df)
        # fit_transform returns a plain ndarray; wrap it back in a DataFrame
        df = pd.DataFrame(df)
        # calculate sum of columns for each row, just an additional operation
        result = df.sum(axis=1)
        df['sum'] = result
        # the print statement just to check values in cmd
        print(df)
        data = df.values.tolist()
        response_rows = [
            iter([
                SSE.Dual(numData=row[0]),
                SSE.Dual(numData=row[1]),
                SSE.Dual(numData=row[2]),
                SSE.Dual(numData=row[3])
            ]) for row in data
        ]
        response_rows = [SSE.Row(duals=duals) for duals in response_rows]
        # Yield Row data as Bundled rows
        yield SSE.BundledRows(rows=response_rows)

    """Implementation of rpc functions."""

    def GetCapabilities(self, request, context):
        """
        Get capabilities.
        Note that either request or context is used in the implementation of
        this method, but still added as parameters. The reason is that gRPC
        always sends both when making a function call and therefore we must
        include them to avoid error messages regarding too many parameters
        provided from the client.
        :param request: the request, not used in this method.
        :param context: the context, not used in this method.
        :return: the capabilities.
        """
        logging.info('GetCapabilities')
        # Create an instance of the Capabilities grpc message
        # Enable(or disable) script evaluation
        # Set values for pluginIdentifier and pluginVersion
        capabilities = SSE.Capabilities(allowScript=True,
                                        pluginIdentifier='impute data - Qlik',
                                        pluginVersion='v1.1.0')
        # If user defined functions supported, add the definitions to the message
        with open(self.function_definitions) as json_file:
            # Iterate over each function definition and add data to the
            # capabilities grpc message
            for definition in json.load(json_file)['Functions']:
                function = capabilities.functions.add()
                function.name = definition['Name']
                function.functionId = definition['Id']
                function.functionType = definition['Type']
                function.returnType = definition['ReturnType']
                # Retrieve name and type of each parameter
                for param_name, param_type in sorted(
                        definition['Params'].items()):
                    function.params.add(name=param_name, dataType=param_type)
                logging.info('Adding to capabilities: {}({})'.format(
                    function.name, [p.name for p in function.params]))
        return capabilities

    def ExecuteFunction(self, request_iterator, context):
        """
        Execute function call.
        :param request_iterator: an iterable sequence of Row.
        :param context: the context.
        :return: an iterable sequence of Row.
        """
        # Retrieve function id
        func_id = self._get_function_id(context)
        # Call corresponding function; the local implementations take only
        # the request iterator (no context).
        logging.info('ExecuteFunction (functionId: {})'.format(func_id))
        return getattr(self, self.functions[func_id])(request_iterator)

    def EvaluateScript(self, request, context):
        """
        Evaluate a script request.

        This plugin provides functionality only for script calls with no
        parameters and tensor script calls.
        :param request: an iterable sequence of bundled rows.
        :param context: the gRPC context; carries the script request header.
        :return: an iterable sequence of bundled rows produced by ScriptEval.
        :raises grpc.RpcError: for unsupported function types.
        """
        # Parse header for script request
        metadata = dict(context.invocation_metadata())
        header = SSE.ScriptRequestHeader()
        header.ParseFromString(metadata['qlik-scriptrequestheader-bin'])
        # Retrieve function type
        func_type = self.ScriptEval.get_func_type(header)
        # Verify function type
        if (func_type == FunctionType.Aggregation) or (func_type
                                                       == FunctionType.Tensor):
            return self.ScriptEval.EvaluateScript(header, request, context,
                                                  func_type)
        else:
            # This plugin does not support other function types than
            # aggregation and tensor. Make sure the error handling, including
            # logging, works as intended in the client.
            msg = 'Function type {} is not supported in this plugin.'.format(
                func_type.name)
            context.set_code(grpc.StatusCode.UNIMPLEMENTED)
            context.set_details(msg)
            # Raise error on the plugin-side
            raise grpc.RpcError(grpc.StatusCode.UNIMPLEMENTED, msg)

    """
    Implementation of the Server connecting to gRPC.
    """

    def Serve(self, port, pem_dir):
        """
        Set up and run the gRPC server.

        A secure (TLS, mutual-auth) connection is used when a certificate
        directory is supplied; otherwise the server listens on an insecure
        port. Blocks until interrupted.
        :param port: port to listen on.
        :param pem_dir: directory including certificates, or a falsy value
                        for an insecure connection.
        :return: None
        """
        # Create gRPC server
        server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
        SSE.add_ConnectorServicer_to_server(self, server)

        if pem_dir:
            # Secure connection (mutual TLS; last arg True requires client
            # certificates signed by root_cert).
            with open(os.path.join(pem_dir, 'sse_server_key.pem'), 'rb') as f:
                private_key = f.read()
            with open(os.path.join(pem_dir, 'sse_server_cert.pem'), 'rb') as f:
                cert_chain = f.read()
            with open(os.path.join(pem_dir, 'root_cert.pem'), 'rb') as f:
                root_cert = f.read()
            credentials = grpc.ssl_server_credentials(
                [(private_key, cert_chain)], root_cert, True)
            server.add_secure_port('[::]:{}'.format(port), credentials)
            logging.info(
                '*** Running server in secure mode on port: {} ***'.format(
                    port))
        else:
            # Insecure connection
            server.add_insecure_port('[::]:{}'.format(port))
            logging.info(
                '*** Running server in insecure mode on port: {} ***'.format(
                    port))

        # Start gRPC server and keep the main thread alive until Ctrl-C.
        server.start()
        try:
            while True:
                time.sleep(_ONE_DAY_IN_SECONDS)
        except KeyboardInterrupt:
            server.stop(0)
class ExtensionService(SSE.ConnectorServicer):
    """
    SSE-plugin routing Qlik user defined functions to REST and websocket
    backends (QRAG gateway style); presumably each function's endpoint and
    caching behavior come from a 'qrag.ini'-style config — confirm against
    the module-level `config` object.
    """
    def __init__(self, funcdef_file):
        """
        Class initializer.
        :param funcdef_file: a function definition JSON file
        """
        self._function_definitions = funcdef_file
        self.ScriptEval = ScriptEval()
        # The 'logs' directory must exist before the file handlers declared
        # in logger.config try to open their log files.
        os.makedirs('logs', exist_ok=True)
        # NOTE: logger.config is looked up one directory ABOVE this module
        # (dirname applied twice), unlike the sibling plugin variants.
        log_file = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'logger.config')
        logging.config.fileConfig(log_file)
        logging.info(self._function_definitions)
        logging.info('Logging enabled')

    # NOTE(review): class attribute; the static methods below read the bare
    # name `function_name`, which resolves to a module-level global rebound
    # in ExecuteFunction — this attribute looks unused by them. Confirm.
    function_name = "none"

    @property
    def function_definitions(self):
        """
        :return: json file with function definitions
        """
        return self._function_definitions

    @property
    def functions(self):
        """
        :return: Mapping of function id and implementation
        """
        return {
            0: '_rest_single',
            1: '_rest_30',
            2: '_ws_single',
            3: '_ws_batch',
            4: '_get_table_data'
            # NOTE(review): commented-out alternative kept from the original;
            # it would duplicate id 4.
            #,
            #4: '_echo_table'
        }

    @staticmethod
    def _get_function_id(context):
        """
        Retrieve function id from header.
        :param context: context
        :return: function id
        """
        metadata = dict(context.invocation_metadata())
        header = SSE.FunctionRequestHeader()
        header.ParseFromString(metadata['qlik-functionrequestheader-bin'])
        return header.functionId

    @staticmethod
    def _get_table_data(request, context):
        """
        Echo the input table.

        Resolves the table name from the first incoming dual, then drives the
        `precog` helper through its token lifecycle (create token, fetch CSV
        result, clean up token) and yields the cleaned-up rows.
        Reads module-level globals `q_function_name`/`function_name` set by
        ExecuteFunction, and the module-level `config`.
        :param request: iterable sequence of bundled rows
        :param context: the gRPC context (used to disable caching)
        :return: yields bundled rows
        """
        logging.info('Entering {} TimeStamp: {}' .format(function_name, datetime.now().strftime("%H:%M:%S.%f")))
        url = config.get(q_function_name, 'url')
        logging.debug("Rest Url is set to {}" .format(url))
        bCache = config.get(q_function_name, 'cache')
        logging.debug("Caching is set to {}" .format(bCache))
        if (bCache.lower() == "true"):
            logging.info("Caching ****Enabled*** for {}" .format(q_function_name))
        else:
            # Tell Qlik not to cache results for this call.
            logging.info("Caching ****Disabled**** for {}" .format(q_function_name))
            md = (('qlik-cache', 'no-store'),)
            context.send_initial_metadata(md)
        #'Get The Table Name'
        for request_rows in request:
            response_rows = []
            temp = MessageToDict(request_rows)
            table_name = temp["rows"][0]["duals"][0]["strData"]
            logging.debug("Table Name : {}" .format(table_name))
            #'Get The JSON Key And Values for Table'
            table_id = precog.get_table_id(table_name, url)
            print(table_id[0])
            print(table_id[1])
            #try catch to catch bad url and kick out if resp is not 2000
            logging.debug('Input Table Name: {} Table ID: {}' .format(table_name, table_id[0]))
            create_token_tuple = precog.create_token(url, table_id[0])
            print(create_token_tuple)
            print(precog.get_count_of_all_tokens(url))
            new_token = create_token_tuple[0]
            new_secret = create_token_tuple[1]
            # NOTE(review): `response` and the parsed CSV below are computed
            # but never used in the yielded result.
            response = create_token_tuple[2]
            result = precog.get_result_csv(url, new_secret)
            print(result[0])
            output_str = result[1]
            print(output_str)
            parsed_csv = precog.convert_csv(result[1])
            print(parsed_csv)
            resp_clean = precog.cleanup_token(new_token, table_id[0], url)
            print(resp_clean)
            print(precog.get_count_of_all_tokens(url))
            # NOTE(review): `bundledRows` is created but unused; the yield
            # builds a fresh message from `resp_clean` directly.
            bundledRows = SSE.BundledRows()
            yield SSE.BundledRows(rows=resp_clean)

    @staticmethod
    def _rest_single(request, context):
        """
        Rest using single variable.

        POSTs each incoming string value to the configured REST url as
        '{"data":"<value>"}' and returns the (de-quoted, stripped) response
        text, one dual per input row. Reads module-level globals
        `q_function_name`/`function_name` and `config`.
        :param request: iterable sequence of bundled rows
        :param context: the gRPC context (used to disable caching)
        :return: yields one bundled-rows message with all responses
        """
        logging.info('Entering {} TimeStamp: {}' .format(function_name, datetime.now().strftime("%H:%M:%S.%f")))
        url = config.get(q_function_name, 'url')
        logging.debug("Rest Url is set to {}" .format(url))
        bCache = config.get(q_function_name, 'cache')
        logging.debug("Caching is set to {}" .format(bCache))
        if (bCache.lower() == "true"):
            logging.info("Caching ****Enabled*** for {}" .format(q_function_name))
        else:
            # Tell Qlik not to cache results for this call.
            logging.info("Caching ****Disabled**** for {}" .format(q_function_name))
            md = (('qlik-cache', 'no-store'),)
            context.send_initial_metadata(md)
        response_rows = []
        request_counter = 1
        for request_rows in request:
            logging.debug('Printing Request Rows - Request Counter {}' .format(request_counter))
            request_counter = request_counter + 1
            #temp = MessageToDict(request_rows)
            #test_rows = temp['rows']
            #request_size = len(test_rows)
            #logging.debug('Bundled Row Number of Rows - {}' .format(request_size))
            for row in request_rows.rows:
                # Retrieve string value of parameter and append to the params variable
                # Length of param is 1 since one column is received, the [0] collects the first value in the list
                param = [d.strData for d in row.duals][0]
                # NOTE(review): payload is built by string concatenation, not
                # json.dumps — a quote in `param` would break the JSON.
                payload = '{"data":"' + param + '"}'
                logging.debug('Showing Payload: {}'.format(payload))
                resp = requests.post(url, data=payload)
                logging.debug('Show Payload Response as Text: {}'.format(resp.text))
                result = resp.text
                result = result.replace('"', '')
                result = result.strip()
                logging.debug('Show Result: {}'.format(result))
                #Create an iterable of dual with the result
                duals = iter([SSE.Dual(strData=result)])
                response_rows.append(SSE.Row(duals=duals))
        # Yield the row data as bundled rows
        yield SSE.BundledRows(rows=response_rows)
        logging.info('Exiting {} TimeStamp: {}' .format(function_name, datetime.now().strftime("%H:%M:%S.%f")))

    @staticmethod
    def _ws_single(request, context):
        """
        Single Row Processing for Websockets.

        Opens one websocket connection, sends each incoming string value as
        '{"action":<route>,"data":<value>}' and collects the 'result' field
        of each JSON reply. Empty parameters yield the literal 'Error'.
        Reads module-level globals `q_function_name`/`function_name` and
        `config`.
        :param request: iterable sequence of bundled rows
        :param context: the gRPC context (used to disable caching)
        :return: yields one bundled-rows message with all results
        """
        logging.info('Entering {} TimeStamp: {}' .format(function_name, datetime.now().strftime("%H:%M:%S.%f")))
        #Start by Gathering Environmental Varaiable
        host = socket.gethostname()
        ip_addr = socket.gethostbyname(host)
        ws_url = config.get(q_function_name, 'ws_url')
        token = config.get(q_function_name, 'token')
        user_name = config.get(q_function_name, 'username')
        ws_route = config.get(q_function_name, 'ws_route')
        bCache = config.get(q_function_name, 'cache')
        logging.debug('Pringint Route for WS {}' .format(ws_route))
        logging.debug("Caching is set to {}" .format(bCache))
        if (bCache.lower() == "true"):
            logging.info("Caching ****Enabled*** for {}" .format(q_function_name))
        else:
            # Tell Qlik not to cache results for this call.
            logging.info("Caching ****Disabled**** for {}" .format(q_function_name))
            md = (('qlik-cache', 'no-store'),)
            context.send_initial_metadata(md)
        #In Future we will use the Token for Liencensing and Throttling
        #Currently we are using Comblination of host+ipaddr+username for Client Identification
        ws_url = ws_url + host + '_' + ip_addr + '_' + user_name + '_'
        logging.debug('Websocket URL : {}' .format(ws_url))
        ws = create_connection(ws_url)
        response_rows = []
        for request_rows in request:
            # Iterate over rows
            # Default code
            for row in request_rows.rows:
                # Retrieve string value of parameter and append to the params variable
                # Length of param is 1 since one column is received, the [0] collects the first value in the list
                param = [d.strData for d in row.duals][0]
                result = ''
                if (len(param) == 0):
                    logging.debug('Parameters are Empty')
                    result = 'Error'
                else:
                    payload = '{"action":"' + ws_route + '","data":"' + param + '"}'
                    logging.debug('Showing Payload: {}'.format(payload))
                    ws.send(payload)
                    #logging.info('Show Payload Response: {}'.format(resp.text))
                    resp = json.loads(ws.recv())
                    logging.debug(resp)
                    result = resp['result']
                    logging.debug('Show Result: {}'.format(result))
                # Create an iterable of dual with the result
                duals = iter([SSE.Dual(strData=result)])
                response_rows.append(SSE.Row(duals=duals))
        # Yield the row data as bundled rows
        yield SSE.BundledRows(rows=response_rows)
        ws.close()
        logging.info('Exiting {} TimeStamp: {}' .format(function_name, datetime.now().strftime("%H:%M:%S.%f")))

    @staticmethod
    def _ws_batch(request, context):
        """
        Batched websocket processing.

        Splits each incoming bundle into `batch_size` chunks (via
        qlist.divide_chunks), sends each chunk in one websocket message and
        then receives one reply PER ROW of the chunk, collecting the
        'result' field of each. Reads module-level globals
        `q_function_name`/`function_name` and `config`.
        :param request: iterable sequence of bundled rows
        :param context: the gRPC context (used to disable caching)
        :return: yields one bundled-rows message with all results
        """
        logging.info('Entering {} TimeStamp: {}' .format(function_name, datetime.now().strftime("%H:%M:%S.%f")))
        host = socket.gethostname()
        ip_addr = socket.gethostbyname(host)
        logging.debug('Calling qrag.ini section "{}' .format(q_function_name))
        ws_url = config.get(q_function_name, 'ws_url')
        token = config.get(q_function_name, 'token')
        user_name = config.get(q_function_name, 'username')
        batch_size = int(config.get(q_function_name, 'batch_size'))
        logging.debug('Batch Size {}' .format(batch_size))
        ws_route = config.get(q_function_name, 'ws_route')
        #ws_route= '"' + ws_route + '"'
        logging.info('API Route : {}' .format(ws_route))
        # setup Caching
        bCache = config.get(q_function_name, 'cache')
        logging.debug("Caching is set to {}" .format(bCache))
        if (bCache.lower() == "true"):
            logging.info("Caching ****Enabled*** for {}" .format(q_function_name))
        else:
            # Tell Qlik not to cache results for this call.
            logging.info("Caching ****Disabled**** for {}" .format(q_function_name))
            md = (('qlik-cache', 'no-store'),)
            context.send_initial_metadata(md)
        ws_url = ws_url + host + '_' + ip_addr + '_' + user_name + '_'
        logging.debug('Full url for ws: {} '.format(ws_url))
        ws = create_connection(ws_url)
        response_rows = []
        outer_counter = 1
        inner_counter = 1
        request_counter = 1
        for request_rows in request:
            logging.debug('Printing Request Rows - Request Counter {}' .format(request_counter))
            request_counter += 1
            temp = MessageToDict(request_rows)
            logging.debug('Temp Message to Dict {}' .format(temp))
            test_rows = temp['rows']
            logging.debug('Test Rows: {}' .format(test_rows))
            request_size = len(test_rows)
            logging.debug('Bundled Row Number of Rows - {}' .format(request_size))
            batches = list(qlist.divide_chunks(test_rows, batch_size))
            for i in batches:
                payload_t = {"action": ws_route}
                logging.debug('PreFix Route Seletection {}' .format(payload_t))
                logging.debug(len(batches))
                payload_t["data"] = i
                logging.debug('Size of payload {}' .format(pysize.get_size(payload_t)))
                logging.debug('Showing Payload: {}'.format(payload_t))
                logging.debug('batch number {}'.format(outer_counter))
                ws.send(json.dumps(payload_t))
                logging.debug('message sent WS')
                outer_counter += 1
                # Payload cleared after send so the next batch starts fresh.
                payload_t.clear()
                for j in i:
                    #logging.debug("Priniting i {}" .format(i))
                    # One ws.recv() is expected per row of the sent chunk.
                    resp = json.loads(ws.recv())
                    #logging.debug('Response Type : {}' .format(type(resp)))
                    logging.debug('Counter: {} Payload Size: {} Payload Response: {}'.format(inner_counter, pysize.get_size(resp), resp))
                    inner_counter += 1
                    result = resp['result']
                    logging.debug('Log Resulst: {}' .format(result))
                    duals = iter([SSE.Dual(strData=result)])
                    #logging.debug(duals)
                    #logging.debug('Printing Duals {}' .format(duals))
                    #Yield the row data as bundled rows
                    response_rows.append(SSE.Row(duals=duals))
                    logging.debug('Exiting Inner Loop: Printing j {}' .format(j))
        yield SSE.BundledRows(rows=response_rows)
        ws.close()
        logging.info('Exiting {} TimeStamp: {}' .format(function_name, datetime.now().strftime("%H:%M:%S.%f")))

    @staticmethod
    def _rest_30(request, context):
        """
        Aggregates the parameters to a single comma separated string.

        For each incoming row, joins all string duals with commas, POSTs
        '{"data":"a,b,..."}' to the configured REST url, and returns the
        (de-quoted, stripped) response text per row. Reads module-level
        globals `q_function_name`/`function_name` and `config`.
        :param request: iterable sequence of bundled rows
        :param context: the gRPC context (used to disable caching)
        :return: yields one bundled-rows message with all responses
        """
        logging.info('Entering {} TimeStamp: {}' .format(function_name, datetime.now().strftime("%H:%M:%S.%f")))
        url = config.get(q_function_name, 'url')
        bCache = config.get(q_function_name, 'cache')
        logging.debug("Caching is set to {}" .format(bCache))
        if (bCache.lower() == "true"):
            logging.info("Caching ****Enabled*** for {}" .format(q_function_name))
        else:
            # Tell Qlik not to cache results for this call.
            logging.info("Caching ****Disabled**** for {}" .format(q_function_name))
            md = (('qlik-cache', 'no-store'),)
            context.send_initial_metadata(md)
        # Iterate over bundled rows
        response_rows = []
        for request_rows in request:
            # Iterate over rows
            for row in request_rows.rows:
                # Retrieve string value of parameter and append to the params variable
                # Length of param is 1 since one column is received, the [0] collects the first value in the list
                param = [d.strData for d in row.duals]
                #logging.info('Showing Payload: {}'.format(param))
                #Aggregate parameters to a single string
                #Join payload via =','.join(param)
                payload = '{"data":"' + (','.join(param)) + '"}'
                logging.debug('Showing Payload: {}'.format(payload))
                resp = requests.post(url, data=payload)
                logging.debug('Show Payload Response: {}'.format(resp.text))
                result = resp.text
                result = result.replace('"', '')
                result = result.strip()
                logging.debug('Show Result: {}'.format(result))
                # Create an iterable of dual with the result
                duals = iter([SSE.Dual(strData=result)])
                response_rows.append(SSE.Row(duals=duals))
        # Yield the row data as bundled rows
        yield SSE.BundledRows(rows=response_rows)
        logging.info('Exiting Predict v2 TimeStamp: {}' .format(datetime.now().strftime("%H:%M:%S.%f")))

    @staticmethod
    def _cache(request, context):
        """
        Cache enabled. Add the datetime stamp to the end of each string value.
        :param request: iterable sequence of bundled rows
        :param context: not used.
        :return: string
        """
        # Iterate over bundled rows
        for request_rows in request:
            # Iterate over rows
            for row in request_rows.rows:
                # Retrieve string value of parameter and append to the params variable
                # Length of param is 1 since one column is received, the [0] collects the first value in the list
                param = [d.strData for d in row.duals][0]
                # Join with current timedate stamp
                result = param + ' ' + datetime.now().isoformat()
                # Create an iterable of dual with the result
                duals = iter([SSE.Dual(strData=result)])
                # Yield the row data as bundled rows
                yield SSE.BundledRows(rows=[SSE.Row(duals=duals)])

    @staticmethod
    def _no_cache(request, context):
        """
        Cache disabled. Add the datetime stamp to the end of each string value.
        :param request: iterable sequence of bundled rows
        :param context: used for disabling the cache in the header.
        :return: string
        """
        # Disable caching.
        md = (('qlik-cache', 'no-store'),)
        context.send_initial_metadata(md)
        # Iterate over bundled rows
        for request_rows in request:
            # Iterate over rows
            for row in request_rows.rows:
                # Retrieve string value of parameter and append to the params variable
                # Length of param is 1 since one column is received, the [0] collects the first value in the list
                param = [d.strData for d in row.duals][0]
                # Join with current timedate stamp
                result = param + ' ' + datetime.now().isoformat()
                # Create an iterable of dual with the result
                duals = iter([SSE.Dual(strData=result)])
                # Yield the row data as bundled rows
                yield SSE.BundledRows(rows=[SSE.Row(duals=duals)])

    def _get_call_info(self, context):
        """
        Retreive useful information for the function call.
        :param context: context
        :return: string containing header info
        """
        # Get metadata for the call from the context
        metadata = dict(context.invocation_metadata())
        # Get the function ID
        func_header = SSE.FunctionRequestHeader()
        func_header.ParseFromString(metadata['qlik-functionrequestheader-bin'])
        func_id = func_header.functionId
        # Get the common request header
        common_header = SSE.CommonRequestHeader()
        common_header.ParseFromString(metadata['qlik-commonrequestheader-bin'])
        # Get capabilities (cached on the instance after the first call)
        if not hasattr(self, 'capabilities'):
            self.capabilities = self.GetCapabilities(None, context)
        # Get the name of the capability called in the function
        capability = [function.name for function in self.capabilities.functions if function.functionId == func_id][0]
        # Get the user ID using a regular expression
        match = re.match(r"UserDirectory=(?P<UserDirectory>\w*)\W+UserId=(?P<UserId>\w*)", common_header.userId, re.IGNORECASE)
        if match:
            userId = match.group('UserDirectory') + '/' + match.group('UserId')
        else:
            userId = common_header.userId
        # Get the app ID
        appId = common_header.appId
        # Get the call's origin
        peer = context.peer()
        return "{0} - Capability '{1}' called by user {2} from app {3}".format(peer, capability, userId, appId)

    def EvaluateScript(self, request, context):
        """
        Evaluate a script request.

        This plugin supports full script functionality, that is, all
        function types and all data types.
        :param request: an iterable sequence of bundled rows.
        :param context: the gRPC context; carries the script request header.
        :return: an iterable sequence of bundled rows produced by ScriptEval.
        """
        logging.debug('In EvaluateScript: Main')
        # Parse header for script request
        metadata = dict(context.invocation_metadata())
        # NOTE(review): '{}'.format-style placeholder with a positional
        # logging arg — the dict is never interpolated; probably
        # 'Metadata {}'.format(metadata) was intended. Left as-is.
        logging.debug('Metadata {}', metadata)
        header = SSE.ScriptRequestHeader()
        header.ParseFromString(metadata['qlik-scriptrequestheader-bin'])
        logging.debug('Header is : {}'.format(header))
        logging.debug('Request is : {}' .format(request))
        logging.debug("Context is: {}" .format(context))
        return self.ScriptEval.EvaluateScript(header, request, context)

    @staticmethod
    def _echo_table(request, context):
        """
        Echo the input table.
        :param request: iterable sequence of bundled rows
        :param context: not used
        :return: yields each incoming bundle back unchanged
        """
        for request_rows in request:
            response_rows = []
            for row in request_rows.rows:
                response_rows.append(row)
            yield SSE.BundledRows(rows=response_rows)

    def GetCapabilities(self, request, context):
        """
        Get capabilities.
        Note that either request or context is used in the implementation of
        this method, but still added as parameters. The reason is that gRPC
        always sends both when making a function call and therefore we must
        include them to avoid error messages regarding too many parameters
        provided from the client.
        :param request: the request, not used in this method.
        :param context: the context, not used in this method.
        :return: the capabilities.
        """
        logging.info('GetCapabilities')
        # Create an instance of the Capabilities grpc message
        # Enable(or disable) script evaluation
        # Set values for pluginIdentifier and pluginVersion
        capabilities = SSE.Capabilities(allowScript=True, pluginIdentifier='Qlik Rapid API Gateway - Partner Engineering', pluginVersion='v0.1.0')
        # If user defined functions supported, add the definitions to the message
        with open(self.function_definitions) as json_file:
            # Iterate over each function definition and add data to the capabilities grpc message
            for definition in json.load(json_file)['Functions']:
                function = capabilities.functions.add()
                function.name = definition['Name']
                function.functionId = definition['Id']
                function.functionType = definition['Type']
                function.returnType = definition['ReturnType']
                # Retrieve name and type of each parameter
                for param_name, param_type in sorted(definition['Params'].items()):
                    function.params.add(name=param_name, dataType=param_type)
                logging.info('Adding to capabilities: {}({})'.format(function.name, [p.name for p in function.params]))
        return capabilities

    def ExecuteFunction(self, request_iterator, context):
        """
        Execute function call.

        Maps the Qlik function id to the logical function definition in the
        JSON file, publishes its name via the module-level globals
        `q_function_name`/`function_name` (read by the static handlers), and
        dispatches to the physical method derived from the definition's
        'QRAP_Type'.
        :param request_iterator: an iterable sequence of Row.
        :param context: the context.
        :return: an iterable sequence of Row.
        """
        func_id = self._get_function_id(context)
        logging.info(self._get_call_info(context))
        # Call corresponding function
        logging.info('ExecuteFunctions (functionId: {})' .format(func_id))
        #self.functions[func_id]))
        current_function_def = (json.load(open(self.function_definitions))['Functions'])[func_id]
        logging.debug(current_function_def)
        global q_function_name
        q_function_name = current_function_def["Name"]
        logging.debug('Logical Method Called is: {}' .format(q_function_name))
        current_qrap_type = current_function_def["QRAP_Type"]
        qrag_function_name = '_' + current_qrap_type
        logging.debug('This is the type of QRAG Method Name: {}' .format(current_qrap_type))
        logging.debug('Physical Method Called is: {}' .format(qrag_function_name))
        # Convers to Method Name to Physical Main Function
        qrag_id = qlist.find_key(self.functions, qrag_function_name)
        logging.debug('QRAG ID: {}' .format(qrag_id))
        global function_name
        function_name = self.functions[qrag_id]
        return getattr(self, self.functions[qrag_id])(request_iterator, context)

    def Serve(self, port, pem_dir):
        """
        Set up and run the gRPC server.

        A secure (TLS, mutual-auth) connection is used when a certificate
        directory is supplied; otherwise the server listens on an insecure
        port. Blocks until interrupted.
        :param port: port to listen on.
        :param pem_dir: Directory including certificates, or a falsy value
                        for an insecure connection.
        :return: None
        """
        # Create gRPC server
        server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
        SSE.add_ConnectorServicer_to_server(self, server)

        if pem_dir:
            # Secure connection (mutual TLS; last arg True requires client
            # certificates signed by root_cert).
            with open(os.path.join(pem_dir, 'sse_server_key.pem'), 'rb') as f:
                private_key = f.read()
            with open(os.path.join(pem_dir, 'sse_server_cert.pem'), 'rb') as f:
                cert_chain = f.read()
            with open(os.path.join(pem_dir, 'root_cert.pem'), 'rb') as f:
                root_cert = f.read()
            credentials = grpc.ssl_server_credentials([(private_key, cert_chain)], root_cert, True)
            server.add_secure_port('[::]:{}'.format(port), credentials)
            logging.info('*** Running server in secure mode on port: {} ***'.format(port))
        else:
            # Insecure connection
            server.add_insecure_port('[::]:{}'.format(port))
            logging.info('*** Running server in insecure mode on port: {} ***'.format(port))

        # Start gRPC server and keep the main thread alive until Ctrl-C.
        server.start()
        try:
            while True:
                time.sleep(_ONE_DAY_IN_SECONDS)
        except KeyboardInterrupt:
            server.stop(0)
class ExtensionService(SSE.ConnectorServicer):
    """
    A simple SSE-plugin created for the HelloWorld example.
    """

    def __init__(self, funcdef_file):
        """
        Class initializer.
        :param funcdef_file: a function definition JSON file
        """
        self._function_definitions = funcdef_file
        self.ScriptEval = ScriptEval()
        os.makedirs('logs', exist_ok=True)
        log_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'logger.config')
        logging.config.fileConfig(log_file)
        logging.info('Logging enabled')

    @property
    def function_definitions(self):
        """
        :return: json file with function definitions
        """
        return self._function_definitions

    @property
    def functions(self):
        """
        :return: Mapping of function id and implementation
        """
        return {0: '_predictPlayByPlay'}

    @staticmethod
    def _get_function_id(context):
        """
        Retrieve function id from header.
        :param context: context
        :return: function id
        """
        metadata = dict(context.invocation_metadata())
        header = SSE.FunctionRequestHeader()
        header.ParseFromString(metadata['qlik-functionrequestheader-bin'])
        return header.functionId

    """
    Implementation of added functions.
    """

    @staticmethod
    def _predictPlayByPlay(request, context):
        """
        Predict win probability for play-by-play rows.

        Each incoming row carries 12 string duals: model name, game id and the
        10 model features collected into a DataFrame.
        :param request: iterable sequence of bundled rows
        :param context: not used
        :return: bundled rows, one numeric dual (win probability) per input row
        """
        df = pd.DataFrame(columns=[
            'quarter', 'seconds_elapsed', 'offense_team', 'yardline', 'down',
            'yards_to_go', 'home_team', 'away_team', 'curr_home_score',
            'curr_away_score'
        ])

        # Iterate over bundled rows
        i = -1
        modelName = None
        for request_rows in request:
            # Iterate over rows
            for row in request_rows.rows:
                i += 1
                # Extract the string duals once per row instead of rebuilding
                # the list for every one of the 12 fields.
                vals = [d.strData for d in row.duals]
                modelName = vals[0]
                gameId = vals[1]  # currently unused, kept for readability
                (quarter, seconds_elapsed, offense_team, yardline, down,
                 yards_to_go, home_team, away_team, curr_home_score,
                 curr_away_score) = vals[2:12]
                df.loc[i] = [
                    quarter, int(seconds_elapsed), offense_team, int(yardline),
                    int(down), int(yards_to_go), home_team, away_team,
                    int(curr_home_score), int(curr_away_score)
                ]

        logging.info(modelName)
        # Load the pickled model. Use a context manager so the file handle is
        # closed (the original leaked it), and os.path.join for a portable
        # path (was the Windows-only 'models\\' + modelName).
        with open(os.path.join('models', modelName), 'rb') as model_file:
            model = p.load(model_file, encoding='latin1')
        wp = list(model.predict_wp(df))

        # Create an iterable of dual with the result
        duals = iter([[SSE.Dual(numData=d)] for d in wp])

        # Yield the row data as bundled rows
        yield SSE.BundledRows(rows=[SSE.Row(duals=d) for d in duals])

    """
    Implementation of rpc functions.
    """

    def GetCapabilities(self, request, context):
        """
        Get capabilities.
        Note that either request or context is used in the implementation of this method, but still added as
        parameters. The reason is that gRPC always sends both when making a function call and therefore we must include
        them to avoid error messages regarding too many parameters provided from the client.
        :param request: the request, not used in this method.
        :param context: the context, not used in this method.
        :return: the capabilities.
        """
        logging.info('GetCapabilities')

        # Create an instance of the Capabilities grpc message
        # Enable(or disable) script evaluation
        # Set values for pluginIdentifier and pluginVersion
        capabilities = SSE.Capabilities(allowScript=True,
                                        pluginIdentifier='Sentiment',
                                        pluginVersion='v1.1.0')

        # If user defined functions supported, add the definitions to the message
        with open(self.function_definitions) as json_file:
            # Iterate over each function definition and add data to the capabilities grpc message
            for definition in json.load(json_file)['Functions']:
                function = capabilities.functions.add()
                function.name = definition['Name']
                function.functionId = definition['Id']
                function.functionType = definition['Type']
                function.returnType = definition['ReturnType']

                # Retrieve name and type of each parameter
                for param_name, param_type in sorted(definition['Params'].items()):
                    function.params.add(name=param_name, dataType=param_type)

                logging.info('Adding to capabilities: {}({})'.format(
                    function.name, [p.name for p in function.params]))

        return capabilities

    def ExecuteFunction(self, request_iterator, context):
        """
        Execute function call.
        :param request_iterator: an iterable sequence of Row.
        :param context: the context.
        :return: an iterable sequence of Row.
        """
        # Retrieve function id
        func_id = self._get_function_id(context)

        # Call corresponding function
        logging.info('ExecuteFunction (functionId: {})'.format(func_id))
        return getattr(self, self.functions[func_id])(request_iterator, context)

    def EvaluateScript(self, request, context):
        """
        This plugin provides functionality only for script calls with no parameters and tensor script calls.
        :param request:
        :param context:
        :return:
        """
        # Parse header for script request
        metadata = dict(context.invocation_metadata())
        header = SSE.ScriptRequestHeader()
        header.ParseFromString(metadata['qlik-scriptrequestheader-bin'])

        # Retrieve function type
        func_type = self.ScriptEval.get_func_type(header)

        # Verify function type
        if (func_type == FunctionType.Aggregation) or (func_type == FunctionType.Tensor):
            return self.ScriptEval.EvaluateScript(header, request, context, func_type)
        else:
            # This plugin does not support other function types than aggregation and tensor.
            # Make sure the error handling, including logging, works as intended in the client
            msg = 'Function type {} is not supported in this plugin.'.format(func_type.name)
            context.set_code(grpc.StatusCode.UNIMPLEMENTED)
            context.set_details(msg)
            # Raise error on the plugin-side
            raise grpc.RpcError(grpc.StatusCode.UNIMPLEMENTED, msg)

    """
    Implementation of the Server connecting to gRPC.
    """

    def Serve(self, port, pem_dir):
        """
        Sets up the gRPC Server with insecure connection on port
        :param port: port to listen on.
        :param pem_dir: Directory including certificates
        :return: None
        """
        # Create gRPC server
        server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
        SSE.add_ConnectorServicer_to_server(self, server)

        if pem_dir:
            # Secure connection: load key/cert material and require client auth.
            with open(os.path.join(pem_dir, 'sse_server_key.pem'), 'rb') as f:
                private_key = f.read()
            with open(os.path.join(pem_dir, 'sse_server_cert.pem'), 'rb') as f:
                cert_chain = f.read()
            with open(os.path.join(pem_dir, 'root_cert.pem'), 'rb') as f:
                root_cert = f.read()
            credentials = grpc.ssl_server_credentials([(private_key, cert_chain)], root_cert, True)
            server.add_secure_port('[::]:{}'.format(port), credentials)
            logging.info('*** Running server in secure mode on port: {} ***'.format(port))
        else:
            # Insecure connection
            server.add_insecure_port('[::]:{}'.format(port))
            logging.info('*** Running server in insecure mode on port: {} ***'.format(port))

        # Start gRPC server and block until interrupted.
        server.start()
        try:
            while True:
                time.sleep(_ONE_DAY_IN_SECONDS)
        except KeyboardInterrupt:
            server.stop(0)
class ExtensionService(SSE.ConnectorServicer):
    """
    A simple SSE-plugin created for the Column Operations example.
    """

    def __init__(self, funcdef_file):
        """
        Class initializer.
        :param funcdef_file: a function definition JSON file
        """
        self._function_definitions = funcdef_file
        self.scriptEval = ScriptEval()
        os.makedirs('logs', exist_ok=True)
        log_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'logger.config')
        logging.config.fileConfig(log_file)
        logging.info('Logging enabled')

    @property
    def function_definitions(self):
        """
        :return: json file with function definitions
        """
        return self._function_definitions

    @property
    def functions(self):
        """
        :return: Mapping of function id and implementation
        """
        return {0: '_sum_of_rows', 1: '_sum_of_column', 2: '_max_of_columns_2'}

    """
    Implementation of added functions.
    """

    @staticmethod
    def _sum_of_rows(request, context):
        """
        Summarize two parameters row wise. Tensor function.
        :param request: an iterable sequence of RowData
        :param context:
        :return: the same iterable sequence of row data as received
        """
        # Iterate over bundled rows
        for request_rows in request:
            response_rows = []
            # Iterating over rows
            for row in request_rows.rows:
                # Retrieve the numerical value of the parameters
                # Two columns are sent from the client, hence the length of params will be 2
                params = [d.numData for d in row.duals]

                # Sum over each row
                result = sum(params)

                # Create an iterable of Dual with a numerical value
                duals = iter([SSE.Dual(numData=result)])

                # Append the row data constructed to response_rows
                response_rows.append(SSE.Row(duals=duals))

            # Yield Row data as Bundled rows
            yield SSE.BundledRows(rows=response_rows)

    @staticmethod
    def _sum_of_column(request, context):
        """
        Summarize the column sent as a parameter. Aggregation function.
        :param request: an iterable sequence of RowData
        :param context:
        :return: int, sum of column
        """
        params = []

        # Iterate over bundled rows
        for request_rows in request:
            # Iterating over rows
            for row in request_rows.rows:
                # Retrieve numerical value of parameter and append to the params variable
                # Length of param is 1 since one column is received, the [0] collects the first value in the list
                param = [d.numData for d in row.duals][0]
                params.append(param)

        # Sum all rows collected in the params variable
        result = sum(params)

        # Create an iterable of dual with numerical value
        duals = iter([SSE.Dual(numData=result)])

        # Yield the row data constructed
        yield SSE.BundledRows(rows=[SSE.Row(duals=duals)])

    @staticmethod
    def _max_of_columns_2(request, context):
        """
        Find max of each column. This is a table function.
        :param request: an iterable sequence of RowData
        :param context:
        :return: a table with numerical values, two columns and one row
        """
        result = [_MINFLOAT] * 2

        # Iterate over bundled rows
        for request_rows in request:
            # Iterating over rows
            for row in request_rows.rows:
                # Retrieve the numerical value of each parameter and update the
                # result variable if it's higher than the previously saved value.
                # (enumerate replaces the former index-based range loop.)
                for i, dual in enumerate(row.duals):
                    result[i] = max(result[i], dual.numData)

        # Create an iterable of dual with numerical value
        duals = iter([SSE.Dual(numData=r) for r in result])

        # Set and send Table header
        table = SSE.TableDescription(name='MaxOfColumns', numberOfRows=1)
        table.fields.add(name='Max1', dataType=SSE.NUMERIC)
        table.fields.add(name='Max2', dataType=SSE.NUMERIC)
        md = (('qlik-tabledescription-bin', table.SerializeToString()), )
        context.send_initial_metadata(md)

        # Yield the row data constructed
        yield SSE.BundledRows(rows=[SSE.Row(duals=duals)])

    @staticmethod
    def _get_function_id(context):
        """
        Retrieve function id from header.
        :param context: context
        :return: function id
        """
        metadata = dict(context.invocation_metadata())
        header = SSE.FunctionRequestHeader()
        header.ParseFromString(metadata['qlik-functionrequestheader-bin'])
        return header.functionId

    """
    Implementation of rpc functions.
    """

    def GetCapabilities(self, request, context):
        """
        Get capabilities.
        Note that either request or context is used in the implementation of this method, but still added as
        parameters. The reason is that gRPC always sends both when making a function call and therefore we must include
        them to avoid error messages regarding too many parameters provided from the client.
        :param request: the request, not used in this method.
        :param context: the context, not used in this method.
        :return: the capabilities.
        """
        logging.info('GetCapabilities')

        # Create an instance of the Capabilities grpc message
        # Enable(or disable) script evaluation
        # Set values for pluginIdentifier and pluginVersion
        capabilities = SSE.Capabilities(
            allowScript=True,
            pluginIdentifier='Column Operations - Qlik',
            pluginVersion='v1.1.0')

        # If user defined functions supported, add the definitions to the message
        with open(self.function_definitions) as json_file:
            # Iterate over each function definition and add data to the Capabilities grpc message
            for definition in json.load(json_file)['Functions']:
                function = capabilities.functions.add()
                function.name = definition['Name']
                function.functionId = definition['Id']
                function.functionType = definition['Type']
                function.returnType = definition['ReturnType']

                # Retrieve name and type of each parameter
                for param_name, param_type in sorted(definition['Params'].items()):
                    function.params.add(name=param_name, dataType=param_type)

                logging.info('Adding to capabilities: {}({})'.format(
                    function.name, [p.name for p in function.params]))

        return capabilities

    def ExecuteFunction(self, request_iterator, context):
        """
        Call corresponding function based on function id sent in header.
        :param request_iterator: an iterable sequence of RowData.
        :param context: the context.
        :return: an iterable sequence of RowData.
        """
        # Retrieve function id
        func_id = self._get_function_id(context)
        logging.info('ExecuteFunction (functionId: {})'.format(func_id))

        return getattr(self, self.functions[func_id])(request_iterator, context)

    def EvaluateScript(self, request, context):
        """
        Support script evaluation, based on different function and data types.
        :param request:
        :param context:
        :return:
        """
        # Retrieve header from request
        metadata = dict(context.invocation_metadata())
        header = SSE.ScriptRequestHeader()
        header.ParseFromString(metadata['qlik-scriptrequestheader-bin'])

        # Retrieve function type
        func_type = self.scriptEval.get_func_type(header)

        # Verify function type
        if (func_type == FunctionType.Tensor) or (func_type == FunctionType.Aggregation):
            return self.scriptEval.EvaluateScript(request, context, header, func_type)
        else:
            # This plugin does not support other function types than tensor and aggregation.
            # Make sure the error handling, including logging, works as intended in the client
            msg = 'Function type {} is not supported in this plugin.'.format(func_type.name)
            context.set_code(grpc.StatusCode.UNIMPLEMENTED)
            context.set_details(msg)
            # Raise error on the plugin-side
            raise grpc.RpcError(grpc.StatusCode.UNIMPLEMENTED, msg)

    """
    Implementation of the Server connecting to gRPC.
    """

    def Serve(self, port, pem_dir):
        """
        Server
        :param port: port to listen on.
        :param pem_dir: Directory including certificates
        :return: None
        """
        server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
        SSE.add_ConnectorServicer_to_server(self, server)

        if pem_dir:
            # Secure connection: load key/cert material and require client auth.
            with open(os.path.join(pem_dir, 'sse_server_key.pem'), 'rb') as f:
                private_key = f.read()
            with open(os.path.join(pem_dir, 'sse_server_cert.pem'), 'rb') as f:
                cert_chain = f.read()
            with open(os.path.join(pem_dir, 'root_cert.pem'), 'rb') as f:
                root_cert = f.read()
            credentials = grpc.ssl_server_credentials([(private_key, cert_chain)], root_cert, True)
            server.add_secure_port('[::]:{}'.format(port), credentials)
            logging.info('*** Running server in secure mode on port: {} ***'.format(port))
        else:
            # Insecure connection
            server.add_insecure_port('[::]:{}'.format(port))
            logging.info('*** Running server in insecure mode on port: {} ***'.format(port))

        server.start()
        try:
            while True:
                time.sleep(_ONE_DAY_IN_SECONDS)
        except KeyboardInterrupt:
            server.stop(0)
class ExtensionService(SSE.ConnectorServicer):
    """
    A simple SSE-plugin created for the HelloWorld example.
    """

    def __init__(self, funcdef_file):
        """
        Class initializer.
        :param funcdef_file: a function definition JSON file
        """
        self._function_definitions = funcdef_file
        self.ScriptEval = ScriptEval()
        os.makedirs('logs', exist_ok=True)
        # logger.config is expected one directory above this module.
        log_file = os.path.join(
            os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
            'logger.config')
        logging.debug(log_file)
        logging.config.fileConfig(log_file)
        logging.info(self._function_definitions)
        logging.info('Logging enabled')

    # NOTE(review): placement reconstructed as a class attribute; the methods
    # below actually read the module-level global of the same name that
    # ExecuteFunction assigns -- confirm against the original layout.
    function_name = "none"

    @property
    def function_definitions(self):
        """
        :return: json file with function definitions
        """
        return self._function_definitions

    @property
    def functions(self):
        """
        :return: Mapping of function id and implementation
        """
        return {
            0: '_instructors',
            1: '_user_info',
            2: '_user_workout',
            3: '_workout_details',
            4: '_echo_table'
        }

    @staticmethod
    def _get_function_id(context):
        """
        Retrieve function id from header.
        :param context: context
        :return: function id
        """
        metadata = dict(context.invocation_metadata())
        header = SSE.FunctionRequestHeader()
        header.ParseFromString(metadata['qlik-functionrequestheader-bin'])
        return header.functionId

    @staticmethod
    def _instructors(request, context):
        """
        Rest using single variable

        Calls the Peloton REST API for the instructor list; the endpoint URL
        and cache flag come from the module-level `config` parser, keyed by
        the global `q_function_name` set by ExecuteFunction.
        :param request: iterable sequence of bundled rows
        :param context: used to disable Qlik caching when configured
        :return: the input rows echoed back as bundled rows
        """
        logging.info('Entering {} TimeStamp: {}'.format(
            function_name, datetime.now().strftime("%H:%M:%S.%f")))
        url = config.get(q_function_name, 'url')
        logging.debug("Rest Url is set to {}".format(url))
        bCache = config.get(q_function_name, 'cache')
        logging.debug("Caching is set to {}".format(bCache))
        if (bCache.lower() == "true"):
            logging.info("Caching ****Enabled*** for {}".format(q_function_name))
        else:
            logging.info("Caching ****Disabled**** for {}".format(q_function_name))
            # Tell Qlik not to cache this call's result.
            md = (('qlik-cache', 'no-store'), )
            context.send_initial_metadata(md)
        # NOTE(review): the REST result is only logged, not returned to Qlik;
        # the input rows are echoed back unchanged -- confirm intended.
        instructors = peloton.get_instructors(url)
        logging.debug(instructors)
        for request_rows in request:
            response_rows = []
            for row in request_rows.rows:
                response_rows.append(row)
            yield SSE.BundledRows(rows=response_rows)
        logging.info('Exiting {} TimeStamp: {}'.format(
            function_name, datetime.now().strftime("%H:%M:%S.%f")))

    @staticmethod
    def _cache(request, context):
        """
        Cache enabled. Add the datetime stamp to the end of each string value.
        :param request: iterable sequence of bundled rows
        :param context: not used.
        :return: string
        """
        # Iterate over bundled rows
        for request_rows in request:
            # Iterate over rows
            for row in request_rows.rows:
                # Retrieve string value of parameter and append to the params variable
                # Length of param is 1 since one column is received, the [0] collects the first value in the list
                param = [d.strData for d in row.duals][0]

                # Join with current timedate stamp
                result = param + ' ' + datetime.now().isoformat()

                # Create an iterable of dual with the result
                duals = iter([SSE.Dual(strData=result)])

                # Yield the row data as bundled rows
                yield SSE.BundledRows(rows=[SSE.Row(duals=duals)])

    @staticmethod
    def _no_cache(request, context):
        """
        Cache disabled. Add the datetime stamp to the end of each string value.
        :param request:
        :param context: used for disabling the cache in the header.
        :return: string
        """
        # Disable caching.
        md = (('qlik-cache', 'no-store'), )
        context.send_initial_metadata(md)

        # Iterate over bundled rows
        for request_rows in request:
            # Iterate over rows
            for row in request_rows.rows:
                # Retrieve string value of parameter and append to the params variable
                # Length of param is 1 since one column is received, the [0] collects the first value in the list
                param = [d.strData for d in row.duals][0]

                # Join with current timedate stamp
                result = param + ' ' + datetime.now().isoformat()

                # Create an iterable of dual with the result
                duals = iter([SSE.Dual(strData=result)])

                # Yield the row data as bundled rows
                yield SSE.BundledRows(rows=[SSE.Row(duals=duals)])

    def _get_call_info(self, context):
        """
        Retrieve useful information for the function call.
        :param context: context
        :return: string containing header info
        """
        # Get metadata for the call from the context
        metadata = dict(context.invocation_metadata())

        # Get the function ID
        func_header = SSE.FunctionRequestHeader()
        func_header.ParseFromString(metadata['qlik-functionrequestheader-bin'])
        func_id = func_header.functionId

        # Get the common request header
        common_header = SSE.CommonRequestHeader()
        common_header.ParseFromString(metadata['qlik-commonrequestheader-bin'])

        # Get capabilities (cached on the instance after the first call)
        if not hasattr(self, 'capabilities'):
            self.capabilities = self.GetCapabilities(None, context)

        # Get the name of the capability called in the function
        capability = [
            function.name for function in self.capabilities.functions
            if function.functionId == func_id
        ][0]

        # Get the user ID using a regular expression
        match = re.match(
            r"UserDirectory=(?P<UserDirectory>\w*)\W+UserId=(?P<UserId>\w*)",
            common_header.userId, re.IGNORECASE)
        if match:
            userId = match.group('UserDirectory') + '/' + match.group('UserId')
        else:
            userId = common_header.userId

        # Get the app ID
        appId = common_header.appId

        # Get the call's origin
        peer = context.peer()

        return "{0} - Capability '{1}' called by user {2} from app {3}".format(
            peer, capability, userId, appId)

    @staticmethod
    def _echo_table(request, context):
        """
        Echo the input table.
        :param request:
        :param context:
        :return:
        """
        for request_rows in request:
            response_rows = []
            for row in request_rows.rows:
                response_rows.append(row)
            yield SSE.BundledRows(rows=response_rows)

    def EvaluateScript(self, request, context):
        """
        This plugin supports full script functionality, that is, all function types and all data types.
        :param request:
        :param context:
        :return:
        """
        logging.debug('In EvaluateScript: Main')
        # Parse header for script request
        metadata = dict(context.invocation_metadata())
        # NOTE(review): '{}' placeholder with logging's %-style lazy args will
        # not interpolate; metadata is passed but never formatted -- confirm.
        logging.debug('Metadata {}', metadata)
        header = SSE.ScriptRequestHeader()
        header.ParseFromString(metadata['qlik-scriptrequestheader-bin'])
        logging.debug('Header is : {}'.format(header))
        logging.debug('Request is : {}'.format(request))
        logging.debug("Context is: {}".format(context))
        return self.ScriptEval.EvaluateScript(header, request, context)

    def GetCapabilities(self, request, context):
        """
        Get capabilities.
        Note that either request or context is used in the implementation of this method, but still added as
        parameters. The reason is that gRPC always sends both when making a function call and therefore we must include
        them to avoid error messages regarding too many parameters provided from the client.
        :param request: the request, not used in this method.
        :param context: the context, not used in this method.
        :return: the capabilities.
        """
        logging.info('GetCapabilities')

        # Create an instance of the Capabilities grpc message
        # Enable(or disable) script evaluation
        # Set values for pluginIdentifier and pluginVersion
        capabilities = SSE.Capabilities(
            allowScript=True,
            pluginIdentifier='Qlik Rapid API Gateway - Partner Engineering',
            pluginVersion='v0.1.0')

        # If user defined functions supported, add the definitions to the message
        with open(self.function_definitions) as json_file:
            # Iterate over each function definition and add data to the capabilities grpc message
            for definition in json.load(json_file)['Functions']:
                function = capabilities.functions.add()
                function.name = definition['Name']
                function.functionId = definition['Id']
                function.functionType = definition['Type']
                function.returnType = definition['ReturnType']

                # Retrieve name and type of each parameter
                for param_name, param_type in sorted(definition['Params'].items()):
                    function.params.add(name=param_name, dataType=param_type)

                logging.info('Adding to capabilities: {}({})'.format(
                    function.name, [p.name for p in function.params]))

        return capabilities

    def ExecuteFunction(self, request_iterator, context):
        """
        Execute function call.
        :param request_iterator: an iterable sequence of Row.
        :param context: the context.
        :return: an iterable sequence of Row.
        """
        func_id = self._get_function_id(context)
        logging.info(self._get_call_info(context))

        # Call corresponding function
        logging.info('ExecuteFunctions (functionId: {})'.format(func_id))  # self.functions[func_id]))
        # Look up this function's definition to obtain its logical name and
        # QRAP type from the JSON file.
        current_function_def = (json.load(open(
            self.function_definitions))['Functions'])[func_id]
        logging.debug(current_function_def)
        # Publish the logical (Qlik-facing) name as a module-level global so
        # the dispatched implementation (e.g. _instructors) can read it.
        global q_function_name
        q_function_name = current_function_def["Name"]
        logging.debug('Logical Method Called is: {}'.format(q_function_name))
        current_qrap_type = current_function_def["QRAP_Type"]
        qrag_function_name = '_' + current_qrap_type
        logging.debug('This is the type of QRAG Method Name: {}'.format(
            current_qrap_type))
        logging.debug(
            'Physical Method Called is: {}'.format(qrag_function_name))
        # Converts the method name to the physical main-function id.
        qrag_id = qlist.find_key(self.functions, qrag_function_name)
        logging.debug('QRAG ID: {}'.format(qrag_id))
        global function_name
        function_name = self.functions[qrag_id]
        return getattr(self, self.functions[qrag_id])(request_iterator, context)

    def Serve(self, port, pem_dir):
        """
        Sets up the gRPC Server with insecure connection on port
        :param port: port to listen on.
        :param pem_dir: Directory including certificates
        :return: None
        """
        # Create gRPC server
        server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
        SSE.add_ConnectorServicer_to_server(self, server)

        if pem_dir:
            # Secure connection: load key/cert material and require client auth.
            with open(os.path.join(pem_dir, 'sse_server_key.pem'), 'rb') as f:
                private_key = f.read()
            with open(os.path.join(pem_dir, 'sse_server_cert.pem'), 'rb') as f:
                cert_chain = f.read()
            with open(os.path.join(pem_dir, 'root_cert.pem'), 'rb') as f:
                root_cert = f.read()
            credentials = grpc.ssl_server_credentials(
                [(private_key, cert_chain)], root_cert, True)
            server.add_secure_port('[::]:{}'.format(port), credentials)
            logging.info(
                '*** Running server in secure mode on port: {} ***'.format(
                    port))
        else:
            # Insecure connection
            server.add_insecure_port('[::]:{}'.format(port))
            logging.info(
                '*** Running server in insecure mode on port: {} ***'.format(
                    port))

        # Start gRPC server and block until interrupted.
        server.start()
        try:
            while True:
                time.sleep(_ONE_DAY_IN_SECONDS)
        except KeyboardInterrupt:
            server.stop(0)
class ExtensionService(SSE.ConnectorServicer):
    """
    A simple SSE-plugin created for the HelloWorld example.
    """

    def __init__(self, funcdef_file):
        """
        Class initializer.
        :param funcdef_file: a function definition JSON file
        """
        self._function_definitions = funcdef_file
        self.ScriptEval = ScriptEval()
        os.makedirs('logs', exist_ok=True)
        log_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'logger.config')
        logging.config.fileConfig(log_file)
        logging.info('Logging enabled')

    @property
    def function_definitions(self):
        """
        :return: json file with function definitions
        """
        return self._function_definitions

    @property
    def functions(self):
        """
        :return: Mapping of function id and implementation
        """
        return {
            0: '_marketBasket'
            # ADD YOUR FUNCTION HERE
        }

    @staticmethod
    def _get_function_id(context):
        """
        Retrieve function id from header.
        :param context: context
        :return: function id
        """
        metadata = dict(context.invocation_metadata())
        header = SSE.FunctionRequestHeader()
        header.ParseFromString(metadata['qlik-functionrequestheader-bin'])
        return header.functionId

    """
    Implementation of added functions.
    """

    @staticmethod
    def _marketBasket(request, context):
        """
        Run market-basket (association-rule) analysis over the input table.

        Each input row carries three numeric duals: order id, product id and
        a purchased quantity/flag. The rows are pivoted into an order-by-
        product basket matrix, mined with apriori / association_rules, and
        the resulting rules are returned as a five-column table:
        antecedent, consequent, support, confidence, lift.
        :param request: iterable sequence of bundled rows
        :param context: not used
        :return: bundled rows holding the association rules
        """
        orderIdList = []
        productIdList = []
        purchasedList = []
        for request_rows in request:
            for row in request_rows.rows:
                # Extract the three numeric duals once per row instead of
                # rebuilding the list for each field.
                nums = [d.numData for d in row.duals]
                orderIdList.append(nums[0])    # order id
                productIdList.append(nums[1])  # product id
                purchasedList.append(nums[2])  # purchased figure

        datafrm = pd.DataFrame({
            'orderId': orderIdList,
            'productId': productIdList,
            'purchased': purchasedList
        })
        logging.debug('Market basket input: %d rows', len(datafrm))

        # Pivot into an order x product basket matrix (orders as index,
        # products as columns, summed purchase figures as values).
        basket = (datafrm.groupby(
            ['orderId', 'productId'])['purchased'].sum().unstack().reset_index().fillna(
                0).set_index('orderId'))

        # Mine frequent itemsets, then derive association rules by lift.
        frequent_itemsets = apriori(basket, min_support=0.005, use_colnames=True)
        rules = association_rules(frequent_itemsets, metric="lift", min_threshold=1)

        # Each antecedent/consequent is a frozenset; take its first (only)
        # element as an int product id.
        antList = rules['antecedents'].values.tolist()
        antList = [list(x) for x in antList]
        antList = [int(x[0]) for x in antList]

        conList = rules['consequents'].values.tolist()
        conList = [list(x) for x in conList]
        conList = [int(x[0]) for x in conList]

        # Build the five output columns as SSE duals.
        dualsList = []
        dualsList.append([SSE.Dual(numData=d) for d in antList])
        dualsList.append([SSE.Dual(numData=d) for d in conList])
        dualsList.append(
            [SSE.Dual(numData=d) for d in rules['support'].values.tolist()])
        dualsList.append(
            [SSE.Dual(numData=d) for d in rules['confidence'].values.tolist()])
        dualsList.append(
            [SSE.Dual(numData=d) for d in rules['lift'].values.tolist()])

        # Transpose columns into response rows.
        response_rows = []
        for i in range(len(antList)):
            duals = [dualsList[z][i] for z in range(len(dualsList))]
            response_rows.append(SSE.Row(duals=iter(duals)))

        # (Removed leftover debug print() calls -- a gRPC service should log,
        # not write to stdout.)
        yield SSE.BundledRows(rows=response_rows)

    def GetCapabilities(self, request, context):
        """
        Get capabilities.
        Note that either request or context is used in the implementation of this method, but still added as
        parameters. The reason is that gRPC always sends both when making a function call and therefore we must include
        them to avoid error messages regarding too many parameters provided from the client.
        :param request: the request, not used in this method.
        :param context: the context, not used in this method.
        :return: the capabilities.
        """
        logging.info('GetCapabilities')

        # Create an instance of the Capabilities grpc message
        # Enable(or disable) script evaluation
        # Set values for pluginIdentifier and pluginVersion
        capabilities = SSE.Capabilities(allowScript=True,
                                        pluginIdentifier='Sentiment',
                                        pluginVersion='v1.1.0')

        # If user defined functions supported, add the definitions to the message
        with open(self.function_definitions) as json_file:
            # Iterate over each function definition and add data to the capabilities grpc message
            for definition in json.load(json_file)['Functions']:
                function = capabilities.functions.add()
                function.name = definition['Name']
                function.functionId = definition['Id']
                function.functionType = definition['Type']
                function.returnType = definition['ReturnType']

                # Retrieve name and type of each parameter
                for param_name, param_type in sorted(definition['Params'].items()):
                    function.params.add(name=param_name, dataType=param_type)

                logging.info('Adding to capabilities: {}({})'.format(
                    function.name, [p.name for p in function.params]))

        return capabilities

    def ExecuteFunction(self, request_iterator, context):
        """
        Execute function call.
        :param request_iterator: an iterable sequence of Row.
        :param context: the context.
        :return: an iterable sequence of Row.
        """
        # Retrieve function id
        func_id = self._get_function_id(context)

        # Call corresponding function
        logging.info('ExecuteFunction (functionId: {})'.format(func_id))
        return getattr(self, self.functions[func_id])(request_iterator, context)

    def EvaluateScript(self, request, context):
        """
        This plugin provides functionality only for script calls with no parameters and tensor script calls.
        :param request:
        :param context:
        :return:
        """
        # Parse header for script request
        metadata = dict(context.invocation_metadata())
        header = SSE.ScriptRequestHeader()
        header.ParseFromString(metadata['qlik-scriptrequestheader-bin'])

        # Retrieve function type
        func_type = self.ScriptEval.get_func_type(header)

        # Verify function type
        if (func_type == FunctionType.Aggregation) or (func_type == FunctionType.Tensor):
            return self.ScriptEval.EvaluateScript(header, request, context, func_type)
        else:
            # This plugin does not support other function types than aggregation and tensor.
            # Make sure the error handling, including logging, works as intended in the client
            msg = 'Function type {} is not supported in this plugin.'.format(func_type.name)
            context.set_code(grpc.StatusCode.UNIMPLEMENTED)
            context.set_details(msg)
            # Raise error on the plugin-side
            raise grpc.RpcError(grpc.StatusCode.UNIMPLEMENTED, msg)

    """
    Implementation of the Server connecting to gRPC.
    """

    def Serve(self, port, pem_dir):
        """
        Sets up the gRPC Server with insecure connection on port
        :param port: port to listen on.
        :param pem_dir: Directory including certificates
        :return: None
        """
        # Create gRPC server
        server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
        SSE.add_ConnectorServicer_to_server(self, server)

        if pem_dir:
            # Secure connection: load key/cert material and require client auth.
            with open(os.path.join(pem_dir, 'sse_server_key.pem'), 'rb') as f:
                private_key = f.read()
            with open(os.path.join(pem_dir, 'sse_server_cert.pem'), 'rb') as f:
                cert_chain = f.read()
            with open(os.path.join(pem_dir, 'root_cert.pem'), 'rb') as f:
                root_cert = f.read()
            credentials = grpc.ssl_server_credentials([(private_key, cert_chain)], root_cert, True)
            server.add_secure_port('[::]:{}'.format(port), credentials)
            logging.info('*** Running server in secure mode on port: {} ***'.format(port))
        else:
            # Insecure connection
            server.add_insecure_port('[::]:{}'.format(port))
            logging.info('*** Running server in insecure mode on port: {} ***'.format(port))

        # Start gRPC server and block until interrupted.
        server.start()
        try:
            while True:
                time.sleep(_ONE_DAY_IN_SECONDS)
        except KeyboardInterrupt:
            server.stop(0)
class ExtensionService(SSE.ConnectorServicer):
    """
    An SSE plugin that evaluates a SQL query over ODBC and returns the
    result as a Qlik Pick(FieldIndex(...)) expression string.

    NOTE(review): this class shadows the ExtensionService defined earlier
    in this module — confirm only one is meant to be registered.
    """

    def __init__(self, funcdef_file):
        """
        Class initializer.
        :param funcdef_file: path to a function definition JSON file
        """
        self._function_definitions = funcdef_file
        self.ScriptEval = ScriptEval()
        os.makedirs('logs', exist_ok=True)
        # logger.config is expected to live next to this source file
        log_file = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                'logger.config')
        logging.config.fileConfig(log_file)
        logging.info('Logging enabled')

    @property
    def function_definitions(self):
        """
        :return: json file with function definitions
        """
        return self._function_definitions

    @property
    def functions(self):
        """
        :return: Mapping of function id and implementation
        """
        return {0: '_liveSQL'}

    @staticmethod
    def _get_function_id(context):
        """
        Retrieve function id from header.
        :param context: context
        :return: function id
        """
        metadata = dict(context.invocation_metadata())
        header = SSE.FunctionRequestHeader()
        header.ParseFromString(metadata['qlik-functionrequestheader-bin'])
        return header.functionId

    """
    Implementation of added functions.
    """

    @staticmethod
    def _liveSQL(request, context):
        """
        Run a SQL query over ODBC and yield a single Qlik expression string.

        The incoming rows supply four string parameters, in order:
        the SQL query, a zero-based result-column index, the ODBC
        connection string, and the Qlik field name used in FieldIndex().
        :param request: iterable sequence of bundled rows.
        :param context: the gRPC context.
        :return: generator yielding one SSE.BundledRows with the expression.
        """
        # Disable cache so the query is re-evaluated on every call
        md = (('qlik-cache', 'no-store'), )
        context.send_initial_metadata(md)

        sqlQuery = None
        column = None
        connectionString = None
        indexField = None

        for request_rows in request:
            # pull duals from each row, and the strData from duals
            for row in request_rows.rows:
                params = [d.strData for d in row.duals]
                # BUGFIX: guard with `is None` instead of truthiness — a
                # column index of 0 is falsy and was re-parsed on every row
                if sqlQuery is None:
                    sqlQuery = params[0]
                if column is None:
                    column = int(params[1])
                if connectionString is None:
                    connectionString = params[2]
                if indexField is None:
                    indexField = params[3]

        # BUGFIX: close the ODBC connection when done (it was leaked)
        conn = pyodbc.connect(connectionString)
        try:
            data = pd.read_sql(sqlQuery, conn)
        finally:
            conn.close()

        # Stringify every cell so the tuple below renders uniformly
        data = data.applymap(str)
        columnName = data.columns[column]

        # Temporarily mask single quotes so str(tuple(...)) below does not
        # emit unescaped quotes; they are restored as Qlik-escaped '' after
        commaReplace = "COMMAREPLACE"
        data.replace({"'": commaReplace}, inplace=True, regex=True)
        values = data.iloc[:, column].values.tolist()

        # Build the Pick(FieldIndex(...)) expression.  str(tuple(...))[1:]
        # drops the leading '(' and keeps the trailing ')' of the tuple repr.
        expression = ("Pick(FieldIndex('" + indexField + "',[" + indexField +
                      "])," + str(tuple(values))[1:].replace(commaReplace, "''") +
                      " /**" + columnName + "**/")
        logging.debug('Generated expression: %s', expression)
        finalList = [expression]

        # Create an iterable of dual with the result
        duals = [[SSE.Dual(strData=d)] for d in finalList]

        # Yield the row data as bundled rows
        yield SSE.BundledRows(rows=[SSE.Row(duals=d) for d in duals])

    def GetCapabilities(self, request, context):
        """
        Get capabilities.
        Note that either request or context is used in the implementation
        of this method, but still added as parameters. The reason is that
        gRPC always sends both when making a function call and therefore
        we must include them to avoid error messages regarding too many
        parameters provided from the client.
        :param request: the request, not used in this method.
        :param context: the context, not used in this method.
        :return: the capabilities.
        """
        logging.info('GetCapabilities')

        # Create an instance of the Capabilities grpc message
        # Enable(or disable) script evaluation
        # Set values for pluginIdentifier and pluginVersion
        # NOTE(review): identifier 'Sentiment' looks copied from another
        # plugin — confirm the intended value
        capabilities = SSE.Capabilities(allowScript=True,
                                        pluginIdentifier='Sentiment',
                                        pluginVersion='v1.1.0')

        # If user defined functions supported, add the definitions to the message
        with open(self.function_definitions) as json_file:
            # Iterate over each function definition and add data to the capabilities grpc message
            for definition in json.load(json_file)['Functions']:
                function = capabilities.functions.add()
                function.name = definition['Name']
                function.functionId = definition['Id']
                function.functionType = definition['Type']
                function.returnType = definition['ReturnType']

                # Retrieve name and type of each parameter
                for param_name, param_type in sorted(
                        definition['Params'].items()):
                    function.params.add(name=param_name, dataType=param_type)

                logging.info('Adding to capabilities: {}({})'.format(
                    function.name, [p.name for p in function.params]))

        return capabilities

    def ExecuteFunction(self, request_iterator, context):
        """
        Execute function call.
        :param request_iterator: an iterable sequence of Row.
        :param context: the context.
        :return: an iterable sequence of Row.
        """
        # Retrieve function id from the request header set by the Qlik engine
        func_id = self._get_function_id(context)

        # Call corresponding function
        logging.info('ExecuteFunction (functionId: {})'.format(func_id))
        return getattr(self, self.functions[func_id])(request_iterator, context)

    def EvaluateScript(self, request, context):
        """
        Evaluate a script call from the Qlik engine.

        This plugin provides functionality only for script calls with no
        parameters and tensor script calls.
        :param request: an iterable sequence of bundled rows.
        :param context: the gRPC context.
        :return: an iterable sequence of bundled rows produced by the script.
        """
        # Parse header for script request
        metadata = dict(context.invocation_metadata())
        header = SSE.ScriptRequestHeader()
        header.ParseFromString(metadata['qlik-scriptrequestheader-bin'])

        # Retrieve function type
        func_type = self.ScriptEval.get_func_type(header)

        # Verify function type
        if (func_type == FunctionType.Aggregation) or (func_type == FunctionType.Tensor):
            return self.ScriptEval.EvaluateScript(header, request, context, func_type)
        else:
            # This plugin does not support other function types than aggregation and tensor.
            # Make sure the error handling, including logging, works as intended in the client
            msg = 'Function type {} is not supported in this plugin.'.format(
                func_type.name)
            context.set_code(grpc.StatusCode.UNIMPLEMENTED)
            context.set_details(msg)
            # Raise error on the plugin-side
            raise grpc.RpcError(grpc.StatusCode.UNIMPLEMENTED, msg)

    """
    Implementation of the Server connecting to gRPC.
    """

    def Serve(self, port, pem_dir):
        """
        Sets up the gRPC Server with insecure connection on port
        :param port: port to listen on.
        :param pem_dir: Directory including certificates
        :return: None
        """
        # Create gRPC server
        server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
        SSE.add_ConnectorServicer_to_server(self, server)

        if pem_dir:
            # Secure connection: read key, certificate chain and root cert
            with open(os.path.join(pem_dir, 'sse_server_key.pem'), 'rb') as f:
                private_key = f.read()
            with open(os.path.join(pem_dir, 'sse_server_cert.pem'), 'rb') as f:
                cert_chain = f.read()
            with open(os.path.join(pem_dir, 'root_cert.pem'), 'rb') as f:
                root_cert = f.read()
            # Final True enables mutual TLS: the client must present a cert
            credentials = grpc.ssl_server_credentials(
                [(private_key, cert_chain)], root_cert, True)
            server.add_secure_port('[::]:{}'.format(port), credentials)
            logging.info(
                '*** Running server in secure mode on port: {} ***'.format(
                    port))
        else:
            # Insecure connection
            server.add_insecure_port('[::]:{}'.format(port))
            logging.info(
                '*** Running server in insecure mode on port: {} ***'.format(
                    port))

        # Start gRPC server; keep the main thread alive until interrupted
        server.start()
        try:
            while True:
                time.sleep(_ONE_DAY_IN_SECONDS)
        except KeyboardInterrupt:
            server.stop(0)