Code example #1
    def _sum_of_rows(request):
        """
        Sum the two parameters row-wise. Tensor function.
        :param request: an iterable sequence of RowData
        :return: an iterable sequence of row data containing the row-wise sums
        """
        # Iterate over bundled rows
        for request_rows in request:
            response_rows = []
            # Iterating over rows
            for row in request_rows.rows:
                # Retrieve the numerical value of the parameters
                # Two columns are sent from the client, hence the length of params will be 2
                params = [d.numData for d in row.duals]

                # Sum over each row
                result = sum(params)

                # Create an iterable of Dual with a numerical value
                duals = iter([SSE.Dual(numData=result)])

                # Append the row data constructed to response_rows
                response_rows.append(SSE.Row(duals=duals))

            # Yield Row data as Bundled rows
            yield SSE.BundledRows(rows=response_rows)
Code example #2
    def evaluate(context, script, ret_type, params=[]):
        """
        Evaluates a script with given params.
        :param context: the context sent from client
        :param script:  script to evaluate
        :param ret_type: return data type
        :param params: params to evaluate. Default: []
        :return: bundled rows containing a numeric dual
        """
        if ret_type == ReturnType.Numeric:
            # Evaluate script
            result = eval(script, {'args': params})
            # Transform the result to an iterable of dual data
            duals = iter([SSE.Dual(numData=result)])

            # Create row data out of duals
            return SSE.BundledRows(rows=[SSE.Row(duals=duals)])
        else:
            # This plugin does not support other return types than numeric
            # Make sure the error handling, including logging, works as intended in the client
            msg = 'Return type {} is not supported in this plugin.'.format(
                ret_type)
            context.set_code(grpc.StatusCode.UNIMPLEMENTED)
            context.set_details(msg)
            # Raise error on the plugin-side
            raise grpc.RpcError(grpc.StatusCode.UNIMPLEMENTED, msg)
Code example #3
    def _sum_of_column(request):
        """
        Summarize the column sent as a parameter. Aggregation function.
        :param request: an iterable sequence of RowData
        :return: int, sum of column
        """
        params = []

        # Iterate over bundled rows
        for request_rows in request:
            # Iterating over rows
            for row in request_rows.rows:
                # Retrieve numerical value of parameter and append to the params variable
                # Length of param is 1 since one column is received, the [0] collects the first value in the list
                param = [d.numData for d in row.duals][0]
                params.append(param)

        # Sum all rows collected in the params variable
        result = sum(params)

        # Create an iterable of dual with numerical value
        duals = iter([SSE.Dual(numData=result)])

        # Yield the row data constructed
        yield SSE.BundledRows(rows=[SSE.Row(duals=duals)])
Code example #4
    def _get_sql(request):
        """
        GetSQL function, tensor
        """
        # Iterate over bundled rows
        for request_rows in request:
            response_rows = []
            # Iterating over rows
            for row in request_rows.rows:
                # Retrieve the string value of the parameter
                # Length is 1 since one column is received, the [0] collects the first value in the list
                param = [d.strData for d in row.duals][0]

                # This print is costly; remove it once the SQL is generating normally
                print(param)

                # Execute SQL command
                cursor.execute(param)
                results = cursor.fetchall()
                results = results[0][0]
                # Create an iterable of dual with numerical value
                duals = iter([SSE.Dual(numData=results)])
                response_rows.append(SSE.Row(duals=duals))

            # Yield the row data constructed
            yield SSE.BundledRows(rows=response_rows)
Code example #5
    def _hello_world_aggr(request, context):
        """
        Aggregates the parameters to a single comma separated string.
        :param request: iterable sequence of bundled rows
        :return: string
        """
        params = []

        # Iterate over bundled rows
        for request_rows in request:
            # Iterate over rows
            for row in request_rows.rows:
                # Retrieve string value of parameter and append to the params variable
                # Length of param is 1 since one column is received, the [0] collects the first value in the list
                param = [d.strData for d in row.duals][0]
                params.append(param)

        # Aggregate parameters to a single string
        result = ', '.join(params)

        # Create an iterable of dual with the result
        duals = iter([SSE.Dual(strData=result)])

        # Yield the row data as bundled rows
        yield SSE.BundledRows(rows=[SSE.Row(duals=duals)])
Code example #6
    def _max_of_columns_2(request, context):
        """
        Find max of each column. This is a table function.
        :param request: an iterable sequence of RowData
        :param context:
        :return: a table with numerical values, two columns and one row
        """

        result = [_MINFLOAT] * 2

        # Iterate over bundled rows
        for request_rows in request:
            # Iterating over rows
            for row in request_rows.rows:
                # Retrieve the numerical value of each parameter
                # and update the result variable if it's higher than the previously saved value
                for i in range(0, len(row.duals)):
                    result[i] = max(result[i], row.duals[i].numData)

        # Create an iterable of dual with numerical value
        duals = iter([SSE.Dual(numData=r) for r in result])

        # Set and send Table header
        table = SSE.TableDescription(name='MaxOfColumns', numberOfRows=1)
        table.fields.add(name='Max1', dataType=SSE.NUMERIC)
        table.fields.add(name='Max2', dataType=SSE.NUMERIC)
        md = (('qlik-tabledescription-bin', table.SerializeToString()), )
        context.send_initial_metadata(md)

        # Yield the row data constructed
        yield SSE.BundledRows(rows=[SSE.Row(duals=duals)])
Code example #7
    def Serve(self, port, key, certificate):
        """
        Server
        :param port: port to listen on.
        :param key: key to use for secure mode.
        :param certificate: certificate to use for secure mode.
        :return: None
        """
        server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
        SSE.add_ConnectorServicer_to_server(self, server)

        if key and certificate:
            with open(key, 'rb') as f:
                private_key = f.read()
            with open(certificate, 'rb') as f:
                cert_chain = f.read()
            credentials = grpc.ssl_server_credentials([(private_key,
                                                        cert_chain)])
            server.add_secure_port('[::]:{}'.format(port), credentials)
            logging.info(
                '*** Running server in secure mode on port: {} ***'.format(
                    port))
        else:
            server.add_insecure_port('[::]:{}'.format(port))
            logging.info(
                '*** Running server in insecure mode on port: {} ***'.format(
                    port))

        server.start()
        try:
            while True:
                time.sleep(_ONE_DAY_IN_SECONDS)
        except KeyboardInterrupt:
            server.stop(0)
Code example #8
    def Serve(self, port, pem_dir):
        """
        Server
        :param port: port to listen on.
        :param pem_dir: Directory including certificates
        :return: None
        """
        server = grpc.server(
            futures.ThreadPoolExecutor(max_workers=10),
            options=[('grpc.max_send_message_length', MAX_MESSAGE_LENGTH),
                     ('grpc.max_receive_message_length', MAX_MESSAGE_LENGTH)])

        SSE.add_ConnectorServicer_to_server(self, server)

        if pem_dir:
            # Secure connection
            with open(os.path.join(pem_dir, 'sse_server_key.pem'), 'rb') as f:
                private_key = f.read()
            with open(os.path.join(pem_dir, 'sse_server_cert.pem'), 'rb') as f:
                cert_chain = f.read()
            with open(os.path.join(pem_dir, 'root_cert.pem'), 'rb') as f:
                root_cert = f.read()
            credentials = grpc.ssl_server_credentials([(private_key, cert_chain)], root_cert, True)
            server.add_secure_port('[::]:{}'.format(port), credentials)
            logging.info('*** Running server in secure mode on port: {} ***'.format(port))
        else:
            # Insecure connection
            server.add_insecure_port('[::]:{}'.format(port))
            logging.info('*** Running server in insecure mode on port: {} ***'.format(port))

        server.start()
        try:
            while True:
                time.sleep(_ONE_DAY_IN_SECONDS)
        except KeyboardInterrupt:
            server.stop(0)
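Both Serve variants above are started from a module-level entry point that builds the servicer and passes the command-line settings through. A minimal sketch of such an entry point, assuming the servicer class is named ExtensionService and takes no constructor arguments (both names are assumptions, not shown in the snippets above):

    if __name__ == '__main__':
        import argparse

        parser = argparse.ArgumentParser()
        parser.add_argument('--port', nargs='?', default='50053')
        parser.add_argument('--pem_dir', nargs='?')
        args = parser.parse_args()

        # ExtensionService is the assumed ConnectorServicer subclass that
        # implements GetCapabilities/ExecuteFunction/EvaluateScript and Serve.
        calc = ExtensionService()
        calc.Serve(args.port, args.pem_dir)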
Code example #9
    def test_evaluatescript(self):
        """
        Test EvaluateScript HelloWorld.

        This test calls the EvaluateScript function with the
        script 'args[0] + args[1]' and two string parameters,
        effectively concatenating the two input strings.
        """
        params = to_string_parameters('str1', 'str2')

        header = SSE.ScriptRequestHeader(script='args[0] + args[1]',
                                         functionType=SSE.TENSOR,
                                         returnType=SSE.STRING,
                                         params=params)

        duals = strings_to_duals('Hello', 'World')
        rows = duals_to_rows(duals)

        # Trailing commas to make iterable sequences of tuples.
        bundled_rows = (SSE.BundledRows(rows=rows)),
        metadata = (('qlik-scriptrequestheader-bin',
                     header.SerializeToString()), )

        result = self.stub.EvaluateScript(request_iterator=iter(bundled_rows),
                                          metadata=metadata)

        for bundled_row in result:
            for row in bundled_row.rows:
                for dual in row.duals:
                    assert dual.strData == 'HelloWorld'
Code example #10
    def test_executefunction(self):
        """
        Test ExecuteFunction HelloWorld.

        This test calls the ExecuteFunction function, with an
        id corresponding to the HelloWorld plugin function.
        """

        header = SSE.FunctionRequestHeader(functionId=HELLO_WORLD_ID,
                                           version="1")

        duals = strings_to_duals('Hello World!')
        rows = duals_to_rows(duals)

        # Trailing commas to make iterable sequences of tuples.
        bundled_rows = (SSE.BundledRows(rows=rows)),
        metadata = (('qlik-functionrequestheader-bin',
                     header.SerializeToString()), )

        result = self.stub.ExecuteFunction(request_iterator=iter(bundled_rows),
                                           metadata=metadata)

        for bundled_row in result:
            for row in bundled_row.rows:
                for dual in row.duals:
                    assert dual.strData == 'Hello World!'
Code example #11
    def test_evaluatescript(self):
        """
        Test EvaluateScript ColumnOperations.

        This test calls the EvaluateScript function with the
        script 'args[0] + args[1]' and two numeric parameters,
        effectively summing the two input numbers.
        """
        params = to_numeric_parameters('num1', 'num2')

        header = SSE.ScriptRequestHeader(script='args[0] + args[1]',
                                         functionType=SSE.TENSOR,
                                         returnType=SSE.NUMERIC,
                                         params=params)

        duals = numbers_to_duals(42, 42)
        rows = duals_to_rows(duals)

        # Trailing commas to make iterable sequences of tuples.
        bundled_rows = (SSE.BundledRows(rows=rows)),
        metadata = (('qlik-scriptrequestheader-bin', header.SerializeToString()), )

        result = self.stub.EvaluateScript(request_iterator=iter(bundled_rows), metadata=metadata)

        for bundled_row in result:
            for row in bundled_row.rows:
                for dual in row.duals:
                    assert dual.numData == 84
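The three test examples above call helper functions (to_string_parameters, to_numeric_parameters, strings_to_duals, numbers_to_duals, duals_to_rows) whose definitions are not shown. A minimal sketch of what they could look like, assuming the generated protobuf module is imported as SSE (the module name below is an assumption):

    import ServerSideExtension_pb2 as SSE

    def to_string_parameters(*names):
        # One STRING-typed Parameter per name, used in the request header
        return [SSE.Parameter(name=name, dataType=SSE.STRING) for name in names]

    def to_numeric_parameters(*names):
        # One NUMERIC-typed Parameter per name
        return [SSE.Parameter(name=name, dataType=SSE.NUMERIC) for name in names]

    def strings_to_duals(*values):
        # Wrap each string value in a Dual
        return [SSE.Dual(strData=value) for value in values]

    def numbers_to_duals(*values):
        # Wrap each numeric value in a Dual
        return [SSE.Dual(numData=value) for value in values]

    def duals_to_rows(duals):
        # Place all duals on a single row
        return [SSE.Row(duals=duals)]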
Code example #12
    def _no_cache(request, context):
        """
        Cache disabled. Add the datetime stamp to the end of each string value.
        :param request:
        :param context: used for disabling the cache in the header.
        :return: string
        """
        # Disable caching.
        md = (('qlik-cache', 'no-store'), )
        context.send_initial_metadata(md)

        # Iterate over bundled rows
        for request_rows in request:
            # Iterate over rows
            for row in request_rows.rows:
                # Retrieve string value of parameter and append to the params variable
                # Length of param is 1 since one column is received, the [0] collects the first value in the list
                param = [d.strData for d in row.duals][0]

                # Append the current datetime stamp
                result = param + ' ' + datetime.now().isoformat()
                # Create an iterable of dual with the result
                duals = iter([SSE.Dual(strData=result)])

                # Yield the row data as bundled rows
                yield SSE.BundledRows(rows=[SSE.Row(duals=duals)])
Code example #13
File: __main__.py  Project: essalj/qlik-py-tools
 def _prophet_seasonality(request, context):
     """
     Provide the seasonality component of the Prophet timeseries forecast. Scalar function.
     :param request: an iterable sequence of RowData
     :param context: not used for now
     :return: the forecasted value for each row
     :
     :Qlik expression example:
     :<AAI Connection Name>.Prophet_Seasonality(Month, $(vConcatSeries), $(vHolidays), 'seasonality=yearly, freq=MS, debug=true')
     :The fourth argument in the Qlik expression is a string of parameters. 
     :This should take the form of a comma separated string:
     :e.g 'seasonality=yearly, freq=MS, debug=true' or 'seasonality=weekly, freq=D'
     :
     :Parameters accepted for the Prophet() function are: cap, floor, changepoint_prior_scale, interval_width, 
     :lower_window, upper_window 
     :
     :Parameters accepted for the make_future_dataframe() function are: freq
     :
     :For more information on these parameters go here: https://facebook.github.io/prophet/docs/quick_start.html
     :
     :Additional parameters used are: return, take_log, debug
     :
     :cap = 1000 : A logistic growth model can be defined using cap and floor. Values should be double or integer
     :changepoint_prior_scale = 0.05 : Decrease if the trend changes are being overfit, increase for underfit
     :interval_width = 0.08 : Set the width of the uncertainty intervals
     :lower_window = -1 : Only used with holidays. Extend the holiday by certain no. of days prior to the date.
     :upper_window = 1 : Only used with holidays. Extend the holiday by certain no. of days after the date.
     :freq = MS : The frequency of the time series. e.g. MS for Month Start. See the possible options here:
     :          : http://pandas.pydata.org/pandas-docs/stable/timeseries.html#offset-aliases
     :return = yhat : Any of the options in the forecast result. You can see these options with debug=true 
     :              : yhat, yhat_upper, yhat_lower : Forecast, upper and lower limits
     :              : y_then_yhat, y_then_yhat_upper, y_then_yhat_lower : Return forecast only for forecast periods
     :              : trend, trend_upper, trend_lower : Trend component of the timeseries
     :              : seasonal, seasonal_upper, seasonal_lower: Seasonal component of the timeseries 
     :take_log = false : Apply logarithm to the values before the forecast. Default is true
     :debug = true : Print execution information to the terminal and logs in ..\logs\Prophet Log <n>.txt
     """
     
     # Get a list from the generator object so that it can be iterated over multiple times
     request_list = [request_rows for request_rows in request]
                           
     # Create an instance of the ProphetForQlik class
     # This will take the request data from Qlik and prepare it for forecasting
     predictor = ProphetForQlik.init_seasonality(request_list)
     
     # Calculate the forecast and store in a Pandas series
     forecast = predictor.predict()
     
     # Values in the series are converted to type SSE.Dual
     response_rows = forecast.apply(lambda result: iter([SSE.Dual(numData=result)]))
     
     # Values in the series are converted to type SSE.Row
     # The series is then converted to a list
     response_rows = response_rows.apply(lambda duals: SSE.Row(duals=duals)).tolist()        
     
     # Iterate over bundled rows
     for request_rows in request_list:
         # Yield Row data as Bundled rows
         yield SSE.BundledRows(rows=response_rows)  
Code example #14
 def get_duals(result, ret_type):
     if isinstance(result, str) or not hasattr(result, '__iter__'):
         result = [result]
     # Transform the result to an iterable of Dual data
     if ret_type == ReturnType.String:
         return iter([SSE.Dual(strData=col) for col in result])
     elif ret_type == ReturnType.Numeric:
         return iter([SSE.Dual(numData=col) for col in result])
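A hypothetical use of get_duals when assembling a response row; ReturnType is assumed to be the same enum as in code example #2:

    # Build one response row from a numeric script result and bundle it
    row = SSE.Row(duals=get_duals(3.14, ReturnType.Numeric))
    bundled = SSE.BundledRows(rows=[row])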
Code example #15
    def _geocode(request, context):
        api_selection = None
        coordinateList = []

        # Iterate over bundled rows
        for request_rows in request:

            # Iterate over rows
            for row in request_rows.rows:

                # The first strData is the location to geocode
                data = [d.strData for d in row.duals][0]

                # Grab selected API
                # Possible choices are: Google, Open Street, GeocoderDotUS
                if not api_selection:
                    api_selection = [d.strData for d in row.duals][1]

                    if 'google' in api_selection.lower():
                        geolocator = GoogleV3(
                            api_key="AIzaSyDlLSgsXiYG-zKYTuLxsbj1BNr7XoGpRMk")
                    elif 'open street' in api_selection.lower():
                        geolocator = Nominatim(scheme='http')
                    elif 'geocoderdotus' in api_selection.lower():
                        geolocator = GeocoderDotUS()
                    else:
                        geolocator = Nominatim()

                # Geocode, retrying on transient errors; location stays None if all attempts fail
                location = None
                for i in range(2):
                    try:
                        location = geolocator.geocode(data, timeout=2)
                        break
                    except geopy.exc.GeocoderTimedOut:
                        pass
                    except geopy.exc.GeocoderQueryError:
                        pass
                    except urllib.error.HTTPError:
                        pass

                try:
                    latitude = location.latitude
                except AttributeError:
                    latitude = 'Unavailable'
                try:
                    longitude = location.longitude
                except AttributeError:
                    longitude = 'Unavailable'

                coordinates = '[' + str(longitude) + ', ' + str(latitude) + ']'

                coordinateList.append(coordinates)

        # Create an iterable of dual with the result
        duals = iter([[SSE.Dual(strData=c)] for c in coordinateList])

        # Yield the row data as bundled rows
        yield SSE.BundledRows(rows=[SSE.Row(duals=d) for d in duals])
Code example #16
    def _cluster(request, context):
        """
        Look for clusters within a dimension using the given features. Scalar function.
        Three variants are implemented:
        :0: one dimension and a string of numeric features.
        :1: two dimensions and one measure. This will only work when used in the Qlik load script.
        :2: one dimension, the latitude and longitude.
        :
        :param request: an iterable sequence of RowData
        :param context:
        :return: the clustering classification for each row
        :Qlik expression examples:
        :<AAI Connection Name>.Cluster(dimension, sum(value1) & ';' & sum(value2), 'scaler=standard')
        """
        # Get a list from the generator object so that it can be iterated over multiple times
        request_list = [request_rows for request_rows in request]

        # Get the function id from the header to determine the variant being called
        function = ExtensionService._get_function_id(context)

        if function == 0:
            # The standard variant takes in one dimension and a string of semi-colon separated features.
            variant = "standard"
        elif function == 1:
            # In the second variant features for clustering are produced by pivoting the measure for the second dimension.
            variant = "two_dims"
        elif function == 2:
            # The third variant is for finding clusters in geospatial coordinates.
            variant = "lat_long"

        # Create an instance of the HDBSCANForQlik class
        # This will take the request data from Qlik and prepare it for clustering
        clusterer = HDBSCANForQlik(request_list, context, variant=variant)

        # Calculate the clusters and store in a Pandas series (or DataFrame in the case of a load script call)
        clusters = clusterer.scan()

        # Check if the response is a DataFrame.
        # This occurs when the load_script=true argument is passed in the Qlik expression.
        response_is_df = isinstance(clusters, pd.DataFrame)

        # Convert the response to a list of rows
        clusters = clusters.values.tolist()

        # We convert values to type SSE.Dual, and group columns into an iterable
        if response_is_df:
            response_rows = [
                iter([SSE.Dual(strData=row[0]),
                      SSE.Dual(numData=row[1])]) for row in clusters
            ]
        else:
            response_rows = [iter([SSE.Dual(numData=row)]) for row in clusters]

        # Values are then structured as SSE.Rows
        response_rows = [SSE.Row(duals=duals) for duals in response_rows]

        # Yield Row data as Bundled rows
        yield SSE.BundledRows(rows=response_rows)
Code example #17
    def _evaluate(request):
        """
        Score each row with a pre-trained H2O k-means model (kmeans_iris).
        :param request: an iterable sequence of RowData
        :return: the predicted cluster for each row
        """
        params = []
        logging.info('_evaluate')
        logging.info('_evaluate request {}'.format(request))

        print(request)

        # Iterate over bundled rows
        for request_rows in request:
            print(request_rows.rows)
            print(len(request_rows.rows))
            # Iterating over rows
            logging.info('_evaluate request_rows {}'.format(request_rows.rows))
            for row in request_rows.rows:
                # Retrieve the numerical values of the parameters for this row
                # and append them to the params variable
                param = [d.numData for d in row.duals]
                logging.info('_evaluate row {}'.format(param))
                params.append(param)

        h2o.init()

        dir_path = os.path.dirname(
            os.path.realpath(__file__)) + "\\kmeans_iris"
        #results = h2o.load_model("C:/Users/daniel/Documents/Qlik Advanced Analytics/Examples/Python/H2O/kmeans_iris")
        results = h2o.load_model(dir_path)
        newData = h2o.H2OFrame(params)

        predictedNew = results.predict(newData)
        predicted_as_list = h2o.as_list(predictedNew, use_pandas=False)

        predicted_as_list.pop(0)

        response_rows = []

        for result in predicted_as_list:
            # Create an iterable of Dual with a numerical value
            duals = iter([SSE.Dual(numData=int(result[0]))])
            # Append the row data constructed to response_rows
            response_rows.append(SSE.Row(duals=duals))

        #print(predicted_as_list)
        logging.info('_evaluate params {}'.format(params))
        logging.info(
            '_evaluate predicted_as_list {}'.format(predicted_as_list))
        # Sum all rows collected in the params variable
        #result = sum(params[0])

        # Create an iterable of dual with numerical value
        #duals = iter([SSE.Dual(numData=result)])

        # Yield the row data constructed
        yield SSE.BundledRows(rows=response_rows)
Code example #18
File: __main__.py  Project: Parkman328/QRAG-Docker
 def _ws_single(request, context):
     """
      Single Row Processing for Websockets
     :param request: iterable sequence of bundled rows
     :return: the same iterable sequence as received
     """
     logging.info('Entering {} TimeStamp: {}' .format(function_name, datetime.now().strftime("%H:%M:%S.%f")))
     # Start by gathering environment variables
     
     host = socket.gethostname()
     ip_addr = socket.gethostbyname(host)
     ws_url = config.get(q_function_name, 'ws_url')
     token = config.get(q_function_name, 'token')
     user_name= config.get(q_function_name, 'username')
     ws_route= config.get(q_function_name, 'ws_route')
     bCache= config.get(q_function_name, 'cache')
     logging.debug('Printing route for WS {}' .format(ws_route))
     logging.debug("Caching is set to {}" .format(bCache))
     if (bCache.lower()=="true"):
         logging.info("Caching ****Enabled*** for {}" .format(q_function_name))
     else:
         logging.info("Caching ****Disabled**** for {}" .format(q_function_name))
         md = (('qlik-cache', 'no-store'),)
         context.send_initial_metadata(md)
     # In future we will use the token for licensing and throttling
     # Currently we use a combination of host+ipaddr+username for client identification
     ws_url = ws_url + host +'_'+ ip_addr+'_'+ user_name+'_'
     logging.debug('Websocket URL : {}' .format(ws_url))
     ws = create_connection(ws_url)
     response_rows = []
     for request_rows in request:
         # Iterate over rows
         # Default code
         for row in request_rows.rows:
             # Retrieve string value of parameter and append to the params variable
             # Length of param is 1 since one column is received, the [0] collects the first value in the list
             param = [d.strData for d in row.duals][0]
             result = ''
             if (len(param) ==0):
                 logging.debug('Parameters are Empty')
                 result = 'Error'
             else:
                 payload = '{"action":"'+ ws_route +'","data":"' + param + '"}'
                 logging.debug('Showing Payload: {}'.format(payload))
                 ws.send(payload)
                 #logging.info('Show  Payload Response: {}'.format(resp.text))
                 resp =  json.loads(ws.recv())
                 logging.debug(resp)
                 result = resp['result']
                 logging.debug('Show  Result: {}'.format(result))
             # Create an iterable of dual with the result and append it for this row
             duals = iter([SSE.Dual(strData=result)])
             response_rows.append(SSE.Row(duals=duals))
             # Yield the row data as bundled rows
     yield SSE.BundledRows(rows=response_rows)
     ws.close()
     logging.info('Exiting {} TimeStamp: {}' .format(function_name, datetime.now().strftime("%H:%M:%S.%f")))
Code example #19
    def _linearRegression(request, context):
        # clear the log for ARIMA details
        ##        f = open(logger,'w')
        ##        f.write('New function call\n')

        # instantiate a list for measure data
        dataList = []

        for request_rows in request:
            # iterate over each request row (contains rows, duals, numData)
            ##            f.write('Request Rows: ' + str(request_rows) + '\n')

            # pull duals from each row, and the numData from duals
            for row in request_rows.rows:
                # the first numData contains the measure data
                data = [d.numData for d in row.duals][0]

                # try to convert number to float
                try:
                    float(data)
                except ValueError:
                    data = 0

                # append each data point to a list and log it
                dataList.append(data)


##                f.write('Row: ' + str(data) + '\n')

        # grab the length of the data list and convert
        X_len = len(dataList)
        X = np.asarray(range(X_len))

        # convert the data into an array
        Y = np.asarray(dataList)

        # fit linear regression model
        mdl = LinearRegression().fit(X.reshape(-1, 1), Y)

        # grab m and b from y = mx + b
        m = mdl.coef_[0]
        b = mdl.intercept_

        # calculate regression line points
        regressionResults = []
        gen = (i for i in range(X_len))

        for i in gen:
            y = m * i + b
            regressionResults.append(y)

        # Create an iterable of dual with the result
        duals = iter([[SSE.Dual(numData=d)] for d in regressionResults])

        # Yield the row data as bundled rows
        yield SSE.BundledRows(rows=[SSE.Row(duals=d) for d in duals])
Code example #20
    def _getField(request, context):
        """
        Look up the requested field value, applying user-based access control.
        :param request: iterable sequence of bundled rows carrying the field name, record ID, user and comment
        :return: the field value, or obfuscated data if the user is not authorised
        """
        mytime = datetime.utcnow()
        params = []

        csvrows = []
        csvrows.append(['DateTimeUTC','FieldRef','UserRef','Comment'])

        logging.debug("Request: " + str(request) + "\n")

        dataUpdate()

        #Not optimal but will need to fix later.
        global qpiData
        global qpiObsfData
        global qpiAccess

        for request_rows in request:
            logging.debug("Request rows: " + str(request_rows) + "\n")
            response_rows = []

            for row in request_rows.rows:

                # Retrieve string value of parameter and append to the params variable
                params = [d.strData for d in row.duals]

                #Retrieve the requested value from the loaded data here...
                try:
                    if params[2] in qpiAccess:
                        returnValue = str(qpiData[params[1]][params[0]])

                        csvrows.append([mytime,params[1],params[2],'Field: ' + params[0] + ' - ' + params[3]])

                        #Added threading from https://stackoverflow.com/questions/39440015/how-to-do-simple-async-calls-in-python
                        #    needs to be thoroughly tested on LARGE data volumes
                        with futures.ThreadPoolExecutor(max_workers=1) as executor:
                            executor.submit(csvlogger,csvrows)
                    else:
                        # User is not allowed to see real data, returning obfuscated data and no need to log access.
                        returnValue = str(qpiObfuscated[params[1]][params[0]])

                except Exception:
                    returnValue = 'Cannot retrieve requested data, see log file.'
                    logging.error('Error in retrieving data\nNo data exists for ID: ' + params[1] + ' Field: ' + params[0] + ' for User: ' + params[2] + ' Comment: ' + params[3])

                # Create an iterable of dual with the result
                duals = iter([SSE.Dual(strData=returnValue)])

                response_rows.append(SSE.Row(duals=duals))


        yield SSE.BundledRows(rows=response_rows)
Code example #21
    def _translateScript(request, context):

        idNumList = []
        translationsList = []

        # Iterate over bundled rows
        for request_rows in request:
            # Iterate over rows
            for row in request_rows.rows:
                # grab the text
                text = [d.strData for d in row.duals][0]

                # grab the id
                idNum = [d.numData for d in row.duals][1]
                idNumList.append(idNum)

                # grab the source
                src = [d.strData for d in row.duals][2]

                # grab the destination
                dest = [d.strData for d in row.duals][3]

                translationsList.append(text)

        try:
            translator = Translator()
            translations = translator.translate(translationsList,
                                                src=src,
                                                dest=dest)
        except:
            pass

        resultList = [i.text for i in translations]
        logging.info('Records translated: ' + str(len(resultList)))

        # Create an iterable of dual with the result
        dualsList = []
        dualsList.append([SSE.Dual(numData=d) for d in idNumList])
        dualsList.append([SSE.Dual(strData=d) for d in resultList])

        response_rows = []
        for i in range(len(idNumList)):
            duals = [dualsList[z][i] for z in range(len(dualsList))]
            response_rows.append(SSE.Row(duals=iter(duals)))

        # Set and send Table header
        table = SSE.TableDescription(name='Translations')
        table.fields.add(dataType=SSE.NUMERIC)
        table.fields.add(dataType=SSE.STRING)
        md = (('qlik-tabledescription-bin', table.SerializeToString()), )
        context.send_initial_metadata(md)

        yield SSE.BundledRows(rows=response_rows)
Code example #22
 def _rest_single(request, context):
     """
     REST call using a single variable
     """
     logging.info('Entering {} TimeStamp: {}'.format(
         function_name,
         datetime.now().strftime("%H:%M:%S.%f")))
     url = config.get(q_function_name, 'url')
     logging.debug("Rest Url is set to {}".format(url))
     bCache = config.get(q_function_name, 'cache')
     logging.debug("Caching is set to {}".format(bCache))
     if (bCache.lower() == "true"):
         logging.info(
             "Caching ****Enabled*** for {}".format(q_function_name))
     else:
         logging.info(
             "Caching ****Disabled**** for {}".format(q_function_name))
         md = (('qlik-cache', 'no-store'), )
         context.send_initial_metadata(md)
     response_rows = []
     request_counter = 1
     for request_rows in request:
         logging.debug('Printing Request Rows - Request Counter {}'.format(
             request_counter))
         request_counter = request_counter + 1
         for row in request_rows.rows:
             # Retrieve string value of parameter and append to the params variable
             # Length of param is 1 since one column is received, the [0] collects the first value in the list
             param = [d.strData for d in row.duals][0]
             # Skip empty parameters, otherwise post the value to the REST endpoint
             if (len(param) == 0):
                 logging.info(
                     'Exiting {} TimeStamp: {} due to Data being Empty '.
                     format(function_name,
                            datetime.now().strftime("%H:%M:%S.%f")))
             else:
                 payload = '{"data":"' + param + '"}'
                 logging.debug('Showing Payload: {}'.format(payload))
                 resp = requests.post(url, data=payload)
                 logging.debug('Show Payload Response as Text: {}'.format(
                     resp.text))
                 result = resp.text
                 result = result.replace('"', '')
                 result = result.strip()
                 logging.debug('Show  Result: {}'.format(result))
                 # Create an iterable of dual with the result
                 duals = iter([SSE.Dual(strData=result)])
                 response_rows.append(SSE.Row(duals=duals))
             # Yield the row data as bundled rows
     yield SSE.BundledRows(rows=response_rows)
     logging.info('Exiting {} TimeStamp: {}'.format(
         function_name,
         datetime.now().strftime("%H:%M:%S.%f")))
Code example #23
    def _rest_30(request, context):
        """
        Aggregates the parameters into a single comma separated string and posts it to the configured REST endpoint.
        """

        logging.info('Entering {} TimeStamp: {}'.format(
            function_name,
            datetime.now().strftime("%H:%M:%S.%f")))
        url = config.get(q_function_name, 'url')
        bCache = config.get(q_function_name, 'cache')
        logging.debug("Caching is set to {}".format(bCache))
        if (bCache.lower() == "true"):
            logging.info(
                "Caching ****Enabled*** for {}".format(q_function_name))
        else:
            logging.info(
                "Caching ****Disabled**** for {}".format(q_function_name))
            md = (('qlik-cache', 'no-store'), )
            context.send_initial_metadata(md)
        # Iterate over bundled rows
        response_rows = []
        for request_rows in request:
            # Iterate over rows
            for row in request_rows.rows:
                # Retrieve string value of parameter and append to the params variable
                # Length of param is 1 since one column is received, the [0] collects the first value in the list
                param = [d.strData for d in row.duals]
                if (len(param) == 0):
                    logging.debug('Parameters are Empty')
                    result = 'Error'
                #logging.info('Showing Payload: {}'.format(param))
                # Aggregate parameters to a single string
                # Join payload via =','.join(param)
                else:
                    payload = '{"data":"' + (','.join(param)) + '"}'
                    logging.debug('Showing Payload: {}'.format(payload))
                    resp = requests.post(url, data=payload)
                    logging.debug('Show  Payload Response: {}'.format(
                        resp.text))
                    result = resp.text
                    result = result.replace('"', '')
                    result = result.strip()
                    logging.debug('Show  Result: {}'.format(result))
                    # Create an iterable of dual with the result
                    duals = iter([SSE.Dual(strData=result)])
                    response_rows.append(SSE.Row(duals=duals))
        # Yield the row data as bundled rows
        yield SSE.BundledRows(rows=response_rows)
        logging.info('Exiting Predict  v2 TimeStamp: {}'.format(
            datetime.now().strftime("%H:%M:%S.%f")))
Code example #24
    def _getPDFTable(request, context):
        """
        Extract tables from a PDF using a Tabula template.
        :param request: iterable sequence of bundled rows carrying the extraction method, PDF path and template path
        :return: the extracted table rows
        """
        # empty parameters
        extraction_method = None
        path = None
        template = None
        for request_rows in request:
            # pull duals from each row, and the numData from duals
            for row in request_rows.rows:
                if extraction_method is None:
                    extraction_method = [d.strData for d in row.duals][0]
                if path is None:
                    path = [d.strData for d in row.duals][1]
                if template is None:
                    template = [d.strData for d in row.duals][2]
        # read PDF with template
        if extraction_method == 'stream':
            df_list = read_pdf_with_template(path, template, stream=True)
        else:
            df_list = read_pdf_with_template(path, template, lattice=True)
        final_df = pd.DataFrame()
        count = 1
        for df in df_list:
            df['tableID'] = str(count)
            final_df = pd.concat([final_df, df], axis=0, ignore_index=True)
            count = count + 1
        columns = final_df.columns

        # iterate through df columns and format as SSE duals
        dualsList = []
        for col in columns:
            tmpList = final_df[col].tolist()
            dualsList.append([
                SSE.Dual(strData=d) if type(d) is str else SSE.Dual(numData=d)
                for d in tmpList
            ])

        # create response rows
        response_rows = []
        for i in range(len(tmpList)):
            duals = [dualsList[z][i] for z in range(len(dualsList))]
            response_rows.append(SSE.Row(duals=iter(duals)))

        # return response
        yield SSE.BundledRows(rows=response_rows)
Code example #25
    def _get_table_data(request, context):
        """
        Fetch the requested table's data via the Precog REST API.
        :param request:
        :param context:
        :return:
        """
        logging.info('Entering {} TimeStamp: {}' .format(function_name, datetime.now().strftime("%H:%M:%S.%f")))
        url = config.get(q_function_name, 'url')
        logging.debug("Rest Url is set to {}" .format(url))
        bCache= config.get(q_function_name, 'cache')
        logging.debug("Caching is set to {}" .format(bCache))
        if (bCache.lower() =="true"):
            logging.info("Caching ****Enabled*** for {}" .format(q_function_name))
        else:
            logging.info("Caching ****Disabled**** for {}" .format(q_function_name))
            md = (('qlik-cache', 'no-store'),)
            context.send_initial_metadata(md)

        # Get the table name
        for request_rows in request:
            response_rows = []
        temp = MessageToDict(request_rows) 
        table_name = temp["rows"][0]["duals"][0]["strData"]
        logging.debug("Table Name : {}" .format(table_name))
        # Get the JSON key and values for the table
        table_id  = precog.get_table_id(table_name, url)
        print(table_id[0])
        print(table_id[1])
        # TODO: add try/except to catch a bad URL and bail out if the response is not 200
        logging.debug('Input Table Name: {} Table ID: {}' .format(table_name, table_id[0]))
        create_token_tuple = precog.create_token(url,table_id[0])
        print(create_token_tuple)
        print(precog.get_count_of_all_tokens(url))
        new_token = create_token_tuple[0]
        new_secret = create_token_tuple[1]
        response = create_token_tuple[2]
        result = precog.get_result_csv(url, new_secret)
        print(result[0])
        output_str = result[1]
        print(output_str)
        parsed_csv = precog.convert_csv(result[1])
        print(parsed_csv)
        resp_clean = precog.cleanup_token(new_token, table_id[0], url)
        print(resp_clean)
        print(precog.get_count_of_all_tokens(url))
        bundledRows = SSE.BundledRows()

        yield SSE.BundledRows(rows=resp_clean)
Code example #26
File: __main__.py  Project: iulihardt/Qlik_SSE_python
    def _SomaLinha(request):
        params = []
        for request_rows in request:
            for row in request_rows.rows:
                p1 = [d.numData for d in row.duals][0]
                p2 = [d.numData for d in row.duals][1]
                p3 = [d.numData for d in row.duals][2]
                params.append(p1)
                params.append(p2)
                params.append(p3)

        result = sum(params)
        print('Variaveis: ', params, ' Soma é: ', result)
        duals = iter([SSE.Dual(numData=result)])
        yield SSE.BundledRows(rows=[SSE.Row(duals=duals)])
Code example #27
    def _predictPlayByPlay(request, context):
        df = pd.DataFrame(columns=[
            'quarter', 'seconds_elapsed', 'offense_team', 'yardline', 'down',
            'yards_to_go', 'home_team', 'away_team', 'curr_home_score',
            'curr_away_score'
        ])

        # Iterate over bundled rows
        i = -1
        for request_rows in request:
            # Iterate over rows
            for row in request_rows.rows:
                i += 1

                modelName = [d.strData for d in row.duals][0]
                gameId = [d.strData for d in row.duals][1]
                quarter = [d.strData for d in row.duals][2]
                seconds_elapsed = [d.strData for d in row.duals][3]
                offense_team = [d.strData for d in row.duals][4]
                yardline = [d.strData for d in row.duals][5]
                down = [d.strData for d in row.duals][6]
                yards_to_go = [d.strData for d in row.duals][7]
                home_team = [d.strData for d in row.duals][8]
                away_team = [d.strData for d in row.duals][9]
                curr_home_score = [d.strData for d in row.duals][10]
                curr_away_score = [d.strData for d in row.duals][11]

                df.loc[i] = [
                    quarter,
                    int(seconds_elapsed), offense_team,
                    int(yardline),
                    int(down),
                    int(yards_to_go), home_team, away_team,
                    int(curr_home_score),
                    int(curr_away_score)
                ]

        logging.info(modelName)

        model = p.load(open('models\\' + modelName, 'rb'), encoding='latin1')

        wp = list(model.predict_wp(df))

        # Create an iterable of dual with the result
        duals = iter([[SSE.Dual(numData=d)] for d in wp])

        # Yield the row data as bundled rows
        yield SSE.BundledRows(rows=[SSE.Row(duals=d) for d in duals])
Code example #28
    def EvaluateScript(self, request, context):
        """
        This plugin provides functionality only for script calls with no parameters and tensor script calls.
        :param request:
        :param context:
        :return:
        """
        # Parse header for script request
        metadata = dict(context.invocation_metadata())
        header = SSE.ScriptRequestHeader()
        header.ParseFromString(metadata['qlik-scriptrequestheader-bin'])

        # Retrieve function type
        func_type = self.ScriptEval.get_func_type(header)

        # Verify function type
        if (func_type == FunctionType.Aggregation) or (func_type
                                                       == FunctionType.Tensor):
            return self.ScriptEval.EvaluateScript(header, request, context,
                                                  func_type)
        else:
            # This plugin does not support other function types than aggregation and tensor.
            # Make sure the error handling, including logging, works as intended in the client
            msg = 'Function type {} is not supported in this plugin.'.format(
                func_type.name)
            context.set_code(grpc.StatusCode.UNIMPLEMENTED)
            context.set_details(msg)
            # Raise error on the plugin-side
            raise grpc.RpcError(grpc.StatusCode.UNIMPLEMENTED, msg)
Code example #29
File: _common.py  Project: zerubroberts/qlik-py-tools
    def _send_table_description(self, variant):
        """
        Send the table description to Qlik as meta data.
        Used when the SSE is called from the Qlik load script.
        """
        
        # Set up the table description to send as metadata to Qlik
        self.table = SSE.TableDescription()
        self.table.name = "SSE-Response"
        self.table.numberOfRows = len(self.response_df)

        # Set up fields for the table
        if variant == "apriori":
            # Fields: rule, rule_lhs, rule_rhs, support, confidence, lift
            self.table.fields.add(name="rule")
            self.table.fields.add(name="rule_lhs")
            self.table.fields.add(name="rule_rhs")
            self.table.fields.add(name="support", dataType=1)
            self.table.fields.add(name="confidence", dataType=1)
            self.table.fields.add(name="lift", dataType=1)
                
        # Debug information is printed to the terminal and logs if the parameter debug = true
        if self.debug:
            self._print_log(5)
            
        # Send table description
        table_header = (('qlik-tabledescription-bin', self.table.SerializeToString()),)
        self.context.send_initial_metadata(table_header)
Code example #30
    def GetCapabilities(self, request, context):
        """
        Get capabilities.
        Note that neither request nor context is used in the implementation of this method, but both are still added as
        parameters. The reason is that gRPC always sends both when making a function call and therefore we must include
        them to avoid error messages regarding too many parameters provided from the client.
        :param request: the request, not used in this method.
        :param context: the context, not used in this method.
        :return: the capabilities.
        """
        logging.info('GetCapabilities')
        # Create an instance of the Capabilities grpc message
        # Enable(or disable) script evaluation
        # Set values for pluginIdentifier and pluginVersion
        capabilities = SSE.Capabilities(allowScript=True,
                                        pluginIdentifier='Qlik Rapid API Gateway - Partner Engineering',
                                        pluginVersion='v0.1.0')
        # If user defined functions are supported, add the definitions to the message
        with open(self.function_definitions) as json_file:
            # Iterate over each function definition and add data to the capabilities grpc message
            for definition in json.load(json_file)['Functions']:
                function = capabilities.functions.add()
                function.name = definition['Name']
                function.functionId = definition['Id']
                function.functionType = definition['Type']
                function.returnType = definition['ReturnType']
            
                # Retrieve name and type of each parameter
                for param_name, param_type in sorted(definition['Params'].items()):
                    function.params.add(name=param_name, dataType=param_type)

                logging.info('Adding to capabilities: {}({})'.format(function.name,
                                                                     [p.name for p in function.params]))

        return capabilities
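GetCapabilities above expects the file referenced by self.function_definitions to contain a top-level 'Functions' list whose entries provide Name, Id, Type, ReturnType and a Params mapping. A hypothetical way to produce such a file, assuming the numeric SSE enum values (FunctionType: SCALAR=0, AGGREGATION=1, TENSOR=2; DataType: STRING=0, NUMERIC=1, DUAL=2) and an invented file name functions.json:

    import json

    # Hypothetical definitions: a TENSOR function returning a STRING,
    # and a TENSOR function returning a NUMERIC with two numeric params.
    definitions = {
        "Functions": [
            {"Id": 0, "Name": "HelloWorld", "Type": 2, "ReturnType": 0,
             "Params": {"str1": 0}},
            {"Id": 1, "Name": "SumOfRows", "Type": 2, "ReturnType": 1,
             "Params": {"col1": 1, "col2": 1}}
        ]
    }

    with open('functions.json', 'w') as json_file:
        json.dump(definitions, json_file, indent=4)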