def _sum_of_column(request):
    """
    Summarize the column sent as a parameter. Aggregation function.
    :param request: an iterable sequence of RowData
    :return: int, sum of column
    """
    params = []

    # Iterate over bundled rows
    for request_rows in request:
        # Iterate over rows
        for row in request_rows.rows:
            # Retrieve the numerical value of the parameter and append it to params.
            # One column is received, so [0] collects the first value in the list.
            param = [d.numData for d in row.duals][0]
            params.append(param)

    # Sum all rows collected in the params variable
    result = sum(params)

    # Create an iterable of Dual with a numerical value
    duals = iter([SSE.Dual(numData=result)])

    # Yield the row data constructed
    yield SSE.BundledRows(rows=[SSE.Row(duals=duals)])

def _sum_of_rows(request):
    """
    Summarize two parameters row wise. Tensor function.
    :param request: an iterable sequence of RowData
    :return: the same iterable sequence of row data as received
    """
    # Iterate over bundled rows
    for request_rows in request:
        response_rows = []
        # Iterate over rows
        for row in request_rows.rows:
            # Retrieve the numerical values of the parameters.
            # Two columns are sent from the client, so params has length 2.
            params = [d.numData for d in row.duals]

            # Sum over each row
            result = sum(params)

            # Create an iterable of Dual with a numerical value
            duals = iter([SSE.Dual(numData=result)])

            # Append the row data constructed to response_rows
            response_rows.append(SSE.Row(duals=duals))

        # Yield row data as bundled rows
        yield SSE.BundledRows(rows=response_rows)

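# Both sum variants above can be exercised locally without a running gRPC
# server, since they only consume an iterable of SSE.BundledRows. A minimal
# sketch, assuming the generated ServerSideExtension_pb2 module is available:
import ServerSideExtension_pb2 as SSE

# One bundle, two rows, one numeric column per row
bundle = SSE.BundledRows(rows=[
    SSE.Row(duals=[SSE.Dual(numData=v)]) for v in (1.0, 2.0)
])

# Aggregation: a single row holding the column total comes back
for reply in _sum_of_column([bundle]):
    print([d.numData for d in reply.rows[0].duals])  # [3.0]
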
def _translate(request, context):
    translationsList = []

    # Iterate over bundled rows
    for request_rows in request:
        # Iterate over rows
        for row in request_rows.rows:
            # grab the text, source and destination languages
            text = [d.strData for d in row.duals][0]
            src = [d.strData for d in row.duals][1]
            dest = [d.strData for d in row.duals][2]
            translationsList.append(text)

    try:
        translator = Translator()
        translations = translator.translate(translationsList, src=src, dest=dest)
    except Exception:
        # A bare "except: pass" would leave translations undefined below
        logging.exception('Translation failed')
        raise

    resultList = [i.text for i in translations]
    logging.info('Records translated: ' + str(len(resultList)))

    # Create an iterable of duals with the result
    duals = iter([[SSE.Dual(strData=d)] for d in resultList])

    # Yield the row data as bundled rows
    yield SSE.BundledRows(rows=[SSE.Row(duals=d) for d in duals])

def _ws_single(request, context):
    """
    Single-row processing over websockets.
    :param request: iterable sequence of bundled rows
    :return: the same iterable sequence as received
    """
    logging.info('Entering {} TimeStamp: {}'.format(function_name, datetime.now().strftime("%H:%M:%S.%f")))

    # Start by gathering environment variables
    host = socket.gethostname()
    ip_addr = socket.gethostbyname(host)
    ws_url = config.get(q_function_name, 'ws_url')
    token = config.get(q_function_name, 'token')
    user_name = config.get(q_function_name, 'username')
    ws_route = config.get(q_function_name, 'ws_route')
    bCache = config.get(q_function_name, 'cache')
    logging.debug('Printing route for WS {}'.format(ws_route))
    logging.debug('Caching is set to {}'.format(bCache))

    if bCache.lower() == "true":
        logging.info("Caching ****Enabled**** for {}".format(q_function_name))
    else:
        logging.info("Caching ****Disabled**** for {}".format(q_function_name))
        md = (('qlik-cache', 'no-store'),)
        context.send_initial_metadata(md)

    # In the future the token will be used for licensing and throttling.
    # Currently a combination of host + ip_addr + username identifies the client.
    ws_url = ws_url + host + '_' + ip_addr + '_' + user_name + '_'
    logging.debug('Websocket URL : {}'.format(ws_url))
    ws = create_connection(ws_url)

    response_rows = []
    for request_rows in request:
        # Iterate over rows
        for row in request_rows.rows:
            # Retrieve the string value of the parameter.
            # One column is received, so [0] collects the first value in the list.
            param = [d.strData for d in row.duals][0]
            result = ''
            if len(param) == 0:
                logging.debug('Parameters are empty')
                result = 'Error'
            else:
                payload = '{"action":"' + ws_route + '","data":"' + param + '"}'
                logging.debug('Showing Payload: {}'.format(payload))
                ws.send(payload)
                resp = json.loads(ws.recv())
                logging.debug(resp)
                result = resp['result']
                logging.debug('Show Result: {}'.format(result))

            # Create an iterable of Dual with the result
            duals = iter([SSE.Dual(strData=result)])
            response_rows.append(SSE.Row(duals=duals))

    # Yield the row data as bundled rows
    yield SSE.BundledRows(rows=response_rows)
    ws.close()
    logging.info('Exiting {} TimeStamp: {}'.format(function_name, datetime.now().strftime("%H:%M:%S.%f")))

def evaluate(context, script, ret_type, params=[]):
    """
    Evaluates a script with given parameters.
    :param context: the context sent from client
    :param script: script to evaluate
    :param ret_type: return data type
    :param params: params to evaluate. Default: []
    :return: a RowData of string dual
    """
    if ret_type == ReturnType.String:
        # Evaluate script
        result = eval(script, {'args': params})

        # Transform the result to an iterable of Dual data with a string value
        duals = iter([SSE.Dual(strData=result)])

        # Create row data out of duals
        return SSE.BundledRows(rows=[SSE.Row(duals=duals)])
    else:
        # This plugin does not support other return types than string.
        # Make sure the error handling, including logging, works as intended in the client.
        msg = 'Return type {} is not supported in this plugin.'.format(ret_type)
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details(msg)
        # Raise error on the plugin side
        raise grpc.RpcError(grpc.StatusCode.UNIMPLEMENTED, msg)

def _get_sql(request):
    """
    GetSQL function, tensor
    """
    # Iterate over bundled rows
    for request_rows in request:
        response_rows = []
        # Iterate over rows
        for row in request_rows.rows:
            # Retrieve the string value of the parameter.
            # One column is received, so [0] collects the first value in the list.
            param = [d.strData for d in row.duals][0]

            # This is pretty costly... remove it once you see SQL generating normally
            print(param)

            # Execute the SQL command and take the first column of the first row
            cursor.execute(param)
            results = cursor.fetchall()
            results = results[0][0]

            # Create an iterable of Dual with the numerical value
            duals = iter([SSE.Dual(numData=results)])
            response_rows.append(SSE.Row(duals=duals))

        # Yield the row data constructed
        yield SSE.BundledRows(rows=response_rows)

def _no_cache(request, context):
    """
    Cache disabled. Add the datetime stamp to the end of each string value.
    :param request:
    :param context: used for disabling the cache in the header.
    :return: string
    """
    # Disable caching
    md = (('qlik-cache', 'no-store'),)
    context.send_initial_metadata(md)

    # Iterate over bundled rows
    for request_rows in request:
        # Iterate over rows
        for row in request_rows.rows:
            # Retrieve the string value of the parameter.
            # One column is received, so [0] collects the first value in the list.
            param = [d.strData for d in row.duals][0]

            # Join with the current datetime stamp
            result = param + ' ' + datetime.now().isoformat()

            # Create an iterable of Dual with the result
            duals = iter([SSE.Dual(strData=result)])

            # Yield the row data as bundled rows
            yield SSE.BundledRows(rows=[SSE.Row(duals=duals)])

def duals_to_rows(*args):
    """
    Converts a number of duals to a list of rows.
    """
    rows = [SSE.Row(duals=d) for d in args]
    return rows

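# A hypothetical call to make the argument shape concrete: each positional
# argument is one row's list of duals (SSE as in the functions above).
rows = duals_to_rows([SSE.Dual(strData='a')], [SSE.Dual(strData='b')])
bundle = SSE.BundledRows(rows=rows)  # two rows, one string column each
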
def _max_of_columns_2(request, context):
    """
    Find max of each column. This is a table function.
    :param request: an iterable sequence of RowData
    :param context:
    :return: a table with numerical values, two columns and one row
    """
    result = [_MINFLOAT] * 2

    # Iterate over bundled rows
    for request_rows in request:
        # Iterate over rows
        for row in request_rows.rows:
            # Retrieve the numerical value of each parameter and update the
            # result variable if it's higher than the previously saved value
            for i in range(len(row.duals)):
                result[i] = max(result[i], row.duals[i].numData)

    # Create an iterable of Dual with numerical values
    duals = iter([SSE.Dual(numData=r) for r in result])

    # Set and send the table header
    table = SSE.TableDescription(name='MaxOfColumns', numberOfRows=1)
    table.fields.add(name='Max1', dataType=SSE.NUMERIC)
    table.fields.add(name='Max2', dataType=SSE.NUMERIC)
    md = (('qlik-tabledescription-bin', table.SerializeToString()),)
    context.send_initial_metadata(md)

    # Yield the row data constructed
    yield SSE.BundledRows(rows=[SSE.Row(duals=duals)])

def _hello_world_aggr(request, context):
    """
    Aggregates the parameters to a single comma separated string.
    :param request: iterable sequence of bundled rows
    :return: string
    """
    params = []

    # Iterate over bundled rows
    for request_rows in request:
        # Iterate over rows
        for row in request_rows.rows:
            # Retrieve the string value of the parameter and append it to params.
            # One column is received, so [0] collects the first value in the list.
            param = [d.strData for d in row.duals][0]
            params.append(param)

    # Aggregate the parameters to a single string
    result = ', '.join(params)

    # Create an iterable of Dual with the result
    duals = iter([SSE.Dual(strData=result)])

    # Yield the row data as bundled rows
    yield SSE.BundledRows(rows=[SSE.Row(duals=duals)])

def _prophet_seasonality(request, context):
    """
    Provide the seasonality component of the Prophet timeseries forecast. Scalar function.
    :param request: an iterable sequence of RowData
    :param context: not used for now
    :return: the forecasted value for each row
    :
    :Qlik expression example:
    :<AAI Connection Name>.Prophet_Seasonality(Month, $(vConcatSeries), $(vHolidays), 'seasonality=yearly, freq=MS, debug=true')
    :The fourth argument in the Qlik expression is a string of parameters.
    :This should take the form of a comma separated string:
    :e.g 'seasonality=yearly, freq=MS, debug=true' or 'seasonality=weekly, freq=D'
    :
    :Parameters accepted for the Prophet() function are: cap, floor, changepoint_prior_scale, interval_width,
    :lower_window, upper_window
    :
    :Parameters accepted for the make_future_dataframe() function are: freq
    :
    :For more information on these parameters go here: https://facebook.github.io/prophet/docs/quick_start.html
    :
    :Additional parameters used are: return, take_log, debug
    :
    :cap = 1000 : A logistic growth model can be defined using cap and floor. Values should be double or integer
    :changepoint_prior_scale = 0.05 : Decrease if the trend changes are being overfit, increase for underfit
    :interval_width = 0.08 : Set the width of the uncertainty intervals
    :lower_window = -1 : Only used with holidays. Extend the holiday by certain no. of days prior to the date.
    :upper_window = 1 : Only used with holidays. Extend the holiday by certain no. of days after the date.
    :freq = MS : The frequency of the time series. e.g. MS for Month Start. See the possible options here:
    :          : http://pandas.pydata.org/pandas-docs/stable/timeseries.html#offset-aliases
    :return = yhat : Any of the options in the forecast result. You can see these options with debug=true
    :              : yhat, yhat_upper, yhat_lower : Forecast, upper and lower limits
    :              : y_then_yhat, y_then_yhat_upper, y_then_yhat_lower : Return forecast only for forecast periods
    :              : trend, trend_upper, trend_lower : Trend component of the timeseries
    :              : seasonal, seasonal_upper, seasonal_lower : Seasonal component of the timeseries
    :take_log = false : Apply logarithm to the values before the forecast. Default is true
    :debug = true : Print execution information to the terminal and logs in ..\logs\Prophet Log <n>.txt
    """
    # Get a list from the generator object so that it can be iterated over multiple times
    request_list = [request_rows for request_rows in request]

    # Create an instance of the ProphetForQlik class.
    # This will take the request data from Qlik and prepare it for forecasting.
    predictor = ProphetForQlik.init_seasonality(request_list)

    # Calculate the forecast and store in a Pandas series
    forecast = predictor.predict()

    # Values in the series are converted to type SSE.Dual
    response_rows = forecast.apply(lambda result: iter([SSE.Dual(numData=result)]))

    # Values in the series are converted to type SSE.Row.
    # The series is then converted to a list.
    response_rows = response_rows.apply(lambda duals: SSE.Row(duals=duals)).tolist()

    # Iterate over bundled rows
    for request_rows in request_list:
        # Yield row data as bundled rows
        yield SSE.BundledRows(rows=response_rows)

def _evaluate(request):
    """
    Score each row with a saved H2O k-means model. Tensor function.
    :param request: an iterable sequence of RowData
    :return: the predicted cluster for each row
    """
    params = []
    logging.info('_evaluate')
    logging.info('_evaluate request {}'.format(request))
    print(request)

    # Iterate over bundled rows
    for request_rows in request:
        print(request_rows.rows)
        print(len(request_rows.rows))
        # Iterate over rows
        logging.info('_evaluate request_rows {}'.format(request_rows.rows))
        for row in request_rows.rows:
            # Retrieve the numerical values of the parameters and append them to params
            param = [d.numData for d in row.duals]
            logging.info('_evaluate row {}'.format(param))
            params.append(param)

    # Load the saved k-means model from the directory next to this script
    h2o.init()
    dir_path = os.path.dirname(os.path.realpath(__file__)) + "\\kmeans_iris"
    results = h2o.load_model(dir_path)

    # Score the collected rows and convert the prediction frame to a list,
    # dropping the header row
    newData = h2o.H2OFrame(params)
    predictedNew = results.predict(newData)
    predicted_as_list = h2o.as_list(predictedNew, use_pandas=False)
    predicted_as_list.pop(0)

    response_rows = []
    for result in predicted_as_list:
        # Create an iterable of Dual with a numerical value
        duals = iter([SSE.Dual(numData=int(result[0]))])

        # Append the row data constructed to response_rows
        response_rows.append(SSE.Row(duals=duals))

    logging.info('_evaluate params {}'.format(params))
    logging.info('_evaluate predicted_as_list {}'.format(predicted_as_list))

    # Yield the row data constructed
    yield SSE.BundledRows(rows=response_rows)

def _geocode(request, context):
    api_selection = None
    coordinateList = []

    # Iterate over bundled rows
    for request_rows in request:
        # Iterate over rows
        for row in request_rows.rows:
            # The first strData is the coordinate
            data = [d.strData for d in row.duals][0]

            # Grab the selected API.
            # Possible choices are: Google, Open Street, GeocoderDotUS
            if not api_selection:
                api_selection = [d.strData for d in row.duals][1]
                if 'google' in api_selection.lower():
                    geolocator = GoogleV3(api_key="AIzaSyDlLSgsXiYG-zKYTuLxsbj1BNr7XoGpRMk")
                elif 'open street' in api_selection.lower():
                    geolocator = Nominatim(scheme='http')
                elif 'geocoderdotus' in api_selection.lower():
                    geolocator = GeocoderDotUS()
                else:
                    geolocator = Nominatim()

            # Geocode, retrying once on a timeout or transient error.
            # Initialize location so a total failure falls through to 'Unavailable'
            # below instead of raising NameError.
            location = None
            for i in range(2):
                try:
                    location = geolocator.geocode(data, timeout=2)
                    break
                except geopy.exc.GeocoderTimedOut:
                    pass
                except geopy.exc.GeocoderQueryError:
                    pass
                except urllib.error.HTTPError:
                    pass

            try:
                latitude = location.latitude
            except AttributeError:
                latitude = 'Unavailable'
            try:
                longitude = location.longitude
            except AttributeError:
                longitude = 'Unavailable'

            coordinates = '[' + str(longitude) + ', ' + str(latitude) + ']'
            coordinateList.append(coordinates)

    # Create an iterable of duals with the result
    duals = iter([[SSE.Dual(strData=c)] for c in coordinateList])

    # Yield the row data as bundled rows
    yield SSE.BundledRows(rows=[SSE.Row(duals=d) for d in duals])

def _cluster(request, context):
    """
    Look for clusters within a dimension using the given features. Scalar function.
    Three variants are implemented:
    :0: one dimension and a string of numeric features.
    :1: two dimensions and one measure. This will only work when used in the Qlik load script.
    :2: one dimension, the latitude and longitude.
    :
    :param request: an iterable sequence of RowData
    :param context:
    :return: the clustering classification for each row
    :Qlik expression examples:
    :<AAI Connection Name>.Cluster(dimension, sum(value1) & ';' & sum(value2), 'scaler=standard')
    """
    # Get a list from the generator object so that it can be iterated over multiple times
    request_list = [request_rows for request_rows in request]

    # Get the function id from the header to determine the variant being called
    function = ExtensionService._get_function_id(context)

    if function == 0:
        # The standard variant takes in one dimension and a string of semicolon separated features.
        variant = "standard"
    elif function == 1:
        # In the second variant, features for clustering are produced by pivoting the measure for the second dimension.
        variant = "two_dims"
    elif function == 2:
        # The third variant is for finding clusters in geospatial coordinates.
        variant = "lat_long"

    # Create an instance of the HDBSCANForQlik class.
    # This will take the request data from Qlik and prepare it for clustering.
    clusterer = HDBSCANForQlik(request_list, context, variant=variant)

    # Calculate the clusters and store in a Pandas series (or DataFrame in the case of a load script call)
    clusters = clusterer.scan()

    # Check if the response is a DataFrame.
    # This occurs when the load_script=true argument is passed in the Qlik expression.
    response_is_df = isinstance(clusters, pd.DataFrame)

    # Convert the response to a list of rows
    clusters = clusters.values.tolist()

    # We convert values to type SSE.Dual, and group columns into an iterable
    if response_is_df:
        response_rows = [iter([SSE.Dual(strData=row[0]), SSE.Dual(numData=row[1])]) for row in clusters]
    else:
        response_rows = [iter([SSE.Dual(numData=row)]) for row in clusters]

    # Values are then structured as SSE.Rows
    response_rows = [SSE.Row(duals=duals) for duals in response_rows]

    # Yield row data as bundled rows
    yield SSE.BundledRows(rows=response_rows)

def _linearRegression(request, context):
    # instantiate a list for measure data
    dataList = []

    # iterate over each request row (contains rows, duals, numData)
    for request_rows in request:
        # pull duals from each row, and the numData from duals
        for row in request_rows.rows:
            # the first numData contains the measure data
            data = [d.numData for d in row.duals][0]

            # fall back to 0 if the value is not a valid number
            try:
                float(data)
            except ValueError:
                data = 0

            # append each data point to the list
            dataList.append(data)

    # use the row index as the independent variable
    X_len = len(dataList)
    X = np.asarray(range(X_len))

    # convert the data into an array
    Y = np.asarray(dataList)

    # fit the linear regression model
    mdl = LinearRegression().fit(X.reshape(-1, 1), Y)

    # grab m and b from y = mx + b
    m = mdl.coef_[0]
    b = mdl.intercept_

    # calculate the regression line points
    regressionResults = []
    for i in range(X_len):
        y = m * i + b
        regressionResults.append(y)

    # Create an iterable of duals with the result
    duals = iter([[SSE.Dual(numData=d)] for d in regressionResults])

    # Yield the row data as bundled rows
    yield SSE.BundledRows(rows=[SSE.Row(duals=d) for d in duals])

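# Since X, m and b are already NumPy values, the point-by-point loop above
# can be collapsed into one vectorized expression with the same result:
regressionResults = (m * X + b).tolist()
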
def _getField(request, context):
    """
    Look up a field value for a user, returning obfuscated data when the user
    is not authorized, and log each access to a CSV audit file.
    :param request: iterable sequence of bundled rows
    :return: the requested (or obfuscated) value per row
    """
    mytime = datetime.utcnow()
    params = []
    csvrows = []
    csvrows.append(['DateTimeUTC', 'FieldRef', 'UserRef', 'Comment'])
    logging.debug("Request: " + str(request) + "\n")

    dataUpdate()  # Not optimal but will need to fix later
    global qpiData
    global qpiObfuscated
    global qpiAccess

    for request_rows in request:
        logging.debug("Request rows: " + str(request_rows) + "\n")
        response_rows = []
        for row in request_rows.rows:
            # Retrieve the string values of the parameters
            params = [d.strData for d in row.duals]

            # Retrieve the requested value from the loaded data here...
            try:
                if params[2] in qpiAccess:
                    returnValue = str(qpiData[params[1]][params[0]])
                    csvrows.append([mytime, params[1], params[2], 'Field: ' + params[0] + ' - ' + params[3]])
                    # Added threading from https://stackoverflow.com/questions/39440015/how-to-do-simple-async-calls-in-python
                    # needs to be thoroughly tested on LARGE data volumes
                    with futures.ThreadPoolExecutor(max_workers=1) as executor:
                        executor.submit(csvlogger, csvrows)
                else:
                    # User is not allowed to see real data: return obfuscated data, no need to log access
                    returnValue = str(qpiObfuscated[params[1]][params[0]])
            except Exception:
                returnValue = 'Can not retrieve requested data, see log file.'
                logging.error('Error in retrieving data\nNo data exists for ID: ' + params[1] + ' Field: ' + params[0] + ' for User: ' + params[2] + ' Comment: ' + params[3])

            # Create an iterable of Dual with the result
            duals = iter([SSE.Dual(strData=returnValue)])
            response_rows.append(SSE.Row(duals=duals))

        yield SSE.BundledRows(rows=response_rows)

def _translateScript(request, context):
    idNumList = []
    translationsList = []

    # Iterate over bundled rows
    for request_rows in request:
        # Iterate over rows
        for row in request_rows.rows:
            # grab the text
            text = [d.strData for d in row.duals][0]

            # grab the id
            idNum = [d.numData for d in row.duals][1]
            idNumList.append(idNum)

            # grab the source and destination languages
            src = [d.strData for d in row.duals][2]
            dest = [d.strData for d in row.duals][3]
            translationsList.append(text)

    try:
        translator = Translator()
        translations = translator.translate(translationsList, src=src, dest=dest)
    except Exception:
        # A bare "except: pass" would leave translations undefined below
        logging.exception('Translation failed')
        raise

    resultList = [i.text for i in translations]
    logging.info('Records translated: ' + str(len(resultList)))

    # Build one column of id duals and one column of translation duals
    dualsList = []
    dualsList.append([SSE.Dual(numData=d) for d in idNumList])
    dualsList.append([SSE.Dual(strData=d) for d in resultList])

    response_rows = []
    for i in range(len(idNumList)):
        duals = [dualsList[z][i] for z in range(len(dualsList))]
        response_rows.append(SSE.Row(duals=iter(duals)))

    # Set and send the table header
    table = SSE.TableDescription(name='Translations')
    table.fields.add(dataType=SSE.NUMERIC)
    table.fields.add(dataType=SSE.STRING)
    md = (('qlik-tabledescription-bin', table.SerializeToString()),)
    context.send_initial_metadata(md)

    yield SSE.BundledRows(rows=response_rows)

def _rest_single(request, context):
    """
    REST call using a single variable
    """
    logging.info('Entering {} TimeStamp: {}'.format(function_name, datetime.now().strftime("%H:%M:%S.%f")))
    url = config.get(q_function_name, 'url')
    logging.debug("Rest Url is set to {}".format(url))
    bCache = config.get(q_function_name, 'cache')
    logging.debug("Caching is set to {}".format(bCache))

    if bCache.lower() == "true":
        logging.info("Caching ****Enabled**** for {}".format(q_function_name))
    else:
        logging.info("Caching ****Disabled**** for {}".format(q_function_name))
        md = (('qlik-cache', 'no-store'),)
        context.send_initial_metadata(md)

    response_rows = []
    request_counter = 1
    for request_rows in request:
        logging.debug('Printing Request Rows - Request Counter {}'.format(request_counter))
        request_counter = request_counter + 1
        for row in request_rows.rows:
            # Retrieve the string value of the parameter.
            # One column is received, so [0] collects the first value in the list.
            param = [d.strData for d in row.duals][0]
            if len(param) == 0:
                logging.info('Exiting {} TimeStamp: {} due to Data being Empty'.format(function_name, datetime.now().strftime("%H:%M:%S.%f")))
                # Set an explicit result so the row below never references an undefined value
                result = 'Error'
            else:
                payload = '{"data":"' + param + '"}'
                logging.debug('Showing Payload: {}'.format(payload))
                resp = requests.post(url, data=payload)
                logging.debug('Show Payload Response as Text: {}'.format(resp.text))
                result = resp.text
                result = result.replace('"', '')
                result = result.strip()
                logging.debug('Show Result: {}'.format(result))

            # Create an iterable of Dual with the result
            duals = iter([SSE.Dual(strData=result)])
            response_rows.append(SSE.Row(duals=duals))

    # Yield the row data as bundled rows
    yield SSE.BundledRows(rows=response_rows)
    logging.info('Exiting {} TimeStamp: {}'.format(function_name, datetime.now().strftime("%H:%M:%S.%f")))

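# Building the JSON payload by string concatenation breaks as soon as param
# contains a double quote or backslash. A safer sketch using the standard
# json module (the Content-Type header the endpoint expects is an assumption):
import json

payload = json.dumps({'data': param})  # json handles quoting and escaping
resp = requests.post(url, data=payload, headers={'Content-Type': 'application/json'})
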
def _rest_30(request, context):
    """
    Joins the parameters into a single comma separated string and posts it
    to the configured REST endpoint.
    """
    logging.info('Entering {} TimeStamp: {}'.format(function_name, datetime.now().strftime("%H:%M:%S.%f")))
    url = config.get(q_function_name, 'url')
    bCache = config.get(q_function_name, 'cache')
    logging.debug("Caching is set to {}".format(bCache))

    if bCache.lower() == "true":
        logging.info("Caching ****Enabled**** for {}".format(q_function_name))
    else:
        logging.info("Caching ****Disabled**** for {}".format(q_function_name))
        md = (('qlik-cache', 'no-store'),)
        context.send_initial_metadata(md)

    # Iterate over bundled rows
    response_rows = []
    for request_rows in request:
        # Iterate over rows
        for row in request_rows.rows:
            # Retrieve the string values of the parameters
            param = [d.strData for d in row.duals]
            if len(param) == 0:
                logging.debug('Parameters are Empty')
                result = 'Error'
            else:
                # Aggregate the parameters to a single comma separated string
                payload = '{"data":"' + ','.join(param) + '"}'
                logging.debug('Showing Payload: {}'.format(payload))
                resp = requests.post(url, data=payload)
                logging.debug('Show Payload Response: {}'.format(resp.text))
                result = resp.text
                result = result.replace('"', '')
                result = result.strip()
                logging.debug('Show Result: {}'.format(result))

            # Create an iterable of Dual with the result
            duals = iter([SSE.Dual(strData=result)])
            response_rows.append(SSE.Row(duals=duals))

    # Yield the row data as bundled rows
    yield SSE.BundledRows(rows=response_rows)
    logging.info('Exiting {} TimeStamp: {}'.format(function_name, datetime.now().strftime("%H:%M:%S.%f")))

def get_response_rows(response, template):
    """
    Take in a list of responses and convert them to SSE.Rows based on the
    column types specified in the template.
    The template should be a list of the form: ["str", "num", "dual", ...]
    For string values use: "str"
    For numeric values use: "num"
    For dual values use: "dual"
    """
    response_rows = []

    # For each row in the response list
    for row in response:
        i = 0
        this_row = []

        if len(template) > 1:
            # For each column in the row
            for col in row:
                # Convert values to type SSE.Dual according to the template list
                if template[i] == "str":
                    if col is None:
                        col = "\x00"
                    elif type(col) is not str:
                        col = "{0:.5f}".format(col)
                    this_row.append(SSE.Dual(strData=col))
                elif template[i] == "num":
                    this_row.append(SSE.Dual(numData=col))
                elif template[i] == "dual":
                    this_row.append(SSE.Dual(strData=col, numData=col))
                i = i + 1
        else:
            # Convert the value to type SSE.Dual according to the template list
            if template[0] == "str":
                if row is None:
                    row = "\x00"
                elif type(row) is not str:
                    row = "{0:.5f}".format(row)
                this_row.append(SSE.Dual(strData=row))
            elif template[0] == "num":
                this_row.append(SSE.Dual(numData=row))
            elif template[0] == "dual":
                this_row.append(SSE.Dual(strData=row, numData=row))

        # Group the columns into an iterable and add it to response_rows
        response_rows.append(iter(this_row))

    # Values are then structured as SSE.Rows
    response_rows = [SSE.Row(duals=duals) for duals in response_rows]

    return response_rows

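# A hypothetical usage, showing how the template drives the per-column
# conversion: one string column and one numeric column per response row.
response = [('low', 0.25), ('high', 0.75)]
rows = get_response_rows(response, ['str', 'num'])
bundle = SSE.BundledRows(rows=rows)
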
def _getPDFTable(request, context):
    """
    Extract tables from a PDF using a tabula template and return the rows,
    tagged with a table ID per extracted table.
    :param request: iterable sequence of bundled rows
    :return: the extracted table rows
    """
    # empty parameters
    extraction_method = None
    path = None
    template = None

    for request_rows in request:
        # pull duals from each row, and the strData from duals
        for row in request_rows.rows:
            if extraction_method is None:
                extraction_method = [d.strData for d in row.duals][0]
            if path is None:
                path = [d.strData for d in row.duals][1]
            if template is None:
                template = [d.strData for d in row.duals][2]

    # read the PDF with the template
    if extraction_method == 'stream':
        df_list = read_pdf_with_template(path, template, stream=True)
    else:
        df_list = read_pdf_with_template(path, template, lattice=True)

    # concatenate the extracted tables, tagging each with a table ID
    final_df = pd.DataFrame()
    count = 1
    for df in df_list:
        df['tableID'] = str(count)
        final_df = pd.concat([final_df, df], axis=0, ignore_index=True)
        count = count + 1

    columns = final_df.columns

    # iterate through df columns and format as SSE duals
    dualsList = []
    for col in columns:
        tmpList = final_df[col].tolist()
        dualsList.append([SSE.Dual(strData=d) if type(d) is str else SSE.Dual(numData=d) for d in tmpList])

    # create response rows
    response_rows = []
    for i in range(len(tmpList)):
        duals = [dualsList[z][i] for z in range(len(dualsList))]
        response_rows.append(SSE.Row(duals=iter(duals)))

    # return the response
    yield SSE.BundledRows(rows=response_rows)

def _SomaLinha(request):
    """
    Sum the three numeric parameters across all rows. Aggregation function.
    """
    params = []
    for request_rows in request:
        for row in request_rows.rows:
            # Collect the three numeric parameters of the row
            p1 = [d.numData for d in row.duals][0]
            p2 = [d.numData for d in row.duals][1]
            p3 = [d.numData for d in row.duals][2]
            params.append(p1)
            params.append(p2)
            params.append(p3)

    result = sum(params)
    print('Variables: ', params, ' Sum is: ', result)

    duals = iter([SSE.Dual(numData=result)])
    yield SSE.BundledRows(rows=[SSE.Row(duals=duals)])

def _predictPlayByPlay(request, context):
    df = pd.DataFrame(columns=[
        'quarter', 'seconds_elapsed', 'offense_team', 'yardline', 'down',
        'yards_to_go', 'home_team', 'away_team', 'curr_home_score',
        'curr_away_score'
    ])

    # Iterate over bundled rows
    i = -1
    for request_rows in request:
        # Iterate over rows
        for row in request_rows.rows:
            i += 1

            # Unpack the string parameters by position
            duals = [d.strData for d in row.duals]
            modelName = duals[0]
            gameId = duals[1]
            quarter = duals[2]
            seconds_elapsed = duals[3]
            offense_team = duals[4]
            yardline = duals[5]
            down = duals[6]
            yards_to_go = duals[7]
            home_team = duals[8]
            away_team = duals[9]
            curr_home_score = duals[10]
            curr_away_score = duals[11]

            df.loc[i] = [
                quarter, int(seconds_elapsed), offense_team, int(yardline),
                int(down), int(yards_to_go), home_team, away_team,
                int(curr_home_score), int(curr_away_score)
            ]

    logging.info(modelName)
    model = p.load(open('models\\' + modelName, 'rb'), encoding='latin1')
    wp = list(model.predict_wp(df))

    # Create an iterable of duals with the result
    duals = iter([[SSE.Dual(numData=d)] for d in wp])

    # Yield the row data as bundled rows
    yield SSE.BundledRows(rows=[SSE.Row(duals=d) for d in duals])

def _encrypt(request, context):
    """
    Encrypt the first parameter of each row and return the ciphertext.
    :param request: iterable sequence of bundled rows
    :return: the encrypted value per row
    """
    for request_rows in request:
        # Iterate over rows
        for row in request_rows.rows:
            # Encrypt the first parameter; decode the ciphertext bytes so the
            # value can be sent back as string data
            encryptedValue = cipher.encrypt(str.encode([d.strData for d in row.duals][0])).decode('utf-8')

            # Create an iterable of Dual with the result
            duals = iter([SSE.Dual(strData=encryptedValue)])
            yield SSE.BundledRows(rows=[SSE.Row(duals=duals)])

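# The module-level cipher is never defined in these snippets. Its
# bytes-in/bytes-out encrypt/decrypt usage matches cryptography's Fernet,
# so one plausible setup (an assumption; per _decrypt below, the real key
# lives in qpi.config rather than being generated per run) would be:
from cryptography.fernet import Fernet

key = Fernet.generate_key()  # hypothetical: load the key from qpi.config instead
cipher = Fernet(key)

token = cipher.encrypt(b'secret')  # bytes in, bytes out
plain = cipher.decrypt(token)      # b'secret'
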
def _liveSQL(request, context):
    # Disable cache
    md = (('qlik-cache', 'no-store'),)
    context.send_initial_metadata(md)

    sqlQuery = None
    column = None
    connectionString = None
    indexField = None

    # iterate over each request row and pull the string parameters once
    for request_rows in request:
        for row in request_rows.rows:
            print(row)

            # get the SQL query
            if sqlQuery is None:
                sqlQuery = [d.strData for d in row.duals][0]

            # get the index of the column to return
            # ("is None" rather than truthiness, since column 0 is valid)
            if column is None:
                column = int([d.strData for d in row.duals][1])

            if connectionString is None:
                connectionString = [d.strData for d in row.duals][2]
                print(connectionString)

            if indexField is None:
                indexField = [d.strData for d in row.duals][3]
                print(indexField)

    # run the query and stringify the result set
    conn = pyodbc.connect(connectionString)
    data = pd.read_sql(sqlQuery, conn)
    data = data.applymap(str)
    columnName = data.columns[column]

    # protect embedded single quotes with a placeholder,
    # restored as a doubled (escaped) quote in the Qlik expression below
    commaReplace = "COMMAREPLACE"
    data.replace({"'": commaReplace}, inplace=True, regex=True)
    print(data)

    column = data.iloc[:, column].values.tolist()
    print(column)

    # build a Pick(FieldIndex(...)) expression from the column values;
    # the trailing ")" of the stringified tuple closes the Pick()
    finalList = []
    finalList.append("Pick(FieldIndex('" + indexField + "',[" + indexField + "])," +
                     str(tuple(column))[1:].replace(commaReplace, "''") +
                     " /**" + columnName + "**/")
    print(finalList)

    # Create an iterable of duals with the result
    duals = iter([[SSE.Dual(strData=d)] for d in finalList])

    # Yield the row data as bundled rows
    yield SSE.BundledRows(rows=[SSE.Row(duals=d) for d in duals])

def _decrypt(request, context):
    """
    Decrypt the first parameter of each row and log each access to a CSV audit file.
    :param request: iterable sequence of bundled rows
    :return: the decrypted value per row
    """
    mytime = datetime.utcnow()
    params = []
    csvrows = []
    csvrows.append(['DateTimeUTC', 'FieldRef', 'UserRef', 'Comment'])
    logging.debug("Request: " + str(request) + "\n")

    for request_rows in request:
        logging.debug("Request rows: " + str(request_rows) + "\n")
        response_rows = []
        for row in request_rows.rows:
            # Retrieve the string values of the parameters
            params = [d.strData for d in row.duals]

            # Decrypt the first parameter here...
            try:
                decryptedValue = str(cipher.decrypt(str.encode(params[0])), 'utf-8')
            except Exception:
                decryptedValue = 'Can not decrypt data, see log file.'
                logging.error('Error in decryption occurred:\n'
                              'Either the string provided is not encrypted or the Cipher token in qpi.config does not match the token used to originally encrypt the data.\n'
                              'To fix the token, either update qpi.config with the correct key OR re-encrypt the data with a new key.')

            # Create an iterable of Dual with the result
            duals = iter([SSE.Dual(strData=decryptedValue)])
            response_rows.append(SSE.Row(duals=duals))
            csvrows.append([mytime, params[1], params[2], params[3]])

        # Added threading from https://stackoverflow.com/questions/39440015/how-to-do-simple-async-calls-in-python
        # needs to be thoroughly tested on LARGE data volumes
        with futures.ThreadPoolExecutor(max_workers=1) as executor:
            executor.submit(csvlogger, csvrows)

        yield SSE.BundledRows(rows=response_rows)

def evaluate(script, ret_type, params=[]):
    """
    Evaluates a script with given parameters and constructs the result into a Row of duals.
    :param script: script to evaluate
    :param ret_type: return data type
    :param params: params to evaluate. Default: []
    :return: a RowData of string or numeric duals
    """
    # Evaluate script
    result = eval(script, {'args': params, 'np': np})

    # Transform the result to an iterable of Dual data
    if ret_type == ReturnType.String:
        duals = iter([SSE.Dual(strData=result)])
    elif ret_type == ReturnType.Numeric:
        duals = iter([SSE.Dual(numData=result)])

    return SSE.BundledRows(rows=[SSE.Row(duals=duals)])

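# A hypothetical call: the evaluated script sees its parameters through the
# name "args" and NumPy through "np", the only names exposed in the eval globals.
row_data = evaluate('np.mean(args)', ReturnType.Numeric, params=[1.0, 2.0, 3.0])
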
def evaluate(script, ret_type, params=[]):
    """
    Evaluates a script with given parameters.
    :param script: script to evaluate
    :param ret_type: return data type
    :param params: params to evaluate. Default: []
    :return: a RowData of string dual
    """
    if ret_type == ReturnType.String:
        # Evaluate script
        result = eval(script, {'args': params})

        # Transform the result to an iterable of Dual data with a string value
        duals = iter([SSE.Dual(strData=result)])

        # Create row data out of duals
        return SSE.BundledRows(rows=[SSE.Row(duals=duals)])
    else:
        # This plugin does not support other return types than string
        raise grpc.RpcError(grpc.StatusCode.UNIMPLEMENTED,
                            'Return type {} is not supported in this plugin.'.format(ret_type))

def _sentiment(request, context):
    """
    Score each sentence with VADER sentiment analysis and return the
    sentence id alongside its polarity scores.
    :param request: iterable sequence of bundled rows
    :return: sentence id and sentiment score per row
    """
    # use the module-level VADER sentiment analyser
    global analyser
    sentence_ids = []
    scores = []

    for request_rows in request:
        # pull duals from each row, and the strData from duals
        for row in request_rows.rows:
            # get the sentence id from the row and add it to the list
            sentence_id = [d.strData for d in row.duals][0]
            sentence_ids.append(sentence_id)

            # get the sentence from the row, score it and add the score to the list
            sentence = [d.strData for d in row.duals][1]
            score = str(analyser.polarity_scores(str(sentence)))
            scores.append(score)

    print(sentence_ids)
    print(scores)

    # Create the duals for the two result columns
    dualsList = []
    dualsList.append([SSE.Dual(strData=d) for d in sentence_ids])
    dualsList.append([SSE.Dual(strData=d) for d in scores])

    response_rows = []
    for i in range(len(sentence_ids)):
        duals = [dualsList[z][i] for z in range(len(dualsList))]
        response_rows.append(SSE.Row(duals=iter(duals)))

    yield SSE.BundledRows(rows=response_rows)

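# The module-level analyser is assumed here to be a VADER
# SentimentIntensityAnalyzer; a minimal setup sketch:
from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer

analyser = SentimentIntensityAnalyzer()
# polarity_scores returns a dict: {'neg': ..., 'neu': ..., 'pos': ..., 'compound': ...}
print(analyser.polarity_scores('Qlik is great'))
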
def _cache(request, context):
    """
    Cache enabled. Add the datetime stamp to the end of each string value.
    :param request: iterable sequence of bundled rows
    :param context: not used.
    :return: string
    """
    # Iterate over bundled rows
    for request_rows in request:
        # Iterate over rows
        for row in request_rows.rows:
            # Retrieve the string value of the parameter.
            # One column is received, so [0] collects the first value in the list.
            param = [d.strData for d in row.duals][0]

            # Join with the current datetime stamp
            result = param + ' ' + datetime.now().isoformat()

            # Create an iterable of Dual with the result
            duals = iter([SSE.Dual(strData=result)])

            # Yield the row data as bundled rows
            yield SSE.BundledRows(rows=[SSE.Row(duals=duals)])