Example #1
import json
import logging
from sqlalchemy import Column, Integer, String, Float, DateTime, Boolean, func
from iotfunctions import bif
from ai.functions import TurbineHTTPPreload
from iotfunctions.metadata import EntityType
from iotfunctions.db import Database
from ai import settings
from scripts.simple_mfg_entities import Equipment
import datetime as dt
from iotfunctions.enginelog import EngineLogging
EngineLogging.configure_console_logging(logging.DEBUG)
logger = logging.getLogger(__name__)

#db_schema = 'bluadmin' #  set if you are not using the default
#with open('credentials_MAS-Demo.json', encoding='utf-8') as F:
#    credentials = json.loads(F.read())
print("here")
#with open('credentials.json', encoding='utf-8') as F:
db_schema = 'bluadmin'  #  set if you are not using the default
with open('../bouygues-beta-credentials.json', encoding='utf-8') as F:
    credentials = json.loads(F.read())
#db_schema = 'dash100462'  # replace if you are not using the default schema
#with open('credentials_dev2.json', encoding='utf-8') as F:
#    credentials = json.loads(F.read())
print("here db")
db = Database(credentials=credentials)

entity_name = 'ACME_Compressors'
entityType = entity_name
db_schema = None  # replace if you are not using the default schema
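# ---------------------------------------------------------------------------
# Assumed context for the tests below (a sketch; the original test module
# defines these names elsewhere, so import paths and constant values are
# assumptions and may need adjusting to your project layout):
import pandas as pd                    # used to read the sample CSVs
import numpy as np                     # used in the aggregation comparison
from sklearn.metrics import r2_score   # used to compare anomaly scores
from nose.tools import assert_true     # assumption: nose-style assertions

# Column-name constants referenced throughout (actual values are assumptions):
# Temperature, Humidity, KW = 'Temperature', 'Humidity', 'KW'
# spectral, spectralinv, fft, sal, kmeans = \
#     'spectral', 'spectral_inv', 'fft', 'saliency', 'kmeans'

# The estimator/anomaly classes (GBMRegressor, GBMForecaster, VIAnomalyScore,
# SpectralAnomalyScoreExt, SaliencybasedGeneralizedAnomalyScoreV2,
# FFTbasedGeneralizedAnomalyScoreV2, KMeansAnomalyScoreV2, Aggregation,
# AggregateWithExpression, add_simple_aggregator_execute), FileModelStore and
# the DatabaseDummy test double are assumed to come from the project's
# anomaly/test-utility modules (e.g. mmfunctions.anomaly); they are not part
# of this snippet.
# ---------------------------------------------------------------------------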
def test_light_gbm():

    numba_logger = logging.getLogger('numba')
    numba_logger.setLevel(logging.ERROR)

    # Run on the good pump first
    # Get stuff in
    print('Read Regressor Sample data in')
    df_i = pd.read_csv('./data/RegressionTestData.csv', index_col=False, parse_dates=['DATETIME'])
    df_i = df_i.rename(columns={'DATETIME': 'timestamp'})

    df_i['entity'] = 'MyShop'
    df_i[Temperature] = pd.to_numeric(df_i[Temperature], errors='coerce')
    df_i[Humidity] = pd.to_numeric(df_i[Humidity], errors='coerce')

    # and sort it by timestamp
    df_i = df_i.sort_values(by='timestamp')
    df_i = df_i.set_index(['entity', 'timestamp']).dropna()

    for i in range(0, df_i.index.nlevels):
        print(str(df_i.index.get_level_values(i)))

    EngineLogging.configure_console_logging(logging.DEBUG)

    #####
    print('Create dummy database')
    db_schema=None
    db = DatabaseDummy()
    print (db.model_store)

    #####

    print('lightGBM regressor - testing training pipeline with sklearn 0.21.3')
    db.model_store = FileModelStore('/tmp')

    jobsettings = { 'db': db, '_db_schema': 'public'}

    brgi = GBMRegressor(features=[Temperature, Humidity], targets=[KW], predictions=['KW_pred'],
                        n_estimators=500, num_leaves=40, learning_rate=0.2, max_depth=-1)

    brgi.stop_auto_improve_at = 0.4
    brgi.active_models = dict()

    et = brgi._build_entity_type(columns=[Column(Temperature, Float())], **jobsettings)
    brgi._entity_type = et

    df_i = brgi.execute(df=df_i)

    print('lightGBM regressor - testing training pipeline with recent sklearn and lightgbm')

    print('lightGBM regressor - first time training')
    jobsettings = { 'db': db, '_db_schema': 'public'}

    brgi = GBMRegressor(features=[Temperature, Humidity], targets=[KW], predictions=['KW_pred'],
                        n_estimators=500, num_leaves=40, learning_rate=0.2, max_depth=-1)

    brgi.stop_auto_improve_at = 0.4
    brgi.active_models = dict()

    et = brgi._build_entity_type(columns=[Column(Temperature, Float())], **jobsettings)
    brgi._entity_type = et

    df_i = brgi.execute(df=df_i)
    print('lightGBM regressor done')

    mtrc = brgi.active_models['model.TEST_ENTITY_FOR_GBMREGRESSOR.GBMRegressor.KW.MyShop'][0].eval_metric_test
    print ('Trained model r2 ', mtrc)
    assert_true(mtrc > 0.4)

    print('lightGBM regressor - testing training pipeline done ')


    #####

    print('lightGBM regressor - inference')

    print('lightGBM regressor - first time training')
    jobsettings = { 'db': db, '_db_schema': 'public'} #, 'save_trace_to_file' : True}

    brgi = GBMRegressor(features=[Temperature, Humidity], targets=[KW], predictions=['KW_pred'])
    brgi.stop_auto_improve_at = 0.4
    brgi.active_models = dict()

    et = brgi._build_entity_type(columns=[Column(Temperature, Float())], **jobsettings)

    brgi._entity_type = et
    df_i = brgi.execute(df=df_i)
    print('lightGBM regressor done')

    mtrc = brgi.active_models['model.TEST_ENTITY_FOR_GBMREGRESSOR.GBMRegressor.KW.MyShop'][0].eval_metric_test
    print ('Trained model r2 ', mtrc)
    assert_true(mtrc > 0.4)

    print('lightGBM regressor - inference done')

    #####

    print('lightGBM regressor - enforce retraining')

    print('lightGBM regressor - first time training')
    jobsettings = { 'db': db, '_db_schema': 'public'} #, 'save_trace_to_file' : True}

    brgi = GBMRegressor(features=[Temperature, Humidity], targets=[KW], predictions=['KW_pred'])
    brgi.stop_auto_improve_at = mtrc + 2  # force retrain as r2 metric is considered bad now
    brgi.active_models = dict()

    et = brgi._build_entity_type(columns=[Column(Temperature, Float())], **jobsettings)
    brgi._entity_type = et
    df_i = brgi.execute(df=df_i)
    print('lightGBM regressor done')

    mtrc = brgi.active_models['model.TEST_ENTITY_FOR_GBMREGRESSOR.GBMRegressor.KW.MyShop'][0].eval_metric_test
    print ('Trained model r2 ', mtrc)
    assert_true(mtrc > 0.4)

    print('lightGBM regressor - enforce retraining done')

    #####

    print('lightGBM forecaster - first time training')
    jobsettings = { 'db': db, '_db_schema': 'public'} #, 'save_trace_to_file' : True}

    brgei = GBMForecaster(features=[Temperature, Humidity], targets=[KW], predictions=['KW_pred'], lags=[1,3,7])
    brgei.stop_auto_improve_at = mtrc + 2  # force retrain as r2 metric is considered bad now
    brgei.active_models = dict()

    et = brgei._build_entity_type(columns=[Column(Temperature, Float())], **jobsettings)
    brgei._entity_type = et
    df_i = brgei.execute(df=df_i)
    print('lightGBM forecaster done')

    mtrc = brgei.active_models['model.TEST_ENTITY_FOR_GBMFORECASTER.GBMForecaster.KW.MyShop'][0].eval_metric_test
    print ('Trained model r2 ', mtrc)
    assert_true(mtrc > 0.4)

    print('lightGBM forecaster - training done')


    pass
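# The tests in this module are plain functions with nose/pytest-style
# assertions; they expect the sample CSV files under ./data/ referenced in
# each test (e.g. RegressionTestData.csv, AzureAnomalysample.csv) to be
# present relative to the working directory.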
def test_anomaly_scores():

    numba_logger = logging.getLogger('numba')
    numba_logger.setLevel(logging.ERROR)

    ####
    print('Create dummy database')
    db_schema=None
    db = DatabaseDummy()
    print (db.model_store)

    #####

    jobsettings = { 'db': db, '_db_schema': 'public'}
    EngineLogging.configure_console_logging(logging.DEBUG)

    # Run on the good pump first
    # Get stuff in
    print('Read Anomaly Sample data in')
    df_i = pd.read_csv('./data/AzureAnomalysample.csv', index_col=False, parse_dates=['timestamp'])

    df_i['entity'] = 'MyRoom'
    df_i[Temperature] = df_i['value'] + 20
    df_i = df_i.drop(columns=['value'])

    # and sort it by timestamp
    df_i = df_i.sort_values(by='timestamp')
    df_i = df_i.set_index(['entity', 'timestamp']).dropna()

    for i in range(0, df_i.index.nlevels):
        print(str(df_i.index.get_level_values(i)))

    #####
    print('Use scaling model generated with sklearn 0.21.3')

    print('Compute Saliency Anomaly Score')
    sali = SaliencybasedGeneralizedAnomalyScoreV2(Temperature, 12, True, sal)
    et = sali._build_entity_type(columns=[Column(Temperature, Float())], **jobsettings)
    sali._entity_type = et
    df_i = sali.execute(df=df_i)

    print('Compute FFT Anomaly Score')
    ffti = FFTbasedGeneralizedAnomalyScoreV2(Temperature, 12, True, fft)
    et = ffti._build_entity_type(columns=[Column(Temperature, Float())], **jobsettings)
    ffti._entity_type = et
    df_i = ffti.execute(df=df_i)

    print('Compute K-Means Anomaly Score')
    kmi = KMeansAnomalyScoreV2(Temperature, 12, True, kmeans)
    et = kmi._build_entity_type(columns=[Column(Temperature, Float())], **jobsettings)
    kmi._entity_type = et
    df_comp = kmi.execute(df=df_i)

    print("Executed Anomaly functions on sklearn 0.21.3")

    print("Now generate new scalings with recent sklearn")
    db.model_store = FileModelStore('/tmp')

    print('Compute Spectral Anomaly Score')
    spsi = SpectralAnomalyScoreExt(Temperature, 12, spectral, spectralinv)
    et = spsi._build_entity_type(columns=[Column(Temperature, Float())], **jobsettings)
    spsi._entity_type = et
    df_i = spsi.execute(df=df_i)

    print('Compute Saliency Anomaly Score')
    sali = SaliencybasedGeneralizedAnomalyScoreV2(Temperature, 12, True, sal)
    et = sali._build_entity_type(columns=[Column(Temperature, Float())], **jobsettings)
    sali._entity_type = et
    df_i = sali.execute(df=df_i)

    print('Compute FFT Anomaly Score')
    ffti = FFTbasedGeneralizedAnomalyScoreV2(Temperature, 12, True, fft)
    et = ffti._build_entity_type(columns=[Column(Temperature, Float())], **jobsettings)
    ffti._entity_type = et
    df_i = ffti.execute(df=df_i)

    print('Compute K-Means Anomaly Score')
    kmi = KMeansAnomalyScoreV2(Temperature, 12, True, kmeans)
    et = kmi._build_entity_type(columns=[Column(Temperature, Float())], **jobsettings)
    kmi._entity_type = et
    df_comp = kmi.execute(df=df_i)

    print("Executed Anomaly functions")

    # df_comp.to_csv('./data/AzureAnomalysampleOutputV2.csv')
    df_o = pd.read_csv('./data/AzureAnomalysampleOutputV2.csv')

    # print('Compare Scores - Linf')

    print('Compare Scores R2-score')

    comp2 = {spectral: r2_score(df_o[spectralinv].values, df_comp[spectralinv].values),
             fft: r2_score(df_o[fft].values, df_comp[fft].values),
             sal: r2_score(df_o[sal].values, df_comp[sal].values),
             kmeans: r2_score(df_o[kmeans].values, df_comp[kmeans].values)}

    print(comp2)

    # assert_true(comp2[spectral] > 0.9)
    assert_true(comp2[fft] > 0.9)
    assert_true(comp2[sal] > 0.9)
    # assert_true(comp2[kmeans] > 0.9)

    df_agg = df_i.copy()

    # add frequency to time
    df_agg = df_agg.reset_index().set_index(['timestamp']).asfreq(freq='T')
    df_agg['site'] = 'Munich'
    df_agg = df_agg.reset_index().set_index(['entity', 'timestamp', 'site']).dropna()

    print('Compute Spectral Anomaly Score - aggr')
    spsi = SpectralAnomalyScoreExt(Temperature, 12, spectral, spectralinv)
    et = spsi._build_entity_type(columns=[Column(Temperature, Float())], **jobsettings)
    spsi._entity_type = et
    df_agg = spsi.execute(df=df_agg)

    print('Compute K-Means Anomaly Score - aggr')
    kmi = KMeansAnomalyScoreV2(Temperature, 12, True, kmeans)
    et = kmi._build_entity_type(columns=[Column(Temperature, Float())], **jobsettings)
    kmi._entity_type = et
    df_agg = kmi.execute(df=df_agg)

    print('Compute Saliency Anomaly Score - aggr')
    sali = SaliencybasedGeneralizedAnomalyScoreV2(Temperature, 12, True, sal)
    et = sali._build_entity_type(columns=[Column(Temperature, Float())], **jobsettings)
    sali._entity_type = et
    df_agg = sali.execute(df=df_agg)

    print('Compute FFT Anomaly Score - aggr')
    ffti = FFTbasedGeneralizedAnomalyScoreV2(Temperature, 12, True, fft)
    et = ffti._build_entity_type(columns=[Column(Temperature, Float())], **jobsettings)
    ffti._entity_type = et
    df_agg = ffti.execute(df=df_agg)

    print(df_agg.describe())

    comp3 = {spectral: r2_score(df_o[spectralinv].values, df_agg[spectralinv].values),
             fft: r2_score(df_o[fft].values, df_agg[fft].values),
             sal: r2_score(df_o[sal].values, df_agg[sal].values),
             kmeans: r2_score(df_o[kmeans].values, df_agg[kmeans].values)}

    print(comp3)

    print("Executed Anomaly functions on aggregation data")

    pass
Example #4
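# As in Example #1, this snippet assumes module-level imports and constants
# (logging, pandas as pd, Temperature, VIAnomalyScore, DatabaseDummy,
# EngineLogging, and Column/Float from sqlalchemy) that are defined in the
# original test module but not shown here.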
def test_vianomaly_score():

    numba_logger = logging.getLogger('numba')
    numba_logger.setLevel(logging.ERROR)

    # Run on the good pump first
    # Get stuff in
    print('Read VI Anomaly sample data in')
    df_i = pd.read_csv('./data/PumpTestData.csv', index_col=False, parse_dates=['evt_timestamp'])
    df_i = df_i.rename(columns={'evt_timestamp': 'timestamp', 'deviceid': 'entity'})

    # and sort it by timestamp
    df_i = df_i.sort_values(by='timestamp')
    df_i = df_i[df_i['entity'] == '04714B601096']   # single entity to reduce test time
    df_i = df_i.set_index(['entity', 'timestamp']).dropna()

    for i in range(0, df_i.index.nlevels):
        print(str(df_i.index.get_level_values(i)))

    EngineLogging.configure_console_logging(logging.DEBUG)

    #####
    print('Create dummy database')
    db_schema=None
    db = DatabaseDummy()
    print (db.model_store)

    #####
    print('Train VIAnomaly model for ' + str(df_i.index.levels[0].values))
    jobsettings = { 'db': db, '_db_schema': 'public'} #, 'save_trace_to_file' : True}

    # Now run the anomaly functions as if they were executed in a pipeline
    vasi = VIAnomalyScore(['speed'], ['rms_x'])
    #spsi.epochs = 1  # only for testing model storage
    vasi.epochs = 30

    vasi.auto_train = True
    vasi.delete_model = True
    et = vasi._build_entity_type(columns = [Column('MinTemp',Float())], **jobsettings)
    et.name = 'IOT_SHADOW_PUMP_DE_GEN5'

    vasi._entity_type = et
    df_i = vasi.execute(df=df_i)
    #####

    print('VIAnomaly score - inference')

    vasi = VIAnomalyScore(['speed'], ['rms_x'])
    vasi.epochs = 30
    vasi.auto_train = True

    vasi.delete_model = False

    et = vasi._build_entity_type(columns=[Column(Temperature, Float())], **jobsettings)
    et.name = 'IOT_SHADOW_PUMP_DE_GEN5'

    vasi._entity_type = et
    df_i = vasi.execute(df=df_i)
    print('VIAnomaly inferencing done')

    pass
def main(argv):
    '''
    You can test functions locally before registering them on the server to
    understand how they work.

    Supply credentials by pasting them from the usage section into the UI.
    Place your credentials in a separate file that you don't check into the repo.
    '''
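    # Typical local invocation (a sketch; the script name and the credentials
    # flag added by script_utils.add_common_args are assumptions):
    #   python test_func_sklearn.py test -model_path sklearn_model.pickle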

    import argparse
    parser = argparse.ArgumentParser(
        description=f"Tester for PredictSKLearn iotfunction")
    parser.add_argument(
        'operation',
        type=str,
        help="Operation to perform: local test, register the function, store the model pickle in COS, "
             "run a test query on entity data, or test/register/unregister the extend function "
             "(testext, regext, unregext)",
        choices=[
            'test', 'register', 'store', 'query', 'testext', 'regext',
            'unregext'
        ],
        default='test')
    parser.add_argument('-model_file',
                        type=str,
                        help=f"Model file to store",
                        default='sklearn_model.pickle')
    parser.add_argument('-model_path',
                        type=str,
                        help=f"Model path in COS",
                        default='sklearn_model.pickle')
    parser.add_argument('-dependent_variables',
                        type=str,
                        help=f"Columns to pass to predict",
                        default='*')
    parser.add_argument('-predicted_value',
                        type=str,
                        help=f"Name of the [redicted value column]",
                        default='predicted')
    script_utils.add_common_args(parser, argv)
    args = parser.parse_args(argv[1:])

    # logging.basicConfig(level=args.loglevel)
    from iotfunctions.enginelog import EngineLogging
    EngineLogging.configure_console_logging(args.loglevel)

    db, db_schema = script_utils.setup_iotfunc(args.creds_file, args.echo_sql)
    # pprint.pprint(db.credentials)

    import phg_iotfuncs.func_sklearn
    if args.operation == 'test':
        test(db, db_schema, phg_iotfuncs.func_sklearn.PredictSKLearn,
             args.model_path, args.dependent_variables, args.predicted_value)
    elif args.operation == 'register':
        script_utils.registerFunction(db, db_schema,
                                      phg_iotfuncs.func_sklearn.PredictSKLearn)
    elif args.operation == 'store':
        print(
            f"Storing model from {args.model_file} into COS at {args.model_path}"
        )
        with io.open(args.model_file, 'rb') as F:
            import pickle
            model_object = pickle.load(F)
        db.cos_save(model_object, args.model_path, binary=True, serialize=True)
    elif args.operation == 'query':
        print(f"Query another Entity's data")
        entities = db.entity_type_metadata.keys()
        pprint.pprint(db.entity_type_metadata['test_entity_for_AMQPPreload'])
        em = db.entity_type_metadata['test_entity_for_AMQPPreload']
        df = db.read_table(em['metricTableName'],
                           em['schemaName'],
                           parse_dates=None,
                           columns=None,
                           timestamp_col=None,
                           start_ts=None,
                           end_ts=None,
                           entities=None,
                           dimension=None)
        # df=db.read_table(em['metricTableName'],em['schemaName'], parse_dates=None, columns=None, timestamp_col=em['metricTimestampColumn'], start_ts=None, end_ts=None, entities=None, dimension=em['dimensionTableName'])
        print(f"got df {df.shape}")
        print(df.head(2))
        print('.........')
        print(df.tail(2))
    elif args.operation == 'testext':
        testExt(db, db_schema, phg_iotfuncs.func_sklearn.ExtendEntityPreload)
    elif args.operation == 'regext':
        script_utils.registerFunction(
            db, db_schema, phg_iotfuncs.func_sklearn.ExtendEntityPreload)
    elif args.operation == 'unregext':
        rc = db.unregister_functions(['ExtendEntityPreload'])
        print(f"unregistering function rc={rc}")
import json
import logging
from sqlalchemy import Column, Integer, String, Float, DateTime, Boolean, func
from iotfunctions import bif
from iotfunctions.metadata import EntityType
from iotfunctions.entity import make_sample_entity
from iotfunctions.db import Database
from iotfunctions.enginelog import EngineLogging
from iotfunctions.base import BasePreload
from iotfunctions import ui

EngineLogging.configure_console_logging(logging.WARNING)

# replace with a credentials dictionary or provide a credentials file
with open('../scripts/credentials_as_dev.json', encoding='utf-8') as F:
    credentials = json.loads(F.read())

db = Database(credentials=credentials)
db_schema = None  # set if you are not using the default

entity_1 = make_sample_entity(db=db,
                              schema=db_schema,
                              name='test_10_years',
                              register=True,
                              data_days=3640,
                              freq='1d',
                              entity_count=2,
                              float_cols=3,
                              date_cols=2,
                              string_cols=1,
                              bool_cols=0)
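# A minimal follow-up sketch (assumption: the generated sample table can be
# read back with Database.read_table, as done elsewhere in these scripts):
df_sample = db.read_table('test_10_years', db_schema)
print(f"sample entity data {df_sample.shape}")
print(df_sample.head())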
def test_aggregation():

    numba_logger = logging.getLogger('numba')
    numba_logger.setLevel(logging.ERROR)

    # Run on the good pump first
    # Get stuff in
    print('Read Regressor Sample data in')
    df_i = pd.read_csv('./data/RegressionTestData.csv', index_col=False, parse_dates=['DATETIME'])
    df_i = df_i.rename(columns={'DATETIME': 'timestamp'})

    df_i['entity'] = 'MyShop'

    #print(type(df_i['timestamp'][0]))
    df_i = df_i.dropna()

    # make sure timestamp is a datetime (aggregations are very picky about datetime indices)
    df_i['timestamp'] = pd.to_datetime(df_i['timestamp']) #pd.to_datetime(df_rst.index, format="%Y-%m-%d-%H.%M.%S.%f")
    df_i['TEMP_AIR'] = df_i['TEMP_AIR'].astype(float)

    # and sort it by timestamp
    df_i = df_i.sort_values(by='timestamp')
    df_i = df_i.set_index(['entity', 'timestamp']).dropna()


    for i in range(0, df_i.index.nlevels):
        print(str(df_i.index.get_level_values(i)))

    EngineLogging.configure_console_logging(logging.DEBUG)

    #####
    print('Create dummy database')
    db_schema=None
    db = DatabaseDummy()
    print (db.model_store)

    #####

    jobsettings = { 'db': db, '_db_schema': 'public'} #, 'save_trace_to_file' : True}

    # build closure from aggregation class
    func = AggregateWithExpression

    # prepare parameter list for closure
    params_dict = {}
    params_dict['source'] = 'TEMP_AIR'
    params_dict['name'] = 'Temp_diff'
    params_dict['expression'] = 'x.max()-x.min()'

    # replace aggregate call with 'execute_AggregateWithExpression'
    func_name = 'execute_AggregateWithExpression'
    add_simple_aggregator_execute(func, func_name)

    # finally set up closure
    func_clos = getattr(func(**params_dict), func_name)


    # set up an Aggregation thingy with the entity index, timestamp index,
    # desired granularity and a (short) chain of aggregators
    # granularity = frequency, dimension(s), include entity, entity id
    aggobj = Aggregation(None, ids=['entity'], timestamp='timestamp', granularity=('D', None, True, 0),
                    simple_aggregators=[(['TEMP_AIR'], func_clos, 'x.max() - x.min()')])

    print(aggobj)


    et = aggobj._build_entity_type(columns=[Column(Temperature, Float())], **jobsettings)

    df_agg = aggobj.execute(df=df_i)
    df_agg_comp = pd.read_csv('./data/aggregated.csv', index_col=False, parse_dates=['timestamp'])

    assert_true(np.allclose(df_agg['x.max() - x.min()'].values, df_agg_comp['x.max() - x.min()'].values))

    print('Aggregation done', df_agg)

    pass
Example #8
import logging

from iotfunctions import entity
from iotfunctions import metadata
from iotfunctions.metadata import EntityType
from iotfunctions.db import Database
from iotfunctions.enginelog import EngineLogging
from iotfunctions import estimator
from iotfunctions.ui import (UISingle, UIMultiItem, UIFunctionOutSingle,
                 UISingleItem, UIFunctionOutMulti, UIMulti, UIExpression,
                 UIText, UIStatusFlag, UIParameters)
from mmfunctions.anomaly import (SaliencybasedGeneralizedAnomalyScore, SpectralAnomalyScore,
                 FFTbasedGeneralizedAnomalyScore, KMeansAnomalyScore)
import datetime as dt
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression
from sklearn import metrics
import scipy as sp
import scipy.fftpack
import skimage as ski
from skimage import util as skiutil # for nifty windowing
import pyod as pyod
from pyod.utils.data import generate_data
from pyod.utils.data import evaluate_print
from pyod.utils.example import visualize
from pyod.models.knn import KNN
from pyod.models.iforest import IForest
# %matplotlib inline  # Jupyter cell magic; only valid inside a notebook
from pandas.plotting import register_matplotlib_converters
register_matplotlib_converters()

EngineLogging.configure_console_logging(logging.INFO)
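# Hedged sketch exercising the pyod KNN detector imported above (a minimal
# example on synthetic numpy data, not part of the original script):
import numpy as np

rng = np.random.RandomState(42)
X_demo = np.vstack([rng.normal(0, 1, size=(200, 2)),    # inliers
                    rng.uniform(4, 6, size=(10, 2))])   # obvious outliers
knn_clf = KNN(contamination=0.05)
knn_clf.fit(X_demo)                                      # unsupervised fit
print('outlier labels:', knn_clf.labels_[-10:])          # 1 marks an outlier
print('outlier scores:', knn_clf.decision_scores_[-10:])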
def main(argv):
    '''
    You can test functions locally before registering them on the server to
    understand how they work.

    Supply credentials by pasting them from the usage section into the UI.
    Place your credentials in a separate file that you don't check into the repo.
    '''
    # Get the IoTFunctions lib path
    import os, sys, argparse
    sys.path.append(os.path.realpath(os.path.join(os.path.dirname(__file__),'..')))

    parser = argparse.ArgumentParser(description=f"Tester for OSIPI iotfunctions")

    script_utils.add_operations(parser,['osi_dbtest','osi_list'])
    script_utils.add_common_args(parser,argv)

    addOSIPiArgs(argv[0],'credentials_osipi',parser)

    args = parser.parse_args(argv[1:])

    # logging.basicConfig(level=args.loglevel)
    EngineLogging.configure_console_logging(args.loglevel)

    db,db_schema=script_utils.setup_iotfunc(args.creds_file,args.echo_sql)
    # pprint.pprint(db.credentials)
    import os

    if args.points:
        from phg_iotfuncs.func_osipi import PhGOSIPIPointsPreload as TargetFunc
        entityName=args.entity_type if args.entity_type else args.entityNamePrefix+TargetFunc.__name__
        if args.operation=='test':
            point_attr_map=script_utils.loadPointsAttrMap(args.point_attr_map_file)
            test(db,db_schema,
                    TargetFunc(args.pihost, args.piport,args.piuser,args.pipass,
                                args.points_name_prefix, point_attr_map, args.date_field,
                                'osipi_preload_ok'))
        elif args.operation=='register':
            script_utils.registerFunction(db,db_schema,TargetFunc)
        elif args.operation=='create':
            if not args.point_attr_map_file or not args.points_name_prefix:
                print(f"-point_attr_map_file and -points_name_prefix must be specified for operation {args.operation}")
                return

            point_attr_map=script_utils.loadPointsAttrMap(args.point_attr_map_file)

            # Create the list of columns
            columns=[script_utils.to_sqlalchemy_column(v[1],v[2] if len(v)>2 else float,args.date_field) for v in point_attr_map.values()]                

            # attributes=list(dict.fromkeys([v[1] for v in point_attr_map.values()]))

            print(f"Creating entity {entityName} with columns {columns} specified in {args.point_attr_map_file}")
            script_utils.createEntity(db,db_schema,entityName,columns,
                    function=TargetFunc,
                    func_input={
                        'osipi_host': args.pihost,
                        'osipi_port': args.piport,
                        'osipi_user': args.piuser,
                        'osipi_pass': args.pipass, 
                        'date_field': args.date_field,
                        'name_filter': args.points_name_prefix,
                        'points_attr_map': point_attr_map
                    },
                    func_output={'osipi_preload_ok':'osipi_preload_ok'})
            # script_utils.createEntity(db,db_schema,entityName,attributes)
        elif args.operation=='osi_list':
            # List all Points defined in the target OSIPi server
            from phg_iotfuncs.osipiutils import listOSIPiPoints
            listOSIPiPoints(args)

    elif args.elements:
        from phg_iotfuncs.func_osipi import PhGOSIElemsPreload as TargetFunc
        entityName=args.entity_type if args.entity_type else args.entityNamePrefix+TargetFunc.__name__
        if args.operation=='test':
            test(db,db_schema,
                    TargetFunc(
                        args.pihost, args.piport,args.piuser,args.pipass,
                        args.date_field,args.parent_element_path,
                        'osipi_elements_preload_ok'))
        elif args.operation=='register':
            script_utils.registerFunction(db,db_schema,TargetFunc)
        elif args.operation=='create':
            # get a data sample to figure out the attributes
            from phg_iotfuncs import osipiutils, func_osipi

            if not args.parent_element_path:
                print(f"-parent_element_path must be specified for operation {args.operation}")
                return

            attrFields=[osipiutils.ATTR_FIELD_VAL,osipiutils.ATTR_FIELD_TS]
            # Fetch the Elements from OSIPi Server.
            elemVals,_=osipiutils.getOSIPiElements(args,args.parent_element_path,attrFields,func_osipi.DEVICE_ATTR)

            # Get into DataFrame table form indexed by timestamp 
            df=osipiutils.convertToEntities(elemVals,args.date_field,func_osipi.DEVICE_ATTR)
            attributes=df.columns
            print(f"Creating entity {entityName} with attributes {attributes}")
            script_utils.createEntity(db,db_schema,entityName,attributes,
                    function=TargetFunc,
                    func_input={
                        'osipi_host': args.pihost,
                        'osipi_port': args.piport,
                        'osipi_user': args.piuser,
                        'osipi_pass': args.pipass, 
                        'date_field': args.date_field,
                        'parent_element_path': args.parent_element_path,
                        'interval': args.interval
                    },
                    func_output={'osipi_elements_preload_ok':'osipi_elements_preload_ok'})

            #script_utils.createEntity(db,db_schema,entityName,attributes)
        elif args.operation=='osi_list':
            # List all Elements defined in the target OSIPi server
            from phg_iotfuncs.osipiutils import listOSIPiElements
            listOSIPiElements(args)
        elif args.operation=='osi_dbtest':
            # get a data sample to figure out the attributes
            import iotfunctions
            from  phg_iotfuncs import iotf_utils, osipiutils, func_osipi

            # Fetch the Elements from OSIPi Server.
            attrFields=[osipiutils.ATTR_FIELD_VAL,osipiutils.ATTR_FIELD_TS]
            elemVals,_=osipiutils.getOSIPiElements(args,args.parent_element_path,attrFields,func_osipi.DEVICE_ATTR)

            # Get into DataFrame table form indexed by timestamp 
            df=osipiutils.convertToEntities(elemVals,args.date_field,func_osipi.DEVICE_ATTR)
            
            entity_type_dict,entity_meta_dict=iotfunctions.metadata.retrieve_entity_type_metadata(_db=db,logical_name=entityName)
            iotf_utils.renameToDBColumns(df,entity_meta_dict)

            iotf_utils.adjustDataFrameColumns(db,entity_meta_dict,df,'OSItest',['date'])

            rc=db.write_frame(df=df, table_name=entity_meta_dict['metricsTableName'])
            print(f"Written {len(df)} rows, rc={rc}")
    else:
        script_utils.common_operation(args,db,db_schema)
def main(argv):
    '''
    You can test functions locally before registering them on the server to
    understand how they work.

    Supply credentials by pasting them from the usage section into the UI.
    Place your credentials in a separate file that you don't check into the repo.
    '''

    import argparse
    parser = argparse.ArgumentParser(
        description=f"Tester for AMQPPreload iotfunction")
    parser.add_argument('operation',
                        type=str,
                        help=f"Operation",
                        choices=['test', 'register', 'constant', 'k'],
                        default='test')
    parser.add_argument('-date',
                        dest='date_field',
                        type=str,
                        help=f"Field containing the event date/tiestamp",
                        default='date')
    parser.add_argument('-device_id', type=str, help=f"Device ID to filter on")
    parser.add_argument('-req',
                        dest='required_fields',
                        type=str,
                        help=f"Fields that are required to retain the record",
                        nargs='*')
    parser.add_argument('-const_name',
                        type=str,
                        help=f"Name of constant",
                        default=None)
    parser.add_argument('-const_value',
                        type=int,
                        help=f"Value for constant",
                        default=None)

    import azure.amqp_receiver as amqp_receiver
    amqp_receiver.add_iothub_args(parser)
    script_utils.add_common_args(parser, argv)
    args = parser.parse_args(argv[1:])

    # logging.basicConfig(level=args.loglevel)
    EngineLogging.configure_console_logging(args.loglevel)

    db, db_schema = script_utils.setup_iotfunc(args.creds_file, args.echo_sql)
    # pprint.pprint(db.credentials)

    from phg_iotfuncs.func_amqp import AMQPPreload as TargetFunc
    if args.operation == 'test':
        access_key = amqp_receiver.adjustArgs(args)
        required_fields = '' if args.required_fields is None else ','.join(
            args.required_fields)
        test(db, db_schema, TargetFunc, args.iot_hub_name, args.policy_name,
             args.consumer_group, args.partition_id, access_key,
             args.device_id, args.date_field, required_fields)
    elif args.operation == 'register':
        script_utils.registerFunction(db, db_schema, TargetFunc)
    elif args.operation == 'constant':
        from phg_iotfuncs import iotf_utils
        pprint.pprint(iotf_utils.getConstant(db, constant_name=None))
        if args.const_name is not None and args.const_value is not None:
            iotf_utils.putConstant(db, args.const_name, args.const_value)
    elif args.operation == 'k':
        from phg_iotfuncs import iotf_utils
        k_name = args.lastseq_constant
        k_desc = 'PhG Konst'
        try:
            rc = iotf_utils.registerConstant(db, k_name, int, k_desc)
        except:
            pass
        k_val = iotf_utils.getConstant(db, k_name, -1)
        print(f"Got value {k_val}")
        rc = iotf_utils.putConstant(db, k_name, k_val + 1)
        k_newval = iotf_utils.getConstant(db, k_name)
        print(f"Got new value {k_newval}")

    # NOTE: this branch is unreachable as written, since the 'k' operation is
    # already handled above.
    elif args.operation == 'k':
        import iotfunctions.ui
        constants = [
            iotfunctions.ui.UISingle(name='phg_const',
                                     description='PhG Konst',
                                     datatype=int)
        ]
        payload = []
        for c in constants:
            meta = c.to_metadata()
            name = meta['name']
            default = meta.get('value', None)
            del meta['name']
            try:
                del meta['value']
            except KeyError:
                pass
            payload.append({
                'name': name,
                'entityType': None,
                'enabled': True,
                'value': default,
                'metadata': meta
            })
        pprint.pprint(payload)
        rc = db.http_request(object_type='defaultConstants',
                             object_name=None,
                             request="POST",
                             payload=payload,
                             raise_error=True)
        pprint.pprint(rc)