import io
import json
import os
import sys

from iotfunctions.db import Database


def main(argv):
    sys.path.append(
        os.path.realpath(os.path.join(os.path.dirname(__file__), '..')))

    credPath = os.path.join(os.path.dirname(__file__),
                            f"credentials_as_{os.environ['USERNAME']}.json")
    print(f"Loading credentials from {credPath}")
    with io.open(credPath, encoding='utf-8') as F:
        credentials = json.loads(F.read())
    db_schema = None
    db = Database(credentials=credentials)

    from goodvibrations.predictStatus import PredictCondition
    print(f"Registering function")
    db.unregister_functions(["PredictCondition"])
    try:
        db.register_functions([PredictCondition])
    except Exception as exc:
        print(exc)

    fn = PredictCondition(condition='predStatus')
    df = fn.execute_local_test(db=db,
                               db_schema=db_schema,
                               generate_days=1,
                               to_csv=True)
    print(df)
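
# Hedged sketch (the invocation is not part of this fragment): the script would
# typically be run through an entry point such as
#
# if __name__ == '__main__':
#     main(sys.argv)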
    """
    def __init__(self,
                 dummy_input=None,
                 output_item='broken_preload',
                 **parameters):
        super().__init__(dummy_items=[], output_item=output_item)
        self.dummy_input = dummy_input

    def execute(self, df, start_ts=None, end_ts=None, entities=None):

        return True


try:
    db.register_functions([NonPackage])
except BaseException as e:
    print('Registration failed as expected: %s' % e)
else:
    raise RuntimeError(
        'Function is not in a package, function should have failed')
'''
Register a function with no build ui
'''
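
# Hedged sketch (the code for this case is truncated in the fragment): a minimal
# transformer that omits the build_ui classmethod. The NoBuildUI name, its items
# and the BaseTransformer base class are illustrative assumptions only.
#
# class NoBuildUI(BaseTransformer):
#
#     def __init__(self, input_item, output_item='no_build_ui_out'):
#         super().__init__()
#         self.input_item = input_item
#         self.output_item = output_item
#
#     def execute(self, df):
#         df[self.output_item] = df[self.input_item]
#         return df
#
# db.register_functions([NoBuildUI])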
Example #3
                            data_item='is_generated'), **{
                                '_timestamp': 'evt_timestamp',
                                '_db_schema': db_schema
                            })

# dimension columns
dimension_columns = []
dimension_columns.append(Column('business', String(50)))
dimension_columns.append(Column('site', String(50)))
dimension_columns.append(Column('equipment_type', String(50)))
dimension_columns.append(Column('train', String(50)))
dimension_columns.append(Column('service', String(50)))
dimension_columns.append(Column('asset_id', String(50)))

entity.register(raise_error=False)
db.register_functions([TurbineHTTPPreload])

df = db.read_table(table_name=entity_name, schema=db_schema)
print(df.head())

meta = db.get_entity_type(entityType)
jobsettings = {
    '_production_mode': False,
    '_start_ts_override': dt.datetime.utcnow() - dt.timedelta(days=10),
    '_end_ts_override':
    (dt.datetime.utcnow() -
     dt.timedelta(days=1)),  # .strftime('%Y-%m-%d %H:%M:%S'),
    '_db_schema': 'BLUADMIN',
    'save_trace_to_file': True
}
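
# Hedged sketch (not in the original fragment): overrides such as these are
# typically passed through to a local pipeline run, mirroring the
# exec_local_pipeline(**{...}) calls in the other examples, e.g.
# meta.exec_local_pipeline(**jobsettings)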
import datetime as dt
import json
import pandas as pd
import numpy as np
from sqlalchemy import Column, Integer, String, Float, DateTime, Boolean, func
from iotfunctions.base import BaseTransformer
from iotfunctions.metadata import EntityType
from iotfunctions.db import Database
from iotfunctions import ui

with open('credentials_as.json', encoding='utf-8') as F:
    credentials = json.loads(F.read())
db_schema = None
db = Database(credentials=credentials)

from custom.multiply_by_factor import MultiplyByFactor

db.register_functions([MultiplyByFactor])
Example #5
# if there is a 2nd argument do not register but exit
if (len(sys.argv) > 1):
    sys.exit()

EngineLogging.configure_console_logging(logging.DEBUG)

#with open('credentials_as_dev.json', encoding='utf-8') as F:
#    credentials = json.loads(F.read())

#fn = AggregateItemStats(
#        input_item_1='x1',
#        input_item_2='x2',
#        output_item='y')

#df = fn.execute_local_test(generate_days=1,to_csv=True)
#print(df)

#cols = [
#    Column('string_1', String(255))
#        ]

#df = fn.execute_local_test(generate_days = 1,to_csv=True,
#                           columns = cols)

#db.register_functions([functions.AggregateItemStats])

db.register_module(functions)

db.register_functions([anomaly.SpectralAnomalyScore])
# fncsv.execute_local_test(db=db,db_schema=db_schema)
# db.register_functions([phg_iotfuncs.functions.CSVDataSource])

####################################################################################
fncsv = phg_iotfuncs.functions.CSVPreload(
    # csv_file = 'extract_CaCO3_cont1.csv',
    csv_file='*.csv',
    rebaseTS=True,
    output_item='loaded')

try:
    fncsv.execute_local_test(db=db, db_schema=db_schema)
except Exception as exc:
    print(f"Exception {exc}")
    import traceback
    traceback.print_exc()
db.register_functions([phg_iotfuncs.functions.CSVPreload])

####################################################################################
# fnpre = phg_iotfuncs.functions.HTTPPreload(url='internal_test',request='GET',output_item='loaded')
# fnpre.execute_local_test(db=db,db_schema=db_schema)
# db.register_functions([phg_iotfuncs.functions.HTTPPreload])

####################################################################################
# fnts=phg_iotfuncs.functions.MergeSampleTimeSeries(input_items=['temp', 'pressure', 'velocity'],
#                             output_items=['temp', 'pressure', 'velocity'])
# db.register_functions([phg_iotfuncs.functions.MergeSampleTimeSeries])

# db.register_functions([HelloWorldPhG,CSVDataSource,HTTPPreload,MergeSampleTimeSeries])
        return df

    @classmethod
    def build_ui(cls):
        # define arguments that behave as function inputs
        inputs = []
        inputs.append(ui.UIMultiItem(name='input_items',
                                     datatype=float,
                                     description='Data items to adjust',
                                     output_item='output_items',
                                     is_output_datatype_derived=True))
        inputs.append(ui.UISingle(name='factor',
                                  datatype=float))
        outputs = []
        return (inputs, outputs)

'''

from iotfunctions.sample import MultiplyByFactor

adj = MultiplyByFactor(input_items=['speed', 'travel_time'],
                       factor=0.9,
                       output_items=['adjusted_speed', 'adjusted_travel_time'])
adj.execute_local_test()

db.register_functions(adj)
Example #8
import datetime as dt
import json
import pandas as pd
import numpy as np
from sqlalchemy import Column, Integer, String, Float, DateTime, Boolean, func
from iotfunctions.base import BaseTransformer
from iotfunctions.metadata import EntityType
from iotfunctions.db import Database
from iotfunctions import ui

with open('credentials.json', encoding='utf-8') as F:
    credentials = json.loads(F.read())
db_schema = None
db = Database(credentials=credentials)


from custom.functions import MultiplyByFator
db.register_functions([MultiplyByFator])


from custom.functions import MyCustomFunction
db.register_functions([MyCustomFunction])
Example #9
# Paste contents in credentials_as.json file
# Save in scripts
'''
'''
1. Create a database object to access Watson IOT Platform Analytics DB.
'''
schema = 'bluadmin'  #  set if you are not using the default
with open('./scripts/credentials_as.json', encoding='utf-8') as F:
    credentials = json.loads(F.read())
db = Database(credentials=credentials)
'''
2. Register custom function
You must unregister_functions if you change the method signature or required inputs
'''
db.unregister_functions(['SampleDimensionPreload_SS'])
db.register_functions([SampleDimensionPreload_SS])
'''
3. To do anything with IoT Platform Analytics, you will need one or more entity types.
This example assumes that the entity to which we are adding dimensions already exists
We add the custom function to this entity type to test it locally
'''
entity_name = 'issue_637_blank'
entity_type = db.get_entity_type(name=entity_name)

# get dimension table name - to add dimension values to
try:
    dim_table_name = (
        entity_type.get_attributes_dict()['_dimension_table_name']).lower()
except KeyError:
    dim_table_name = entity_name + '_dimension'
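
# Hedged sketch (not in the original fragment): with the dimension table name in
# hand, dimension values would typically be appended as a dataframe; the column
# names and values below are placeholders.
# df_dim = pd.DataFrame({'deviceid': ['73001'], 'site': ['plant_a']})
# db.write_frame(df_dim, table_name=dim_table_name, schema=schema, if_exists='append')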
    bif.PythonExpression(
        expression='df["energy_value"]*df["energy_compare_percent"]',
        output_name='volume'), **{
            '_timestamp': 'evt_timestamp',
            '_db_schema': db_schema
        })
'''
When creating an EntityType object you will need to specify the name of the entity, the database
object that will contain entity data

After creating an EntityType you will need to register it so that it is visible in the Add Data to Entity Function UI.
To also register the functions and constants associated with the entity type, specify
'publish_kpis' = True.
'''
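
# Hedged sketch (the EntityType construction itself is truncated in this
# fragment): the constructor typically receives the entity name, the Database
# object, column definitions and function instances, plus '_timestamp' and
# '_db_schema' keyword arguments. The column shown is a placeholder.
#
# entity = EntityType(entity_name, db,
#                     Column('temp', Float()),
#                     BIAssetHTTPPreload(...),
#                     **{'_timestamp': 'evt_timestamp', '_db_schema': db_schema})
#
# To also register the functions and constants associated with the entity type:
# entity.register(raise_error=False, publish_kpis=True)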
entity.register(raise_error=False)
db.register_functions([BIAssetHTTPPreload])
'''
To test the execution of kpi calculations defined for the entity type locally
use this function.

A local test will not update the server job log or write kpi data to the AS data
lake. Instead kpi data is written to the local filesystem in csv form.
'''

entity.exec_local_pipeline()
'''
view entity data
'''
df = db.read_table(table_name=entity_name, schema=db_schema)
print(df.head())
    Column('deviceid', String(50)),
    Column("torque", Integer()),
    Column("acc", Integer()),
    Column("load", Integer()),
    Column("tool_type", Integer()),
    Column("speed", Float()),
    Column("travel_time", Float()),
    **{
        '_timestamp': 'evt_timestamp',
        # '_production_mode': False,
        '_db_schema': db_schema
    })

# db.unregister_functions(["InvokeExternalModel"])
# exit()
db.register_functions([InvokeWMLModel])
# exit()
print("Function registered")

entity.register(raise_error=False)
print("Entity registered")

# generate data and set anomaly_score to zeros
print("Generating sample data")
entity.generate_data(days=2.0, drop_existing=True)
# df = db.read_table(table_name=entity_name, schema=db_schema)

# empty_columns = ["anomaly_score"]
# print("Setting " + empty_columns.join(" ") "columns to zeros ")
# df[empty_columns] = np.zeros(len(df))
# db.write_frame(df, table_name=entity_name, schema=db_schema, if_exists="replace")
import json
from iotfunctions.db import Database

with open('credentials.json', encoding='utf-8') as F:
    credentials = json.loads(F.read())

db_schema = None

db = Database(credentials=credentials)

from functions.bad_functions import XXXXX
db.register_functions([XXXXX])
Example #13
                    url='internal_test',
                    output_item='http_preload_done'),
    bif.PythonExpression(expression='df["Temperature"]*df["Pressure"]',
                         output_name='Volume'), **{
                             '_timestamp': 'evt_timestamp',
                             '_db_schema': db_schema
                         })
'''
When creating an EntityType object you will need to specify the name of the entity, the database
object that will contain entity data
After creating an EntityType you will need to register it so that it is visible in the UI.
To also register the functions and constants associated with the entity type, specify
'publish_kpis' = True.
'''
entity.register(raise_error=False)
db.register_functions([DemoHTTPPreload])
'''
To test the execution of kpi calculations defined for the entity type locally
use 'test_local_pipeline'.
A local test will not update the server job log or write kpi data to the AS data
lake. Instead kpi data is written to the local filesystem in csv form.
'''

entity.exec_local_pipeline()
'''
view entity data
'''

df = db.read_table(table_name=entity_name, schema=db_schema)
print(df.head())
    bif.PythonExpression(expression='df["energy_value"]*df["energy_value"]',
                         output_name='volume'),
    **{
        '_timestamp': 'evt_timestamp',
        '_db_schema': db_schema
    })
'''
When creating an EntityType object you will need to specify the name of the entity, the database
object that will contain entity data

After creating an EntityType you will need to register it so that it is visible in the Add Data to Entity Function UI.
To also register the functions and constants associated with the entity type, specify
'publish_kpis' = True.
'''
entity.register(raise_error=False)
db.register_functions([MaximoAssetHTTPPreload])
'''
To test the execution of kpi calculations defined for the entity type locally
use this function.

A local test will not update the server job log or write kpi data to the AS data
lake. Instead kpi data is written to the local filesystem in csv form.
'''

entity.exec_local_pipeline()
'''
view entity data
'''
# can get entity metadata with following
#
# TODO, for some reason this returns data but also throws exception.
# Paste contents in credentials_as.json file
# Save in scripts
'''
'''
1. Create a database object to access Watson IOT Platform Analytics DB.
'''
schema = 'bluadmin'  #  set if you are not using the default
with open('./scripts/credentials_as.json', encoding='utf-8') as F:
    credentials = json.loads(F.read())
db = Database(credentials=credentials)
'''
2. Register custom function
You must unregister_functions if you change the method signature or required inputs
'''
db.unregister_functions(['SampleDimensionPreload_preset'])
db.register_functions([SampleDimensionPreload_preset])
'''
3. To do anything with IoT Platform Analytics, you will need one or more entity types.
This example assumes that the entity to which we are adding dimensions already exists
We add the custom function to this entity type to test it locally
'''
entity_name = 'issue_455_blank_preset'
entity_type = db.get_entity_type(name=entity_name)

# get dimension table name - to add dimension values to
try:
    dim_table_name = (
        entity_type.get_attributes_dict()['_dimension_table_name']).lower()
except KeyError:
    dim_table_name = entity_name + '_dimension'
Example #16
    credentials = json.loads(F.read())
db_schema = None
db = Database(credentials=credentials)
'''
Import and instantiate the functions to be tested 

The local test will generate data instead of using server data.
By default it will assume that the input data items are numeric.

Required data items will be inferred from the function inputs.

The function below executes an expression involving a column called x1
The local test function will generate a dataframe containing the column x1

By default test results are written to a file named df_test_entity_for_<function_name>
This file will be written to the working directory.

'''

from custom.functions import HelloWorldGG

fn = HelloWorldGG(name='Giri',
                  token='23537570-defd-4876-abde-68be94d9e26e',
                  output_col='greeting_output')
fn.execute_local_test(db=db, db_schema=db_schema)
'''
Register function so that you can see it in the UI
'''

db.register_functions([HelloWorldGG])
Example #17
import datetime as dt
import json
import pandas as pd
import numpy as np
from sqlalchemy import Column, Integer, String, Float, DateTime, Boolean, func
from iotfunctions.base import BaseTransformer
from iotfunctions.metadata import EntityType
from iotfunctions.db import Database
from iotfunctions import ui

with open('credentials_as.json', encoding='utf-8') as F:
    credentials = json.loads(F.read())
db_schema = None
db = Database(credentials=credentials)

from customSG.multiplybyfactorSG import MultiplyByFactorSG

db.register_functions([MultiplyByFactorSG])
import datetime as dt
import json
import logging
import sys

import numpy as np
import pandas as pd

from poc.functions import State_Timer
from iotfunctions.db import Database
from iotfunctions.enginelog import EngineLogging

EngineLogging.configure_console_logging(logging.DEBUG)

logger = logging.getLogger(__name__)

with open('./credentials_Monitor-Demo2.json', encoding='utf-8') as F:
    credentials = json.loads(F.read())

#with open('../Monitor-Demo-Credentials.json', encoding='utf-8') as F:
#    credentials = json.loads(F.read())

db = Database(credentials=credentials)
db_schema = None  #  set if you are not using the default

#db.unregister_functions(["State_Timer"])
logger.debug("Function unregistered")
# exit()
db.register_functions([State_Timer])
# exit()
#print("Function registered ")
Example #19
if (len(sys.argv) <= 1) or (sys.argv[1] != 'test'):
    db = Database(credentials=credentials)
    print(db.cos_load)


# if in test mode call execute()
ais = anomaly.SpectralAnomalyScore('Val', windowsize=12, output_item='zscore')
kis = anomaly.KMeansAnomalyScore('Val', windowsize=4, output_item='kscore')

print("Instantiated")

# if there is a 2nd argument do not register but exit
if (len(sys.argv) > 1):
    sys.exit()

EngineLogging.configure_console_logging(logging.DEBUG)

# with open('credentials_as_dev.json', encoding='utf-8') as F:
#     credentials = json.loads(F.read())

#db.register_module(functions)

#db.register_functions([anomaly.SpectralAnomalyScore])
#db.register_functions([anomaly.KMeansAnomalyScore])
#db.register_functions([anomaly.NoDataAnomalyScoreNew])
#db.register_functions([anomaly.SimpleAnomaly])
#db.register_functions([anomaly.SimpleRegressor])
#db.register_functions([anomaly.GeneralizedAnomalyScore2])
#db.register_functions([anomaly.FFTbasedGeneralizedAnomalyScore2])
db.register_functions([anomaly.SaliencybasedGeneralizedAnomalyScore])
# Paste contents in credentials_as.json file
# Save in scripts
'''
'''
1. Create a database object to access Watson IOT Platform Analytics DB.
'''
schema = 'bluadmin'  #  set if you are not using the default
with open('./scripts/credentials_as.json', encoding='utf-8') as F:
    credentials = json.loads(F.read())
db = Database(credentials=credentials)
'''
2. Register custom function
You must unregister_functions if you change the method signature or required inputs
'''
db.unregister_functions(['SampleDimensionPreload_random'])
db.register_functions([SampleDimensionPreload_random])
'''
3. To do anything with IoT Platform Analytics, you will need one or more entity types.
This example assumes that the entity to which we are adding dimensions already exists
We add the custom function to this entity type to test it locally
'''
entity_name = 'issue_455_blank_random'
entity_type = db.get_entity_type(name=entity_name)

# get dimension table name - to add dimension values to
try:
    dim_table_name = (
        entity_type.get_attributes_dict()['_dimension_table_name']).lower()
except KeyError:
    dim_table_name = entity_name + '_dimension'
To also register the functions and constants associated with the entity type, specify
'publish_kpis' = True.
'''
entity.register(raise_error=False)
print("Entity registered")

# entity.generate_data(days=0.5, drop_existing=True)
# print("Data generated")

df = db.read_table(table_name=entity_name, schema=db_schema)
print("Table head")
print(df.head())
print("Table columns")
print(list(df.columns))

db.register_functions([InvokeExternalModel])
print("Function registered")
'''
To test the execution of kpi calculations defined for the entity type locally
use this function.

A local test will not update the server job log or write kpi data to the AS data
lake. Instead kpi data is written to the local filesystem in csv form.
'''

# entity.exec_local_pipeline()
start = dt.datetime.utcnow() - dt.timedelta(days=7)
end = dt.datetime.utcnow() - dt.timedelta(days=1)
print("Starting pipeline")
entity.exec_local_pipeline()  # start_ts=start, end_ts=end
print("Completed pipeline")
Example #22
import json
from iotfunctions.db import Database

with open('credentials_as.json', encoding='utf-8') as F:
    credentials = json.loads(F.read())
db_schema = None
db = Database(credentials=credentials)

# from rawkintrevo_tutorial2.divbyfactor import RawkintrevosDivByFactor
from rawkintrevo_tutorial2.LegitRequest import ExternalModel

db.unregister_functions(["ExternalModel"])
db.register_functions([ExternalModel])

import rawkintrevo_tutorial2
db.register_module(rawkintrevo_tutorial2)

import rawkintrevo_tutorial2.LegitRequest
db.register_module(rawkintrevo_tutorial2.LegitRequest)
Example #23
                       output_item='http_preload_done'),
    bif.PythonExpression(expression='df["temp"]*df["pressure"]',
                         output_name='volume'), **{
                             '_timestamp': 'evt_timestamp',
                             '_db_schema': db_schema
                         })
'''
When creating an EntityType object you will need to specify the name of the entity, the database
object that will contain entity data

After creating an EntityType you will need to register it so that it is visible in the UI.
To also register the functions and constants associated with the entity type, specify
'publish_kpis' = True.
'''
entity.register(raise_error=False)
db.register_functions([sample.HTTPPreload])
'''
To test the execution of kpi calculations defined for the entity type locally
use 'test_local_pipeline'.

A local test will not update the server job log or write kpi data to the AS data
lake. Instead kpi data is written to the local filesystem in csv form.

'''

entity.exec_local_pipeline()
'''
view entity data
'''

df = db.read_table(table_name=entity_name, schema=db_schema)
import json
from iotfunctions.db import Database

with open('credentials.json', encoding='utf-8') as F:
    credentials = json.loads(F.read())

db_schema = None

db = Database(credentials=credentials)

from custom.functions import PrevDayHourlyAvgDiff, PrevDayHourlyAvgPercentDiff, PrevDayDailyAvgPercentDiff
db.register_functions([
    PrevDayHourlyAvgDiff, PrevDayHourlyAvgPercentDiff,
    PrevDayDailyAvgPercentDiff
],
                      url='git+https://github.com/rweidinger/functions@')

# fn = PrevDayHourlyAvgPercentDiff(
#     input_item='GallonsRemaining',
#     output_item='GallonsRemaining_PctChg')
#
# import datetime as dt
#
# today = dt.datetime.today()
# print('Right now it is:', today)
# yesterday = today - dt.timedelta(days=1)
# yesterday_date = yesterday.date()
# yesterday_hour = yesterday.hour
#
# df = fn.execute_local_test(generate_days=3, to_csv=True)
# print(df.loc[(df['deviceid'] == '73001') &
The automated test assumes that data items are numeric. You can also
specify datatypes by passing a list of SQLAlchemy Column objects.

'''

cols = [Column('string_1', String(255))]

df = fn.execute_local_test(generate_days=1, to_csv=True, columns=cols)
'''
Custom functions must be registered in the AS function catalog before
you can use them. To register a function:
    
'''

db.register_functions([MultiplyTwoItems])
'''
After registration has completed successfully the function is available for
use in the AS UI.

The register_functions() method allows you to register more than one function
at a time. You can also register a whole module file with all of its functions.


'''
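
# register_functions also accepts several classes in one call; a hedged example
# (SecondCustomFunction is purely illustrative):
# db.register_functions([MultiplyTwoItems, SecondCustomFunction])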

from iotfunctions import bif

db.register_module(bif)
'''
Example #26
with open('credentials_as_dev.json', encoding='utf-8') as F:
    credentials = json.loads(F.read())
db_schema = None
db = Database(credentials=credentials)
'''
Import and instantiate the functions to be tested

The local test will generate data instead of using server data.
By default it will assume that the input data items are numeric.

Required data items will be inferred from the function inputs.

The function below executes an expression involving a column called x1
The local test function will generate a dataframe containing the column x1

By default test results are written to a file named df_test_entity_for_<function_name>
This file will be written to the working directory.

'''

from AICustom.AIModelDesign import AIModelDesign

fn = AIModelDesign(name='AS_Tester', greeting_col='greeting')
fn.execute_local_test(db=db, db_schema=db_schema)
'''
Register function so that you can see it in the UI
'''

db.register_functions([AIModelDesign])
Example #27
#!/usr/bin/env python3
import json
import logging
from iotfunctions.db import Database
from iotfunctions.enginelog import EngineLogging

EngineLogging.configure_console_logging(logging.DEBUG)

with open('../dev_resources/credentials_as_dev.json', encoding='utf-8') as F:
    #with open('../dev_resources/cognio.json', encoding='utf-8') as F:
    credentials = json.loads(F.read())
db_schema = None
db = Database(credentials=credentials)

#from custom.forecast import Cognio_NeuralNetwork_Forecaster
from custom.functions import SS_HelloWorld

db.register_functions([SS_HelloWorld])
Example #28
    # bif.PythonExpression(expression='df["energy_value"]*df["energy_value"]',
    bif.PythonExpression(expression='df["temperature"]', output_name='F'),
    **{
        '_timestamp': 'evt_timestamp',
        '_db_schema': db_schema
    })
'''
When creating an EntityType object you will need to specify the name of the entity, the database
object that will contain entity data

After creating an EntityType you will need to register it so that it is visible in the Add Data to Entity Function UI.
To also register the functions and constants associated with the entity type, specify
'publish_kpis' = True.
'''
entity.register(raise_error=False)
db.register_functions([MaximoAssetHTTP])
'''
To test the execution of kpi calculations defined for the entity type locally
use this function.

A local test will not update the server job log or write kpi data to the AS data
lake. Instead kpi data is written to the local filesystem in csv form.
'''

entity.exec_local_pipeline()
'''
view entity data
'''
# can get entity metadata with following
#
# TODO, for some reason this returns data but also throws exception.
                    **{
                      '_timestamp' : 'evt_timestamp',
                      '_db_schema' : db_schema
})


'''
When creating an EntityType object you will need to specify the name of the entity, the database
object that will contain entity data

After creating an EntityType you will need to register it so that it is visible in the Add Data to Entity Function UI.
To also register the functions and constants associated with the entity type, specify
'publish_kpis' = True.
'''
entity.register(raise_error=False)
db.register_functions([MaximoAssetHTTP])

'''
To test the execution of kpi calculations defined for the entity type locally
use this function.

A local test will not update the server job log or write kpi data to the AS data
lake. Instead kpi data is written to the local filesystem in csv form.
'''

entity.exec_local_pipeline()

'''
view entity data
'''
# can get entity metadata with following
                      Column('FUNCTION', String(50)), **{'schema': db_schema})

entity_dimension = entity.get_attributes_dict()['_dimension_table_name']
'''
When creating an EntityType object you will need to specify the name of the entity, the database
object that will contain entity data

After creating an EntityType you will need to register it so that it is visible in the Add Data to Entity Function UI.
To also register the functions and constants associated with the entity type, specify
'publish_kpis' = True.
'''
entity.register(raise_error=False)
# When creating a custom preload function you can register it with the following lines.
# You must unregister_functions if you change the method signature or required inputs.
db.unregister_functions(['Issue455HTTPPreload'])
db.register_functions([Issue455HTTPPreload])
'''
To test the execution of kpi calculations defined for the entity type locally
use this function.

A local test will not update the server job log or write kpi data to the AS data
lake. Instead kpi data is written to the local filesystem in csv form.
'''

entity.exec_local_pipeline(**{'_production_mode': False})
'''
view entity data
'''
print("Read Table of new entity")
df = db.read_table(table_name=entity_name, schema=db_schema)
print(df.head())