Ejemplo n.º 1
0
def get_df_client(config: dict):
    """Create an InfluxDB pandas DataFrame client from a configuration dict.

    :param config: (mandatory, dict) the loaded configuration; must contain an
        'influxdb' section with 'host', 'user', 'password' and 'database' keys
        and, optionally, 'port' (defaults to 8086).
    :return: DataFrameClient connected to the configured database.
    """

    cfg_db = config['influxdb']

    # Optional port with the InfluxDB default as fallback (dict.get instead of
    # the explicit membership test on .keys()).
    port = cfg_db.get('port', 8086)

    # create influx db client
    dbclient = DataFrameClient(host=cfg_db['host'],
                               port=port,
                               username=cfg_db['user'],
                               password=cfg_db['password'])

    # make sure the data base exists (CREATE DATABASE is a no-op if it already does)
    dbclient.query(f"CREATE DATABASE {cfg_db['database']}")

    # select the wanted database
    dbclient.switch_database(cfg_db['database'])

    return dbclient
Ejemplo n.º 2
0
class TelegrafLogger(StreamHandler):
    """Logging handler that forwards each record to InfluxDB as a one-row DataFrame."""

    def __init__(self):
        StreamHandler.__init__(self)

        # Connection settings are read from a local JSON file.
        with open('keys/influx_settings.json') as json_file:
            settings = json.load(json_file)

            self.client = DataFrameClient(host=settings.get("host"),
                                          port=settings.get("port"),
                                          username=settings.get("username"),
                                          password=settings.get("password"))
            self.client.switch_database(settings.get("database"))

    def emit(self, record):
        """Write one log record into the 'camp_export' measurement, tagged by level."""
        row = [record.getMessage(), record.lineno, record.funcName,
               record.exc_info]
        frame = pd.DataFrame([row],
                             columns=['log_message', 'lineno', 'funcName',
                                      'exc_info'],
                             index=[datetime.utcnow()])
        self.client.write_points(measurement="camp_export",
                                 dataframe=frame,
                                 tags={"log_level": record.levelname})
Ejemplo n.º 3
0
    def make_dataset(capteur_list, text_input_start, text_input_end,
                     nom_capteur, L_influx):
        """Fetch one column per register from the 'measure' series into one frame.

        :param capteur_list: list of register (column) names to fetch.
        :param text_input_start: period start, as an InfluxDB time literal.
        :param text_input_end: period end, as an InfluxDB time literal.
        :param nom_capteur: sensor ID used to filter the series.
        :param L_influx: [database, user, password, host, port] connection info.
        :return: DataFrame with a leading 'Date' column and one column per register.
        """
        client = DataFrameClient(host=L_influx[3],
                                 port=L_influx[4],
                                 username=L_influx[1],
                                 password=L_influx[2])
        client.switch_database(L_influx[0])

        src = pd.DataFrame()

        for elt in capteur_list:
            # NOTE(review): the query is built by string interpolation from UI
            # inputs — fine for a trusted dashboard, but not injection-safe.
            requete = (f'SELECT "{elt}" FROM measure '
                       f"WHERE time >= '{text_input_start}' "
                       f"AND time <= '{text_input_end}' "
                       f"AND ID='{nom_capteur}'")
            datasets = client.query(requete)
            df = pd.DataFrame(datasets['measure'])
            df = df.rename_axis('Date')
            df.index = df.index.astype('datetime64[ns]')
            src = pd.concat([df, src], axis=1)

        client.close()

        # Keep the timestamps as a regular 'Date' column and renumber the rows.
        # BUG FIX: the original renumbered with the length of the *last* fetched
        # frame (df.shape[0]); when registers have different lengths the
        # concatenated frame is longer and the assignment would fail.
        src['Date'] = src.index
        src.index = list(range(src.shape[0]))

        # Move the 'Date' column to the front.
        cols = src.columns.tolist()
        src = src[cols[-1:] + cols[:-1]]

        return src
Ejemplo n.º 4
0
def water(request):
    """Render Water.html, optionally with a pH chart built from InfluxDB data.

    On POST, reads the chart type and date range from the form, aggregates pH
    per day, and builds the plot; otherwise renders the page with no graph.
    """
    chart_type = None
    graph = None

    # Pull the full h2o_pH measurement into a single frame.
    client = DataFrameClient(host='localhost', port=8086)
    client.switch_database('NOAA_water_database')
    result = client.query("Select * from h2o_pH")
    df = pd.concat(result)
    df = df.reset_index(level=[0, 1])

    if request.method == 'POST':
        chart_type = request.POST['sales']
        date_from = request.POST['date_from']
        date_to = request.POST['date_to']

        # 'level_1' holds the timestamps; normalise them to YYYY-MM-DD strings
        # so they can be compared against the form's date strings.
        df['level_1'] = df['level_1'].apply(lambda ts: ts.strftime('%Y-%m-%d'))
        df2 = df.groupby('level_1', as_index=False)['pH'].agg('sum')

        if chart_type != "":
            if date_from != "" and date_to != "":
                mask = (df['level_1'] > date_from) & (df['level_1'] < date_to)
                df = df[mask]
                df2 = df.groupby('level_1', as_index=False)['pH'].agg('sum')
                graph = get_simple_plot(chart_type,
                                        x=df2['level_1'],
                                        y=df2['pH'],
                                        data=df)

    return render(request, 'Water.html', {'graph': graph})
Ejemplo n.º 5
0
def load():
    """Load the 2013 weather CSV and push July–September into InfluxDB.

    Reads the hourly CSV, builds a datetime index from the date and MST time
    columns, renames the columns to AtmosphericAnalogKind_* names, then writes
    the July, August and September slices into the 'weather' measurement of
    the 'proven' database on a local InfluxDB using the JSON protocol.
    """
    frame = pd.read_csv('GHI_DHI_Temp_Wind_20130101_english_units.csv',
                        skiprows=1)
    # Combine the date and MST time columns into a proper datetime index.
    frame.index = pd.to_datetime(frame['DATE (MM/DD/YYYY)'] + ' ' + frame['MST'],
                                 format='%m/%d/%Y %H:%M')
    frame.columns = [
        u'DATE (MM/DD/YYYY)', u'MST',
        u'AtmosphericAnalogKind_irradanceGlobalHorizontal',
        u'AtmosphericAnalogKind_irradanceDirectNormal',
        u'AtmosphericAnalogKind_irradanceDiffuseHorizontal',
        u'AtmosphericAnalogKind_ambientTemperature',
        u'AtmosphericAnalogKind_humidity', u'AtmosphericAnalogKind_speed',
        u'AtmosphericAnalogKind_bearing'
    ]

    dbname = 'proven'
    protocol = 'json'

    client = DataFrameClient(host='localhost', port=8086)

    print("Create pandas DataFrame")

    print("Create database: " + dbname)
    client.create_database(dbname)
    dbs = client.get_list_database()
    print(dbs)
    client.switch_database(dbname)

    # Write each month separately (label slicing on the datetime index).
    for first, last in (('2013-7-1', '2013-7-31'),
                        ('2013-8-1', '2013-8-31'),
                        ('2013-9-1', '2013-9-30')):
        client.write_points(frame.loc[first:last], 'weather',
                            protocol=protocol)

    print("Write DataFrame with Tags")

    print("Read DataFrame")
Ejemplo n.º 6
0
def write_influx():
    """Write sample points and a sample DataFrame into a local 'df' database.

    Creates (if needed) and selects the 'df' database on a local InfluxDB,
    writes three hand-made points, then writes a 30-row hourly DataFrame into
    the 'df' measurement with two static tags using the line protocol.
    """
    from influxdb import DataFrameClient
    client = DataFrameClient(host='localhost', port=8086)
    client.create_database('df')
    client.switch_database('df')

    # Three sample points across two measurements and two tag values.
    test = [{
        "measurement": "m1",
        "tags": {
            "freq": "1Min"
        },
        "time": "2009-11-10T23:00:00Z",
        "fields": {
            "mvalue1": 0.64,
            "mvalue2": 3.12
        }
    }, {
        "measurement": "m1",
        "tags": {
            "freq": "10Min"
        },
        "time": "2009-11-10T22:00:00Z",
        "fields": {
            "mvalue1": 0.62,
            "mvalue2": 3.11
        }
    }, {
        "measurement": "m2",
        "tags": {
            "freq": "1Min"
        },
        "time": "2009-11-10T22:00:00Z",
        "fields": {
            # BUG FIX: this entry repeated the "mvalue1" key, so 0.66 was
            # silently overwritten by 3.11; the second field is now
            # "mvalue2", consistent with the other points.
            "mvalue1": 0.66,
            "mvalue2": 3.11
        }
    }]
    # NOTE(review): DataFrameClient.write_points expects a DataFrame; passing
    # a list of point dicts is the plain InfluxDBClient API — confirm this
    # call works with the installed client version.
    client.write_points(test)

    # A 30-hour series of integers 0..29 in a single column named '0'.
    df = pd.DataFrame(data=list(range(30)),
                      index=pd.date_range(start='2014-11-16',
                                          periods=30,
                                          freq='H'),
                      columns=['0'])
    client.write_points(df, 'df', {'k1': 'v1', 'k2': 'v2'}, protocol='line')
Ejemplo n.º 7
0
class INSERTDATA:
    """Recreate the 'UEData' database on the RIC platform's InfluxDB instance.

    Instantiating this class connects to the r4-influxdb.ricplt host, drops
    any existing 'UEData' database and creates a fresh one.
    """

    def __init__(self):
        self.client = DataFrameClient('r4-influxdb.ricplt', '8086', 'root',
                                      'root')
        self.dropdb('UEData')
        self.createdb('UEData')

    def createdb(self, dbname):
        """Create *dbname* and make it the active database."""
        print(f"Create database: {dbname}")
        self.client.create_database(dbname)
        self.client.switch_database(dbname)

    def dropdb(self, dbname):
        """Drop *dbname* (if it exists)."""
        print(f"DROP database: {dbname}")
        self.client.drop_database(dbname)

    def dropmeas(self, measname):
        """Drop a single measurement from the active database."""
        print(f"DROP MEASUREMENT: {measname}")
        self.client.query(f"DROP MEASUREMENT {measname}")
Ejemplo n.º 8
0
class InfluxDB_Helper:
    """Pair of InfluxDB clients (plain + DataFrame) with small DB helpers."""

    def __init__(self, host, port):
        self.client = InfluxDBClient(host, port)
        self.df_client = DataFrameClient(host, port)

    def delete_db(self, db_name):
        """Switch to *db_name* and drop it, logging each step."""
        print("Already exists: " + db_name)
        self.client.switch_database(db_name)
        print("Switched to db: " + db_name)
        self.client.drop_database(db_name)
        print("Dropped db: " + db_name)

    def create_db(self, db_name):
        """Create *db_name* unless a database with that name already exists."""
        existing = [entry['name'] for entry in self.client.get_list_database()]

        if db_name in existing:
            print("Already exists: " + db_name)
        else:
            self.client.create_database(db_name)
            print("Created db: " + db_name)

    def write_df(self, df, db_name, meas_name, time_precision='ms', tags=None):
        """Write *df* into measurement *meas_name* of database *db_name*."""
        self.df_client.switch_database(db_name)
        self.df_client.write_points(df,
                                    meas_name,
                                    time_precision=time_precision,
                                    tags=tags)

    def get_db_meas_df(self, db_name, meas_name):
        """Read a whole measurement back as a DataFrame."""
        self.df_client.switch_database(db_name)
        query = 'select * from "{0}"."autogen"."{1}";'.format(
            db_name, meas_name)
        return self.df_client.query(query)[meas_name]

    def get_client(self, df=False):
        """Return the DataFrame client when *df* is true, else the plain one."""
        return self.df_client if df else self.client
Ejemplo n.º 9
0
# Final QC'd frame; df_filter is produced by the preceding QC steps (not shown here).
df_final = df_filter.copy()

print('Data QC complete!')

# Interactive confirmation before pushing the data to the server.
x = input('Do you want to write to database? (y/n): ').upper()

if x == 'Y':

    # WritePoints
    print('Connecting to database...')
    # Credentials appear scrubbed ('******') — presumably replaced for publication.
    clientdf = DataFrameClient(host='odm2equipment.uwrl.usu.edu',
                               port=8086,
                               username='******',
                               password='******')
    clientdf.switch_database('ciws_final')
    print('Writing points...')
    # NOTE(review): influxdb's DataFrameClient documents field_columns /
    # tag_columns as lists of column names; passing dicts of DataFrames here
    # looks suspect — confirm against the client version in use.
    clientdf.write_points(dataframe=df_final,
                          measurement='DD',
                          field_columns={
                              'hotInFlowRate': df_final[['hotInFlowRate']],
                              'coldInFlowRate': df_final[['coldInFlowRate']],
                              'hotOutFlowRate': df_final[['hotOutFlowRate']],
                              'hotInTemp': df_final[['hotInTemp']],
                              'coldInTemp': df_final[['coldInTemp']],
                              'hotOutTemp': df_final[['hotOutTemp']],
                          },
                          tag_columns={'buildingID': df_final[['buildingID']]},
                          protocol='line',
                          numeric_precision=10,
                          batch_size=2000)
Ejemplo n.º 10
0
                             # Continuation of a client constructor whose
                             # opening lines are above this chunk.
                             database=args.database,
                             pool_size=1)

    logging.getLogger(__name__).info("Updating database with arguments: " +
                                     str(args))

    # Optionally drop the target database before rebuilding it.
    if args.drop:
        client.drop_database(args.database)

    if args.database not in [d['name'] for d in client.get_list_database()]:
        client.create_database(args.database)
        # NOTE(review): the retention policy is altered ON "cache", not on
        # args.database — looks hard-coded; confirm this is intentional.
        client.query(
            "ALTER RETENTION POLICY autogen ON cache DURATION INF REPLICATION 1 SHARD DURATION 2600w DEFAULT"
        )

    client.switch_database(args.database)

    # Pull the configured symbols and bring each (symbol, interval_len,
    # interval_type) series up to date from IQFeed history.
    with IQFeedHistoryProvider(num_connections=args.iqfeed_conn) as history:
        all_symbols = {
            (s, args.interval_len, args.interval_type)
            for s in set(
                iqutil.get_symbols(symbols_file=args.symbols_file).keys())
        }
        update_to_latest(
            client=client,
            noncache_provider=noncache_provider(history),
            new_symbols=all_symbols,
            time_delta_back=relativedelta(years=args.delta_back),
            skip_if_older_than=relativedelta(days=args.skip_if_older)
            if args.skip_if_older is not None else None)
Ejemplo n.º 11
0
# Input parameters.
dbType = input("Input DB type: ").upper()
testType = input("Input test type (upload/dl):  ")
dataLength = input("Input data length: ")
testNum = int(input("Input # of trials: "))

measurement='flow'

# NOTE(review): if the constructor raises, 'client' is never bound and the
# switch_database call below fails with a NameError; the bare except also
# hides the real error. Consider narrowing the except and exiting on failure.
try:
    client = DataFrameClient(host='192.168.212.133', port=8086)
    print('connection successful!')

except:
    print('connection unsuccessful :(')

client.switch_database('ciws')
# NOTE(review): 'query' is only assigned here — presumably executed further
# down in the script (outside this view); verify it is not dead code.
query = 'DROP MEASUREMENT "flow"'

# NOTE(review): rebinding 'testResults' shadows the function of the same name,
# so it cannot be called again after this line.
testResults = testResults(testNum)
testData = ['B','C','D','E','F']






# Loop through test for number of trials specified in input.
print('testing '+dbType+' db...\n')
for i in range(testNum):
    print('Trial #',i)
Ejemplo n.º 12
0
# Plot styling; note it is applied again a few lines below (redundant).
plt.style.use( "ggplot" )

# Silence TensorFlow info/warning messages.
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'

# Fix the RNG seeds for reproducibility (numpy + TF graph-level seed).
seed = 3
np.random.seed( seed )
set_random_seed( seed )

client = DataFrameClient( host = "database", port = 8086 )
client.create_database( "tecpro" )

plt.style.use( "ggplot" )

# 'df' is expected to be defined earlier in the file (not visible here); it is
# written into the 'bees' measurement of the 'tecpro' database.
client.write_points( df, "bees", database = "tecpro" )

client.switch_database( "tecpro" )

# Read the activity-level series back, ordered by time.
query = """
	select ActivityLevel
	from bees
	order by time
"""

dfn = client.query( query )

x = np.array( dfn[ "bees" ].ActivityLevel.values )



# The raw activity-level series is the dataset used by the code below.
dataset = x
def analyse_db(db_name, timeframe="12h", host="localhost", port=8086):
    """Analyse the temperature data stored in an InfluxDB database.

    Logs min/max/mean/std statistics for the ambient and fermenter temperature
    series over the last *timeframe*, repeats the fermenter statistics after
    removing outliers (|z-score| >= 3), then estimates the average temperature
    lag after heating starts and after it stops.

    :param db_name: name of the InfluxDB database to analyse.
    :param timeframe: InfluxQL duration literal (e.g. "12h") for the window.
    :param host: InfluxDB host name.
    :param port: InfluxDB port.
    """
    logging.info("Analysing temperature database: " + db_name)
    client = DataFrameClient(host, port)
    logging.debug("InfluxDB dataframe client created")

    # db_name = "99-TEST-v99"
    db_list = client.get_list_database()

    # get_list_database() returns a list of {"name": ...} dicts, so existence
    # is checked with a one-key dict.
    check_name = {"name": db_name}
    if check_name in db_list:
        client.switch_database(db_name)
        logging.debug("using database " + db_name)

    else:
        logging.critical("Can't find database: " + db_name)
        exit(-1)

    logging.info(f"Analysing last {timeframe}")
    query = "select * from temperature where time >= now() - " + timeframe
    logging.debug("Running query: " + query)

    # run the query and load the result set into a dataframe
    rs = client.query(query)
    df = pd.DataFrame(rs['temperature'])

    # convert time index to NZ timezone
    df.index = df.index.tz_convert('Pacific/Auckland')

    logging.info("===========================")
    logging.info("Ambient temperature data")
    logging.info("---------------------------")
    logging.debug(
        f"Got {df['ambient_temp'].count():d} ambient temp records...")

    logging.info(
        f"min ambient = {df['ambient_temp'].min():.2f} at {df['ambient_temp'].idxmin():%Y-%m-%d %H:%M}"
    )
    logging.info(
        f"max ambient = {df['ambient_temp'].max():.2f} at {df['ambient_temp'].idxmax():%Y-%m-%d %H:%M}"
    )
    logging.info(f"average ambient = {df['ambient_temp'].mean():.2f}")
    logging.info(f"std dev ambient = {df['ambient_temp'].std():.2f}")

    logging.info("===========================")
    logging.info("Fermenter temperature data")
    logging.info("---------------------------")
    logging.debug(
        f"Got {df['fermenter_temp'].count():d} fermenter temp records...")

    logging.info(
        f"min fermenter = {df['fermenter_temp'].min():.2f} at {df['fermenter_temp'].idxmin():%Y-%m-%d %H:%M}"
    )
    logging.info(
        f"max fermenter = {df['fermenter_temp'].max():.2f} at {df['fermenter_temp'].idxmax():%Y-%m-%d %H:%M}"
    )
    logging.info(f"average fermenter = {df['fermenter_temp'].mean():.2f}")
    logging.info(f"std dev fermenter = {df['fermenter_temp'].std():.2f}")

    # calculate zscore to identify outliers
    temps = df['fermenter_temp']  # this is a Series
    zscores = stats.zscore(temps)
    abs_zscores = np.abs(zscores)

    outliers = (abs_zscores < 3).groupby(
        level=0).all()  # boolean Series: True where |z| < 3, i.e. rows to keep
    # logging.debug(outliers)

    new_df = df[
        outliers]  # boolean-mask indexing: keeps only the rows flagged True above
    logging.debug(
        f"After removing outliers we now have {new_df['fermenter_temp'].count():d} records"
    )

    logging.info("===========================")
    logging.info("Updated fermenter temperature data")
    logging.info("---------------------------")
    logging.debug(
        f"Got {new_df['fermenter_temp'].count():d} fermenter temp records...")

    logging.info(
        f"min fermenter = {new_df['fermenter_temp'].min():.2f} at {new_df['fermenter_temp'].idxmin():%Y-%m-%d %H:%M}"
    )
    logging.info(
        f"max fermenter = {new_df['fermenter_temp'].max():.2f} at {new_df['fermenter_temp'].idxmax():%Y-%m-%d %H:%M}"
    )
    logging.info(f"average fermenter = {new_df['fermenter_temp'].mean():.2f}")
    logging.info(f"std dev fermenter = {new_df['fermenter_temp'].std():.2f}")

    logging.info("===========================")

    # Calculate the heat start lag
    logging.info("Calculate lag after heating starts")
    logging.info("==================================")

    # find heat start times
    query = "select fermenter_temp, change_action from temperature where change_action='START HEATING' and time >= now() - " + timeframe
    logging.debug("Running query: " + query)
    rs = client.query(query)
    df = pd.DataFrame(rs['temperature'])
    df.index = df.index.tz_convert('Pacific/Auckland')
    logging.info(f"Found {df['change_action'].count():d} instances")

    logging.debug(df)

    # NOTE(review): the variables in this loop are named *stop* although this
    # section handles heating *start*; the computation itself uses min() as
    # described in the comments.
    lag_list = []
    for index, row in df.iterrows():
        heat_stop_temp = row['fermenter_temp']
        logging.debug(f"Heat stop temp = {heat_stop_temp:.2f}")

        # find the minimum temp over the next 10 mins after heating starts
        time0 = rfc3339.rfc3339(index)
        time1 = rfc3339.rfc3339(index + timedelta(minutes=10))

        query = "select min(fermenter_temp) from temperature where '" + time0 + "' <= time and time <= '" + time1 + "'"
        rs1 = client.query(query)
        df1 = pd.DataFrame(rs1['temperature'])
        # df1.index = df1.index.tz_convert('Pacific/Auckland')
        max_temp_after_heat_stop = df1.iloc[0][
            'min']  # get the first & only value in the min column
        logging.debug(
            f"Min temp after heat start = {max_temp_after_heat_stop:.2f}")

        heat_stop_lag = abs(heat_stop_temp - max_temp_after_heat_stop)
        logging.info(f"Heat start lag = {heat_stop_lag:.2f}")
        lag_list.append(heat_stop_lag)

    # logging.debug(lag_list)
    lag_mean = mean(lag_list)
    logging.info(f"Average heat start lag = {lag_mean:.3f} C")

    # Calculate the heat stop lag
    logging.info("Calculate lag after heating stops")
    logging.info("=================================")

    # find heat stop times
    query = "select fermenter_temp, change_action from temperature where change_action='STOP HEATING' and time >= now() - " + timeframe
    logging.debug("Running query: " + query)
    rs = client.query(query)
    df = pd.DataFrame(rs['temperature'])
    df.index = df.index.tz_convert('Pacific/Auckland')
    logging.info(f"Found {df['change_action'].count():d} instances")

    logging.debug(df)

    lag_list = []
    for index, row in df.iterrows():
        heat_stop_temp = row['fermenter_temp']
        logging.debug(f"Heat stop temp = {heat_stop_temp:.2f}")

        # find the maximum temp over the next 10 mins after heating stops
        time0 = rfc3339.rfc3339(index)
        time1 = rfc3339.rfc3339(index + timedelta(minutes=10))

        query = "select max(fermenter_temp) from temperature where '" + time0 + "' <= time and time <= '" + time1 + "'"
        rs1 = client.query(query)
        df1 = pd.DataFrame(rs1['temperature'])
        # df1.index = df1.index.tz_convert('Pacific/Auckland')
        max_temp_after_heat_stop = df1.iloc[0][
            'max']  # get the first & only value in the max column
        logging.debug(
            f"Max temp after heat stop = {max_temp_after_heat_stop:.2f}")

        heat_stop_lag = abs(heat_stop_temp - max_temp_after_heat_stop)
        logging.info(f"Heat stop lag = {heat_stop_lag:.2f}")
        lag_list.append(heat_stop_lag)

    # logging.debug(lag_list)
    lag_mean = mean(lag_list)
    logging.info(f"Average heat stop lag = {lag_mean:.3f} C")
Ejemplo n.º 14
0
    # Report configuration pulled from the global settings object 'Set'.
    mydb = Set.infl_mydb
    path = Set.sr_path
    timeshift = Set.sr_timeshift
    blocks = Set.sr_blocks
    sequence = Set.sr_sequence
    #    client = InfluxDBClient(host=host, port=port,
    #                            username=username, password=password)
    client = DataFrameClient(host=Set.infl_host,
                             port=Set.infl_port,
                             username=Set.infl_username,
                             password=Set.infl_password)

    # Only switch to the database if it actually exists on the server.
    listdb = client.get_list_database()
    listdb = [i['name'] for i in listdb]
    #    print(listdb)
    if mydb not in listdb:
        print('В influxdb нет БД {}'.format(mydb))
    else:
        client.switch_database(mydb)
    # NOTE(review): make_report() runs even when the database was not found —
    # confirm that is intended.
    make_report()
    # Run the report function periodically (scheduling currently disabled)
#    schedule.every().minute.at(":17").do(make_report)
#
#    while True:
#        schedule.run_pending()
#        time.sleep(1)

#    import pdb
#    pdb.set_trace()
Ejemplo n.º 15
0
    plt.tight_layout(pad=5, w_pad=2, h_pad=2.5)
    fig2.show()
    plt.show()

    # Interactive confirmation before pushing the QC'd data to the server.
    x = input('Do you want to write to database? (y/n): ').upper()

    if x == 'Y':

        # WritePoints
        print('Connecting to database...')
        # host/port/username/password/database/measurement come from the
        # enclosing scope (not visible in this chunk).
        clientdf = DataFrameClient(host=host,
                                   port=port,
                                   username=username,
                                   password=password)
        clientdf.switch_database(database)
        print('Writing points...')
        # NOTE(review): influxdb's DataFrameClient documents field_columns /
        # tag_columns as lists of column names; passing dicts of DataFrames
        # here looks suspect — confirm against the client version in use.
        clientdf.write_points(
            dataframe=df_final,
            measurement=measurement,
            field_columns={
                'hotInFlowRate': df_final[['hotInFlowRate']],
                'coldInFlowRate': df_final[['coldInFlowRate']],
                'hotOutFlowRate': df_final[['hotOutFlowRate']],
                'hotInTemp': df_final[['hotInTemp']],
                'coldInTemp': df_final[['coldInTemp']],
                'hotOutTemp': df_final[['hotOutTemp']]
            },
            tag_columns={'buildingID': df_final[['buildingID']]},
            protocol='line',
            numeric_precision=10,
Ejemplo n.º 16
0
# Prompt for which building/source file to upload.
bldg = input("bldg ID: ").upper()
source = input("'hotIN', 'coldIN', or 'hotRETURN': ")

print('\nlocating file...')

# (For connection testing a small local test.csv was used instead.)
path = f"/Users/joseph/Desktop/GRA/InfluxSemesterProject/LLC_BLDG_{bldg}/"
file = f"{source}_LLC_BLDG_{bldg}_OCT-4-NOV-13_Testdata.csv"

print('\nfile located, building dataframe...')

# First column holds the timestamps and becomes the datetime index.
csvReader = pd.read_csv(path + file,
                        sep=',',
                        index_col=0,
                        parse_dates=True,
                        infer_datetime_format=True)

print('\ndataframe complete, connecting to influxDB...')

client = DataFrameClient(host='influxdbubuntu.bluezone.usu.edu', port=8086)
client.switch_database('LLC_FlowData')

print('\nconnection established, uploading to influxdb...')

# Upload in 2000-point batches, tagging every point with building and source.
client.write_points(csvReader,
                    'flow',
                    {'buildingID': bldg, 'source': source},
                    batch_size=2000,
                    protocol='line')

print('\n\nDONE!')


Ejemplo n.º 17
0
from datetime import datetime, timedelta
import numpy as np
from influxdb import InfluxDBClient, DataFrameClient
import pandas as pd
import os
import re
import requests

# release Date: 17-04-99
__version__ = 0.4

# Module-level clients: a plain InfluxDBClient and a DataFrame variant, both
# pointed at the same local server.
client = InfluxDBClient(host='localhost', port=8086)
clientdf = DataFrameClient(host='localhost', port=8086)

# NOTE(review): 'dbname' is a literal database name here, not a variable —
# presumably a placeholder; confirm the intended default database.
client.switch_database('dbname')
clientdf.switch_database('dbname')


def create_switch_database(dbname):
    """Create *dbname* and make it the active database on both module clients."""
    client.create_database(dbname)
    for c in (client, clientdf):
        c.switch_database(dbname)


def datetime_from_str(time_str):
    """Parse a UTC timestamp string ('%Y-%m-%dT%H:%M:%S.%fZ') into a naive datetime.

    :param time_str: timestamp such as '2020-01-02T03:04:05.123456Z'
        (the parameter was renamed from ``str`` to stop shadowing the builtin).
    :return: naive datetime with the parsed components.
    """
    return datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%S.%fZ")


def time2str(time):
    """Format a datetime as '%Y-%m-%dT%H:%M:%S.%fZ' (inverse of datetime_from_str)."""
    return format(time, "%Y-%m-%dT%H:%M:%S.%fZ")
Ejemplo n.º 18
0
#!/home/nichotelo/influx-env/bin/python
# coding: utf-8

from influxdb import DataFrameClient
import pandas as pd

# Connection settings. NOTE(review): the client below hard-codes its own
# host/credentials, so only 'dbname' from this group is actually used —
# confirm which set is correct.
host = '127.0.0.1'
port = 8086
user = '******'
password = '******'
dbname = 'sensors'

client = DataFrameClient(host=u'localhost',
                         port=8086,
                         username=u'admin',
                         password=u'adminpassword')
client.switch_database(dbname)

# Pull every point from the esp32 RSSI measurement.
q = 'select * from "esp32/rssi/json"'

result = client.query(q)
# NOTE(review): the query targets "esp32/rssi/json" but the result is read
# under the key 'rssi1' — confirm which measurement name is correct.
df = result['rssi1'].head()

print(df)
def do_dataframes(host="localhost", port=8086):
    """Exploratory analysis of the 'temperature' measurement in the test database.

    Loads every row of the measurement into a pandas DataFrame (converted to
    the NZ timezone), prints ambient-temperature statistics, repairs the
    misspelled 'fermemter_temp' column by copying its values into
    'fermenter_temp', drops the single lowest fermenter reading, and prints
    fermenter-temperature statistics.

    :param host: InfluxDB host.
    :param port: InfluxDB port.
    """
    client = DataFrameClient(host, port)
    print("influxdb dataframe client created")

    dbname = "99-TEST-v99"
    db_list = client.get_list_database()

    # get_list_database() returns a list of {"name": ...} dicts, so existence
    # is checked with a one-key dict.
    check_name = {"name": dbname}
    if check_name in db_list:
        client.switch_database(dbname)
        print("using database " + dbname)

    else:
        print("can't find database: " + dbname)
        exit(-1)

    query = "select * from temperature"  # where change_action='START HEATING'"
    print("running query: " + query)

    rs = client.query(query)
    # print("resultset is...")
    # print(rs)
    # print("keys are... ", rs.keys())
    # print("temperature values...")
    # print(rs['temperature'])

    # load the result set into a dataframe
    df = pd.DataFrame(rs['temperature'])
    # convert time index to NZ timezone
    df.index = df.index.tz_convert('Pacific/Auckland')

    # print("dataframe is...")
    # print(df)

    print("from ", df['ambient_temp'].count(), " records...")
    print("min ambient = ", df['ambient_temp'].min())
    print("average ambient = ", df['ambient_temp'].mean())

    # print(df['fermemter_temp'].count(), "records")
    # temps = df['ambient_temp']  # this is a Series
    # print("ambient temp std dev =", temps.std())
    # for i in temps:
    #     if not np.isnan(i):
    #         print(i)

    # copy the fermeMter values to the fermeNter column
    print("fixing the fermenter temp data")
    for index, row in df.iterrows():
        if not np.isnan(row['fermemter_temp']):
            df.at[index, 'fermenter_temp'] = row['fermemter_temp']

    # Treat the single minimum fermenter reading as a glitch and remove it.
    print("from", df['fermenter_temp'].count(), "records")
    print("min fermenter temp = ", df['fermenter_temp'].min(), "at",
          df['fermenter_temp'].idxmin())
    print("removing this value")
    df = df.drop(index=df['fermenter_temp'].idxmin())
    print("min fermenter temp = ", df['fermenter_temp'].min(), "at",
          df['fermenter_temp'].idxmin())
    print("average fermenter temp = ", df['fermenter_temp'].mean())
    print("max fermenter temp = ", df['fermenter_temp'].max(), "at",
          df['fermenter_temp'].idxmax())
    temps = df['fermenter_temp']  # this is a Series
    print("fermenter temp std dev =", temps.std())
Ejemplo n.º 20
0
# End of the period of record; PORStart is defined above this chunk.
POREnd = "'2019-04-19T12:00:00Z'"

print('\nConnecting to database...')
# Credentials appear scrubbed ('******') — presumably replaced for publication.
client = InfluxDBClient(host='odm2equipment.uwrl.usu.edu', port=8086, username='******',password='******')
client.switch_database('ciws')

print('Establishing connection to '+dbType+' database...')
# Connect to destination db
# NOTE(review): if the constructor raises, 'client_testDB' is never bound and
# the switch_database call below fails with a NameError; the bare except also
# hides the real error.
try:
    client_testDB = DataFrameClient(host='192.168.212.133', port=8086)
    print('connection successful!')

except:
    print('connection unsuccessful :(')

client_testDB.switch_database('ciws_por')


testResults = testResultsUL(testNum)
testData = ['B','C','D','E','F']


# Upload data by buildingID
print('testing '+dbType+' db...\n')
for x in testData:
    # Read in CSV file, start timer
    bldgID = "'" + x + "'"
    query = """SELECT * FROM "flow" WHERE "buildingID" =""" + bldgID + """ AND time >= """ + PORStart + """ AND time <= """ + POREnd + """"""

    print('Retrieving data...')
    # Convert returned ResultSet to Pandas dataframe with list
Ejemplo n.º 21
0
    def make_dataset(capteur_list, text_input_start, text_input_end,
                     nom_capteur, L_influx):
        """Build a ColumnDataSource with one line (x=values, y=dates) per register.

        :param capteur_list: list of register (column) names to plot.
        :param text_input_start: period start, '%Y-%m-%d %H:%M:%S'.
        :param text_input_end: period end, '%Y-%m-%d %H:%M:%S'.
        :param nom_capteur: sensor ID used to filter the 'measure' series.
        :param L_influx: [database, user, password, host, port] connection info.
        :return: ColumnDataSource with 'x', 'y', 'color' and 'label' columns.
        """
        xs = []
        ys = []
        colors = []
        labels = []

        client = DataFrameClient(host=L_influx[3],
                                 port=L_influx[4],
                                 username=L_influx[1],
                                 password=L_influx[2])
        client.switch_database(L_influx[0])

        # Pick a GROUP BY granularity so long periods don't flood the figure.
        # BUG FIX: the original used strict '>' comparisons, so spans of
        # exactly 86401 or 5000000 seconds fell through to the '1h' bucket;
        # chained elifs close those gaps.
        end = datetime.strptime(text_input_end, "%Y-%m-%d %H:%M:%S")
        start = datetime.strptime(text_input_start, "%Y-%m-%d %H:%M:%S")
        ecartSecondes = (end - start).total_seconds()

        if ecartSecondes < 86401:
            groupby = None  # up to ~1 day: raw points
        elif ecartSecondes < 5000000:
            groupby = '1m'  # up to ~2 months: 1-minute means
        elif ecartSecondes < 77000000:
            groupby = '15m'  # up to ~2.5 years: 15-minute means
        else:
            groupby = '1h'

        # Build the x (values) and y (dates) vectors for each register.
        for elt in capteur_list:
            if groupby is None:
                requete = (f'SELECT "{elt}" FROM measure '
                           f"WHERE time >= '{text_input_start}' "
                           f"AND time <= '{text_input_end}' "
                           f"AND ID='{nom_capteur}'")
                value_col = elt
            else:
                requete = (f'SELECT MEAN("{elt}") FROM measure '
                           f"WHERE time >= '{text_input_start}' "
                           f"AND time <= '{text_input_end}' "
                           f"AND ID='{nom_capteur}' "
                           f"GROUP BY time({groupby}) fill(0)")
                value_col = 'mean'

            datasets = client.query(requete)
            df = pd.DataFrame(datasets['measure'])
            df = df.rename_axis('Date')
            df.index = df.index.astype('datetime64[ns]')

            xs.append([df[value_col][i] for i in range(df.shape[0])])
            ys.append([df.index[i] for i in range(df.shape[0])])
            colors.append(capteur_color[capteur_list.index(elt)])
            labels.append(elt)

        # BUG FIX: the original called client.close() inside the loop, closing
        # the connection after the first register and leaving later queries to
        # run against a closed client; close once after all queries are done.
        client.close()

        # Construction of the source of the figure
        return ColumnDataSource(data={
            'x': xs,
            'y': ys,
            'color': colors,
            'label': labels
        })