Example #1
def predict(input, scaler, rt_val, test=False):

    model = keras.models.load_model(PATH + "Model_EB3_2434.h5")
    raw_forecast = model.predict(input)
    # The scaler was fit on 7 features, so embed the single prediction in a
    # zero-filled row before inverting the scaling
    to_trans = np.zeros(7)
    to_trans[0] = raw_forecast
    to_trans = to_trans.reshape(1, len(to_trans))
    transform = scaler.inverse_transform(to_trans)
    transform = transform[0][0]
    forecast = transform + rt_val

    if not test:
        # Index the forecast by tomorrow's date in UTC-5 (US Eastern)
        tomorrow = datetime.now(timezone(-timedelta(hours=5))) + timedelta(days=1)
        idx = pd.Timestamp(tomorrow.strftime("%Y-%m-%d"))
        df = pd.DataFrame(forecast, index=[idx], columns=["Gage_Height"])
        client = influxdb.DataFrameClient(host='34.231.230.82',
                                          port=8086,
                                          username="******",
                                          password="******",
                                          database="peko_prediction_results")
        client.write_points(df, "ppred", database="peko_prediction_results")

    else:
        return forecast
Example #2
def pull_data_from_influx(your_query,
                          database_name,
                          IP_of_influx="127.0.0.1",
                          port=8086):
    #Initiating the client
    client = influxdb.DataFrameClient(IP_of_influx,
                                      port,
                                      database=database_name)
    #query returns data to dfs_dict
    dfs_dict = client.query(your_query)
    #Finding out the measurement name
    measurement = next(iter(dfs_dict))
    #Extracting the measurement data of use
    ret = dfs_dict[measurement]
    #Storing the measurement data as a pandas dataframe
    df = pd.DataFrame.from_dict(ret)
    #Resetting index
    df = df.reset_index()
    #Renaming index as datetime
    df.rename(columns={'index': 'datetime'}, inplace=True)
    #print(df)
    #Parsing datetime
    df['datetime'] = pd.to_datetime(df['datetime'])
    #Parsing time from datetime
    df['Time'] = df['datetime'].dt.time
    #Parsing date from datetime
    df['date'] = df['datetime'].dt.date
    df['date'] = pd.to_datetime(df['date'])
    #setting index as date
    df = df.set_index(['date'])

    return df
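
A minimal usage sketch for the helper above; the query and database names
are made-up placeholders, not values from the original project:

df_example = pull_data_from_influx('SELECT * FROM "temperature"', 'sensor_db')
print(df_example.head())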
Example #3
def influx(sql):
    conn = influxdb.DataFrameClient(**config.INFLUX_CONN_SETTING)
    rows = conn.query(sql)
    if rows:
        rows = rows[config.INFLUX_TABLE_POINT]
    conn.close()
    return rows
Example #4
def update_graph_live(n, aggregation_duration, selected_date, aggregation_method):
    client = influxdb.DataFrameClient(database=config.DATABASE)

    start_time = arrow.get(selected_date)
    end_time = start_time + datetime.timedelta(days=1)
    measures = client.query(
        f"""
        select {aggregation_method}({config.INDICATOR}) as {config.INDICATOR} \
            from {config.MEASUREMENT} \
            where time >= '{start_time.isoformat()}' \
            and time <= '{end_time.isoformat()}' \
            group by time({aggregation_duration}m), "{config.TAG}"
        """
    )

    fig = go.Figure(
        layout=dict(legend=dict(orientation="h"), margin=dict(l=20, r=20, t=20, b=20)),
    )

    # ResultSet keys look like (measurement, ((tag_key, tag_value),));
    # unpack just the tag value to label each trace
    for (_, ((_, tag_value),)), data in measures.items():
        fig.add_trace(
            go.Scatter(
                x=data.index,
                y=data[config.INDICATOR],
                name=tag_value,
                mode="lines+markers",
            ),
        )

    return fig
Example #5
def cli(ctx, verbosity):
    ctx.obj = {}
    ctx.obj['client'] = influxdb.DataFrameClient(host=DEFAULT_HOST,
                                                 port=DEFAULT_PORT,
                                                 database=DEFAULT_DB_NAME)
    logging.basicConfig(
        level=LOGGING_LEVELS[verbosity.upper()],
        format="%(asctime)s - %(name)s - %(levelname)s - %(message)s")
Example #6
 def connect(self):
     """ Setup an Influx client connection """
     self._influxdb = influxdb.DataFrameClient(
         host=self.hostname,
         port=self.port,
         username=self.user,
         password=self.password,
         database=self.database,
         ssl=self.ssl,
         verify_ssl=self.verify_ssl,
     )
Example #7
 def __init__(self, db_input=None, usr='******', pwd='root', cloud=False):
     if not cloud:
         host = '192.168.58.71'
         # Office: 192.168.58.71; Amao: 192.168.38.176
     else:
         host = '39.100.15.89'
     self.client = influxdb.DataFrameClient(host=host,
                                            port=8086,
                                            username=usr,
                                            password=pwd,
                                            database=db_input)
Example #8
 def __connectInfluxdb(self, InfluxdbSource):
     host = InfluxdbSource['host']
     port = InfluxdbSource['port']
     username = InfluxdbSource['username']
     password = InfluxdbSource['password']
     database = InfluxdbSource['database']
     client = influxdb.DataFrameClient(host=host,
                                       port=port,
                                       username=username,
                                       password=password,
                                       database=database)
     return client
Example #9
    def get_days_data(self, vehicle, days_ago, hours_ago, length):
        self.client = influxdb.DataFrameClient(host='18.237.167.98', port=8086)
        self.client.switch_database("telematics")

        results = self.client.query("select CF_Clu_Odometer,CR_Vcu_DistEmpty_km,CR_Bms_Soc_Pc, CF_Bms_StdCha, CF_Bms_FstCha "
                          "from {} "
                          "where time > now() - {}d - {}h and time < now() - {}d  + {}h ".
                          format(vehicle, days_ago, hours_ago, days_ago, length - hours_ago))
        df = results[vehicle].dropna()

        df_fo = df.loc[df['CF_Clu_Odometer'] > 1000].copy()
        # find all spots where battery jumps
        df_fo['soc_plus'] = df_fo['CR_Bms_Soc_Pc'].shift().astype(float) + 2.0
        df_fo['charge_break'] = df_fo['CR_Bms_Soc_Pc'] > df_fo['soc_plus'].shift()
        df_fo['charge_list'] = (df_fo['CF_Bms_StdCha'] > 0) | (df_fo['CF_Bms_FstCha'] > 0)
        return df_fo
Example #10
    def __init__(self, pair, router_port=None, sub_port=None):
        super().__init__(pair=pair, router_port=router_port, sub_port=sub_port)

        if not pair.startswith("frx"):
            self.database_name = pair
        else:
            self.database_name = pair.replace('frx', '')

        # Sets the name of the .hst file in case we need to fetch
        # from the hst archive
        self.pair_hst_file = self.database_name + '1.hst'

        # Sets the client for the market pair's database, from which
        # we fetch minute data
        self.client = db.DataFrameClient(host='localhost',
                                         port=8086,
                                         database=self.database_name)
Example #11
def getDataFromInfluxdb(code,date,database,columns=[]):
    client = influxdb.DataFrameClient(host=INFLUXDBHOST, port=8086, username='******', password='******', database=database)
    measure = code
    # Query window: the whole calendar day, as nanosecond epoch timestamps
    b = dtparser.parse(str(date))
    e = dtparser.parse(str(date)) + datetime.timedelta(hours=24)
    if len(columns) > 0:
        colstr = ', '.join(f'"{col}"' for col in columns)
    else:
        colstr = '*'
    query = (f'select {colstr} from "{database}"."autogen"."{measure}" '
             f'where time >= {int(b.timestamp() * 1e9)} '
             f'and time < {int(e.timestamp() * 1e9)}')
    result = client.query(query)
    if result != {}:
        data = pd.DataFrame(result[measure])
    else:
        data = pd.DataFrame()
    return data
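
A usage sketch for the function above; the security code, date, database
and column names are illustrative placeholders:

data = getDataFromInfluxdb('SH600519', '2020-01-06', 'TickData',
                           columns=['price', 'volume'])
print(data.head())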
Example #12
def loaddata_influxdb(_database_name, _measurement_name, start_time, end_time,
                      car_id):

    client = influxdb.DataFrameClient(host='125.140.110.217',
                                      port=8999,
                                      username='******',
                                      password='******',
                                      database=_database_name)

    _query = ('SELECT DRIVE_SPEED, GPS_LAT, GPS_LONG, time FROM "%s"'
              " WHERE time >= '%s' and time < '%s' and car_id = '%s'"
              % (_measurement_name, start_time, end_time, car_id))
    print(_query)
    _query_result = client.query(_query)

    df = _query_result[_measurement_name]
    df = df.reset_index().rename(columns={"index": "RECORD_TIME"})
    print(len(df), 'data loaded')

    return df.to_dict()
Example #13
 def saveToInfluxdb(self, code, date, factor, data):
     host = InfluxdbServer['host']
     port = InfluxdbServer['port']
     username = InfluxdbServer['username']
     password = InfluxdbServer['password']
     database = 'MaoTickFactors'
     client = influxdb.DataFrameClient(host=host,
                                       port=port,
                                       username=username,
                                       password=password,
                                       database=database)
     dbs = client.get_list_database()
     if {'name': database} not in dbs:
         client.create_database(database)
     client.write_points(dataframe=data,
                         database=database,
                         measurement=code,
                         tags={},
                         field_columns=list(data.columns),
                         protocol='line')
Example #14
def getdataset(values, spaces, timefilter):
    # Set up a client for InfluxDB
    dbclient = influxdb.DataFrameClient('192.168.1.70', 8086, 'root', 'root',
                                        'sensordata')

    # get data
    if timefilter != "": timefilter = " AND " + timefilter
    data = pandas.DataFrame()
    for value in values:
        for space in spaces:
            rs = dbclient.query('SELECT mean(value) FROM "' + value +
                                '" WHERE "ruimte" = \'' + space + '\'' +
                                timefilter +
                                ' GROUP BY time(5m), "ruimte" fill(linear)')
            datacol = list(rs.values())[0]
            datacol.columns = [space + '-' + value]
            if data.empty:
                data = datacol
            else:
                data = data.join(datacol, how='outer')
    dbclient.close()
    return data
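
A hedged usage sketch; the value/space names and the time filter are
assumptions, not values from the original deployment:

frame = getdataset(values=['temperature', 'humidity'],
                   spaces=['livingroom', 'kitchen'],
                   timefilter="time > now() - 24h")
print(frame.describe())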
Example #15
def update_table(start, end):

    g_start = start
    g_end = end
    # Rain data is collected for the day before the gage-height window
    r_start = (pd.Timestamp(start) - pd.Timedelta(days=1)).strftime("%Y-%m-%d")
    r_end = (pd.Timestamp(end) - pd.Timedelta(days=1)).strftime("%Y-%m-%d")
    uapi.collect_data(PATH, "gage", "01480870", g_start, g_end)
    uapi.collect_data(PATH, "rain", "01480870", r_start, r_end)
    uapi.collect_data(PATH, "rain", "01480399", r_start, r_end)

    df1 = uapi.parse_data(PATH + "gage_01480870_%s.txt" % g_end)
    df1.rename(columns={"data": "Gage_Height"}, inplace=True)
    df2 = uapi.parse_data(PATH + "rain_01480870_%s.txt" % r_end)
    df2.rename(columns={"data": "Downingtown"}, inplace=True)
    df3 = uapi.parse_data(PATH + "rain_01480399_%s.txt" % r_end)
    df3.rename(columns={"data": "Wagontown"}, inplace=True)

    client = influxdb.DataFrameClient(host='localhost', port=8086)
    client.write_points(df1, "Gage_Height", database="training_raw")
    client.write_points(df2, "Precip", database="training_raw")
    client.write_points(df3, "Precip", database="training_raw")
Example #16
def connect_to_db(database='SKYSPARK'):
    """Function to connect to the database
    
    Args:
        database (string): name of the database to connect to options are 'SKYSPARK' (default) and 'ION'

    Returns:
        client (influxdb-python client object): database connection object \n
        OR \n
        (None): If the database connection failed
    """
    client = influxdb.DataFrameClient(host='206.12.92.81',
                                      port=8086,
                                      username='******',
                                      password='******',
                                      database=database)
    try:
        client.ping()
        print("Successful Connection to " + database + "\n")
        return client
    except Exception:
        print("Failed to Connect to " + database + "\n")
        return None
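
A short usage sketch of the connect-and-ping pattern above:

client = connect_to_db('SKYSPARK')
if client is not None:
    print(client.get_list_database())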
Example #17
    def __init__(self, other, timeframe=None):

        self.market = other.database_name if other else 'EURUSD'

        self.max_period = 100

        # self.monitor = self.timeframes[1]
        self.monitor = 'EURUSD'

        self.client = db.DataFrameClient(database=self.market)

        self.init = self.client.query(
            f"SELECT open,high,low,close from {self.market} \
            LIMIT {self.max_period}").get(self.monitor).dropna()

        self.ema = Indicator('ema', talib.EMA, timeperiod=20)
        self.ema.update(self.init)

        self.ema3 = Indicator('ema3', talib.EMA, timeperiod=50)
        self.ema3.update(self.init)

        self.ema4 = Indicator('ema4', talib.EMA, timeperiod=100)
        self.ema4.update(self.init)
Example #18
                    default='127.0.0.1',
                    metavar='HOST',
                    help='Influx host [127.0.0.1]')
parser.add_argument(
    '-u',
    '--unit',
    dest='unit',
    metavar='UNIT',
    default='arb',
    help='Unit of measurement, e.g. volt, degC, ... [def: arb]')
parser.add_argument(metavar='measurement1:column1', dest='mc1')
parser.add_argument(metavar='measurement2:column2', dest='mc2')

args = parser.parse_args()

clt = influxdb.DataFrameClient(host=args.influx_host, port=8086)
clt.switch_database(args.influx_db)

m1, col1 = args.mc1.split(':')
m2, col2 = args.mc2.split(':')

query = f"SELECT {col1} FROM {m1}"
if args.start:
    query += f" WHERE time >= '{args.start}'"
if args.end:
    if args.start:
        query += f" AND time <= '{args.end}'"
    else:
        query += f" WHERE time <= '{args.end}'"

t1, val1 = query_to_np(clt, query)
Example #19
 def cmc_df_client(self):
     """The client connection to the CoinMarketCap database for daily data"""
     return influxdb.DataFrameClient(
         self.host, self.port, self.user, self.password,
         self.config_info.get_field('CMCDBName', self.setting_name))
Example #20
 def day_gdax_df_client(self):
     """The client connection to the GDAX database for daily data"""
     return influxdb.DataFrameClient(
         self.host, self.port, self.user, self.password,
         self.config_info.get_field('dailyDBName', self.setting_name))
Example #21
# connecting to the database without using the csv files available in the storage bucket
import influxdb
import pandas as pd
import matplotlib.pyplot as plt
import os

client = influxdb.DataFrameClient(host='35.203.94.147',
                                  port=8086,
                                  username='******',
                                  password='******')
#client = influxdb.InfluxDBClient(host='35.203.80.76', port=8086, username='******', password='******')
#client.create_retention_policy(name='2h_policy', duration='1w',default=True, replication="string")

print(client.get_list_database())
client.switch_database('PlaceBonaventure')

result = client.query('SHOW MEASUREMENTS;')  #list tables

os.makedirs("processed_data", exist_ok=True)

if not os.path.exists("processed_data/sensor_value.pt"):
    q = """SELECT * FROM "sensor_value" LIMIT 1000000"""

    res = client.query(q)

    # keep only the results that are pandas DataFrame instances
    keys = [k for k, v in res.items() if not isinstance(v, list)]
    values = [v for _, v in res.items() if not isinstance(v, list)]

    sensor_data = values[0]
Example #22
 def openDB(self):
     self.Db = influxdb.DataFrameClient(database="raw",
                                        host="localhost:8086")
     return self.Db
Example #23
def fetch_missing_data_fill_database(
        pair, port=INTERMED_ROUTER, start=None, end=None):
    """
    There are three main conditions this function fulfills.....

    1. IF NO DATABASE FOR THAT PAIR... It fetches data for a
    particular time range , creates a new database and saves the
    fetched data in that database, then returns fetched data

    2. IF THERE IS A DATABASE.... It checks in database to make sure
    there are no loopholes, if any is found it fetches dat DATA
    and saves the loop data in database, then returns the fetched data

    3. IF ["start" and "end"] VARIABLES do not default to None for
    that pair it fetches for data with regards to that time-range
    of start/end and returns it....

    """

    prefix = pair if pair == 'R_50' else 'frx' + pair
    empty = pd.DataFrame()
    ctx = zmq.Context()
    req = ctx.socket(zmq.REQ)
    req.setsockopt_string(zmq.IDENTITY, f'{pair}1')
    req.connect(f"tcp://localhost:{port}")

    def return_val(*args):
        nonlocal req, empty
        get_dict = populate_history_schema(*args, 'candles')
        req.send_json(get_dict)
        try:
            msg = req.recv_json()
            frame = Parse_History_Candle_To_DataFrame(msg['candles'])
            empty = empty.append(frame)
            print(msg)
        except json.JSONDecodeError:
            print(
                """Error decoding response sent from router,
                closing sockets and exiting""")
            raise Exception("Websocket is not responding...check why")

    def loop_and_fetch(gen):
        while True:
            try:
                n = next(gen)
                return_val(*n, prefix)
            except StopIteration:
                if len(empty) == 0:
                    raise Exception("Empty frame returned.....")
                break

    # CONDITION 3
    # I set it here to run before CONDITION 1/2
    if start is not None and end is not None:
        print("fetching from range values in binary api")
        gen = yield_partition(start, end)
        loop_and_fetch(gen)
        return empty

    # CONDITION 2: a database already exists, so fill any gaps in it
    try:
        client = db.DataFrameClient(host='localhost', port=8086, database=pair)
        empty = empty.append(
            client.query(f'select * from {pair}')[f'{pair}']
        )
        missing = return_missing_timestamps(empty)
        gen = return_stamp_interval(np.array(missing, dtype=np.int64))
        loop_and_fetch(gen)
        print("fetched from database")

    # CONDITION 1: no database for this pair yet, so bootstrap it
    except InfluxDBClientError:

        now = pd.Timestamp.utcnow()
        prev = now - pd.Timedelta(days=10)
        # prev = now - pd.Timedelta(weeks=6)
        i, e = int(prev.timestamp()), int(now.timestamp())
        gen = yield_partition(i, e)
        loop_and_fetch(gen)
        client.create_database(f'{pair}')

    # Code Cleanup and deallocation
    finally:
        client.write_points(empty, f'{pair}', protocol='json')
        client.close()
        req.close()
        ctx.term()

    print("FETCHED DATA SUCCESSFULLY.....")

    return empty
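
A usage sketch covering the three conditions in the docstring; the epoch
timestamps are placeholders (2020-01-01 through 2020-01-08):

# Conditions 1/2: bootstrap the pair's database or fill any gaps in it
df = fetch_missing_data_fill_database('EURUSD')

# Condition 3: fetch an explicit start/end range without touching the database
df_range = fetch_missing_data_fill_database('EURUSD',
                                            start=1577836800,
                                            end=1578441600)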
Example #24
import influxdb
from config import credentials, measurement

client = influxdb.DataFrameClient(**credentials)
query = ("select * from %s"
         " where time < now() - 90m"
         " and time > now() - 100m"
         " limit 10" % measurement)
df = client.query(query).get(measurement)
print(df.describe())
Example #25
    def calibrate(self, request, pk=None):
        if request.method == "POST":
            form = SensorModelForm(request.POST)
            ## TODO: Add validation. Currently problematic because there are issues with the models.
            #if form.is_valid():
            raw_sensors = form.data["raw_sensors"]
            calibration_entity = form.data["calibration_entity"]
            time_range_start = form.data["time_range_start"]
            time_range_end = form.data["time_range_end"]
            measurement = form.data["measurement"]

            db = influxdb.DataFrameClient(
                database="home_assistant",
                host="influxdb",
                username=os.environ['INFLUXDB_USER'],
                password=Path(
                    '/run/secrets/influxdb_password').read_text().strip("\n"))

            for sensor in raw_sensors.split(","):
                # TODO: Move this to flux so that I can maybe use bind_params. It works for queries, but not sub queries.
                results = db.query(
                    f"SELECT first(\"value\") AS \"value\", first(\"calibrated_value\") AS \"calibrated_value\" \
                    FROM (SELECT \"value\" FROM \"raw\" WHERE (entity_id =~ /({ sensor })/) AND time >= { time_range_start }ms and time <= { time_range_end }ms), \
                    (SELECT \"value\" AS \"calibrated_value\" FROM \"{ measurement }\" WHERE entity_id =~ /({ calibration_entity })/ AND time >= { time_range_start }ms and time <= { time_range_end }ms) \
                    GROUP BY time(10s)")

                # Clean up results
                results = pandas.concat(results,
                                        keys=[measurement, "raw"],
                                        axis=1)
                results = results.dropna(axis=0)
                X = results["raw"]
                Y = results[measurement]

                model = linear_model.LinearRegression().fit(
                    X.values.reshape(-1, 1), Y.values.reshape(-1, 1))
                expression = re.compile(
                    r'(?P<type>.+)_(?P<device>\d+)_(?P<sensor>.*)')
                s = expression.search(sensor)
                if s:
                    topic = f"seedship/{ s.group('type') }_{ s.group('device') }/{ s.group('sensor').replace('_raw', '_calibration') }"
                    print(
                        f"Linear calibration for { topic }: Slope({ model.coef_[0] }) Intercept({ model.intercept_[0]})"
                    )
                    ViewSensor.send_mqtt(topic, {
                        "slope": model.coef_[0][0],
                        "bias": model.intercept_[0]
                    },
                                         retain=True)

            return HttpResponse(status=204)

        form = SensorModelForm(
            initial={
                "calibration_entity": pk,
                "raw_sensors": request.GET.get("raw_sensors"),
                "time_range_start": request.GET.get("time_range_start"),
                "time_range_end": request.GET.get("time_range_end"),
                "measurement": request.GET.get("measurement"),
            })
        context = {
            'title': "Calibrate Linear Filter",
            'form': form,
        }
        return render(request, 'seedship_gui/gf-form.html', context)
Example #26
import shutil
import time
import pandas as pd
import influxdb
from random import randint
from settings.default import *
from influxdb import InfluxDBClient
import datetime
import hex2dec_ble

dbClient = InfluxDBClient(INFLUX_DB_HOST, INFLUX_DB_PORT, INFLUX_DB_USER,
                          INFLUX_DB_PASSWORD, INFLUX_DB_NAME)
batch_count = 10000
influx_pd = influxdb.DataFrameClient(INFLUX_DB_HOST,
                                     INFLUX_DB_PORT,
                                     INFLUX_DB_USER,
                                     INFLUX_DB_PASSWORD,
                                     INFLUX_DB_NAME,
                                     verify_ssl=False)


def move_processed_file(file, type):
    """
    Function to move the processed file to processed folder
    :param file:
    :return:
    """
    logging.info("moving the processed file: %s" % file)
    file_name_array = file.split(".")
    file_name = file_name_array[0]
    file_name = file_name + "_" + type + ".csv"
    shutil.move(SUB_DUMP_DIR + file, PROCESSED_FILES_FOLDER + file_name)
Example #27
def main(safety_on=False):

    # Set the working directory.
    os.chdir(WORKING_DIRECTORY)

    # Check for options passed to script
    try:
        opts, args = getopt.getopt(sys.argv[1:], "hf", ["help", "flush"])
    except getopt.GetoptError as err:
        # print help information and exit:
        print(err)  # will print something like "option -a not recognized"
        usage()
        sys.exit(2)

    flush = False
    for o, a in opts:
        if o in ('-h', '--help'):
            usage()
            sys.exit()
        elif o in ('-f', '--flush'):
            flush = True
        else:
            assert False, 'unhandled option'
            usage()
            sys.exit()

    print("\n")
    print("Oh hello there! Let's see if there are new files.")

    # Open connection to InfluxDB
    influxdb_client = influxdb.DataFrameClient(host=INFLUX_HOST,
                                               port=INFLUX_PORT,
                                               username=INFLUX_USER_NAME,
                                               password=INFLUX_PASSWORD,
                                               database=INFLUX_DB_NAME)

    # Check if old files should be flushed.
    if flush and not safety_on:
        print("I will flush the old file register first.")
        flush_old_file_names(SQLITE_LOCATION)
        flush = False

    # Make a dictionary that contains list of the different file types
    old_files = get_names_of_old_files(SQLITE_LOCATION, TYPE_LIST)

    # Load names of all available input files
    input_files = find_input_files(START_DATES, GLOB_PATTERNS)

    input_files = {
        k: (remove_todays_file(v) if k != 'calvin' else v)
        for k, v in input_files.items()
    }

    # Extract new files
    new_files = extract_new_files_only(input_files, old_files)

    # Only load the first x elements of each file list.
    # Remove the other names from the new_file dictionaries.
    export = {}

    for i in new_files.keys():
        export[i] = new_files[i][0:CHUNK_SIZE].copy()
        new_files[i] = new_files[i][CHUNK_SIZE:]

    # Declare curried function:
    load_chamber_data = load_data(export, TAGS['base'])
    # This is the step where we load the actual data into Python.
    raw_data = {
        i: load_chamber_data(i)
        for i in new_files.keys() if i != 'calvin'
    }

    # Make Calvin dataframe
    raw_data['calvin'] = list(
        map(lambda x: load_calvin_file(x, CALVIN_HEADER), export['calvin']))
    raw_data['calvin'] = pd.concat(raw_data['calvin'])

    # Sort dictionary elements by key names
    raw_data = dict(sorted(raw_data.items()))

    # Write chamber data to InfluxDB.
    for i in raw_data.keys():
        if i != 'calvin':
            if not len(raw_data[i].index) == 0:
                influxdb_client.write_points(dataframe=raw_data[i],
                                             measurement=('raw_' + i),
                                             tag_columns=None,
                                             batch_size=1000,
                                             protocol='line')
        else:  # If there is data from Calvin write to InfluxDB.
            if not len(raw_data['calvin'].index) == 0:
                influxdb_client.write_points(dataframe=raw_data['calvin'],
                                             measurement='raw_calvin',
                                             tag_columns=TAGS['calvin'],
                                             batch_size=1000,
                                             protocol='line')

    # Remove last item from export['calvin']
    if len(new_files['calvin']) == 0:
        export['calvin'] = export['calvin'][:-1]

    # Write the names of the files that were loaded into InfluxDB to the
    # local database.
    add_names_to_old_files(SQLITE_LOCATION, export)

    # Run function recursively if there still elements in the new file list
    if any([not x.empty for x in new_files.values()]):
        main(safety_on=True)

    return
Example #28
import influxdb
import pandas as pd

c = influxdb.DataFrameClient()
c.create_database("icinga2")
c.switch_database("icinga2")
df = pd.read_excel("Disk Space.xlsx")
df['time'] = df.index
df.index = pd.to_datetime(df.index)
c.write_points(df, "disk")
Example #29
file_location = MODEL_OUTPUT_FILE_LOCATION  #Set default value for file location

#Then check with user-input for different file location
prompt = "input path + filename or leave blank for default of " + MODEL_OUTPUT_FILE_LOCATION
filename_input = input(prompt)
if len(filename_input) > 0:
    file_location = filename_input

username = input("input username:"******"input password:"******"Can not connect to InfluxDB. Is your network connection ok?")

#Load the output from the model into a dataframe
output = pd.read_csv(file_location)
if 'uniqueID' not in output.columns:
    output.rename(columns={'uniqueId': 'uniqueID'}, inplace=True)
if len(output['uniqueID'].unique()) < len(output):
    raise Exception(