Ejemplo n.º 1
0
class CInflux:
    """Thin wrapper holding two InfluxDB clients for one logical store.

    A raw ``InfluxDBClient`` is bound to the companion ``<dbname>_l2``
    database, while a ``DataFrameClient`` is bound to ``<dbname>`` and
    used for all DataFrame-level operations.
    """

    def __init__(self, dbinfo, dbname):
        self.dbname = dbname
        self.l2_dbname = "%s_l2" % self.dbname
        host, port = dbinfo['host'], dbinfo['port']
        user, password = dbinfo['user'], dbinfo['password']
        self.client = InfluxDBClient(host, port, user, password,
                                     self.l2_dbname)
        self.df_client = DataFrameClient(host, port, user, password,
                                         self.dbname)

    def get(self):
        """Return every point of the measurement named after the db."""
        return self.df_client.query("select * from %s" % self.dbname)

    def get_newset_row(self):
        """Return the most recent row via InfluxDB's ``last(*)``."""
        return self.df_client.query("select last(*) from %s" % self.dbname)

    def set(self, df):
        """Write *df* into the measurement using the JSON protocol."""
        return self.df_client.write_points(df, self.dbname, protocol='json')

    def create(self):
        """Create the backing database."""
        self.df_client.create_database(self.dbname)

    def delete(self):
        """Drop the backing database."""
        self.df_client.drop_database(self.dbname)
Ejemplo n.º 2
0
def main(host='localhost', port=8086):
    """Round-trip a demo DataFrame through InfluxDB, then clean up."""
    user = '******'
    password = '******'
    dbname = 'demo'
    protocol = 'line'

    client = DataFrameClient(host, port, user, password, dbname)

    print('Create pandas DataFrame')
    hourly_index = pd.date_range(start='2014-11-16', periods=30, freq='H')
    df = pd.DataFrame(data=list(range(30)), index=hourly_index,
                      columns=['0'])

    print('Create database: ' + dbname)
    client.create_database(dbname)

    print('Write DataFrame')
    client.write_points(df, 'demo', protocol=protocol)

    print('Write DataFrame with Tags')
    tags = {'k1': 'v1', 'k2': 'v2'}
    client.write_points(df, 'demo', tags, protocol=protocol)

    print('Read DataFrame')
    client.query('select * from demo')

    print('Delete database: ' + dbname)
    client.drop_database(dbname)
Ejemplo n.º 3
0
def main(host='localhost', port=8086):
    """Round-trip a demo DataFrame through InfluxDB via JSON protocol."""
    user = '******'
    password = '******'
    dbname = 'demo'
    # Temporarily used to avoid line protocol time conversion issues #412, #426, #431.
    protocol = 'json'

    client = DataFrameClient(host, port, user, password, dbname)

    print("Create pandas DataFrame")
    idx = pd.date_range(start='2014-11-16', periods=30, freq='H')
    df = pd.DataFrame(data=list(range(30)), index=idx)

    print("Create database: " + dbname)
    client.create_database(dbname)

    print("Write DataFrame")
    client.write_points(df, 'demo', protocol=protocol)

    print("Write DataFrame with Tags")
    tags = {'k1': 'v1', 'k2': 'v2'}
    client.write_points(df, 'demo', tags, protocol=protocol)

    print("Read DataFrame")
    client.query("select * from demo")

    print("Delete database: " + dbname)
    client.drop_database(dbname)
def main(host='localhost', port=8086):
    """Instantiate the connection to the InfluxDB client."""
    user = '******'
    password = '******'
    dbname = 'demo'
    # Temporarily avoid line protocol time conversion issues #412, #426, #431.
    protocol = 'json'

    client = DataFrameClient(host, port, user, password, dbname)

    print("Create pandas DataFrame")
    frame = pd.DataFrame(
        data=list(range(30)),
        index=pd.date_range(start='2014-11-16', periods=30, freq='H'))

    print("Create database: " + dbname)
    client.create_database(dbname)

    print("Write DataFrame")
    client.write_points(frame, 'demo', protocol=protocol)

    print("Write DataFrame with Tags")
    demo_tags = {'k1': 'v1', 'k2': 'v2'}
    client.write_points(frame, 'demo', demo_tags, protocol=protocol)

    print("Read DataFrame")
    client.query("select * from demo")

    print("Delete database: " + dbname)
    client.drop_database(dbname)
Ejemplo n.º 5
0
def create_db_add_points_example(num_points, batch_size):
    """Benchmark helper: create a test database, write ``num_points``
    random points in batches of ``batch_size``, then drop the database.

    Relies on module-level connection settings (``host``, ``port``,
    ``user``, ``password``, ``test_db``) defined elsewhere in the file.
    """
    # BUG FIX: `pd.np` was removed in pandas 1.0 — import numpy directly.
    import numpy as np

    client = DataFrameClient(host, port, user, password, test_db)
    # BUG FIX: pd.DatetimeIndex(start=..., periods=..., freq=...) was
    # removed in modern pandas; pd.date_range builds the same
    # second-resolution index.
    index = pd.date_range(start=pd.Timestamp("2010-01-01"),
                          periods=num_points, freq="S")
    points = pd.DataFrame(index=index, data=np.random.randn(num_points, 1))
    client.create_database(test_db)
    client.write_points(points, "perf-test", batch_size=batch_size,
                        protocol='line')
    client.drop_database(test_db)
Ejemplo n.º 6
0
def main(host='ec2-18-204-6-116.compute-1.amazonaws.com', port=8086):
    """Instantiate the connection to the InfluxDB client."""
    user = '******'
    password = '******'
    dbname = 'demo'
    protocol = 'line'

    client = DataFrameClient(host, port, user, password, dbname)

    print("Create pandas DataFrame")
    hourly = pd.date_range(start='2014-11-16', periods=30, freq='H')
    df = pd.DataFrame(data=list(range(30)), index=hourly, columns=['0'])

    print("Create database: " + dbname)
    client.create_database(dbname)

    print("Write DataFrame")
    client.write_points(df, 'demo', protocol=protocol)

    print("Write DataFrame with Tags")
    tags = {'k1': 'v1', 'k2': 'v2'}
    client.write_points(df, 'demo', tags, protocol=protocol)

    print("Read DataFrame")
    client.query("select * from demo")

    print("Delete database: " + dbname)
    client.drop_database(dbname)
Ejemplo n.º 7
0
def main(host='localhost', port=8086):
    """Write a demo DataFrame (default protocol) and read it back."""
    user = '******'
    password = '******'
    dbname = 'example'

    client = DataFrameClient(host, port, user, password, dbname)

    print("Create pandas DataFrame")
    idx = pd.date_range(start='2014-11-16', periods=30, freq='H')
    df = pd.DataFrame(data=list(range(30)), index=idx)

    print("Create database: " + dbname)
    client.create_database(dbname)

    print("Write DataFrame")
    client.write_points(df, 'demo')

    print("Write DataFrame with Tags")
    client.write_points(df, 'demo', {'k1': 'v1', 'k2': 'v2'})

    print("Read DataFrame")
    client.query("select * from demo")

    print("Delete database: " + dbname)
    client.drop_database(dbname)
Ejemplo n.º 8
0
class CInflux:
    """InfluxDB DataFrame store with a Redis-backed database-name cache.

    Existing database names are mirrored in the Redis set
    ``ALL_IN_DATABASES`` so that create/delete can check existence
    without querying InfluxDB every time.
    """

    def __init__(self, dbinfo, dbname, iredis=None):
        # BUG FIX: the original default (iredis=create_redis_obj()) was
        # evaluated once at import time, doing I/O on module import and
        # sharing one connection across every instance. Defer creation
        # until an instance is actually built.
        self.redis = iredis if iredis is not None else create_redis_obj()
        self.dbname = dbname
        self.df_client = DataFrameClient(dbinfo['host'],
                                         dbinfo['port'],
                                         dbinfo['user'],
                                         dbinfo['password'],
                                         self.dbname,
                                         timeout=10)

    def __del__(self):
        self.redis = None
        self.df_client = None

    def get_all_databases(self):
        """Return the set of database names, preferring the Redis cache."""
        if self.redis.exists(ALL_IN_DATABASES):
            return set(
                str(dbname, encoding="utf8")
                for dbname in self.redis.smembers(ALL_IN_DATABASES))
        all_dbs = self._get_all_databases()
        for _db in all_dbs:
            self.redis.sadd(ALL_IN_DATABASES, _db)
        return all_dbs

    def _get_all_databases(self):
        """Fetch database names straight from InfluxDB (typo fixed from
        the original private name ``_get_all_databses``)."""
        return [x['name'] for x in self.df_client.get_list_database()]

    def get(self, dbname=None):
        """Return all rows from *dbname* (defaults to the bound db)."""
        if dbname is None:
            dbname = self.dbname
        return self.df_client.query("select * from %s" % dbname)

    def get_newset_row(self, dbname=None):
        """Return the most recent row (``last(*)``) from *dbname*."""
        if dbname is None:
            dbname = self.dbname
        return self.df_client.query("select last(*) from %s" % dbname)

    def set(self, df, dbname=None):
        """Write *df*; return True on success, False on server error."""
        dbname = dbname if dbname is not None else self.dbname
        try:
            self.df_client.write_points(df, dbname, protocol='json')
            return True
        except InfluxDBServerError as e:
            logger.error(e)
            return False

    def create(self, dbname=None):
        """Create *dbname* (idempotent) and cache its name in Redis."""
        if dbname is None:
            dbname = self.dbname
        if dbname in self.get_all_databases():
            return True
        self.df_client.create_database(dbname)
        self.redis.sadd(ALL_IN_DATABASES, dbname)
        return True

    def delete(self, dbname=None):
        """Drop *dbname* (idempotent) and evict its name from Redis."""
        if dbname is None:
            dbname = self.dbname
        if dbname not in self.get_all_databases():
            return True
        self.df_client.drop_database(dbname)
        self.redis.srem(ALL_IN_DATABASES, dbname)
        return True
 def test_drop_numeric_named_database(self):
     """Test drop numeric db for TestInfluxDBClient object."""
     # Mock the /query endpoint so no real InfluxDB server is needed.
     cli = DataFrameClient(database='db')
     with requests_mock.Mocker() as m:
         m.register_uri(requests_mock.POST,
                        "http://localhost:8086/query",
                        text='{"results":[{}]}')
         cli.drop_database('123')
         # The client must quote the numeric name so the server does
         # not parse 123 as a number.
         self.assertEqual(m.last_request.qs['q'][0], 'drop database "123"')
Ejemplo n.º 10
0
def main(host='localhost', port=8086):
    """Instantiate the connection to the InfluxDB client."""
    # BUG FIX: this docstring originally appeared *after* basicConfig(),
    # where it was a discarded string expression, not a docstring.
    logging.basicConfig(level=logging.INFO)
    user = '******'
    password = '******'
    dbname = 'demo'
    protocol = 'json'

    client = DataFrameClient(host, port, user, password, dbname)

    logging.info("Create pandas DataFrame")
    today = datetime.datetime.today()
    date_list = get_week_list_by_date(today)
    df = pd.DataFrame(data=list(range(len(date_list))),
                      index=date_list,
                      columns=['W'])
    logging.info("Create database: " + dbname)
    client.create_database(dbname)

    logging.info("Write DataFrame to dsm_power table")
    client.write_points(df.copy(),
                        'dsm_power',
                        tags={'device_id': 'II8583-Z5EKI-N9700'},
                        protocol=protocol)
    logging.info("Write DataFrame to electric_power table")
    client.write_points(df.copy(),
                        'electric_power',
                        tags={'device_id': 'II8583-H9871-78D4F'},
                        protocol=protocol)

    logging.info("origin dataframe: {}".format(df))

    logging.info("Read DataFrame from dsm_power table")
    fetch_df = client.query("select * from dsm_power")['dsm_power']
    # Strip the timezone from the returned index (tz_localize(None)).
    fetch_df.index = fetch_df.index.tz_localize(None)
    logging.info("fetch: {}".format(fetch_df))

    logging.info("Read DataFrame from electric_power table")
    fetch_df = client.query("select * from electric_power")['electric_power']
    fetch_df.index = fetch_df.index.tz_localize(None)
    logging.info("fetch: {}".format(fetch_df))

    logging.info("get data by specfic time range")
    start_date = "2018-06-04"
    end_date = "2018-06-06"
    fetch_df = client.query("select * from dsm_power where time > '" +
                            start_date + "' and time < '" + end_date +
                            "'")['dsm_power']
    fetch_df.index = fetch_df.index.tz_localize(None)
    logging.info("fetch: {}".format(fetch_df))

    logging.info("Delete database: " + dbname)
    client.drop_database(dbname)
 def test_drop_numeric_named_database(self):
     """Test drop numeric db for TestInfluxDBClient object."""
     # Mock the /query endpoint so no real InfluxDB server is needed.
     cli = DataFrameClient(database='db')
     with requests_mock.Mocker() as m:
         m.register_uri(
             requests_mock.POST,
             "http://localhost:8086/query",
             text='{"results":[{}]}'
         )
         cli.drop_database('123')
         # The client must quote the numeric name so the server does
         # not parse 123 as a number.
         self.assertEqual(
             m.last_request.qs['q'][0],
             'drop database "123"'
         )
Ejemplo n.º 12
0
def main(pair=None, logic=None):
    """Drop market-pair databases.

    When *logic* is truthy, drop every database whose name is in
    MARKET_PAIRS and return 'Finished'; otherwise drop only *pair* if
    it is among them.
    """
    client = DataFrameClient(host='localhost', port=8086)
    db_entries = client.get_list_database()

    known_pairs = [name for entry in db_entries
                   for name in entry.values()
                   if name in MARKET_PAIRS]

    if logic:
        for name in known_pairs:
            client.drop_database(name)
        return 'Finished'
    if pair in known_pairs:
        client.drop_database(pair)
        return
Ejemplo n.º 13
0
def main(host='localhost', port=8086):
    """Fetch one year of Ford ('F') bars from IEX and round-trip them
    through InfluxDB."""
    user = '******'
    password = '******'
    dbname = 'Stock Data'
    protocol = 'line'
    client = DataFrameClient(host, port, user, password, dbname)
    print('passed DataFrameClient creation')
    start = datetime.datetime(2015, 1, 1)
    end = datetime.datetime(2016, 1, 1)
    bars = pdr.DataReader('F', 'iex', start, end)
    # Normalize the index into a proper DatetimeIndex before writing.
    bars.index = pd.DatetimeIndex(pd.to_datetime(list(bars.index)))
    print(type(bars.index))
    print('Trying to create database')
    client.create_database(dbname)
    client.write_points(bars, 'Stock', protocol=protocol)
    client.query("select * from stock")
    client.drop_database(dbname)
    print('Finished')
Ejemplo n.º 14
0
class DbClient:
    """InfluxDB access helper; unset parameters fall back to configParser."""

    def __init__(self, database=None, host=None, port=None):
        """Resolve connection settings and open a DataFrameClient.

        :param database: database name; defaults to config ``database.name``
        :param host: server host; defaults to config ``database.host``
        :param port: server port; defaults to config ``database.port``
        """
        if database is None:
            self.database = configParser['database']['name']
        else:
            self.database = database

        if host is None:
            self.host = configParser.get('database', 'host')
        else:
            self.host = host

        if port is None:
            self.port = configParser.get('database', 'port')
        else:
            # BUG FIX: the original assigned `database` here, so an
            # explicitly supplied port was replaced by the database name.
            self.port = port

        self.client = DataFrameClient(host=self.host, port=self.port, database=self.database)

    def save_to_db(self, df, measurement, tags=None):
        """Write *df* to *measurement*, optionally with tags."""
        if tags is None:
            print("Write DataFrame")
            self.client.write_points(df, database=self.database, measurement=measurement, protocol='json')
        else:
            print("Write DataFrame with Tags")
            self.client.write_points(df, database=self.database, measurement=measurement, tags=tags, protocol='json')

    def fetch_from_db(self, query):
        """Run *query* and return the resulting DataFrame(s)."""
        print("Read DataFrame")
        return self.client.query(query)

    def create_db(self):
        """Create the (hard-coded) crypto_analyzer database."""
        self.client.create_database('crypto_analyzer')

    def drop_db(self):
        """Drop the configured database."""
        self.client.drop_database(self.database)

    def is_existing(self):
        """Return True when at least one database exists on the server.

        BUG FIX: the original used ``or``, which returned True for an
        empty list and raised TypeError (len(None)) when the client
        returned None.
        """
        result = self.client.get_list_database()
        return result is not None and len(result) > 0
Ejemplo n.º 15
0
class InfluxTSDB:
    """Minimal wrapper holding a DataFrameClient for one database."""

    def __init__(self,
                 dbhost='localhost',
                 dbport=8086,
                 dbuser='******',
                 dbpassword='******',
                 dbname='test'):
        # Only connect when a host is given (None skips client creation).
        if dbhost is not None:
            self.influxdb = DataFrameClient(dbhost, dbport, dbuser, dbpassword,
                                            dbname)
            self.dbname = dbname

    def drop_db(self):
        """Drop the bound database.

        Returns True only when the server reports the database did not
        exist; False otherwise (mirrors the original contract).
        """
        try:
            self.influxdb.drop_database(self.dbname)
        # BUG FIX: the original used Python 2 syntax
        # ("except InfluxDBClientError, e:"), a SyntaxError in Python 3.
        except InfluxDBClientError as e:
            if str(e) == "database does not exist!":
                return True
        return False
Ejemplo n.º 16
0
class INSERTDATA:
    """Reset the UEData database on the RIC platform's InfluxDB."""

    def __init__(self):
        host = 'r4-influxdb.ricplt'
        self.client = DataFrameClient(host, '8086', 'root', 'root')
        dbname = 'UEData'
        self.dropdb(dbname)
        self.createdb(dbname)

    def createdb(self, dbname):
        """Create *dbname* and make it the active database."""
        print("Create database: " + dbname)
        self.client.create_database(dbname)
        self.client.switch_database(dbname)

    def dropdb(self, dbname):
        """Drop *dbname*."""
        print("DROP database: " + dbname)
        self.client.drop_database(dbname)

    def dropmeas(self, measname):
        """Drop a single measurement from the active database."""
        print("DROP MEASUREMENT: " + measname)
        self.client.query('DROP MEASUREMENT ' + measname)
Ejemplo n.º 17
0
def test_write_to_db():
    """End-to-end test: push one mocked CSV line through write_to_db and
    check that the converted value lands in InfluxDB."""
    from connector.connector import write_to_db, wait_for_influxdb
    db_host = "influxdb_test"
    db_port = 8086
    db_username = "******"
    db_password = "******"
    db_database = "test"
    # Connects to local InfluxDB
    db_client = DataFrameClient(host=db_host,
                                port=db_port,
                                username=db_username,
                                password=db_password,
                                database=db_database)
    # waits for influxdb service to be active
    wait_for_influxdb(db_client=db_client)
    # Creates local Database
    db_client.create_database('test')
    # Create testing CSV file with one mock up line
    now = datetime.now()
    one_line = str.encode(
        "adc,channel,time_stamp,value\n1,1,{},100".format(now))
    with open("testing.csv", "wb") as csvfile:
        csvfile.write(one_line)
    # BUG FIX: the original opened this file without ever closing it.
    # Read it back inside a context manager — as bytes directly, which
    # makes the original str.encode round-trip unnecessary.
    with open("testing.csv", "rb") as f:
        payload = f.read()
    write_to_db(payload=payload, db_client=db_client)
    written = db_client.query('SELECT * FROM "measurements"')
    dataframe = written['measurements']
    value = dataframe['mV'][0]
    # Remove mockup CSV file
    os.remove("testing.csv")
    # Deletes mockup DB
    db_client.drop_database('test')
    assert value == 100 * 0.125
    # bug : dataframe.index.values[0] has more precision than np.datetime64(now)
    # assert dataframe.index.values[0] == np.datetime64(now)
class influx_logging():
    def __init__(self, host, port, user, password, dbname):
        self.host = host
        self.port = port
        self.user = '******'
        self.password = '******'
        self.dbname = 'demo'
        self.protocol = 'line'
        self.client = DataFrameClient(self.host, self.port, self.user,
                                      self.password, self.dbname)
        pass

    def main(self, actual_joint_state, encoded_joint_state):
        print("Create database: " + self.dbname)
        self.client.create_database(self.dbname)

        print("Write DataFrame with Tags")
        df = pd.DataFrame(data=list(range(1)),
                          index=pd.date_range(start=datetime.datetime.utcnow(),
                                              periods=1,
                                              freq='H'),
                          columns=['num_data_df'])
        self.client.write_points(df,
                                 'actual_joint_state', {
                                     'joint1': actual_joint_state[0],
                                     'joint2': actual_joint_state[1]
                                 },
                                 protocol=self.protocol)
        self.client.write_points(df,
                                 'encoded_joint_state', {
                                     'joint1': encoded_joint_state[0],
                                     'joint2': encoded_joint_state[1]
                                 },
                                 protocol=self.protocol)
        print("Delete database: " + self.dbname)
        self.client.drop_database(self.dbname)
Ejemplo n.º 19
0
        "location to locally saved symbols file (to prevent downloading it every time)"
    )
    args = parser.parse_args()

    client = DataFrameClient(host=args.host,
                             port=args.port,
                             username=args.user,
                             password=args.password,
                             database=args.database,
                             pool_size=1)

    logging.getLogger(__name__).info("Updating database with arguments: " +
                                     str(args))

    if args.drop:
        client.drop_database(args.database)

    if args.database not in [d['name'] for d in client.get_list_database()]:
        client.create_database(args.database)
        client.query(
            "ALTER RETENTION POLICY autogen ON cache DURATION INF REPLICATION 1 SHARD DURATION 2600w DEFAULT"
        )

    client.switch_database(args.database)

    with IQFeedHistoryProvider(num_connections=args.iqfeed_conn) as history:
        all_symbols = {
            (s, args.interval_len, args.interval_type)
            for s in set(
                iqutil.get_symbols(symbols_file=args.symbols_file).keys())
        }
class InfluxDBConnector(object):
    """Convenience facade over :class:`DataFrameClient`.

    Connection settings are kept on the instance and a client is opened
    immediately on construction.

    :param username: user to connect
    :type username: str
    :param password: password of the user
    :type password: str
    :param port: port to connect to InfluxDB
    :type port: int
    :param database: database name to connect to
    :type database: str
    :param host: hostname to connect to InfluxDB
    :type host: str
    """

    def __init__(self,
                 username='******',
                 password='******',
                 port=8086,
                 database=None,
                 host='localhost'):
        self.username = username
        self.password = password
        self.port = port
        self.database = database
        self.host = host
        self.client = DataFrameClient(self.host, self.port, self.username,
                                      self.password, self.database)

    def create_database(self, database):
        """Create *database* in InfluxDB."""
        self.client.create_database(database)

    def delete_database(self, database):
        """Drop *database* from InfluxDB."""
        self.client.drop_database(database)

    def list_databases(self):
        """Return all databases as a list of dictionaries."""
        return self.client.get_list_database()

    def list_measurements(self):
        """Return the list of measurements in the current database."""
        return self.client.get_list_measurements()

    def write_points(self,
                     dataframe,
                     measurement,
                     tags=None,
                     tag_columns=None,
                     field_columns=None,
                     time_precision=None,
                     database=None,
                     retention_policy=None,
                     batch_size=None,
                     protocol='line',
                     numeric_precision=None):
        """Write *dataframe* into *measurement*.

        Every argument is forwarded verbatim to
        :meth:`DataFrameClient.write_points`; see that method for the
        full parameter semantics (protocol is 'line' or 'json',
        batch_size splits large writes, numeric_precision controls
        float rounding).

        :returns: True, if the write operation is successful
        :rtype: bool
        """
        return self.client.write_points(
            dataframe, measurement, tags, tag_columns, field_columns,
            time_precision, database, retention_policy, batch_size,
            protocol, numeric_precision)

    def query(self,
              query,
              params=None,
              epoch=None,
              expected_response_code=200,
              database=None,
              raise_errors=True,
              chunked=False,
              chunk_size=0,
              dropna=True):
        """Run *query* and return the result as DataFrame(s).

        Every argument is forwarded verbatim to
        :meth:`DataFrameClient.query`; see that method for the full
        parameter semantics (epoch selects timestamp format, chunked /
        chunk_size enable chunked responses, dropna removes all-missing
        columns).

        :returns: the queried data
        :rtype: :class:`~.ResultSet`
        """
        return self.client.query(
            query, params, epoch, expected_response_code, database,
            raise_errors, chunked, chunk_size, dropna)
Ejemplo n.º 21
0
class InfluxHelper(object):
    """Writes sensor time-series values from Django models into InfluxDB."""

    class MissingConnectionException(Exception):
        """Client is not defined or connected"""
        pass

    def __init__(self, *args, **kwargs):
        self.client = None
        self.batch_size = 10000  # points per write_points call
        self.connection_info = settings.INFLUX_CONNECTION

    def connect_to_dataframe_client(self):
        """Open the DataFrameClient using the configured connection info."""
        self.client = DataFrameClient(
            host=self.connection_info['host'],
            port=self.connection_info['port'],
            username=self.connection_info['username'],
            password=self.connection_info['password'],
            database=self.connection_info['database'])

    def recreate_database(self):
        """Drop and re-create the database, restoring the read grant."""
        read_user = self.connection_info['read_username']
        database_name = self.connection_info['database']

        if not self.client:
            raise InfluxHelper.MissingConnectionException(
                'InfluxDB client is not connected.')

        self.client.drop_database(database_name)
        self.client.create_database(database_name)
        self.client.grant_privilege('read', database_name, read_user)

    def write_all_sensor_values(self, sensor):
        """Write every stored value for *sensor* (from datetime.min)."""
        self.write_sensor_values(sensor, datetime.min)

    def get_series_last_value(self, identifier):
        """Return the datetime of the newest point in *identifier*, or
        None when the series is empty."""
        query_string = 'select last(DataValue), time from {identifier}'.format(
            identifier=identifier)
        result = self.client.query(query_string,
                                   database=self.connection_info['database'])
        if result and len(result) == 1:
            dataframe = result[identifier]  # type: pd.DataFrame
            return dataframe.first_valid_index().to_pydatetime()

    def write_sensor_values(self, sensor, starting_datetime):
        """Write values newer than *starting_datetime*; return the number
        of rows written (0 when there is nothing to write)."""
        values = TimeSeriesResultValue.objects.filter(value_datetime__gt=starting_datetime, result_id=sensor.result_id)\
            .annotate(DateTime=F('value_datetime'))\
            .annotate(UTCOffset=F('value_datetime_utc_offset'))\
            .annotate(DataValue=F('data_value'))
        values_dataframe = self.prepare_data_values(values)
        if values_dataframe.empty:
            return 0

        result = self.add_dataframe_to_database(values_dataframe,
                                                sensor.influx_identifier)
        del values_dataframe
        return result

    def prepare_data_values(self, values_queryset):
        """Convert a queryset into a cleaned, DateTime-indexed DataFrame.

        Non-numeric DataValue/UTCOffset entries are coerced to NaN and
        the corresponding rows dropped.
        """
        dataframe = pd.DataFrame.from_records(
            values_queryset.values('DateTime', 'UTCOffset', 'DataValue'))
        if dataframe.empty:
            return dataframe

        dataframe['DateTime'] = pd.to_datetime(dataframe['DateTime'])
        dataframe.set_index(['DateTime'], inplace=True)
        dataframe['DataValue'] = pd.to_numeric(dataframe['DataValue'],
                                               errors='coerce').astype(
                                                   np.float64)
        dataframe['UTCOffset'] = pd.to_numeric(dataframe['UTCOffset'],
                                               errors='coerce').astype(
                                                   np.float64)
        dataframe.dropna(how='any', inplace=True)
        return dataframe

    def add_dataframe_to_database(self, dataframe, identifier):
        """Write *dataframe* to *identifier*; return rows written, or 0
        on failure."""
        try:
            write_success = self.client.write_points(
                dataframe,
                identifier,
                time_precision='s',
                batch_size=self.batch_size)
            return len(dataframe) if write_success else 0
        except InfluxDBClientError as e:
            # BUG FIX: the original used a Python 2 print statement
            # (SyntaxError under Python 3) and the removed
            # Exception.message attribute.
            print('Error while writing to database {}: {}'.format(
                identifier, e))
            return 0
Ejemplo n.º 22
0
class DbClient(metaclass=Singleton):
    """Singleton InfluxDB access helper configured from configParser."""

    def __init__(self, database=None, host=None, port=None):
        # Unset parameters fall back to the [database] config section.
        if database is None:
            self.database = configParser['database']['name']
        else:
            self.database = database

        if host is None:
            self.host = configParser['database']['host']
        else:
            self.host = host

        if port is None:
            self.port = configParser['database']['port']
        else:
            self.port = port

        self._instance = DataFrameClient(host=self.host,
                                         port=self.port,
                                         database=self.database)

    def save_to_db(self, df, measurement, tags=None):
        """ Saving dataframe to influx db """
        if tags is None:
            print("Write DataFrame")
            self._instance.write_points(df,
                                        database=self.database,
                                        measurement=measurement,
                                        protocol='json')
        else:
            print("Write DataFrame with Tags")
            self._instance.write_points(df,
                                        database=self.database,
                                        measurement=measurement,
                                        tags=tags,
                                        protocol='json')

    def fetch_from_db(self, query):
        """ Fetching data from influx db """

        print("Read from influx db")
        return self._instance.query(query)

    def create_db(self):
        """ Creating the influx db database """

        print("Create influx db")
        self._instance.create_database('crypto_analyzer')

    def drop_db(self):
        """ Dropping the influx db database """

        print("Influx database with all measurements")
        self._instance.drop_database(self.database)

    def df_int_to_float(self, df):
        """ Converting the int data type columns to float """

        for i in df.select_dtypes('int64').columns.values:
            df[i] = df[i].astype(float)
        return df

    def is_existing(self):
        """ Checks if database already exists.

        BUG FIX: the original used ``or``, which returned True for an
        empty list and raised TypeError (len(None)) when the client
        returned None.
        """
        result = self._instance.get_list_database()
        return result is not None and len(result) > 0
import os
import sys
import time
import datetime
import numpy as np
import pandas

from influxdb import InfluxDBClient, DataFrameClient
from influxdb.exceptions import InfluxDBClientError

# Connect to the local InfluxDB instance, bound to 'hrs_experiment'.
client = DataFrameClient('localhost', 8086, 'root', 'root', 'hrs_experiment')

try:
    print("Drop database")
    client.drop_database('hrs_experiment')
except InfluxDBClientError:
    pass # Didn't already exist for dropping.

print("Create database")
client.create_database('hrs_experiment')

# Per-file ingest loop: each CLI argument is a CSV file whose basename
# encodes a station ID before the first underscore (prefixed with 'Q').
write_times = []
for i in range(1, len(sys.argv)):
    print("Processing file: ", sys.argv[i], '...')
    station_id = 'Q' + os.path.basename(sys.argv[i]).split('_')[0]
    print("Using station ID: ", station_id, '...')
    df = pandas.read_csv(sys.argv[i], parse_dates=True, index_col=0, header = None)
    print("Creating JSON data...")
    print("Writing data...")
    start = time.time()
    # Groups the frame into chunks of 100 rows; the loop body continues
    # beyond this excerpt.
    for k, g in df.groupby(np.arange(len(df))//100):
# Instantiate the client and establish a connection to the database.
client = DataFrameClient(host = host,
                         port = port,
                         username = username,
                         password = password, 
                         database = database)

# Load the data you want into a pandas DataFrame.
data = pd.read_csv("name_of_file.csv")

# Create a new database. Replace 'demo' with any name you want.
client.create_database('demo')

# Delete a database. Replace 'demo' with the name of the database to drop.
client.drop_database('demo')


# Write data to the database.
# First make sure the data is correctly transformed:
#   1) the timestamp column must be in datetime format,
#   2) the timestamp column must be set as the index.
# Examples:

# Transform the timestamp column into datetime format.
data['timestamp'] = pd.to_datetime(data['timestamp'])

# Set the timestamp column as the index.
data = data.set_index('timestamp')

# Once the data is clean and properly aligned it can be sent to InfluxDB
# (the actual write continues beyond this excerpt).