Example #1
def main(host='localhost', port=8086):
    user = '******'
    password = '******'
    dbname = 'example'

    client = DataFrameClient(host, port, user, password, dbname)

    print("Create pandas DataFrame")
    df = pd.DataFrame(data=list(range(30)),
                      index=pd.date_range(start='2014-11-16',
                                          periods=30, freq='H'))

    print("Create database: " + dbname)
    client.create_database(dbname)

    print("Write DataFrame")
    client.write_points(df, 'demo')

    print("Write DataFrame with Tags")
    client.write_points(df, 'demo', {'k1': 'v1', 'k2': 'v2'})

    print("Read DataFrame")
    client.query("select * from demo")

    print("Delete database: " + dbname)
    client.drop_database(dbname)
Example #2
def main(host='localhost', port=8086):
    """Instantiate the connection to the InfluxDB client."""
    user = '******'
    password = '******'
    dbname = 'demo'
    # Temporarily avoid line protocol time conversion issues #412, #426, #431.
    protocol = 'json'

    client = DataFrameClient(host, port, user, password, dbname)

    print("Create pandas DataFrame")
    df = pd.DataFrame(data=list(range(30)),
                      index=pd.date_range(start='2014-11-16',
                                          periods=30, freq='H'))

    print("Create database: " + dbname)
    client.create_database(dbname)

    print("Write DataFrame")
    client.write_points(df, 'demo', protocol=protocol)

    print("Write DataFrame with Tags")
    client.write_points(df, 'demo',
                        {'k1': 'v1', 'k2': 'v2'}, protocol=protocol)

    print("Read DataFrame")
    client.query("select * from demo")

    print("Delete database: " + dbname)
    client.drop_database(dbname)
Example #3
    def test_create_database(self):
        """Test create database for TestInfluxDBClient object."""
        cli = DataFrameClient(database='db')
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST,
                           "http://localhost:8086/query",
                           text='{"results":[{}]}')
            cli.create_database('new_db')
            self.assertEqual(m.last_request.qs['q'][0],
                             'create database "new_db"')
Example #4
def main(host='localhost', port=8086):
    """Instantiate the connection to the InfluxDB client."""
    logging.basicConfig(level=logging.INFO)
    user = '******'
    password = '******'
    dbname = 'demo'
    protocol = 'json'

    client = DataFrameClient(host, port, user, password, dbname)

    logging.info("Create pandas DataFrame")
    today = datetime.datetime.today()
    date_list = get_week_list_by_date(today)
    df = pd.DataFrame(data=list(range(len(date_list))),
                      index=date_list,
                      columns=['W'])
    logging.info("Create database: " + dbname)
    client.create_database(dbname)

    logging.info("Write DataFrame to dsm_power table")
    client.write_points(df.copy(),
                        'dsm_power',
                        tags={'device_id': 'II8583-Z5EKI-N9700'},
                        protocol=protocol)
    logging.info("Write DataFrame to electric_power table")
    client.write_points(df.copy(),
                        'electric_power',
                        tags={'device_id': 'II8583-H9871-78D4F'},
                        protocol=protocol)

    logging.info("origin dataframe: {}".format(df))

    logging.info("Read DataFrame from dsm_power table")
    fetch_df = client.query("select * from dsm_power")['dsm_power']
    fetch_df.index = fetch_df.index.tz_localize(None)
    logging.info("fetch: {}".format(fetch_df))

    logging.info("Read DataFrame from electric_power table")
    fetch_df = client.query("select * from electric_power")['electric_power']
    fetch_df.index = fetch_df.index.tz_localize(None)
    logging.info("fetch: {}".format(fetch_df))

    logging.info("get data by specfic time range")
    start_date = "2018-06-04"
    end_date = "2018-06-06"
    fetch_df = client.query("select * from dsm_power where time > '" +
                            start_date + "' and time < '" + end_date +
                            "'")['dsm_power']
    fetch_df.index = fetch_df.index.tz_localize(None)
    logging.info("fetch: {}".format(fetch_df))

    logging.info("Delete database: " + dbname)
    client.drop_database(dbname)
Example #5
    def test_create_numeric_named_database(self):
        """Test create db w/numeric name for TestInfluxDBClient object."""
        cli = DataFrameClient(database='db')
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/query",
                text='{"results":[{}]}'
            )
            cli.create_database('123')
            self.assertEqual(
                m.last_request.qs['q'][0],
                'create database "123"'
            )
Example #6
def load():
    df = pd.read_csv('GHI_DHI_Temp_Wind_20130101_english_units.csv',
                     skiprows=1)
    df.index = pd.to_datetime(df['DATE (MM/DD/YYYY)'] + ' ' + df['MST'],
                              format='%m/%d/%Y %H:%M')
    df.columns = [
        u'DATE (MM/DD/YYYY)', u'MST',
        u'AtmosphericAnalogKind_irradanceGlobalHorizontal',
        u'AtmosphericAnalogKind_irradanceDirectNormal',
        u'AtmosphericAnalogKind_irradanceDiffuseHorizontal',
        u'AtmosphericAnalogKind_ambientTemperature',
        u'AtmosphericAnalogKind_humidity', u'AtmosphericAnalogKind_speed',
        u'AtmosphericAnalogKind_bearing'
    ]
    dbname = 'proven'

    protocol = 'json'

    client = DataFrameClient(host='localhost', port=8086)

    #    print("Delete database: " + dbname)
    #    client.drop_database(dbname)

    print("Create pandas DataFrame")

    print("Create database: " + dbname)
    #    client.drop_database(dbname)
    client.create_database(dbname)
    dbs = client.get_list_database()
    print(dbs)
    client.switch_database(dbname)

    # print("Write DataFrame")
    client.write_points(df.loc['2013-7-1':'2013-7-31'],
                        'weather',
                        protocol=protocol)
    client.write_points(df.loc['2013-8-1':'2013-8-31'],
                        'weather',
                        protocol=protocol)
    client.write_points(df.loc['2013-9-1':'2013-9-30'],
                        'weather',
                        protocol=protocol)

    print("Write DataFrame with Tags")
    # client.write_points(df, 'demo',
    #                     {'k1': 'v1', 'k2': 'v2'}, protocol=protocol)

    print("Read DataFrame")
Example #7
def main(host='localhost', port=8086):
    user = '******'
    password = '******'
    dbname = 'Stock Data'
    protocol = 'line'
    client = DataFrameClient(host, port, user, password, dbname)
    print('passed DataFrameClient creation')
    start = datetime.datetime(2015, 1, 1)
    end = datetime.datetime(2016, 1, 1)
    df = pdr.DataReader('F', 'iex', start, end)
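    # Note: newer pandas-datareader releases require an IEX Cloud API token
    # (the IEX_API_KEY environment variable) for the 'iex' data source.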
    df.index = pd.DatetimeIndex(pd.to_datetime(list(df.index)))
    print(type(df.index))
    print('Trying to create database')
    client.create_database(dbname)
    client.write_points(df, 'Stock', protocol=protocol)
    client.query("select * from stock")
    client.drop_database(dbname)
    print('Finished')
Example #8
def write_influx():
    from influxdb import DataFrameClient
    client = DataFrameClient(host='localhost', port=8086)
    client.create_database('df')
    client.switch_database('df')
    test = [{
        "measurement": "m1",
        "tags": {
            "freq": "1Min"
        },
        "time": "2009-11-10T23:00:00Z",
        "fields": {
            "mvalue1": 0.64,
            "mvalue2": 3.12
        }
    }, {
        "measurement": "m1",
        "tags": {
            "freq": "10Min"
        },
        "time": "2009-11-10T22:00:00Z",
        "fields": {
            "mvalue1": 0.62,
            "mvalue2": 3.11
        }
    }, {
        "measurement": "m2",
        "tags": {
            "freq": "1Min"
        },
        "time": "2009-11-10T22:00:00Z",
        "fields": {
            "mvalue1": 0.66,
            "mvalue1": 3.11
        }
    }]
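    # Note: DataFrameClient.write_points expects a pandas DataFrame, so the
    # point dicts above are written with the base InfluxDBClient below.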
    from influxdb import InfluxDBClient
    point_client = InfluxDBClient(host='localhost', port=8086, database='df')
    point_client.write_points(test)
    df = pd.DataFrame(data=list(range(30)),
                      index=pd.date_range(start='2014-11-16',
                                          periods=30,
                                          freq='H'),
                      columns=['0'])
    client.write_points(df, 'df', {'k1': 'v1', 'k2': 'v2'}, protocol='line')
Example #9
class DbClient:

    def __init__(self, database=None, host=None, port=None):

        if database is None:
            self.database = configParser['database']['name']
        else:
            self.database = database

        if host is None:
            self.host = configParser.get('database', 'host')
        else:
            self.host = host

        if port is None:
            self.port = configParser.get('database', 'port')
        else:
            self.port = port

        self.client = DataFrameClient(host=self.host, port=self.port, database=self.database)

    def save_to_db(self, df, measurement, tags=None):

        if tags is None:
            print("Write DataFrame")
            self.client.write_points(df, database=self.database, measurement=measurement, protocol='json')
        else:
            print("Write DataFrame with Tags")
            self.client.write_points(df, database=self.database, measurement=measurement, tags=tags, protocol='json')

    def fetch_from_db(self, query):
        print("Read DataFrame")
        return self.client.query(query)

    def create_db(self):
        self.client.create_database('crypto_analyzer')
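        # Note: this creates the hard-coded 'crypto_analyzer' database rather
        # than self.database, unlike drop_db below.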

    def drop_db(self):
        self.client.drop_database(self.database)

    def is_existing(self):
        result = self.client.get_list_database()
        return result is not None and len(result) > 0
Example #10
class INSERTDATA:
    def __init__(self):
        host = 'r4-influxdb.ricplt'
        self.client = DataFrameClient(host, '8086', 'root', 'root')
        self.dropdb('UEData')
        self.createdb('UEData')

    def createdb(self, dbname):
        print("Create database: " + dbname)
        self.client.create_database(dbname)
        self.client.switch_database(dbname)

    def dropdb(self, dbname):
        print("DROP database: " + dbname)
        self.client.drop_database(dbname)

    def dropmeas(self, measname):
        print("DROP MEASUREMENT: " + measname)
        self.client.query('DROP MEASUREMENT ' + measname)
Example #11
class Timeseries:
    def __init__(self,series_name):

        self.name = series_name


    def SetInfluxConnection(self, dbhost='localhost', dbport=8086, dbuser='******', dbpassword='******', dbname='test'):

        if dbhost is not None:
            self.influxdb = DataFrameClient(dbhost, dbport, dbuser, dbpassword, dbname)
            self.influxdb2 = InfluxDBClient(dbhost, dbport, dbuser, dbpassword, dbname)
            self.dbname = dbname

    def CheckDatabase(self):
        try:
            self.influxdb.create_database(self.dbname)
        except InfluxDBClientError as e:
            if str(e) == "database already exists":
                return True

        return False
Example #12
def mutilscan(host, port, path):
    """Instantiate a connection to the InfluxDB."""
    user = '******'
    password = '******'
    dbname = 'test'
    thread_count = 10
    global start
    start = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    global client
    client = DataFrameClient(host, port, user, password, dbname)
    client.create_database(dbname) 
    print("scanning. please wait...")
    
    for i in range(thread_count):
        t = influxThread(i)
        t.setDaemon(True)
        t.start()
    pathq.put(path)

    
    
Example #13
def write_db():

    cols1 = [
        "timestamp", "key", "BID_PRICE", "ASK_PRICE", "BID_SIZE", "ASK_SIZE",
        "ASK_ID", "BID_ID", "TOTAL_VOLUME", "LAST_SIZE", "TRADE_TIME",
        "QUOTE_TIME", "HIGH_PRICE", "LOW_PRICE", "BID_TICK", "CLOSE_PRICE",
        "EXCHANGE_ID", "MARGINABLE", "SHORTABLE", "ISLAND_BID_DEPRECATED",
        "ISLAND_ASK_DEPRECATED", "ISLAND_VOLUME_DEPRECATED", "QUOTE_DAY",
        "TRADE_DAY", "VOLATILITY", "DESCRIPTION", "LAST_ID", "DIGITS",
        "OPEN_PRICE", "NET_CHANGE", "HIGH_52_WEEK", "LOW_52_WEEK", "PE_RATIO",
        "DIVIDEND_AMOUNT", "DIVIDEND_YIELD", "ISLAND_BID_SIZE_DEPRECATED",
        "ISLAND_ASK_SIZE_DEPRECATED", "NAV", "FUND_PRICE", "EXCHANGE_NAME",
        "DIVIDEND_DATE", "IS_REGULAR_MARKET_QUOTE", "IS_REGULAR_MARKET_TRADE",
        "REGULAR_MARKET_LAST_PRICE", "REGULAR_MARKET_LAST_SIZE",
        "REGULAR_MARKET_TRADE_TIME", "REGULAR_MARKET_TRADE_DAY",
        "REGULAR_MARKET_NET_CHANGE", "SECURITY_STATUS", "MARK",
        "QUOTE_TIME_IN_LONG", "TRADE_TIME_IN_LONG",
        "REGULAR_MARKET_TRADE_TIME_IN_LONG"
    ]
    # with open('//home/suchita/PycharmProjects/StreamingTDA/output.csv', 'a', newline='') as c:
    #     writer = csv.DictWriter(c, fieldnames=cols1)
    #     writer.writeheader()
    dbClient = DataFrameClient('localhost', 8086, database='stockdata')
    dbClient.create_database('stockdata')
    # tagdic = {'key'}
    list_of_files = get_list_of_json_files()
    for file in list_of_files:
        print(file)
        with open(path1 + "/" + file) as f:
            data = json.load(f)
        count = len(data['content'])

        for i in range(count):
            dict2 = {**data['content'][i]}
            dict3 = create_dict(dict2, data['timestamp'])
            df = pd.DataFrame(dict3)
            print(dict3)
            dbClient.write_points(df,
                                  measurement='StockData',
                                  database='stockdata')
Example #14
def dataframe_into_influxdb(args: dict, df: pd.DataFrame, tag_columns=None):
    if tag_columns is None:
        tag_columns = ['dev-id']
    if args.get('influxdb_database') is None or args.get(
            'influxdb_measurement') is None:
        logging.debug(
            'Not saving into InfluxDB (no database or measurement name given)')
        return False
    protocol = 'line'
    client = DataFrameClient(host=args.get('influxdb_host'),
                             port=args.get('influxdb_port'),
                             username=args.get('influxdb_username'),
                             password=args.get('influxdb_password'),
                             database=args.get('influxdb_database'))
    logging.info('Create database: {}'.format(args.get('influxdb_database')))
    client.create_database(args.get('influxdb_database'))
    client.write_points(df,
                        args.get('influxdb_measurement'),
                        tag_columns=tag_columns,
                        protocol=protocol,
                        batch_size=5000)
    return True
Example #15
def write_influxdb(host, port, user, password, dbname, protocol, filename):
    client = DataFrameClient(host, port, user, password, dbname)

    data = pd.read_json(filename)
    data['timestamp'] = pd.to_datetime(data['timestamp'])
    data = data.set_index('timestamp')
    # TODO: tags still need to be written
    print("Create database: " + dbname)
    if client.create_database(dbname):
        print('database created successfully!')

    print("Write DataFrame")
    if client.write_points(data, 'transporte', protocol=protocol):
        print('data saved successfully!')
Example #16
def main(host='localhost', port=8086):
    user = '******'
    password = '******'
    dbname = 'name'
    protocol = 'line'
    # Defining the database
    client = DataFrameClient(host, port, user, password, dbname)
    # Piping the data from terminal
    a = subprocess.Popen(
        ["ps -eo user,%mem,%cpu,start"], shell=True,
        stdout=subprocess.PIPE).communicate()[0].decode("utf-8")
    # Copying the data to clipboard
    pyperclip.copy(a)
    df = pd.read_clipboard(index_col=False)
    # Defining the database columns
    df = df[["STARTED", "%CPU", "%MEM", "USER"]]
    # Writing the time in index format
    ind = pd.DatetimeIndex(data=df["STARTED"])
    df.set_index(ind, inplace=True)
    print(df)
    # Creating and writing the database into InfluxDB
    client.create_database(dbname)
    client.write_points(df, dbname, protocol=protocol, time_precision="ms")
Example #17
def test_write_to_db():
    from connector.connector import write_to_db, wait_for_influxdb
    db_host = "influxdb_test"
    db_port = 8086
    db_username = "******"
    db_password = "******"
    db_database = "test"
    #Connects to local InfluxDB
    db_client = DataFrameClient(host=db_host,
                                port=db_port,
                                username=db_username,
                                password=db_password,
                                database=db_database)
    # waits for influxdb service to be active
    wait_for_influxdb(db_client=db_client)
    #Creates local Database
    db_client.create_database('test')
    #Create testing CSV file with one mock up line
    now = datetime.now()
    one_line = str.encode(
        "adc,channel,time_stamp,value\n1,1,{},100".format(now))
    with open("testing.csv", "wb") as csvfile:
        csvfile.write(one_line)
    f = open("testing.csv")
    payload = f.read()
    payload = str.encode(payload)
    write_to_db(payload=payload, db_client=db_client)
    written = db_client.query('SELECT * FROM "measurements"')
    dataframe = written['measurements']
    value = dataframe['mV'][0]
    #Remove mockup CSV file
    os.remove("testing.csv")
    #Deletes mockup DB
    db_client.drop_database('test')
    assert value == 100 * 0.125
    #bug : dataframe.index.values[0] has more precision than np.datetime64(now)
    #assert dataframe.index.values[0] == np.datetime64(now)
Example #18
def main():

    # Read the CSV into a datetime-indexed DataFrame
    df = pd.read_csv(FILENAME,
                     infer_datetime_format=True,
                     parse_dates=['datetime'],
                     index_col='datetime')
    df = df.resample(RESAMPLE).mean().interpolate('time').dropna()

    df = df.shift(periods=6, freq='H')

    influx = DataFrameClient(HOST, PORT)

    try:
        influx.create_database(DATABASE)
    except Exception:
        print("Database already exists.")

    influx.write_points(df,
                        MEASUREMENT,
                        batch_size=BATCH_SIZE,
                        database=DATABASE)

    print("Data successfully published to InfluxDB")
Example #19
def main(host='localhost', port=8086):
    """Instantiate the connection to the InfluxDB client."""
    user = '******'
    password = '******'
    dbname = 'demo'
    # Temporarily avoid line protocol time conversion issues #412, #426, #431.
    protocol = 'json'

    client = DataFrameClient(host, port, user, password, dbname)

    print("Create pandas DataFrame")
    df = pd.DataFrame(data=list(range(30)),
                      index=pd.date_range(start='2014-11-16',
                                          periods=30,
                                          freq='H'),
                      columns=['0'])

    print("Create database: " + dbname)
    client.create_database(dbname)

    print("Write DataFrame")
    client.write_points(df, 'demo', protocol=protocol)

    print("Write DataFrame with Tags")
    client.write_points(df,
                        'demo', {
                            'k1': 'v1',
                            'k2': 'v2'
                        },
                        protocol=protocol)

    print("Read DataFrame")
    client.query("select * from demo")

    print("Delete database: " + dbname)
    client.drop_database(dbname)
Example #20
def main(host='localhost', port=8086):
    user = '******'
    password = '******'
    dbname = 'example'

    client = DataFrameClient(host, port, user, password, dbname)

    print("Create pandas DataFrame")
    df = pd.DataFrame(data=list(range(30)),
                      index=pd.date_range(start='2014-11-16',
                                          periods=30,
                                          freq='H'))

    print("Create database: " + dbname)
    client.create_database(dbname)

    print("Write DataFrame")
    client.write_points(df, 'demo')

    print("Read DataFrame")
    client.query("select * from demo")

    print("Delete database: " + dbname)
    client.drop_database(dbname)
Example #21
class influx_logging():
    def __init__(self, host, port, user, password, dbname):
        self.host = host
        self.port = port
        self.user = '******'
        self.password = '******'
        self.dbname = 'demo'
        self.protocol = 'line'
        self.client = DataFrameClient(self.host, self.port, self.user,
                                      self.password, self.dbname)
        pass

    def main(self, actual_joint_state, encoded_joint_state):
        print("Create database: " + self.dbname)
        self.client.create_database(self.dbname)

        print("Write DataFrame with Tags")
        df = pd.DataFrame(data=list(range(1)),
                          index=pd.date_range(start=datetime.datetime.utcnow(),
                                              periods=1,
                                              freq='H'),
                          columns=['num_data_df'])
        self.client.write_points(df,
                                 'actual_joint_state', {
                                     'joint1': actual_joint_state[0],
                                     'joint2': actual_joint_state[1]
                                 },
                                 protocol=self.protocol)
        self.client.write_points(df,
                                 'encoded_joint_state', {
                                     'joint1': encoded_joint_state[0],
                                     'joint2': encoded_joint_state[1]
                                 },
                                 protocol=self.protocol)
        print("Delete database: " + self.dbname)
        self.client.drop_database(self.dbname)
Example #22
def write_influxdb(host, port, user, password, dbname, protocol, filename):
    client = DataFrameClient(host, port, user, password, dbname)

    data = pd.read_json(filename)
    data['timestamp'] = pd.to_datetime(data['timestamp'])
    data = data.set_index('timestamp')
    # use tag_columns=None for the tags
    # print('#######################################')
    # print('data to be written to influx: ' + str(data))

    print("Create database: " + dbname)
    if client.create_database(dbname):
        print('database created successfully!')

    print("Write DataFrame")
    if client.write_points(data, 'transporte', protocol=protocol):
        print('data saved successfully!')
Example #23
        else:
            sys.exit("Invalid region: '%s'" % args.region)

        influxdb_client = DataFrameClient(host=influxdb_host,
                                          port=influxdb_port,
                                          username=influxdb_user,
                                          password=influxdb_password,
                                          database=influxdb_dbname)

        dbs = influxdb_client.get_list_database()
        create_db = True
        for db in dbs:
            if db['name'] == influxdb_dbname:
                create_db = False
                break

        if create_db:
            influxdb_client.create_database(influxdb_dbname)

    options = {"access_log": access_log,
                "request_time_threshold": request_time_threshold,
               "log_datetime_format": log_datetime_format,
               "plot_chart": plot_chart,
               "uri_white_list": uri_white_list,
               "statsd": statsd,
               "stats_client": stats_client,
               "influxdb_client": influxdb_client}

    log_analysis = LogAnalysis(options=options)
    log_analysis.run()
Example #24
import paho.mqtt.client as mqtt
import json
import datetime
import pandas as pd
from influxdb import DataFrameClient

# dbclient
dbclient = DataFrameClient(host='nangluong.iotmind.vn',
                           port=8086,
                           username='******',
                           password='******',
                           database='SCG')
dbclient.create_database('SCG')


# mqtt on connect
def on_connect(client, userdata, flags, rc):
    client.subscribe('data/plc')


# mqtt on message
def on_message(client, userdata, msg):
    topic = msg.topic
    content = msg.payload
    objpayload = json.loads(content)
    data = objpayload["d"]
    data = pd.DataFrame(data)

    # split and rename
    data[['device', 'parameter']] = data.tag.str.split(':', expand=True)
    data[['parameter_1',
Example #25
df.drop( "x", axis = 1, inplace = True )
df.drop( "xx", axis = 1, inplace = True )
df.drop( "Day", axis = 1, inplace = True )
df.drop( "Hour", axis = 1, inplace = True )

np.set_printoptions( edgeitems = 5, suppress = True, linewidth = None, threshold = None )
plt.style.use( "ggplot" )

os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'

seed = 3
np.random.seed( seed )
set_random_seed( seed )

client = DataFrameClient( host = "database", port = 8086 )
client.create_database( "tecpro" )

plt.style.use( "ggplot" )

client.write_points( df, "bees", database = "tecpro" )

client.switch_database( "tecpro" )

query = """
	select ActivityLevel
	from bees
	order by time
"""

dfn = client.query( query )
Example #26
class InfluxClient(object):
    def __init__(self, host, port, username, password, database):
        self.client = DataFrameClient(host=host,
                                      port=port,
                                      username=username,
                                      password=password,
                                      database=database)
        self.database = database
        if database is not None and database not in self.GetDatabases():
            self.CreateDatabase(database)
        self.query_errors = {}

    def GetDatabases(self):
        return [db['name'] for db in self.client.get_list_database()]

    def CreateDatabase(self, database_name):
        self.client.create_database(database_name)

    def DeleteAllSeriesInDatabase(self):
        query = 'Drop series WHERE "" = \'\''
        self.RunQuery(query, 'everything gets deleted')

    @staticmethod
    def GetIdentifier(site_code, var_code, qc_id, source_id, method_id):
        """
        InfluxDB Identifiers for iUTAH only:
        For the following time series:
            Turbidity; Campbell_OBS-3+_Turb, 1, 67, 2
        Format as 'wof_{site_code}_{var_code}_{qc_id}_{source_id}_{method_id}'
            wof_PUPP2S_Campbell_OBS_3+_Turb_Raw_1_67_2
        Encode as a URI (to remove invalid characters while keeping uniqueness)
            wof_PUPP2S_Campbell_OBS_3%2B_Turb_1_67_2
        Replace all non-word characters with an underscore
            wof_PUPP2S_Campbell_OBS_3_2B_Turb_1_67_2

        Example python code:
        def GetIdentifier(site_code, var_code, qc_id, source_id, method_id):
            pre_identifier = 'wof_{}_{}_{}_{}_{}'.format(site_code, var_code, qc_id, source_id, method_id)
            return re.sub('[\W]', '_', urllib.quote(pre_identifier, safe=''))
        """

        pre_identifier = 'wof_{}_{}_{}_{}_{}'.format(site_code, var_code,
                                                     qc_id, source_id,
                                                     method_id)
        return re.sub('[\W]', '_', urllib.quote(pre_identifier, safe=''))

    @staticmethod
    def GetUSGSIdentifier(site_code, variable, source, param):
        pre_identifier = 'usgs_{}_{}_{}_{}'.format(site_code, variable, source,
                                                   param)
        return re.sub('[\W]', '_',
                      urllib.quote(" ".join(pre_identifier.split()), safe=''))

    @staticmethod
    def GetEnviroDiyIdentifier(result_uuid):
        return 'uuid_{}'.format(result_uuid.replace('-', '_'))

    @staticmethod
    def GetIdentifierBySeriesDetails(series):
        if series is None:
            return None
        return InfluxClient.GetIdentifier(series.site_code,
                                          series.variable_code, series.qc_id,
                                          series.source_id, series.method_id)

    @staticmethod
    def GetiUtahUrlQueryString(identifier):
        return 'http://iutahinflux.uwrl.usu.edu:8086/query?u=web_client&p=password&db=iutah&q=' \
               'SELECT%20%2A%20FROM%20%22{}%22'.format(identifier)

    def RunQuery(self, query_string, identifier):
        try:
            return self.client.query(query_string, database=self.database)
        except InfluxDBClientError as e:
            print 'Query Error for {}: {}'.format(identifier, e.message)
            if identifier not in self.query_errors.keys():
                self.query_errors[identifier] = []
            self.query_errors[identifier].append(e.message)
        return None

    def AddSeriesToDatabase(self, series):
        if series is None:
            return None
        identifier = self.GetIdentifierBySeriesDetails(series)
        print 'Writing data points for ' + identifier
        write_success = self.client.write_points(series.datavalues,
                                                 identifier,
                                                 protocol='json',
                                                 batch_size=10000)
        if not write_success:
            print 'Write failed for series with identifier {}'.format(
                identifier)
        else:
            print '{} Data points written for time series with identifier {}'.format(
                len(series.datavalues), identifier)

    def AddDataFrameToDatabase(self, datavalues, identifier):
        try:
            print 'Writing {} data points for {}'.format(
                len(datavalues), identifier)
            write_success = self.client.write_points(datavalues,
                                                     identifier,
                                                     protocol='json',
                                                     batch_size=10000)
            if not write_success:
                print 'Write failed for series with identifier {}'.format(
                    identifier)
            else:
                print '{} Data points written for time series with identifier {}'.format(
                    len(datavalues), identifier)
            return len(datavalues)
        except InfluxDBClientError as e:
            print 'Error while writing to database {}: {}'.format(
                identifier, e.message)
            print datavalues
            return 0

    def GetTimeSeriesBySeriesDetails(self, series, start='', end=''):
        return self.GetTimeSeries(series.site_code, series.variable_code,
                                  series.qc_code, series.source_code,
                                  series.method_code, start, end)

    def GetTimeSeries(self,
                      site_code,
                      var_code,
                      qc_code,
                      source_code,
                      method_code,
                      start='',
                      end=''):
        identifier = self.GetIdentifier(site_code, var_code, qc_code,
                                        source_code, method_code)
        print 'Getting time series for ' + identifier
        query_string = 'Select {select} from {series}'.format(
            select='*', series=identifier)
        if len(start) > 0:
            query_string += ' where time > \'{}\''.format(start)
        if len(end) > 0 and len(start) > 0:
            query_string += ' and time < \'{}\''.format(end)
        elif len(end) > 0:
            query_string += ' where time < \'{}\''.format(end)
        return self.RunQuery(query_string, identifier)

    def GetTimeSeriesStartTime(self, site_code, var_code, qc_code, source_code,
                               method_code):
        identifier = self.GetIdentifier(site_code, var_code, qc_code,
                                        source_code, method_code)
        print 'Getting start time for ' + identifier
        query_string = 'Select first(DataValue), time from {identifier}'.format(
            identifier=identifier)
        result = self.RunQuery(query_string, identifier)
        if result is not None and len(result) == 1:
            dataframe = result[identifier]  # type: pandas.DataFrame
            return dataframe.first_valid_index().to_pydatetime()
        return None

    def GetTimeSeriesEndTime(self, identifier):
        query_string = 'Select last(DataValue), time from {identifier}'.format(
            identifier=identifier)
        result = self.RunQuery(query_string, identifier)
        if result is not None and len(result) == 1:
            dataframe = result[identifier]  # type: pandas.DataFrame
            return dataframe.first_valid_index().to_pydatetime()
        return None
Example #27
            sys.exit("Invalid region: '%s'" % args.region)

        influxdb_client = DataFrameClient(host=influxdb_host,
                                          port=influxdb_port,
                                          username=influxdb_user,
                                          password=influxdb_password,
                                          database=influxdb_dbname)

        dbs = influxdb_client.get_list_database()
        create_db = True
        for db in dbs:
            if db['name'] == influxdb_dbname:
                create_db = False
                break

        if create_db:
            influxdb_client.create_database(influxdb_dbname)

    options = {
        "access_log": access_log,
        "request_time_threshold": request_time_threshold,
        "log_datetime_format": log_datetime_format,
        "plot_chart": plot_chart,
        "uri_white_list": uri_white_list,
        "statsd": statsd,
        "stats_client": stats_client,
        "influxdb_client": influxdb_client
    }

    log_analysis = LogAnalysis(options=options)
    log_analysis.run()
Example #28
class Metatrader:

    def __init__(self, host=None, real_volume=None, localtime=True, dbtype=None,dbhost=None,dbport=None,dbpass=None,dbuser=None,dbname=None):
        self._api = Functions(host)
        self.real_volume = real_volume or False
        self.localtime = localtime 
        self._utc_timezone = timezone('UTC')
        self._my_timezone = get_localzone()
        self._utc_brocker_offset = self.__utc_brocker_offset()
        # db settings
        self.dbtype = dbtype or 'SQLITE' # SQLITE OR INFLUXDB
        if dbtype == "INFLUXDB":
            # if dbtype is influxdb
            self.dbhost = dbhost or 'localhost'
            self.dbport = dbport or '8086'
            self.dbuser = dbuser or 'root'
            self.dbpass = dbpass or 'root'
            self.dbname = dbname or 'ejtraderMT'
            self.protocol = 'line'
            self.client = DataFrameClient(self.dbhost, self.dbport, self.dbuser, self.dbpass, self.dbname)
            self.client.create_database(self.dbname)
         
    def balance(self):
        return self._api.Command(action="BALANCE")

    def accountInfo(self):
        return self._api.Command(action="ACCOUNT")

    def positions(self):
        return self._api.Command(action="POSITIONS")

    def orders(self):
        return self._api.Command(action="ORDERS")

    def trade(self, symbol, actionType, volume, stoploss, takeprofit, price, deviation):
        self._api.Command(
            action="TRADE",
            actionType=actionType,
            symbol=symbol,
            volume=volume,
            stoploss=stoploss,
            takeprofit=takeprofit,
            price=price,
            deviation=deviation
        )

    def buy(self, symbol, volume, stoploss, takeprofit, deviation=5):
        price = 0
        self.trade(symbol, "ORDER_TYPE_BUY", volume,
                   stoploss, takeprofit, price, deviation)

    def sell(self, symbol, volume, stoploss, takeprofit, deviation=5):
        price = 0
        self.trade(symbol, "ORDER_TYPE_SELL", volume,
                   stoploss, takeprofit, price, deviation)

    def buyLimit(self, symbol, volume, stoploss, takeprofit, price=0, deviation=5):
        self.trade(symbol, "ORDER_TYPE_BUY_LIMIT", volume,
                   stoploss, takeprofit, price, deviation)

    def sellLimit(self, symbol, volume, stoploss, takeprofit, price=0, deviation=5):
        self.trade(symbol, "ORDER_TYPE_SELL_LIMIT", volume,
                   stoploss, takeprofit, price, deviation)

    def buyStop(self, symbol, volume, stoploss, takeprofit, price=0, deviation=5):
        self.trade(symbol, "ORDER_TYPE_BUY_STOP", volume,
                   stoploss, takeprofit, price, deviation)

    def sellStop(self, symbol, volume, stoploss, takeprofit, price=0, deviation=5):
        self.trade(symbol, "ORDER_TYPE_SELL_STOP", volume,
                   stoploss, takeprofit, price, deviation)

    def cancel_all(self):
        orders = self.orders()

        if 'orders' in orders:
            for order in orders['orders']:
                self.CancelById(order['id'])

    def close_all(self):
        positions = self.positions()

        if 'positions' in positions:
            for position in positions['positions']:
                self.CloseById(position['id'])

    def positionModify(self, id, stoploss, takeprofit):
        self._api.Command(
            action="TRADE",
            actionType="POSITION_MODIFY",
            id=id,
            stoploss=stoploss,
            takeprofit=takeprofit
        )

    def ClosePartial(self, id, volume):
        self._api.Command(
            action="TRADE",
            actionType="POSITION_PARTIAL",
            id=id,
            volume=volume
        )

    def CloseById(self, id):
        self._api.Command(
            action="TRADE",
            actionType="POSITION_CLOSE_ID",
            id=id
        )

    def CloseBySymbol(self, symbol):
        self._api.Command(
            action="TRADE",
            actionType="POSITION_CLOSE_SYMBOL",
            symbol=symbol
        )

    def orderModify(self, id, stoploss, takeprofit, price):
        self._api.Command(
            action="TRADE",
            actionType="ORDER_MODIFY",
            id=id,
            stoploss=stoploss,
            takeprofit=takeprofit,
            price=price
        )

    def CancelById(self, id):
        self._api.Command(
            action="TRADE",
            actionType="ORDER_CANCEL",
            id=id
        )

    def __utc_brocker_offset(self):
        utc = datetime.now(self._utc_timezone).strftime('%Y-%m-%d %H:%M:%S')
        try:
            broker = self.accountInfo()
            broker = datetime.strptime(broker['time'], '%Y.%m.%d %H:%M:%S')
        except KeyError:
            raise ConnectionError("MetaTrader 5 server is disconnected")
        utc = datetime.strptime(utc, '%Y-%m-%d %H:%M:%S')

        duration = broker - utc
        duration_in_s = duration.total_seconds()
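        # divmod(duration_in_s, 60)[0] yields whole minutes (the name 'hour'
        # is a misnomer); the value returned below is the broker-to-UTC
        # offset rounded to the minute, expressed in seconds.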
        hour = divmod(duration_in_s, 60)[0]
        seconds = int(hour)*60
        return seconds

    def _price(self):
        connect = self._api.live_socket()
        while True:
            price = connect.recv_json()
            try:
                price = price['data']
                price = pd.DataFrame([price]) 
                price = price.set_index([0])
                price.index.name = 'date'
                if self._allchartTF == 'TS':
                    price.index = pd.to_datetime(price.index, unit='ms')
                    price.columns = ['bid', 'ask']
                    self._priceQ.put(price)
                elif self._allchartTF == 'TICK':
                    price.index = pd.to_datetime(price.index, unit='ms')
                    price.columns = ['type','bid', 'ask','last','volume']
                    self._priceQ.put(price) 
                else:
                    if self.real_volume:
                        del price[5]
                    else:
                        del price[6]
                    price.index = pd.to_datetime(price.index, unit='s')
                    price.columns = ['open', 'high', 'low','close', 'volume','spread']
                    self._priceQ.put(price)

            except KeyError:
                pass

    def _start_thread_price(self):
        self._priceQ = Queue()
        t = Thread(target=self._price, daemon=True)
        t.start()

    def _start_thread_event(self):
        self._eventQ = Queue()
        t = Thread(target=self._event, daemon=True)
        t.start()

    def _event(self):
        connect = self._api.streaming_socket()
        while True:
            event = connect.recv_json()
            try:
                event = event['request']
                event = pd.DataFrame(event, index=[0])
                self._eventQ.put(event)
            except KeyError:
                pass

    def price(self, symbol, chartTF):
        self._api.Command(action="RESET")
        self._allsymbol_ = symbol
        self._allchartTF = chartTF
        for active in symbol:
            self._api.Command(action="CONFIG", symbol=active, chartTF=chartTF) 
        self._start_thread_price()        
        time.sleep(0.5)
        return self._priceQ.get()

    def event(self, symbol, chartTF):
        self._api.Command(action="RESET")
        self._allsymbol_ = symbol
        self._allchartTF = chartTF
        for active in symbol:
            self._api.Command(action="CONFIG",  symbol=active, chartTF=chartTF)          
        
        self._start_thread_event()
        time.sleep(0.5)
        return self._eventQ.get()

    # convert a 'dd/mm/YYYY' date string to a Unix timestamp
    def _date_to_timestamp(self, s):
        return time.mktime(datetime.strptime(s, "%d/%m/%Y").timetuple())

    # convert a datetime object to a Unix timestamp
    def datetime_to_timestamp(self, s):
        return time.mktime(s.timetuple())

    def _date_to_timestamp_broker(self):
        brokertime = time.mktime(datetime.strptime(self.accountInfo()['time'], '%Y.%m.%d %H:%M:%S').timetuple())
        return round(brokertime)

    def _brokerTimeCalculation(self,s):
        delta = timedelta(seconds = s)
        broker = datetime.strptime(self.accountInfo()['time'], '%Y.%m.%d %H:%M:%S')
        result = broker - delta
        return result

    def _timeframe_to_sec(self, timeframe):
        # Timeframe dictionary
        TIMECANDLE = {
            "M1": 60,
            "M2": 120,
            "M3": 180,
            "M4": 240,
            "M5": 300,
            "M15": 900,
            "M30": 1800,
            "H1": 3600,
            "H4": 14400,
            "D1": 86400,
            "W1": 604800,
            "MN": 2629746,

        }
        return TIMECANDLE[timeframe]

    def _setlocaltime_dataframe(self, df):
        df.index = df.index.tz_localize(self._utc_brocker_offset)
        df.index = df.index.tz_convert(self._my_timezone)
        df.index = df.index.tz_localize(None)
        return df

    def history(self,symbol,chartTF=None,fromDate=None,toDate=None,database=None,dataframe=True):
        self.chartTF = chartTF
        self.fromDate = fromDate
        self.toDate = toDate
        self._historyQ = Queue()
        self.dataframe = dataframe
        if isinstance(symbol, tuple):
            for symbols in symbol:
                self._symbol = symbols
        else:
            self._symbol = symbol
        if chartTF:
            if database:
                try:
                    start(self.__historyThread_save, repeat=1, max_threads=20)
                except Exception:
                    print("Error: unable to start History thread")
            else:
                try:
                    start(self._historyThread, repeat=1, max_threads=20)
                except Exception:
                    print("Error: unable to start History thread")
                return self._historyQ.get()
        else:
            q = DictSQLite('history')
            if isinstance(symbol, list):
                try:
                    if self.dbtype == 'SQLITE':
                        df = q[f'{self._symbol[0]}']
                    else:
                        df = self.client.query(f"select * from {self._symbol[0]}")
                        df = df[self._symbol[0]]
                        df.index = df.index.tz_localize(None)
                        df.index.name = 'date'
                except KeyError:
                    df = f" {self._symbol[0]}  isn't on database"
                    pass 
            else:
                try:
                    if self.dbtype == 'SQLITE':
                        df = q[f'{self._symbol}']
                    else:
                        df = self.client.query(f"select * from {self._symbol}")
                        df = df[self._symbol]
                        df.index = df.index.tz_localize(None)
                        df.index.name = 'date'
                except KeyError:
                    df = f" {self._symbol}  isn't on database"
                    pass
            return df

    def _historyThread(self,data):
        actives = self._symbol
        chartTF = self.chartTF
        fromDate = self.fromDate
        toDate  = self.toDate
        main = pd.DataFrame()
        current = pd.DataFrame()
        if(chartTF == 'TICK'):
            chartConvert = 60
        else:
            chartConvert = self._timeframe_to_sec(chartTF)
        for active in actives:
            # the first symbol on list is the main and the rest will merge
            if active == actives[0]:
                # get data
                if fromDate and toDate:
                    data = self._api.Command(action="HISTORY", actionType="DATA", symbol=active, chartTF=chartTF,
                                        fromDate=self._date_to_timestamp(fromDate), toDate=self._date_to_timestamp(toDate))
                elif isinstance(fromDate, int):
                    data = self._api.Command(action="HISTORY", actionType="DATA", symbol=active, chartTF=chartTF,
                                        fromDate=self.datetime_to_timestamp(self._brokerTimeCalculation((10800 + chartConvert) + fromDate * chartConvert - chartConvert) ))
                elif isinstance(fromDate, str) and toDate==None:
                    data = self._api.Command(action="HISTORY", actionType="DATA", symbol=active, chartTF=chartTF,
                                        fromDate=self._date_to_timestamp(fromDate),toDate=self._date_to_timestamp_broker())
                else:
                    data = self._api.Command(action="HISTORY", actionType="DATA", symbol=active, chartTF=chartTF,
                                        fromDate=self.datetime_to_timestamp(self._brokerTimeCalculation((10800 + chartConvert) + 100 * chartConvert - chartConvert) ))
                self._api.Command(action="RESET")
                if self.dataframe:
                    try:
                        main = pd.DataFrame(data['data'])
                        main = main.set_index([0])
                        main.index.name = 'date'
                        

                        # TICK DATA
                        if(chartTF == 'TICK'):
                            main.columns = ['bid', 'ask']
                            main.index = pd.to_datetime(main.index, unit='ms')
                        else:
                            main.index = pd.to_datetime(main.index, unit='s')
                            if self.real_volume:
                                del main[5]
                            else:
                                del main[6]
                            main.columns = ['open', 'high', 'low',
                                            'close', 'volume', 'spread']
                    except KeyError:
                        pass
            else:
                 # get data
                if fromDate and toDate:
                    data = self._api.Command(action="HISTORY", actionType="DATA", symbol=active, chartTF=chartTF,
                                        fromDate=self._date_to_timestamp(fromDate), toDate=self._date_to_timestamp(toDate))
                elif isinstance(fromDate, int):
                    data = self._api.Command(action="HISTORY", actionType="DATA", symbol=active, chartTF=chartTF,
                                        fromDate=self.datetime_to_timestamp(self._brokerTimeCalculation((10800 + chartConvert) + fromDate * chartConvert - chartConvert) ))
                elif isinstance(fromDate, str) and toDate==None:
                    data = self._api.Command(action="HISTORY", actionType="DATA", symbol=active, chartTF=chartTF,
                                        fromDate=self._date_to_timestamp(fromDate),toDate=self._date_to_timestamp_broker())
                else:
                    data = self._api.Command(action="HISTORY", actionType="DATA", symbol=active, chartTF=chartTF,
                                        fromDate=self.datetime_to_timestamp(self._brokerTimeCalculation((10800 + chartConvert) + 100 * chartConvert - chartConvert) ))

                self._api.Command(action="RESET")
                if self.dataframe:
                    try:
                        current = pd.DataFrame(data['data'])
                        current = current.set_index([0])
                        current.index.name = 'date'
                        active = active.lower()
                        # TICK DATA
                        if(chartTF == 'TICK'):
                            current.index = pd.to_datetime(current.index, unit='ms')
                            current.columns = [f'{active}_bid', f'{active}_ask']
                        else:
                            current.index = pd.to_datetime(current.index, unit='s')
                            if self.real_volume:
                                del current[5]
                            else:
                                del current[6]
                
                            current.columns = [f'{active}_open', f'{active}_high',
                                            f'{active}_low', f'{active}_close', f'{active}_volume', f'{active}_spread']

                        main = pd.merge(main, current, how='inner',
                                        left_index=True, right_index=True)
                    except KeyError:
                        pass
        if self.dataframe:
            try:
                if self.localtime:
                    self._setlocaltime_dataframe(main)
            except AttributeError:
                pass
            main = main.loc[~main.index.duplicated(keep='first')]
        else:
            main = data
        self._historyQ.put(main)

    def __historyThread_save(self,data):
            actives = self._symbol
            chartTF = self.chartTF
            fromDate = self.fromDate
            toDate  = self.toDate
            main = pd.DataFrame()
            current = pd.DataFrame()
            self._count = 0
            try:
                os.makedirs('DataBase')
            except OSError:
                pass
            # count data
            start_date = datetime.strptime(fromDate, "%d/%m/%Y")
            if not toDate:
                end_date = datetime.now() #date(2021, 1, 1)
            else:
                end_date = datetime.strptime(toDate, "%d/%m/%Y")

            delta = timedelta(days=1)
            delta2 = timedelta(days=1)
            diff_days = start_date - end_date
            days_count = diff_days.days
            pbar = tqdm(total=abs(days_count))
            appended_data = []
            while start_date <= end_date:
                pbar.update(delta.days)
                fromDate = start_date.strftime("%d/%m/%Y")
                toDate = start_date
                toDate +=  delta2
                toDate = toDate.strftime("%d/%m/%Y")  

                if(chartTF == 'TICK'):
                    chartConvert = 60
                else:
                    chartConvert = self._timeframe_to_sec(chartTF)
                for active in actives:
                    self._count += 1 
                   
                    # the first symbol on list is the main and the rest will merge
                    if active == actives[0]:
                        self._active_name = active
                        # get data
                        if fromDate and toDate:
                            data = self._api.Command(action="HISTORY", actionType="DATA", symbol=active, chartTF=chartTF,
                                                fromDate=self._date_to_timestamp(fromDate), toDate=self._date_to_timestamp(toDate))
                        elif isinstance(fromDate, int):
                            data = self._api.Command(action="HISTORY", actionType="DATA", symbol=active, chartTF=chartTF,
                                                fromDate=self.datetime_to_timestamp(self._brokerTimeCalculation((10800 + chartConvert) + fromDate * chartConvert - chartConvert) ))
                        elif isinstance(fromDate, str) and toDate==None:
                            data = self._api.Command(action="HISTORY", actionType="DATA", symbol=active, chartTF=chartTF,
                                                fromDate=self._date_to_timestamp(fromDate),toDate=self._date_to_timestamp_broker())
                        else:
                            data = self._api.Command(action="HISTORY", actionType="DATA", symbol=active, chartTF=chartTF,
                                                fromDate=self.datetime_to_timestamp(self._brokerTimeCalculation((10800 + chartConvert) + 100 * chartConvert - chartConvert) ))
                        self._api.Command(action="RESET")
                        try:
                            main = pd.DataFrame(data['data'])
                            main = main.set_index([0])
                            main.index.name = 'date'
                            

                            # TICK DATA
                            if(chartTF == 'TICK'):
                                main.columns = ['bid', 'ask']
                                main.index = pd.to_datetime(main.index, unit='ms')
                            else:
                                main.index = pd.to_datetime(main.index, unit='s')
                                if self.real_volume:
                                    del main[5]
                                else:
                                    del main[6]
                                main.columns = ['open', 'high', 'low',
                                                'close', 'volume', 'spread']
                        except KeyError:
                            pass
                    else:
                        # get data
                        if fromDate and toDate:
                            data = self._api.Command(action="HISTORY", actionType="DATA", symbol=active, chartTF=chartTF,
                                                fromDate=self._date_to_timestamp(fromDate), toDate=self._date_to_timestamp(toDate))
                        elif isinstance(fromDate, int):
                            data = self._api.Command(action="HISTORY", actionType="DATA", symbol=active, chartTF=chartTF,
                                                fromDate=self.datetime_to_timestamp(self._brokerTimeCalculation((10800 + chartConvert) + fromDate * chartConvert - chartConvert) ))
                        elif isinstance(fromDate, str) and toDate==None:
                            data = self._api.Command(action="HISTORY", actionType="DATA", symbol=active, chartTF=chartTF,
                                                fromDate=self._date_to_timestamp(fromDate),toDate=self._date_to_timestamp_broker())
                        else:
                            data = self._api.Command(action="HISTORY", actionType="DATA", symbol=active, chartTF=chartTF,
                                                fromDate=self.datetime_to_timestamp(self._brokerTimeCalculation((10800 + chartConvert) + 100 * chartConvert - chartConvert) ))

                        self._api.Command(action="RESET")
                        try:
                            current = pd.DataFrame(data['data'])
                            current = current.set_index([0])
                            current.index.name = 'date'
                            # Prefix column names with the symbol so frames can be merged
                            active = active.lower()
                            # TICK DATA
                            if chartTF == 'TICK':
                                current.index = pd.to_datetime(current.index, unit='ms')
                                current.columns = [f'{active}_bid', f'{active}_ask']
                            else:
                                current.index = pd.to_datetime(current.index, unit='s')
                                # Drop the unused volume column: 5 when real volume is requested, 6 otherwise
                                if self.real_volume:
                                    del current[5]
                                else:
                                    del current[6]
                                current.columns = [f'{active}_open', f'{active}_high',
                                                   f'{active}_low', f'{active}_close',
                                                   f'{active}_volume', f'{active}_spread']

                            # Inner-join on the timestamp index so only bars shared by all symbols remain
                            main = pd.merge(main, current, how='inner',
                                            left_index=True, right_index=True)
                        except KeyError:
                            pass
                
                # Drop duplicate timestamps, keeping the first occurrence
                main = main.loc[~main.index.duplicated(keep='first')]
                appended_data.append(main)

                start_date += delta
            pbar.close()
            df = pd.concat(appended_data)
            # Persist the assembled history on a worker thread
            start(self._save_to_db, data=[df], repeat=1, max_threads=20)



    def _save_to_db(self, df):
        if self.dbtype == 'SQLITE':
            q = DictSQLite('history', multithreading=True)
            try:
                if self.localtime:
                    self._setlocaltime_dataframe(df)
            except AttributeError:
                pass
            q[f"{self._active_name}"] = df
        else:
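            # The non-SQLITE branch is truncated in the source; a minimal
            # sketch of a plausible fallback, assuming an InfluxDB backend --
            # the attribute, measurement name and protocol below are
            # assumptions, not the original code:
            self._api_influxdb.write_points(df,
                                            f"{self._active_name}",
                                            protocol='json')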
    def test_create_database_fails(self):
        """Test create database fail for TestInfluxDBClient object."""
        cli = DataFrameClient(database='db')
        with self.assertRaises(Exception):
            with _mocked_session(cli, 'post', 401):
                cli.create_database('new_db')
username = '******'
password = '******'
database = 'demo'

#Instantiate the client and establish a connection to the database
client = DataFrameClient(host = host,
                         port = port,
                         username = username,
                         password = password, 
                         database = database)

#Load Data you want into pandas dataframe
data = pd.read_csv("name_of_file.csv")

#Create new database. Replace 'demo' with any name you want for the database
client.create_database('demo')

#Delete database. Replace 'demo' with any name you want for the database
client.drop_database('demo')


#Write data to database
#First make sure the data is correctly transformed
#1st make sure the timestamp column is in datetime format
#2nd make sure the timestamp column is set as index
#Examples:

#Transform timestamp column into datetime format
data['timestamp'] = pd.to_datetime(data['timestamp'])

#Set timestamp as index
data = data.set_index('timestamp')
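
#With the timestamp index in place the frame can be written out and read back.
#A minimal sketch -- the measurement name 'demo' and the json protocol are
#assumptions carried over from the other examples here:
client.write_points(data, 'demo', protocol='json')
result = client.query("select * from demo")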
Example No. 32
0
def main():
    """
	MQTT Client connector in charge of receiving the 10 Hz csv files,
	perform calculations and store them in the database
	"""
    #influxdb information for connection -- right now is local
    db_host = 'influxdb'  #'localhost'
    db_port = 8086
    db_username = '******'
    db_password = '******'
    database = 'testing'

    #info of the MQTT broker
    host = "10.128.189.236"  #'iot.eclipse.org'
    port = 1883
    keepalive = 30
    client_id = None  #client_id is randomly generated

    #Add Location Topics to this array in order to allow for multiple publishers
    topic_locations = ["usa/quincy/1", "usa/quincy/2"]

    commsTopic = "communication/influxdbUpdate"

    def on_connect(client, userdata, flags, rc):
        # Callback for when the client receives a CONNACK response from the server
        if rc == 0:
            print("Connected with result code {}".format(rc))
            # Subscribe to every publisher topic with QoS 2
            for topic in topic_locations:
                client.subscribe(topic, 2)
        else:
            print("Error in connection")

    def on_message(client, userdata, msg):
        # The callback for when a PUBLISH message is received from the server.
        #Detects an arriving message (CSV) and writes it in the db
        payload = msg.payload
        #print(payload)
        try:
            dataEnteredArray = write_to_db(payload, db_client)
            #Adding the location of the publisher to the information that will be sent to calculator
            locationAndDataArray = [msg.topic, dataEnteredArray]
            #Publishing index information on new data added to Influx to Calculator microservice
            client.publish(commsTopic, json.dumps(locationAndDataArray))
        except Exception as e:  # TODO: narrow this to the expected exceptions
            print("Error writing to InfluxDB: {}".format(e))

    def on_publish(client, userdata, result):
        # Client's callback fired after a message is published
        print("Comms Data Sent")

    # connects to database and creates new database
    db_client = DataFrameClient(host=db_host,
                                port=db_port,
                                username=db_username,
                                password=db_password,
                                database=database)
    # waits for influxdb service to be active
    wait_for_influxdb(db_client=db_client)
    db_client.create_database(database)

    #Establish connection with broker and start receiving messages
    # Params -> Client(client_id="", clean_session=True, userdata=None, protocol=MQTTv311, transport="tcp")
    # clean_session=False would let a fixed client_id resume its session after a
    # lost connection; since client_id is randomly generated here, we use True
    client = mqtt.Client(client_id=client_id, clean_session=True)
    client.on_connect = on_connect
    client.on_message = on_message
    client.on_publish = on_publish
    client.connect(host, port, keepalive)

    # Blocking call that processes network traffic, dispatches callbacks and handles reconnecting.
    client.loop_forever()
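
The snippet above calls wait_for_influxdb() without defining it; a minimal sketch of such a helper, assuming it simply polls the server until ping() answers:

import time

def wait_for_influxdb(db_client, retries=30, delay=2):
    """Block until InfluxDB responds, or give up after `retries` attempts."""
    for _ in range(retries):
        try:
            db_client.ping()  # raises while the service is still starting up
            return
        except Exception:
            time.sleep(delay)
    raise RuntimeError("InfluxDB did not become available in time")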
import os
import sys
import time

import numpy as np
import pandas

from influxdb import InfluxDBClient, DataFrameClient
from influxdb.exceptions import InfluxDBClientError

client = DataFrameClient('localhost', 8086, 'root', 'root', 'hrs_experiment')

try:
    print("Drop database")
    client.drop_database('hrs_experiment')
except InfluxDBClientError:
    pass # Didn't already exist for dropping.

print("Create database")
client.create_database('hrs_experiment')

write_times = []
for i in range(1, len(sys.argv)):
    print("Processing file: ", sys.argv[i], '...')
    station_id = 'Q' + os.path.basename(sys.argv[i]).split('_')[0]
    print("Using station ID: ", station_id, '...')
    df = pandas.read_csv(sys.argv[i], parse_dates=True, index_col=0, header=None)
    print("Creating JSON data...")
    print("Writing data...")
    start = time.time()
    # Write in batches of 100 rows so each HTTP request stays small
    for k, g in df.groupby(np.arange(len(df)) // 100):
        client.write_points(g, station_id)
    write_times.append(time.time() - start)
    print("Data written in {0} seconds".format(write_times[-1]))
    print("Sleeping for 30 seconds...")
Example No. 34
0
    client = DataFrameClient(host=args.host,
                             port=args.port,
                             username=args.user,
                             password=args.password,
                             database=args.database,
                             pool_size=1)

    logging.getLogger(__name__).info("Updating database with arguments: " +
                                     str(args))

    if args.drop:
        client.drop_database(args.database)

    if args.database not in [d['name'] for d in client.get_list_database()]:
        client.create_database(args.database)
        # NB: the retention policy below is hard-coded to a database named "cache"
        client.query(
            "ALTER RETENTION POLICY autogen ON cache DURATION INF REPLICATION 1 SHARD DURATION 2600w DEFAULT"
        )

    client.switch_database(args.database)

    with IQFeedHistoryProvider(num_connections=args.iqfeed_conn) as history:
        all_symbols = {
            (s, args.interval_len, args.interval_type)
            for s in set(
                iqutil.get_symbols(symbols_file=args.symbols_file).keys())
        }
        update_to_latest(
            client=client,
            noncache_provider=noncache_provider(history),
def main():

    # Initializes the default logger
    logging.basicConfig(
        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
        level=logging.INFO)
    logger = logging.getLogger(APPLICATION_NAME)

    # Checks the Python interpreter version
    if sys.version_info < (3, 0):
        logger.error('Python 3 is required! Leaving the program.')
        sys.exit(-1)

    # Parse arguments
    args = configuration_parser()

    logger.setLevel(args.logging_level)
    logger.info(f'Starting application "{APPLICATION_NAME}"...')
    logger.debug(f'Arguments: {vars(args)}')

    v_latitude, v_longitude = map(float, args.gps_location.split(','))

    v_influxdb_host = args.influxdb_host
    v_influxdb_port = args.influxdb_port

    v_influxdb_database = args.influxdb_database
    v_influxdb_username = args.influxdb_username
    v_influxdb_password = args.influxdb_password

    # Check whether the target InfluxDB database exists
    _client = DataFrameClient(host=v_influxdb_host,
                              port=v_influxdb_port,
                              username=v_influxdb_username,
                              password=v_influxdb_password,
                              database=v_influxdb_database)

    _dbs = _client.get_list_database()
    logger.debug(f'List of InfluxDB databases: {_dbs}')
    if v_influxdb_database not in [_d['name'] for _d in _dbs]:
        logger.info(
            f'InfluxDB database "{v_influxdb_database}" not found. Creating a new one.'
        )
        _client.create_database(v_influxdb_database)

    _client.close()

    # Pack all parameters in a dictionary
    _userdata = {
        'LOGGER': logger,
        'LATITUDE': v_latitude,
        'LONGITUDE': v_longitude,
        'INFLUXDB_HOST': v_influxdb_host,
        'INFLUXDB_PORT': v_influxdb_port,
        'INFLUXDB_USER': v_influxdb_username,
        'INFLUXDB_PASS': v_influxdb_password,
        'INFLUXDB_DB': v_influxdb_database,
        'MEASUREMENT_TS': args.measurement_ts,
        'PROCESSED_TS': args.processed_ts,
        'FORECAST_TS': args.forecast_ts,
        'WEATHER_TS': args.weather_ts,
        'HORIZON_LENGTH': args.horizon_length,
        'WEATHER_SERVER_URL': args.weather_server_url,
        'WEATHER_FORECAST_INTERVAL': args.weather_forecast_interval,
        'WEATHER_START_TIMESTAMP': args.weather_start_timestamp
    }

    # Instantiate the scheduler and repeatedly run the "forecasting task"
    # "forecast interval" seconds after its previous execution
    _main_scheduler = continuous_scheduler.MainScheduler()
    _main_scheduler.add_task(forecasting_task, 0, args.forecast_interval, 0,
                             _userdata)
    _main_scheduler.start()
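
continuous_scheduler is project-local and its add_task signature is not shown in full; a minimal sketch of the fixed-delay pattern the comment describes (re-run the task a fixed number of seconds after the previous run finishes), with illustrative names only:

import threading

def run_every(task, interval_s, userdata):
    """Run task(userdata), then schedule the next run interval_s seconds later."""
    def _tick():
        task(userdata)
        threading.Timer(interval_s, _tick).start()
    _tick()

# e.g. run_every(forecasting_task, args.forecast_interval, _userdata)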
Example No. 36
0
class tsdb(object):
    def __init__(self,
                 dbname,
                 host='localhost',
                 port=8086,
                 user='******',
                 password='******'):
        self.host = host
        self.port = port
        self.user = user
        self.password = password
        self.dbname = dbname
        self.client = None
        self.protocol = 'json'

    def _connect(self):
        if self.client is None:
            self.client = DataFrameClient(host=self.host,
                                          port=self.port,
                                          username=self.user,
                                          password=self.password,
                                          database=self.dbname)
            #self.client.switch_database(self.dbname)

    def _disconnect(self):
        if self.client is not None:
            self.client.close()
            self.client = None

    def _reconnect(self):
        self._disconnect()
        self._connect()

    def create_db(self):
        self._connect()
        dbs = self.client.get_list_database()
        for e in dbs:
            if self.dbname in e.values():
                logger.debug("Database {} already exists.".format(
                    self.dbname))
                return

        logger.info("Creating database: {}".format(self.dbname))
        self.client.create_database(self.dbname)
        #self._set_retention_policy()

    def _set_retention_policy(self):
        self._connect()
        # Raw samples are kept for 12 hours under the default policy ...
        self.client.create_retention_policy(name='raw',
                                            duration='12h',
                                            replication=1,
                                            default=True)
        # ... while downsampled data lives for a year under 'cooked'
        self.client.create_retention_policy(name='cooked',
                                            duration='52w',
                                            replication=1,
                                            default=False)
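
    # The 'cooked' policy above needs something to fill it; a minimal sketch
    # of a downsampling continuous query (the cq name and the 1m window are
    # assumptions, not part of the original class):
    def _create_downsampling_cq(self):
        self._connect()
        self.client.query(
            'CREATE CONTINUOUS QUERY "cq_cooked" ON "{db}" BEGIN '
            'SELECT mean(*) INTO "{db}"."cooked".:MEASUREMENT '
            'FROM /.*/ GROUP BY time(1m) END'.format(db=self.dbname))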

    def check_db(self):
        self._connect()
        db = self.client.get_list_database()
        ms = self.client.get_list_measurements()
        rp = self.client.get_list_retention_policies(self.dbname)
        user = self.client.get_list_users()

        print('db: {}, measurements: {}'.format(db, ms))
        print('retention policy: {}'.format(rp))
        print('users: {}'.format(user))

    def insert(self, df, measurement, tags=None):
        self._connect()
        try:
            result = self.client.write_points(df,
                                              measurement,
                                              tags=tags,
                                              time_precision='n',
                                              protocol=self.protocol)
        except Exception:
            logger.info('influxdb write error')
            result = False
        return result

    def query(self, sql):
        self._connect()
        result = self.client.query(sql)
        return result
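
A short usage sketch for the tsdb class above; the database, measurement and tag names are illustrative:

import pandas as pd

db = tsdb('demo')
db.create_db()
df = pd.DataFrame({'value': [1.0, 2.0]},
                  index=pd.date_range('2021-01-01', periods=2, freq='T'))
db.insert(df, 'sensor', tags={'site': 'lab'})
print(db.query('select * from sensor'))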
Example No. 37
0
class DbClient(metaclass=Singleton):
    def __init__(self, database=None, host=None, port=None):
        if database is None:
            self.database = configParser['database']['name']
        else:
            self.database = database

        if host is None:
            self.host = configParser['database']['host']
        else:
            self.host = host

        if port is None:
            self.port = configParser['database']['port']
        else:
            self.port = port

        self._instance = DataFrameClient(host=self.host,
                                         port=self.port,
                                         database=self.database)

    def save_to_db(self, df, measurement, tags=None):
        """ Saving dataframe to influx db """
        if tags is None:
            print("Write DataFrame")
            self._instance.write_points(df,
                                        database=self.database,
                                        measurement=measurement,
                                        protocol='json')
        else:
            print("Write DataFrame with Tags")
            self._instance.write_points(df,
                                        database=self.database,
                                        measurement=measurement,
                                        tags=tags,
                                        protocol='json')

    def fetch_from_db(self, query):
        """ Fetching data from influx db """

        print("Read from influx db")
        return self._instance.query(query)

    def create_db(self):
        """ Creating the influx db database """

        print("Create influx db")
        self._instance.create_database(self.database)

    def drop_db(self):
        """ Dropping the influx db database """

        print("Influx database with all measurements")
        self._instance.drop_database(self.database)

    def df_int_to_float(self, df):
        """ Converting the int data type columns to float """

        # InfluxDB rejects writes whose field type conflicts with already
        # stored data, so cast integer columns to float up front
        for i in df.select_dtypes('int64').columns.values:
            df[i] = df[i].astype(float)
        return df

    def is_existing(self):
        """ Checks if database already exists """
        result = self._instance.get_list_database()
        return any(db['name'] == self.database for db in result)
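
A short usage sketch for the DbClient singleton above; the measurement name and query are illustrative:

import pandas as pd

db = DbClient()
if not db.is_existing():
    db.create_db()
df = pd.DataFrame({'price': [100, 101]},
                  index=pd.date_range('2021-01-01', periods=2, freq='H'))
db.save_to_db(db.df_int_to_float(df), measurement='btc_usd')
print(db.fetch_from_db('select * from btc_usd'))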