Example #1
def runProg(args):
    """run program"""

    pd.set_option('display.width', 300)

    # log to a file
    util.logToFile('printLastDateTimeForAllMarketDataTables.log')

    # load the config file
    configFile = args.configFile
    config = ConfigParser(interpolation=ExtendedInterpolation(),
                          defaults=os.environ)
    config.read(configFile)

    # load data from configFile
    DBType = config.get('DataBase', 'DBType')
    DBFileName = config.get('DataBase', 'DBFileName')

    # create database class
    mydb = database.tradingDB(DBType=DBType,
                              DBFileName=DBFileName,
                              sqlalchemyLoggingLevel=logging.ERROR)
    # mydb = database.tradingDB(DBType='mysql', DBFileName=DBFileName)

    # load existing database
    mydb.instantiateExistingTablesAndClasses()

    # set log level
    tradingLogger = logging.getLogger('trading')
    tradingLogger.setLevel(logging.ERROR)

    df = getInfoAboutTables(mydb=mydb)
    printState(df=df)

    pass
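The helpers getInfoAboutTables and printState are not shown on this page. A minimal hypothetical sketch of what they might do, assuming (as in the other examples) that mydb.MarketDataInfoTableDataFrame carries a 'tableORM' column and that each table ORM exposes a datetime column:

import pandas as pd
import sqlalchemy

def getInfoAboutTables(mydb):
    """Hypothetical sketch: collect the last datetime stored in each market data table."""
    rows = []
    ssn = mydb.Session()
    for idx in mydb.MarketDataInfoTableDataFrame.index:
        tableORM = mydb.MarketDataInfoTableDataFrame.at[idx, 'tableORM']
        lastDT = ssn.query(sqlalchemy.func.max(tableORM.datetime)).scalar()
        rows.append({'tableName': tableORM.__tablename__,
                     'lastDateTime': lastDT})
    ssn.close()
    return pd.DataFrame(rows)

def printState(df):
    """Hypothetical sketch: print one line per table."""
    print(df.sort_values(by='tableName').to_string(index=False))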
Example #2
def runProg(args):
    """run program"""

    pd.set_option('display.width', 300)

    # log to a file
    utils.logToFile('createTraingData.log')

    # load the config file
    configFile = args.configFile
    config = ConfigParser(interpolation=ExtendedInterpolation(),
                          defaults=os.environ)
    config.read(configFile)

    # load data from configFile
    DBType = config.get('DataBase', 'DBType')
    DBFileName = config.get('DataBase', 'DBFileName')

    # create database class
    mydb = database.tradingDB(DBType=DBType, DBFileName=DBFileName)
    # mydb = database.tradingDB(DBType='mysql', DBFileName=DBFileName)

    # load existing database
    mydb.instantiateExistingTablesAndClasses()
    # set log level
    mydb._loggerSQLAlchemy.setLevel(logging.ERROR)
    ssn = mydb.Session()

    if 1:
        if (mydb.DBEngine.name == 'sqlite'):
            print('starting to perform vacuum ...')
            stmt = sqlalchemy.text('vacuum')
            mydb.DBEngine.execute(stmt)
            print('... done')

    ssn.commit()
    ssn.close()
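Note that Engine.execute() was removed in SQLAlchemy 2.0, and VACUUM cannot run inside a transaction. A sketch of the equivalent vacuum block under SQLAlchemy 2.x, assuming mydb.DBEngine is an ordinary Engine:

import sqlalchemy

# AUTOCOMMIT is required because VACUUM cannot run inside the implicit
# transaction that connect() would otherwise open on first execute.
if mydb.DBEngine.name == 'sqlite':
    with mydb.DBEngine.connect().execution_options(
            isolation_level='AUTOCOMMIT') as conn:
        conn.execute(sqlalchemy.text('VACUUM'))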
Example #3
def runProg(args):
    """run program"""

    util.patchAsyncio()

    # log to a file
    utils.logToFile('ttestTradingHours.log', level=logging.INFO)
    # utils.logToConsole()

    apschedulerLogger = logging.getLogger('apscheduler')
    apschedulerLogger.setLevel(logging.INFO)
    tradingLogger = logging.getLogger('trading')
    tradingLogger.setLevel(logging.INFO)

    pd.set_option('display.width', 200)

    # flags
    useWatchdog = False
    useScheduler = True

    # local timezone
    tzlocal = dateutil.tz.tzlocal()

    # load the config file
    configFile = args.configFile
    config = ConfigParser(interpolation=ExtendedInterpolation(),
                          defaults=os.environ)
    config.read(configFile)

    # load data from configFile
    host = config.get('InteractiveBrokers', 'host')
    port = config.getint('InteractiveBrokers', 'port')
    clientId = config.getint('InteractiveBrokers', 'clientId')
    DBType = config.get('DataBase', 'DBType')
    DBFileName = config.get('DataBase', 'DBFileName')

    # for production mode: watchdog
    if useWatchdog:
        # start watchdog
        # ibc = IBC(963, gateway=True, tradingMode='paper',ibcIni='/home/bn/IBController/configPaper.ini')
        ibcIni = config.get('InteractiveBrokers', 'ibcIni')
        tradingMode = config.get('InteractiveBrokers', 'tradingMode')
        ibc = IBC(970, gateway=True, tradingMode=tradingMode, ibcIni=ibcIni)
        ib = IB()
        watchdogApp = Watchdog(ibc,
                               ib=ib,
                               appStartupTime=15,
                               host=host,
                               port=port,
                               clientId=clientId)
        watchdogApp.start()
    else:
        # faster way for now
        ib = IB()
        try:
            ib.connect(host=host, port=port, clientId=clientId)
        except Exception:
            # connect failed, most likely a clientId collision;
            # retry once with a randomized clientId
            import random
            clientId = clientId + random.randint(1, 100000)
            ib.connect(host=host, port=port, clientId=clientId)
            pass

        class myWatchdog(object):
            def __init__(self):
                self.ib = ib
                pass

            pass

        watchdogApp = myWatchdog()
        pass
    pass

    # create database class
    mydb = database.tradingDB(DBType=DBType, DBFileName=DBFileName)
    # load existing database
    mydb.instantiateExistingTablesAndClasses(ib=ib)
    # set log level of sqlalchemy
    mydb._loggerSQLAlchemy.setLevel(logging.WARNING)

    # set the list of qualified contracts
    # get a list of qualified contracts that correspond to each row in mydb.MarketDataInfoTableDataFrame
    __qcs__ = list(mydb.MarketDataInfoTableDataFrame.qualifiedContract.values)
    # qcs = __qcs__[0:2]
    # qcs = operator.itemgetter(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 12)(__qcs__)
    # qcs = operator.itemgetter(0, 13, 1, 11, 7)(__qcs__)
    # qcs = operator.itemgetter(0, 12, 13, 2, 10, 3)(__qcs__)
    # qcs = operator.itemgetter(0, 1, 10)(__qcs__)
    qcs = __qcs__
    if isinstance(qcs, Contract):
        qcs = [qcs]
        pass
    if isinstance(qcs, tuple):
        qcs = list(qcs)
        pass

    if None in qcs:
        print('problem with connecting to IB. Now exiting')
        sys.exit()

    # define the container class
    cc = containerClass.ContainerClass()

    # add config
    cc.config = config

    # set watchdogapp
    cc.watchdogApp = watchdogApp

    # set database
    cc.mydb = mydb
    cc.qcs = qcs

    # register callbacks with ib
    cc.registerCallbacks(useWatchdog=useWatchdog)

    # define a scheduler
    useScheduler = True
    if useScheduler:
        scheduler = AsyncIOScheduler()
        cc.scheduler = scheduler
        cc.scheduler.start()
        pass

    for qc in qcs:
        print(qc)
        cds = watchdogApp.ib.reqContractDetails(qc)
        cd = cds[0]
        tHP = marketDataIB.TradingHourParser(cd)
        print(tHP.timeZoneId)
        for line in tHP.tradingHours.split(';'):
            print(line)
            break
            pass
        hoursParsed = tHP.parseToDF()
        print(hoursParsed.head(6))
        # for index, row in hoursParsed.iterrows():
        #     print(row)
        #     break
        #     pass

    # # general setting for the density of data for historical requests
    # configIB = config['InteractiveBrokers']
    # barSizePandasTimeDelta = pd.Timedelta(**eval(configIB.get('densityTimeDelta', '{"minutes":1}')))
    #
    #
    # ##############################################################
    # # request recent historical bars
    # ##############################################################
    # # settings to request the bars (for the qcs that are a member of cc)
    # # the 'short' settings are the default ones to be applied during trading hours
    #
    # durationPandasTimeDelta = pd.Timedelta(**eval(configIB.get('durationTimeDeltaRecentHistoricalDataShort', '{"hours":1}')))
    # timeOutTime = configIB.getint('timeOutTimeShortRequests', 10)
    #
    # recentHistoricalDataSettingsShort = {
    #     'durationPandasTimeDelta': durationPandasTimeDelta,
    #     'barSizePandasTimeDelta': barSizePandasTimeDelta,
    #     'timeOutTime': timeOutTime,
    #     'maximumBarsLengthFactor': 2,
    # }
    #
    # # settings for recent historical bars to be requested during off-trading hours.
    # # due to performance reasons, during the trading hours we want to request
    # # very short bars; during off-trading hours, we can request longer bars
    # # which fill up possible gaps left by the shorter setting.
    #
    # durationPandasTimeDelta = pd.Timedelta(**eval(configIB.get('durationTimeDeltaRecentHistoricalDataLong', '{"days":1}')))
    # timeOutTime = configIB.getint('timeOutTimeMediumRequests', 60)
    #
    # recentHistoricalDataSettingsLong = {
    #     'durationPandasTimeDelta': durationPandasTimeDelta,
    #     'barSizePandasTimeDelta': barSizePandasTimeDelta,
    #     'timeOutTime': timeOutTime,
    #     'maximumBarsLengthFactor': 2,
    # }
    #
    #
    # # set the current settings in the containerClass
    # a = (f'Now updating the settings for the request of recent historical bars')
    # logging.info(a)
    # print(a)
    # # set the settings
    # cc.recentHistoricalDataSettings = recentHistoricalDataSettingsShort
    #
    # # request the bars
    # a = (f'Now requesting initial recent historical bars')
    # logging.info(a)
    # print(a)
    # orderedDictOfBars = cc.requestRecentHistoricalOrderedDictOfBars()
    # cc.orderedDictOfBars = orderedDictOfBars
    #
    #
    # for (tableName, bars) in cc.orderedDictOfBars.items():
    #     nBars = None
    #     if isinstance(bars,objects.BarDataList):
    #         nBars = len(bars)
    #     print(tableName,type(bars),nBars)
    # ##############################################################
    #
    #
    # ##############################################################
    # # request historical bars
    # ##############################################################
    #
    # # add the job requesting historical data to the scheduler
    # # this setting starts at the earliestDateTime given by IB
    #
    # earliestPandasTimeDelta = pd.Timedelta(**eval(configIB.get('earliestTimeDeltaHistoricalData', '{"weeks":4}')))
    # durationPandasTimeDelta = pd.Timedelta(**eval(configIB.get('durationTimeDeltaHistoricalData', '{"days":1}')))
    # timeOutTime = configIB.getint('timeOutTimeLongRequests', 1800)
    # # timeOutTime = configIB.getint('timeOutTimeMediumRequests', 60)
    #
    # if earliestPandasTimeDelta.total_seconds() < 0:
    #     earliestDateTimeUTCNaive = None
    # else:
    #     earliestDateTimeUTCNaive = pd.to_datetime(pd.datetime.utcnow()).floor('1 min') - earliestPandasTimeDelta
    #     pass
    #
    # historicalDataGetterSettings={
    #     'ib': cc.watchdogApp.ib,
    #     'mydb': cc.mydb,
    #     'qcs': cc.qcs,
    #     'durationPandasTimeDelta': durationPandasTimeDelta,
    #     'barSizePandasTimeDelta': barSizePandasTimeDelta,
    #     'earliestDateTime': earliestDateTimeUTCNaive,
    #     'timeOutTime': timeOutTime,
    #     'jitterSpanFraction': 0.02,
    # }
    #
    # jobSettings = {
    #     'job': marketDataIB.asyncioJobGetHistoricalData,
    #     'args': [],
    #     'kwargs': historicalDataGetterSettings,
    #     'jobRootName': None,
    #     'minute': '*',
    #     'second': '0',
    #     'coalesce': True,
    #     'misfire_grace_time': 30,
    #     'trigger': 'cron',
    #     'max_instances': 1,
    # }
    #
    # if useScheduler:
    #     cc.addJobToScheduler(jobSettings=jobSettings)
    #     pass
    # ##############################################################
    #
    #
    # ##############################################################
    # # change the request of recent historical bars to a longer setting during off-trading hours
    # ##############################################################
    # # add a scheduled job that switches from the short to the long settings
    # jobSettings = {
    #     'job': cc.schedulerJobSwitchRequestForRecentHistoricalDataFromOneSettingToOther,
    #     'args': [],
    #     'kwargs': recentHistoricalDataSettingsLong,
    #     'jobRootName': 'schedulerJobSwitchRequestForRecentHistoricalDataFromShortToLong',
    #     'hour': '22',
    #     # 'hour': '*',
    #     'minute': '07',
    #     # 'minute': '*/2',
    #     'second': '00',
    #     # 'second': '5-59/10',
    #     'coalesce': True,
    #     'misfire_grace_time': 30,
    #     'trigger': 'cron',
    #     'max_instances': 1,
    # }
    #
    # if useScheduler:
    #     cc.addJobToScheduler(jobSettings=jobSettings)
    #
    # # add a scheduled job that switches from the long to the short settings
    # jobSettings = {
    #     'job': cc.schedulerJobSwitchRequestForRecentHistoricalDataFromOneSettingToOther,
    #     'args': [],
    #     'kwargs': recentHistoricalDataSettingsShort,
    #     'jobRootName': 'schedulerJobSwitchRequestForRecentHistoricalDataFromLongToShort',
    #     'hour': '04',
    #     # 'hour': '*',
    #     'minute': '13',
    #     # 'minute': '1-59/2',
    #     'second': '00',
    #     # 'second': '*/10',
    #     'coalesce': True,
    #     'misfire_grace_time': 30,
    #     'trigger': 'cron',
    #     'max_instances': 1,
    # }
    #
    # if useScheduler:
    #     cc.addJobToScheduler(jobSettings=jobSettings)
    #
    # ##############################################################

    if 1:
        if useScheduler:
            print('Press Ctrl+{0} to exit'.format('Break' if os.name ==
                                                  'nt' else 'C'))
            # Execution will block here until Ctrl+C (Ctrl+Break on Windows) is pressed.
            try:
                asyncio.get_event_loop().run_forever()
            except (KeyboardInterrupt, SystemExit):
                pass
            pass
        else:
            util.allowCtrlC()
            ib.run()
            pass
        pass

    ib.disconnect()
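marketDataIB.TradingHourParser is project code that is not shown here. For reference, contractDetails.tradingHours is a ';'-separated string of sessions, in the older style 'YYYYMMDD:HHMM-HHMM' or 'YYYYMMDD:CLOSED', interpreted in contractDetails.timeZoneId. A hypothetical stand-in for the parser, under that format assumption:

import pandas as pd

def parseTradingHoursToDF(tradingHours: str) -> pd.DataFrame:
    """Hypothetical stand-in for TradingHourParser.parseToDF():
    one row per session, split on ';' as in the loop above."""
    rows = []
    for session in tradingHours.split(';'):
        day, _, hours = session.partition(':')
        if not hours or hours == 'CLOSED':
            rows.append({'date': day, 'open': None, 'close': None})
            continue
        openHHMM, _, closeHHMM = hours.partition('-')
        rows.append({'date': day, 'open': openHHMM, 'close': closeHHMM})
    return pd.DataFrame(rows)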
Example #4
def runProg(args):
    """run program"""

    ###
    # conclusion: for fast processes, it is beneficial to use the serial approach
    ##

    pd.set_option('display.width', 300)

    # log to a file
    util.logToFile('createTraingData.log')

    # load the config file
    configFile = args.configFile
    config = ConfigParser(interpolation=ExtendedInterpolation(),
                          defaults=os.environ)
    config.read(configFile)

    # load data from configFile
    DBType = config.get('DataBase', 'DBType')
    DBFileName = config.get('DataBase', 'DBFileName')
    configIB = config['InteractiveBrokers']
    barSizePandasTimeDelta = pd.Timedelta(
        **eval(configIB.get('densityTimeDelta', '{"minutes":1}')))

    # create database class
    mydb = database.tradingDB(DBType=DBType, DBFileName=DBFileName)
    # mydb = database.tradingDB(DBType='mysql', DBFileName=DBFileName)

    # load existing database
    mydb.instantiateExistingTablesAndClasses()
    # set log level
    mydb._loggerSQLAlchemy.setLevel(logging.ERROR)

    # pd.datetime was removed in pandas 1.0; pd.Timestamp yields the same naive UTC stamp
    nowUTCFloor = pd.Timestamp.utcnow().tz_localize(None).floor(
        barSizePandasTimeDelta) - barSizePandasTimeDelta
    startDate = nowUTCFloor
    # startDate = pd.to_datetime('2018-05-17 15:00:00').tz_localize('Europe/Berlin').tz_convert('UTC').tz_localize(None).round('1 min')
    print(startDate)
    dateRange = createDateRange(startDate)

    tableNames = mydb.MarketDataInfoTableDataFrame.tableName

    if 1:
        # multiprocessing Shared List using wrapper
        tt1 = time.time()

        def simpleFunc(tableName: str = None) -> pd.DataFrame:
            return getCompleteDFFromDateRangeAndTableName(tableName=tableName,
                                                          mydb=mydb,
                                                          dateRange=dateRange)

        resultsDict = parallelizeDataBaseRequest(tableNames=tableNames,
                                                 simpleFunc=simpleFunc)

        df = resultsDict['MarketData_CASH_EUR_JPY_IDEALPRO']
        # df = resultsDict['MarketData_CFD_IBUS500_USD_SMART']
        print(df)

        tt2 = time.time()
        ttdiff = tt2 - tt1
        print(
            f'retrieved data multi processing Shared Variable using wrapper: {ttdiff}'
        )
        print()

        pass

    if 1:
        # serial processing, long function
        tt1 = time.time()

        def simpleFunc(tableName: str = None) -> pd.DataFrame:
            return getCompleteDFFromDateRangeAndTableName(tableName=tableName,
                                                          mydb=mydb,
                                                          dateRange=dateRange)

        resultsDict = {}
        for tableName in tableNames:
            resultsDict[tableName] = simpleFunc(tableName)
            pass

        df = resultsDict['MarketData_CASH_EUR_JPY_IDEALPRO']
        # df = resultsDict['MarketData_CFD_IBUS500_USD_SMART']
        # print(df)

        tt2 = time.time()
        ttdiff = tt2 - tt1
        print(f'retrieved data serial long function: {ttdiff}')
        print()

        pass

    if 1:
        # multiprocessing Shared List using wrapper, fast function
        tt1 = time.time()

        def simpleFunc(tableName: str = None):
            tableORM = mydb.getTableORMByTablename(tableName)
            ssn = mydb.Session()
            myDT = ssn.query(sqlalchemy.func.min(tableORM.datetime)).scalar()
            ssn.close()
            return myDT

        resultsDict = parallelizeDataBaseRequest(tableNames=tableNames,
                                                 simpleFunc=simpleFunc)

        print(resultsDict)

        tt2 = time.time()
        ttdiff = tt2 - tt1
        print(
            f'retrieved data multi processing Shared Variable using wrapper fast function: {ttdiff}'
        )
        print()

        pass

    if 1:
        # serial processing, fast function
        tt1 = time.time()

        def simpleFunc(tableName: str = None):
            tableORM = mydb.getTableORMByTablename(tableName)
            ssn = mydb.Session()
            myDT = ssn.query(sqlalchemy.func.min(tableORM.datetime)).scalar()
            ssn.close()
            return myDT

        resultsDict = {}
        for tableName in tableNames:
            resultsDict[tableName] = simpleFunc(tableName)
            pass

        print(resultsDict)

        tt2 = time.time()
        ttdiff = tt2 - tt1
        print(f'retrieved data serial fast function: {ttdiff}')
        print()

        pass

    if 0:
        # multiprocessing Shared List
        tt1 = time.time()

        # Setup a list of processes that we want to run
        manager = mp.Manager()
        results = manager.dict()
        # print(type(results))

        processes = []
        argss = [(tableName, mydb, dateRange, results)
                 for tableName in tableNames]

        for args in argss:
            tableName = args[0]
            process = mp.Process(target=funcMultiProcessingSharedList,
                                 args=args)

            processes.append(process)
            pass

        # Run processes
        for p in processes:
            p.start()
            pass

        # Exit the completed processes
        for p in processes:
            p.join()
            pass

        resultsDict = dict(results)
        # print(resultsDict)

        df = resultsDict['MarketData_CASH_EUR_JPY_IDEALPRO']
        # df = resultsDict['MarketData_CFD_IBUS500_USD_SMART']

        print(df)

        tt2 = time.time()
        ttdiff = tt2 - tt1
        print(f'retrieved data multi processing Shared Variable: {ttdiff}')
        print()

        pass
    pass
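parallelizeDataBaseRequest is not shown on this page; judging from the manual multiprocessing block above, it likely runs the per-table call in one process each and collects results in a managed dict. A minimal sketch under that assumption (fork start method assumed; a nested worker function is not picklable under spawn):

import multiprocessing as mp

def parallelizeDataBaseRequest(tableNames, simpleFunc):
    """Hypothetical sketch: run simpleFunc(tableName) in one process
    per table and return {tableName: result}."""
    manager = mp.Manager()
    results = manager.dict()

    def worker(tableName):
        results[tableName] = simpleFunc(tableName)

    processes = [mp.Process(target=worker, args=(tableName,))
                 for tableName in tableNames]
    for p in processes:
        p.start()
    for p in processes:
        p.join()
    return dict(results)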
Example #5
def runProg(args):
    """run program"""

    pd.set_option('display.width', 300)

    # log to a file
    util.logToFile('createTraingData.log')

    # load the config file
    configFile = args.configFile
    config = ConfigParser(interpolation=ExtendedInterpolation(),
                          defaults=os.environ)
    config.read(configFile)

    # load data from configFile
    host = config.get('InteractiveBrokers', 'host')
    port = config.getint('InteractiveBrokers', 'port')
    DBType = config.get('DataBase', 'DBType')
    DBFileName = config.get('DataBase', 'DBFileName')
    clientId = config.getint('InteractiveBrokers', 'clientId')

    # override configFile if clientId is given on the command line
    if args.clientId is not None:
        clientId = args.clientId

    # create database class
    mydb = database.tradingDB(DBType=DBType, DBFileName=DBFileName)
    # mydb = database.tradingDB(DBType='mysql', DBFileName=DBFileName)

    # load existing database
    mydb.instantiateExistingTablesAndClasses()
    # set log level
    mydb._loggerSQLAlchemy.setLevel(logging.ERROR)

    tbls = mydb.MarketDataInfoTableDataFrame['tableORM']

    if 0:
        # loop over entire table and repair
        df = None
        for idx in mydb.MarketDataInfoTableDataFrame.index:
            tableORM = mydb.MarketDataInfoTableDataFrame.at[idx, 'tableORM']
            tableName = tableORM.__tablename__

            ssn = mydb.Session()
            print(tableName, ssn.query(tableORM).count())
            ssn.close()

            # if tableName not in ['MarketData_IND_N225_JPY_OSE.JPN', 'MarketData_IND_INDU_USD_CME']:
            # if tableName not in ['MarketData_IND_N225_JPY_OSE.JPN']:
            if False:
                continue
            dfLoop = mydb.correctDiffDateTimesForMarketDataTable(
                tableName=tableName,
                startDateTime=None,
                endDateTime=None,
                doCorrection=True)
            print(dfLoop)
            if df is None:
                df = dfLoop.copy()
            else:
                if dfLoop is not None:
                    df = pd.concat([df, dfLoop])  # DataFrame.append was removed in pandas 2.0
                    pass
                pass
            pass
        df.sort_index(inplace=True)
        print(df)

    if 1:
        # loop over entire table and check for specific values in any of the columns [close, low, high, open]
        df = None
        valuesToFind = [None, 0]
        for idx in mydb.MarketDataInfoTableDataFrame.index:
            tableORM = mydb.MarketDataInfoTableDataFrame.at[idx, 'tableORM']
            tableName = tableORM.__tablename__
            if tableName not in ['MarketData_CFD_IBDE30_EUR_SMART']:
                # continue
                pass
            ssn = mydb.Session()
            print(
                f'tableName: {tableName}; nRows: {ssn.query(tableORM).count()}; ',
                end='',
                flush=True)
            ssn.close()
            dfLoop = mydb.findEntriesInMarketDataTable(
                tableName=tableName,
                valuesToFind=valuesToFind,
                startDateTime=None,
                endDateTime=None,
                doCorrection=False)
            # print(dfLoop)
            nRowsLoop = 0
            if dfLoop is not None:
                nRowsLoop = len(dfLoop)
                pass
            print(f'nRows with {valuesToFind}: {nRowsLoop}',
                  end='\n',
                  flush=True)
            if df is None:
                df = dfLoop.copy()
            else:
                if dfLoop is not None:
                    df = pd.concat([df, dfLoop])  # DataFrame.append was removed in pandas 2.0
                    pass
                pass
            pass
        df.sort_index(inplace=True)
        df = df.sort_values(by=['tableName', 'datetime'])
        print(df)
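findEntriesInMarketDataTable is project code that is not shown here. A hypothetical sketch of the query it might issue for valuesToFind = [None, 0], assuming the table ORMs expose open/high/low/close columns:

import pandas as pd
import sqlalchemy

def findSuspectRows(mydb, tableORM, valuesToFind):
    """Hypothetical sketch: rows where any OHLC column is NULL or matches a value."""
    conds = []
    for col in (tableORM.open, tableORM.high, tableORM.low, tableORM.close):
        for v in valuesToFind:
            conds.append(col.is_(None) if v is None else col == v)
    ssn = mydb.Session()
    q = ssn.query(tableORM).filter(sqlalchemy.or_(*conds))
    df = pd.read_sql(q.statement, ssn.bind)
    ssn.close()
    return df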
Example #6
def runProg(args):
    """run program"""

    pd.set_option('display.width', 200)

    # log to a file
    util.logToFile('createTraingData.log')

    # load the config file
    configFile = args.configFile
    config = ConfigParser(interpolation=ExtendedInterpolation(), defaults=os.environ)
    config.read(configFile)

    # load data from configFile
    host = config.get('InteractiveBrokers', 'host')
    port = config.getint('InteractiveBrokers', 'port')
    DBType = config.get('DataBase', 'DBType')
    DBFileName = config.get('DataBase', 'DBFileName')
    clientId = config.getint('InteractiveBrokers', 'clientId')

    # override configFile if clientId is given on the command line
    if args.clientId is not None:
        clientId = args.clientId

    if 1:
        # faster way for now
        ib = IB()
        ib.connect(host=host, port=port, clientId=clientId)
        pass

    pass

    # create database class
    mydbSQLite = database.tradingDB(DBType=DBType, DBFileName=DBFileName)
    # load existing database
    mydbSQLite.instantiateExistingTablesAndClasses(ib=ib)
    # set log level
    mydbSQLite._loggerSQLAlchemy.setLevel(logging.ERROR)

    # create database class
    mydbMySQL = database.tradingDB(DBType='mysql', DBFileName=DBFileName)
    # load existing database
    mydbMySQL.instantiateExistingTablesAndClasses(ib=ib)
    # set log level
    mydbMySQL._loggerSQLAlchemy.setLevel(logging.ERROR)


    tblsSQLiteORM = mydbSQLite.MarketDataInfoTableDataFrame['tableORM']
    tblsMySQLORM = mydbMySQL.MarketDataInfoTableDataFrame['tableORM']

    nTables = len(tblsMySQLORM)

    ssnSQLite = mydbSQLite.Session()
    ssnMySQL = mydbMySQL.Session()

    for i in range(nTables):
        tt1 = time.time()

        tblSQLiteSchema = tblsSQLiteORM.iloc[i].__table__
        tblMySQLSchema = tblsMySQLORM.iloc[i].__table__


        print(tblSQLiteSchema.name)
        print(tblMySQLSchema)

        qs = ssnSQLite.query(tblSQLiteSchema)
        df_read = pd.read_sql(qs.statement, qs.session.bind)

        mydbMySQL.upsertDataFrame(df_read, tblMySQLSchema)
        ssnMySQL.commit()

        tt2 = time.time()
        ttdiff = tt2 - tt1
        print(f'copied {tblSQLiteSchema.name}: {ttdiff}')
        pass

    ssnSQLite.close()
    ssnMySQL.close()
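upsertDataFrame is not shown on this page. One plausible implementation for the MySQL target is SQLAlchemy's INSERT ... ON DUPLICATE KEY UPDATE construct; a free-standing sketch, assuming the table's primary key identifies duplicate rows:

from sqlalchemy.dialects.mysql import insert

def upsertDataFrame(engine, df, tableSchema):
    """Hypothetical sketch: insert rows, updating non-key columns on key collision."""
    rows = df.to_dict(orient='records')
    if not rows:
        return
    stmt = insert(tableSchema).values(rows)
    updateCols = {c.name: stmt.inserted[c.name]
                  for c in tableSchema.columns if not c.primary_key}
    stmt = stmt.on_duplicate_key_update(**updateCols)
    with engine.begin() as conn:
        conn.execute(stmt)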
Example #7
def runProg(args):
    """run program"""

    util.patchAsyncio()

    # log to a file
    utils.logToFile('getRecentHistoricalData.log')
    # utils.logToConsole()

    apschedulerLogger = logging.getLogger('apscheduler')
    apschedulerLogger.setLevel(logging.ERROR)
    tradingLogger = logging.getLogger('trading')
    tradingLogger.setLevel(logging.WARNING)

    pd.set_option('display.width', 200)

    # load the config file
    configFile = args.configFile
    config = ConfigParser(interpolation=ExtendedInterpolation(),
                          defaults=os.environ)
    config.read(configFile)

    # load data from configFile
    host = config.get('InteractiveBrokers', 'host')
    port = config.getint('InteractiveBrokers', 'port')
    DBType = config.get('DataBase', 'DBType')
    DBFileName = config.get('DataBase', 'DBFileName')
    clientId = config.getint('InteractiveBrokers', 'clientId')

    # for production mode: watchdog
    if 1:
        # start watchdog
        # ibc = IBC(963, gateway=True, tradingMode='paper',ibcIni='/home/bn/IBController/configPaper.ini')
        ibcIni = config.get('InteractiveBrokers', 'ibcIni')
        tradingMode = config.get('InteractiveBrokers', 'tradingMode')
        ibc = IBC(970, gateway=True, tradingMode=tradingMode, ibcIni=ibcIni)
        myWatchdogapp = myWatchdog.myWatchdog(ibc,
                                              appStartupTime=15,
                                              port=4002)
        myWatchdogapp.start()
        ib = myWatchdogapp.ib
        pass

    if 0:
        # faster way for now
        ib = IB()
        ib.connect(host=host, port=port, clientId=clientId)
        pass

    pass

    # create database class
    mydb = database.tradingDB(DBType=DBType, DBFileName=DBFileName)
    # load existing database
    mydb.instantiateExistingTablesAndClasses(ib=ib)
    # set log level of sqlalchemy
    mydb._loggerSQLAlchemy.setLevel(logging.WARNING)

    qcs = mydb.MarketDataInfoTableDataFrame.qualifiedContract
    for qc in qcs:
        print(qc, type(qc))
        ib.reqMktData(contract=qc,
                      genericTickList='',
                      snapshot=False,
                      regulatorySnapshot=False,
                      mktDataOptions=None)

        pass

    df = pd.DataFrame(
        columns='symbol bidSize bid ask askSize high low close'.split())
    df['symbol'] = [qc.localSymbol for qc in qcs]
    contract2Row = {qc: i for (i, qc) in enumerate(qcs)}
    pprint.pprint(contract2Row)

    def onPendingTickers(tickers):
        for t in tickers:
            iRow = contract2Row[t.contract]
            localSymbol = t.contract.localSymbol
            if localSymbol == "EUR.USD":
                nowUTC = pd.Timestamp.utcnow().tz_localize(None)  # pd.datetime was removed in pandas 1.0
                nowUTCRounded = nowUTC.floor('1 min')
                dateTime = pd.to_datetime(t.time).tz_localize(None)
                print(localSymbol, nowUTCRounded,
                      ((dateTime - nowUTCRounded) / pd.Timedelta('1 sec')),
                      t.close)

    #         df.iloc[iRow, 1:] = (t.bidSize, t.bid, t.ask, t.askSize, t.high, t.low, t.close)
    #     print(df)

    ib.setCallback('pendingTickers', onPendingTickers)

    # ib.sleep(300)

    if 1:
        util.allowCtrlC()
        # Execution will block here until Ctrl+C (Ctrl+Break on Windows) is pressed.
        try:
            asyncio.get_event_loop().run_forever()
        except (KeyboardInterrupt, SystemExit):
            pass
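ib.setCallback('pendingTickers', ...) is the old ib_insync callback API; current versions expose the same hook as an event. A sketch of the equivalent subscription:

# Event-based subscription in current ib_insync:
ib.pendingTickersEvent += onPendingTickers
# ... and to unsubscribe when done:
ib.pendingTickersEvent -= onPendingTickers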
Example #8
from trading import database

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = None
mydb = database.tradingDB()
mydb.instantiateExistingTablesAndClasses(ib=None)
target_metadata = mydb.DBDeclarativeBase.metadata
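# With target_metadata pointing at the project's declarative metadata,
# 'alembic revision --autogenerate' can diff the ORM models against the
# live database when generating migration scripts (standard Alembic wiring).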

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well.  By skipping the Engine creation