Example #1
def run_volest_report(today_txt):
    """
    today_txt '%Y%m%d'
    """
    log = logger("Volest report ...")
    globalconf = config.GlobalConfig()
    if today_txt is None:
        today = dt.date.today()
        last_date = today.strftime('%Y%m%d')
    else:
        last_date = today_txt

    vol = volest.VolatilityEstimator(db_type="underl_ib_hist",
                                     symbol="SPY",
                                     expiry=None,
                                     last_date=last_date,
                                     num_days_back=200,
                                     resample="1D",
                                     estimator="GarmanKlass",
                                     clean=True)
    window = 30
    windows = [30, 60, 90, 120]
    quantiles = [0.25, 0.75]
    bins = 100
    normed = True
    # vol.term_sheet_to_png(window, windows, quantiles, bins, normed)
    # vol.term_sheet_to_json(window, windows, quantiles, bins, normed)
    # vol.term_sheet_to_html(window, windows, quantiles, bins, normed)
    #vol.term_sheet_to_db(window, windows, quantiles, bins, normed)

    # p = vol.cones_bokeh(windows=windows, quantiles=quantiles)
    vol.cones_data(windows=windows, quantiles=quantiles)
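For reference, estimator="GarmanKlass" selects the Garman-Klass range-based volatility estimator. A minimal sketch of what such an estimator computes from OHLC bars (the column names and annualization factor here are assumptions; the actual volest implementation may differ in details such as cleaning and resampling):

import numpy as np

def garman_klass_sigma(df, trading_periods=252):
    # Garman-Klass: sigma^2 = mean(0.5*ln(H/L)^2 - (2*ln(2)-1)*ln(C/O)^2), annualized
    log_hl = np.log(df['High'] / df['Low'])
    log_co = np.log(df['Close'] / df['Open'])
    rs = 0.5 * log_hl ** 2 - (2.0 * np.log(2.0) - 1.0) * log_co ** 2
    return np.sqrt(rs.mean() * trading_periods)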
Example #2
    def test_get_market_db_file(self):
        globalconf = config.GlobalConfig()
        db_type = "optchain_ib"
        expiry = "222222"
        db_file = get_market_db_file(db_type, expiry)
        print(db_file)
        self.assertEqual(db_file, "KKKK")
Example #3
    def test_volestimator(self):
        import core.vol_estimators as volest
        log = logger("Testing ...")
        globalconf = config.GlobalConfig()
        today = dt.date.today()
        vol = volest.VolatilityEstimator(db_type="underl_ib_hist",
                                         symbol="AAPL",
                                         expiry=None,
                                         last_date=today.strftime('%Y%m%d'),
                                         num_days_back=200,
                                         resample="1D",
                                         estimator="GarmanKlass",
                                         clean=True)
        #fig, plt = vol.cones(windows=[30, 60, 90, 120], quantiles=[0.25, 0.75])
        #plt.show()

        window = 30
        windows = [30, 60, 90, 120]
        quantiles = [0.25, 0.75]
        bins = 100
        normed = True
        bench = 'SPY'
        # creates a pdf term sheet with all metrics
        vol.term_sheet_to_pdf(window, windows, quantiles, bins, normed, bench)
        # vol.term_sheet_to_png(window, windows, quantiles, bins, normed, bench)
        # vol.term_sheet_to_html(window, windows, quantiles, bins, normed, bench)
        self.assertEqual(100, 100)
Example #4
def run():
    """
    Used as command to consolidate in the main h5 anciliary h5 generated due to column length exceptions
    """
    globalconf = config.GlobalConfig()
    path = globalconf.config['paths']['data_folder']
    log = logger("fix_h5 optchain")
    os.chdir(path)
    if not os.path.exists(path + "/optchain_ib_backups"):
        os.makedirs(path + "/optchain_ib_backups")

    optchain_orig = 'optchain_ib_hist_db.h5'
    pattern_optchain = 'optchain_ib_hist_db.h5*'
    optchain_out = 'optchain_ib_hist_db_complete.h5'
    lst1 = glob.glob(pattern_optchain)
    lst1.remove(optchain_orig)
    if not lst1:
        log.info("No ancilliary files to append, exiting ... ")
        return
    log.info(("List of ancilliary files that will be appended: ", lst1))
    dataframe = pd.DataFrame()
    for x in lst1:
        store_in1 = pd.HDFStore(path + x)
        root1 = store_in1.root
        log.info(("Root pathname of the input store: ", root1._v_pathname))
        for lvl1 in root1:
            log.info(("Level 1 pathname in the root if the H5: ", lvl1._v_pathname))
            if lvl1:
                df1 = store_in1.select(lvl1._v_pathname)
                dataframe = dataframe.append(df1)
                log.info(("Store_in1", len(df1), x))
        store_in1.close()
        os.rename(path + x, path + "/optchain_ib_backups/" + x)


    store_in1 = pd.HDFStore(path + optchain_orig)
    store_out = pd.HDFStore(path + optchain_out)
    root1 = store_in1.root
    root2 = store_out.root
    log.info(("Root pathname of the input store: ", root1._v_pathname))
    log.info(("Root pathname of the output store: ", root2._v_pathname))
    for lvl1 in root1:
        print (lvl1._v_pathname)
        if lvl1:
            df1 = store_in1.select(lvl1._v_pathname)
            dataframe = dataframe.append(df1)
            log.info(("Store_in1 length and name", len(df1), optchain_orig))
    store_in1.close()
    os.rename(path + optchain_orig, path + "/optchain_ib_backups/" + datetime.now().strftime('%Y%m%d%H%M%S') + optchain_orig)

    dataframe.sort_index(inplace=True,ascending=[True])
    names = dataframe['symbol'].unique().tolist()
    for name in names:
        print ("Storing " + name + " in ABT ..." + str(len(dataframe)))
        joe = dataframe[dataframe.symbol == name]
        joe=joe.sort_values(by=['symbol', 'current_datetime', 'expiry', 'strike', 'right'])
        store_out.append("/" + name, joe, data_columns=True,min_itemsize={'comboLegsDescrip': 25})
    store_out.close()
    os.rename(path + optchain_out, path + optchain_orig)
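The "column-length exceptions" in the docstring come from HDFStore's fixed-width string columns: an append whose string value exceeds the width chosen when the table was first created raises a ValueError, which is what spawns the ancillary files in the first place. Reserving enough width on the first append avoids it, as the min_itemsize argument above does; a minimal, self-contained sketch (file and node names are illustrative):

import pandas as pd

store = pd.HDFStore('example_db.h5')
df = pd.DataFrame({'comboLegsDescrip': ['short description']})
# reserve 25 characters for the string column up front, so later appends
# with longer values still fit instead of raising a ValueError
store.append('/SPY', df, data_columns=True, min_itemsize={'comboLegsDescrip': 25})
store.close()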
Example #5
def store_etf_stocks_yahoo_to_db():
    log = logger("yahoo etf stocks")
    globalconf = config.GlobalConfig()
    if dt.datetime.now().date() in utils.get_trading_close_holidays(dt.datetime.now().year):
        log.info("This is a US Calendar holiday. Ending process ... ")
        return

    path = globalconf.config['paths']['data_folder']
    optchain_def = globalconf.get_tickers_optchain_yahoo()
    source1 = globalconf.config['use_case_yahoo_options']['source']
    log.info("Getting  etf stocks data from yahoo w pandas_datareader ... [%s] [%s]"
             % (dt.datetime.now().strftime('%Y-%m-%d %H:%M:%S') , source1  )  )
    wait_secs=10

    for _,row in optchain_def.iterrows():
        log.info("Init yahoo quotes downloader symbol=%s" % (row.symbol) )             

        db_file = get_market_db_file(db_type="underl_yhoo", expiry="NONE")
        store = sqlite3.connect(path + db_file)

        sql = "SELECT count(*) as count1 FROM sqlite_master WHERE type='table' AND name='" + str(row['symbol']) + "';"
        tbl_exist = pd.read_sql_query(sql, store)['count1'][0]
        log.info("Does this symbol has a table in sqlite? " + str(tbl_exist))
        last_record_stored = None
        if tbl_exist:
            sql = "SELECT MAX(DATE) as max1 FROM " + str(row['symbol']) + " ;"
            last_date = pd.read_sql_query( sql , store)['max1'][0]
            log.info("Last date in data store for symbol " + row['symbol'] + " is " + str(last_date) )
            if last_date is not None:
                if dt.datetime.strptime(str(last_date), '%Y-%m-%d %H:%M:%S').date() == dt.datetime.today().date():
                    log.info("All data available is already in the store. ")
                    continue
                last_record_stored = dt.datetime.strptime(str(last_date), '%Y-%m-%d %H:%M:%S') + dt.timedelta(days=1)
                
        try:
            df = web.DataReader(
                 name=row.symbol,
                 data_source=source1,
                 start=last_record_stored,
                 end=None,
                 retry_count=3,
                 pause=0.1
                 )
            df['Quote_Time'] =  dt.datetime.now()   
            df['Quote_Time_txt'] = df['Quote_Time'].dt.strftime("%Y-%m-%d %H:%M:%S")
            df = df.reset_index().set_index("Quote_Time")
            df['Expiry_txt'] = dt.datetime(9999, 12, 31, 23, 59).strftime("%Y-%m-%d %H:%M:%S")
            df['Symbol'] = row.symbol
            count_row = df.shape[0]
            log.info("Number of new rows [%s] " %  (str(count_row)) )
            write_market_data_to_sqllite(df, "underl_yhoo")
            sleep(wait_secs)
            log.info("sleeping [%s] secs ..." % (str(wait_secs)))
        except (RemoteDataError,TypeError) as err:
            log.info("No information for ticker [%s] Error=[%s] sys_info=[%s]" % (str(row.symbol) , str(err) , sys.exc_info()[0] ))
            continue
        except KeyError as e:
            log.warn("KeyError raised [" + str(e) + "]...")
            continue
Example #6
    def test_get_expiries(self):
        from persist.sqlite_methods import get_expiries
        import core.config as config
        globalconf = config.GlobalConfig()

        list = get_expiries(dsId='optchain_ib_exp', symbol="SPY")

        print(max(list))
        self.assertEqual(max(list), '2017-09')
Example #7
def migrate_h5_to_sqllite_optchain(hdf5_pattern, h5_db_alias, drop_expiry, filter_symbol):
    """

    """
    globalconf = config.GlobalConfig()
    log = logger("migrate_h5_to_sqllite_optchain")
    path = globalconf.config['paths']['data_folder']
    lst1 = glob.glob(path + hdf5_pattern)
    if not lst1:
        log.info("No h5 files to append ... ")
    else:
        log.info(("List of h5 files that will be appended: ", lst1))
        time.sleep(1)
        try:
            input("Press Enter to continue...")
        except SyntaxError:
            pass

        for hdf5_path in lst1:
            store_file = pd.HDFStore(hdf5_path)
            root1 = store_file.root
            # all child nodes of root, which are the underlying symbols
            list = [x._v_pathname for x in root1]
            log.info(("Processing file: ", hdf5_path))
            # only migrate the symbol indicated if available
            if filter_symbol != "ALL":
                list = [filter_symbol]
            store_file.close()
            log.info(("List of symbols: " + str(list)))
            for symbol in list:
                store_file = pd.HDFStore(hdf5_path)
                node1 = store_file.get_node(symbol)
                if node1:
                    log.info(("Symbol: " + symbol))
                    # Unfortunately the JSON field doesn't contain more info than the already present fields
                    # df1['json_dict'] = df1['JSON'].apply(CustomParser)
                    # following line converts dict column into n columns for the dataframe:
                    # https://stackoverflow.com/questions/20680272/reading-a-csv-into-pandas-where-one-column-is-a-json-string
                    # df1 = pd.concat([df1.drop(['json_dict','JSON'], axis=1), df1['json_dict'].apply(pd.Series)], axis=1)
                    # df1 = df1.drop(['JSON'], axis=1)


                    # this is a specific case for underlying history
                    if symbol == "/ES" and h5_db_alias == "underl_ib_hist":
                        for lvl1 in node1:
                            log.info(("Level 1 pathname in the root of the H5: ", lvl1._v_pathname))
                            if lvl1:
                                df1 = store_file.select(lvl1._v_pathname)
                                df1['expiry'] = lvl1._v_pathname
                                mkt.write_market_data_to_sqllite(df1, h5_db_alias)
                    else:
                        df1 = store_file.select(node1._v_pathname)
                        # Expiry is already in the index
                        if drop_expiry:
                            df1 = df1.drop(['Expiry'], axis=1)
                        mkt.write_market_data_to_sqllite(df1, h5_db_alias)
                store_file.close()
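The commented-out lines above describe expanding a JSON string column into regular dataframe columns. A minimal sketch of that technique, with illustrative data (the snippet notes the real JSON field carried no extra information, which is why the expansion stayed commented out):

import json
import pandas as pd

df1 = pd.DataFrame({'JSON': ['{"bid": 1.2, "ask": 1.3}', '{"bid": 2.0, "ask": 2.1}']})
df1['json_dict'] = df1['JSON'].apply(json.loads)
# expand the dict column into one column per key, then drop the raw columns
df1 = pd.concat([df1.drop(['json_dict', 'JSON'], axis=1),
                 df1['json_dict'].apply(pd.Series)], axis=1)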
Example #8
def consolidate_anciliary_h5_account():
    """
    Used as command to consolidate in the main h5 anciliary h5 generated due to column length exceptions
    """
    globalconf = config.GlobalConfig(level=logger.DEBUG)
    log = globalconf.log
    path = globalconf.config['paths']['data_folder']
    os.chdir(path)
    if not os.path.exists(path + "/account_backups"):
        os.makedirs(path + "/account_backups")

    acc_orig = 'account_db_new.h5'
    pattern_acc = 'account_db_new.h5*'
    acc_out = 'account_db_complete.h5'

    lst1 = glob.glob(pattern_acc)
    lst1.remove(acc_orig)
    if not lst1:
        log.info("No ancilliary files to append, exiting ... ")
        return

    log.info(("List of ancilliary files that will be appended: ", lst1))
    dataframe = pd.DataFrame()
    for x in lst1:
        store_in1 = pd.HDFStore(path + x)
        root1 = store_in1.root
        log.info(("Root pathname of the input store: ", root1._v_pathname))
        for lvl1 in root1:
            log.info(("Level 1 pathname in the root if the H5: ", lvl1._v_pathname))
            if lvl1:
                df1 = store_in1.select(lvl1._v_pathname)
                dataframe = dataframe.append(df1)
                log.info(("Store_in1", len(df1), x))
        store_in1.close()
        os.rename(path + x, path + "/account_backups/" + x)

    store_in1 = pd.HDFStore(path + acc_orig)
    store_out = pd.HDFStore(path + acc_out)
    root1 = store_in1.root
    log.info(("Root pathname of the input store: ", root1._v_pathname))
    root2 = store_out.root
    log.info(("Root pathname of the output store: ", root2._v_pathname))

    for lvl1 in root1:
        print (lvl1._v_pathname)
        if lvl1:
            df1 = store_in1.select(lvl1._v_pathname)
            dataframe = dataframe.append(df1)
            log.info(("Store_in1 length and name", len(df1), acc_orig))
    store_in1.close()
    os.rename(path + acc_orig, path + "/account_backups/" + datetime.now().strftime('%Y%m%d%H%M%S') + acc_orig)
    dataframe.sort_index(inplace=True,ascending=[True])
    write_acc_summary_to_h5(globalconf, log, dataframe, store_out)
    store_out.close()
    os.rename(path + acc_out, path + acc_orig)
Example #9
def write_momentum_to_sqllite(dataframe,table):
    """
    Write to sqllite the momentum snapshot passed as argument
    """
    log.info("Appending momentum data [" +table+ "] to sqllite ... " )
    globalconf = config.GlobalConfig()
    db_file = globalconf.config['sqllite']['momentum_db']
    path = globalconf.config['paths']['data_folder']
    store = sqlite3.connect(path + db_file)
    dataframe.to_sql(table, store, if_exists='append')
    store.close()
Example #10
    def test_market_data_from_sqllite(self):
        log = logger("Testing ...")
        globalconf = config.GlobalConfig()
        df = read_market_data_from_sqllite(db_type="underl_ib_hist",
                                           symbol="USO",
                                           expiry=None,
                                           last_date="20170623",
                                           num_days_back=50,
                                           resample="1D")
        print(df)
        self.assertEqual(len(df), 100)
Example #11
def run_trend_report(today_txt):
    log = logger("Trend report ...")
    globalconf = config.GlobalConfig()
    if today_txt is None:
        today = dt.date.today()
        last_date = today.strftime('%Y%m%d')
    else:
        last_date = today_txt

    am.graph_coppock(symbol="SPY", period="1D")
    am.graph_emas(symbol="SPY")
    am.graph_volatility(symbol="SPY")
    am.graph_fast_move(symbol="SPY")
Example #12
    def test_get_optchain_datasources(self):
        from persist.sqlite_methods import get_optchain_datasources
        import core.config as config

        globalconf = config.GlobalConfig()

        dict = get_optchain_datasources()

        print(dict['optchain_ib_exp']['SPY']['expiries'])
        print(dict['optchain_ib_exp']['IWM']['expiries'])

        self.assertListEqual(dict['optchain_ib_exp']['IWM']['expiries'],
                             ['2016-09', '2016-08'])
Example #13
    def test_graphs(self):
        globalconf = config.GlobalConfig()
        from core import analytics_methods as am

        # am.print_coppock_diario(symbol="SPX",period="1D")
        # p = am.graph_coppock(symbol="SPX",period="1D")
        # p = am.graph_emas(symbol="SPY")
        # p = am.graph_volatility(symbol="SPY")
        # p = am.graph_fast_move(symbol="SPY")

        # show(p)
        am.graph_volatility_cone(symbol='SPY')
        # print (md.get_datasources(globalconf))
        self.assertGreater(1, 0)
Example #14
def store_orders_from_ib_to_h5():
    """
    Method to retrieve orders -everything from the last business day-, intended for batch usage     
    """
    log=logger("store_orders_from_ib_to_h5")
    if dt.datetime.now().date() in misc_utilities.get_trading_close_holidays(dt.datetime.now().year):
        log.info("This is a US Calendar holiday. Ending process ... ")
        return

    log.info("Getting orders data from IB ... ")
    globalconf = config.GlobalConfig()
    client = ib.IBClient()
    clientid1 = int(globalconf.config['ib_api']['clientid_data'])
    client.connect(clientid1=clientid1)

    ## Get the executions (gives you everything for last business day)
    execlist = client.get_executions(10)
    client.disconnect()
    log.info("execlist length = [%d]" % ( len(execlist) ))
    if execlist:
        dataframe = pd.DataFrame.from_dict(execlist).transpose()
        f = globalconf.open_orders_store()
        dataframe['current_date'] = dt.datetime.now().strftime('%Y%m%d')
        dataframe['current_datetime'] = dt.datetime.now().strftime('%Y%m%d%H%M%S')
        log.info("Appending orders to HDF5 store ...")
        # sort the dataframe
        #dataframe.sort(columns=['account'], inplace=True) DEPRECATED
        dataframe=dataframe.sort_values(by=['account'])
        # set the index to be this and don't drop
        dataframe.set_index(keys=['account'], drop=False, inplace=True)
        # get a list of names
        names = dataframe['account'].unique().tolist()

        for name in names:
            # now we can perform a lookup on a 'view' of the dataframe
            log.info("Storing " + name + " in ABT ...")
            joe = dataframe.loc[dataframe['account'] == name]
            #joe.sort(columns=['current_datetime'], inplace=True)  DEPRECATED
            joe = joe.sort_values(by=['current_datetime'])
            try:
                f.append("/" + name, joe, data_columns=joe.columns)
            except ValueError as e:
                log.warn("ValueError raised [" + str(e) + "]  Creating ancilliary file ...")
                aux = globalconf.open_orders_store_value_error()
                aux.append("/" + name, joe, data_columns=True)
                aux.close()
        f.close()
    else:
        log.info("No orders to append ...")
Example #15
def consolidate_anciliary_h5_orders():
    globalconf = config.GlobalConfig()
    path = globalconf.config['paths']['data_folder']
    # path = '/home/david/data/'
    # http://stackoverflow.com/questions/2225564/get-a-filtered-list-of-files-in-a-directory
    os.chdir(path)
    orders_orig = 'orders_db.h5'
    pattern_orders = 'orders_db.h5*'
    orders_out = 'orders_db_new.h5'
    lst1 = glob.glob(pattern_orders)
    lst1.remove(orders_orig)
    print (lst1)
    dataframe = pd.DataFrame()
    for x in lst1:
        store_in1 = pd.HDFStore(path + x)
        root1 = store_in1.root
        print (root1._v_pathname)
        for lvl1 in root1:
            print (lvl1._v_pathname)
            if lvl1:
                df1 = store_in1.select(lvl1._v_pathname)
                dataframe = dataframe.append(df1)
                print ("store_in1", len(df1), x)
        store_in1.close()

    store_in1 = pd.HDFStore(path + orders_orig)
    store_out = pd.HDFStore(path + orders_out)
    root1 = store_in1.root
    print (root1._v_pathname)
    for lvl1 in root1:
        print (lvl1._v_pathname)
        if lvl1:
            df1 = store_in1.select(lvl1._v_pathname)
            dataframe = dataframe.append(df1)
            print ("store_in1", len(df1), orders_orig)
    store_in1.close()

    dataframe.sort_index(inplace=True,ascending=[True])
    names = dataframe['account'].unique().tolist()
    for name in names:
        print ("Storing " + name + " in ABT ..." + str(len(dataframe)))
        joe = dataframe[dataframe.account == name]
        joe=joe.sort_values(by=['current_datetime'])
        store_out.append("/" + name, joe, data_columns=True)
    store_out.close()
Example #16
    def __init__(self):
        """
        Connection to the IB API
        """
        # Creation of Connection class
        object = config.GlobalConfig()
        port1 = int(object.config['ib_api']['port'])
        host1 = str(object.config['ib_api']['host'])
        clientid1 = int(object.config['ib_api']['clientid_data'])
        print("Calling connection port=%d host=%s clientid=%d" %
              (port1, host1, clientid1))
        self.connection = ibConnection(port=port1,
                                       host=host1,
                                       clientId=clientid1)
        # Register data handlers
        self.connection.registerAll(self.process_messages)
        # Connect
        self.connection.connect()
Example #17
def write_portfolio_to_sqllite(dataframe):
    """
    Write to sqllite the portfolio snapshot passed as argument
    """
    log.info("Appending portfolio data to sqllite ... ")
    globalconf = config.GlobalConfig()
    db_file = globalconf.config['sqllite']['portfolio_db']
    path = globalconf.config['paths']['data_folder']
    store = sqlite3.connect(path + db_file)

    names = dataframe['accountName'].unique().tolist()
    for name in names:
        joe = dataframe.loc[dataframe['accountName'] == name]
        # include this field, which is sometimes present (for options), so the schema stays stable
        if 'multiplier' not in joe.columns:
            joe['multiplier'] = ""
        joe.to_sql(name, store, if_exists='append')
    store.close()
Example #18
def store_optchain_yahoo_to_db():
    globalconf = config.GlobalConfig()
    optchain_def = globalconf.get_tickers_optchain_yahoo()
    source1 = globalconf.config['use_case_yahoo_options']['source']
    log = logger("yahoo options chain")
    if dt.datetime.now().date() in utils.get_trading_close_holidays(dt.datetime.now().year):
        log.info("This is a US Calendar holiday. Ending process ... ")
        return

    log.info("Getting options chain data from yahoo w pandas_datareader ... [%s]"
             % (dt.datetime.now().strftime('%Y-%m-%d %H:%M:%S') ))
    wait_secs=3
    for _,row in optchain_def.iterrows():
        log.info("Init yahoo quotes downloader symbol=%s" % (row.symbol) )
        try:
            option = web.YahooOptions(row.symbol)
            # FIX "Yahoo Options has been immediately deprecated due to large breaks in the API"
            # option = web.Options(symbol,source1)
            for j in option.expiry_dates:
                log.info("expiry=%s" % (str(j)))
                try:
                    joe = pd.DataFrame()
                    call_data = option.get_call_data(expiry=j)
                    put_data = option.get_put_data(expiry=j)
                    if call_data.values.any():
                        joe = joe.append(call_data)
                    if put_data.values.any():
                        joe = joe.append(put_data)
                    joe = joe.sort_index()
                    joe['Quote_Time_txt'] = joe['Quote_Time'].dt.strftime("%Y-%m-%d %H:%M:%S")
                    joe['Last_Trade_Date_txt'] = joe['Last_Trade_Date'].dt.strftime("%Y-%m-%d %H:%M:%S")
                    joe = joe.reset_index().set_index("Quote_Time")
                    joe['Expiry_txt'] = joe['Expiry'].dt.strftime("%Y-%m-%d %H:%M:%S")
                    if 'JSON' in joe.columns:
                        joe['JSON'] = ""

                    write_market_data_to_sqllite(joe, "optchain_yhoo")
                except KeyError as e:
                    log.warn("KeyError raised [" + str(e) + "]...")
            sleep(wait_secs)
        except (RemoteDataError,TypeError) as err:
            log.info("No information for ticker [%s] Error=[%s] sys_info=[%s]" % (str(row.symbol) , str(err) , sys.exc_info()[0] ))
            continue
Example #19
def store_acc_summary_and_portfolio_from_ib_to_db():
    """ Stores Snapshot Summary information about account and portfolio from IB into db """
    log=logger("store_acc_summary_and_portfolio_from_ib_to_db")
    if dt.datetime.now().date() in get_trading_close_holidays(dt.datetime.now().year):
        log.info("This is a US Calendar holiday. Ending process ... ")
        return
    globalconf = config.GlobalConfig()
    client = ib.IBClient()
    clientid1 = int(globalconf.config['ib_api']['clientid_data'])
    client.connect(clientid1=clientid1)

    acclist, summarylist = read_acc_summary_and_portfolio_from_ib(client, globalconf, log)
    log.info("acclist length [%d] " % ( len(acclist) ))
    log.info("summarylist length [%d]" % ( len(summarylist) ))

    if acclist:
        dataframe = pd.DataFrame.from_dict(acclist).transpose()
        dataframe['current_date'] = dt.datetime.now().strftime('%Y%m%d')
        dataframe['current_datetime'] = dt.datetime.now().strftime('%Y%m%d%H%M%S')
        dataframe.sort_values(by=['current_datetime'], inplace=True)
        dataframe.index=dataframe['current_datetime']
        dataframe.drop('current_datetime',axis=1,inplace=True)
        dataframe.drop('multiplier', axis=1, inplace=True)
        write_portfolio_to_sqllite(dataframe)
    else:
        log.info("Nothing to append to sqlite ... ")

    if summarylist:
        dataframe2 = pd.DataFrame.from_dict(summarylist).transpose()
        # print("dataframe = ",dataframe)
        dataframe2['current_date'] = dt.datetime.now().strftime('%Y%m%d')
        dataframe2['current_datetime'] = dt.datetime.now().strftime('%Y%m%d%H%M%S')
        dataframe2.sort_values(by=['current_datetime'], inplace=True)
        dataframe2.index=pd.to_datetime(dataframe2['current_datetime'], format="%Y%m%d%H%M%S")
        dataframe2.drop('current_datetime',axis=1,inplace=True)
        dataframe2['current_datetime_txt'] = dataframe2.index.strftime("%Y-%m-%d %H:%M:%S")
        dataframe2.drop('Guarantee_C_USD', axis=1, inplace=True)
        write_acc_summary_to_sqllite(dataframe2)
    else:
        log.info("Nothing to append to HDF5 ... ")

    client.disconnect()
Example #20
    def test_extrae_detalle_operaciones(self):
        from persist.portfolio_and_account_data_methods import extrae_detalle_operaciones
        import datetime as dt
        symbol = "SPY"
        expiry = "20170317"
        secType = "OPT"
        import core.config as config
        globalconf = config.GlobalConfig()
        accountid = globalconf.get_accountid()
        fecha_valoracion = dt.datetime.now()  # dt.datetime(year=2017,month=2,day=2) #
        scenarioMode = "N"
        simulName = "spy0317dls"
        df1 = extrae_detalle_operaciones(valuation_dttm=fecha_valoracion,
                                         symbol=symbol,
                                         expiry=expiry,
                                         secType=secType,
                                         accountid=accountid,
                                         scenarioMode=scenarioMode,
                                         simulName=simulName)

        print(df1)
Example #21
def migrate_h5_to_sqllite_portfolio():
    """
    migrate_h5_to_sqllite_portfolio
    """
    hdf5_pattern = "portfolio_db*.h5*"
    globalconf = config.GlobalConfig()
    log = logger("migrate_h5_to_sqllite_portfolio")
    path = globalconf.config['paths']['data_folder']
    lst1 = glob.glob(path + hdf5_pattern)
    if not lst1:
        log.info("No h5 files to append ... ")
    else:
        log.info(("List of h5 files that will be appended: ", lst1))
        time.sleep(1)
        try:
            input("Press Enter to continue...")
        except SyntaxError:
            pass

        for hdf5_path in lst1:
            store_file = pd.HDFStore(hdf5_path)
            root1 = store_file.root
            # all child nodes of root, which are the account ids
            list = [x._v_pathname for x in root1]
            log.info(("Root pathname of the input store: ", root1._v_pathname))

            store_file.close()
            log.info(("List of account ids: " + str(list)))
            for accountid in list:
                store_file = pd.HDFStore(hdf5_path)
                node1 = store_file.get_node(accountid)
                if node1:
                    log.info(("accountid: " + accountid))
                    df1 = store_file.select(node1._v_pathname)
                    df1.set_index(keys=['conId'], drop=True, inplace=True)
                    persist.sqlite_methods.write_portfolio_to_sqllite(df1)
                store_file.close()
Example #22
def consolidate_anciliary_h5_portfolio():
    """
    Used as command to consolidate in the main h5 anciliary h5 generated due to column length exceptions
    """
    globalconf = config.GlobalConfig()
    log = logger("consolidate_anciliary_h5_portfolio")
    path = globalconf.config['paths']['data_folder']
    os.chdir(path)
    if not os.path.exists(path + "/portfolio_backups"):
        os.makedirs(path + "/portfolio_backups")

    port_orig = 'portfolio_db.h5'
    pattern_port = 'portfolio_db.h5*'
    port_out = 'portfolio_db_complete.h5'

    lst1 = glob.glob(pattern_port)
    lst1.remove(port_orig)
    dataframe = pd.DataFrame()
    old_format = False
    if not lst1:
        log.info("No ancilliary files to append ... ")
    else:
        log.info(("List of ancilliary files that will be appended: ", lst1))
        for x in lst1:
            store_in1 = pd.HDFStore(path + x)
            root1 = store_in1.root
            log.info(("Root pathname of the input store: ", root1._v_pathname))
            for lvl1 in root1:
                log.info(("Level 1 pathname in the root if the H5: ", x, lvl1._v_pathname))
                if lvl1:
                    try:
                        df1 = store_in1.select(lvl1._v_pathname)
                        dataframe = dataframe.append(df1)
                        log.info(("Store_in1", len(df1), x))
                    except (TypeError) as e:
                        log.info("This is the old format of the portfolio file...")
                        old_format = True
                        break
            if old_format:
                for lvl1 in root1:
                    for lvl2 in store_in1.get_node(lvl1._v_pathname):
                        for lvl3 in store_in1.get_node(lvl2._v_pathname):
                            for lvl4 in store_in1.get_node(lvl3._v_pathname):
                                for lvl5 in store_in1.get_node(lvl4._v_pathname):
                                    log.info(("Pathname level 5: ", x, lvl5._v_pathname))
                                    if lvl5:
                                        df1 = store_in1.select(lvl5._v_pathname)
                                        dataframe = dataframe.append(df1)

            store_in1.close()
            os.rename(path + x, path + "/portfolio_backups/" + x)

    store_in1 = pd.HDFStore(path + port_orig)
    store_out = pd.HDFStore(path + port_out)
    root1 = store_in1.root
    root2 = store_out.root
    old_format = False
    log.info(("Root pathname of the input store: ", root1._v_pathname, " and output the store: ", root2._v_pathname))
    for lvl1 in root1:
        log.info(("Level 1 pathname in the root if the H5: ", port_orig, lvl1._v_pathname))
        if lvl1:
            try:
                df1 = store_in1.select(lvl1._v_pathname)
                dataframe = dataframe.append(df1)
                log.info(("Store_in1", len(df1), port_orig))
            except (TypeError) as e:
                log.info("This is the old format of the portfolio file...")
                old_format = True
                break
    if old_format:
        for lvl1 in root1:
            for lvl2 in store_in1.get_node(lvl1._v_pathname):
                for lvl3 in store_in1.get_node(lvl2._v_pathname):
                    for lvl4 in store_in1.get_node(lvl3._v_pathname):
                        for lvl5 in store_in1.get_node(lvl4._v_pathname):
                            log.info(("Pathname level 5: ", port_orig, lvl5._v_pathname))
                            if lvl5:
                                df1 = store_in1.select(lvl5._v_pathname)
                                dataframe = dataframe.append(df1)

    store_in1.close()
    os.rename(path + port_orig, path + "/portfolio_backups/" + datetime.now().strftime('%Y%m%d%H%M%S') + port_orig)
    dataframe.sort_values(by=['current_datetime'], inplace=True)
    dataframe = dataframe.dropna(subset=['current_datetime'])
    dataframe.drop('multiplier', axis=1, inplace=True)
    write_portfolio_to_h5(globalconf, log, dataframe, store_out)
    store_out.close()
    os.rename(path + port_out, path + port_orig)
Example #23
def run_reader(now1=None):
    """
    Run with "NA" string to get today's

    :param now1:
    :return:
    """
    locale.setlocale(locale.LC_ALL, 'en_US.UTF-8')
    now = dt.datetime.now()  # + dt.timedelta(days=-4)
    mydate = utils.expiry_date(now1)
    if mydate:
        now = mydate
    weekday = now.strftime("%A").lower()
    log.info(
        ("Getting data from https://www.barchart.com/stocks/momentum ... ",
         now))
    #if (  weekday in ("saturday","sunday")  or
    #      now.date() in utils.get_trading_close_holidays(dt.datetime.now().year)):
    #    log.info("This is a US Calendar holiday or weekend. Ending process ... ")
    #    return

    base = "https://www.barchart.com/stocks/momentum"
    globalconf = config.GlobalConfig()
    url = base
    log.info('[%s] Downloading: %s' % (str(now), url))
    b_html = download(
        url=url,
        log=log,
        user_agent=
        "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36"
    )
    if not b_html is None:
        soup = BeautifulSoup(b_html, 'html.parser')
        fixed_html = soup.prettify()
        tables = soup.find_all('table')

        # Summary Of Stocks With New Highs And Lows
        table = tables[1]
        df1 = pd.read_html(str(table))[0]
        #df1.columns = df1.columns.astype(str)
        #df1 = df1.add_prefix('data_')
        df1.columns = [
            'description', 'data_5days', 'data_1month', 'data_3months',
            'data_6months', 'data_52weeks', 'data_ytd'
        ]
        df1 = df1.replace(np.nan, 'Default', regex=True)
        df1['load_dttm'] = now
        df1.set_index(keys=['load_dttm'], drop=True, inplace=True)
        sql.write_momentum_to_sqllite(df1, "stocks_hi_lo_summary")

        # Market Performance Indicator
        #   The percentage of stocks in $BCMM above their individual Moving Average per period.
        #
        # Barchart Market Momentum Index ($BCMM) is an exclusive index used as an indicator of change
        #   in overall markets. It reflects the movement of stocks who fit the following criteria: must have
        #   current SEC filings, must have traded for a minimum of 6-months, and must be trading above $2.
        table = tables[2]
        df2 = pd.read_html(str(table))[0]
        #df1.columns = df1.columns.astype(str)
        #df1 = df1.add_prefix('data_')
        df2.columns = [
            'description', '5day_ma', '20day_ma', '50day_ma', '100day_ma',
            '150day_ma', '200day_ma'
        ]
        df2 = df2.replace(np.nan, 'Default', regex=True)
        df2['load_dttm'] = now
        df2.set_index(keys=['load_dttm'], drop=True, inplace=True)
        sql.write_momentum_to_sqllite(df2, "stocks_bcmm_above_ma_summary")
Example #24
def run_reader():
    globalconf = config.GlobalConfig()
    log = globalconf.log
    optchain_def = globalconf.get_tickers_optchain_ib()
    source1 = globalconf.config['use_case_ib_options']['source']

    if dt.datetime.now().date() in utils.get_trading_close_holidays(
            dt.datetime.now().year):
        log.info("This is a US Calendar holiday. Ending process ... ")
        return

    log.info("Getting realtime option chains data from IB ... ")

    underl = {}
    for index, row in optchain_def.iterrows():
        log.info("underl=[%s] [%s] [%s] [%s] [%s] [%d]" %
                 (row['symbol'], row['type'], str(row['Expiry']),
                  row['Exchange'], row['Currency'], int(index)))
        underl.update({
            int(index):
            RequestOptionData(row['symbol'], row['type'], str(row['Expiry']),
                              0, '', '', row['Exchange'], row['Currency'],
                              int(index))
        })

    underl_under = {}
    for index, row in optchain_def.iterrows():
        log.info("underl_under=[%s] [%s] [%s] [%s] [%s] [%d]" %
                 (row['symbol'], row['underl_type'], str(row['underl_expiry']),
                  row['underl_ex'], row['underl_curr'], int(index)))
        underl_under.update({
            int(index):
            RequestUnderlyingData(row['symbol'], row['underl_type'],
                                  str(row['underl_expiry']), 0, '', '',
                                  row['underl_ex'], row['underl_curr'],
                                  int(index))
        })

    client = ib.IBClient()
    clientid1 = int(globalconf.config['ib_api']['clientid_data'])
    client.connect(clientid1=clientid1)

    # causes an error on Ubuntu: because it sets isDone the first time ::
    #   print("Get time from server [%s] isConnected? [%s] " % ( str(client.getTime()) , str(client.isConnected() )  ) )

    underl_prc = client.getMktData(underl_under)
    row1 = 0
    opt_chain_ranges = {}
    for reqId, request in underl_prc.items():
        if reqId in request.get_out_data().keys():
            row1 += 1
            if "closePrice" in request.get_out_data()[reqId]:
                log.info("Requestid [%d]: Option[%s] Results length [%d]" %
                         (reqId, str(request.get_in_data()),
                          len(request.get_out_data())))
                opt_chain_ranges.update({
                    request.get_in_data()["symbol"]:
                    request.get_out_data()[reqId]["closePrice"]
                })
                #print ("Requestid [%d]: modelVega=%0.5f" % ( reqId, request.get_out_data()[reqId]['modelVega'] ) )
    log.info("opt_chain_ranges = [%s] " % (str(opt_chain_ranges)))

    # get options chain
    list_results = client.getOptionsChain(underl)

    log.info("Number of requests [%d]" % (len(list_results)))
    for reqId, request in list_results.items():
        log.info(
            "Requestid [%d]: Option[%s] Len Results [%d]" %
            (reqId, str(request.get_in_data()), len(request.optionsChain)))

    contr = {}
    num = 100
    pct_range_opt_chain = float(
        globalconf.config['ib_api']['pct_range_opt_chain'])
    for reqId, request in list_results.items():
        #print ("Requestid [%d]: Chain size [%d] detail [%s]"
        #       % ( reqId , len( request.optionsChain ) , str(request.optionsChain)  ))
        for opt1 in request.optionsChain:
            if opt1["symbol"] in opt_chain_ranges:
                filtro_strikes = opt_chain_ranges[opt1["symbol"]]
                #print ("filtro_strikes = " , filtro_strikes , "opt1 = " , opt1 )
                if  opt1["strike"] >= (filtro_strikes * ( 1 - pct_range_opt_chain ) ) \
                        and opt1["strike"]<= (filtro_strikes * ( 1 + pct_range_opt_chain ) ):
                    contr[num] = RequestOptionData(
                        opt1["symbol"], opt1["secType"], opt1["expiry"],
                        opt1["strike"], opt1["right"], opt1["multiplier"],
                        opt1["exchange"], opt1["currency"], num)
                    num += 1

    list_results2 = client.getMktData(contr)
    client.disconnect()
    dataframe = pd.DataFrame()
    row1 = 0

    for reqId, request in list_results2.items():
        row1 += 1
        #print ("Requestid [%d]: Option[%s] Results [%s]" % ( reqId , str(request.get_in_data()), str(request.get_out_data()) ))
        #print ("Requestid [%d]: modelVega=%0.5f" % ( reqId, request.get_out_data()[reqId]['modelVega'] ) )
        dict1 = request.get_in_data().copy()
        if reqId in request.get_out_data():
            dict1.update(request.get_out_data()[reqId])
        for key in dict1.keys():
            dataframe.loc[row1, key] = dict1[key]

    log.info(str(list(dataframe.columns.values)))
    dataframe['current_date'] = dt.datetime.now().strftime('%Y%m%d')
    dataframe['current_datetime'] = dt.datetime.now().strftime('%Y%m%d%H%M%S')
    #dataframe.to_hdf(f,"IB_IDX",format='table')
    # c_day gives a warning
    #   NaturalNameWarning: object name is not a valid Python identifier: '9'; it does not match the pattern
    #   ``^[a-zA-Z_][a-zA-Z0-9_]*$``; you will not be able to use natural naming to access this object; using
    #   ``getattr()`` will still work, though   NaturalNameWarning)
    #f.append(    c_year+"/"+c_month+"/"+c_day+"/"+c_hour+"/"+c_minute , dataframe, data_columns=dataframe.columns)
    #f.close()  # Close file

    # sort the dataframe
    dataframe = dataframe[dataframe.current_datetime.notnull()]
    types = dataframe.apply(lambda x: pd.api.types.infer_dtype(x.values))  # pd.lib.infer_dtype was removed from pandas
    # print str(types)
    for col in types[types == 'floating'].index:
        dataframe[col] = dataframe[col].map(lambda x: np.nan
                                            if x > 1e200 else x)

    dataframe['index'] = dataframe['current_datetime'].apply(
        lambda x: dt.datetime.strptime(x, '%Y%m%d%H%M%S'))

    dataframe = dataframe.sort_values(by=['index'], inplace=False)
    # set the index to be this and don't drop
    dataframe.set_index(keys=['index'], drop=True, inplace=True)
    # get a list of names
    names = dataframe['symbol'].unique().tolist()

    for name in names:
        # now we can perform a lookup on a 'view' of the dataframe
        log.info("Storing " + name + " in ABT ...")
        joe = dataframe[dataframe.symbol == name]
        joe = joe.sort_values(
            by=['symbol', 'current_datetime', 'expiry', 'strike', 'right'])
        # joe.to_excel(name+".xlsx")
        write_market_data_to_sqllite(joe, "optchain_ib")
Example #25
def store_underlying_ib_to_db():
    log=logger("historical data loader")
    log.info("Getting historical underlying data from IB ... ")
    globalconf = config.GlobalConfig()
    path = globalconf.config['paths']['data_folder']
    underly_def = globalconf.get_tickers_historical_ib()
    client = IBClient()
    clientid1 = int(globalconf.config['ib_api']['clientid_data'])
    client.connect(clientid1=clientid1)

    dt_now=dt.datetime.now()
    endDateTime =  dt_now.strftime('%Y%m%d %H:%M:%S')
    # the most history you can request for 30-min bars is one month
    # barSizeSetting = "30 mins"
    barSizeSetting = "1 min"
    whatToShow = "TRADES"
    useRTH = 1
    formatDate = 1
    wait_secs = 40

    db_file = get_market_db_file(db_type="underl_ib_hist", expiry="NONE")
    store = sqlite3.connect(path + db_file)

    for index, row_req in underly_def.iterrows():
        log.info("underl=[%s] [%s] [%s] [%d] [%s] [%s] [%s] [%s] [%d]"
                        % ( str(row_req['symbol']), str(row_req['underl_type']),
                            str(row_req['underl_expiry']), 0, '', '',
                            str(row_req['underl_ex']), str(row_req['underl_curr']), int(index) ) )

        ticker = RequestUnderlyingData(str(row_req['symbol']), str(row_req['underl_type']), str(row_req['underl_expiry']), 0, '', '',
                                       str(row_req['underl_ex']), str(row_req['underl_curr']), int(index))
        symbol_expiry = "/" + str(row_req['symbol']) + "/" + str(row_req['underl_expiry'])

        sql = "SELECT count(*) as count1 FROM sqlite_master WHERE type='table' AND name='" + str(row_req['symbol']) + "';"
        tbl_exist = pd.read_sql_query(sql, store)['count1'][0]
        log.info("Does this symbol has a table in sqlite? " + str(tbl_exist))

        if tbl_exist:
            if int(row_req['underl_expiry']) > 0:
                sql = "SELECT MAX(DATE) as max1 FROM " + str(row_req['symbol']) +" WHERE EXPIRY = '" + symbol_expiry + "' ;"
            else:
                sql = "SELECT MAX(DATE) as max1 FROM " + str(row_req['symbol']) + " ;"

            last_date = pd.read_sql_query( sql , store)['max1'][0]
            log.info("Last date in data store for symbol " + symbol_expiry + " is " + str(last_date) )
            if last_date is not None:
                last_record_stored = dt.datetime.strptime(str(last_date), '%Y%m%d %H:%M:%S')
                # .seconds must not be used because it gives the wrong answer; use .total_seconds()
                # days= int(round( (dt_now - last_record_stored).total_seconds() / 60 / 60 / 24  ,0))
                # the above is not enough either, because the difference in business days must be considered
                # days = np.busday_count(last_record_stored.date(), dt_now.date())
                bh = utils.BusinessHours(last_record_stored, dt_now, worktiming=[15, 21], weekends=[6, 7])
                days = bh.getdays()
                durationStr = str(days) + " D"
            else:
                last_record_stored = 0
                durationStr = "30 D"
                barSizeSetting = "30 mins"

            if str(row_req['symbol']) in ['NDX','SPX','VIX']:
                barSizeSetting = "30 mins"
        else:
            last_record_stored = 0
            durationStr = "30 D"
            barSizeSetting = "30 mins"

        log.info( "last_record_stored=[%s] endDateTime=[%s] durationStr=[%s] barSizeSetting=[%s]"
                  % ( str(last_record_stored), str(endDateTime) , durationStr, barSizeSetting) )

        historicallist = client.get_historical(ticker, endDateTime, durationStr, barSizeSetting,whatToShow, useRTH,formatDate)
        dataframe = pd.DataFrame()
        if historicallist:
            for reqId, request in historicallist.items():
                for date, row in request.items():
                    temp1 = pd.DataFrame(row, index=[0])
                    temp1['symbol'] = str(row_req['symbol'])
                    if int(row_req['underl_expiry']) > 0:
                        temp1['expiry'] = symbol_expiry
                    else:
                        temp1['expiry'] = str(row_req['underl_expiry'])
                    temp1['type'] = str(row_req['underl_type'])
                    temp1['load_dttm'] = endDateTime
                    dataframe = dataframe.append(temp1.reset_index().drop('index', 1))
            dataframe = dataframe.sort_values(by=['date']).set_index('date')
            log.info( "appending data to sqlite ...")
            write_market_data_to_sqllite(dataframe, "underl_ib_hist")

        log.info("sleeping [%s] secs ..." % (str(wait_secs)))
        sleep(wait_secs)

    client.disconnect()
    store.close()
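The comment about .seconds versus .total_seconds() is worth spelling out: timedelta.seconds holds only the sub-day remainder of the difference, so it silently drops whole days. For example:

import datetime as dt

delta = dt.datetime(2017, 6, 23, 12, 0) - dt.datetime(2017, 6, 20, 12, 0)
print(delta.seconds)          # 0 -- only the part of the difference below one day
print(delta.total_seconds())  # 259200.0 -- the full 3 days expressed in seconds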
Example #26
def historical_data_loader():
    """
    ADVERTENCIA USO DATOS HISTORICOS:

    Se inserta un registro duplicado en cada carga incremental. Es decir:
        se vuelve a insertar la barra de la ultima media hora que estaba cargada ya en el hdf5
        y tipicamente el close de esta barra es distinto al cargado inicialmente.
        La analitica que se haga sobre esta tabla debe contemplar eliminar primero de los registros duplicados
        porque asumimos que el segundo es el valido (dado que es igual al open de la siguiente barra de media hora
        como se hgha observado)
        este error se puede eliminar o mitigar si solamente se piden los datos histoticos con el mercado cerrado
        que es lo que se hace en el modo automatico (crontab) Validar esto.
    :return:
    """
    log=logger("historical data loader")
    log.info("Getting historical underlying data from IB ... ")

    globalconf = config.GlobalConfig()
    underly_def = globalconf.get_tickers_historical_ib()
    client = IBClient()
    clientid1 = int(globalconf.config['ib_api']['clientid_data'])
    client.connect(clientid1=clientid1)

    dt_now=datetime.now()
    endDateTime =  dt_now.strftime('%Y%m%d %H:%M:%S')
    # the most history you can request for 30-min bars is one month
    # barSizeSetting = "30 mins"
    barSizeSetting = "1 min"
    whatToShow = "TRADES"
    useRTH = 1
    formatDate = 1
    wait_secs = 40
    f = globalconf.open_historical_store()

    for index, row_req in underly_def.iterrows():
        log.info("underl=[%s] [%s] [%s] [%d] [%s] [%s] [%s] [%s] [%d]"
                        % ( str(row_req['symbol']), str(row_req['underl_type']),
                            str(row_req['underl_expiry']), 0, '', '',
                            str(row_req['underl_ex']), str(row_req['underl_curr']), int(index) ) )

        ticker = RequestUnderlyingData(str(row_req['symbol']), str(row_req['underl_type']), str(row_req['underl_expiry']), 0, '', '',
                                       str(row_req['underl_ex']), str(row_req['underl_curr']), int(index))

        path_h5 = "/" + str(row_req['symbol'])
        if int(row_req['underl_expiry']) > 0:  # int() instead of Python 2's long()
            path_h5 = path_h5 + "/" + str(row_req['underl_expiry'])
        last_record_stored = 0
        node = f.get_node(path_h5)
        if node:
            df1 = f.select(node._v_pathname)
            df1= df1.reset_index()['date']
            last_record_stored = datetime.strptime(str(df1.max()), '%Y%m%d %H:%M:%S')
            # .seconds must not be used because it gives the wrong answer; use .total_seconds()
            #days= int(round( (dt_now - last_record_stored).total_seconds() / 60 / 60 / 24  ,0))
            # the above is not enough either, because the difference in business days must be considered
            #days = np.busday_count(last_record_stored.date(), dt_now.date())
            bh= misc_utilities.BusinessHours(last_record_stored, dt_now, worktiming=[15, 21], weekends=[6, 7])
            days = bh.getdays()
            durationStr = str( days ) + " D"
        else:
            durationStr = "30 D"
            barSizeSetting = "30 mins"

        if str(row_req['symbol']) in ['NDX','SPX','VIX']:
            barSizeSetting = "30 mins"

        log.info( "last_record_stored=[%s] endDateTime=[%s] durationStr=[%s] barSizeSetting=[%s]"
                  % ( str(last_record_stored), str(endDateTime) , durationStr, barSizeSetting) )

        historicallist = client.get_historical(ticker, endDateTime, durationStr, barSizeSetting,whatToShow, useRTH,formatDate)
        #print historicallist
        dataframe = pd.DataFrame()
        if historicallist:
            for reqId, request in historicallist.items():
                for date, row in request.items():
                    # print ("date [%s]: row[%s]" % (date, str(row)))
                    temp1 = pd.DataFrame(row, index=[0])
                    temp1['symbol'] = str(row_req['symbol'])
                    temp1['expiry'] = str(row_req['underl_expiry'])
                    temp1['type'] = str(row_req['underl_type'])
                    temp1['load_dttm'] = endDateTime
                    dataframe = dataframe.append(temp1.reset_index().drop('index', 1))

            dataframe = dataframe.sort_values(by=['date']).set_index('date')
            log.info( "appending data in hdf5 ...")
            f.append(path_h5, dataframe, data_columns=dataframe.columns)
        log.info("sleeping [%s] secs ..." % (str(wait_secs)))
        sleep(wait_secs)

    client.disconnect()
    f.close()  # Close file
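Per the warning in the docstring, each incremental load re-inserts the last half-hour bar, and the second copy is the one to trust. A minimal sketch of the deduplication the downstream analytics should apply, assuming bars indexed by date (store path and key are illustrative):

import pandas as pd

df = pd.read_hdf('underl_ib_hist_db.h5', '/SPY')  # half-hour bars indexed by date
# keep the last copy of each duplicated timestamp: the re-inserted bar wins
df = df[~df.index.duplicated(keep='last')]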
Example #27
def download(url, log, user_agent, num_retries=2):
    # header and request construction reconstructed from the call sites below;
    # the original snippet starts mid-function and the num_retries default is assumed
    request = urllib2.Request(url, headers={'User-agent': user_agent})
    try:
        html = urllib2.urlopen(request).read()
    except urllib2.URLError as e:
        log.error('Download error: %s' % (str(e.reason)))
        html = None
        if num_retries > 0:
            if hasattr(e, 'code') and 500 <= e.code < 600:
                # retry 5XX HTTP errors
                return download(url=url, log=log, user_agent=user_agent, num_retries=num_retries - 1)
    return html


if __name__ == "__main__":
    log = logger("ivolatility download")
    log.info("Getting volatility data from ivolatility.com ... ")
    url = "http://www.ivolatility.com/options.j?ticker=SPX&R=0"
    globalconf = config.GlobalConfig()
    log.info('[%s] Downloading: %s' % (str(dt.datetime.now()), url))
    b_html = download(
        url=url,
        log=log,
        user_agent=
        "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36"
    )

    soup = BeautifulSoup(b_html, 'html.parser')
    fixed_html = soup.prettify()
    ul = soup.find_all('font', attrs={'class': 's2'})
    #type(ul)
    #for i in ul:
    #    print i.text
Example #28
def run_reader(now1 = None):
    """
    Run with "NA" string to get today's

    :param now1:
    :return:
    """
    locale.setlocale(locale.LC_ALL, 'en_US.UTF-8')
    now = dt.datetime.now()  # + dt.timedelta(days=-4)
    mydate = utils.expiry_date(now1)
    if mydate:
        now = mydate
    weekday = now.strftime("%A").lower()
    log.info(("Getting data from www.itpm.com ... ",now))
    if (  weekday in ("saturday","sunday")  or
          now.date() in utils.get_trading_close_holidays(dt.datetime.now().year)):
        log.info("This is a US Calendar holiday or weekend. Ending process ... ")
        return

    base = "https://www.itpm.com/wraps_post/"
    month = now.strftime("%B").lower()  # Changed format update 27DEC2019 nov --> november
    #daymonth = now.strftime("%d").zfill(2)
    # Fix URL change format: without leading zeroes
    daymonth = str(now.day)
    year = now.strftime("%Y")
    wrapTypes = ("opening","closing")
    globalconf = config.GlobalConfig()
    # example https://www.itpm.com/wraps_post/closing-wrap-friday-nov-17-2017/
    # Changed format update 27DEC2019: https://www.itpm.com/wraps_post/closing-wrap-friday-december-20-2019/
    for wrapType in wrapTypes:
        url = base + wrapType + "-wrap-" + weekday + "-" + month + "-" + daymonth + "-" + year + "/"
        log.info ('[%s] Downloading: %s' % (str(now) , url ) )
        b_html= download(url=url, log=log,
                         user_agent="Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36"
                         )
        if b_html is None:
            continue
        soup = BeautifulSoup(b_html, 'html.parser')
        fixed_html = soup.prettify()
        #print(fixed_html)
        #ul = soup.find_all('p', attrs={'class':'s2'})
        wrap = {
            'datetime': now,
            'type': wrapType
        }
        ul = soup.find_all('p')
        #n =1
        for i in ul:
            #print((str(i.text),n))
            if "European Equity Markets" in str(i.text):
                wrap.update({"euro_equity_comment" : str(i.text)})
                wrap.update({"euro_equity_sentiment": get_sentiment(str(i.text)) })
            elif "Currency Markets" in str(i.text):
                wrap.update({"fx_comment" : str(i.text)})
                wrap.update({"fx_comment_sentiment": get_sentiment(str(i.text))})
            elif "Commodities Markets" in str(i.text):
                wrap.update({"commodities_comment" : str(i.text)})
                wrap.update({"commodities_sentiment": get_sentiment(str(i.text))})
            elif "US Equity Markets" in str(i.text):
                wrap.update({"us_equity_comment" : str(i.text)})
                wrap.update({"us_equity_sentiment": get_sentiment(str(i.text))})
            elif "Bond Markets" in str(i.text):
                wrap.update({"bonds_comment" : str(i.text)})
                wrap.update({"bonds_sentiment": get_sentiment(str(i.text))})
            #n=n+1
        #print(wrap)
        da.save_docs_to_db(globalconf, log, wrap, collection_name="itrm-wraps")
Example #29
def read_10_days_acc_summary_and_current_positions():
    """ Summary information about account and portfolio for last 10 days is read from db and printed to console also."""
    days = 10
    globalconf = config.GlobalConfig(level=logger.ERROR)
    log = logger("print_10_days_acc_summary_and_current_positions")
    client = ib.IBClient()
    clientid1 = int(globalconf.config['ib_api']['clientid_orders'])
    client.connect(clientid1=clientid1)
    # this is to try to fit each row of a dataframe on one line when printing to console
    pd.set_option('display.max_rows', 500)
    pd.set_option('display.max_columns', 400)
    pd.set_option('display.width', 1000)
    now = dt.datetime.now()  # Get current time
    acclist, summarylist = read_acc_summary_and_portfolio_from_ib(client, globalconf, log)
    print("Real time valuation: %s" % (str(now)))
    if acclist:
        dataframe = pd.DataFrame.from_dict(acclist).transpose()
        dataframe['current_date'] = dt.datetime.now().strftime('%Y%m%d')
        dataframe['current_datetime'] = dt.datetime.now().strftime('%Y%m%d%H%M%S')
        dataframe = dataframe[[
                    u'averageCost',
                    u'conId',
                    u'expiry',
                    u'localSymbol',
                    u'right',
                    u'marketPrice',
                    u'marketValue',
                    u'multiplier',
                    u'position',
                    u'strike',
                    u'symbol',
                    u'unrealizedPNL'
                    ]]
        print("Portfolio = ")
        print(dataframe)
    if summarylist:
        dataframe2 = pd.DataFrame.from_dict(summarylist).transpose()
        dataframe2['current_date'] = dt.datetime.now().strftime('%Y%m%d')
        dataframe2['current_datetime'] = dt.datetime.now().strftime('%Y%m%d%H%M%S')
        # sort the dataframe
        dataframe2.sort_values(by=['AccountCode_'], inplace=True)
        # set the index to be this and don't drop
        dataframe2.set_index(keys=['AccountCode_'], drop=False,inplace=True)
        dataframe2 = dataframe2[[u'Cushion_',
                                 u'FullInitMarginReq_USD',
                                 u'FullMaintMarginReq_USD', u'GrossPositionValue_USD',
                                 u'NetLiquidation_USD', u'RegTEquity_USD',
                                 u'TotalCashBalance_BASE', u'UnrealizedPnL_BASE']]
        print("____________________________________________________________________________________________")
        print("Summary = ")
        print(dataframe2.transpose())
    client.disconnect()

    print("____________________________________________________________________________________________")
    print("Summary Account Last %d days valuation:" % (days))
    accountid = globalconf.get_accountid()
    df1 = read_historical_acc_summary_from_sqllite(globalconf, log, accountid)
    df1 = df1[[u'current_date',
               u'current_datetime_txt', u'FullInitMarginReq_USD',
               u'FullMaintMarginReq_USD', u'GrossPositionValue_USD',
               u'RegTMargin_USD',
               u'TotalCashBalance_BASE', u'UnrealizedPnL_BASE']]

    df1 = df1.groupby('current_date').last()
    df1 = df1.iloc[-days:]  # .ix was removed from pandas; take the last 'days' rows positionally
    print(df1)