Example #1
def resample_data_insert(exchange, timeframe, market_type, resample_tf):

    ds_table_name = cu.get_table_name(timeframe, exchange, market_type)

    for r_tf in resample_tf:

        if r_tf <= timeframe:
            print(
                '''Resample timeframe : {rtf} is less than or equal to data source timeframe : {tf}
                     Skipping {tf} to {rtf} resample'''.format(rtf=r_tf,
                                                               tf=timeframe))
            continue

        # Generate table names
        r_t_n_current = cu.get_table_name(r_tf, exchange, market_type)
        r_t_n_new = '''{r_t_n}_new'''.format(r_t_n=r_t_n_current)
        backup_table = '''{r_t_n}_old'''.format(r_t_n=r_t_n_current)

        # Create the table for the resampled timeframe if it does not exist
        dao.create_current_ohlcv_table(r_t_n_current)  # _new table is created
        dao.resample_and_insert_data(ds_table_name, r_t_n_new,
                                     r_tf)  # Insert into _new table

        # Rename the current table to the backup table
        dao.drop_table(backup_table)
        dao.rename_table(r_t_n_current, backup_table)

        # Rename the new table to the current name
        dao.rename_table(r_t_n_new, r_t_n_current)
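
The aggregation itself is delegated to dao.resample_and_insert_data, which is not shown here. As a rough, standalone sketch of what that step presumably does, the snippet below resamples a datetime-indexed OHLCV frame with pandas; the resample_ohlcv helper and its column names are illustrative assumptions, not part of the project's DAO layer.

import pandas as pd


def resample_ohlcv(df, rule):
    # Aggregate a datetime-indexed OHLCV frame to a coarser timeframe.
    out = df.resample(rule).agg({
        'open': 'first',
        'high': 'max',
        'low': 'min',
        'close': 'last',
        'volume': 'sum',
    })
    return out.dropna(subset=['open'])  # drop buckets with no source rows


if __name__ == '__main__':
    idx = pd.date_range('2023-01-02', periods=10, freq='D')
    daily = pd.DataFrame({'open': range(10), 'high': range(1, 11),
                          'low': range(10), 'close': range(10),
                          'volume': [100] * 10}, index=idx)
    print(resample_ohlcv(daily, 'W'))  # daily candles -> weekly candles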
Example #2
def scrip_master_setup(exchange, market_type, path, timeframe):

    scrip_master_ex_list = []
    metastock_csv_list = get_csv_list_from_path(path)
    try:

        if len(metastock_csv_list) > 0:

            for scrip in metastock_csv_list:

                file_name = scrip
                scrip_code, company_name = cu.get_scripode_company_name(scrip)

                scrip_master_row = (scrip_code, company_name, market_type,
                                    exchange, timeframe, file_name, 'y',
                                    datetime.datetime.now(),
                                    datetime.datetime.now())

                scrip_master_ex_list.append(scrip_master_row)

            if len(scrip_master_ex_list) > 0:

                dao.save_scrip_master(scrip_master_ex_list, exchange)

    except Exception:
        stack_trace = traceback.format_exc()
        print(stack_trace)
        logging.exception(str(stack_trace))

    return
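
cu.get_scripode_company_name is not shown; a commented-out line in example #10 suggests the CSV files are named '<scrip_code>#<company_name>.csv'. Under that assumption, a parser along those lines could look like the hypothetical helper below.

import os


def get_scrip_code_company_name(file_name):
    # Split '<scrip_code>#<company_name>.csv' into its two parts.
    stem, _ext = os.path.splitext(os.path.basename(file_name))
    scrip_code, _, company_name = stem.partition('#')
    return scrip_code, company_name


if __name__ == '__main__':
    print(get_scrip_code_company_name('RELIANCE#Reliance Industries.csv'))
    # -> ('RELIANCE', 'Reliance Industries')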
Example #3
def strat_to_db_csv(Strat_obj, ex, m_t, t_f, screen_name):

    home = expanduser("~")
    url_prefix = get_server_url()

    scrip_id_name_list = dao.get_scrip_id_name()
    scrip_id_name_url_df = pd.DataFrame(scrip_id_name_list,
                                        columns=['sc', 'sn'])
    scrip_id_name_url_df['url'] = url_prefix + scrip_id_name_url_df[
        'sc'].str.upper() + f':{ex}'.upper()
    scrip_id_name_url_df.set_index('sc', inplace=True)

    for k in Strat_obj:
        Strat_obj[k].update({'Exchange': ex.upper()})
        # print(k,data_value_json[k])

    df = pd.DataFrame(Strat_obj)
    df_T = df.T
    df_T = df_T.applymap(lambda x: "{0:.6f}".format(x)
                         if isinstance(x, float) else x)
    f_df = df_T.join(scrip_id_name_url_df)
    print(f_df)

    candle_dt = f_df['Date'].iloc[0].strftime("%d-%m-%Y")
    now = datetime.datetime.now()
    nowiso = now.isoformat()
    df_json = f_df.to_json(orient='split', date_format='iso')
    data_tuple = (screen_name, candle_dt, df_json, nowiso)
    # dao.insert_to_screener_value_table(data_tuple)

    f_df.to_csv(home + '/output_data/{dt}_{s_n}_ScreenOut.csv'.format(
        s_n=screen_name, dt=now.strftime("%d%m%Y")))
    # f_df.to_csv('{dt}_{s_n}_ScreenOut.csv'.format(s_n=screen_name,dt=now.strftime("%d%m%Y")))

    return df_T
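
The transpose-and-format step in the middle of this example is the part most worth isolating. A tiny standalone illustration with a synthetic strategy dict (column names and values are placeholders) is shown below; note that pandas 2.1+ prefers DataFrame.map over the still-working but deprecated applymap.

import pandas as pd

strat = {'AAA': {'Date': pd.Timestamp('2023-01-06'), 'rsi': 61.23456789},
         'BBB': {'Date': pd.Timestamp('2023-01-06'), 'rsi': 48.5}}

# Outer keys become rows after the transpose, inner keys become columns.
df_T = pd.DataFrame(strat).T
df_T = df_T.applymap(lambda x: '{0:.6f}'.format(x)
                     if isinstance(x, float) else x)
print(df_T)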
Example #4
def benchmark_data(exchange, market_type, res, data_len):

    connection = None

    try:
        connection = db.prod_db_conn()
        c = connection.cursor()

        # Get the benchmark name for the exchange
        c.execute('''select benchmark from benchmark_master 
                  where exchange = '{ex}' '''.format(ex=exchange.lower()))

        benchmark_id = c.fetchall()[0][0]

        # Fetch the required number of candles from the DB
        df = pd.read_sql_query('''Select * from (
                                    select datetime,open,high,low,close,volume
                                    from ohlcv_index_{mt}_{r}
                                    where scrip_code = '{bid}'
                                    ORDER BY datetime DESC limit {c} ) as c_d
                                    order by datetime ASC
                               '''.format(bid=benchmark_id,
                                          mt=market_type,
                                          r=str(res),
                                          c=data_len),
                               con=connection)

        # Create a Data Feed
        data = btfeeds.PandasData(dataname=df,
                                  timeframe=bt.TimeFrame.Days,
                                  compression=1,
                                  datetime=0,
                                  high=2,
                                  low=3,
                                  open=1,
                                  close=4,
                                  volume=5,
                                  openinterest=-1)
        benchmark_name = benchmark_id.split('.')[1]

        return data, benchmark_name

    except Exception as e:
        logging.exception(str(e))

    finally:
        if connection is not None:
            dao.close_db_connection(connection)
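
The queries in this example interpolate exchange, market_type and res directly into the SQL text. A parameterized query is the safer pattern; the sketch below shows the same kind of lookup against an in-memory SQLite table so it can run on its own (psycopg2 uses %s placeholders instead of ?, but the shape is identical). The table contents are made up.

import sqlite3

import pandas as pd

conn = sqlite3.connect(':memory:')
conn.execute('create table benchmark_master (exchange text, benchmark text)')
conn.execute("insert into benchmark_master values ('ns', 'NSE.NIFTY50')")

df = pd.read_sql_query(
    'select benchmark from benchmark_master where exchange = ?',
    con=conn, params=('ns',))
print(df['benchmark'].iloc[0])  # -> NSE.NIFTY50
conn.close()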
Example #5
def benchmark_name(exchange):

    connection = None

    try:
        connection = db.prod_db_conn()
        c = connection.cursor()
        c.execute('''select benchmark from benchmark_master 
                  where exchange = '{ex}' '''.format(ex=exchange.lower()))

        benchmark_id = c.fetchall()[0][0]
        benchmark_name = benchmark_id.split('.')[1]

        return benchmark_name

    except Exception as e:
        logging.exception(str(e))

    finally:
        if connection is not None:
            dao.close_db_connection(connection)
Example #6
def active_exchange_ohlcv_run():

    #save_logs()
    datasource_list = dao.get_datasource_list()

    try:
        if len(datasource_list) > 0:
            for d_s in datasource_list:

                try:

                    exchange = d_s[1]
                    timeframe = d_s[2]
                    market_type = d_s[3]
                    path = d_s[4]
                    handler = d_s[5]
                    resample_tf = d_s[6]

                    module = importlib.import_module('data_handlers.' +
                                                     handler)
                    ohlcv_csv_to_db_func = getattr(module, 'ohlcv_csv_to_db')

                    ohlcv_csv_to_db_func(exchange, market_type, str(timeframe),
                                         path)
                    if all(v is not None for v in
                           (exchange, timeframe, market_type, resample_tf)):

                        resample_data_insert(exchange, timeframe, market_type,
                                             resample_tf)

                except Exception:
                    stack_trace = traceback.format_exc()
                    print(stack_trace)
                    logging.exception(str(stack_trace))
                    continue
            return
        else:
            print("datasource_master table can not be blank")
            logging.info("datasource_master table can not be blank")

    except Exception:
        stack_trace = traceback.format_exc()
        print(stack_trace)
        logging.exception(str(stack_trace))
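
The handler dispatch above is importlib.import_module plus getattr. Since the data_handlers package is not part of this listing, the same pattern is demonstrated below with a stdlib module so the snippet runs on its own.

import importlib

# 'math' / 'sqrt' stand in for 'data_handlers.<handler>' / 'ohlcv_csv_to_db'
module = importlib.import_module('math')
func = getattr(module, 'sqrt')
print(func(16))  # -> 4.0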
Example #7

def setup_datasource():

    try:

        datasource_list = dao.get_datasource_list()

        if len(datasource_list) > 0:
            for d_s in datasource_list:

                try:

                    exchange = d_s[1]
                    timeframe = str(d_s[2])
                    market_type = d_s[3]
                    path = d_s[4]
                    handler = d_s[5]

                    # Build OHLCV Tables
                    #                    dao.create_ohlcv_table(exchange,market_type,timeframe)

                    # Set up scrip_master for the daily timeframe
                    # Get the function from the handler module path
                    module = importlib.import_module('data_handlers.' +
                                                     handler)
                    scrip_master_setup_func = getattr(module,
                                                      'scrip_master_setup')

                    scrip_master_setup_func(exchange, market_type, path,
                                            timeframe)
                except Exception:
                    stack_trace = traceback.format_exc()
                    print(stack_trace)
                    logging.exception(str(stack_trace))
                    continue
        else:
            print("datasource_master table can not be blank")
            logging.info("datasource_master table can not be blank")

    except Exception:
        stack_trace = traceback.format_exc()
        print(stack_trace)
        logging.exception(str(stack_trace))
Example #8
def run():

    all_screener_output = {}

    try:

        start = timeit.default_timer()

        screener_list = dao.get_screener_list()

        # screener_list = [
        #                 ('ohlcv_ns_equity_10080', 'all', 'CHH', None, 'output_csv/','FFM_ns'),
        # #                 ('ohlcv_bo_equity_10080', 'all', 'cmOne', None, 'output_csv/', 'cmOne_bo'),
        #             ]

        with concurrent.futures.ProcessPoolExecutor() as executor:
            for instance, df in enumerate(executor.map(main, screener_list)):
                if df is not None:
                    try:
                        screen_name = screener_list[instance][5]
                        all_screener_output[screen_name] = df
                    except Exception as e:
                        print(e)
                        # err.error_log(str(e),run.__name__,'bt_run')
                        logging.info(str(e))
                        continue

        # execution_time logger
        stop = timeit.default_timer()
        execution_time = stop - start
        print("Program Executed in " + str(execution_time))
    except Exception as e:
        # err.error_log(str(e),run.__name__,'bt_run')
        logging.exception(str(e))

    return all_screener_output
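
A stripped-down, runnable version of the executor.map fan-out used above, with a top-level work function standing in for the screener main (defined elsewhere in the project):

import concurrent.futures


def work(item):
    # Placeholder for main(); must be a top-level function so it can be pickled.
    name, value = item
    return value * 2


if __name__ == '__main__':
    jobs = [('screen_a', 1), ('screen_b', 2)]
    results = {}
    with concurrent.futures.ProcessPoolExecutor() as executor:
        for instance, out in enumerate(executor.map(work, jobs)):
            results[jobs[instance][0]] = out
    print(results)  # -> {'screen_a': 2, 'screen_b': 4}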
Example #9
def data_feed(scrip_code, ex, m_t, t_f, cerebro, min_data):

    connection = None

    try:
        connection = db.prod_db_conn()
        c = connection.cursor()

        print(
            f'Fetching data for {scrip_code} scrip(s) from exchange {ex}, timeframe {t_f} & market {m_t}'
        )
        table_name = su.get_table_name(t_f, ex, m_t)

        if scrip_code == 'all':  ## For Ticker list Handling

            try:
                p_s_f_amt = ap.penny_stock_filter[ex]
            except KeyError as e:
                print('Exchange', e,
                      'does not exist in Application Properties')
                print(
                    'Ensure a penny-stock filter amount is set for this exchange in Application Properties'
                )
                return  # p_s_f_amt is undefined here, so the query below cannot be built

            # Fetch the required candles from the DB in a single query
            c.execute(
                '''with scrips_with_close_filter AS (select scdtc.scrip_code from (select distinct on (scrip_code) scrip_code,datetime,close
                        FROM {t_n}
                        order by scrip_code, datetime DESC) as scdtc
                        inner join scrip_master sm on scdtc.scrip_code = sm.scrip_code
                        where scdtc.close > {sf} and sm.isactive in ('t' , 'y'))
                                              
                        SELECT scrip_code, array_agg(Array[datetime::text,open::text,high::text,low::text,close::text,volume::text])
                        FROM  (SELECT n.datetime,n.open,n.high,n.low,n.close,n.volume,n.scrip_code,RANK () OVER (PARTITION BY n.scrip_code ORDER BY n.scrip_code,datetime DESC) sc_rank
                        FROM {t_n} as n 
                        inner join scrips_with_close_filter cf on n.scrip_code = cf.scrip_code
                         ORDER BY n.scrip_code, n.datetime ASC) as ol
                         where ol.sc_rank <= {candles}
                        GROUP BY ol.scrip_code;'''.format(t_n=table_name,
                                                          sf=p_s_f_amt,
                                                          candles=min_data))

            scrip_data_list = c.fetchall()

            for scrip in scrip_data_list:
                try:
                    scrip_name = scrip[0]
                    print(scrip_name)
                    df = pd.DataFrame.from_records(scrip[1],
                                                   columns=[
                                                       'datetime', 'open',
                                                       'high', 'low', 'close',
                                                       'volume'
                                                   ])
                    df2 = df.astype({
                        'datetime': 'datetime64[ns]',
                        'open': 'float64',
                        'high': 'float64',
                        'low': 'float64',
                        'close': 'float64',
                        'volume': 'float64'
                    })
                    # print(df2)

                    if len(df2) < min_data:
                        print('Minimum candles not present, skipping',
                              scrip_name)
                        continue

                    # Create a Data Feed
                    data = btfeeds.PandasData(dataname=df2,
                                              timeframe=bt.TimeFrame.Days,
                                              compression=1,
                                              datetime=0,
                                              high=2,
                                              low=3,
                                              open=1,
                                              close=4,
                                              volume=5,
                                              openinterest=-1)

                    # cerebro.resampledata(data, timeframe=bt.TimeFrame.Weeks,
                    #                         compression=1, name=scrip_name)

                    cerebro.adddata(data, name=scrip_name)

                except Exception as e:
                    # err.error_log(str(e),data_feed.__name__,'bt_run')
                    logging.exception(str(e))

        elif isinstance(scrip_code, tuple):

            for scrip in scrip_code:
                try:
                    df = pd.read_sql_query('''Select * from (
                                            SELECT datetime,open,high,low,close,volume
                                            FROM "{t_n}" 
            								 WHERE scrip_code = '{sc}'
            								 ORDER BY datetime DESC limit {c} ) as c_d
                                             order by datetime ASC
                                                 '''.format(t_n=table_name,
                                                            sc=scrip,
                                                            c=min_data),
                                           con=connection)

                    if len(df) < min_data:
                        print('Minimum candles not present')
                        continue

                    # Create a Data Feed
                    data = btfeeds.PandasData(dataname=df,
                                              timeframe=bt.TimeFrame.Days,
                                              compression=1,
                                              datetime=0,
                                              high=2,
                                              low=3,
                                              open=1,
                                              close=4,
                                              volume=5,
                                              openinterest=-1)

                    cerebro.adddata(data, name=scrip)

                except Exception as e:
                    # err.error_log(str(e),data_feed.__name__,'bt_run')
                    logging.exception(str(e))

        else:  ## For Single Ticker Handling
            df = pd.read_sql_query('''
                                   Select * from (
                                   SELECT datetime,open,high,low,close,volume
                                         FROM "{t_n}" 
            								 WHERE scrip_code = '{sc}'
            								 ORDER BY datetime DESC limit {c} ) as c_d
                                             order by datetime ASC
                                             '''.format(t_n=table_name,
                                                        sc=scrip_code,
                                                        c=min_data),
                                   con=connection)

            # print(df)
            if len(df) < min_data:
                print('Minimum candles not present')
                return
            # Create a Data Feed
            data = btfeeds.PandasData(dataname=df,
                                      timeframe=bt.TimeFrame.Days,
                                      compression=1,
                                      datetime=0,
                                      high=2,
                                      low=3,
                                      open=1,
                                      close=4,
                                      volume=5,
                                      openinterest=-1)

            cerebro.adddata(data, name=scrip_code)

        ## Add Benchmark Data to Cerebro

        b_data, benchmark_name = benchmark_data(ex, m_t, t_f, min_data)
        cerebro.adddata(b_data, name=benchmark_name)
        print('Benchmark Added')
        return cerebro

    except Exception as e:
        logging.exception(str(e))

    finally:
        if connection is not None:
            dao.close_db_connection(connection)
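
The PandasData feeds above map columns by position (datetime=0, open=1, and so on). The self-contained sketch below reproduces that mapping with a synthetic frame and a no-op strategy, so the column wiring can be checked without a database.

import backtrader as bt
import backtrader.feeds as btfeeds
import pandas as pd


class Noop(bt.Strategy):
    pass


idx = pd.date_range('2023-01-02', periods=30, freq='D')
df = pd.DataFrame({'datetime': idx,
                   'open': 100.0, 'high': 101.0,
                   'low': 99.0, 'close': 100.5,
                   'volume': 1000.0})

data = btfeeds.PandasData(dataname=df, timeframe=bt.TimeFrame.Days,
                          compression=1, datetime=0, open=1, high=2,
                          low=3, close=4, volume=5, openinterest=-1)

cerebro = bt.Cerebro()
cerebro.addstrategy(Noop)
cerebro.adddata(data, name='SYNTH')
cerebro.run()
print('run complete')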
Example #10
def ohlcv_csv_to_db(exchange, market_type, timeframe, path):

    scrip_list = dao.get_scrip_list(exchange, market_type, timeframe)

    if len(scrip_list) > 0:

        if cu.isDirectoryAndPathExists(path):

            if cu.isDirectoryEmpty(path):
                print(path + ":: Directory is empty")
                logging.info(path + ":: Directory is empty")

            else:

                fromDate = ap.fromDate
                r_conn = db.prod_db_conn()
                c = r_conn.cursor()
                scrip_code_id_list = []
                empty_file_scrip_code_id = []
                error_text = ""
                dynamic_table_name = cu.get_table_name(timeframe, exchange,
                                                       market_type)

                temp_ohlcv_query = dao.create_temp_ohlcv_table()
                c.execute(temp_ohlcv_query)  # Temp table session start

                logging.info("ohlcv_temp table created successfully..")
                print("ohlcv_temp table created successfully..")

                dao.create_current_ohlcv_table(dynamic_table_name)

                table_name = '{d_t_n}_new'.format(d_t_n=dynamic_table_name)

                insert_ohlcv_query = """insert into {t_n}(scrip_code,datetime,open,high,low,close,volume)
                                        select scrip_code,datetime::timestamp,open,high,low,close,volume
                                        from ohlcv_temp where datetime::timestamp >= '{frm_date}';
                                        """.format(t_n=table_name,
                                                   frm_date=fromDate)

                #    print(insert_ohlcv_query)
                logging.info(
                    "Copying CSV data to the ohlcv_temp table is in progress...")
                print("Copying CSV data to the ohlcv_temp table is in progress...")

                for scrip_data in scrip_list:

                    scrip_code_id = scrip_data[4]
                    #        scrip_code = scrip_data[0]
                    #        company_name = scrip_data[1]
                    file_name = scrip_data[2]
                    #        scrip_csv_name = scrip_code + '#' + company_name +'.csv'
                    csv_complete_pth = path + file_name

                    try:

                        if cu.isFileExists(csv_complete_pth):

                            if not cu.isEmpty(csv_complete_pth):

                                with open(csv_complete_pth, 'r') as f:

                                    next(f)  # Skip the header row.
                                    c.copy_from(f,
                                                'ohlcv_temp',
                                                sep=',',
                                                columns=[
                                                    'scrip_code', 'interval',
                                                    'datetime', 'open', 'high',
                                                    'low', 'close', 'volume'
                                                ])
                            else:
                                empty_file_scrip_code_id.append(scrip_code_id)
                                print('File is Empty', file_name)
                                logging.info('File is Empty' + str(file_name))
                                continue
                        else:
                            scrip_code_id_list.append(scrip_code_id)
                            error_text = "File Not Found"
                            print('File not found for scrip', file_name)
                            logging.info('File not found for scrip' +
                                         str(file_name))
                            continue

                    except psycopg2.IntegrityError as e:
                        stack_trace = traceback.format_exc()
                        print('Possible Duplicate Entries', file_name)
                        logging.exception(str(stack_trace))
                        print(str(e) + stack_trace)
                        continue
                    except psycopg2.DataError as e:
                        scrip_code_id_list.append(scrip_code_id)
                        error_text = "DataError"
                        print('Possible DataError', file_name)
                        logging.exception(str(e))
                        print(e)
                        continue
                    except psycopg2.Error as e:
                        stack_trace = traceback.format_exc()
                        print('General Error in', file_name)
                        logging.exception(str(stack_trace))
                        print(str(e) + stack_trace)
                        continue
                    except Exception as e:
                        stack_trace = traceback.format_exc()
                        print(str(e) + stack_trace)
                        print('Unexpected error in', file_name)
                        logging.exception(str(stack_trace))
                        continue

                try:

                    logging.info(
                        "Started transferring data from the ohlcv_temp table to the main table..."
                    )
                    print(
                        "Started transferring data from the ohlcv_temp table to the main table..."
                    )
                    c.execute(insert_ohlcv_query)
                    r_conn.commit()

                    print(
                        "Records inserted successfully from temp_table to ::" +
                        table_name)
                    logging.info(
                        "Records inserted successfully from temp_table to ::" +
                        table_name)

                    old_ohlv_table_name = """{d_t_n}_old""".format(
                        d_t_n=dynamic_table_name)

                    dao.drop_table(old_ohlv_table_name)

                    from_table_name = dynamic_table_name
                    to_table_name = old_ohlv_table_name

                    dao.rename_table(from_table_name, to_table_name)

                    from_table_name = table_name
                    to_table_name = dynamic_table_name

                    dao.rename_table(from_table_name, to_table_name)

                    if len(scrip_code_id_list) > 0:

                        dao.save_data_error(scrip_code_id_list, error_text,
                                            exchange, ohlcv_csv_to_db.__name__)
                        dao.update_scrip_master(scrip_code_id_list)

                    if len(empty_file_scrip_code_id) > 0:
                        ## update the scrip_code status to 'e' if any file is empty
                        dao.update_empty_file_status(empty_file_scrip_code_id)

                    ### Update isactive='n' in the scrip_code table if no data
                    ### was received for a scrip_code in the last 7 days

                    ### Update isactive='t' (temporarily inactive) if no data
                    ### was received for a scrip_code within the last 7 days

                    dao.disable_scrip_code(timeframe, exchange, market_type,
                                           dynamic_table_name)
                    print("scrip_code disabled successsfully..")

                except Exception:
                    stack_trace = traceback.format_exc()
                    print(stack_trace)
                    logging.exception(str(stack_trace))

                ## The finally block always executes, whether or not an exception
                ## occurred, to close the DB connection
                finally:
                    c.close()
                    r_conn.close()  # This closes temp_table session
        else:
            print(path + " :: Directory don't exists")
            logging.info(path + " :: Directory don't exists")
    else:
        print("All scrip code is in active for exchange :" + exchange + "," +
              timeframe)
        logging.info("All scrip code is in active for exchange :" +
                     str(exchange) + "," + str(timeframe))