def sort_file():
    """Rewrite each cached NSE-50 price CSV sorted by date, de-duplicated.

    For every symbol in the top-50 list, reload its CSV, keep only the last
    row for any repeated trade date, sort ascending by date, and write the
    result back to the local nsedb cache.
    NOTE(review): output path is hard-coded to /Volumes/2/PyD/nsedb/.
    """
    df_nse50 = rd.get_list('nse_Top50', 'Symbol')  # index = NSE-50 symbols
    symbols = df_nse50.index.tolist()
    for sym in symbols[0:51]:
        frame = pd.read_csv(rd.list_to_path(sym), index_col='Date',
                            parse_dates=True, dayfirst=True, na_values=['nan'])
        # Collapse duplicate trade dates, keeping the most recent record.
        frame = (frame.reset_index()
                      .drop_duplicates(subset='Date', keep='last')
                      .set_index('Date'))
        frame.sort_index(ascending=True, inplace=True)
        frame.to_csv("/Volumes/2/PyD/nsedb/{}.csv".format(str(sym)))
def check_null():
    """Print per-column NaN counts for ~3000 trading days ending 2018-12-31.

    Loads price data for the top-50 symbols, drops rows that are entirely
    NaN, then reports how many nulls remain in each stock's column.
    """
    df_nse50 = rd.get_list('nse_Top50', 'Symbol')  # index = NSE-50 symbols
    symbols = df_nse50.index.tolist()
    end_date = datetime.date(2018, 12, 31)  # fixed cutoff (not date.today())
    prices = rd.get_data(symbols[0:51], end_date, 3000)
    prices = prices.dropna(how='all')  # drop non-trading days
    null_count = prices.isnull().sum(axis=0)
    print(null_count)
def test_run():
    """Histogram the first stock's recent prices and print the bin indices
    of the three largest counts.

    Duplicate counts are handled by temporarily writing -1 into an already
    claimed bin so that ``index()`` advances to the next occurrence; this
    assumes ``find_nlargest`` returns its values in descending order
    (i >= j >= k) -- TODO confirm against its definition.
    """
    df_nse50 = rd.get_list('nse_Top50', 'Symbol')
    symbols = df_nse50.index.tolist()
    df_Stock = rd.get_data(symbols[0:20], datetime.date.today(), 50)
    sample = df_Stock.iloc[:15, :1].values  # first 15 rows of first stock
    hist, edges = np.histogram(sample, bins=4)
    i, j, k = find_nlargest(hist.tolist(), 3)
    iloc = hist.tolist().index(i)
    if j == i:
        hist[iloc] = -1  # mask the claimed bin so index(j) finds the next one
    jloc = hist.tolist().index(j)
    if k == j:
        hist[jloc] = -1  # same trick for the third value
    kloc = hist.tolist().index(k)
    print(iloc, jloc, kloc)
""" import os import numpy as np import datetime import importlib import read_Stock_Data as rd importlib.reload(rd) import pandas as pd from openpyxl import load_workbook excel_file = '/Volumes/2/Files/PraChin/InvestValue.xlsx' #/Volumes/2/Files/PraChin/ D:\\ppadayac\\PraChin\\InvestValue.xlsx movies_sheet1 = pd.read_excel(excel_file, sheet_name=0) movies_sheet1['Date'] = pd.to_datetime(movies_sheet1['Date'], dayfirst=True) Dates = movies_sheet1['Date'].tolist() df_nse50 = rd.get_list('nse_Top50', 'Symbol') #Extract list of Top Nse50 Stcoks list_nse50 = df_nse50.index.tolist() df_Stock = rd.get_data(list_nse50[1:51], datetime.date.today(), 365) columns_buy = movies_sheet1['Buy'].tolist() columns_sell = movies_sheet1['Sell'].tolist() buy_size = movies_sheet1['BuySize'].tolist() sell_size = movies_sheet1['Sell Size'].tolist() date_range = pd.date_range(start=Dates[1], end=Dates[2]) new_df = df_Stock.loc[np.concatenate( (np.array([Dates[0]]), date_range.tolist()), axis=0), np.concatenate((columns_buy, columns_sell), axis=0)] new_df = new_df.dropna(how='all') new_df = new_df.dropna(how='all', axis=1) one = np.concatenate((buy_size, sell_size), axis=0)
def test_run():
    """Build a buy/sell screening table for the NSE-50 and write it to CSV.

    Pipeline: load ~365 days of prices, compute six filter columns
    (last close, 50/200-day means, 10-day return, mean absolute 1-day
    return, 14-day RSI), score four buy and four sell criteria per stock,
    sum them into BuyS/SellS ranks, print, and save TradeCall_<date>.csv.
    Relies on the project helpers in ``frm`` and ``rd``.
    """
    # ---- read data -------------------------------------------------------
    df_nse50 = rd.get_list('nse_Top50', 'Symbol')
    list_nse50 = df_nse50.index.tolist()
    df_Stock = rd.get_data(list_nse50[0:50], datetime.date.today(), 365)
    df_Stock.index = df_Stock.index.strftime('%d.%m.%Y')  # DD.MM.YYYY labels

    # ---- build filter table (rows = stocks) ------------------------------
    df_filter = (df_Stock[-1:]).T  # last close per stock, one column
    str_trade_date = ''.join(df_filter.columns.tolist())  # that column's name
    col_A = "A~CP-{}".format(str_trade_date)
    df_filter = df_filter.rename(columns={str_trade_date: col_A})

    # B: 50-day mean, C: 200-day mean
    df_filter = (frm.stockAvg(df_Stock.tail(50), df_filter)).round(2)
    df_filter = df_filter.rename(columns={'Mean': 'B~50DA'})
    df_filter = (frm.stockAvg(df_Stock.tail(200), df_filter)).round(2)
    df_filter = df_filter.rename(columns={'Mean': 'C~200DA'})

    # D: 10-day return
    df_filter = frm.stockRet(df_Stock, df_filter, 10)
    df_filter = df_filter.rename(columns={'Return': 'D~10DR'})

    # E: mean absolute 1-day return
    df_filter_temp = frm.modstockRet(df_Stock, 1)
    df_filter = frm.stockAvg(df_filter_temp, df_filter)
    df_filter = df_filter.rename(columns={'Mean': 'E~AR50D'})

    # F: 14-day RSI (15 rows of prices -> 14 deltas)
    df_filter = (frm.RSI(df_Stock.tail(15), df_filter)).round(2)
    df_filter = df_filter.rename(columns={'RSI': 'F~RSI'})

    # ---- buy criteria ----------------------------------------------------
    # 1: close within 5% below the 50-day or 200-day mean
    df_filter['A<(B/C) '] = np.where(
        ((df_filter[col_A] <= df_filter['B~50DA']) &
         (df_filter[col_A] >= (df_filter['B~50DA']) * 0.95))
        | ((df_filter[col_A] <= df_filter['C~200DA']) &
           (df_filter[col_A] >= (df_filter['C~200DA']) * 0.95)),
        1, 0)
    df_filter['D<-8%'] = np.where(df_filter['D~10DR'] < -0.08, 1, 0)
    df_filter['E<1.5%'] = np.where(df_filter['E~AR50D'] < 0.015, 1, 0)
    df_filter['F<30%'] = np.where(df_filter['F~RSI'] < 30, 1, 0)

    # ---- sell criteria (mirror image, 5% above the means) ---------------
    df_filter['A>(B/C) '] = np.where(
        ((df_filter[col_A] >= df_filter['B~50DA']) &
         (df_filter[col_A] <= (df_filter['B~50DA']) * 1.05))
        | ((df_filter[col_A] >= df_filter['C~200DA']) &
           (df_filter[col_A] <= (df_filter['C~200DA']) * 1.05)),
        1, 0)
    df_filter['D>8%'] = np.where(df_filter['D~10DR'] > 0.08, 1, 0)
    df_filter['E>1.5%'] = np.where(df_filter['E~AR50D'] > 0.015, 1, 0)
    df_filter['F>70%'] = np.where(df_filter['F~RSI'] > 70, 1, 0)

    # ---- rank and persist ------------------------------------------------
    df_filter['BuyS'] = (df_filter['A<(B/C) '] + df_filter['D<-8%']
                         + df_filter['E<1.5%'] + df_filter['F<30%'])
    df_filter['SellS'] = (df_filter['A>(B/C) '] + df_filter['D>8%']
                          + df_filter['E>1.5%'] + df_filter['F>70%'])
    pd.set_option('display.max_rows', 500)
    print(df_filter)
    df_filter.to_csv("TradeCall_{}.csv".format(str_trade_date))
# NOTE(review): third definition of test_run() in this file -- at import time it
# shadows the earlier versions; confirm which one is actually intended to run.
# NOTE(review): the large '''...''' span below is commented-out analysis code.
# Inside it, the "22_VWAP3" output is written from vwap2 rather than vwap3
# (copy-paste slip) -- fix before ever re-enabling that section.
# NOTE(review): after the closing ''', `test_1 = np.where(abs_day_1_returns ==
# day_1_returns, ...)` references names defined only INSIDE the commented-out
# section, and the df_filter / col_A / frm tail is duplicated from the other
# test_run() with df_filter never initialised here -- both would raise
# NameError if this function executed. Left byte-identical pending a decision
# on which parts to keep.
def test_run(): ######Reading Data################### df_nse50 = rd.get_list('ind_nifty50list', 'Symbol') #Extract list of Top Nse50 Stcoks list_nse50 = df_nse50.index.tolist() #Array of Stcok symbols csv = "CSV" excel = "EXCEL" start_date = datetime.date(2012, 1, 1) end_date = datetime.date(2014, 1, 1) #date.today() datetime.date(2013,12,31) df_Stock = rd.get_data( list_nse50[0:51], end_date, 700) #Extract Data from a given date to past x no of traded days df_Stock = df_Stock.dropna(how='all') #Formate date to DD/MM/YYYY formate df_Stock.index.name = 'Date' str_trade_date = ''.join(df_Stock.iloc[-1:, :0].index.strftime('%d.%m.%Y')) #writer = cmn.to_file(df_Stock.sort_index(ascending=False, inplace=False) ,"1_StockPrice",csv,str_trade_date) #writer = cmn.to_file(df_Stock.iloc[:2,:2],"StockPrice2",excel,str_trade_date,writer) rm_200 = rf.get_rolling_mean(df_Stock, window=200) #writer = cmn.to_file(rm_200.sort_index(ascending=False, inplace=False),"2_RollMean200",csv,str_trade_date) rm_50 = rf.get_rolling_mean(df_Stock, window=50) #writer = cmn.to_file(rm_50.sort_index(ascending=False, inplace=False),"3_RollMean50",csv,str_trade_date) ''' day_1_returns = rf.compute_daily_returns(df_Stock,1) #writer = cmn.to_file(day_1_returns.sort_index(ascending=False, inplace=False),"4_OneDayRet",csv,str_trade_date) abs_day_1_returns = day_1_returns.abs() #writer = cmn.to_file(abs_day_1_returns.sort_index(ascending=False, inplace=False),"5_OneDayAbsRet",csv,str_trade_date) rm_abs1day_50 = rf.get_rolling_mean(abs_day_1_returns, window=50) counter_rm_abs1day_50 = rm_abs1day_50 < 2 #writer = cmn.to_file(counter_rm_abs1day_50.sort_index(ascending=False, inplace=False),"6_Counter50DayAbsRet",csv,str_trade_date) day_10_returns = rf.compute_daily_returns(df_Stock,10) writer = cmn.to_file(day_10_returns.sort_index(ascending=False, inplace=False),"7_10DayRet",csv,str_trade_date) rd_10DRet = rf.get_rolling_std(day_10_returns, window=1200) #writer = 
cmn.to_file(rd_10DRet.sort_index(ascending=False, inplace=False),"8_10DayRetStdDev",csv,str_trade_date) ra_10plarge = rf.get_rolling_nAvg(df_Stock,1200,120,120,40) writer = cmn.to_file(ra_10plarge.sort_index(ascending=False, inplace=False),"9_10PCLarge",csv,str_trade_date) ra_10psmall = rf.get_rolling_nAvg(df_Stock,1200,1080,1080,1160) writer = cmn.to_file(ra_10psmall.sort_index(ascending=False, inplace=False),"10_10PCSmall",csv,str_trade_date) ra_3plarge = rf.get_rolling_nAvg(df_Stock,1200,40,40,13) writer = cmn.to_file(ra_3plarge.sort_index(ascending=False, inplace=False),"11_3PCLarge",csv,str_trade_date) ra_3psmall = rf.get_rolling_nAvg(df_Stock,1200,1140,1160,1147) writer = cmn.to_file(ra_3psmall.sort_index(ascending=False, inplace=False),"12_3PCSmall",csv,str_trade_date) ra_15plarge = rf.get_rolling_nAvg(df_Stock,1200,120,180,180) writer = cmn.to_file(ra_15plarge.sort_index(ascending=False, inplace=False),"13_15PCLarge",csv,str_trade_date) ra_15psmall = rf.get_rolling_nAvg(df_Stock,1200,1080,1020,1120) writer = cmn.to_file(ra_15psmall.sort_index(ascending=False, inplace=False),"14_15PCSmall",csv,str_trade_date) array_cntr = np.where(ra_3psmall.isnull() ,np.nan, np.where(day_10_returns*100 <= ra_3psmall/100,3, np.where(day_10_returns*100 <= ra_10psmall/100,2, np.where(day_10_returns*100 <= ra_15psmall/100,1,0)))) indx = ra_15psmall.index.values Cntr_10D_Buy = pd.DataFrame(array_cntr,index=indx ,columns=list_nse50) writer = cmn.to_file(Cntr_10D_Buy.sort_index(ascending=False, inplace=False),"15_10DCounterBuy",csv,str_trade_date) array_cntr = np.where(ra_3plarge.isnull() ,np.nan, np.where(day_10_returns*100 > ra_3plarge/100,3, np.where(day_10_returns*100 > ra_10plarge/100,2, np.where(day_10_returns*100 > ra_15plarge/100,1,0)))) indx = ra_15plarge.index.values Cntr_10D_Sell = pd.DataFrame(array_cntr,index=indx ,columns=list_nse50) writer = cmn.to_file(Cntr_10D_Sell.sort_index(ascending=False, inplace=False),"16_10DCounterSell",csv,str_trade_date) rRSI_13 = 
rf.get_Rolling_RSI(df_Stock,13) #writer = cmn.to_file(rRSI_13.sort_index(ascending=False, inplace=False),"17_13DRSI",csv,str_trade_date) array_cntr = np.where(rRSI_13.isnull() ,np.nan, np.where(rRSI_13 <= 15,3, np.where(rRSI_13 <= 25,2, np.where(rRSI_13 <= 35,1,0)))) indx = rRSI_13.index.values Cntr_RSI_Buy = pd.DataFrame(array_cntr,index=indx ,columns=list_nse50) writer = cmn.to_file(Cntr_RSI_Buy.sort_index(ascending=False, inplace=False),"18_RSICounterBuy",csv,str_trade_date) array_cntr = np.where(rRSI_13.isnull() ,np.nan, np.where(rRSI_13 > 85,3, np.where(rRSI_13 > 75,2, np.where(rRSI_13 > 65,1,0)))) indx = rRSI_13.index.values Cntr_RSI_Sell = pd.DataFrame(array_cntr,index=indx ,columns=list_nse50) writer = cmn.to_file(Cntr_RSI_Sell.sort_index(ascending=False, inplace=False),"19_RSICounterSell",csv,str_trade_date) vwap1 = rf.get_rolling_VMAP(df_Stock,200,1) writer = cmn.to_file(vwap1.sort_index(ascending=False, inplace=False) ,"20_VWAP1",csv,str_trade_date) vwap2 = rf.get_rolling_VMAP(df_Stock,200,2) writer = cmn.to_file(vwap2.sort_index(ascending=False, inplace=False) ,"21_VWAP2",csv,str_trade_date) vwap3 = rf.get_rolling_VMAP(df_Stock,200,3) writer = cmn.to_file(vwap2.sort_index(ascending=False, inplace=False) ,"22_VWAP3",csv,str_trade_date) v1_ratio = np.where(vwap1.isnull() ,np.nan,(df_Stock - vwap1)/df_Stock) v2_ratio = np.where(vwap1.isnull() ,np.nan,(df_Stock - vwap2)/df_Stock) v3_ratio = np.where(vwap1.isnull() ,np.nan,(df_Stock - vwap3)/df_Stock) indx = vwap3.index.values df_v1_ratio = pd.DataFrame(v1_ratio,index=indx ,columns=list_nse50) df_v2_ratio = pd.DataFrame(v2_ratio,index=indx ,columns=list_nse50) df_v3_ratio = pd.DataFrame(v3_ratio,index=indx ,columns=list_nse50) array_cntr = np.where(df_v1_ratio.isnull() ,np.nan, np.where((df_v1_ratio.abs() > 0.02) | (df_v2_ratio.abs() > 0.02) | (df_v3_ratio.abs() > 0.02),2,0)) indx = df_v1_ratio.index.values Cntr_VWAP = pd.DataFrame(array_cntr,index=indx ,columns=list_nse50) writer = 
cmn.to_file(Cntr_VWAP.sort_index(ascending=False, inplace=False),"23_VWAPCounter",csv,str_trade_date) df_dma50 = np.where(rm_50.isnull() ,np.nan,(df_Stock - rm_50)/df_Stock) df_dma200 = np.where(rm_200.isnull() ,np.nan,(df_Stock - rm_200)/df_Stock) array_cntr1 = np.where(np.isnan(df_dma50) ,np.nan, np.where((df_dma50 < 0.02) & (df_dma50 > -0.015),1,0)) array_cntr2 = np.where(np.isnan(df_dma200) ,np.nan, np.where((df_dma200 < 0.02) & (df_dma200 > -0.02),2,0)) array_cntr = np.where(np.isnan(array_cntr2) ,array_cntr1,array_cntr1 + array_cntr2) indx = rm_50.index.values Cntr_DMA_Buy = pd.DataFrame(array_cntr,index=indx ,columns=list_nse50) writer = cmn.to_file(Cntr_DMA_Buy.sort_index(ascending=False, inplace=False),"24_DMACounterBuy",csv,str_trade_date) array_cntr1 = np.where(np.isnan(df_dma50) ,np.nan, np.where((df_dma50 < 0.015) & (df_dma50 > -0.02),1,0)) array_cntr = np.where(np.isnan(array_cntr2) ,array_cntr1,array_cntr1 + array_cntr2) indx = rm_50.index.values Cntr_DMA_Sell = pd.DataFrame(array_cntr,index=indx ,columns=list_nse50) writer = cmn.to_file(Cntr_DMA_Sell.sort_index(ascending=False, inplace=False),"25_DMACounterSell",csv,str_trade_date) rs_SLBuy = rf.get_rolling_small(df_Stock,1100,1) writer = cmn.to_file(rs_SLBuy.sort_index(ascending=False, inplace=False),"26_SLBuy",csv,str_trade_date) array_cntr = np.where(rs_SLBuy.isnull() ,np.nan, np.where((((df_Stock - rs_SLBuy).abs())/df_Stock)<0.025,2,0)) indx = rs_SLBuy.index.values Cntr_SL_Buy = pd.DataFrame(array_cntr,index=indx ,columns=list_nse50) writer = cmn.to_file(Cntr_SL_Buy.sort_index(ascending=False, inplace=False),"27_SLCounterBuy",csv,str_trade_date) rs_SLSell = rf.get_rolling_small(df_Stock,1100,1100) writer = cmn.to_file(rs_SLSell.sort_index(ascending=False, inplace=False),"28_SLSel",csv,str_trade_date) array_cntr = np.where(rs_SLSell.isnull() ,np.nan, np.where((((df_Stock - rs_SLSell).abs())/df_Stock)<0.025,2,0)) indx = rs_SLSell.index.values Cntr_SL_Sell = pd.DataFrame(array_cntr,index=indx 
,columns=list_nse50) writer = cmn.to_file(Cntr_SL_Sell.sort_index(ascending=False, inplace=False),"29_SLCounterSell",csv,str_trade_date) ''' rm_20 = rf.get_rolling_mean(df_Stock, window=20) rd_20 = ((df_Stock - rm_20) / df_Stock).abs() writer = cmn.to_file(rd_20.sort_index(ascending=False, inplace=False), "30_20Ddev", csv, str_trade_date) rl_5p = rf.get_rolling_small(rd_20, 1000, 950) writer = cmn.to_file(rl_5p.sort_index(ascending=False, inplace=False), "31_5pDev", csv, str_trade_date) rl_12p = rf.get_rolling_small(rd_20, 1000, 880) writer = cmn.to_file(rl_12p.sort_index(ascending=False, inplace=False), "32_12pDev", csv, str_trade_date) rl_20p = rf.get_rolling_small(rd_20, 1000, 800) writer = cmn.to_file(rl_20p.sort_index(ascending=False, inplace=False), "33_20pDev", csv, str_trade_date) array_cntr = np.where( rm_50.isnull(), np.nan, np.where( df_Stock > rm_50, 0, np.where( rd_20 > rl_5p, 3, np.where(rd_20 > rl_12p, 2, np.where(rd_20 > rl_20p, 1, 0))))) indx = rm_50.index.values Cntr_20dDev_Buy = pd.DataFrame(array_cntr, index=indx, columns=list_nse50) writer = cmn.to_file( Cntr_20dDev_Buy.sort_index(ascending=False, inplace=False), "34_20dDevCounterBuy", csv, str_trade_date) array_cntr = np.where( rm_50.isnull(), np.nan, np.where( df_Stock < rm_50, 0, np.where( rd_20 > rl_5p, 3, np.where(rd_20 > rl_12p, 2, np.where(rd_20 > rl_20p, 1, 0))))) indx = rm_50.index.values Cntr_20dDev_Sell = pd.DataFrame(array_cntr, index=indx, columns=list_nse50) writer = cmn.to_file( Cntr_20dDev_Sell.sort_index(ascending=False, inplace=False), "35_20dDevCounterSell", csv, str_trade_date) test_1 = np.where(abs_day_1_returns == day_1_returns, 1, 0) df_filter = df_filter.rename(columns={'Mean': 'C~200DA'}) df_filter = frm.stockRet(df_Stock, df_filter, 10) # 3rd Filter of calculating 10 Day Return df_filter = df_filter.rename(columns={'Return': 'D~10DR'}) df_filter_temp = frm.modstockRet( df_Stock, 1) # 4th Filter of calculating Absolute Return of 50 days df_filter = 
frm.stockAvg(df_filter_temp, df_filter) df_filter = df_filter.rename(columns={'Mean': 'E~AR50D'}) df_filter = (frm.RSI(df_Stock.tail(15), df_filter)).round( 2) # 5th Filter of calculating RSI of 14 days df_filter = df_filter.rename(columns={'RSI': 'F~RSI'}) #####################Setting Criteria####### ########################Citeria 1 to Buy########### df_filter['A<(B/C) '] = np.where(((df_filter[col_A] <= df_filter['B~50DA']) & \ (df_filter[col_A] >= (df_filter['B~50DA'])*0.95)) \ |((df_filter[col_A] <= df_filter['C~200DA']) & \ (df_filter[col_A] >= (df_filter['C~200DA'])*0.95)), 1 , 0) ########################Criteria 2 to Buy########## df_filter['D<-8%'] = np.where(df_filter['D~10DR'] < -0.08, 1, 0) ########################Criteria 3 to Buy ########## df_filter['E<1.5%'] = np.where(df_filter['E~AR50D'] < 0.015, 1, 0) ########################Criteria 4 to Buy ########## df_filter['F<30%'] = np.where(df_filter['F~RSI'] < 30, 1, 0) ########################Citeria to Sell########### df_filter['A>(B/C) '] = np.where(((df_filter[col_A] >= df_filter['B~50DA']) & \ (df_filter[col_A] <= (df_filter['B~50DA'])*1.05)) \ |((df_filter[col_A] >= df_filter['C~200DA']) & \ (df_filter[col_A] <= (df_filter['C~200DA'])*1.05)), 1 , 0) ########################Criteria 2 to Sell ########## df_filter['D>8%'] = np.where(df_filter['D~10DR'] > 0.08, 1, 0) ########################Criteria 3 to Sell########## df_filter['E>1.5%'] = np.where(df_filter['E~AR50D'] > 0.015, 1, 0) ########################Criteria 4 to Sell ########## df_filter['F>70%'] = np.where(df_filter['F~RSI'] > 70, 1, 0) ####################Ranking to Buy/Sell##### df_filter['BuyS'] = df_filter['A<(B/C) '] + df_filter['D<-8%'] + df_filter[ 'E<1.5%'] + df_filter['F<30%'] df_filter['SellS'] = df_filter['A>(B/C) '] + df_filter['D>8%'] + df_filter[ 'E>1.5%'] + df_filter['F>70%'] pd.set_option('display.max_rows', 500) print(df_filter) df_filter.to_csv("TradeCall_{}.csv".format(str_trade_date))
# -*- coding: utf-8 -*-
"""
Created on Tue Aug 14 17:39:40 2018

Load the Database with NSE Fifty stocks

@author: ppadayac
"""
import pandas as pd
import importlib
from datetime import date
from nsepy import get_history
import numpy as np
import read_Stock_Data as rd
importlib.reload(rd)

df_nse50 = rd.get_list('ind_nifty50list', 'Symbol')  # index = NSE-50 symbols
list_nse50 = df_nse50.index.tolist()

start_date = date(2012, 1, 1)
end_date = date(2014, 1, 1)  # fixed window; was date.today() at one point
rec_count = {}

# Seed the cache with the NIFTY index itself.
sbin = get_history(symbol="NIFTY", start=start_date, end=end_date, index=True)
rec_count['NIFTY'] = len(sbin.index)
sbin.to_csv("/Volumes/2/PyD/nsedb/NIFTY.csv")
'''with open('/Volumes/2/PyD/nsedb/NIFTY.csv', 'a') as f:
    sbin.to_csv(f, header=False)'''

# Then fetch each constituent (skipping the header row of the symbol list).
# NOTE(review): the loop body appears to continue beyond this chunk --
# confirm the remaining statements against the full file.
for symbl in list_nse50[1:]:
    sbin = get_history(symbol=symbl, start=start_date, end=end_date)