Example #1
def main():
    symbols = ["AAPL", "FB", "GOOG", "SPY"]
    start_date = "01/01/2017"
    end_date = "31/12/2017"

    # Portfolio and SPY DataFrames
    df_portfolio = get_data(symbols, start_date, end_date)
    df_SPY = df_portfolio.loc[:, "SPY"]
    df_SPY = df_SPY / df_SPY.iloc[0]

    # Optimized allocations
    optimized_allocations = compute_optimal_allocations(df_portfolio)
    optimized_portfolio = compute_daily_portfolio_value(
        df_portfolio, 100000, optimized_allocations)
    optimized_portfolio = optimized_portfolio / optimized_portfolio.iloc[0]

    # Default allocations
    default_allocations = [0.25, 0.25, 0.25, 0.25]
    default_portfolio = compute_daily_portfolio_value(df_portfolio, 100000,
                                                      default_allocations)
    default_portfolio = default_portfolio / default_portfolio.iloc[0]

    df_comparison = pd.concat(
        [optimized_portfolio, default_portfolio, df_SPY],
        keys=["Optimized Portfolio", "Default Portfolio", "S&P500"],
        axis=1)

    plot_data(df_comparison, "Portfolio Optimization", "Date", "Price")
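The helpers used above are not listed in this example. A minimal sketch of what compute_daily_portfolio_value could look like, assuming the price columns are ordered like the allocation list and the portfolio is bought once and held:

# Hypothetical sketch; not part of the original example.
def compute_daily_portfolio_value(df_prices, capital, allocations):
    normalized = df_prices / df_prices.iloc[0]              # rebase every symbol to 1.0 on day one
    position_values = normalized * allocations * capital    # split the starting capital across symbols
    return position_values.sum(axis=1)                      # total portfolio value per day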
Example #2
def assess_portfolio(start_date, end_date, symbols, allocs, start_val=1):
    # Read in adjusted closing prices for given symbols, date range
    dates = pd.date_range(start_date, end_date)
    prices_all = get_data(symbols, dates)  # automatically adds SPY
    prices = prices_all[symbols]  # only portfolio symbols
    prices_SPY = prices_all['SPY']  # only SPY, for comparison later

    # Get daily portfolio value
    port_val = get_portfolio_value(prices, allocs, start_val)

    #Get portfolio statistics (note: std_daily_ret = volatility)
    cum_ret, avg_daily_ret, std_daily_ret, sharpe_ratio = get_portfolio_stats(
        port_val)

    # Print statistics
    print "Start Date:", start_date
    print "End Date:", end_date
    print "Symbols:", symbols
    print "Allocations:", allocs
    print "Sharpe Ratio:", sharpe_ratio
    print "Volatility (stdev of daily returns):", std_daily_ret
    print "Average Daily Return:", avg_daily_ret
    print "Cumulative Return:", cum_ret

    # Compare daily portfolio value with SPY using a normalized plot
    df_temp = pd.concat([port_val, prices_SPY],
                        keys=['Portfolio', 'SPY'],
                        axis=1)
    plot_normalized_data(df_temp, title="Daily portfolio value and SPY")
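get_portfolio_value and get_portfolio_stats are defined elsewhere in that project. A minimal sketch of get_portfolio_stats, matching the unpacking order used above; the zero risk-free rate and the 252-trading-day annualization factor in the Sharpe ratio are assumptions:

# Hypothetical sketch; not part of the original example.
import numpy as np

def get_portfolio_stats(port_val, daily_rf=0.0, samples_per_year=252):
    daily_returns = port_val.pct_change().iloc[1:]            # drop the first NaN row
    cum_ret = (port_val.iloc[-1] / port_val.iloc[0]) - 1
    avg_daily_ret = daily_returns.mean()
    std_daily_ret = daily_returns.std()                       # volatility
    sharpe_ratio = np.sqrt(samples_per_year) * (avg_daily_ret - daily_rf) / std_daily_ret
    return cum_ret, avg_daily_ret, std_daily_ret, sharpe_ratio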
Example #3
def optimize_portfolio(start_date, end_date, symbols):
    dates = pd.date_range(start_date, end_date)
    prices_all = get_data(symbols, dates)

    prices = prices_all[symbols]  # only portfolio symbols
    prices_SPY = prices_all['SPY']  # only SPY, for comparison later

    # Get optimal allocations
    allocs = find_optimal_allocations(prices)
    allocs = allocs / np.sum(allocs)  # normalize allocations, if they don't sum to 1.0

    # Get daily portfolio value (already normalized since we use default start_val=1.0)
    port_val = get_portfolio_value(prices, allocs)

    # Get portfolio statistics (note: std_daily_ret = volatility)
    cum_ret, avg_daily_ret, std_daily_ret, sharpe_ratio = get_portfolio_stats(port_val)

    # Print statistics
    print "Start Date:", start_date
    print "End Date:", end_date
    print "Symbols:", symbols
    print "Optimal allocations:", allocs
    print "Sharpe Ratio:", sharpe_ratio
    print "Volatility (stdev of daily returns):", std_daily_ret
    print "Average Daily Return:", avg_daily_ret
    print "Cumulative Return:", cum_ret

    # Compare daily portfolio value with normalized SPY
    normed_SPY = prices_SPY / prices_SPY.ix[0, :]
    df_temp = pd.concat([port_val, normed_SPY], keys=['Portfolio', 'SPY'], axis=1)
    plot_data(df_temp, title="Daily Portfolio Value and SPY")
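find_optimal_allocations is not shown either. A common way to implement it, and only an assumption about this project, is to minimize the negative Sharpe ratio with scipy under a sum-to-one, long-only constraint:

# Hypothetical sketch; the objective (maximizing the Sharpe ratio) is assumed.
import numpy as np
import scipy.optimize as spo

def find_optimal_allocations(prices):
    def neg_sharpe(allocs):
        port_val = (prices / prices.iloc[0] * allocs).sum(axis=1)
        daily_ret = port_val.pct_change().iloc[1:]
        return -np.sqrt(252) * daily_ret.mean() / daily_ret.std()

    n = prices.shape[1]
    guess = np.ones(n) / n                                  # start equal-weighted
    bounds = [(0.0, 1.0)] * n                               # long-only allocations
    constraints = ({'type': 'eq', 'fun': lambda a: np.sum(a) - 1.0},)
    result = spo.minimize(neg_sharpe, guess, method='SLSQP',
                          bounds=bounds, constraints=constraints)
    return result.x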
Example #4
def run():
    # Define default parameters
    start_date = '2007-12-31'
    end_date = '2009-12-31'
    stock = 'IBM'

    #check for user input of stocks and date range
    if (len(sys.argv) > 1):
        file_path = "data/" + sys.argv[1] + ".csv"
        # Check if that file exists
        if not os.path.exists(file_path) or not os.path.isfile(file_path):
            print 'Data for the specified stock does not exist. Please choose a stock from the data folder, or run with no arguments to use the default (IBM).'
            return
        stock = sys.argv[1]

    if (len(sys.argv) > 3):
        try:
            pd.date_range(sys.argv[2], sys.argv[3])
        except:
            print "The arguments you input for the start and end dates are not valid dates. Please enter your input in the format like 'YYYY-MM-DD' or omit arguments for default value"
            return
        start_date = sys.argv[2]
        end_date = sys.argv[3]
        # Note: this does not check whether the dates are actually present in the stock data files; that check should be added later.

    dates = pd.date_range(start_date, end_date)
    prices_all = get_data([stock], dates)

    generate_orders_file(prices_all, dates, stock)
    calculate_portfolio_value("Unit2/orders/orders_mystrategy.csv", prices_all,
                              dates, stock)
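generate_orders_file and calculate_portfolio_value are not shown here, but the order-file format they share can be inferred from the calculate_portfolio_value listing further down, which expects a 'Date' index plus 'Symbol', 'Order' and 'Shares' columns. A sketch of producing such a file (the dates and share counts below are placeholders):

# Hypothetical sketch of the orders CSV format implied by calculate_portfolio_value.
import pandas as pd

orders = pd.DataFrame(
    [("2009-01-05", "IBM", "BUY", 100),     # placeholder rows
     ("2009-03-12", "IBM", "SELL", 100)],
    columns=["Date", "Symbol", "Order", "Shares"],
).set_index("Date")
orders.to_csv("Unit2/orders/orders_mystrategy.csv")   # path taken from the example above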
Example #5
def main():
    start_date = "01/01/2017"
    end_date = "31/12/2017"
    symbols = ["FB"]
    stock_symbol = "FB"
    df = get_data(symbols, start_date, end_date, include_SPY=False)
    print(df.head())
    print(df.tail())

    window = 20
    rolling_mean = df[stock_symbol].rolling(window=window).mean()
    rolling_std = df[stock_symbol].rolling(window=window).std()
    df["Rolling Mean"] = rolling_mean
    df["Upper Bollinger Band"], df[
        "Lower Bollinger Band"] = get_bollinger_bands(rolling_mean,
                                                      rolling_std)
    plot_data(df, stock_symbol + " Bollinger Bands", "Date", "Price")
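get_bollinger_bands is not listed in this example. Assuming the conventional definition of Bollinger bands (two rolling standard deviations around the rolling mean), it would look roughly like:

# Hypothetical sketch; the two-standard-deviation width is the conventional choice.
def get_bollinger_bands(rolling_mean, rolling_std):
    upper_band = rolling_mean + 2 * rolling_std
    lower_band = rolling_mean - 2 * rolling_std
    return upper_band, lower_band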
Example #6
    def train(self):
        @tf.function
        def train_step(image_batch):
            noise = tf.random.normal((self.configs.data.batch_size,
                                      eval(self.configs.noise_dim)[0]))
            with tf.GradientTape() as gen_tape, tf.GradientTape() as disc_tape:
                generated_images = self.generator(noise, training=True)

                real_output = self.discriminator(image_batch, training=True)
                fake_output = self.discriminator(generated_images,
                                                 training=True)

                generator_loss = self.get_generator_loss(fake_output)
                discriminator_loss = self.get_discriminator_loss(
                    real_output, fake_output)

            gradients_of_generator = gen_tape.gradient(
                generator_loss, self.generator.trainable_variables)
            self.generator_opt.apply_gradients(
                zip(gradients_of_generator,
                    self.generator.trainable_variables))

            gradients_of_discriminator = disc_tape.gradient(
                discriminator_loss, self.discriminator.trainable_variables)
            self.discriminator_opt.apply_gradients(
                zip(gradients_of_discriminator,
                    self.discriminator.trainable_variables))
            return [('generator_loss', generator_loss.numpy()),
                    ('discriminator_loss', discriminator_loss.numpy())]

        data = get_data(self.configs.data.path, self.configs.data.batch_size)
        for i in range(self.num_epochs):
            print("\nepoch {}/{}".format(i + 1, self.num_epochs))
            prog_bar = Progbar(
                None,
                stateful_metrics=['generator_loss', 'discriminator_loss'])
            for idx, im_batch in enumerate(data):
                losses = train_step(im_batch)
                prog_bar.update(idx + 1, values=losses)
            self.save_results(i)
            if self.__save_model and (i + 1) % self.__save_interval == 0:
                self.save_models()
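get_generator_loss and get_discriminator_loss are defined elsewhere in that class. Assuming the standard non-saturating GAN losses on discriminator logits (the usual choice with this training loop), they would look roughly like the following, written as plain functions here rather than methods:

# Hypothetical sketch of the loss helpers, assuming standard GAN losses on logits.
import tensorflow as tf

cross_entropy = tf.keras.losses.BinaryCrossentropy(from_logits=True)

def get_generator_loss(fake_output):
    # The generator wants fakes to be classified as real (label 1).
    return cross_entropy(tf.ones_like(fake_output), fake_output)

def get_discriminator_loss(real_output, fake_output):
    # The discriminator should label real images 1 and generated images 0.
    real_loss = cross_entropy(tf.ones_like(real_output), real_output)
    fake_loss = cross_entropy(tf.zeros_like(fake_output), fake_output)
    return real_loss + fake_loss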
Example #7
def main():
    capital = 100000
    symbols = ["AAPL", "FB", "GOOG", "SPY"]
    allocations = [0.25, 0.25, 0.25, 0.25]
    start_date = "01/01/2017"
    end_date = "31/12/2017"

    # Portfolio DataFrame
    df_portfolio = get_data(symbols, start_date, end_date)
    df_SPY = df_portfolio.loc[:, "SPY"]

    #Daily Portfolio Value
    daily_portfolio_value = compute_daily_portfolio_value(df_portfolio, capital, allocations)
    print(daily_portfolio_value.head())

    #Daily Portfolio Return
    daily_portfolio_return = compute_daily_portfolio_return(daily_portfolio_value)

    # Cumulative portfolio return
    cumulative_portfolio_return = compute_cummulative_portfolio_return(daily_portfolio_value)
    print("Cumulative Portfolio Return:", cumulative_portfolio_return)

    #Daily Portfolio Return Mean
    mean_daily_portfolio_return = compute_mean_daily_portfolio_return(daily_portfolio_return)
    print("Daily Portfolio Return Mean:", mean_daily_portfolio_return)

    #Daily Portfolio Return Standard Deviation
    std_daily_portfolio_return = compute_std_daily_portfolio_return(daily_portfolio_return)
    print("Daily Portfolio Return Standard Deviation:", std_daily_portfolio_return)

    #Daily Sampled Sharpe Ratio
    daily_sampled_sharpe_ratio = compute_daily_sampled_sharpe_ratio(mean_daily_portfolio_return, std_daily_portfolio_return)
    print("Daily Sampled Sharpe Ratio:", daily_sampled_sharpe_ratio)

    # Compare the portfolio against the S&P 500
    daily_portfolio_value_normalized = daily_portfolio_value / daily_portfolio_value.iloc[0]
    df_SPY_normalized = df_SPY / df_SPY.iloc[0]
    df_comparison = pd.concat([daily_portfolio_value_normalized, df_SPY_normalized], keys=["Portfolio", "SPY"], axis=1)
    plot_data(df_comparison, "Portfolio 2017 Normalized Price", "Date", "Price")
Example #8
def simulate_strategy(date_start, date_end, folder_strategy, file_strategy, events=True):

    dates = pd.date_range(date_start, date_end)
    file_name = os.path.join(folder_strategy, '{}.csv'.format(file_strategy))

    # Load orders
    orders = pd.read_csv(file_name, index_col=0, parse_dates=True)
    stock = orders.ix[0]['Symbol'] 

    # Load stock data and join orders in the same DataFrame 
    prices_all = get_data([stock], dates)
    df = prices_all[[stock]]
    df = df.join(orders[['Order', 'Shares']])

    # Back-Testing Algorithm
    df['daily_val'] = 0.0
    df['shorting'] = 1.0
    hold = False
    shorting = False

    # Compute portfolio daily value
    for i, row in df.iterrows():
    
        if (row['Order'] == 'BUY LONG') | (row['Order'] == 'SELL SHORT'):
            hold = True
            n_stocks = row['Shares']
            if row['Order'] == 'SELL SHORT':
                shorting = True            
    
        if hold == True:
            df.loc[i, 'Shares'] = n_stocks # replicate shares when stock is hold
            df.loc[i, 'daily_val'] = n_stocks * row[stock]
            if shorting:
                df.loc[i, 'shorting'] = -1.0
                    
        if (row['Order'] == 'EXIT LONG') | (row['Order'] == 'EXIT SHORT'):
            hold = False
            shorting = False
        
    # Compute daily returns
    df['daily_return'] = df['daily_val'].diff()   
    df['daily_return'] = df['daily_return'] * df['shorting']
    # Reset to 0 NaN and not valid daily_returns  
    df.ix[0, 'daily_return'] = 0
    for (index1, row1),(index2, row2) in izip(df.iterrows(), df[1:].iterrows()):
        if np.isnan(row1['Shares']) or np.isnan(row2['Shares']):
            df.loc[index2, 'daily_return'] = 0.0
    df['cum_return'] = df['daily_return'].cumsum()

    # Compute cumulative portfolio value
    T0 = df[~pd.isnull(df['Order'])].index[0] # Timestamp @ first investment 
    Val0 = df.ix[T0]['daily_val'] # Initial portfolio value
    df['port_val'] = df['cum_return'] + Val0
    

    # Plot Section
    df_temp = pd.concat([df[stock]/df.ix[0][stock], df['port_val']/df.ix[T0]['port_val']], keys=[stock, 'Portfolio'], axis=1)
    ax = df_temp.plot(figsize=(15, 10))
    # Plot events
    if events:
        for day, key in orders.iterrows():
            if key['Order'] == 'BUY LONG':
                ax.axvline(x=day, color='green')
            elif key['Order'] == 'SELL SHORT':
                ax.axvline(x=day, color='red')
            elif (key['Order'] == 'EXIT LONG') | (key['Order'] == 'EXIT SHORT'):
                ax.axvline(x=day, color='black')
    plt.show()   

    # Compute stats sections
    sharpe_ratio, cum_ret, avg_daily_ret, std_daily_ret = get_portfolio_stats(df['port_val'])
    sharpe_ratio_stock, cum_ret_stock, avg_daily_ret_stock, std_daily_ret_stock = get_portfolio_stats(df[stock])

    # Compare portfolio against stock
    print "Cumulative Return of Fund: {}".format(cum_ret)
    print "Cumulative Return of {}: {}".format(stock, cum_ret_stock)
    print
    print "Standard Deviation of Fund: {}".format(std_daily_ret)
    print "Standard Deviation of {}: {}".format(stock, std_daily_ret_stock)
    print
    print "Average Daily Return of Fund: {}".format(avg_daily_ret)
    print "Average Daily Return of {}: {}".format(stock, avg_daily_ret_stock)
    print
    print "Sharpe Ratio of Fund: {}".format(sharpe_ratio)
    print "Sharpe Ratio of {}: {}".format(stock, sharpe_ratio_stock)
    print
    print "Initial Portfolio Value: {}".format(df['port_val'][0])
    print "Final Portfolio Value: {}".format(df['port_val'][-1])
    print "Final Portfolio Return: {}".format(df['cum_return'][-1])
    print "Final {} Return: {}".format(stock, (df[stock][-1]-df[stock][0])*n_stocks)
    
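A call such as the following would back-test one orders file and plot the entry and exit events; the folder and file names here are placeholders:

# Hypothetical usage; arguments are placeholders.
simulate_strategy('2008-01-01', '2009-12-31', 'orders', 'orders_mystrategy', events=True)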
Example #9
from selenium import webdriver
from selenium.webdriver.support.select import Select
import sys, time, unittest, re
import common.ReadConfig as co
sys.path.append('..')
from pages.loginpage import LoginPage
from pages.zf_userpage import UserPagezf
from pages.jqxxpage import JqxxPage
from pages.xfgk1cyed1xfedpage import Cyed1xfedPage
import utils.util as ut
file1 = ut.DATA_PATH + '/user_data.csv'
data1 = ut.get_data(file1)
file2 = ut.DATA_PATH + '/zfzjq_data.csv'
data2 = ut.get_data(file2)
url = co.getbrowsername('Url')
mark1 = 1


class TestCyed(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        cls.driver = webdriver.Firefox()
        cls.driver.maximize_window()
        cls.driver.get(url)
        time.sleep(3)
        cls.dl = LoginPage(cls.driver, Select)
        cls.zfuser = UserPagezf(cls.driver, Select)
        cls.jqxx = JqxxPage(cls.driver, Select)
        cls.cyed1 = Cyed1xfedPage(cls.driver, Select)

    @classmethod
Example #10
from selenium import webdriver
from selenium.webdriver.support.select import Select
import sys, time, unittest
import common.ReadConfig as co
sys.path.append('..')
from pages.loginpage import LoginPage
from pages.jc_userpage import UserPage
import utils.util as ut
file1 = ut.DATA_PATH + '/user_data.csv'
data1 = ut.get_data(file1)
url = co.getbrowsername('Url')
#dri=co.getbrowsername('BrowserName')
mark1 = 1


class TestUser(unittest.TestCase):
    def setUp(self):
        self.driver = webdriver.Firefox()
        self.driver.maximize_window()
        self.driver.get(url)
        time.sleep(2)
        self.dl = LoginPage(self.driver, Select)
        self.user = UserPage(self.driver, Select)

    def tearDown(self):
        global mark1
        mark1 += 1
        self.driver.quit()

    def testcase1(self):
        '''Test case 1: a new user is created successfully'''
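Note that ut.get_data in these Selenium examples loads test-fixture rows from a CSV file rather than market prices. Purely as an assumption about utils.util, it could be as simple as:

# Hypothetical sketch; assumed to be a plain CSV reader returning a list of rows.
import csv

def get_data(file_path):
    with open(file_path, newline='', encoding='utf-8') as f:
        return list(csv.reader(f))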
Example #11
def run():
    # Define default parameters
    start_date = '2008-01-01'
    end_date = '2009-12-31'
    start_test_date = '2010-01-01'
    end_test_date = '2010-12-31'
    stock = 'IBM'

    #check for user input of stocks and date range
    if (len(sys.argv) > 1):
        file_path = "data/" + sys.argv[1] + ".csv"
        # Check if that file exists
        if not os.path.exists(file_path) or not os.path.isfile(file_path):
            print 'Data for the specified stock does not exist. Please choose a stock from the data folder, or run with no arguments to use the default (IBM).'
            return
        stock = sys.argv[1]

    dates = pd.date_range(start_date, end_date)
    test_dates = pd.date_range(start_test_date, end_test_date)

    #read in data that you're going to use
    prices_all = get_data([stock], dates)  # automatically adds SPY
    test_prices_all = get_data([stock], test_dates)

    #set up dataframe to train learner over
    data = pd.DataFrame(index=dates)
    data['actual_prices'] = prices_all[stock]
    data['bb_value'] = prices_all[stock] - pd.rolling_mean(prices_all[stock],
                                                           window=5)
    data['bb_value'] = data['bb_value'] / (
        pd.rolling_std(prices_all[stock], window=5) * 2)
    data['momentum'] = (prices_all[stock] /
                        prices_all[stock].shift(periods=-5)) - 1
    data['volatility'] = pd.rolling_std(
        ((prices_all[stock] / prices_all[stock].shift(periods=-1)) - 1),
        window=5)
    data['y_values'] = prices_all[stock].shift(periods=-5)
    data = data.dropna(subset=['actual_prices'])
    trainX = data.iloc[4:, 0:-1]
    trainY = data.iloc[4:, -1]

    #set up data frame to test learner over
    test_data = pd.DataFrame(index=test_dates)
    test_data['actual_prices'] = test_prices_all[stock]
    test_data['bb_value'] = test_prices_all[stock] - pd.rolling_mean(
        test_prices_all[stock], window=5)
    test_data['bb_value'] = test_data['bb_value'] / (
        pd.rolling_std(test_prices_all[stock], window=5) * 2)
    test_data['momentum'] = (test_prices_all[stock] /
                             test_prices_all[stock].shift(periods=-5)) - 1
    test_data['volatility'] = pd.rolling_std(
        ((test_prices_all[stock] / test_prices_all[stock].shift(periods=-1)) -
         1),
        window=5)
    test_data['y_values'] = test_prices_all[stock].shift(periods=-5)
    test_data = test_data.dropna(subset=['actual_prices'])
    testX = test_data.iloc[:, 0:-1]
    testY = test_data.iloc[:, -1]

    #create a KNN Learner for the data and add evidence to it
    learner = knn.KNNLearner(3)
    learner.addEvidence(trainX, trainY)

    #run a simulation of the trading strategy based on predicted future values over training data
    print "\nTraining Data Results:"
    run_simulation(learner, prices_all, stock, trainX, trainY, dates,
                   "Unit3/orders/orders_trainingdata.csv")
    calculate_portfolio_value("Unit3/orders/orders_trainingdata.csv",
                              prices_all, dates, stock)

    # Run a simulation of the trading strategy over previously unseen testing data to test its performance
    print "\nTest Data Results:"
    run_simulation(learner, test_prices_all, stock, testX, testY, test_dates,
                   "Unit3/orders/orders_testdata.csv")
    calculate_portfolio_value("Unit3/orders/orders_testdata.csv",
                              test_prices_all, test_dates, stock)
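The pd.rolling_mean and pd.rolling_std calls above have since been removed from pandas; with a current release the same Bollinger-band feature would be built through the .rolling() accessor, for example:

# Modern-pandas equivalent of the pd.rolling_mean / pd.rolling_std calls above.
rolling_mean = prices_all[stock].rolling(window=5).mean()
rolling_std = prices_all[stock].rolling(window=5).std()
data['bb_value'] = (prices_all[stock] - rolling_mean) / (rolling_std * 2)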
def calculate_portfolio_value(datafile, prices_all, dates, stock):
    #read in the data from the order file
    orders_df = pd.read_csv(datafile, index_col='Date', parse_dates=True)
    orders_df = orders_df.sort_index(kind='mergesort')

    #set up the data frame for the stocks over the period
    daily_prices_df = pd.DataFrame(index=dates)
    daily_prices_df = daily_prices_df.join(prices_all[stock])
    daily_prices_df.dropna(inplace=True)
    daily_prices_df['Cash'] = 1.0

    #make an additional data frame called trades
    trades_df = daily_prices_df.copy()
    trades_df[0:] = 0.0
    trades_df.loc[0:, 'Cash'] = 10000

    #step through the orders frame and for each order update the trades table
    for index, row in orders_df.iterrows():
        symbol_to_update = row['Symbol']
        amount_to_update = row['Shares']

        if row['Order'] == "SELL":
            amount_to_update = amount_to_update * -1.0

        cash_gained_or_lost = amount_to_update * -1.0 * daily_prices_df.loc[
            index, symbol_to_update]
        current_holdings = amount_to_update + trades_df.loc[index,
                                                            symbol_to_update]
        current_cash = cash_gained_or_lost + trades_df.loc[index, 'Cash']

        trades_df.loc[index:, symbol_to_update] = current_holdings
        trades_df.loc[index:, 'Cash'] = current_cash

    value_of_portfolio = trades_df * daily_prices_df
    portvals = value_of_portfolio.sum(axis=1)
    if isinstance(portvals, pd.DataFrame):
        portvals = portvals[portvals.columns[0]]

    cum_ret, avg_daily_ret, std_daily_ret, sharpe_ratio = get_portfolio_stats(
        portvals)

    # Simulate a $SPX-only reference portfolio to get stats
    prices_SPX = get_data(['$SPX'], dates)
    prices_SPX = prices_SPX[['$SPX']]  # remove SPY
    portvals_SPX = get_portfolio_value(prices_SPX, [1.0])
    cum_ret_SPX, avg_daily_ret_SPX, std_daily_ret_SPX, sharpe_ratio_SPX = get_portfolio_stats(
        portvals_SPX)

    # Compare portfolio against $SPX
    print "Sharpe Ratio of Fund: {}".format(sharpe_ratio)
    print "Sharpe Ratio of $SPX: {}".format(sharpe_ratio_SPX)
    print
    print "Cumulative Return of Fund: {}".format(cum_ret)
    print "Cumulative Return of $SPX: {}".format(cum_ret_SPX)
    print
    print "Standard Deviation of Fund: {}".format(std_daily_ret)
    print "Standard Deviation of $SPX: {}".format(std_daily_ret_SPX)
    print
    print "Average Daily Return of Fund: {}".format(avg_daily_ret)
    print "Average Daily Return of $SPX: {}".format(avg_daily_ret_SPX)
    print
    print "Final Portfolio Value: {}".format(portvals[-1])

    # Plot computed daily portfolio value
    df_temp = pd.concat([portvals, prices_SPX['$SPX']],
                        keys=['Portfolio', '$SPX'],
                        axis=1)
    plot_normalized_data(df_temp, title="Daily portfolio value and $SPX")
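plot_normalized_data, used here and in Example #2, is not listed. A minimal sketch, assuming it simply rebases every column to 1.0 on the first day before plotting:

# Hypothetical sketch; not part of the original example.
import matplotlib.pyplot as plt

def plot_normalized_data(df, title="Normalized prices"):
    normalized = df / df.iloc[0]          # rebase each column to 1.0 on the first day
    ax = normalized.plot(title=title)
    ax.set_xlabel("Date")
    ax.set_ylabel("Normalized price")
    plt.show()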