Example #1
        ax2 = pyplot.twinx()
        pyplot.plot(SPY_close, "k")
        ax2.set_ylabel("S (black)")
        pyplot.title("V and S Closing Px")

        pyplot.subplot(3, 2, 3)
        pyplot.plot(pnl)
        pyplot.plot(cost)
        pyplot.title("PnL v cost")

        pyplot.subplot(3, 2, 2)
        pyplot.plot(running_pnl)
        pyplot.plot(running_cost)
        pyplot.title("Running PnL v Cost")

        pyplot.subplot(3, 2, 4)
        pyplot.plot(running_net)
        pyplot.title("Running Net")

        pyplot.subplot(3, 2, 5)
        pyplot.bar(range(len(trade_count)), trade_count)
        pyplot.title("Trade Count")

        pyplot.subplot(3, 2, 6)
        correlation = get_corr(offset=0)
        pyplot.plot(correlation)
        pyplot.title("Correlation (1 period lag)")

        pyplot.show()
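Both examples call get_corr, which is not defined in either listing; at the call sites it takes only an offset argument, so it presumably reads the close series from the surrounding scope. The sketch below is a minimal, self-contained guess at what such a helper might do, assuming a rolling correlation of close-to-close returns with a configurable lag; the explicit series parameters and the 20-period window are illustrative choices, not taken from the original code.

import numpy as np


def get_corr(spy_closes, vxx_closes, offset=0, window=20):
    """Hypothetical sketch: rolling correlation of SPY vs. VXX close-to-close
    returns, with the VXX series lagged by `offset` periods (all assumptions)."""
    spy_rets = np.diff(spy_closes) / spy_closes[:-1]
    vxx_rets = np.diff(vxx_closes) / vxx_closes[:-1]
    corr = np.full(len(spy_rets), np.nan)
    for i in range(window + offset, len(spy_rets) + 1):
        # correlate the latest `window` SPY returns with the VXX returns
        # shifted back by `offset` periods
        corr[i - 1] = np.corrcoef(spy_rets[i - window:i],
                                  vxx_rets[i - window - offset:i - offset])[0, 1]
    return corr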

Example #2
import h5py
import numpy as np
from matplotlib import pyplot

# filename, start_day, end_day, backtest_one_day and get_corr are expected to
# be defined elsewhere in the module.


def main():
    with h5py.File(filename, "r") as root:
        trade_dates = list(root)[start_day:end_day]
        pnls = np.zeros(len(trade_dates))
        costs = np.zeros(len(trade_dates))
        VXX_closes = np.zeros(len(trade_dates))
        SPY_closes = np.zeros(len(trade_dates))
        trade_counts = np.zeros(len(trade_dates))
        std = 0.00015
        for day, trade_date in enumerate(trade_dates):
            # print(trade_date)
            names = root[trade_date]["names"][()]
            # prices: column 0 is SPY, column 1 is VXX
            SPY_prices = root[trade_date]["prices"][:, 0]
            VXX_prices = root[trade_date]["prices"][:, 1]
            epoch_times = root[trade_date]["dates"][()]

            SPY_closes[day] = SPY_prices[-1]
            VXX_closes[day] = VXX_prices[-1]

            pnls[day], costs[day], std, trade_counts[day] = \
                                   backtest_one_day(SPY_prices, VXX_prices, std)

        running_pnls = np.cumsum(pnls)
        running_costs = np.cumsum(costs)
        running_nets = running_pnls - running_costs

        total_pnl = sum(pnls)
        total_cost = sum(costs)

        print " *" * 40
        print "Net pnl: %s" % (total_pnl - total_cost)
        print "Trading pnl: %s" % total_pnl
        print "Gross cost: %s" % total_cost
        print "Shares traded: %s" % "hello"
        print "Average: %s" % (np.mean(pnls) - np.mean(costs))
        print "Standard Deviation: %s" % np.std(pnls - costs)
        print "Average Trades/Day: %s" % np.mean(trade_counts)
        print

        ax1 = pyplot.subplot(3, 2, 1)
        pyplot.plot(VXX_closes, "r")
        ax1.set_ylabel("VXX (red)")
        ax2 = pyplot.twinx()
        pyplot.plot(SPY_closes, "k")
        ax2.set_ylabel("SPY (black)")
        pyplot.title("VXX and SPY Closing Px")

        pyplot.subplot(3, 2, 3)
        pyplot.plot(pnls)
        pyplot.plot(costs)
        pyplot.title("PnL v cost")

        pyplot.subplot(3, 2, 2)
        pyplot.plot(running_pnls)
        pyplot.plot(running_costs)
        pyplot.title("Running PnL v Cost")

        pyplot.subplot(3, 2, 4)
        pyplot.plot(running_nets)
        pyplot.title("Running Net")

        pyplot.subplot(3, 2, 5)
        pyplot.bar(range(len(trade_counts)), trade_counts)
        pyplot.title("Trade Count")

        pyplot.subplot(3, 2, 6)
        correlation = get_corr(offset=1)
        pyplot.plot(correlation)
        pyplot.title("Correlation (1 period lag)")

        pyplot.show()
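backtest_one_day(SPY_prices, VXX_prices, std) is the other undefined piece: the loop above unpacks a per-day PnL, cost, an updated volatility estimate, and a trade count from it. The sketch below is only one plausible shape for such a function, assuming a simple threshold rule on short-horizon SPY returns with a flat per-trade cost and an EWMA volatility update; the threshold, cost model, and trading rule are all assumptions, not the original author's strategy.

import numpy as np


def backtest_one_day(SPY_prices, VXX_prices, std,
                     entry_z=2.0, cost_per_trade=0.005, ewma_decay=0.97):
    """Hypothetical sketch; only the signature and the
    (pnl, cost, updated_std, trade_count) return shape come from the call site."""
    spy_rets = np.diff(SPY_prices) / SPY_prices[:-1]
    vxx_rets = np.diff(VXX_prices) / VXX_prices[:-1]

    pnl = 0.0
    cost = 0.0
    trade_count = 0
    for t in range(len(spy_rets) - 1):
        # carry an EWMA volatility estimate of the SPY return from day to day
        std = np.sqrt(ewma_decay * std ** 2 + (1.0 - ewma_decay) * spy_rets[t] ** 2)
        if abs(spy_rets[t]) > entry_z * std:
            side = -np.sign(spy_rets[t])   # direction of a one-period VXX position
            pnl += side * vxx_rets[t + 1]  # realize the next period's VXX return
            cost += cost_per_trade         # flat, illustrative cost per trade
            trade_count += 1
    return pnl, cost, std, trade_count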
Example #3
datasink = pe.Node(interface=DataSink(), name='sinker')
datasink.inputs.base_directory = out_path

# Workflow and its connections
vbm_preprocess = pe.Workflow(name="vbm_preprocess")
vbm_preprocess.connect([
    (converter, reorient, [('out_files', 'in_file')]),
    (reorient, segmentation, [('out_file', 'channel_files')]),
    (segmentation, list_normalized_images,
     [('normalized_class_images', 'normalized_class_images')]),
    (list_normalized_images, smoothing, [('list_norm_images', 'in_files')]),
    (segmentation, datasink, [('modulated_class_images', 'vbm_spm12'),
                              ('native_class_images', 'vbm_spm12.@1'),
                              ('normalized_class_images', 'vbm_spm12.@2'),
                              ('transformation_mat', 'vbm_spm12.@3')]),
    (smoothing, datasink, [('smoothed_files', 'vbm_spm12.@4')]),
])
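# Optional addition, not part of the original script: nipype's
# Workflow.write_graph can render the connections above for a quick
# visual check of the pipeline before it is run (requires Graphviz).
vbm_preprocess.write_graph(graph2use='colored', format='png', simple_form=True)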

try:
    # Run the workflow
    sys.stderr.write("running vbm workflow\n")
    res = vbm_preprocess.run()

except Exception:
    # If the run fails, set status to False
    status = False

else:
    # If it succeeds, set status to True
    status = True
        
finally:
    # Finally, write the status as a JSON object and compute the correlation coefficient
    segmented_file = glob.glob(out_path + "/vbm_spm12/swc1*nii")[0]
    corr_value = correlation.get_corr(tpm_path, segmented_file)
    sys.stdout.write(json.dumps({"vbm_preprocess": status}, sort_keys=True, indent=4, separators=(',', ': ')))
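
correlation.get_corr(tpm_path, segmented_file) comes from a helper module that is not part of this excerpt. Below is a minimal sketch of what such a helper might look like, assuming it returns the Pearson correlation between the smoothed, normalised grey-matter segmentation (the swc1* file) and SPM's tissue probability map; the use of nibabel, the handling of the 4D TPM, and the assumption that both volumes share the same voxel grid are all guesses rather than the original implementation.

# Hypothetical sketch of the correlation helper (not the original module).
import nibabel as nib
import numpy as np


def get_corr(tpm_path, segmented_file):
    """Pearson correlation between SPM's tissue probability map and a
    segmented image, assuming both volumes are on the same voxel grid."""
    tpm = nib.load(tpm_path).get_fdata()
    if tpm.ndim == 4:        # SPM's TPM.nii stacks tissue classes along dim 4
        tpm = tpm[..., 0]    # grey matter is the first class
    seg = nib.load(segmented_file).get_fdata()
    return float(np.corrcoef(tpm.ravel(), seg.ravel())[0, 1])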