Example #1
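# ZiplineRunThread stores its run parameters; Date(tz='utc', as_timestamp=True).parser()
# converts the start/end date strings into tz-aware UTC pandas Timestamps.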
    def __init__(self,
                 code,
                 start,
                 end,
                 capital_base=100000,
                 consume_portfolio=None):
        super(ZiplineRunThread, self).__init__()
        self.code = code
        self.start_date = Date(tz='utc', as_timestamp=True).parser(start)
        self.end_date = Date(tz='utc', as_timestamp=True).parser(end)
        self.capital_base = capital_base
        self.consume_portfolio = consume_portfolio
Example #2
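# Click options for a `run`-style backtest command; Date(tz='utc', as_timestamp=True)
# is used as the click parameter type so --start/--end are parsed into UTC Timestamps.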
    '--bundle',
    default='quantopian-quandl',
    metavar='BUNDLE-NAME',
    show_default=True,
    help='The data bundle to use for the simulation.',
)
@click.option('--bundle-timestamp',
              type=Timestamp(),
              default=pd.Timestamp.utcnow(),
              show_default=False,
              help='The date to lookup data on or before.\n'
              '[default: <current-time>]')
@click.option(
    '-s',
    '--start',
    type=Date(tz='utc', as_timestamp=True),
    help='The start date of the simulation.',
)
@click.option(
    '-e',
    '--end',
    type=Date(tz='utc', as_timestamp=True),
    help='The end date of the simulation.',
)
@click.option(
    '-o',
    '--output',
    default='-',
    metavar='FILENAME',
    show_default=True,
    help="The location to write the perf data. If this is '-' the perf will"
Example #3
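# Builds a DataPortal from an ingested bundle and creates daily simulation parameters;
# Date(tz='utc', as_timestamp=True).parser() converts the start/end strings.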
first_trading_day = \
    bundle_data.equity_minute_bar_reader.first_trading_day
data = DataPortal(
    trading_environment.asset_finder,
    trading_calendar,
    first_trading_day=first_trading_day,
    equity_minute_reader=bundle_data.equity_minute_bar_reader,
    equity_daily_reader=bundle_data.equity_daily_bar_reader,
    adjustment_reader=bundle_data.adjustment_reader,
)
################################## sim_params
capital_base = DEFAULT_CAPITAL_BASE
start = '2017-1-1'
end = '2017-11-30'
sim_params = create_simulation_parameters(
    capital_base=capital_base,
    start=Date(tz='utc', as_timestamp=True).parser(start),
    end=Date(tz='utc', as_timestamp=True).parser(end),
    data_frequency='daily',
    trading_calendar=trading_calendar,
)


#######################################################################
def rebalance(context, data):
    #print ("rebalance:",get_datetime())
    #print(context.pipeline_data)
    pipeline_data = context.pipeline_data
    keys = list(context.posset)
    for asset in keys:
        if data.can_trade(asset):
            #print("flattern:",asset)
Example #4
# coding=utf-8
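# Minimal cn_zipline algorithm: buy 10 shares of 000001 each bar, record its price,
# and run the backtest with start/end Timestamps from Date(...).parser().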

from zipline.api import order, record, symbol


def initialize(context):
    pass


def handle_data(context, data):
    order(symbol('000001'), 10)
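    # note: the current price of 000001 is recorded under the key 'AAPL'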
    record(AAPL=data.current(symbol('000001'), 'price'))


if __name__ == '__main__':
    from cn_zipline.utils.run_algo import run_algorithm
    from zipline.utils.cli import Date
    from cn_stock_holidays.zipline.default_calendar import shsz_calendar

    start = Date(tz='utc', as_timestamp=True).parser('2017-01-01')

    end = Date(tz='utc', as_timestamp=True).parser('2017-10-20')
    run_algorithm(start,
                  end,
                  initialize,
                  10e6,
                  handle_data=handle_data,
                  bundle='tdx',
                  trading_calendar=shsz_calendar,
                  output='out.pickle')
Example #5
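# Live-trading setup against a TDX/ShiPane broker; the simulation window runs from a
# fixed start date to today's date, both parsed with Date(tz='utc', as_timestamp=True).parser().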
            order(symbol('002450'), 300, limit_price=buy_price)
            buy_status = True


if __name__ == '__main__':
    from zipline.utils.cli import Date
    from zipline.utils.run_algo import run_algorithm
    from zipline.gens.brokers.tdx_shipane_broker import TdxShipaneBroker
    from zipline.gens.shipane_client import ShipaneClient
    import pandas as pd
    import os
    import datetime
    import platform  # needed for platform.architecture() below

    if platform.architecture()[0] == '32bit':
        client_uri = 'config.json'
    else:
        client_uri = "tcp://127.0.0.1:4242"

    shipane_client = ShipaneClient(client_key="1")
    broker = TdxShipaneBroker(client_uri, shipane_client)
    if not os.path.exists('tmp'):
        os.mkdir('tmp')
    realtime_bar_target = 'tmp/real-bar-{}'.format(str(pd.to_datetime('today').date()))
    state_filename = 'tmp/live-state'

    start = Date(tz='utc', as_timestamp=True).parser('2017-10-01')

    end = Date(tz='utc', as_timestamp=True).parser(datetime.datetime.now().strftime("%Y-%m-%d"))
    run_algorithm(start, end, initialize, 10e6, handle_data=handle_data, bundle='tdx',
                  trading_calendar='SHSZ', data_frequency="minute", output='out.pickle',
                  broker=broker, state_filename=state_filename, realtime_bar_target=realtime_bar_target)
Example #6
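# A similar option stack (benchmark, start/end, output); Date(tz="utc", as_timestamp=True)
# again serves as the click parameter type for the --start/--end dates.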
    "--benchmark-sid",
    default=None,
    type=int,
    help="The sid of the instrument to be used as a benchmark "
    "(should exist in the ingested bundle)",
)
@click.option(
    "--no-benchmark",
    is_flag=True,
    default=False,
    help="If passed, use a benchmark of zero returns.",
)
@click.option(
    "-s",
    "--start",
    type=Date(tz="utc", as_timestamp=True),
    help="The start date of the simulation.",
)
@click.option(
    "-e",
    "--end",
    type=Date(tz="utc", as_timestamp=True),
    help="The end date of the simulation.",
)
@click.option(
    "-o",
    "--output",
    default="-",
    metavar="FILENAME",
    show_default=True,
    help="The location to write the perf data. If this is '-' the perf will"
Example #7
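# Runs a Pipeline via a Research helper over the window parsed with Date(...).parser(),
# then flattens the result into a feature matrix X and a 'returns' target Y.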
    pipe = Pipeline(columns=pipe_columns,
                    screen=private_universe,
                    )
    for i, c in enumerate(ONEHOTCLASS):
        pipe.add(c, sector_indict_keys[i])
    return pipe


pd.set_option('display.width', 8000)
research = Research()
#print(research.get_engine()._finder)
my_pipe = make_pipeline(research.get_engine()._finder)
result = research.run_pipeline(my_pipe,
                               Date(tz='utc', as_timestamp=True).parser(start),
                               Date(tz='utc', as_timestamp=True).parser(end))

result = result.reset_index().drop(['level_0', 'level_1'], axis=1).replace([np.inf, -np.inf], np.nan).fillna(0)
print "############################################"


X = result.drop('returns', axis=1)
Y = result['returns']

print ("total data size :",len(result))
#test_size=2000
Train_X = X.values
Train_Y = Y.values
# Test_X  = X[-test_size:].values
# Test_Y  = Y[-test_size:].values