Example #1
File: factory.py Project: aichi/zipline
def load_market_data():
    try:
        fp_bm = get_datafile('benchmark.msgpack', "rb")
    except IOError:
        print """
data msgpacks aren't distributed with source.
Fetching data from Yahoo Finance.
""".strip()
        dump_benchmarks()
        fp_bm = get_datafile('benchmark.msgpack', "rb")

    bm_list = msgpack.loads(fp_bm.read())
    bm_returns = []
    for packed_date, returns in bm_list:
        event_dt = tuple_to_date(packed_date)
        #event_dt = event_dt.replace(
        #    hour=0,
        #    minute=0,
        #    second=0,
        #    tzinfo=pytz.utc
        #)

        daily_return = risk.DailyReturn(date=event_dt, returns=returns)
        bm_returns.append(daily_return)

    fp_bm.close()

    bm_returns = sorted(bm_returns, key=attrgetter('date'))

    try:
        fp_tr = get_datafile('treasury_curves.msgpack', "rb")
    except IOError:
        print """
data msgpacks aren't distributed with source.
Fetching data from data.treasury.gov
""".strip()
        dump_treasury_curves()
        fp_tr = get_datafile('treasury_curves.msgpack', "rb")

    tr_list = msgpack.loads(fp_tr.read())
    tr_curves = {}
    for packed_date, curve in tr_list:
        tr_dt = tuple_to_date(packed_date)
        #tr_dt = tr_dt.replace(hour=0, minute=0, second=0, tzinfo=pytz.utc)
        tr_curves[tr_dt] = curve

    fp_tr.close()

    tr_curves = OrderedDict(sorted(
                            ((dt, c) for dt, c in tr_curves.iteritems()),
                            key=lambda t: t[0]))

    return bm_returns, tr_curves
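
load_market_data above expects benchmark.msgpack to hold a msgpack-encoded list of (packed_date, returns) pairs. A minimal, self-contained sketch of that round trip, with made-up dates and return values and an assumed (year, month, day, hour, minute, second) layout for the packed date (msgpack decodes tuples back as lists, which the unpacking loop above handles just the same):

import msgpack

# Made-up sample in the (packed_date, daily_return) shape the loader parses.
sample = [
    ((2012, 6, 1, 0, 0, 0), 0.0012),
    ((2012, 6, 4, 0, 0, 0), -0.0034),
]

payload = msgpack.dumps(sample)   # bytes, the kind of blob benchmark.msgpack holds
decoded = msgpack.loads(payload)  # back to [(packed_date, returns), ...]

for packed_date, returns in decoded:
    print(packed_date, returns)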
Example #2
def load_market_data():
    try:
        fp_bm = get_datafile('benchmark.msgpack', "rb")
    except IOError:
        print """
data msgpacks aren't distributed with source.
Fetching data from Yahoo Finance.
""".strip()
        dump_benchmarks()
        fp_bm = get_datafile('benchmark.msgpack', "rb")

    bm_list = msgpack.loads(fp_bm.read())
    bm_returns = []
    for packed_date, returns in bm_list:
        event_dt = tuple_to_date(packed_date)
        #event_dt = event_dt.replace(
        #    hour=0,
        #    minute=0,
        #    second=0,
        #    tzinfo=pytz.utc
        #)

        daily_return = risk.DailyReturn(date=event_dt, returns=returns)
        bm_returns.append(daily_return)

    fp_bm.close()

    bm_returns = sorted(bm_returns, key=attrgetter('date'))

    try:
        fp_tr = get_datafile('treasury_curves.msgpack', "rb")
    except IOError:
        print """
data msgpacks aren't distributed with source.
Fetching data from data.treasury.gov
""".strip()
        dump_treasury_curves()
        fp_tr = get_datafile('treasury_curves.msgpack', "rb")

    tr_list = msgpack.loads(fp_tr.read())
    tr_curves = {}
    for packed_date, curve in tr_list:
        tr_dt = tuple_to_date(packed_date)
        #tr_dt = tr_dt.replace(hour=0, minute=0, second=0, tzinfo=pytz.utc)
        tr_curves[tr_dt] = curve

    fp_tr.close()

    return bm_returns, tr_curves
Example #3
def drop_before_date(source_name, last_date):
    """
    Loads the msgpack file for the given @source_name and drops all values
    after @last_date, keeping everything up to and including it.

    Used so that we can test the logic that updates the msgpacks by
    downloading current data when the saved data isn't current enough.
    """
    filename = DATA_SOURCES[source_name]['filename']
    data = loader.get_saved_data(filename)

    filtered_data = itertools.takewhile(
        lambda x: tuple_to_date(x[0]).date() <= last_date.date(), data)

    with loader.get_datafile(filename, mode='wb') as fp:
        fp.write(msgpack.dumps(list(filtered_data)))
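
The truncation hinges on the saved entries being in date order: itertools.takewhile keeps entries only until the first one past the cutoff and then stops iterating. A self-contained sketch of that behaviour on made-up (packed_date, value) pairs, using plain (year, month, day) tuples for brevity:

import itertools
from datetime import date

# Made-up, date-ordered entries in the (packed_date, value) shape used above.
data = [
    ((2012, 5, 31), 0.001),
    ((2012, 6, 1), 0.002),
    ((2012, 6, 4), 0.003),   # past the cutoff, so it is dropped
]
cutoff = date(2012, 6, 1)

kept = list(itertools.takewhile(lambda x: date(*x[0]) <= cutoff, data))
print(kept)   # [((2012, 5, 31), 0.001), ((2012, 6, 1), 0.002)]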
Example #4
def last_date(source_name, args):
    data = loader.get_saved_data(DATA_SOURCES[source_name]['filename'])
    date = tuple_to_date(data[-1][0])
    print "Last saved {source_name} date is {date}".format(
        source_name=source_name, date=date)
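
tuple_to_date is a project helper that does not appear in these examples. A minimal sketch of what such a conversion might look like, assuming the packed layout is (year, month, day, hour, minute, second, microsecond) and the result should be a UTC-aware datetime (the commented-out tzinfo=pytz.utc lines above hint at UTC handling, but the exact tuple layout here is an assumption):

from datetime import datetime
import pytz

def tuple_to_date_sketch(date_tuple):
    # Assumed layout: (year, month, day, hour, minute, second, microsecond).
    year, month, day, hour, minute, second, micros = date_tuple
    dt = datetime(year, month, day, hour, minute, second, micros)
    return pytz.utc.localize(dt)   # hand back a timezone-aware UTC datetime

print(tuple_to_date_sketch((2012, 6, 1, 0, 0, 0, 0)))   # 2012-06-01 00:00:00+00:00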
Example #5
def load_market_data(bm_symbol='^GSPC'):
    try:
        fp_bm = get_datafile(get_benchmark_filename(bm_symbol), "rb")
    except IOError:
        print("""
data msgpacks aren't distributed with source.
Fetching data from Yahoo Finance.
""").strip()
        dump_benchmarks(bm_symbol)
        fp_bm = get_datafile(get_benchmark_filename(bm_symbol), "rb")

    bm_list = msgpack.loads(fp_bm.read())

    # Find the offset of the last date for which we have trading data in our
    # list of valid trading days
    last_bm_date = tuple_to_date(bm_list[-1][0])
    last_bm_date_offset = trading_days.searchsorted(
        last_bm_date.strftime('%Y/%m/%d'))

    # If more than one trading day has elapsed since the last day for which
    # we have data, then we need to update
    if len(trading_days) - last_bm_date_offset > 1:
        update_benchmarks(bm_symbol, last_bm_date)
        fp_bm = get_datafile(get_benchmark_filename(bm_symbol), "rb")
        bm_list = msgpack.loads(fp_bm.read())

    bm_returns = []
    for packed_date, returns in bm_list:
        event_dt = tuple_to_date(packed_date)

        daily_return = DailyReturn(date=event_dt, returns=returns)
        bm_returns.append(daily_return)

    fp_bm.close()

    bm_returns = sorted(bm_returns, key=attrgetter('date'))

    try:
        fp_tr = get_datafile('treasury_curves.msgpack', "rb")
    except IOError:
        print("""
data msgpacks aren't distributed with source.
Fetching data from data.treasury.gov
""").strip()
        dump_treasury_curves()
        fp_tr = get_datafile('treasury_curves.msgpack', "rb")

    tr_list = msgpack.loads(fp_tr.read())

    # Find the offset of the last date for which we have trading data in our
    # list of valid trading days
    last_tr_date = tuple_to_date(tr_list[-1][0])
    last_tr_date_offset = trading_days.searchsorted(
        last_tr_date.strftime('%Y/%m/%d'))

    # If more than one trading day has elapsed since the last day for which
    # we have data, then we need to update
    if len(trading_days) - last_tr_date_offset > 1:
        update_treasury_curves(last_tr_date)
        fp_tr = get_datafile('treasury_curves.msgpack', "rb")
        tr_list = msgpack.loads(fp_tr.read())

    tr_curves = {}
    for packed_date, curve in tr_list:
        tr_dt = tuple_to_date(packed_date)
        # tr_dt = tr_dt.replace(hour=0, minute=0, second=0, microsecond=0,
        #                       tzinfo=pytz.utc)
        tr_curves[tr_dt] = curve

    fp_tr.close()

    tr_curves = OrderedDict(sorted(
                            ((dt, c) for dt, c in tr_curves.iteritems()),
                            key=lambda t: t[0]))

    return bm_returns, tr_curves
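
The staleness check above leans on searchsorted: it finds the offset of the last saved date inside the ordered collection of valid trading days, and the difference between the collection's length and that offset says how many trading days remain from the saved date onward; when that difference exceeds one, the loader re-downloads. A self-contained sketch of the same arithmetic, using a plain numpy datetime array as a stand-in for the real trading_days object (which comes from zipline's trading calendar and is not shown here):

import numpy as np

# Made-up ordered trading days; a numpy array stands in for the real index.
trading_days = np.array(['2012-06-01', '2012-06-04', '2012-06-05', '2012-06-06'],
                        dtype='datetime64[D]')

last_saved = np.datetime64('2012-06-04')         # last date present in the file
offset = trading_days.searchsorted(last_saved)   # -> 1

# length - offset counts the saved day plus any newer trading days, so a value
# greater than 1 means newer trading days exist and the file should be refreshed.
needs_update = len(trading_days) - offset > 1
print(needs_update)   # True: 2012-06-05 and 2012-06-06 come after the saved date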