# Example 1
def load_market_data():
    try:
        fp_bm = get_datafile('benchmark.msgpack', "rb")
    except IOError:
        print """
data msgpacks aren't distribute with source.
Fetching data from Yahoo Finance.
""".strip()
        dump_benchmarks()
        fp_bm = get_datafile('benchmark.msgpack', "rb")

    bm_list = msgpack.loads(fp_bm.read())
    bm_returns = []
    for packed_date, returns in bm_list:
        event_dt = tuple_to_date(packed_date)
        #event_dt = event_dt.replace(
        #    hour=0,
        #    minute=0,
        #    second=0,
        #    tzinfo=pytz.utc
        #)

        daily_return = risk.DailyReturn(date=event_dt, returns=returns)
        bm_returns.append(daily_return)

    fp_bm.close()

    bm_returns = sorted(bm_returns, key=attrgetter('date'))

    try:
        fp_tr = get_datafile('treasury_curves.msgpack', "rb")
    except IOError:
        print """
data msgpacks aren't distribute with source.
Fetching data from data.treasury.gov
""".strip()
        dump_treasury_curves()
        fp_tr = get_datafile('treasury_curves.msgpack', "rb")

    tr_list = msgpack.loads(fp_tr.read())
    tr_curves = {}
    for packed_date, curve in tr_list:
        tr_dt = tuple_to_date(packed_date)
        #tr_dt = tr_dt.replace(hour=0, minute=0, second=0, tzinfo=pytz.utc)
        tr_curves[tr_dt] = curve

    fp_tr.close()

    tr_curves = OrderedDict(sorted(
                            ((dt, c) for dt, c in tr_curves.iteritems()),
                            key=lambda t: t[0]))

    return bm_returns, tr_curves
# Example 2
def load_market_data():
    try:
        fp_bm = get_datafile('benchmark.msgpack', "rb")
    except IOError:
        print """
data msgpacks aren't distribute with source.
Fetching data from Yahoo Finance.
""".strip()
        dump_benchmarks()
        fp_bm = get_datafile('benchmark.msgpack', "rb")

    bm_list = msgpack.loads(fp_bm.read())
    bm_returns = []
    for packed_date, returns in bm_list:
        event_dt = tuple_to_date(packed_date)
        #event_dt = event_dt.replace(
        #    hour=0,
        #    minute=0,
        #    second=0,
        #    tzinfo=pytz.utc
        #)

        daily_return = risk.DailyReturn(date=event_dt, returns=returns)
        bm_returns.append(daily_return)

    fp_bm.close()

    bm_returns = sorted(bm_returns, key=attrgetter('date'))

    try:
        fp_tr = get_datafile('treasury_curves.msgpack', "rb")
    except IOError:
        print """
data msgpacks aren't distribute with source.
Fetching data from data.treasury.gov
""".strip()
        dump_treasury_curves()
        fp_tr = get_datafile('treasury_curves.msgpack', "rb")

    tr_list = msgpack.loads(fp_tr.read())
    tr_curves = {}
    for packed_date, curve in tr_list:
        tr_dt = tuple_to_date(packed_date)
        #tr_dt = tr_dt.replace(hour=0, minute=0, second=0, tzinfo=pytz.utc)
        tr_curves[tr_dt] = curve

    fp_tr.close()

    return bm_returns, tr_curves
# Example 3
def drop_before_date(source_name, last_date):
    """
    Rewrite the msgpack cache for @source_name so it retains only the
    leading run of entries dated on or before @last_date.

    Used so that we can test logic that updates the msgpack's to download
    current data if the data isn't current enough.
    """
    filename = DATA_SOURCES[source_name]['filename']
    saved = loader.get_saved_data(filename)

    def _not_after_cutoff(entry):
        # entry[0] is a packed date tuple; compare at day resolution.
        return tuple_to_date(entry[0]).date() <= last_date.date()

    kept = list(itertools.takewhile(_not_after_cutoff, saved))

    with loader.get_datafile(filename, mode='wb') as fp:
        fp.write(msgpack.dumps(kept))
# Example 4
def drop_before_date(source_name, last_date):
    """
    Truncate the msgpack cache for @source_name, keeping only entries
    whose date does not exceed @last_date and rewriting the file.

    Used so that we can test logic that updates the msgpack's to download
    current data if the data isn't current enough.
    """
    filename = DATA_SOURCES[source_name]['filename']
    cutoff = last_date.date()

    # Keep the leading prefix of entries at or before the cutoff day.
    prefix = itertools.takewhile(
        lambda item: tuple_to_date(item[0]).date() <= cutoff,
        loader.get_saved_data(filename))
    payload = msgpack.dumps(list(prefix))

    with loader.get_datafile(filename, mode='wb') as fp:
        fp.write(payload)