Example #1
def read_and_write_quotes(snapshot_date):
    mkt_conf = config.read_markets()
    active_mkt_conf = config.active_markets(mkt_conf, snapshot_date)
    retrieved_markets = {}
    for mkt_name, mkt_conf in active_mkt_conf.items():
        print('{}: retrieving {}... '.format(snapshot_date, mkt_name),
              end='')
        mkt = contract.Market(mkt_name)

        # Market snapshot may have been retrieved already
        if mkt_name not in retrieved_markets:
            quotes_dfs = px_hist.read_quote_frames(mkt_conf)
            retrieved_markets.update(quotes_dfs)

        quotes_df = retrieved_markets[mkt_name]

        with open_store(mode='a') as hdf_store:
            key = quote_key(market=mkt)
            if key in hdf_store:
                prev_df = hdf_store[key]
                dedupe_idx = quotes_df.index.difference(prev_df.index)
                iter_df = quotes_df.reindex(dedupe_idx)
            else:
                iter_df = quotes_df
            hdf_store.put(key=key, value=iter_df, format='t', append=True)
        print('{}: completed {}'.format(pd.Timestamp.now(), mkt_name))
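
This snippet leans on two project helpers, open_store and quote_key, that are not shown here. A minimal sketch of what they might look like, assuming the quotes live in a single pandas HDFStore file (the path matches the one used in Example #4) and that contract.Market exposes the market name as market.name:

import pandas as pd

HDF_PATH = 'data/hist_iem.hdf'  # store location taken from Example #4

def open_store(path=HDF_PATH, mode='r'):
    # pd.HDFStore works as a context manager, matching the `with` usage above
    return pd.HDFStore(path, mode=mode)

def quote_key(market):
    # One quotes node per market, keyed by market name (assumed attribute)
    return 'quotes/{}'.format(market.name)
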
Example #2
def create_all(metadata, mkt_conf):
    config.markets_table(metadata)
    config.bundles_table(metadata)
    config.assets_table(metadata)

    # Daily market quotes
    mkts = [contract.Market(mkt_nm) for mkt_nm in mkt_conf.keys()]
    for mkt in mkts:
        recorder.daily_market_table(metadata, mkt)
        recorder.quotes_table(metadata, mkt)
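
create_all only registers the table definitions; the recorder helpers it calls are defined elsewhere. A rough sketch of what quotes_table might look like, assuming one SQLAlchemy table per market and placeholder column names:

import sqlalchemy as sa

def quotes_table(metadata, mkt):
    # Assumed layout: one quotes table per market, named after the market
    return sa.Table(
        'quotes_{}'.format(mkt.name), metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('timestamp', sa.DateTime, nullable=False),
        sa.Column('contract', sa.String(64), nullable=False),
        sa.Column('bid', sa.Float),
        sa.Column('ask', sa.Float),
        sa.Column('last_price', sa.Float),
    )
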
Example #3
def retrieve_and_store_daily_data():
    """
    For each market in the markets config file, retrieve the daily snapshots
    from the IEM site. Previous daily snapshot data for that market is
    overwritten in the process.

    :return: None
    """
    for mkt_name in config.read_markets().keys():
        mkt = contract.Market(mkt_name)
        px_hist_df = read_daily_market_data(mkt_name)

        with open_store(mode='a') as hdf_store:
            key = history_key(market=mkt)
            hdf_store.put(key=key, value=px_hist_df, format='t')
Example #4
    # Engine connected to the MySQL server (no database selected yet)
    sa_url = 'sqlalchemy.url'
    mysql_url, dbname = db_conf[sa_url].rsplit(sep='/', maxsplit=1)
    mysql_engine = sa.engine_from_config({sa_url: mysql_url})

    create_fmt = 'CREATE DATABASE IF NOT EXISTS {dbname}'
    mysql_engine.execute(create_fmt.format(dbname=dbname))

    # Engine connected to the target MySQL database
    engine = sa.engine_from_config(db_conf)

    metadata = sa.MetaData()
    mkt_conf = config.read_markets()
    create_all(metadata, mkt_conf)
    metadata.create_all(engine)

    insert_config_data(engine)

    # Read the config tables back from the database. Does metadata drop and create the tables?
    db_mkt_df = pd.read_sql_table(config.MARKETS, engine, index_col=config.ID)
    db_bundle_df = pd.read_sql_table(config.BUNDLES, engine, index_col=config.ID)
    db_asset_df = pd.read_sql_table(config.ASSETS, engine, index_col=config.ID)

    # TODO: Populate data
    markets = [contract.Market(mkt_nm) for mkt_nm in mkt_conf.keys()]
    with recorder.open_store('data/hist_iem.hdf') as hdf_store:
        for market in markets:
            mkt_hist_key = recorder.history_key(market)
            mkt_hist_df = hdf_store[mkt_hist_key]
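
Example #4 references a db_conf mapping defined elsewhere. sa.engine_from_config reads keys prefixed with 'sqlalchemy.', and the code above splits the database name off the URL before issuing CREATE DATABASE, so db_conf presumably looks roughly like this (driver, credentials, and database name are placeholders):

db_conf = {
    # Server URL plus database name; rsplit('/', maxsplit=1) separates the two
    'sqlalchemy.url': 'mysql+pymysql://user:password@localhost/iem_db',
}
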
Example #5
    return pd.IndexSlice[:, bundle_assets]


def plot_cumsum(bundle_history_srs):
    cumsum_df = cumsum_frame(bundle_history_srs)
    # First index (date) with any cumulative traded units
    fst_trade_idx = (cumsum_df.sum(axis=1) > 0).idxmax()
    nonzero_cum_units_df = cumsum_df.loc[fst_trade_idx:]
    plt.figure(get_new_fignum())
    plt.plot(nonzero_cum_units_df)
    plt.show()
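
cumsum_frame is defined elsewhere in the module. Assuming bundle_history_srs is a Series of traded units indexed by (date, asset), which the pd.IndexSlice fragment above suggests, a minimal sketch would be:

def cumsum_frame(bundle_history_srs):
    # Pivot assets into columns and accumulate traded units over time
    return bundle_history_srs.unstack().fillna(0).cumsum()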


if __name__ == '__main__':
    mkt_name = 'FedPolicyB'
    mkt = contract.Market(mkt_name)
    with recorder.open_store(mode='r') as hdf_store:
        px_hist_df = hdf_store[recorder.history_key(market=mkt)]

    mkts_json = config.read_markets()
    mkt_conf = mkts_json[mkt_name]

    # Clean dataframe
    df = px_hist_df.copy().reset_index()
    # Expiry
    df[EXPIRY] = df[iem.CONTRACT].apply(expiry)
    # Expiry date
    expiry_date_json = mkt_conf[config.BUNDLE]
    kwargs = {'expiry_date_json': expiry_date_json}
    df[config.EXPIRY_DATE] = df[EXPIRY].apply(expiry_date_series, **kwargs)
    # Days to expiration