def s3_push_databases(service, codes=None):
    """
    Push database(s) to Amazon S3.

    Parameters
    ----------
    service : str, required
        only push databases for this service (specify 'all' to push all
        services)

    codes : list of str, optional
        only push databases identified by these codes (omit to push all
        databases for service)

    Returns
    -------
    json
        status message
    """
    data = {}
    if codes:
        data["codes"] = codes
    response = houston.put("/db/s3/{0}".format(service), data=data)
    houston.raise_for_status_with_json(response)
    return response.json()
def delete_universe(code, domain=None):
    """
    Delete a universe.

    Only the universe grouping is removed; the listings details of the
    member securities are left untouched.

    Parameters
    ----------
    code : str, required
        the universe code

    domain : str, optional
        the domain from which to delete the universe (default is 'main',
        which runs against quantrocket.master.main.sqlite. Possible choices:
        main, sharadar)

    Returns
    -------
    dict
        status message
    """
    # Non-default domains are addressed via a path prefix, e.g. /master/sharadar/universes/...
    domain_segment = "{0}/".format(domain) if domain else ""
    url = "/master/{0}universes/{1}".format(domain_segment, code)
    response = houston.delete(url)
    houston.raise_for_status_with_json(response)
    return response.json()
def set_timezone(tz, service=None):
    """
    Set the countdown service timezone.

    Parameters
    ----------
    tz : str, required
        the timezone to set (pass a partial timezone string such as
        'newyork' or 'europe' to see close matches, or pass '?' to see all
        choices)

    service : str, optional
        the name of the countdown service (default 'countdown')

    Returns
    -------
    dict
        status message

    Examples
    --------
    Set the countdown timezone to America/New_York:

    >>> set_timezone("America/New_York")
    """
    if not service:
        service = "countdown"
    response = houston.put("/{0}/timezone".format(service), params={"tz": tz})
    houston.raise_for_status_with_json(response)
    return response.json()
def drop_db(code, confirm_by_typing_db_code_again=None):
    """
    Delete a history database.

    Parameters
    ----------
    code : str, required
        the database code

    confirm_by_typing_db_code_again : str, required
        enter the db code again to confirm you want to drop the database,
        its config, and all its data

    Returns
    -------
    dict
        status message
    """
    url = "/history/databases/{0}".format(code)
    # the service validates that the confirmation matches the code
    confirmation = {"confirm_by_typing_db_code_again": confirm_by_typing_db_code_again}
    response = houston.delete(url, params=confirmation)
    houston.raise_for_status_with_json(response)
    return response.json()
def s3_pull_databases(service, codes=None, force=False):
    """
    Pull database(s) from Amazon S3 to the db service.

    Parameters
    ----------
    service : str, required
        only pull databases for this service (specify 'all' to pull all
        services)

    codes : list of str, optional
        only pull databases identified by these codes (omit to pull all
        databases for service)

    force : bool
        overwrite existing database if one exists (default is to fail if
        one exists)

    Returns
    -------
    json
        status message
    """
    params = {}
    if codes:
        params["codes"] = codes
    if force:
        params["force"] = force
    response = houston.get("/db/s3/{0}".format(service), params=params)
    houston.raise_for_status_with_json(response)
    return response.json()
def fetch_reuters_financials(universes=None, conids=None):
    """
    Fetch Reuters financial statements from IB and save to database.

    This data provides cash flow, balance sheet, and income metrics.

    Parameters
    ----------
    universes : list of str, optional
        limit to these universes (must provide universes, conids, or both)

    conids : list of int, optional
        limit to these conids (must provide universes, conids, or both)

    Returns
    -------
    dict
        status message
    """
    # include only the filters that were actually supplied
    params = {key: value for key, value in
              (("universes", universes), ("conids", conids)) if value}
    response = houston.post("/fundamental/reuters/financials", params=params)
    houston.raise_for_status_with_json(response)
    return response.json()
def clean_bundles(bundles, before=None, after=None, keep_last=None, clean_all=False):
    """
    Remove previously ingested data for one or more bundles.

    Parameters
    ----------
    bundles : list of str, required
        the data bundles to clean

    before : str (YYYY-MM-DD[ HH:MM:SS]), optional
        clear all data before this timestamp. Mutually exclusive with
        keep_last and clean_all.

    after : str (YYYY-MM-DD[ HH:MM:SS]), optional
        clear all data after this timestamp. Mutually exclusive with
        keep_last and clean_all.

    keep_last : int, optional
        clear all but the last N ingestions. Mutually exclusive with
        before, after, and clean_all.

    clean_all : bool
        clear all ingestions for bundle(s), and delete bundle configuration.
        Default False. Mutually exclusive with before, after, and keep_last.

    Returns
    -------
    dict
        bundles removed

    Examples
    --------
    Remove all but the last ingestion for a bundle called 'aus-1min':

    >>> from quantrocket.zipline import clean_bundles
    >>> clean_bundles("aus-1min", keep_last=1)

    Remove all ingestions for bundles called 'aus-1min' and 'usa-1min':

    >>> clean_bundles(["aus-1min", "usa-1min"], clean_all=True)
    """
    params = {"bundles": bundles}
    # optional filters are passed through only when truthy
    optional_args = (
        ("before", before),
        ("after", after),
        ("keep_last", keep_last),
        ("clean_all", clean_all),
    )
    for name, value in optional_args:
        if value:
            params[name] = value
    response = houston.delete("/zipline/bundles", params=params)
    houston.raise_for_status_with_json(response)
    return response.json()
def fetch_reuters_estimates(universes=None, conids=None):
    """
    Fetch Reuters estimates and actuals from IB and save to database.

    This data provides analyst estimates and actuals for a variety of
    indicators.

    Parameters
    ----------
    universes : list of str, optional
        limit to these universes (must provide universes, conids, or both)

    conids : list of int, optional
        limit to these conids (must provide universes, conids, or both)

    Returns
    -------
    dict
        status message
    """
    # include only the filters that were actually supplied
    params = {key: value for key, value in
              (("universes", universes), ("conids", conids)) if value}
    response = houston.post("/fundamental/reuters/estimates", params=params)
    houston.raise_for_status_with_json(response)
    return response.json()
def cancel_collections(codes, queues=None):
    """
    Cancel running or pending historical data collections.

    Parameters
    ----------
    codes : list of str, required
        the database code(s) to cancel collections for

    queues : list of str, optional
        only cancel collections in these queues. Possible choices:
        standard, priority

    Returns
    -------
    dict
        standard and priority queues
    """
    params = {}
    for key, value in (("codes", codes), ("queues", queues)):
        if value:
            params[key] = value
    response = houston.delete("/history/queue", params=params)
    houston.raise_for_status_with_json(response)
    return response.json()
def cancel_orders(order_ids=None, conids=None, order_refs=None, accounts=None, cancel_all=None):
    """
    Cancel one or more orders by order ID, conid, or order ref.

    Parameters
    ----------
    order_ids : list of str, optional
        cancel these order IDs

    conids : list of int, optional
        cancel orders for these conids

    order_refs : list of str, optional
        cancel orders for these order refs

    accounts : list of str, optional
        cancel orders for these accounts

    cancel_all : bool
        cancel all open orders

    Returns
    -------
    dict
        status message

    Examples
    --------
    Cancel orders by order ID:

    >>> cancel_orders(order_ids=['6002:45','6002:46'])

    Cancel orders by conid:

    >>> cancel_orders(conids=[123456])

    Cancel orders by order ref:

    >>> cancel_orders(order_refs=['my-strategy'])

    Cancel all open orders:

    >>> cancel_orders(cancel_all=True)
    """
    # forward only the selection criteria that were supplied
    criteria = (
        ("order_ids", order_ids),
        ("conids", conids),
        ("order_refs", order_refs),
        ("accounts", accounts),
        ("cancel_all", cancel_all),
    )
    params = {}
    for key, value in criteria:
        if value:
            params[key] = value
    response = houston.delete("/blotter/orders", params=params)
    houston.raise_for_status_with_json(response)
    return response.json()
def optimize_databases(service, codes=None):
    """
    Optimize database file(s) to improve performance.

    Parameters
    ----------
    service : str, required
        only optimize databases for this service (specify 'all' to optimize
        all services)

    codes : list of str, optional
        only optimize databases identified by these codes (omit to optimize
        all databases for service)

    Returns
    -------
    json
        status message
    """
    data = {}
    if codes:
        data["codes"] = codes
    response = houston.post("/db/optimizations/{0}".format(service), data=data)
    houston.raise_for_status_with_json(response)
    return response.json()
def fetch_borrow_fees(countries=None):
    """
    Fetch IB borrow fees data and save to database.

    Data is organized by country and updated every 15 minutes. Historical
    data is available from April 2018.

    Parameters
    ----------
    countries : list of str, optional
        limit to these countries (pass '?' or any invalid country to see
        available countries)

    Returns
    -------
    dict
        status message
    """
    params = {"countries": countries} if countries else {}
    response = houston.post("/fundamental/stockloan/fees", params=params)
    houston.raise_for_status_with_json(response)
    return response.json()
def wait_for_collections(codes, timeout=None):
    """
    Wait for historical data collection to finish.

    Parameters
    ----------
    codes : list of str, required
        the database code(s) to wait for

    timeout : str, optional
        time out if data collection hasn't finished after this much time
        (use Pandas timedelta string, e.g. 30sec or 5min or 2h)

    Returns
    -------
    dict
        status message
    """
    # the server enforces `timeout`; the HTTP request itself is allowed to
    # block essentially forever (one year)
    one_year_of_seconds = 60 * 60 * 24 * 365
    params = {"codes": codes}
    if timeout:
        params["timeout"] = timeout
    response = houston.put("/history/queue", params=params,
                           timeout=one_year_of_seconds)
    houston.raise_for_status_with_json(response)
    return response.json()
def drop_db(code, confirm_by_typing_db_code_again=None, cascade=False):
    """
    Delete a tick database or aggregate database.

    Deleting a tick database deletes its configuration and data and any
    associated aggregate databases. Deleting an aggregate database does not
    delete the tick database from which it is derived. Deleting databases
    is irreversible.

    Parameters
    ----------
    code : str, required
        the tick database code or aggregate database code

    confirm_by_typing_db_code_again : str, required
        enter the db code again to confirm you want to drop the database,
        its config, and all its data

    cascade : bool
        also delete associated aggregated databases, if any. Only
        applicable when deleting a tick database.

    Returns
    -------
    dict
        status message
    """
    url = "/realtime/databases/{0}".format(code)
    query = {"confirm_by_typing_db_code_again": confirm_by_typing_db_code_again}
    if cascade:
        query["cascade"] = cascade
    response = houston.delete(url, params=query)
    houston.raise_for_status_with_json(response)
    return response.json()
def list_exchanges(regions=None, sec_types=None):
    """
    List exchanges by security type and country as found on the IB website.

    Parameters
    ----------
    regions : list of str, optional
        limit to these regions. Possible choices: north_america, europe,
        asia, global

    sec_types : list of str, optional
        limit to these security types. Possible choices: STK, ETF, FUT,
        CASH, IND

    Returns
    -------
    dict
    """
    params = {}
    if sec_types:
        params["sec_types"] = sec_types
    if regions:
        params["regions"] = regions
    # scraping the IB website can be slow, hence the long timeout
    response = houston.get("/master/exchanges", params=params, timeout=180)
    houston.raise_for_status_with_json(response)
    return response.json()
def translate_conids(conids, from_domain=None, to_domain=None):
    """
    Translate conids (contract IDs) from one domain to another.

    Only translations to and from the "main" domain (that is, the IB
    domain) are supported.

    Parameters
    ----------
    conids : list of int, required
        the conids to translate

    from_domain : str, optional
        the domain to translate from. This is the domain of the provided
        conids. Possible choices: main, sharadar

    to_domain : str, optional
        the domain to translate to. Possible choices: main, sharadar

    Returns
    -------
    dict
        dict of <from_domain conid>:<to_domain conid>

    Examples
    --------
    Translate a DataFrame with IB conids as columns to one with Sharadar
    conids as columns, and mask columns that can't be translated:

    >>> ib_conids = list(df_with_ib_cols.columns)
    >>> ib_to_sharadar = translate_conids(ib_conids, to_domain="sharadar")
    >>> df_with_sharadar_cols = df_with_ib_cols.rename(columns=ib_to_sharadar)
    >>> # Mask columns where no Sharadar conid was available
    >>> no_translations = set(ib_conids) - set(ib_to_sharadar)
    >>> df_with_sharadar_cols.loc[:, no_translations] = None

    Translate a DataFrame with Sharadar conids as columns to one with IB
    conids as columns, and drop columns that can't be translated:

    >>> sharadar_conids = list(df_with_sharadar_cols.columns)
    >>> sharadar_to_ib = translate_conids(sharadar_conids, from_domain="sharadar")
    >>> df_with_ib_cols = df_with_sharadar_cols.rename(columns=sharadar_to_ib)
    >>> # Drop columns where no IB conid was available
    >>> no_translations = set(sharadar_conids) - set(sharadar_to_ib)
    >>> df_with_ib_cols = df_with_ib_cols.drop(no_translations, axis=1)
    """
    params = {}
    params["conids"] = conids
    if from_domain:
        params["from_domain"] = from_domain
    if to_domain:
        params["to_domain"] = to_domain
    response = houston.get("/master/translations", params=params)
    houston.raise_for_status_with_json(response)
    translations = response.json()
    # JSON requires dict keys to be strings, re-cast to int
    translations = {int(conid): translation
                    for conid, translation in translations.items()}
    return translations
def collect_option_chains(universes=None, conids=None, infilepath_or_buffer=None):
    """
    Collect option chains for underlying securities.

    Note: option chains often consist of hundreds, sometimes thousands of
    options per underlying security. Be aware that requesting option chains
    for large universes of underlying securities, such as all stocks on the
    NYSE, can take numerous hours to complete, add hundreds of thousands of
    rows to the securities master database, increase the database file size
    by several hundred megabytes, and potentially add latency to database
    queries.

    Parameters
    ----------
    universes : list of str, optional
        collect options for these universes of underlying securities

    conids : list of int, optional
        collect options for these underlying conids

    infilepath_or_buffer : str or file-like object, optional
        collect options for the conids in this file (specify '-' to read
        file from stdin)

    Returns
    -------
    dict
        status message
    """
    url = "/master/options"
    params = {key: value for key, value in
              (("universes", universes), ("conids", conids)) if value}

    if infilepath_or_buffer == "-":
        # conids arrive on stdin
        response = houston.post(url, params=params, data=to_bytes(sys.stdin))
    elif infilepath_or_buffer and hasattr(infilepath_or_buffer, "read"):
        # an open file-like object; rewind it if possible before reading
        if infilepath_or_buffer.seekable():
            infilepath_or_buffer.seek(0)
        response = houston.post(url, params=params,
                                data=to_bytes(infilepath_or_buffer))
    elif infilepath_or_buffer:
        # a filepath on disk
        with open(infilepath_or_buffer, "rb") as f:
            response = houston.post(url, params=params, data=f)
    else:
        response = houston.post(url, params=params)

    houston.raise_for_status_with_json(response)
    return response.json()
def fetch_listings(exchange=None, sec_types=None, currencies=None, symbols=None, universes=None, conids=None):
    """
    Fetch securities listings from IB into securities master database,
    either by exchange or by universes/conids.

    Specify an exchange (optionally filtering by security type, currency,
    and/or symbol) to fetch listings from the IB website and fetch
    associated contract details from the IB API. Or, specify universes or
    conids to fetch details from the IB API, bypassing the website.

    Parameters
    ----------
    exchange : str
        the exchange code to fetch listings for (required unless providing
        universes or conids)

    sec_types : list of str, optional
        limit to these security types. Possible choices: STK, ETF, FUT,
        CASH, IND

    currencies : list of str, optional
        limit to these currencies

    symbols : list of str, optional
        limit to these symbols

    universes : list of str, optional
        limit to these universes

    conids : list of int, optional
        limit to these conids

    Returns
    -------
    dict
        status message
    """
    filters = (
        ("exchange", exchange),
        ("sec_types", sec_types),
        ("currencies", currencies),
        ("symbols", symbols),
        ("universes", universes),
        ("conids", conids),
    )
    params = {}
    for name, value in filters:
        if value:
            params[name] = value
    response = houston.post("/master/listings", params=params)
    houston.raise_for_status_with_json(response)
    return response.json()
def trade(strategies, accounts=None, review_date=None, output="csv", filepath_or_buffer=None):
    """
    Run one or more strategies and generate orders.

    Allocations are read from configuration
    (quantrocket.moonshot.allocations.yml).

    Parameters
    ----------
    strategies : list of str, required
        one or more strategy codes

    accounts : list of str, optional
        limit to these accounts

    review_date : str (YYYY-MM-DD), optional
        generate orders as if it were this date, rather than using today's
        date

    output : str, required
        the output format (choices are csv or json)

    filepath_or_buffer : str, optional
        the location to write the orders file (omit to write to stdout)

    Returns
    -------
    None
    """
    params = {}
    if strategies:
        params["strategies"] = strategies
    if accounts:
        params["accounts"] = accounts
    if review_date:
        params["review_date"] = review_date

    output = output or "csv"

    if output not in ("csv", "json"):
        raise ValueError(
            # close the parenthetical in the message
            "invalid output: {0} (choices are csv or json)".format(output))

    response = houston.get("/moonshot/orders.{0}".format(output),
                           params=params, timeout=60 * 5)
    houston.raise_for_status_with_json(response)

    # Don't write a null response to file
    if response.content[:4] == b"null":
        return

    filepath_or_buffer = filepath_or_buffer or sys.stdout
    write_response_to_filepath_or_buffer(filepath_or_buffer, response)
def cancel_orders(order_ids=None, conids=None, strategies=None, cancel_all=None):
    """
    Cancel one or more orders by order ID, conid, or strategy (order ref).

    Parameters
    ----------
    order_ids : list of str, optional
        cancel these order IDs

    conids : list of int, optional
        cancel orders for these conids

    strategies : list of str, optional
        cancel orders for these strategy codes)

    cancel_all : bool
        cancel all open orders

    Returns
    -------
    dict
        status message

    Examples
    --------
    Cancel orders by order ID:

    >>> cancel_orders(order_ids=['DU12345:7002:45','DU12345:7002:46'])

    Cancel orders by conid:

    >>> cancel_orders(conids=[123456])

    Cancel orders by strategy (order ref):

    >>> cancel_orders(strategies=['my-strategy'])

    Cancel all open orders:

    >>> cancel_orders(cancel_all=True)
    """
    # forward only the selection criteria that were supplied
    criteria = (
        ("order_ids", order_ids),
        ("conids", conids),
        ("strategies", strategies),
        ("cancel_all", cancel_all),
    )
    params = {}
    for key, value in criteria:
        if value:
            params[key] = value
    response = houston.delete("/blotter/orders", params=params)
    houston.raise_for_status_with_json(response)
    return response.json()
def fetch_history(codes, priority=False, conids=None, start_date=None, end_date=None, delist_missing=False):
    """
    Fetch historical market data from IB and save it to a history database.
    The request is queued and the data is fetched asynchronously.

    Parameters
    ----------
    codes : list of str, required
        the database code(s) to fetch data for

    priority : bool
        use the priority queue (default is to use the standard queue)

    conids : list of int, optional
        fetch history for these conids (overrides config)

    start_date : str (YYYY-MM-DD), optional
        fetch history back to this start date (overrides config)

    end_date : str (YYYY-MM-DD), optional
        fetch history up to this end date (overrides config)

    delist_missing : bool
        auto-delist securities that are no longer available from IB

    Returns
    -------
    dict
        status message
    """
    request_args = (
        ("codes", codes),
        ("priority", priority),
        ("conids", conids),
        ("start_date", start_date),
        ("end_date", end_date),
        ("delist_missing", delist_missing),
    )
    params = {}
    for name, value in request_args:
        if value:
            params[name] = value
    response = houston.post("/history/queue", params=params)
    houston.raise_for_status_with_json(response)
    return response.json()
def list_universes():
    """
    List universes and their size.

    Returns
    -------
    dict
        dict of universe:size
    """
    url = "/master/universes"
    response = houston.get(url)
    houston.raise_for_status_with_json(response)
    return response.json()
def get_license_profile():
    """
    Return the current license profile.

    Returns
    -------
    dict
        license profile
    """
    url = "/license-service/license"
    response = houston.get(url)
    houston.raise_for_status_with_json(response)
    return response.json()
def download_positions(filepath_or_buffer=None, output="csv", order_refs=None, accounts=None, conids=None):
    """
    Query current positions and write results to file.

    To return positions as a Python list, see list_positions.

    Parameters
    ----------
    filepath_or_buffer : str or file-like object
        filepath to write the data to, or file-like object (defaults to
        stdout)

    output : str
        output format (json, csv, txt, default is csv)

    order_refs : list of str, optional
        limit to these order refs

    accounts : list of str, optional
        limit to these accounts

    conids : list of int, optional
        limit to these conids

    Returns
    -------
    None
    """
    params = {}
    if order_refs:
        params["order_refs"] = order_refs
    if accounts:
        params["accounts"] = accounts
    if conids:
        params["conids"] = conids

    output = output or "csv"

    if output not in ("csv", "json", "txt"):
        # fix typo in error message ("ouput")
        raise ValueError("Invalid output: {0}".format(output))

    response = houston.get("/blotter/positions.{0}".format(output),
                           params=params)
    houston.raise_for_status_with_json(response)

    # Don't write a null response to file
    if response.content[:4] == b"null":
        return

    filepath_or_buffer = filepath_or_buffer or sys.stdout
    write_response_to_filepath_or_buffer(filepath_or_buffer, response)
def create_combo(combo_legs):
    """
    Create a combo (aka spread), which is a composite instrument consisting
    of two or more individual instruments (legs) that are traded as a
    single instrument.

    Each user-defined combo is stored in the securities master database
    with a SecType of "BAG". The combo legs are stored in the ComboLegs
    field as a JSON array. QuantRocket assigns a negative integer as the
    conid for the combo. The negative integer consists of a prefix of -11
    followed by an autoincrementing digit, for example: -111, -112, -113,
    ...

    If the combo already exists, its conid will be returned instead of
    creating a duplicate record.

    Parameters
    ----------
    combo_legs : list, required
        a list of the combo legs, where each leg is a list specifying
        action, ratio, and conid

    Returns
    -------
    dict
        returns a dict containing the generated conid of the combo, and
        whether a new record was created

    Examples
    --------
    To create a calendar spread on VX, first retrieve the conids of the
    legs:

    >>> from quantrocket.master import download_master_file
    >>> download_master_file("vx.csv", symbols="VIX", exchanges="CFE", sec_types="FUT")
    >>> vx_conids = pd.read_csv("vx.csv", index_col="LocalSymbol").ConId.to_dict()

    Then create the combo:

    >>> create_combo([
            ["BUY", 1, vx_conids["VXV9"]],
            ["SELL", 1, vx_conids["VXQ9"]]
        ])
    {"conid": -111, "created": True}
    """
    # serialize the legs to an in-memory JSON buffer and rewind before upload
    payload = six.StringIO()
    json.dump(combo_legs, payload)
    payload.seek(0)
    response = houston.put("/master/combos", data=payload)
    houston.raise_for_status_with_json(response)
    return response.json()
def download_executions(filepath_or_buffer=None, order_refs=None, accounts=None, conids=None, start_date=None, end_date=None):
    """
    Query executions from the executions database.

    Parameters
    ----------
    filepath_or_buffer : str or file-like object
        filepath to write the data to, or file-like object (defaults to
        stdout)

    order_refs : list of str, optional
        limit to these order refs

    accounts : list of str, optional
        limit to these accounts

    conids : list of int, optional
        limit to these conids

    start_date : str (YYYY-MM-DD), optional
        limit to executions on or after this date

    end_date : str (YYYY-MM-DD), optional
        limit to executions on or before this date

    Returns
    -------
    None
    """
    filters = (
        ("order_refs", order_refs),
        ("accounts", accounts),
        ("conids", conids),
        ("start_date", start_date),
        ("end_date", end_date),
    )
    params = {}
    for name, value in filters:
        if value:
            params[name] = value
    response = houston.get("/blotter/executions.csv", params=params)
    houston.raise_for_status_with_json(response)
    filepath_or_buffer = filepath_or_buffer or sys.stdout
    write_response_to_filepath_or_buffer(filepath_or_buffer, response)
def list_databases():
    """
    List tick databases and associated aggregate databases.

    Returns
    -------
    dict
        dict of {tick_db: [agg_dbs]}
    """
    url = "/realtime/databases"
    response = houston.get(url)
    houston.raise_for_status_with_json(response)
    return response.json()
def list_bundles():
    """
    List all of the available data bundles.

    Returns
    -------
    dict
        data bundles and timestamps
    """
    url = "/zipline/bundles"
    response = houston.get(url)
    houston.raise_for_status_with_json(response)
    return response.json()
def get_history_queue():
    """
    Get the current queue of historical data collections.

    Returns
    -------
    dict
        standard and priority queues
    """
    url = "/history/queue"
    response = houston.get(url)
    houston.raise_for_status_with_json(response)
    return response.json()
def list_databases():
    """
    List history databases.

    Returns
    -------
    list
        list of database codes
    """
    url = "/history/databases"
    response = houston.get(url)
    houston.raise_for_status_with_json(response)
    return response.json()