Code example #1
def wait_for_collections(codes, timeout=None):
    """
    Wait for historical data collection to finish.

    Parameters
    ----------
    codes : list of str, required
        the database code(s) to wait for

    timeout : str, optional
        time out if data collection hasn't finished after this much time (use a
        Pandas timedelta string, e.g. 30sec or 5min or 2h)

    Returns
    -------
    dict
        status message

    """
    params = {}
    params["codes"] = codes
    if timeout:
        params["timeout"] = timeout
    response = houston.put("/history/queue", params=params, timeout=60*60*24*365)
    houston.raise_for_status_with_json(response)
    return response.json()
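A minimal usage sketch (the database code "usa-stk-1d" is an invented example; the call blocks until collection finishes or the timeout elapses):

>>> # wait up to two hours for the hypothetical 'usa-stk-1d' collection to finish
>>> wait_for_collections(["usa-stk-1d"], timeout="2h")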
Code example #2
def set_timezone(tz, service=None):
    """
    Set the countdown service timezone.

    Parameters
    ----------
    tz : str, required
        the timezone to set (pass a partial timezone string such as 'newyork'
        or 'europe' to see close matches, or pass '?' to see all choices)

    service : str, optional
        the name of the countdown service (default 'countdown')

    Returns
    -------
    dict
        status message

    Examples
    --------
    Set the countdown timezone to America/New_York:

    >>> set_timezone("America/New_York")
    """
    service = service or "countdown"
    params = {"tz": tz}
    response = houston.put("/{0}/timezone".format(service), params=params)
    houston.raise_for_status_with_json(response)
    return response.json()
Code example #3
def s3_push_databases(service, codes=None):
    """
    Push database(s) to Amazon S3.

    Parameters
    ----------
    service : str, required
        only push databases for this service (specify 'all' to
        push all services)

    codes : list of str, optional
        only push databases identified by these codes (omit to
        push all databases for service)

    Returns
    -------
    dict
        status message
    """
    data = {}
    if codes:
        data["codes"] = codes
    response = houston.put("/db/s3/{0}".format(service), data=data)
    houston.raise_for_status_with_json(response)
    return response.json()
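A hypothetical invocation (the service name "history" and the database code "usa-stk-1d" are invented examples, not values confirmed by the docstring):

>>> # push one history database to the configured S3 bucket
>>> s3_push_databases("history", codes=["usa-stk-1d"])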
Code example #4
File: master.py  Project: jmscraig/quantrocket-client
def create_combo(combo_legs):
    """
    Create a combo (aka spread), which is a composite instrument consisting
    of two or more individual instruments (legs) that are traded as a single
    instrument.

    Each user-defined combo is stored in the securities master database with a
    SecType of "BAG". The combo legs are stored in the ComboLegs field as a JSON
    array. QuantRocket assigns a negative integer as the conid for the combo. The
    negative integer consists of a prefix of -11 followed by an autoincrementing
    digit, for example: -111, -112, -113, ...

    If the combo already exists, its conid will be returned instead of creating a
    duplicate record.

    Parameters
    ----------
    combo_legs : list, required
        a list of the combo legs, where each leg is a list specifying action, ratio,
        and conid

    Returns
    -------
    dict
        returns a dict containing the generated conid of the combo, and whether a new
        record was created

    Examples
    --------
    To create a calendar spread on VX, first retrieve the conids of the legs:

    >>> from quantrocket.master import download_master_file
    >>> download_master_file("vx.csv", symbols="VIX", exchanges="CFE", sec_types="FUT")
    >>> vx_conids = pd.read_csv("vx.csv", index_col="LocalSymbol").ConId.to_dict()

    Then create the combo:

    >>> create_combo([
            ["BUY", 1, vx_conids["VXV9"]],
            ["SELL", 1, vx_conids["VXQ9"]]
        ])
        {"conid": -111, "created": True}
    """

    f = six.StringIO()
    json.dump(combo_legs, f)
    f.seek(0)

    response = houston.put("/master/combos", data=f)

    houston.raise_for_status_with_json(response)
    return response.json()
Code example #5
def set_credentials(gateway, username=None, password=None, trading_mode=None):
    """
    Set IB username/password and trading mode (paper/live) for IB Gateway.

    Can be used to set new credentials or switch between paper and live trading
    (must have previously entered live credentials). Setting new credentials will
    restart IB Gateway and takes a moment to complete.

    Parameters
    ----------
    gateway : str, required
        name of IB Gateway service to set credentials for (for example, 'ibg1')

    username : str, optional
        IB username (optional if only modifying trading environment)

    password : str, optional
        IB password (if omitted and username is provided, will be prompted
        for password)

    trading_mode : str, optional
        the trading mode to use ('paper' or 'live')

    Returns
    -------
    dict
        status message
    """
    statuses = list_gateway_statuses(gateways=[gateway])
    if not statuses:
        raise ValueError("no such IB Gateway: {0}".format(gateway))

    if username and not password:
        password = getpass.getpass(prompt="Enter IB Password: ")

    data = {}
    if username:
        data["username"] = username
    if password:
        data["password"] = password
    if trading_mode:
        data["trading_mode"] = trading_mode

    response = houston.put("/{0}/credentials".format(gateway),
                           data=data,
                           timeout=180)
    houston.raise_for_status_with_json(response)
    return response.json()
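A minimal sketch of switching an existing gateway to paper trading (the gateway name "ibg1" is taken from the docstring; no username is passed, so no password prompt is triggered):

>>> set_credentials("ibg1", trading_mode="paper")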
Code example #6
def set_license(key):
    """
    Set QuantRocket license key.

    Parameters
    ----------
    key : str, required
        the license key for your account

    Returns
    -------
    dict
        license profile
    """
    response = houston.put("/license-service/license/{0}".format(key))
    houston.raise_for_status_with_json(response)
    return response.json()
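A hypothetical call (the key shown is a placeholder, not a real license key):

>>> set_license("XXXXXXXXXXXXXXXX")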
Code example #7
def load_rollrules_config(filename):
    """
    Upload a new rollover rules config.

    Parameters
    ----------
    filename : str, required
        the rollover rules YAML config file to upload

    Returns
    -------
    dict
        status message
    """
    with open(filename) as file:
        response = houston.put("/master/config/rollover", data=file.read())
    houston.raise_for_status_with_json(response)
    return response.json()
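A hypothetical call (the filename is an invented example; the file should contain rollover rules in YAML):

>>> load_rollrules_config("rollover.yml")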
Code example #8
def load_launchpad_config(filename):
    """
    Upload a new launchpad config.

    Parameters
    ----------
    filename : str, required
        the config file to upload to the launchpad service

    Returns
    -------
    dict
        status message
    """
    with open(filename) as file:
        response = houston.put("/launchpad/config", data=file.read())
    houston.raise_for_status_with_json(response)
    return response.json()
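A hypothetical call (the filename is an invented example):

>>> load_launchpad_config("launchpad.yml")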
Code example #9
def load_crontab(service, filename):
    """
    Upload a new crontab.

    Parameters
    ----------
    service : str, required
        the name of the service, e.g. ``countdown-usa``
    filename : str, required
        the crontab file to upload to the countdown service

    Returns
    -------
    dict
        status message
    """
    with open(filename) as file:
        response = houston.put("/{0}/crontab".format(service),
                               data=file.read())
    houston.raise_for_status_with_json(response)
    return response.json()
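A hypothetical call for this variant, which takes the service name as its first argument ("countdown-usa" is the service named in the docstring; the filename is invented):

>>> load_crontab("countdown-usa", "usa.crontab")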
Code example #10
def load_crontab(filename, service=None):
    """
    Upload a new crontab.

    Parameters
    ----------
    filename : str, required
        the crontab file to upload to the countdown service

    service : str, optional
        the name of the countdown service (default 'countdown')

    Returns
    -------
    dict
        status message
    """
    service = service or "countdown"
    with open(filename) as file:
        response = houston.put("/{0}/crontab".format(service), data=file.read())
    houston.raise_for_status_with_json(response)
    return response.json()
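In this variant the service argument is optional and defaults to 'countdown' (the filename is an invented example):

>>> load_crontab("mycrontab.crontab")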
Code example #11
File: db.py  Project: jonahhill/quantrocket-client
def set_s3_config(access_key_id=None, secret_access_key=None, bucket=None):
    """
    Set AWS S3 configuration for pushing and pulling databases to and from
    S3.

    See http://qrok.it/h/dbs3 to learn more.

    Parameters
    ----------
    access_key_id : str, optional
        AWS access key ID

    secret_access_key : str, optional
        AWS secret access key (if omitted and access_key_id is provided,
        will be prompted for secret_access_key)

    bucket : str, optional
        the S3 bucket name to push to/pull from

    Returns
    -------
    dict
        status message
    """
    if access_key_id and not secret_access_key:
        secret_access_key = getpass.getpass(
            prompt="Enter AWS Secret Access Key: ")

    data = {}
    if access_key_id:
        data["access_key_id"] = access_key_id
    if secret_access_key:
        data["secret_access_key"] = secret_access_key
    if bucket:
        data["bucket"] = bucket

    response = houston.put("/db/s3config", data=data)
    houston.raise_for_status_with_json(response)
    return response.json()
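A hypothetical call (the access key ID and bucket name are invented; per the docstring, passing access_key_id while omitting secret_access_key triggers an interactive prompt for the secret):

>>> set_s3_config(access_key_id="my-access-key-id", bucket="my-db-backups")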
Code example #12
def create_db(code, universes=None, conids=None, start_date=None, end_date=None,
              vendor=None, bar_size=None, bar_type=None, outside_rth=False,
              primary_exchange=False, times=None, between_times=None,
              shard=None, no_config=False, config_filepath_or_buffer=None):
    """
    Create a new history database.

    Parameters
    ----------
    code : str, required
        the code to assign to the database (lowercase alphanumerics and hyphens only)

    universes : list of str
        include these universes

    conids : list of int
        include these conids

    start_date : str (YYYY-MM-DD), optional
        collect history back to this start date (default is to collect as far back as data
        is available)

    end_date : str (YYYY-MM-DD), optional
        collect history up to this end date (default is to collect up to the present)

    vendor : str, optional
        the vendor to collect data from (default 'ib'; possible choices: 'ib', 'sharadar')

    bar_size : str, required for vendor ib
        the bar size to collect. Possible choices:
        "1 secs", "5 secs",	"10 secs", "15 secs", "30 secs",
        "1 min", "2 mins", "3 mins", "5 mins", "10 mins", "15 mins", "20 mins", "30 mins",
        "1 hour", "2 hours", "3 hours", "4 hours", "8 hours",
        "1 day",
        "1 week",
        "1 month"

    bar_type : str, optional
        the bar type to collect (if not specified, defaults to MIDPOINT for forex and
        TRADES for everything else). Possible choices:
        "TRADES",
        "ADJUSTED_LAST",
        "MIDPOINT",
        "BID",
        "ASK",
        "BID_ASK",
        "HISTORICAL_VOLATILITY",
        "OPTION_IMPLIED_VOLATILITY"

    outside_rth : bool
        include data from outside regular trading hours (default is to limit to regular
        trading hours)

    primary_exchange : bool
        limit to data from the primary exchange (default False)

    times : list of str (HH:MM:SS), optional
        limit to these times (refers to the bar's start time; mutually exclusive
        with `between_times`)

    between_times : list of str (HH:MM:SS), optional
        limit to times between these two times (refers to the bar's start time;
        mutually exclusive with `times`)

    shard : str, optional
        whether and how to shard the database, i.e. break it into smaller pieces.
        Required for intraday databases. Possible choices are `year` (separate
        database for each year), `time` (separate database for each bar time),
        `conid` (separate database for each security), `conid,time` (duplicate copies
        of database, one sharded by conid and the other by time), or `off` (no
        sharding). See http://qrok.it/h/shard for more help.

    no_config : bool
        create a database with no config (data can be loaded manually instead of collected
        from a vendor)

    config_filepath_or_buffer : str or file-like object, optional
        a YAML config file defining the historical data requirements (specify '-' to read file from stdin)

    Returns
    -------
    dict
        status message

    """
    params = {}
    if universes:
        params["universes"] = universes
    if conids:
        params["conids"] = conids
    if start_date:
        params["start_date"] = start_date
    if end_date:
        params["end_date"] = end_date
    if vendor:
        params["vendor"] = vendor
    if bar_size:
        params["bar_size"] = bar_size
    if bar_type:
        params["bar_type"] = bar_type
    if outside_rth:
        params["outside_rth"] = outside_rth
    if primary_exchange:
        params["primary_exchange"] = primary_exchange
    if times:
        params["times"] = times
    if between_times:
        params["between_times"] = between_times
    if shard:
        params["shard"] = shard
    if no_config:
        params["no_config"] = True

    if config_filepath_or_buffer == "-":
        response = houston.put("/history/databases/{0}".format(code), params=params,
                               data=to_bytes(sys.stdin))

    elif config_filepath_or_buffer and hasattr(config_filepath_or_buffer, "read"):
        response = houston.put("/history/databases/{0}".format(code), params=params,
                               data=to_bytes(config_filepath_or_buffer))

    elif config_filepath_or_buffer:
        with open(config_filepath_or_buffer, "rb") as f:
            response = houston.put("/history/databases/{0}".format(code), params=params, data=f)

    else:
        response = houston.put("/history/databases/{0}".format(code), params=params)

    houston.raise_for_status_with_json(response)
    return response.json()
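A hypothetical example of creating a sharded intraday history database (the database code and universe name are invented; per the docstring, shard is required for intraday databases):

>>> create_db("usa-stk-1min", universes=["usa-stk"], bar_size="1 min", shard="time")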
Code example #13
def create_agg_db(code, tick_db_code, bar_size, fields=None):
    """
    Create an aggregate database from a tick database.

    Aggregate databases provide rolled-up views of the underlying tick data,
    aggregated to a desired frequency (such as 1-minute bars).

    Parameters
    ----------
    code : str, required
        the code to assign to the aggregate database (lowercase alphanumerics and hyphens only)

    tick_db_code : str, required
        the code of the tick database to aggregate

    bar_size : str, required
        the time frequency to aggregate to (use a Pandas timedelta string, for example
        10s or 1m or 2h or 1d)

    fields : dict of list of str, optional
        include these fields in aggregate database, aggregated in these ways. Provide a dict
        mapping tick db fields to lists of aggregate functions to apply to the field. Available
        aggregate functions are "Close", "Open", "High", "Low", "Mean", "Sum", and "Count".
        See examples section. If not specified, defaults to including the "Close" for each tick
        db field.

    Returns
    -------
    dict
        status message

    Examples
    --------
    Create an aggregate database of 1 minute bars consisting of OHLC trades and volume,
    from a tick database of US stocks, resulting in fields called LastPriceOpen, LastPriceHigh,
    LastPriceLow, LastPriceClose, and VolumeClose:

    >>> create_agg_db("usa-stk-trades-1min", tick_db_code="usa-stk-trades",
                      bar_size="1m",
                      fields={"LastPrice":["Open","High","Low","Close"],
                              "Volume": ["Close"]})

    Create an aggregate database of 1 second bars containing the closing bid and ask and
    the mean bid size and ask size, from a tick database of futures trades and
    quotes, resulting in fields called BidPriceClose, AskPriceClose, BidSizeMean, and AskSizeMean:

    >>> create_agg_db("globex-fut-taq-1sec", tick_db_code="globex-fut-taq",
                      bar_size="1s",
                      fields={"BidPrice":["Close"],
                              "AskPrice": ["Close"],
                              "BidSize": ["Mean"],
                              "AskSize": ["Mean"]
                              })
    """
    params = {}
    params["bar_size"] = bar_size
    if fields:
        if not isinstance(fields, dict):
            raise ParameterError("fields must be a dict")

        # convert lists to comma-separated strings
        _fields = {}
        for k, v in fields.items():
            if isinstance(v, (list, tuple)):
                v = ",".join(v)
            _fields[k] = v
        params["fields"] = dict_to_dict_strs(_fields)

    response = houston.put("/realtime/databases/{0}/aggregates/{1}".format(tick_db_code, code), params=params)

    houston.raise_for_status_with_json(response)
    return response.json()
Code example #14
def create_tick_db(code, universes=None, conids=None, vendor=None,
                   fields=None, primary_exchange=False):
    """
    Create a new database for collecting real-time tick data.

    The market data requirements you specify when you create a new database are
    applied each time you collect data for that database.

    Parameters
    ----------
    code : str, required
        the code to assign to the database (lowercase alphanumerics and hyphens only)

    universes : list of str
        include these universes

    conids : list of int
        include these conids

    vendor : str, optional
        the vendor to collect data from (default 'ib', which is currently the only supported choice)

    fields : list of str
        collect these fields (pass '?' or any invalid fieldname to see
        available fields, default fields are 'LastPrice' and 'Volume')

    primary_exchange : bool
        limit to data from the primary exchange (default False)

    Returns
    -------
    dict
        status message

    Examples
    --------
    Create a database for collecting real-time trades and volume for US stocks:

    >>> create_tick_db("usa-stk-trades", universes="usa-stk", fields=["LastPrice", "Volume"])

    Create a database for collecting trades and quotes for a universe of futures:

    >>> create_tick_db("globex-fut-taq", universes="globex-fut",
                       fields=["LastPrice", "Volume", "BidPrice", "AskPrice", "BidSize", "AskSize"])
    """
    params = {}
    if universes:
        params["universes"] = universes
    if conids:
        params["conids"] = conids
    if vendor:
        params["vendor"] = vendor
    if fields:
        params["fields"] = fields
    if primary_exchange:
        params["primary_exchange"] = primary_exchange

    response = houston.put("/realtime/databases/{0}".format(code), params=params)

    houston.raise_for_status_with_json(response)
    return response.json()
Code example #15
def create_db(code, universes=None, start_date=None, end_date=None,
              vendor=None, bar_size=None, bar_type=None, outside_rth=False,
              primary_exchange=False, times=None,
              no_config=False, config_filepath_or_buffer=None):
    """
    Create a new history database.

    Parameters
    ----------
    code : str, required
        the code to assign to the database (lowercase alphanumerics and hyphens only)

    universes : list of str
        include these universes

    start_date : str (YYYY-MM-DD), optional
        fetch history back to this start date (default is to fetch as far back as data
        is available)

    end_date : str (YYYY-MM-DD), optional
        fetch history up to this end date (default is to fetch up to the present)

    vendor : str, optional
        the vendor to fetch data from (defaults to 'ib' which is currently the only
        supported vendor)

    bar_size : str, required for vendor ib
        the bar size to fetch. Possible choices:
        "1 secs", "5 secs",	"10 secs", "15 secs", "30 secs",
        "1 min", "2 mins", "3 mins", "5 mins", "10 mins", "15 mins", "20 mins", "30 mins",
        "1 hour", "2 hours", "3 hours", "4 hours", "8 hours",
        "1 day",
        "1 week",
        "1 month"

    bar_type : str, optional
        the bar type to fetch (if not specified, defaults to MIDPOINT for forex and
        TRADES for everything else). Possible choices:
        "TRADES",
        "ADJUSTED_LAST",
        "MIDPOINT",
        "BID",
        "ASK",
        "BID_ASK",
        "HISTORICAL_VOLATILITY",
        "OPTION_IMPLIED_VOLATILITY"

    outside_rth : bool
        include data from outside regular trading hours (default is to limit to regular
        trading hours)

    primary_exchange : bool
        limit to data from the primary exchange (default False)

    times : list of str (HH:MM:SS), optional
        limit to these times

    no_config : bool
        create a database with no config (data can be loaded manually instead of fetched
        from a vendor)

    config_filepath_or_buffer : str or file-like object, optional
        a YAML config file defining the historical data requirements (specify '-' to read file from stdin)

    Returns
    -------
    dict
        status message

    """
    params = {}
    if universes:
        params["universes"] = universes
    if start_date:
        params["start_date"] = start_date
    if end_date:
        params["end_date"] = end_date
    if vendor:
        params["vendor"] = vendor
    if bar_size:
        params["bar_size"] = bar_size
    if bar_type:
        params["bar_type"] = bar_type
    if outside_rth:
        params["outside_rth"] = outside_rth
    if primary_exchange:
        params["primary_exchange"] = primary_exchange
    if times:
        params["times"] = times
    if no_config:
        params["no_config"] = True

    if config_filepath_or_buffer == "-":
        response = houston.put("/history/databases/{0}".format(code), params=params,
                               data=to_bytes(sys.stdin))

    elif config_filepath_or_buffer and hasattr(config_filepath_or_buffer, "read"):
        response = houston.put("/history/databases/{0}".format(code), params=params,
                               data=to_bytes(config_filepath_or_buffer))

    elif config_filepath_or_buffer:
        with open(config_filepath_or_buffer, "rb") as f:
            response = houston.put("/history/databases/{0}".format(code), params=params, data=f)

    else:
        response = houston.put("/history/databases/{0}".format(code), params=params)

    houston.raise_for_status_with_json(response)
    return response.json()