def setup_predict(ticker, granularity, fast, slow, direction, multiplier):
    """Clear any previous prediction results for a signal system and enqueue
    walk-forward prediction tasks over rolling train/test windows."""
    entry = db_session.query(GetTickerTask).filter(
        GetTickerTask.ticker == ticker,
        GetTickerTask.granularity == granularity,
        GetTickerTask.price == 'M').first()

    sys = db_session.query(GenSignalTask).filter(
        GenSignalTask.batch_id == entry.id, GenSignalTask.fast == fast,
        GenSignalTask.slow == slow, GenSignalTask.trade_direction == direction,
        GenSignalTask.exit_strategy == f'trailing_atr_{multiplier}').first()

    db_session.query(Results).filter(Results.batch_id == sys.id).delete(
        synchronize_session=False)

    db_session.commit()

    train_sample_size = 500
    test_sample_size = 50
    num_chunks = ((sys.signal_count - train_sample_size - test_sample_size) /
                  test_sample_size) + 2
    chunks = []
    for ind in range(int(num_chunks)):
        chunks.append(
            (ind * test_sample_size,
             ind * test_sample_size + train_sample_size + test_sample_size))

    for i in chunks:
        action_predict.delay(sys.id, i, ticker, direction, train_sample_size)
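# Illustrative, database-free sketch of the walk-forward chunking performed
# above: the signal history is split into overlapping (train + test) windows
# that advance by one test-sized step. The counts below are placeholders.
def _walk_forward_chunks(signal_count, train_sample_size=500,
                         test_sample_size=50):
    num_chunks = ((signal_count - train_sample_size - test_sample_size) /
                  test_sample_size) + 2
    return [(ind * test_sample_size,
             ind * test_sample_size + train_sample_size + test_sample_size)
            for ind in range(int(num_chunks))]

# _walk_forward_chunks(650) -> [(0, 550), (50, 600), (100, 650), (150, 700)]
# (the last window may overrun the available count, as with the '+ 2' in
# setup_predict above)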
def get_data(ticker, granularity, target=None):
    """Function that enqueues indicator calculations in celery to be actioned
    by a rabbitmq backend."""

    _id = None
    shift = -1
    entry = db_session.query(getTickerTask).filter(
        getTickerTask.ticker == ticker, getTickerTask.price == 'M',
        getTickerTask.granularity == granularity).first()
    if target:
        entry_target = db_session.query(getTickerTask).filter(
            getTickerTask.ticker == ticker, getTickerTask.price == 'M',
            getTickerTask.granularity == target).first()
        _id = entry_target.id
        shift = -4
    task_id = entry.id
    indicator = db_session.query(indicatorTask).get(task_id)
    if indicator is None:
        db_session.add(indicatorTask(get_id=task_id))
    else:
        for i in [
                'adx_status', 'atr_status', 'stochastic_status', 'rsi_status',
                'macd_status', 'ichimoku_status', 'sma_status', 'status'
        ]:
            setattr(indicator, i, 0)
        for table in [
                moving_average, ichimoku, convergence_divergence,
                table_momentum, relative_strength, table_stoch
        ]:
            db_session.query(table).filter(table.batch_id == entry.id).delete(
                synchronize_session=False)

    db_session.commit()

    tasks.set_smooth_moving_average.delay(task_id)

    for f in d.keys():
        tasks.set_indicator.delay(task_id,
                                  f,
                                  d[f][0],
                                  d[f][1],
                                  target=_id,
                                  shift=shift)

    return None
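# Hypothetical usage sketch; the indicator map `d` iterated above and the
# celery `tasks` module are defined elsewhere in the original project and are
# not shown in this excerpt.
# get_data('AUD_JPY', 'M15')               # indicators on the base granularity
# get_data('AUD_JPY', 'M15', target='H1')  # also shift values onto the target
#                                          # granularity batch (shift=-4)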
def get_data(ticker, granularity, system, multiplier):
    """Function that enqueues system signals generators in celery to be managed
    by a rabbitmq broker.

    Parameters
    ----------
    ticker : str
        Target ticker's symbol.
    granularity : str
        Granularity on which to apply the signal generator; the string may
        include a supplementary granularity, delimited by a space, to call
        extra properties.
    system : list
        A list of systems, each item defined with the nomenclature
        'close_sma_x close_sma_y'.
    multiplier : int
        ATR multiplier used to label the trailing-stop exit strategy
        ('trailing_atr_{multiplier}').
    """
    # removing all checks, will implement later.
    mid = db_session.query(getTickerTask).filter(
        getTickerTask.ticker == ticker, getTickerTask.price == 'M',
        getTickerTask.granularity == granularity).first()
    ask = db_session.query(getTickerTask).filter(
        getTickerTask.ticker == ticker, getTickerTask.price == 'A',
        getTickerTask.granularity == granularity).first()
    bid = db_session.query(getTickerTask).filter(
        getTickerTask.ticker == ticker, getTickerTask.price == 'B',
        getTickerTask.granularity == granularity).first()

    comb = []
    if system[0] != 'close_sma_3 close_sma_5':
        for s in system:
            comb += parts[1 - int(s)]
    else:
        comb = system

    # print(comb)
    for s in comb:
        for trade in ['buy', 'sell']:
            tasks.gen_signals.delay(mid.id,
                                    ask.id,
                                    bid.id,
                                    s.split(' ')[0],
                                    s.split(' ')[1],
                                    trade,
                                    multiplier=int(multiplier))
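# Minimal, database-free sketch of the fan-out performed above: every
# 'fast slow' pair in `comb` is expanded into one buy and one sell
# signal-generation job. The system strings are example values only.
systems = ['close_sma_3 close_sma_5', 'close_sma_4 close_sma_10']
jobs = [(s.split(' ')[0], s.split(' ')[1], trade)
        for s in systems for trade in ('buy', 'sell')]
# jobs == [('close_sma_3', 'close_sma_5', 'buy'),
#          ('close_sma_3', 'close_sma_5', 'sell'),
#          ('close_sma_4', 'close_sma_10', 'buy'),
#          ('close_sma_4', 'close_sma_10', 'sell')]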
def save_data(df, data_table, record_table, record_columns, task_id):
    """Helper function to record tasks' internal functions' success in the
    database."""
    df['batch_id'] = task_id
    rows = df.to_dict('records')
    db_session.bulk_insert_mappings(data_table, rows)
    del rows

    entry = db_session.query(record_table).get(task_id)
    for col in record_columns:
        setattr(entry, col, 1)
    db_session.commit()
    db_session.remove()
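# Standalone sketch of the bulk-insert preparation used in save_data, runnable
# without a database: pandas' to_dict('records') yields the per-row mappings
# that SQLAlchemy's bulk_insert_mappings expects. Column names and values are
# placeholders.
import pandas as pd

df = pd.DataFrame({'timestamp': ['2020-01-01T00:00:00'], 'close': [75.432]})
df['batch_id'] = 'example-task-id'
rows = df.to_dict('records')
# rows == [{'timestamp': '2020-01-01T00:00:00', 'close': 75.432,
#           'batch_id': 'example-task-id'}]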
def query_func():
    """Function that inserts and queries records from the db, using the session
    defined in the core database module."""
    from htp.aux.database import db_session
    from htp.aux.models import GetTickerTask
    get_id = uuid4()
    db_session.add(
        GetTickerTask(id=get_id,
                      ticker='AUD_JPY',
                      price='M',
                      granularity='M15'))
    db_session.commit()
    entry = db_session.query(GetTickerTask).get(get_id)
    return entry.ticker
def session_get_data(self,
                     ticker,
                     params={
                         "count": 5,
                         "price": "M"
                     },
                     timeout=None):
    """Celery task function to engage Oanda instruments.Candles endpoint.

    Paramaters
    ----------
    ticker : str
        Target ticker's symbol.
    params : dict
        Dictionary containing endpoint arguments that specified in the Oanda
        documentation.
    timeout : float {None}
        Set timeout value for production server code so that function doesn't
        block.

    Returns
    -------
    str
        String containing an error's traceback message.
    pandas.core.frame.DataFrame
        The ticker's timeseries data returned by the api endpoint.
    """
    res = None
    if "price" not in params.keys():
        params["price"] = "M"
    url = f'https://api-fxpractice.oanda.com/v3/instruments/{ticker}/candles?'
    try:
        # plain requests call; the original project may attach auth headers
        # via a shared session elsewhere
        r = requests.get(url, params=params, timeout=timeout)
    except requests.exceptions.RequestException as e:
        res = str(e)
    else:
        if r.status_code != requests.codes.ok:
            res = str(r.json()["errorMessage"])
        else:
            res = oanda.Candles.to_df(r.json(), params)
    finally:
        entry = db_session.query(SubTickerTask).get(UUID(self.request.id))
        if isinstance(res, str):
            entry.status = 0
            entry.error = res
        else:
            entry.status = 1
        db_session.commit()
        db_session.remove()
    return (res, self.request.id)
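# Standalone sketch of the request/error-handling pattern used in the task
# above, outside of celery. The bearer token is a placeholder and the auth
# header is an assumption: the original task presumably attaches credentials
# via its configured session.
import requests

def fetch_candles(ticker, params, token, timeout=30):
    url = f'https://api-fxpractice.oanda.com/v3/instruments/{ticker}/candles?'
    headers = {'Authorization': f'Bearer {token}'}
    try:
        r = requests.get(url, headers=headers, params=params, timeout=timeout)
    except requests.exceptions.RequestException as e:
        return str(e)  # network-level failure reported as text
    if r.status_code != requests.codes.ok:
        return str(r.json().get('errorMessage'))  # API-level failure
    return r.json()  # caller converts the payload to a DataFrame

# fetch_candles('AUD_JPY', {'count': 5, 'price': 'M', 'granularity': 'M15'},
#               token='<practice-account-token>')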
def conv_price(self, prev_id, signal_join_column, signal_target_column,
               conv_batch_id, sys_id):
    """Celery task to populate a signal's conversion price columns: copy the
    raw entry/exit prices when no conversion pair applies, otherwise join the
    conversion pair's candle open prices on timestamp."""
    if prev_id is not None:
        AsyncResult(prev_id).forget()

    if not conv_batch_id:
        for r in db_session.query(Signals).\
                 filter(Signals.batch_id == sys_id).all():
            if signal_target_column == 'conv_entry_price':
                r.conv_entry_price = r.entry_price
            elif signal_target_column == 'conv_exit_price':
                r.conv_exit_price = r.exit_price
    else:
        for u, a in db_session.query(Signals, Candles.open).\
                join(Candles, getattr(Signals, signal_join_column) ==
                     Candles.timestamp).\
                filter(Candles.batch_id == conv_batch_id).\
                filter(Signals.batch_id == sys_id).all():
            setattr(u, signal_target_column, a)

    db_session.commit()
    return self.request.id
def prep_signals(self, prev_id, table, targets, batch_id, sys_id,
                 property_type):
    """Celery task to copy indicator values onto signals, joining each
    signal's entry datetime against the indicator table's shifted timestamps
    and prefixing the copied columns with the given property type."""
    if prev_id is not None:
        AsyncResult(prev_id).forget()

    for s, d in db_session.query(Signals, table).\
            join(table, Signals.entry_datetime == table.timestamp_shift).\
            filter(table.batch_id == batch_id).\
            filter(Signals.batch_id == sys_id).all():
        for target in targets:
            setattr(s, f'{property_type}_{target}', getattr(d, target))

    db_session.commit()
    return self.request.id
def get_data(ticker, price, granularity, from_, to, smooth):
    """Function to initiate ticker data download and entry logging in a
    database.

    Parameters
    ----------
    ticker : str
       The target instrument to be queried using the preset function for a
       given endpoint.
    price : str
       The candle type for which ticker data should be sourced.
    granularity : str
       The time interval that defines the period of the timeseries data.
    from_ : datetime.datetime
       The startpoint from which data should be downloaded.
    to : datetime.datetime
       The endpoint to which data should be downloaded.
    smooth : bool
       A flag that the api endpoint accepts to ensure the close and open
       values for adjacent candles match.

    Returns
    -------
      None

    Notes
    -----
    - If the data download is successful the timeseries will be saved in the
    'candles' table in the database, with a foreign key on each row relating
    the entry to the initial get ticker query that defines the ticker, price,
    granularity, and batch from and to dates.
    - The database logging functionality is designed to recycle pre-existing
    rows that match the same ticker, price and granularity criteria, updating
    the from_ and to values accordingly.
    """
    for val in price:
        for interval in granularity:

            args = {
                "price": val,
                "granularity": interval,
                "from": from_,
                "to": to,
                "smooth": smooth
            }

            entry = db_session.query(GetTickerTask).filter(
                GetTickerTask.ticker == ticker, GetTickerTask.price == val,
                GetTickerTask.granularity == interval).first()
            if entry is None:
                batch_id = uuid4()
                db_session.add(
                    GetTickerTask(id=batch_id,
                                  ticker=ticker,
                                  price=val,
                                  _from=from_,
                                  to=to,
                                  granularity=interval))
            else:
                batch_id = entry.id
                setattr(entry, "_from", from_)
                setattr(entry, "to", to)
                for table in [
                        SubTickerTask, Candles, IndicatorTask, Indicators
                ]:
                    db_session.query(table).filter(table.batch_id == entry.id)\
                        .delete(synchronize_session=False)

            header = []
            param_set = arg_prep(args)
            for params in param_set:
                g = tasks.session_get_data.signature((ticker, ), {
                    "params": params,
                    "timeout": 30
                })
                g.freeze()
                # print(g.id)
                header.append(g)
                db_session.add(
                    SubTickerTask(  # id=UUID(g.id),
                        batch_id=batch_id,
                        _from=datetime.strptime(params["from"],
                                                '%Y-%m-%dT%H:%M:%S.%f000Z'),
                        to=datetime.strptime(params["from"],
                                             '%Y-%m-%dT%H:%M:%S.%f000Z')))

            callback = tasks.merge_data.s(ticker,
                                          val,
                                          interval,
                                          task_id=batch_id)
            chord(header)(callback)

    db_session.commit()
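# Hypothetical top-level call (argument names match the function above; the
# instrument and dates are placeholders), plus a standalone check of the
# timestamp format parsed when logging SubTickerTask windows.
# get_data('AUD_JPY', ['M', 'A', 'B'], ['M15', 'H1'],
#          datetime(2020, 1, 1), datetime(2020, 6, 30), True)
from datetime import datetime

ts = '2020-01-01T00:00:00.000000000Z'
parsed = datetime.strptime(ts, '%Y-%m-%dT%H:%M:%S.%f000Z')
# parsed == datetime(2020, 1, 1, 0, 0)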
def get_data(ticker, granularity, system, multiplier):
    conv = db_session.query(getTickerTask).filter(
        getTickerTask.ticker == ticker_conversion_pairs[ticker],
        getTickerTask.granularity == granularity,
        getTickerTask.price == 'A').first()

    conv_id = conv.id
    if ticker == ticker_conversion_pairs[ticker]:
        conv_id = False

    entry_target = db_session.query(getTickerTask).filter(
        getTickerTask.ticker == ticker, getTickerTask.granularity ==
        granularity, getTickerTask.price == 'M').first()

    entry_sup = db_session.query(getTickerTask).filter(
        getTickerTask.ticker == ticker, getTickerTask.granularity ==
        sup[granularity], getTickerTask.price == 'M').first()

    comb = []
    if system[0] != 'close_sma_3 close_sma_5':
        for s in system:
            comb += parts[1 - int(s)]
    else:
        comb = system

    # print(comb)
    for s in comb:
        for trade in ['buy', 'sell']:
            sig = db_session.query(genSignalTask).filter(
                genSignalTask.batch_id == entry_target.id,
                genSignalTask.fast == s.split(' ')[0],  # 'close_sma_3',
                genSignalTask.slow == s.split(' ')[1],  # 'close_sma_5',
                genSignalTask.trade_direction == trade,
                genSignalTask.exit_strategy == f'trailing_atr_{multiplier}'
                ).first()

            (conv_price.s(
                None, 'entry_datetime', 'conv_entry_price', conv_id, sig.id) |
                conv_price.s(
                    'exit_datetime', 'conv_exit_price', conv_id, sig.id) |
                prep_signals.s(
                    stochastic, tables[stochastic], entry_target.id, sig.id,
                    'target') |
                prep_signals.s(
                    relative_strength, tables[relative_strength],
                    entry_target.id, sig.id, 'target') |
                prep_signals.s(
                    momentum, tables[momentum], entry_target.id, sig.id,
                    'target') |
                prep_signals.s(
                    ichimoku, tables[ichimoku], entry_target.id, sig.id,
                    'target') |
                prep_signals.s(
                    convergence_divergence, tables[convergence_divergence],
                    entry_target.id, sig.id, 'target') |
                prep_signals.s(
                    stochastic, tables[stochastic], entry_sup.id, sig.id,
                    'sup') |
                prep_signals.s(
                    relative_strength, tables[relative_strength], entry_sup.id,
                    sig.id, 'sup') |
                prep_signals.s(
                    momentum, tables[momentum], entry_sup.id, sig.id, 'sup') |
                prep_signals.s(
                    ichimoku, tables[ichimoku], entry_sup.id, sig.id, 'sup') |
                prep_signals.s(
                    convergence_divergence, tables[convergence_divergence],
                    entry_sup.id, sig.id, 'sup')).delay()
def gen_signals(mid_id, ask_id, bid_id, fast, slow, trade, multiplier=6.0):
    """Generate entry/exit signals for a fast/slow moving-average crossover
    with a trailing ATR stop, pricing entries and exits with the ask/bid
    candles, and persist the results against a GenSignalTask record."""
    price = {
        'buy': {
            'entry': ask_id,
            'exit': bid_id
        },
        'sell': {
            'entry': bid_id,
            'exit': ask_id
        }
    }
    mid_close = load_data(mid_id, 'candles', ['timestamp', 'close'])
    entry = load_data(price[trade]['entry'], 'candles', ['timestamp', 'open'])
    entry.rename(columns={'open': 'entry_open'}, inplace=True)
    exit_ = load_data(price[trade]['exit'], 'candles',
                      ['timestamp', 'open', 'high', 'low'])
    exit_.rename(columns={
        'open': 'exit_open',
        'high': 'exit_high',
        'low': 'exit_low'
    },
                 inplace=True)
    atr = load_data(mid_id, 'momentum', ['timestamp', 'atr'])
    dfsys = load_data(mid_id, 'moving_average', ['timestamp', fast, slow])
    df = pd.concat([mid_close, entry, exit_, atr, dfsys], axis=1)
    df.dropna(inplace=True)

    sys_signals = evaluate_fast.Signals.atr_stop_signals(df,
                                                         fast,
                                                         slow,
                                                         multiplier=multiplier,
                                                         trade=trade)

    close_to_close = observe.close_in_atr(mid_close, atr)
    close_to_fast_signal = observe.close_to_signal_by_atr(
        mid_close, dfsys, fast, atr)
    close_to_slow_signal = observe.close_to_signal_by_atr(mid_close,
                                                          dfsys,
                                                          slow,
                                                          atr,
                                                          speed='slow')
    obs = pd.concat(
        [close_to_close, close_to_fast_signal, close_to_slow_signal], axis=1)
    obs_shift = obs.shift(1)
    sys_signals = sys_signals.merge(obs_shift,
                                    how='left',
                                    left_on='entry_datetime',
                                    right_index=True,
                                    validate='1:1')

    sys_signals.dropna(inplace=True)
    sys_signals.drop(sys_signals[sys_signals['stop_loss'] <= 0].index,
                     inplace=True)

    sys_entry = db_session.query(GenSignalTask).filter(
        GenSignalTask.batch_id == mid_id, GenSignalTask.fast == fast,
        GenSignalTask.slow == slow, GenSignalTask.trade_direction == trade,
        GenSignalTask.exit_strategy == f'trailing_atr_{multiplier}').first()
    if sys_entry is None:
        sys_id = uuid4()
        db_session.add(
            GenSignalTask(id=sys_id,
                          batch_id=mid_id,
                          fast=fast,
                          slow=slow,
                          trade_direction=trade,
                          exit_strategy=f'trailing_atr_{multiplier}',
                          signal_count=len(sys_signals)))
    else:
        sys_id = sys_entry.id
        setattr(sys_entry, 'status', 0)
        db_session.query(Signals).filter(Signals.batch_id == sys_id).delete(
            synchronize_session=False)
    db_session.commit()
    save_data(sys_signals, Signals, GenSignalTask, ('status', ), sys_id)
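# Database-free sketch of the look-ahead guard applied above: observation
# features are shifted one bar before being merged onto signals, so that each
# entry row only carries information available prior to entry. Values are
# placeholders.
import pandas as pd

idx = pd.to_datetime(['2020-01-01 00:00', '2020-01-01 00:15',
                      '2020-01-01 00:30'])
obs = pd.DataFrame({'close_in_atr': [0.4, 0.9, 1.3]}, index=idx)
signals = pd.DataFrame({'entry_datetime': [idx[2]]})
merged = signals.merge(obs.shift(1), how='left',
                       left_on='entry_datetime', right_index=True,
                       validate='1:1')
# merged['close_in_atr'].iloc[0] == 0.9, the value from the previous bar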