Example #1

    def process(self, algo_id, ticker, dataset):
        """process

        Derive custom indicator processing to determine buy and sell
        conditions before placing orders. Just implement your own
        ``process`` method.

        Please refer to the TA Lib guides for details on building indicators:

        - Overlap Studies
          https://mrjbq7.github.io/ta-lib/func_groups/overlap_studies.html
        - Momentum Indicators
          https://mrjbq7.github.io/ta-lib/func_groups/momentum_indicators.html
        - Volume Indicators
          https://mrjbq7.github.io/ta-lib/func_groups/volume_indicators.html
        - Volatility Indicators
          https://mrjbq7.github.io/ta-lib/func_groups/volatility_indicators.html
        - Price Transform
          https://mrjbq7.github.io/ta-lib/func_groups/price_transform.html
        - Cycle Indicators
          https://mrjbq7.github.io/ta-lib/func_groups/cycle_indicators.html
        - Pattern Recognition
          https://mrjbq7.github.io/ta-lib/func_groups/pattern_recognition.html
        - Statistic Functions
          https://mrjbq7.github.io/ta-lib/func_groups/statistic_functions.html
        - Math Transform
          https://mrjbq7.github.io/ta-lib/func_groups/math_transform.html
        - Math Operators
          https://mrjbq7.github.io/ta-lib/func_groups/math_operators.html

        :param algo_id: string - algo identifier label for debugging datasets
            during specific dates
        :param ticker: string - ticker
        :param dataset: dictionary of ``pandas.DataFrame(s)`` to process
        """

        # set the algo config indicator 'uses_data' to 'day' or 'minute'
        df_status, self.use_df = self.get_subscribed_dataset(dataset=dataset)

        if df_status == ae_consts.EMPTY:
            self.lg('process end - no data found')
            return

        # notice the self.num_points is now a member variable
        # because the BaseIndicator class's __init__
        # converts any self.config keys into useable
        # member variables automatically in your derived class
        self.lg(f'process - num_points={self.num_points} '
                f'df={len(self.use_df.index)}')
        """
        real = ADX(high, low, close, timeperiod=14)
        """
        num_records = len(self.use_df.index)
        if num_records > self.num_points:
            cur_value = self.use_df['close'].iloc[-1]
            first_date = self.use_df['date'].iloc[0]
            end_date = self.use_df['date'].iloc[-1]
            start_row = num_records - self.num_points
            self.use_df = self.use_df[start_row:num_records]
            """
            for idx, row in self.use_df[start_row:-1].iterrows():
                high = row['high']
                low = row['low']
                open_val = row['open']
                close = row['close']
                row_date = row['date']
                self.lg(
                    f'{row_date} - high={high}, low={low}, '
                    f'close={close}, period={self.num_points}')
            """
            highs = self.use_df['high'].values
            lows = self.use_df['low'].values
            closes = self.use_df['close'].values

            self.adx_value = ae_consts.to_f(
                ae_talib.ADX(high=highs,
                             low=lows,
                             close=closes,
                             timeperiod=self.num_points)[-1])
            """
            Determine a buy or a sell as a label
            """

            if cur_value <= 0:
                self.lg(f'invalid current_value={cur_value}')
                return

            self.close = cur_value
            self.amount_to_close = ae_consts.to_f(cur_value - self.adx_value)
            self.percent_value = ae_consts.to_f(self.amount_to_close /
                                                cur_value * 100.0)

            self.is_buy = ae_consts.INDICATOR_IGNORE
            self.is_sell = ae_consts.INDICATOR_IGNORE

            if (self.buy_above_percent != -1
                    and self.percent_value > self.buy_above_percent):
                self.is_buy = ae_consts.INDICATOR_BUY
            elif (self.buy_below_percent != -1
                  and self.percent_value < self.buy_below_percent):
                self.is_buy = ae_consts.INDICATOR_BUY

            if (self.sell_above_percent != -1
                    and self.percent_value > self.sell_above_percent):
                self.is_sell = ae_consts.INDICATOR_SELL
            elif (self.sell_below_percent != -1
                  and self.percent_value < self.sell_below_percent):
                self.is_sell = ae_consts.INDICATOR_SELL

            self.lg(
                f'process end - {first_date} to {end_date} '
                f'buy_below={self.buy_below_percent} '
                f'buy_above={self.buy_above_percent} is_buy={self.is_buy} '
                f'sell_below={self.sell_below_percent} '
                f'sell_above={self.sell_above_percent} is_sell={self.is_sell}')
        else:
            self.lg('process end')
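
For reference, here is a minimal sketch of the kind of algorithm config that could drive the ADX example above. The key names mirror the member variables the method reads (``num_points``, ``buy_above_percent``, and so on, which ``BaseIndicator.__init__`` is said to lift from ``self.config``); the surrounding schema and the concrete values are assumptions for illustration.

# hypothetical config for the ADX indicator above; BaseIndicator.__init__
# converts these keys into member variables (self.num_points, ...)
example_adx_config = {
    'uses_data': 'day',          # or 'minute', per the comment in process()
    'num_points': 14,            # ADX timeperiod
    'buy_above_percent': 5.0,    # buy when percent_value rises above this
    'buy_below_percent': -1,     # -1 disables a threshold
    'sell_above_percent': 10.0,  # sell when percent_value rises above this
    'sell_below_percent': -1,
}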
Example #2

def build_buy_order(ticker,
                    num_owned,
                    close,
                    balance,
                    commission,
                    date,
                    details,
                    use_key,
                    minute=None,
                    shares=None,
                    version=1,
                    auto_fill=True,
                    is_live_trading=False,
                    backtest_shares_default=10,
                    reason=None):
    """build_buy_order

    Create an algorithm buy order as a dictionary

    .. note:: setting the ``minute`` is required to build
        a minute-by-minute ``Trading History``

    :param ticker: ticker
    :param num_owned: integer current owned
        number of shares for this asset
    :param close: float closing price of the asset
    :param balance: float amount of available capital
    :param commission: float for commission costs
    :param date: string trade date for that row usually
        ``COMMON_DATE_FORMAT`` (``YYYY-MM-DD``)
    :param minute: optional - string with the minute that the
        order was placed. format is
        ``COMMON_TICK_DATE_FORMAT`` (``YYYY-MM-DD HH:MM:SS``)
    :param details: dictionary for full row of values to review
        all buys after the algorithm finishes.
        (usually ``row.to_json()``)
    :param use_key: string for redis and s3 publishing of the algorithm
        result dictionary as a json-serialized dictionary
    :param shares: optional - integer number of shares to buy
        if None buy max number of shares at the ``close`` with the
        available ``balance`` amount.
    :param version: optional - version tracking integer
    :param auto_fill: optional - bool for not assuming the trade
        filled (default ``True``)
    :param is_live_trading: optional - bool for filling trades
        for live trading or for backtest tuning filled
        (default ``False`` which is backtest mode)
    :param backtest_shares_default: optional - integer for
        simulating shares during a backtest even if there
        are not enough funds
        (default ``10``)
    :param reason: optional - string for recording why the algo
        decided to buy for review after the algorithm finishes
    """
    status = ae_consts.TRADE_OPEN
    s3_bucket_name = ae_consts.ALGO_BUYS_S3_BUCKET_NAME
    s3_key = use_key
    redis_key = use_key
    s3_enabled = True
    redis_enabled = True

    cost_of_trade = None
    new_shares = num_owned
    new_balance = balance
    created_date = None

    tradable_funds = balance - (2.0 * commission)

    if not is_live_trading:
        if not shares:
            shares = backtest_shares_default
        tradable_funds = ((shares * close) + (2.0 * commission))

    if close > 0.1 and tradable_funds > 10.0:
        can_buy_num_shares = shares
        if not can_buy_num_shares:
            can_buy_num_shares = int(tradable_funds / close)
        cost_of_trade = ae_consts.to_f(val=(can_buy_num_shares * close) +
                                       commission)
        if can_buy_num_shares > 0:
            if cost_of_trade > balance:
                status = ae_consts.TRADE_NOT_ENOUGH_FUNDS
            else:
                created_date = ae_utils.utc_now_str()
                if auto_fill:
                    new_shares = int(num_owned + can_buy_num_shares)
                    new_balance = ae_consts.to_f(balance - cost_of_trade)
                    status = ae_consts.TRADE_FILLED
                else:
                    new_shares = shares
                    new_balance = balance
        else:
            status = ae_consts.TRADE_NOT_ENOUGH_FUNDS
    else:
        status = ae_consts.TRADE_NOT_ENOUGH_FUNDS

    order_dict = {
        'ticker': ticker,
        'status': status,
        'balance': new_balance,
        'shares': new_shares,
        'buy_price': cost_of_trade,
        'prev_balance': balance,
        'prev_shares': num_owned,
        'close': close,
        'details': details,
        'reason': reason,
        'date': date,
        'minute': minute,
        'created': created_date,
        's3_bucket': s3_bucket_name,
        's3_key': s3_key,
        'redis_key': redis_key,
        's3_enabled': s3_enabled,
        'redis_enabled': redis_enabled,
        'version': version
    }

    use_date = minute
    if not use_date:
        use_date = date

    log.debug('{} {} buy {} order={}'.format(
        ticker, use_date, ae_consts.get_status(status=order_dict['status']),
        ae_consts.ppj(order_dict)))

    return order_dict
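
A quick usage sketch based only on the signature and defaults above; the ticker, prices, and key are illustrative values, not part of the original.

buy = build_buy_order(
    ticker='SPY',
    num_owned=0,
    close=270.0,
    balance=5000.0,
    commission=6.0,
    date='2018-11-02',
    details={},                 # normally row.to_json()
    use_key='SPY_2018-11-02',
    shares=None)                # backtest mode defaults to 10 shares
# cost = (10 * 270.0) + 6.0 = 2706.0 <= 5000.0, so the trade fills:
# buy['status'] == TRADE_FILLED, buy['shares'] == 10,
# buy['balance'] == 5000.0 - 2706.0 == 2294.0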
Example #3

def publish_ticker_aggregate_from_s3(self, work_dict):
    """publish_ticker_aggregate_from_s3

    Publish Aggregated Ticker Data from S3 to Redis

    :param work_dict: dictionary for key/values
    """

    label = 'pub-tic-agg-s3-to-redis'

    log.info(f'task - {label} - start work_dict={work_dict}')

    ticker = ae_consts.TICKER
    ticker_id = ae_consts.TICKER_ID
    rec = {
        'ticker': None,
        'ticker_id': None,
        's3_read_enabled': True,
        's3_upload_enabled': True,
        'redis_enabled': True,
        's3_bucket': None,
        's3_compiled_bucket': None,
        's3_key': None,
        'redis_key': None,
        'updated': None
    }
    res = build_result.build_result(status=ae_consts.NOT_RUN,
                                    err=None,
                                    rec=rec)

    try:
        ticker = work_dict.get('ticker', ae_consts.TICKER)
        ticker_id = int(work_dict.get('ticker_id', ae_consts.TICKER_ID))

        if not ticker:
            res = build_result.build_result(status=ae_consts.ERR,
                                            err='missing ticker',
                                            rec=rec)
            return res

        label = work_dict.get('label', label)
        s3_key = work_dict.get('s3_key', None)
        s3_bucket_name = work_dict.get('s3_bucket', 'pricing')
        s3_compiled_bucket_name = work_dict.get('s3_compiled_bucket',
                                                'compileddatasets')
        redis_key = work_dict.get('redis_key', None)
        updated = work_dict.get('updated', None)
        enable_s3_upload = work_dict.get('s3_upload_enabled',
                                         ae_consts.ENABLED_S3_UPLOAD)
        enable_redis_publish = work_dict.get('redis_enabled',
                                             ae_consts.ENABLED_REDIS_PUBLISH)
        serializer = work_dict.get('serializer', 'json')
        encoding = work_dict.get('encoding', 'utf-8')

        enable_s3_read = True
        # initialize so the upload/publish blocks below never reference
        # undefined names when the read is skipped or no keys match
        keys = []
        data = []

        rec['ticker'] = ticker
        rec['ticker_id'] = ticker_id
        rec['s3_bucket'] = s3_bucket_name
        rec['s3_compiled_bucket'] = s3_compiled_bucket_name
        rec['s3_key'] = s3_key
        rec['redis_key'] = redis_key
        rec['updated'] = updated
        rec['s3_read_enabled'] = enable_s3_read
        rec['s3_upload_enabled'] = enable_s3_upload
        rec['redis_enabled'] = enable_redis_publish

        if enable_s3_read:
            log.info(f'{label} parsing s3 values')
            access_key = work_dict.get('s3_access_key',
                                       ae_consts.S3_ACCESS_KEY)
            secret_key = work_dict.get('s3_secret_key',
                                       ae_consts.S3_SECRET_KEY)
            region_name = work_dict.get('s3_region_name',
                                        ae_consts.S3_REGION_NAME)
            service_address = work_dict.get('s3_address', ae_consts.S3_ADDRESS)
            secure = work_dict.get('s3_secure', ae_consts.S3_SECURE) == '1'

            endpoint_url = f'http{"s" if secure else ""}://{service_address}'

            log.info(f'{label} building s3 endpoint_url={endpoint_url} '
                     f'region={region_name}')

            s3 = boto3.resource(
                's3',
                endpoint_url=endpoint_url,
                aws_access_key_id=access_key,
                aws_secret_access_key=secret_key,
                region_name=region_name,
                config=boto3.session.Config(signature_version='s3v4'))

            try:
                log.info(f'{label} checking bucket={s3_bucket_name} exists')
                if s3.Bucket(s3_bucket_name) not in s3.buckets.all():
                    log.info(f'{label} creating bucket={s3_bucket_name}')
                    s3.create_bucket(Bucket=s3_bucket_name)
            except Exception as e:
                log.info(f'{label} failed creating bucket={s3_bucket_name} '
                         f'with ex={e}')
            # end of try/ex for creating bucket

            try:
                log.info(f'{label} checking bucket={s3_bucket_name} keys')
                date_keys = []
                keys = []
                # {TICKER}_YYYY-MM-DD regex
                reg = r'^.*_\d{4}-(0?[1-9]|1[012])-(0?[1-9]|[12][0-9]|3[01])$'
                for bucket in s3.buckets.all():
                    for key in bucket.objects.all():
                        if (ticker.lower() in key.key.lower()
                                and bool(re.compile(reg).search(key.key))):
                            keys.append(key.key)
                            date_keys.append(key.key.split(f'{ticker}_')[1])
            except Exception as e:
                log.info(f'{label} failed to get bucket={s3_bucket_name} '
                         f'keys with ex={e}')
            # end of try/ex for getting bucket keys

            if keys:
                data = []
                for idx, key in enumerate(keys):
                    try:
                        log.info(
                            f'{label} reading to s3={s3_bucket_name}/{key} '
                            f'updated={updated}')
                        loop_data = s3_read_contents_from_key.\
                            s3_read_contents_from_key(
                                s3=s3,
                                s3_bucket_name=s3_bucket_name,
                                s3_key=key,
                                encoding=encoding,
                                convert_as_json=True)

                        initial_size_value = \
                            len(str(loop_data)) / 1024000
                        initial_size_str = ae_consts.to_f(initial_size_value)
                        if ae_consts.ev('DEBUG_S3', '0') == '1':
                            log.info(f'{label} read s3={s3_bucket_name}/{key} '
                                     f'data={ae_consts.ppj(loop_data)}')
                        else:
                            log.info(
                                f'{label} read s3={s3_bucket_name}/{key} data '
                                f'size={initial_size_str} MB')
                        data.append({f'{date_keys[idx]}': loop_data})
                    except Exception as e:
                        err = (
                            f'{label} failed reading bucket={s3_bucket_name} '
                            f'key={key} ex={e}')
                        log.error(err)
                        res = build_result.build_result(
                            status=ae_consts.NOT_RUN, err=err, rec=rec)
                    # end of try/ex for creating bucket
            else:
                log.info(f'{label} No keys found in S3 '
                         f'bucket={s3_bucket_name} for ticker={ticker}')
        else:
            log.info(f'{label} SKIP S3 read bucket={s3_bucket_name} '
                     f'ticker={ticker}')
        # end of if enable_s3_read

        if data and enable_s3_upload:
            try:
                log.info(f'{label} checking bucket={s3_compiled_bucket_name} '
                         'exists')
                if s3.Bucket(s3_compiled_bucket_name) not in s3.buckets.all():
                    log.info(
                        f'{label} creating bucket={s3_compiled_bucket_name}')
                    s3.create_bucket(Bucket=s3_compiled_bucket_name)
            except Exception as e:
                log.info(f'{label} failed creating '
                         f'bucket={s3_compiled_bucket_name} with ex={e}')
            # end of try/ex for creating bucket

            try:
                cmpr_data = zlib.compress(json.dumps(data).encode(encoding), 9)

                if ae_consts.ev('DEBUG_S3', '0') == '1':
                    log.info(
                        f'{label} uploading to '
                        f's3={s3_compiled_bucket_name}/{s3_key} '
                        f'data={ae_consts.ppj(data)} updated={updated}')
                else:
                    sizes = {
                        'MB': 1024000,
                        'GB': 1024000000,
                        'TB': 1024000000000,
                        'PB': 1024000000000000
                    }
                    initial_size_value = len(str(data))
                    org_data_size = 'MB'
                    for key in sizes.keys():
                        size = float(initial_size_value) / float(sizes[key])
                        if size > 1024:
                            continue
                        org_data_size = key
                        initial_size_value = size
                        break
                    initial_size_str = ae_consts.to_f(initial_size_value)

                    cmpr_data_size_value = len(cmpr_data)
                    cmpr_data_size = 'MB'
                    for key in sizes.keys():
                        size = float(cmpr_data_size_value) / float(sizes[key])
                        if size > 1024:
                            continue
                        cmpr_data_size = key
                        cmpr_data_size_value = size
                        break
                    cmpr_size_str = ae_consts.to_f(cmpr_data_size_value)
                    log.info(
                        f'{label} uploading to '
                        f's3={s3_compiled_bucket_name}/{s3_key} data '
                        f'original_size={initial_size_str} {org_data_size} '
                        f'compressed_size={cmpr_size_str} {cmpr_data_size} '
                        f'updated={updated}')
                s3.Bucket(s3_compiled_bucket_name).put_object(Key=s3_key,
                                                              Body=cmpr_data)
            except Exception as e:
                log.error(f'{label} failed '
                          f'uploading bucket={s3_compiled_bucket_name} '
                          f'key={s3_key} ex={e}')
            # end of try/ex for creating bucket
        else:
            log.info(
                f'{label} SKIP S3 upload '
                f'bucket={s3_compiled_bucket_name} key={s3_key}')
        # end of if enable_s3_upload

        if data and enable_redis_publish:
            redis_address = work_dict.get('redis_address',
                                          ae_consts.REDIS_ADDRESS)
            redis_key = work_dict.get('redis_key', ae_consts.REDIS_KEY)
            redis_password = work_dict.get('redis_password',
                                           ae_consts.REDIS_PASSWORD)
            redis_db = work_dict.get('redis_db', None)
            if not redis_db:
                redis_db = ae_consts.REDIS_DB
            redis_expire = None
            if 'redis_expire' in work_dict:
                redis_expire = work_dict.get('redis_expire',
                                             ae_consts.REDIS_EXPIRE)
            log.info(f'redis enabled address={redis_address}@{redis_db} '
                     f'key={redis_key}')
            redis_host = redis_address.split(':')[0]
            redis_port = redis_address.split(':')[1]
            try:
                if ae_consts.ev('DEBUG_REDIS', '0') == '1':
                    log.info(
                        f'{label} publishing redis={redis_host}:{redis_port} '
                        f'db={redis_db} key={redis_key} updated={updated} '
                        f'expire={redis_expire} data={ae_consts.ppj(data)}')
                else:
                    log.info(
                        f'{label} publishing redis={redis_host}:{redis_port} '
                        f'db={redis_db} key={redis_key} '
                        f'updated={updated} expire={redis_expire}')
                # end of if/else

                rc = redis.Redis(host=redis_host,
                                 port=redis_port,
                                 password=redis_password,
                                 db=redis_db)

                redis_set_res = redis_set.set_data_in_redis_key(
                    label=label,
                    client=rc,
                    key=redis_key,
                    data=data,
                    serializer=serializer,
                    encoding=encoding,
                    expire=redis_expire,
                    px=None,
                    nx=False,
                    xx=False)

                log.info(
                    f'{label} redis_set '
                    f'status={ae_consts.get_status(redis_set_res["status"])} '
                    f'err={redis_set_res["err"]}')

            except Exception as e:
                log.error(f'{label} failed - redis publish to '
                          f'key={redis_key} ex={e}')
            # end of try/ex for creating bucket
        else:
            log.info(f'{label} SKIP REDIS publish key={redis_key}')
        # end of if enable_redis_publish

        res = build_result.build_result(status=ae_consts.SUCCESS,
                                        err=None,
                                        rec=rec)

    except Exception as e:
        res = build_result.build_result(
            status=ae_consts.ERR,
            err=(f'failed - publish_from_s3 dict={work_dict} with ex={e}'),
            rec=rec)
        log.error(f'{label} - {res["err"]}')
    # end of try/ex

    log.info('task - publish_from_s3 done - '
             f'{label} - status={ae_consts.get_status(res["status"])}')

    return get_task_results.get_task_results(work_dict=work_dict, result=res)
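
The task above is driven entirely by ``work_dict``. Here is a hedged sketch of the keys it reads, using the defaults visible in the code; the concrete values are illustrative.

work_dict = {
    'ticker': 'SPY',
    's3_bucket': 'pricing',                    # scanned for {TICKER}_YYYY-MM-DD keys
    's3_compiled_bucket': 'compileddatasets',  # destination for the compressed aggregate
    's3_key': 'SPY_latest',                    # illustrative upload key
    'redis_key': 'SPY_latest',
    'redis_address': 'localhost:6379',         # parsed as host:port
    'serializer': 'json',
    'encoding': 'utf-8',
    's3_upload_enabled': True,
    'redis_enabled': True,
}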
Example #4

def build_sell_order(ticker,
                     num_owned,
                     close,
                     balance,
                     commission,
                     date,
                     details,
                     use_key,
                     minute=None,
                     shares=None,
                     version=1,
                     auto_fill=True,
                     is_live_trading=False,
                     backtest_shares_default=10,
                     reason=None):
    """build_sell_order

    Create an algorithm sell order as a dictionary

    :param ticker: ticker
    :param num_owned: integer current owned
        number of shares for this asset
    :param close: float closing price of the asset
    :param balance: float amount of available capital
    :param commission: float for commission costs
    :param date: string trade date for that row usually
        ``COMMON_DATE_FORMAT`` (``YYYY-MM-DD``)
    :param minute: optional - string with the minute that the
        order was placed. format is
        ``COMMON_TICK_DATE_FORMAT`` (``YYYY-MM-DD HH:MM:SS``)
    :param details: dictionary for full row of values to review
        all sells after the algorithm finishes.
        (usually ``row.to_json()``)
    :param use_key: string for redis and s3 publishing of the algorithm
        result dictionary as a json-serialized dictionary
    :param shares: optional - integer number of shares to sell
        if None sell all ``num_owned`` shares at the ``close``.
    :param version: optional - version tracking integer
    :param auto_fill: optional - bool for not assuming the trade
        filled (default ``True``)
    :param is_live_trading: optional - bool for filling trades
        for live trading or for backtest tuning filled
        (default ``False`` which is backtest mode)
    :param backtest_shares_default: optional - integer for
        simulating shares during a backtest even if there
        are not enough funds
        (default ``10``)
    :param reason: optional - string for recording why the algo
        decided to sell for review after the algorithm finishes
    """
    status = ae_consts.TRADE_OPEN
    s3_bucket_name = ae_consts.ALGO_SELLS_S3_BUCKET_NAME
    s3_key = use_key
    redis_key = use_key
    s3_enabled = True
    redis_enabled = True

    cost_of_trade = None
    sell_price = 0.0
    new_shares = num_owned
    new_balance = balance
    created_date = None

    tradable_funds = balance - commission

    if num_owned == 0:
        status = ae_consts.TRADE_NO_SHARES_TO_SELL
    elif close > 0.1 and tradable_funds > 10.0:
        cost_of_trade = commission
        if shares:
            if shares > num_owned:
                shares = num_owned
        else:
            shares = num_owned
        # record proceeds net of the commission charged on the sale
        sell_price = ae_consts.to_f(val=(shares * close) - commission)
        if cost_of_trade > balance:
            status = ae_consts.TRADE_NOT_ENOUGH_FUNDS
        else:
            created_date = ae_utils.utc_now_str()
            if auto_fill:
                new_shares = num_owned - shares
                new_balance = ae_consts.to_f(balance + sell_price)
                status = ae_consts.TRADE_FILLED
            else:
                new_shares = shares
                new_balance = balance
    else:
        status = ae_consts.TRADE_NOT_ENOUGH_FUNDS

    order_dict = {
        'ticker': ticker,
        'status': status,
        'balance': new_balance,
        'shares': new_shares,
        'sell_price': sell_price,
        'prev_balance': balance,
        'prev_shares': num_owned,
        'close': close,
        'details': details,
        'reason': reason,
        'date': date,
        'minute': minute,
        'created': created_date,
        's3_bucket': s3_bucket_name,
        's3_key': s3_key,
        'redis_key': redis_key,
        's3_enabled': s3_enabled,
        'redis_enabled': redis_enabled,
        'version': version
    }

    use_date = minute
    if not use_date:
        use_date = date

    log.debug(f'{ticker} {use_date} sell '
              f'{ae_consts.get_status(status=order_dict["status"])} '
              f'order={ae_consts.ppj(order_dict)}')

    return order_dict
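
And the matching sell-side sketch, continuing the illustrative buy example from ``build_buy_order`` above.

sell = build_sell_order(
    ticker='SPY',
    num_owned=10,               # filled by the earlier buy
    close=280.0,
    balance=2294.0,
    commission=6.0,
    date='2018-11-05',
    details={},
    use_key='SPY_2018-11-05',
    shares=None)                # None sells all owned shares
# proceeds = (10 * 280.0) - 6.0 = 2794.0, so with auto_fill:
# sell['status'] == TRADE_FILLED, sell['shares'] == 0,
# sell['balance'] == 2294.0 + 2794.0 == 5088.0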
Example #5

    def process(self, algo_id, ticker, dataset):
        """process

        Derive custom indicator processing to determine buy and sell
        conditions before placing orders. Just implement your own
        ``process`` method.

        Please refer to the TA Lib guides for details on building indicators:

        - Overlap Studies
          https://mrjbq7.github.io/ta-lib/func_groups/overlap_studies.html
        - Momentum Indicators
          https://mrjbq7.github.io/ta-lib/func_groups/momentum_indicators.html
        - Volume Indicators
          https://mrjbq7.github.io/ta-lib/func_groups/volume_indicators.html
        - Volatility Indicators
          https://mrjbq7.github.io/ta-lib/func_groups/volatility_indicators.html
        - Price Transform
          https://mrjbq7.github.io/ta-lib/func_groups/price_transform.html
        - Cycle Indicators
          https://mrjbq7.github.io/ta-lib/func_groups/cycle_indicators.html
        - Pattern Recognition
          https://mrjbq7.github.io/ta-lib/func_groups/pattern_recognition.html
        - Statistic Functions
          https://mrjbq7.github.io/ta-lib/func_groups/statistic_functions.html
        - Math Transform
          https://mrjbq7.github.io/ta-lib/func_groups/math_transform.html
        - Math Operators
          https://mrjbq7.github.io/ta-lib/func_groups/math_operators.html

        :param algo_id: string - algo identifier label for debugging datasets
            during specific dates
        :param ticker: string - ticker
        :param dataset: dictionary of ``pandas.DataFrame(s)`` to process
        """

        # set the algo config indicator 'uses_data' to 'day' or 'minute'
        df_status, self.use_df = self.get_subscribed_dataset(dataset=dataset)

        self.willr_value = None
        if df_status == ae_consts.EMPTY:
            self.lg('process end - no data found')
            return

        # notice the self.num_points is now a member variable
        # because the BaseIndicator class's __init__
        # converts any self.config keys into useable
        # member variables automatically in your derived class
        self.lg('process - num_points={} df={}'.format(self.num_points,
                                                       len(self.use_df.index)))
        """
        real = WILLR(high, low, close, timeperiod=14)
        """
        num_records = len(self.use_df.index)
        if num_records > self.num_points:
            first_date = self.use_df['date'].iloc[0]
            end_date = self.use_df['date'].iloc[-1]
            start_row = num_records - self.num_points
            self.use_df = self.use_df[start_row:num_records].dropna(axis=0,
                                                                    how='any')

            if len(self.use_df.index) == 0:
                self.lg('empty dataframe={} on date={}'.format(
                    self.uses_data, end_date))
                return
            """
            for idx, row in self.use_df[start_row:-1].iterrows():
                high = row['high']
                low = row['low']
                open_val = row['open']
                close = row['close']
                row_date = row['date']
                self.lg(
                    '{} - WILLR(high={}, low={}, '
                    'close={}, period={})'.format(
                        row_date,
                        high,
                        low,
                        close,
                        self.num_points))
            """
            highs = self.use_df['high'].values
            lows = self.use_df['low'].values
            closes = self.use_df['close'].values
            willr_values = talib.WILLR(highs, lows, closes, self.num_points)
            self.willr_value = ae_consts.to_f(willr_values[-1])
            """
            Determine a buy or a sell as a label
            """

            self.is_buy = ae_consts.INDICATOR_IGNORE
            self.is_sell = ae_consts.INDICATOR_IGNORE

            if self.willr_value < self.buy_below:
                self.is_buy = ae_consts.INDICATOR_BUY

            if self.willr_value > self.sell_above:
                self.is_sell = ae_consts.INDICATOR_SELL

            self.lg('process end - {} to {} willr_value={} '
                    'buy_below={} is_buy={} '
                    'sell_above={} is_sell={}'.format(
                        first_date, end_date, self.willr_value, self.buy_below,
                        self.is_buy, self.sell_above, self.is_sell))
        else:
            self.lg('process end - willr={}'.format(self.willr_value))
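
As with the ADX config sketch earlier, here is a hypothetical config for this Williams %R indicator; the key names come from the member variables the method reads, while the schema and values are assumptions.

example_willr_config = {
    'uses_data': 'minute',  # or 'day'
    'num_points': 14,       # WILLR timeperiod
    'buy_below': -80.0,     # willr_value below this flags INDICATOR_BUY (oversold)
    'sell_above': -20.0,    # willr_value above this flags INDICATOR_SELL (overbought)
}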
Example #6

def prepare_pricing_dataset(
        self,
        work_dict):
    """prepare_pricing_dataset

    Prepare dataset for analysis. Supports loading dataset from
    s3 if not found in redis. Outputs prepared artifact as a csv
    to s3 and redis.

    :param work_dict: dictionary for key/values
    """

    label = 'prepare'

    log.info(
        'task - {} - start '
        'work_dict={}'.format(
            label,
            work_dict))

    initial_data = None

    ticker = ae_consts.TICKER
    ticker_id = ae_consts.TICKER_ID
    rec = {
        'ticker': None,
        'ticker_id': None,
        's3_enabled': True,
        'redis_enabled': True,
        's3_bucket': None,
        's3_key': None,
        'redis_key': None,
        'prepared_s3_key': None,
        'prepared_s3_bucket': None,
        'prepared_redis_key': None,
        'prepared_data': None,
        'prepared_size': None,
        'initial_data': None,
        'initial_size': None,
        'ignore_columns': None,
        'updated': None
    }
    res = build_result.build_result(
        status=ae_consts.NOT_RUN,
        err=None,
        rec=rec)

    try:
        ticker = work_dict.get(
            'ticker',
            ae_consts.TICKER)
        ticker_id = int(work_dict.get(
            'ticker_id',
            ae_consts.TICKER_ID))

        if not ticker:
            res = build_result.build_result(
                status=ae_consts.ERR,
                err='missing ticker',
                rec=rec)
            return res

        label = work_dict.get(
            'label',
            label)
        s3_key = work_dict.get(
            's3_key',
            None)
        s3_bucket_name = work_dict.get(
            's3_bucket',
            'pricing')
        s3_access_key = work_dict.get(
            's3_access_key',
            ae_consts.S3_ACCESS_KEY)
        s3_secret_key = work_dict.get(
            's3_secret_key',
            ae_consts.S3_SECRET_KEY)
        s3_region_name = work_dict.get(
            's3_region_name',
            ae_consts.S3_REGION_NAME)
        s3_address = work_dict.get(
            's3_address',
            ae_consts.S3_ADDRESS)
        s3_secure = work_dict.get(
            's3_secure',
            ae_consts.S3_SECURE) == '1'
        redis_address = work_dict.get(
            'redis_address',
            ae_consts.REDIS_ADDRESS)
        redis_key = work_dict.get(
            'redis_key',
            ae_consts.REDIS_KEY)
        redis_password = work_dict.get(
            'redis_password',
            ae_consts.REDIS_PASSWORD)
        redis_db = work_dict.get(
            'redis_db',
            None)
        if not redis_db:
            redis_db = ae_consts.REDIS_DB
        redis_expire = None
        if 'redis_expire' in work_dict:
            redis_expire = work_dict.get(
                'redis_expire',
                ae_consts.REDIS_EXPIRE)
        updated = work_dict.get(
            'updated',
            datetime.datetime.utcnow().strftime(
                '%Y_%m_%d_%H_%M_%S'))
        prepared_s3_key = work_dict.get(
            'prepared_s3_key',
            '{}_{}.csv'.format(
                ticker,
                updated))
        prepared_s3_bucket = work_dict.get(
            'prepared_s3_bucket',
            'prepared')
        prepared_redis_key = work_dict.get(
            'prepared_redis_key',
            'prepared')
        ignore_columns = work_dict.get(
            'ignore_columns',
            None)
        log.info(
            '{} redis enabled address={}@{} '
            'key={} prepare_s3={}:{} prepare_redis={} '
            'ignore_columns={}'.format(
                label,
                redis_address,
                redis_db,
                redis_key,
                prepared_s3_bucket,
                prepared_s3_key,
                prepared_redis_key,
                ignore_columns))
        redis_host = redis_address.split(':')[0]
        redis_port = redis_address.split(':')[1]

        enable_s3 = True
        enable_redis_publish = True

        rec['ticker'] = ticker
        rec['ticker_id'] = ticker_id
        rec['s3_bucket'] = s3_bucket_name
        rec['s3_key'] = s3_key
        rec['redis_key'] = redis_key
        rec['prepared_s3_key'] = prepared_s3_key
        rec['prepared_s3_bucket'] = prepared_s3_bucket
        rec['prepared_redis_key'] = prepared_redis_key
        rec['updated'] = updated
        rec['s3_enabled'] = enable_s3
        rec['redis_enabled'] = enable_redis_publish

        try:
            log.info(
                '{} connecting redis={}:{} '
                'db={} key={} '
                'updated={} expire={}'.format(
                    label,
                    redis_host,
                    redis_port,
                    redis_db,
                    redis_key,
                    updated,
                    redis_expire))
            rc = redis.Redis(
                host=redis_host,
                port=redis_port,
                password=redis_password,
                db=redis_db)
        except Exception as e:
            err = (
                '{} failed - redis connection to address={}@{} '
                'key={} ex={}'.format(
                    label,
                    redis_address,
                    redis_db,
                    redis_key,
                    e))
            res = build_result.build_result(
                status=ae_consts.ERR,
                err=err,
                rec=rec)
            return res
        # end of try/ex for connecting to redis

        initial_data_res = redis_get.get_data_from_redis_key(
            label=label,
            client=rc,
            key=redis_key)

        log.info(
            '{} get redis key={} status={} err={}'.format(
                label,
                redis_key,
                ae_consts.get_status(initial_data_res['status']),
                initial_data_res['err']))

        initial_data = initial_data_res['rec'].get(
            'data',
            None)

        if enable_s3 and not initial_data:

            log.info(
                '{} failed to find redis_key={} trying s3 '
                'from s3_key={} s3_bucket={} s3_address={}'.format(
                    label,
                    redis_key,
                    s3_key,
                    s3_bucket_name,
                    s3_address))

            get_from_s3_req = \
                api_requests.build_publish_from_s3_to_redis_request()

            get_from_s3_req['s3_enabled'] = enable_s3
            get_from_s3_req['s3_access_key'] = s3_access_key
            get_from_s3_req['s3_secret_key'] = s3_secret_key
            get_from_s3_req['s3_region_name'] = s3_region_name
            get_from_s3_req['s3_address'] = s3_address
            get_from_s3_req['s3_secure'] = s3_secure
            get_from_s3_req['s3_key'] = s3_key
            get_from_s3_req['s3_bucket'] = s3_bucket_name
            get_from_s3_req['redis_key'] = redis_key
            get_from_s3_req['label'] = (
                '{}-run_publish_from_s3_to_redis'.format(
                    label))

            log.info(
                '{} load from s3={} to '
                'redis={}'.format(
                    label,
                    s3_key,
                    redis_key))

            try:
                # run in synchronous mode:
                get_from_s3_req['celery_disabled'] = True
                task_res = s3_to_redis.run_publish_from_s3_to_redis(
                    get_from_s3_req)
                if task_res.get(
                        'status',
                        ae_consts.ERR) == ae_consts.SUCCESS:
                    log.info(
                        '{} loaded s3={}:{} '
                        'to redis={} retrying'.format(
                            label,
                            s3_bucket_name,
                            s3_key,
                            redis_key))
                    initial_data_res = redis_get.get_data_from_redis_key(
                        label=label,
                        client=rc,
                        key=redis_key)

                    log.info(
                        '{} get redis try=2 key={} status={} err={}'.format(
                            label,
                            redis_key,
                            ae_consts.get_status(initial_data_res['status']),
                            initial_data_res['err']))

                    initial_data = initial_data_res['rec'].get(
                        'data',
                        None)
                else:
                    err = (
                        '{} ERR failed loading from bucket={} '
                        's3_key={} to redis_key={} with res={}'.format(
                            label,
                            s3_bucket_name,
                            s3_key,
                            redis_key,
                            task_res))
                    log.error(err)
                    res = build_result.build_result(
                        status=ae_consts.ERR,
                        err=err,
                        rec=rec)
                    return res
            except Exception as e:
                err = (
                    '{} extract from s3 and publish to redis failed loading '
                    'data from bucket={} in '
                    's3_key={} with publish to redis_key={} '
                    'with ex={}'.format(
                        label,
                        s3_bucket_name,
                        s3_key,
                        redis_key,
                        e))
                log.error(err)
                res = build_result.build_result(
                    status=ae_consts.ERR,
                    err=err,
                    rec=rec)
                return res
            # end of try/ex for publishing from s3->redis
        # end of if enable_s3

        if not initial_data:
            err = (
                '{} did not find any data to prepare in redis_key={} or '
                's3_key={} in bucket={}'.format(
                    label,
                    redis_key,
                    s3_key,
                    s3_bucket_name))
            log.error(err)
            res = build_result.build_result(
                status=ae_consts.ERR,
                err=err,
                rec=rec)
            return res

        initial_data_num_chars = len(str(initial_data))
        initial_size_value = None
        initial_size_str = None
        if initial_data_num_chars < ae_consts.PREPARE_DATA_MIN_SIZE:
            err = (
                '{} not enough data={} in redis_key={} or '
                's3_key={} in bucket={}'.format(
                    label,
                    initial_data_num_chars,
                    redis_key,
                    s3_key,
                    s3_bucket_name))
            log.error(err)
            res = build_result.build_result(
                status=ae_consts.ERR,
                err=err,
                rec=rec)
            return res
        else:
            initial_size_value = initial_data_num_chars / 1024000
            initial_size_str = ae_consts.to_f(initial_size_value)
            if ae_consts.ev('DEBUG_PREPARE', '0') == '1':
                log.info(
                    '{} initial - redis_key={} data={}'.format(
                        label,
                        redis_key,
                        str(initial_data)))
            else:
                log.info(
                    '{} initial - redis_key={} data size={} MB'.format(
                        label,
                        redis_key,
                        initial_size_str))
        # end of trying to get initial_data

        rec['initial_data'] = initial_data
        rec['initial_size'] = initial_data_num_chars

        prepare_data = None

        try:
            if ae_consts.ev('DEBUG_PREPARE', '0') == '1':
                log.info(
                    '{} data={} - flatten - {} MB from '
                    'redis_key={}'.format(
                        label,
                        ae_consts.ppj(initial_data),
                        initial_size_str,
                        redis_key))
            else:
                log.info(
                    '{} flatten - {} MB from '
                    'redis_key={}'.format(
                        label,
                        initial_size_str,
                        redis_key))
            prepare_data = dict_to_csv.flatten_dict(
                data=initial_data)
        except Exception as e:
            prepare_data = None
            err = (
                '{} flatten - convert to csv failed with ex={} '
                'redis_key={}'.format(
                    label,
                    e,
                    redis_key))
            log.error(err)
            res = build_result.build_result(
                status=ae_consts.ERR,
                err=err,
                rec=rec)
            return res
        # end of try/ex

        if not prepare_data:
            err = (
                '{} flatten - did not return any data from redis_key={} '
                'or s3_key={} in bucket={}'.format(
                    label,
                    redis_key,
                    s3_key,
                    s3_bucket_name))
            log.error(err)
            res = build_result.build_result(
                status=ae_consts.ERR,
                err=err,
                rec=rec)
            return res
        # end of prepare_data

        prepare_data_num_chars = len(str(prepare_data))
        prepare_size_value = None

        if prepare_data_num_chars < ae_consts.PREPARE_DATA_MIN_SIZE:
            err = (
                '{} prepare - there is not enough data={} in redis_key={}'
                ''.format(
                    label,
                    prepare_data_num_chars,
                    redis_key))
            log.error(err)
            res = build_result.build_result(
                status=ae_consts.ERR,
                err=err,
                rec=rec)
            return res
        else:
            prepare_size_value = prepare_data_num_chars / 1024000
            prepare_size_str = ae_consts.to_f(prepare_size_value)
            if ae_consts.ev('DEBUG_PREPARE', '0') == '1':
                log.info(
                    '{} prepare - redis_key={} data={}'.format(
                        label,
                        redis_key,
                        ae_consts.ppj(prepare_data)))
            else:
                log.info(
                    '{} prepare - redis_key={} data size={} MB'.format(
                        label,
                        redis_key,
                        prepare_size_str))
        # end of checking the size of the prepared data

        rec['prepared_data'] = prepare_data
        rec['prepared_size'] = prepare_data_num_chars

        res = build_result.build_result(
            status=ae_consts.SUCCESS,
            err=None,
            rec=rec)

        rc = None

    except Exception as e:
        res = build_result.build_result(
            status=ae_consts.ERR,
            err=(
                'failed - prepare_pricing_dataset '
                'dict={} with ex={}').format(
                    work_dict,
                    e),
            rec=rec)
        log.error(
            '{} - {}'.format(
                label,
                res['err']))
    # end of try/ex

    log.info(
        'task - prepare_pricing_dataset done - '
        '{} - status={}'.format(
            label,
            ae_consts.get_status(res['status'])))

    return get_task_results.get_task_results(
        work_dict=work_dict,
        result=res)
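
Here is a hedged sketch of the ``work_dict`` keys ``prepare_pricing_dataset`` consumes; the defaults shown are the ones in the code above, and the concrete values are illustrative.

work_dict = {
    'ticker': 'SPY',
    'redis_key': 'SPY_2018-11-02',            # dataset is loaded from redis first
    's3_bucket': 'pricing',                   # s3 fallback if the redis key is empty
    's3_key': 'SPY_2018-11-02',
    'prepared_s3_bucket': 'prepared',         # csv artifact destinations
    'prepared_s3_key': 'SPY_2018_11_02.csv',
    'prepared_redis_key': 'prepared',
    'ignore_columns': None,                   # optional list of columns to drop
}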
Example #7

def publish_from_s3_to_redis(self, work_dict):
    """publish_from_s3_to_redis

    Publish Ticker Data from S3 to Redis

    :param work_dict: dictionary for key/values
    """

    label = 'pub-s3-to-redis'

    log.info('task - {} - start work_dict={}'.format(label, work_dict))

    ticker = ae_consts.TICKER
    ticker_id = ae_consts.TICKER_ID
    rec = {
        'ticker': None,
        'ticker_id': None,
        's3_enabled': True,
        'redis_enabled': True,
        's3_bucket': None,
        's3_key': None,
        'redis_key': None,
        'updated': None
    }
    res = build_result.build_result(status=ae_consts.NOT_RUN,
                                    err=None,
                                    rec=rec)

    try:
        ticker = work_dict.get('ticker', ae_consts.TICKER)
        ticker_id = int(work_dict.get('ticker_id', ae_consts.TICKER_ID))

        if not ticker:
            res = build_result.build_result(status=ae_consts.ERR,
                                            err='missing ticker',
                                            rec=rec)
            return res

        s3_key = work_dict.get('s3_key', None)
        s3_bucket_name = work_dict.get('s3_bucket', 'pricing')
        redis_key = work_dict.get('redis_key', None)
        updated = work_dict.get('updated', None)
        serializer = work_dict.get('serializer', 'json')
        encoding = work_dict.get('encoding', 'utf-8')
        label = work_dict.get('label', label)

        enable_s3_read = True
        enable_redis_publish = True

        rec['ticker'] = ticker
        rec['ticker_id'] = ticker_id
        rec['s3_bucket'] = s3_bucket_name
        rec['s3_key'] = s3_key
        rec['redis_key'] = redis_key
        rec['updated'] = updated
        rec['s3_enabled'] = enable_s3_read
        rec['redis_enabled'] = enable_redis_publish

        data = None

        if enable_s3_read:

            log.info('{} parsing s3 values'.format(label))
            access_key = work_dict.get('s3_access_key',
                                       ae_consts.S3_ACCESS_KEY)
            secret_key = work_dict.get('s3_secret_key',
                                       ae_consts.S3_SECRET_KEY)
            region_name = work_dict.get('s3_region_name',
                                        ae_consts.S3_REGION_NAME)
            service_address = work_dict.get('s3_address', ae_consts.S3_ADDRESS)
            secure = work_dict.get('s3_secure', ae_consts.S3_SECURE) == '1'

            endpoint_url = 'http://{}'.format(service_address)
            if secure:
                endpoint_url = 'https://{}'.format(service_address)

            log.info('{} building s3 endpoint_url={} '
                     'region={}'.format(label, endpoint_url, region_name))

            s3 = boto3.resource(
                's3',
                endpoint_url=endpoint_url,
                aws_access_key_id=access_key,
                aws_secret_access_key=secret_key,
                region_name=region_name,
                config=boto3.session.Config(signature_version='s3v4'))

            try:
                log.info('{} checking bucket={} exists'.format(
                    label, s3_bucket_name))
                if s3.Bucket(s3_bucket_name) not in s3.buckets.all():
                    log.info('{} creating bucket={}'.format(
                        label, s3_bucket_name))
                    s3.create_bucket(Bucket=s3_bucket_name)
            except Exception as e:
                log.info('{} failed creating bucket={} '
                         'with ex={}'.format(label, s3_bucket_name, e))
            # end of try/ex for creating bucket

            try:
                log.info('{} reading to s3={}/{} '
                         'updated={}'.format(label, s3_bucket_name, s3_key,
                                             updated))
                data = s3_read_contents_from_key.s3_read_contents_from_key(
                    s3=s3,
                    s3_bucket_name=s3_bucket_name,
                    s3_key=s3_key,
                    encoding=encoding,
                    convert_as_json=True)

                initial_size_value = \
                    len(str(data)) / 1024000
                initial_size_str = ae_consts.to_f(initial_size_value)
                if ae_consts.ev('DEBUG_S3', '0') == '1':
                    log.info('{} read s3={}/{} data={}'.format(
                        label, s3_bucket_name, s3_key, ae_consts.ppj(data)))
                else:
                    log.info('{} read s3={}/{} data size={} MB'.format(
                        label, s3_bucket_name, s3_key, initial_size_str))
            except Exception as e:
                err = ('{} failed reading bucket={} '
                       'key={} ex={}').format(label, s3_bucket_name, s3_key, e)
                log.error(err)
                res = build_result.build_result(status=ae_consts.NOT_RUN,
                                                err=err,
                                                rec=rec)
            # end of try/ex for creating bucket
        else:
            log.info('{} SKIP S3 read bucket={} '
                     'key={}'.format(label, s3_bucket_name, s3_key))
        # end of if enable_s3_read

        if data and enable_redis_publish:
            redis_address = work_dict.get('redis_address',
                                          ae_consts.REDIS_ADDRESS)
            redis_key = work_dict.get('redis_key', ae_consts.REDIS_KEY)
            redis_password = work_dict.get('redis_password',
                                           ae_consts.REDIS_PASSWORD)
            redis_db = work_dict.get('redis_db', None)
            if not redis_db:
                redis_db = ae_consts.REDIS_DB
            redis_expire = None
            if 'redis_expire' in work_dict:
                redis_expire = work_dict.get('redis_expire',
                                             ae_consts.REDIS_EXPIRE)
            log.info('redis enabled address={}@{} '
                     'key={}'.format(redis_address, redis_db, redis_key))
            redis_host = redis_address.split(':')[0]
            redis_port = redis_address.split(':')[1]
            try:
                if ae_consts.ev('DEBUG_REDIS', '0') == '1':
                    log.info('{} publishing redis={}:{} '
                             'db={} key={} '
                             'updated={} expire={} '
                             'data={}'.format(label, redis_host, redis_port,
                                              redis_db, redis_key,
                                              updated, redis_expire,
                                              ae_consts.ppj(data)))
                else:
                    log.info('{} publishing redis={}:{} '
                             'db={} key={} '
                             'updated={} expire={}'.format(
                                 label, redis_host, redis_port, redis_db,
                                 redis_key, updated, redis_expire))
                # end of if/else

                rc = redis.Redis(host=redis_host,
                                 port=redis_port,
                                 password=redis_password,
                                 db=redis_db)

                redis_set_res = redis_set.set_data_in_redis_key(
                    label=label,
                    client=rc,
                    key=redis_key,
                    data=data,
                    serializer=serializer,
                    encoding=encoding,
                    expire=redis_expire,
                    px=None,
                    nx=False,
                    xx=False)

                log.info('{} redis_set status={} err={}'.format(
                    label, ae_consts.get_status(redis_set_res['status']),
                    redis_set_res['err']))

            except Exception as e:
                log.error('{} failed - redis publish to '
                          'key={} ex={}'.format(label, redis_key, e))
            # end of try/ex for creating bucket
        else:
            log.info('{} SKIP REDIS publish '
                     'key={}'.format(label, redis_key))
        # end of if enable_redis_publish

        res = build_result.build_result(status=ae_consts.SUCCESS,
                                        err=None,
                                        rec=rec)

    except Exception as e:
        res = build_result.build_result(
            status=ae_consts.ERR,
            err=('failed - publish_from_s3_to_redis '
                 'dict={} with ex={}').format(work_dict, e),
            rec=rec)
        log.error('{} - {}'.format(label, res['err']))
    # end of try/ex

    log.info('task - publish_from_s3_to_redis done - '
             '{} - status={}'.format(label,
                                     ae_consts.get_status(res['status'])))

    return get_task_results.get_task_results(work_dict=work_dict, result=res)
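
All of these tasks gate verbose payload logging on environment variables via ``ae_consts.ev``. Below is a minimal sketch of that pattern, assuming ``ev`` is a thin ``os.getenv`` wrapper (which the call sites suggest, but is not confirmed here).

import os

def ev(key, default):
    # assumed behavior of ae_consts.ev based on the call sites above
    return os.getenv(key, default)

# export DEBUG_S3=1, DEBUG_REDIS=1 or DEBUG_PREPARE=1 before starting
# the workers to switch from size-only logging to full-payload logging
if ev('DEBUG_S3', '0') == '1':
    print('verbose s3 logging enabled')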
Example #8

    def process(self, algo_id, ticker, dataset):
        """process

        Derive custom indicator processing to determine buy and sell
        conditions before placing orders. Just implement your own
        ``process`` method.

        Please refer to the TA Lib guides for details on building indicators:

        - Overlap Studies
          https://mrjbq7.github.io/ta-lib/func_groups/overlap_studies.html
        - Momentum Indicators
          https://mrjbq7.github.io/ta-lib/func_groups/momentum_indicators.html
        - Volume Indicators
          https://mrjbq7.github.io/ta-lib/func_groups/volume_indicators.html
        - Volatility Indicators
          https://mrjbq7.github.io/ta-lib/func_groups/volatility_indicators.html
        - Price Transform
          https://mrjbq7.github.io/ta-lib/func_groups/price_transform.html
        - Cycle Indicators
          https://mrjbq7.github.io/ta-lib/func_groups/cycle_indicators.html
        - Pattern Recognition
          https://mrjbq7.github.io/ta-lib/func_groups/pattern_recognition.html
        - Statistic Functions
          https://mrjbq7.github.io/ta-lib/func_groups/statistic_functions.html
        - Math Transform
          https://mrjbq7.github.io/ta-lib/func_groups/math_transform.html
        - Math Operators
          https://mrjbq7.github.io/ta-lib/func_groups/math_operators.html

        :param algo_id: string - algo identifier label for debugging datasets
            during specific dates
        :param ticker: string - ticker
        :param dataset: dictionary of ``pandas.DataFrame(s)`` to process
        """

        # set the algo config indicator 'uses_data' to 'day' or 'minute'
        df_status, self.use_df = self.get_subscribed_dataset(dataset=dataset)

        if df_status == ae_consts.EMPTY:
            self.lg('process end - no data found')
            return

        # notice the self.num_points is now a member variable
        # because the BaseIndicator class's __init__
        # converts any self.config keys into useable
        # member variables automatically in your derived class
        self.lg(f'process - num_points={self.num_points} '
                f'df={len(self.use_df.index)}')
        """
        upperband, middleband, lowerband = BBANDS(
            close,
            timeperiod=5,
            nbdevup=2,
            nbdevdn=2,
            matype=0)
        """
        num_records = len(self.use_df.index)
        if num_records > self.num_points:
            cur_value = self.use_df['close'].iloc[-1]
            first_date = self.use_df['date'].iloc[0]
            end_date = self.use_df['date'].iloc[-1]
            start_row = num_records - self.num_points
            self.use_df = self.use_df[start_row:num_records]
            """
            for idx, row in self.use_df[start_row:-1].iterrows():
                high = row['high']
                low = row['low']
                open_val = row['open']
                close = row['close']
                row_date = row['date']
                self.lg(
                    f'{row_date} - high={high}, low={low}, '
                    f'close={close}, period={self.num_points}')
            """
            closes = self.use_df['close'].values

            # TA-Lib's BBANDS takes the price series as its
            # first positional argument
            (upperbands, middlebands,
             lowerbands) = talib.BBANDS(closes,
                                        timeperiod=self.num_points,
                                        nbdevup=self.upper_stdev,
                                        nbdevdn=self.lower_stdev,
                                        matype=self.matype)
            """
            Determine a buy or a sell as a label
            """

            self.upperband = ae_consts.to_f(upperbands[-1])
            self.middleband = ae_consts.to_f(middlebands[-1])
            self.lowerband = ae_consts.to_f(lowerbands[-1])

            if cur_value <= 0:
                self.lg(f'invalid current_value={cur_value}')
                return

            self.amount_to_low = ae_consts.to_f(cur_value - self.lowerband)
            self.amount_to_high = ae_consts.to_f(self.upperband - cur_value)

            if self.amount_to_low < 0:
                self.percent_to_low = -1 * ae_consts.to_f(
                    self.amount_to_low / cur_value * 100.0)
            else:
                self.percent_to_low = ae_consts.to_f(self.amount_to_low /
                                                     cur_value * 100.0)

            if self.amount_to_high < 0:
                self.percent_to_high = -1 * ae_consts.to_f(
                    self.amount_to_high / cur_value * 100.0)
            else:
                self.percent_to_high = ae_consts.to_f(self.amount_to_high /
                                                      cur_value * 100.0)

            self.is_buy = ae_consts.INDICATOR_IGNORE
            self.is_sell = ae_consts.INDICATOR_IGNORE

            if self.percent_to_low > self.buy_below_percent:
                self.is_buy = ae_consts.INDICATOR_BUY
            elif self.percent_to_high > self.sell_above_percent:
                self.is_sell = ae_consts.INDICATOR_SELL

            self.lg(f'process end - {first_date} to {end_date} '
                    f'buy_below={self.buy_below_percent} '
                    f'is_buy={self.is_buy} '
                    f'sell_above={self.sell_above_percent} '
                    f'is_sell={self.is_sell}')
        else:
            self.lg('process end')
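
# A standalone sketch of the Bollinger Band math used in process()
# above, calling talib.BBANDS directly on synthetic closes; the
# num_points/upper_stdev/lower_stdev values mirror the config-driven
# member variables and are assumptions here
import numpy as np
import talib

# synthetic closing prices (assumed data) long enough for the lookback
closes = np.array(
    [10.0, 10.2, 10.1, 10.4, 10.6, 10.5, 10.8, 11.0, 10.9, 11.2,
     11.1, 11.3, 11.5, 11.4, 11.6, 11.8, 11.7, 12.0, 12.1, 12.3],
    dtype=np.float64)

num_points = 15    # mirrors self.num_points
upper_stdev = 2.0  # mirrors self.upper_stdev
lower_stdev = 2.0  # mirrors self.lower_stdev

upperbands, middlebands, lowerbands = talib.BBANDS(
    closes,
    timeperiod=num_points,
    nbdevup=upper_stdev,
    nbdevdn=lower_stdev,
    matype=0)

cur_value = closes[-1]
# distance from the latest close to each band, as in process() above
amount_to_low = cur_value - lowerbands[-1]
amount_to_high = upperbands[-1] - cur_value
print(f'close={cur_value} '
      f'percent_to_low={abs(amount_to_low) / cur_value * 100.0:.2f} '
      f'percent_to_high={abs(amount_to_high) / cur_value * 100.0:.2f}')
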
def create_column_dnn(predict_feature='close',
                      ticker='',
                      debug=False,
                      use_epochs=10,
                      use_batch_size=10,
                      use_test_size=0.1,
                      use_random_state=1,
                      use_seed=7,
                      use_shuffle=False,
                      model_verbose=True,
                      fit_verbose=True,
                      use_scalers=True,
                      df=[],
                      dnn_config={},
                      compile_config={},
                      s3_bucket='',
                      s3_key='',
                      send_plots_to_slack=False):
    """create_column_dnn

    For scaler-normalized datasets this will
    compile numeric columns and ignore string/non-numeric
    columns as training and test feature columns

    :param predict_feature: Column to create DNN with
    :param ticker: Ticker being used
    :param debug: Debug mode
    :param use_epochs: Epochs times to use
    :param use_batch_size: Batch size to use
    :param use_test_size: Test size to use
    :param use_random_state: Random state to train with
    :param use_seed: Seed used to build scalar datasets
    :param use_shuffle: To shuffle the regression estimator or not
    :param model_verbose: To use a verbose Keras regression model or not
    :param fit_verbose: To use a verbose fitting of the regression estimator
    :param use_scalers: To build using scalars or not
    :param df: Ticker dataset
    :param dnn_config: Deep Neural Net keras model json to build the model
    :param compile_config: Deep Neural Net dictionary of compile options
    :param s3_bucket: S3 Bucket
    :param s3_key: S3 Key
    :param send_plots_to_slack: optional - send the generated plots
        to Slack when ``True``
    """

    df_filter = (df[f'{predict_feature}'] >= 0.1)
    first_date = df[df_filter]['date'].iloc[0]
    end_date = df[df_filter]['date'].iloc[-1]

    if 'minute' in df:
        found_valid_minute = df['minute'].iloc[0]
        if found_valid_minute:
            first_date = df[df_filter]['minute'].iloc[0]
            end_date = df[df_filter]['minute'].iloc[-1]

    num_rows = len(df.index)
    log.info(f'prepared training data from '
             f'history {s3_bucket}@{s3_key} '
             f'rows={num_rows} '
             f'dates: {first_date} to {end_date}')

    if debug:
        for i, r in df.iterrows():
            log.info(f'{r["minute"]} - {r[predict_feature]}')
        # end of for loop

        log.info(f'columns: {df.columns.values}')
        log.info(f'rows: {len(df.index)}')
    # end of debug

    use_all_features = use_scalers
    all_features = []
    train_features = []
    if use_all_features:
        for c in df.columns.values:
            if (pandas_types.is_numeric_dtype(df[c])
                    and c not in train_features):
                if c != predict_feature:
                    train_features.append(c)
                if c not in all_features:
                    all_features.append(c)

        dnn_config['layers'][-1]['activation'] = 'sigmoid'
    else:
        # 'choices' is assumed to be a module-level list of the
        # numeric pricing columns the module exposes for training
        temp_choices = choices[:]
        temp_choices.remove(predict_feature)
        train_features = ['open']
        train_features.extend(temp_choices)
        all_features = [f'{predict_feature}'] + train_features

    num_features = len(train_features)
    features_and_minute = ['minute'] + all_features

    log.info('converting columns to floats')

    timeseries_df = df[df_filter][features_and_minute].fillna(-10000.0)
    converted_df = timeseries_df[all_features].astype('float32')

    train_df = None
    test_df = None
    scaler_predictions = None
    if use_all_features:
        scaler_res = build_scaler_datasets.build_datasets_using_scalers(
            train_features=train_features,
            test_feature=predict_feature,
            df=converted_df,
            test_size=use_test_size,
            seed=use_seed)
        if scaler_res['status'] != ae_consts.SUCCESS:
            log.error('failed to build scaler train and test datasets')
            return
        train_df = scaler_res['scaled_train_df']
        test_df = scaler_res['scaled_test_df']
        x_train = scaler_res['x_train']
        x_test = scaler_res['x_test']
        y_train = scaler_res['y_train']
        y_test = scaler_res['y_test']
        scaler_predictions = scaler_res['scaler_test']
    else:
        log.info('building train and test dfs from subset of features')
        train_df = converted_df[train_features]
        test_df = converted_df[[predict_feature]]

        log.info(f'splitting {num_rows} into test and training '
                 f'size={use_test_size}')

        (x_train, x_test, y_train,
         y_test) = tt_split.train_test_split(train_df,
                                             test_df,
                                             test_size=use_test_size,
                                             random_state=use_random_state)

    log.info(f'split breakdown - '
             f'x_train={len(x_train)} '
             f'x_test={len(x_test)} '
             f'y_train={len(y_train)} '
             f'y_test={len(y_test)}')

    def set_model():
        return build_dnn.build_regression_dnn(num_features=num_features,
                                              compile_config=compile_config,
                                              model_config=dnn_config)

    estimator = keras_scikit.KerasRegressor(build_fn=set_model,
                                            epochs=use_epochs,
                                            batch_size=use_batch_size,
                                            verbose=model_verbose)

    log.info(f'fitting estimator - '
             f'predicting={predict_feature} '
             f'epochs={use_epochs} '
             f'batch={use_batch_size} '
             f'test_size={use_test_size} '
             f'seed={use_seed}')

    # note: this validates against the training data itself; pass
    # (x_test, y_test) instead to monitor held-out error during training
    history = estimator.fit(x_train,
                            y_train,
                            validation_data=(x_train, y_train),
                            epochs=use_epochs,
                            batch_size=use_batch_size,
                            shuffle=use_shuffle,
                            verbose=fit_verbose)

    created_on = (datetime.datetime.now().strftime(
        ae_consts.COMMON_TICK_DATE_FORMAT))
    plot_fit_history.plot_dnn_fit_history(
        df=history.history,
        title=(f'DNN Errors Over Training Epochs\n'
               f'Training Data: s3://{s3_bucket}/{s3_key}\n'
               f'Created: {created_on}'),
        red='mean_squared_error',
        blue='mean_absolute_error',
        green='acc',
        orange='cosine_proximity',
        send_plots_to_slack=send_plots_to_slack)

    # on production use newly fetched pricing data
    # not the training data
    predict_records = []
    if use_all_features:
        prediction_res = build_scaler_df.build_scaler_dataset_from_df(
            df=converted_df[train_features])
        if prediction_res['status'] == ae_consts.SUCCESS:
            predict_records = prediction_res['df']
    else:
        predict_records = converted_df[train_features]

    log.info(f'making predictions: {len(predict_records)}')

    predictions = estimator.model.predict(predict_records, verbose=True)

    # np.nan is rejected as a print threshold by recent numpy
    # releases; sys.maxsize keeps the full array printable
    # (assumes 'import sys' at the top of the module)
    np.set_printoptions(threshold=sys.maxsize)
    indexes = tf.argmax(predictions, axis=1)
    data = {}
    data['indexes'] = indexes
    price_predictions = []
    if use_all_features and scaler_predictions:
        price_predictions = [
            ae_consts.to_f(x) for x in scaler_predictions.inverse_transform(
                predictions.reshape(-1, 1)).reshape(-1)
        ]
    else:
        price_predictions = [ae_consts.to_f(x[0]) for x in predictions]

    timeseries_df[f'predicted_{predict_feature}'] = price_predictions
    timeseries_df['error'] = (timeseries_df[f'{predict_feature}'] -
                              timeseries_df[f'predicted_{predict_feature}'])

    output_features = [
        'minute', f'{predict_feature}', f'predicted_{predict_feature}', 'error'
    ]

    date_str = (f'Dates: {timeseries_df["minute"].iloc[0]} '
                f'to '
                f'{timeseries_df["minute"].iloc[-1]}')

    log.info(f'historical {predict_feature} with predicted {predict_feature}: '
             f'{timeseries_df[output_features]}')
    log.info(date_str)
    log.info(f'Columns: {output_features}')

    average_error = ae_consts.to_f(timeseries_df['error'].sum() /
                                   len(timeseries_df.index))

    log.info(f'Average historical {predict_feature} '
             f'vs predicted {predict_feature} error: '
             f'{average_error}')

    log.info(
        f'plotting historical {predict_feature} vs predicted {predict_feature}'
        f' from training with columns={num_features}')

    ts_filter = (timeseries_df[f'{predict_feature}'] > 0.1)
    latest_feature = (timeseries_df[ts_filter][f'{predict_feature}'].iloc[-1])
    latest_predicted_feature = (
        timeseries_df[ts_filter][f'predicted_{predict_feature}'].iloc[-1])

    log.info(f'{end_date} {predict_feature}={latest_feature} '
             f'with '
             f'predicted_{predict_feature}={latest_predicted_feature}')

    plot_trading_history.plot_trading_history(
        title=(f'{ticker} - Historical {predict_feature.title()} vs '
               f'Predicted {predict_feature.title()}\n'
               f'Number of Training Features: {num_features}\n'
               f'{date_str}'),
        df=timeseries_df,
        red=f'{predict_feature}',
        blue=f'predicted_{predict_feature}',
        green=None,
        orange=None,
        date_col='minute',
        date_format='%d %H:%M:%S\n%b',
        xlabel='minute',
        ylabel=(f'Historical {predict_feature.title()} vs '
                f'Predicted {predict_feature.title()}'),
        df_filter=ts_filter,
        width=8.0,
        height=8.0,
        show_plot=True,
        dropna_for_all=False,
        send_plots_to_slack=send_plots_to_slack)
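
# A hedged usage sketch for create_column_dnn; the dnn_config and
# compile_config schemas below are assumptions inferred from the keys
# referenced above (dnn_config['layers'][-1]['activation'] is mutated
# when scalers are used), and minute_df is a hypothetical minute-level
# pricing DataFrame
example_dnn_config = {
    'layers': [
        {'num_neurons': 200, 'init': 'uniform', 'activation': 'relu'},
        {'num_neurons': 1, 'init': 'uniform', 'activation': 'linear'}
    ]
}
example_compile_config = {
    'loss': 'mse',
    'optimizer': 'adam',
    'metrics': ['mse', 'mae']
}
create_column_dnn(
    predict_feature='close',
    ticker='SPY',
    df=minute_df,
    dnn_config=example_dnn_config,
    compile_config=example_compile_config,
    use_epochs=10,
    use_batch_size=32,
    s3_bucket='YOUR_BUCKET',
    s3_key='SPY_latest',
    send_plots_to_slack=False)
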
Example #10
def build_trade_history_entry(ticker,
                              num_owned,
                              close,
                              balance,
                              commission,
                              date,
                              trade_type,
                              algo_start_price,
                              original_balance,
                              minute=None,
                              high=None,
                              low=None,
                              open_val=None,
                              volume=None,
                              ask=None,
                              bid=None,
                              today_high=None,
                              today_low=None,
                              today_open_val=None,
                              today_close=None,
                              today_volume=None,
                              stop_loss=None,
                              trailing_stop_loss=None,
                              buy_hold_units=None,
                              sell_hold_units=None,
                              spread_exp_date=None,
                              spread_id=None,
                              low_strike=None,
                              low_bid=None,
                              low_ask=None,
                              low_volume=None,
                              low_open_int=None,
                              low_delta=None,
                              low_gamma=None,
                              low_theta=None,
                              low_vega=None,
                              low_rho=None,
                              low_impl_vol=None,
                              low_intrinsic=None,
                              low_extrinsic=None,
                              low_theo_price=None,
                              low_theo_volatility=None,
                              low_max_covered=None,
                              low_exp_date=None,
                              high_strike=None,
                              high_bid=None,
                              high_ask=None,
                              high_volume=None,
                              high_open_int=None,
                              high_delta=None,
                              high_gamma=None,
                              high_theta=None,
                              high_vega=None,
                              high_rho=None,
                              high_impl_vol=None,
                              high_intrinsic=None,
                              high_extrinsic=None,
                              high_theo_price=None,
                              high_theo_volatility=None,
                              high_max_covered=None,
                              high_exp_date=None,
                              prev_balance=None,
                              prev_num_owned=None,
                              total_buys=None,
                              total_sells=None,
                              buy_triggered=None,
                              buy_strength=None,
                              buy_risk=None,
                              sell_triggered=None,
                              sell_strength=None,
                              sell_risk=None,
                              num_indicators_buy=None,
                              num_indicators_sell=None,
                              min_buy_indicators=None,
                              min_sell_indicators=None,
                              net_gain=None,
                              net_value=None,
                              ds_id=None,
                              note=None,
                              err=None,
                              entry_spread_dict=None,
                              version=1):
    """build_trade_history_entry

    Build a dictionary for tracking an algorithm profitability per ticker
    and for ``TRADE_SHARES``, ``TRADE_VERTICAL_BULL_SPREAD``, or
    ``TRADE_VERTICAL_BEAR_SPREAD`` trading types.

    .. note:: setting the ``minute`` is required to build
        a minute-by-minute ``Trading History``

    :param ticker: string ticker or symbol
    :param num_owned: integer current owned
        number of ``shares`` for this asset or number of
        currently owned ``contracts`` for an options
        spread.
    :param close: float ``close`` price of the
        underlying asset
    :param balance: float amount of available capital
    :param commission: float for commission costs
    :param date: string trade date for that row usually
        ``COMMON_DATE_FORMAT`` (``YYYY-MM-DD``)
    :param minute: optional - string for recording the minute
        the trade was placed, using the format
        ``COMMON_TICK_DATE_FORMAT`` (``YYYY-MM-DD HH:MM:SS``).
        This is optional if the algorithm is set up to
        trade using a ``day`` value for timeseries.
    :param trade_type: type of the trade - supported values:
            ``TRADE_SHARES``,
            ``TRADE_VERTICAL_BULL_SPREAD``,
            ``TRADE_VERTICAL_BEAR_SPREAD``
    :param algo_start_price: float starting close/contract price
        for this algo
    :param original_balance: float starting original account
        balance for this algo
    :param high: optional - float underlying stock asset ``high`` price
    :param low: optional - float underlying stock asset ``low`` price
    :param open_val: optional - float underlying stock asset ``open`` price
    :param volume: optional - integer underlying stock asset ``volume``
    :param ask: optional - float ``ask`` price of the
        stock (for buying ``shares``)
    :param bid: optional - float ``bid`` price of the
        stock (for selling ``shares``)
    :param today_high: optional - float ``high`` from
        the daily dataset (if available)
    :param today_low: optional - float ``low`` from
        the daily dataset (if available)
    :param today_open_val: optional - float ``open`` from
        the daily dataset (if available)
    :param today_close: optional - float ``close`` from
        the daily dataset (if available)
    :param today_volume: optional - float ``volume`` from
        the daily dataset (if available)
    :param stop_loss: optional - float ``stop_loss`` price of the
        stock/spread (for selling ``shares`` vs ``contracts``)
    :param trailing_stop_loss: optional - float ``trailing_stop_loss``
        price of the stock/spread (for selling ``shares`` vs ``contracts``)
    :param buy_hold_units: optional - number of units
        to hold buys - helps with algorithm tuning
    :param sell_hold_units: optional - number of units
        to hold sells - helps with algorithm tuning
    :param spread_exp_date: optional - string spread contract
        expiration date (``COMMON_DATE_FORMAT`` (``YYYY-MM-DD``)
    :param spread_id: optional - spread identifier for reviewing
        spread performances
    :param low_strike: optional
        - only for vertical bull/bear trade types
        ``low leg strike price`` of the spread
    :param low_bid: optional
        - only for vertical bull/bear trade types
        ``low leg bid`` of the spread
    :param low_ask: optional
        - only for vertical bull/bear trade types
        ``low leg ask`` of the spread
    :param low_volume: optional
        - only for vertical bull/bear trade types
        ``low leg volume`` of the spread
    :param low_open_int: optional
        - only for vertical bull/bear trade types
        ``low leg open interest`` of the spread
    :param low_delta: optional
        - only for vertical bull/bear trade types
        ``low leg delta`` of the spread
    :param low_gamma: optional
        - only for vertical bull/bear trade types
        ``low leg gamma`` of the spread
    :param low_theta: optional
        - only for vertical bull/bear trade types
        ``low leg theta`` of the spread
    :param low_vega: optional
        - only for vertical bull/bear trade types
        ``low leg vega`` of the spread
    :param low_rho: optional
        - only for vertical bull/bear trade types
        ``low leg rho`` of the spread
    :param low_impl_vol: optional
        - only for vertical bull/bear trade types
        ``low leg implied volatility`` of the spread
    :param low_intrinsic: optional
        - only for vertical bull/bear trade types
        ``low leg intrinsic`` of the spread
    :param low_extrinsic: optional
        - only for vertical bull/bear trade types
        ``low leg extrinsic`` of the spread
    :param low_theo_price: optional
        - only for vertical bull/bear trade types
        ``low leg theoretical price`` of the spread
    :param low_theo_volatility: optional
        - only for vertical bull/bear trade types
        ``low leg theoretical volatility`` of the spread
    :param low_max_covered: optional
        - only for vertical bull/bear trade types
        ``low leg max covered returns`` of the spread
    :param low_exp_date: optional
        - only for vertical bull/bear trade types
        ``low leg expiration date`` of the spread
    :param high_strike: optional
        - only for vertical bull/bear trade types
        ``high leg strike price`` of the spread
    :param high_bid: optional
        - only for vertical bull/bear trade types
        ``high leg bid`` of the spread
    :param high_ask: optional
        - only for vertical bull/bear trade types
        ``high leg ask`` of the spread
    :param high_volume: optional
        - only for vertical bull/bear trade types
        ``high leg volume`` of the spread
    :param high_open_int: optional
        - only for vertical bull/bear trade types
        ``high leg open interest`` of the spread
    :param high_delta: optional
        - only for vertical bull/bear trade types
        ``high leg delta`` of the spread
    :param high_gamma: optional
        - only for vertical bull/bear trade types
        ``high leg gamma`` of the spread
    :param high_theta: optional
        - only for vertical bull/bear trade types
        ``high leg theta`` of the spread
    :param high_vega: optional
        - only for vertical bull/bear trade types
        ``high leg vega`` of the spread
    :param high_rho: optional
        - only for vertical bull/bear trade types
        ``high leg rho`` of the spread
    :param high_impl_vol: optional
        - only for vertical bull/bear trade types
        ``high leg implied volatility`` of the spread
    :param high_intrinsic: optional
        - only for vertical bull/bear trade types
        ``high leg intrinsic`` of the spread
    :param high_extrinsic: optional
        - only for vertical bull/bear trade types
        ``high leg extrinsic`` of the spread
    :param high_theo_price: optional
        - only for vertical bull/bear trade types
        ``high leg theoretical price`` of the spread
    :param high_theo_volatility: optional
        - only for vertical bull/bear trade types
        ``high leg theoretical volatility`` of the spread
    :param high_max_covered: optional
        - only for vertical bull/bear trade types
        ``high leg max covered returns`` of the spread
    :param high_exp_date: optional
        - only for vertical bull/bear trade types
        ``high leg expiration date`` of the spread
    :param prev_balance: optional - previous balance
        for this algo
    :param prev_num_owned: optional - previous num of
        ``shares`` or ``contracts``
    :param total_buys: optional - total buy orders
        for this algo
    :param total_sells: optional - total sell orders
        for this algo
    :param buy_triggered: optional - bool
        ``buy`` conditions in the algorithm triggered
    :param buy_strength: optional - float
        custom strength/confidence rating for tuning
        algorithm performance for desirable
        sensitivity and specificity
    :param buy_risk: optional - float
        custom risk rating for tuning algorithm
        performance for avoiding custom risk for buy
        conditions
    :param sell_triggered: optional - bool
        ``sell`` conditions in the algorithm triggered
    :param sell_strength: optional - float
        custom strength/confidence rating for tuning
        algorithm performance for desirable
        sensitivity and specificity
    :param sell_risk: optional - float
        custom risk rating for tuning algorithm
        performance for avoiding custom risk for sell
        conditions
    :param num_indicators_buy: optional - integer
        number of indicators the ``IndicatorProcessor``
        processed and said to ``buy`` an asset
    :param num_indicators_sell: optional - integer
        number of indicators the ``IndicatorProcessor``
        processed and said to ``sell`` an asset
    :param min_buy_indicators: optional - integer
        minimum number of indicators required to trigger
        a ``buy`` order
    :param min_sell_indicators: optional - integer
        minimum number of indicators required to trigger
        a ``sell`` order
    :param net_value: optional - float total value the algorithm
        has left remaining since starting trading. this includes
        the number of ``self.num_owned`` shares with the
        ``self.latest_close`` price included
    :param net_gain: optional - float amount the algorithm has
        made since starting including owned shares
        with the ``self.latest_close`` price included

    :param ds_id: optional - dataset id for debugging
    :param note: optional - string for tracking high level
        testing notes on algorithm indicator ratings and
        internal message passing during an algorithm's
        ``self.process`` method
    :param err: optional - string for tracking errors
    :param entry_spread_dict: optional - on exit spreads
        the calculation of net gain can use the entry
        spread to determine specific performance metrics
        (work in progress)
    :param version: optional - version tracking order history
    """
    status = ae_consts.NOT_RUN
    algo_status = ae_consts.NOT_RUN
    err = None
    balance_net_gain = 0.0
    breakeven_price = None
    max_profit = None  # only for option spreads
    max_loss = None  # only for option spreads
    exp_date = None  # only for option spreads

    # latest price - start price of the algo
    price_change_since_start = close - algo_start_price

    if close and close < 0.01:
        status = ae_consts.INVALID

    history_dict = {
        'ticker': ticker,
        'algo_start_price': ae_consts.to_f(algo_start_price),
        'algo_price_change': ae_consts.to_f(price_change_since_start),
        'original_balance': ae_consts.to_f(original_balance),
        'status': status,
        'algo_status': algo_status,
        'buy_now': buy_triggered,
        'buy_strength': buy_strength,
        'buy_risk': buy_risk,
        'sell_now': sell_triggered,
        'sell_strength': sell_strength,
        'sell_risk': sell_risk,
        'num_indicators_buy': num_indicators_buy,
        'num_indicators_sell': num_indicators_sell,
        'min_buy_indicators': min_buy_indicators,
        'min_sell_indicators': min_sell_indicators,
        'ds_id': ds_id,
        'num_owned': num_owned,
        'close': ae_consts.to_f(close),
        'balance': ae_consts.to_f(balance),
        'commission': ae_consts.to_f(commission),
        'date': date,
        'minute': minute,
        'trade_type': trade_type,
        'high': ae_consts.to_f(high),
        'low': ae_consts.to_f(low),
        'open': ae_consts.to_f(open_val),
        'volume': volume,
        'ask': ae_consts.to_f(ask),
        'bid': ae_consts.to_f(bid),
        'today_high': ae_consts.to_f(today_high),
        'today_low': ae_consts.to_f(today_low),
        'today_open_val': ae_consts.to_f(today_open_val),
        'today_close': ae_consts.to_f(today_close),
        'today_volume': ae_consts.to_f(today_volume),
        'stop_loss': ae_consts.to_f(stop_loss),
        'trailing_stop_loss': ae_consts.to_f(trailing_stop_loss),
        'buy_hold_units': buy_hold_units,
        'sell_hold_units': sell_hold_units,
        'low_strike': low_strike,
        'low_bid': ae_consts.to_f(low_bid),
        'low_ask': ae_consts.to_f(low_ask),
        'low_volume': low_volume,
        'low_open_int': low_open_int,
        'low_delta': low_delta,
        'low_gamma': low_gamma,
        'low_theta': low_theta,
        'low_vega': low_vega,
        'low_rho': low_rho,
        'low_impl_vol': low_impl_vol,
        'low_intrinsic': low_intrinsic,
        'low_extrinsic': low_extrinsic,
        'low_theo_price': low_theo_price,
        'low_theo_volatility': low_theo_volatility,
        'low_max_covered': low_max_covered,
        'low_exp_date': low_exp_date,
        'high_strike': high_strike,
        'high_bid': ae_consts.to_f(high_bid),
        'high_ask': ae_consts.to_f(high_ask),
        'high_volume': high_volume,
        'high_open_int': high_open_int,
        'high_delta': high_delta,
        'high_gamma': high_gamma,
        'high_theta': high_theta,
        'high_vega': high_vega,
        'high_rho': high_rho,
        'high_impl_vol': high_impl_vol,
        'high_intrinsic': high_intrinsic,
        'high_extrinsic': high_extrinsic,
        'high_theo_price': high_theo_price,
        'high_theo_volatility': high_theo_volatility,
        'high_max_covered': high_max_covered,
        'high_exp_date': high_exp_date,
        'spread_id': spread_id,
        'net_gain': net_gain,
        'net_value': net_value,
        'breakeven_price': breakeven_price,
        'max_profit': ae_consts.to_f(max_profit),
        'max_loss': ae_consts.to_f(max_loss),
        'exp_date': exp_date,
        'prev_balance': ae_consts.to_f(prev_balance),
        'prev_num_owned': ae_consts.to_f(prev_num_owned),
        'total_buys': total_buys,
        'total_sells': total_sells,
        'note': note,
        'err': err,
        'version': version
    }

    # evaluate if the algorithm is gaining
    # cash over the test
    if balance and original_balance:
        # net change on the balance
        # note: this needs to be upgraded to support
        # orders per ticker; single tickers work for v1
        balance_net_gain = balance - original_balance
        if balance_net_gain > 0.0:
            algo_status = ae_consts.ALGO_PROFITABLE
        else:
            algo_status = ae_consts.ALGO_NOT_PROFITABLE
    else:
        history_dict['err'] = (
            f'{ticker} ds_id={ds_id} missing balance={balance} and '
            f'original_balance={original_balance}')
        algo_status = ae_consts.ALGO_ERROR
    # if starting balance and original_balance exist
    # to determine algorithm trade profitability

    # if there are no shares to sell then
    # there's no current trade open
    if num_owned is not None and num_owned < 1:
        status = ae_consts.TRADE_NO_SHARES_TO_SELL
    else:
        if close < 0.01:
            history_dict['err'] = (
                f'{ticker} ds_id={ds_id} close={close} must be greater '
                f'than 0.01')
            status = ae_consts.TRADE_ERROR
        elif algo_start_price < 0.01:
            history_dict['err'] = (
                f'{ticker} ds_id={ds_id} '
                f'algo_start_price={algo_start_price} must be greater '
                f'than 0.01')
            status = ae_consts.TRADE_ERROR
        else:
            price_net_gain = close - algo_start_price
            if price_net_gain > 0.0:
                status = ae_consts.TRADE_PROFITABLE
            else:
                status = ae_consts.TRADE_NOT_PROFITABLE
    # if starting price when algo started and close exist
    # determine if this trade profitability

    # Assign calculated values:
    history_dict['net_gain'] = net_gain
    history_dict['balance_net_gain'] = balance_net_gain
    history_dict['breakeven_price'] = breakeven_price
    history_dict['max_profit'] = max_profit
    history_dict['max_loss'] = max_loss
    history_dict['exp_date'] = exp_date

    # assign statuses
    history_dict['status'] = status
    history_dict['algo_status'] = algo_status

    use_date = minute
    if not use_date:
        use_date = date

    log.debug(
        f'{ticker} ds_id={ds_id} {use_date} '
        f'algo={ae_consts.get_status(status=history_dict["algo_status"])} '
        f'trade={ae_consts.get_status(status=history_dict["status"])} '
        f'history={ae_consts.ppj(history_dict)}')

    return history_dict
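
# A minimal sketch calling build_trade_history_entry for a shares
# trade; the pricing values are made up for illustration and
# ae_consts.TRADE_SHARES is the trade type named in the docstring
entry = build_trade_history_entry(
    ticker='SPY',
    num_owned=10,
    close=288.09,
    balance=8000.0,
    commission=6.0,
    date='2019-02-15',
    minute='2019-02-15 15:59:00',
    trade_type=ae_consts.TRADE_SHARES,
    algo_start_price=285.13,
    original_balance=10000.0)
print(ae_consts.get_status(entry['status']),
      ae_consts.get_status(entry['algo_status']))
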
def build_option_spread_details(trade_type, spread_type, option_type, close,
                                num_contracts, low_strike, low_ask, low_bid,
                                high_strike, high_ask, high_bid):
    """build_option_spread_details

    Calculate pricing information for supported spreads
    including ``max loss``, ``max profit``, and ``mid price`` (break
    even coming soon)

    :param trade_type: entry (``TRADE_ENTRY``) or
        exit (``TRADE_EXIT``) of a spread position
    :param spread_type: vertical bull (``SPREAD_VERTICAL_BULL``)
        and vertical bear (``SPREAD_VERTICAL_BEAR``)
        are the only supported calculations for now
    :param option_type: call (``OPTION_CALL``) or put
        (``OPTION_PUT``)
    :param close: closing price of the underlying
        asset
    :param num_contracts: integer number of contracts
    :param low_strike: float - strike for
        the low leg of the spread
    :param low_ask: float - ask price for
        the low leg of the spread
    :param low_bid: float - bid price for
        the low leg of the spread
    :param high_strike: float - strike for
        the high leg of the spread
    :param high_ask: float - ask price for
        the high leg of the spread
    :param high_bid: float - bid price for
        the high leg of the spread
    """

    details = {
        'status': NOT_RUN,
        'trade_type': trade_type,
        'spread_type': spread_type,
        'option_type': option_type,
        'num_contracts': num_contracts,
        'low_strike': low_strike,
        'low_bid': low_bid,
        'low_ask': low_ask,
        'high_strike': high_strike,
        'high_bid': high_bid,
        'high_ask': high_ask,
        'cost': None,
        'revenue': None,
        'low_bidask_mid': None,
        'high_bidask_mid': None,
        'mid_price': None,
        'nat_price': None,
        'strike_width': None,
        'break_even': None,
        'max_loss': None,
        'max_profit': None,
        'spread_id': None
    }

    low_distance = int(close) - low_strike
    high_distance = high_strike - int(close)
    details['strike_width'] = to_f(high_strike - low_strike)
    details['spread_id'] = 'S_{}_O_{}_low_{}_high_{}_w_{}'.format(
        spread_type, option_type, low_distance, high_distance,
        details['strike_width'])
    details['low_bidask_mid'] = to_f((low_bid + low_ask) / 2.0)
    details['high_bidask_mid'] = to_f((high_bid + high_ask) / 2.0)
    details['mid_price'] = to_f(
        abs(details['low_bidask_mid'] - details['high_bidask_mid']))
    # nat_price currently mirrors the mid price calculation
    details['nat_price'] = to_f(
        abs(details['low_bidask_mid'] - details['high_bidask_mid']))

    cost_of_contracts_at_mid_price = None
    revenue_of_contracts_at_mid_price = None

    if trade_type == TRADE_ENTRY:
        cost_of_contracts_at_mid_price = to_f(100.0 * num_contracts *
                                              details['mid_price'])
        revenue_of_contracts_at_mid_price = to_f(
            100.0 * num_contracts *
            (details['strike_width'] - details['mid_price']))
        # every supported spread/option combination currently shares
        # the same entry math: the debit at the mid price is the max
        # loss and the remaining strike width is the max profit
        details['max_loss'] = cost_of_contracts_at_mid_price
        details['max_profit'] = revenue_of_contracts_at_mid_price

    else:  # trade exit calculations:
        revenue_of_contracts_at_mid_price = to_f(100.0 * num_contracts *
                                                 details['mid_price'])
        cost_of_contracts_at_mid_price = to_f(
            100.0 * num_contracts *
            (details['strike_width'] - details['mid_price']))
        # every supported spread/option combination currently shares
        # the same exit math: the credit at the mid price is the max
        # profit and the remaining strike width is the max loss
        details['max_profit'] = revenue_of_contracts_at_mid_price
        details['max_loss'] = cost_of_contracts_at_mid_price
    # end of supported types of spreads

    details['cost'] = cost_of_contracts_at_mid_price
    details['revenue'] = revenue_of_contracts_at_mid_price

    log.debug(
        'type={} spread={} option={} close={} spread_id={} '
        'revenue={} cost={} mid={} width={} '
        'max_profit={} max_loss={}'.format(
            get_status(status=trade_type), get_status(status=spread_type),
            get_status(status=option_type), close, details['spread_id'],
            revenue_of_contracts_at_mid_price, cost_of_contracts_at_mid_price,
            details['mid_price'], details['strike_width'],
            details['max_profit'], details['max_loss']))

    return details
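
# Worked example with assumed prices: entering one vertical bull call
# spread with a 286 low leg and a 291 high leg while the underlying
# closes at 288
#   low mid    = (1.30 + 1.40) / 2 = 1.35
#   high mid   = (0.50 + 0.60) / 2 = 0.55
#   mid price  = |1.35 - 0.55|     = 0.80
#   width      = 291 - 286         = 5.0
#   max loss   = 100 * 1 * 0.80         = 80.0
#   max profit = 100 * 1 * (5.0 - 0.80) = 420.0
details = build_option_spread_details(
    trade_type=TRADE_ENTRY,
    spread_type=SPREAD_VERTICAL_BULL,
    option_type=OPTION_CALL,
    close=288.0,
    num_contracts=1,
    low_strike=286.0,
    low_ask=1.40,
    low_bid=1.30,
    high_strike=291.0,
    high_ask=0.60,
    high_bid=0.50)
print(details['mid_price'], details['max_loss'], details['max_profit'])
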