Example #1
    def test_timedelta(self):
        converter = lambda x: pd.to_timedelta(x, unit="ms")

        s = Series([timedelta(23), timedelta(seconds=5)])
        self.assertEqual(s.dtype, "timedelta64[ns]")
        # index will be float dtype
        assert_series_equal(s, pd.read_json(s.to_json(), typ="series").apply(converter), check_index_type=False)

        s = Series([timedelta(23), timedelta(seconds=5)], index=pd.Index([0, 1], dtype=float))
        self.assertEqual(s.dtype, "timedelta64[ns]")
        assert_series_equal(s, pd.read_json(s.to_json(), typ="series").apply(converter))

        frame = DataFrame([timedelta(23), timedelta(seconds=5)])
        self.assertEqual(frame[0].dtype, "timedelta64[ns]")
        assert_frame_equal(
            frame, pd.read_json(frame.to_json()).apply(converter), check_index_type=False, check_column_type=False
        )

        frame = DataFrame(
            {
                "a": [timedelta(days=23), timedelta(seconds=5)],
                "b": [1, 2],
                "c": pd.date_range(start="20130101", periods=2),
            }
        )

        result = pd.read_json(frame.to_json(date_unit="ns"))
        result["a"] = pd.to_timedelta(result.a, unit="ns")
        result["c"] = pd.to_datetime(result.c)
        assert_frame_equal(frame, result, check_index_type=False)
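Note: the converter in these tests exists because to_json serializes timedelta values as integer milliseconds (the default date_unit), so read_json alone returns plain integers. A minimal round-trip sketch outside the test harness (assuming a recent pandas, where literal JSON strings are wrapped in StringIO):

from datetime import timedelta
from io import StringIO

import pandas as pd

s = pd.Series([timedelta(23), timedelta(seconds=5)])
json_str = s.to_json()  # timedeltas become integer milliseconds
roundtrip = pd.read_json(StringIO(json_str), typ="series").apply(
    lambda x: pd.to_timedelta(x, unit="ms")
)
print(roundtrip.dtype)  # timedelta64[ns]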
Example #2
    def test_timedelta(self):
        converter = lambda x: pd.to_timedelta(x, unit='ms')

        s = Series([timedelta(23), timedelta(seconds=5)])
        self.assertEqual(s.dtype, 'timedelta64[ns]')

        result = pd.read_json(s.to_json(), typ='series').apply(converter)
        assert_series_equal(result, s)

        s = Series([timedelta(23), timedelta(seconds=5)],
                   index=pd.Index([0, 1]))
        self.assertEqual(s.dtype, 'timedelta64[ns]')
        result = pd.read_json(s.to_json(), typ='series').apply(converter)
        assert_series_equal(result, s)

        frame = DataFrame([timedelta(23), timedelta(seconds=5)])
        self.assertEqual(frame[0].dtype, 'timedelta64[ns]')
        assert_frame_equal(frame, pd.read_json(frame.to_json())
                           .apply(converter))

        frame = DataFrame({'a': [timedelta(days=23), timedelta(seconds=5)],
                           'b': [1, 2],
                           'c': pd.date_range(start='20130101', periods=2)})

        result = pd.read_json(frame.to_json(date_unit='ns'))
        result['a'] = pd.to_timedelta(result.a, unit='ns')
        result['c'] = pd.to_datetime(result.c)
        assert_frame_equal(frame, result)
Example #4
    def test_series_non_unique_index(self):
        s = Series(["a", "b"], index=[1, 1])

        self.assertRaises(ValueError, s.to_json, orient="index")

        assert_series_equal(s, read_json(s.to_json(orient="split"), orient="split", typ="series"))
        unser = read_json(s.to_json(orient="records"), orient="records", typ="series")
        np.testing.assert_equal(s.values, unser.values)
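The ValueError above is expected: orient='index' emits one JSON key per index label, so duplicate labels would collide, while orient='split' and orient='records' do not key by index. A small sketch of the failure mode (any recent pandas):

import pandas as pd

s = pd.Series(["a", "b"], index=[1, 1])
try:
    s.to_json(orient="index")
except ValueError as e:
    print(e)  # "Series index must be unique for orient='index'" (or similar)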
Example #5
    def test_series_non_unique_index(self):
        s = Series(['a', 'b'], index=[1, 1])

        self.assertRaises(ValueError, s.to_json, orient='index')

        assert_series_equal(s, read_json(s.to_json(orient='split'),
                                         orient='split', typ='series'))
        unser = read_json(s.to_json(orient='records'),
                          orient='records', typ='series')
        tm.assert_numpy_array_equal(s.values, unser.values)
Example #6
 def test_w_date(date, date_unit=None):
     ts = Series(Timestamp(date), index=self.ts.index)
     # .ix was removed in pandas 1.0; iloc is the positional equivalent
     ts.iloc[1] = pd.NaT
     ts.iloc[5] = pd.NaT
     if date_unit:
         json = ts.to_json(date_format='iso', date_unit=date_unit)
     else:
         json = ts.to_json(date_format='iso')
     result = read_json(json, typ='series')
     assert_series_equal(result, ts)
Example #7
    def test_series_non_unique_index(self):
        s = Series(['a', 'b'], index=[1, 1])

        self.assertRaises(ValueError, s.to_json, orient='index')

        assert_series_equal(s, read_json(s.to_json(orient='split'),
                            orient='split', typ='series'))
        unser = read_json(s.to_json(orient='records'),
                          orient='records', typ='series')
        np.testing.assert_equal(s.values, unser.values)
Example #8
    def test_series_non_unique_index(self):
        s = Series(['a', 'b'], index=[1, 1])

        pytest.raises(ValueError, s.to_json, orient='index')

        assert_series_equal(s, read_json(s.to_json(orient='split'),
                                         orient='split', typ='series'))
        unser = read_json(s.to_json(orient='records'),
                          orient='records', typ='series')
        tm.assert_numpy_array_equal(s.values, unser.values)
Example #10
    def test_datetime_tz(self):
        # GH4377 df.to_json segfaults with non-ndarray blocks
        tz_range = pd.date_range("20130101", periods=3, tz="US/Eastern")
        tz_naive = tz_range.tz_convert("utc").tz_localize(None)

        df = DataFrame({"A": tz_range, "B": pd.date_range("20130101", periods=3)})

        df_naive = df.copy()
        df_naive["A"] = tz_naive
        expected = df_naive.to_json()
        self.assertEqual(expected, df.to_json())

        stz = Series(tz_range)
        s_naive = Series(tz_naive)
        self.assertEqual(stz.to_json(), s_naive.to_json())
Example #11
    def test_typ(self):

        s = Series(list(range(6)),  # lrange was a Python 2 compat helper
                   index=['a', 'b', 'c', 'd', 'e', 'f'],
                   dtype='int64')
        result = read_json(s.to_json(), typ=None)
        assert_series_equal(result, s)
Example #12
    def test_datetime_tz(self):
        # GH4377 df.to_json segfaults with non-ndarray blocks
        tz_range = pd.date_range('20130101', periods=3, tz='US/Eastern')
        tz_naive = tz_range.tz_convert('utc').tz_localize(None)

        df = DataFrame({
            'A': tz_range,
            'B': pd.date_range('20130101', periods=3)})

        df_naive = df.copy()
        df_naive['A'] = tz_naive
        expected = df_naive.to_json()
        self.assertEqual(expected, df.to_json())

        stz = Series(tz_range)
        s_naive = Series(tz_naive)
        self.assertEqual(stz.to_json(), s_naive.to_json())
Example #14
    def test_timedelta(self):
        from datetime import timedelta

        converter = lambda x: pd.to_timedelta(x, unit="ms")

        s = Series([timedelta(23), timedelta(seconds=5)])
        self.assertEqual(s.dtype, "timedelta64[ns]")
        assert_series_equal(s, pd.read_json(s.to_json(), typ="series").apply(converter))

        frame = DataFrame([timedelta(23), timedelta(seconds=5)])
        self.assertEqual(frame[0].dtype, "timedelta64[ns]")
        assert_frame_equal(frame, pd.read_json(frame.to_json()).apply(converter))
Example #15
    def test_timedelta(self):
        from datetime import timedelta
        converter = lambda x: pd.to_timedelta(x, unit='ms')

        s = Series([timedelta(23), timedelta(seconds=5)])
        self.assertEqual(s.dtype, 'timedelta64[ns]')
        assert_series_equal(
            s,
            pd.read_json(s.to_json(), typ='series').apply(converter))

        frame = DataFrame([timedelta(23), timedelta(seconds=5)])
        self.assertEqual(frame[0].dtype, 'timedelta64[ns]')
        assert_frame_equal(frame,
                           pd.read_json(frame.to_json()).apply(converter))
Example #16
    def test_timedelta(self):
        tm._skip_if_not_numpy17_friendly()

        from datetime import timedelta
        converter = lambda x: pd.to_timedelta(x, unit='ms')

        s = Series([timedelta(23), timedelta(seconds=5)])
        self.assertEqual(s.dtype, 'timedelta64[ns]')
        assert_series_equal(s, pd.read_json(s.to_json(), typ='series').apply(converter))

        frame = DataFrame([timedelta(23), timedelta(seconds=5)])
        self.assertEqual(frame[0].dtype, 'timedelta64[ns]')
        assert_frame_equal(
            frame, pd.read_json(frame.to_json()).apply(converter))
Example #17
    def test_timedelta(self):
        if _np_version_under1p7:
            raise nose.SkipTest("numpy < 1.7")

        from datetime import timedelta
        converter = lambda x: pd.to_timedelta(x, unit='ms')

        s = Series([timedelta(23), timedelta(seconds=5)])
        self.assertEqual(s.dtype, 'timedelta64[ns]')
        assert_series_equal(s, pd.read_json(s.to_json(), typ='series').apply(converter))

        frame = DataFrame([timedelta(23), timedelta(seconds=5)])
        self.assertEqual(frame[0].dtype, 'timedelta64[ns]')
        assert_frame_equal(
            frame, pd.read_json(frame.to_json()).apply(converter))
Example #18
    def test_date_format(self):

        df = self.tsframe.copy()
        df["date"] = Timestamp("20130101")
        df_orig = df.copy()

        json = df.to_json(date_format="iso")
        result = read_json(json)
        assert_frame_equal(result, df_orig)

        # make sure that we did in fact copy
        assert_frame_equal(df, df_orig)

        ts = Series(Timestamp("20130101"), index=self.ts.index)
        json = ts.to_json(date_format="iso")
        result = read_json(json, typ="series")
        assert_series_equal(result, ts)
Example #19
    def test_date_format(self):

        df = self.tsframe.copy()
        df['date'] = Timestamp('20130101')
        df_orig = df.copy()

        json = df.to_json(date_format='iso')
        result = read_json(json)
        assert_frame_equal(result, df_orig)

        # make sure that we did in fact copy
        assert_frame_equal(df, df_orig)

        ts = Series(Timestamp('20130101'), index=self.ts.index)
        json = ts.to_json(date_format='iso')
        result = read_json(json, typ='series')
        assert_series_equal(result, ts)
Example #21
def perf_per_month(df: pd.Series) -> str:
    """Return the monthly performance of a strategy as a JSON string.

    Arguments:
        df -- Series of NAV values (named 'nav') with a datetime index

    Returns:
        JSON string mapping each month-end date to that month's return
    """
    df = df.to_frame()
    df.index = pd.to_datetime(df.index, format="%Y-%m-%d %H:%M:%S").date
    df['eom'] = df.index + MonthEnd(0)
    df.drop_duplicates('eom', keep='last', inplace=True)
    df = df.loc[df.index == df['eom']]
    df['m_rets'] = df['nav'] / df['nav'].shift(1) - 1
    df.drop(columns=['eom', 'nav'], inplace=True)
    return df.to_json(orient='index')
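A minimal usage sketch for the function above, assuming pandas and MonthEnd (from pandas.tseries.offsets) are imported as in the snippet; the NAV series here is hypothetical and must be named 'nav' for the column lookup to work:

import pandas as pd

idx = pd.date_range("2021-01-01", "2021-03-31", freq="D")
nav = pd.Series([100 * 1.001 ** i for i in range(len(idx))],
                index=idx, name="nav")
# returns a JSON string of month-end -> monthly return
print(perf_per_month(nav))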
Example #22
    def test_timedelta(self):
        if _np_version_under1p7:
            raise nose.SkipTest("numpy < 1.7")

        from datetime import timedelta
        converter = lambda x: pd.to_timedelta(x, unit='ms')

        s = Series([timedelta(23), timedelta(seconds=5)])
        self.assertEqual(s.dtype, 'timedelta64[ns]')
        assert_series_equal(
            s,
            pd.read_json(s.to_json(), typ='series').apply(converter))

        frame = DataFrame([timedelta(23), timedelta(seconds=5)])
        self.assertEqual(frame[0].dtype, 'timedelta64[ns]')
        assert_frame_equal(frame,
                           pd.read_json(frame.to_json()).apply(converter))
Example #23
def _json_hash_encode(row: Series) -> Series:
    """
    Take a DataFrame row, add serialized JSON and hash

    Parameters
    ----------
    row: Series
        a DataFrame row

    Returns
    -------
    Series
        the row with the json and hash columns added
    """
    json = row.to_json()
    row["Json"] = json
    row["Hash"] = xxhash.xxh64_hexdigest(json.encode("utf-8"))
    return row
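A sketch of how a row helper like this is typically applied; the frame below is hypothetical, and the snippet assumes the xxhash package is installed alongside pandas:

import pandas as pd
import xxhash

df = pd.DataFrame({"a": [1, 2], "b": ["x", "y"]})
# axis=1 passes each row to the helper as a Series
df = df.apply(_json_hash_encode, axis=1)
print(df[["Json", "Hash"]])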
Example #24
    def mapRandomColors(series):
        unq = series.unique()
        key = "-".join(unq)
        if key in ColorPalette.alreadyMapped:
            return ColorPalette.alreadyMapped[key]

        n = len(unq)

        if n > len(THEME.colors_light):
            raise Exception(
                f"{n} : Number of Categories is greater than {len(THEME.colors_light)}."
            )

        # choice(..., replace=False) matches numpy's API, so `random` here
        # is presumably numpy.random rather than the stdlib random module
        colors = random.choice(THEME.colors_light, n, replace=False)
        s = Series(colors, index=unq, name="color")
        colorDict = json.loads(s.to_json(orient="index"))
        ColorPalette.alreadyMapped[key] = colorDict
        return ColorPalette.alreadyMapped[key]
Example #25
    def test_parse_dates(self):

        # frame
        df = self.tsframe.copy()
        df['date'] = Timestamp('20130101')

        json = df.to_json()
        result = read_json(json, parse_dates=True)
        assert_frame_equal(result, df)

        df['foo'] = 1.
        json = df.to_json()
        result = read_json(json, parse_dates=True)
        assert_frame_equal(result, df)

        # series
        ts = Series(Timestamp('20130101'), index=self.ts.index)
        json = ts.to_json()
        result = read_json(json, typ='series', parse_dates=True)
        assert_series_equal(result, ts)
Example #26
    def test_convert_dates(self):

        # frame
        df = self.tsframe.copy()
        df['date'] = Timestamp('20130101')

        json = df.to_json()
        result = read_json(json)
        assert_frame_equal(result, df)

        df['foo'] = 1.
        json = df.to_json(date_unit='ns')
        result = read_json(json, convert_dates=False)
        expected = df.copy()
        expected['date'] = expected['date'].values.view('i8')
        expected['foo'] = expected['foo'].astype('int64')
        assert_frame_equal(result, expected)

        # series
        ts = Series(Timestamp('20130101'), index=self.ts.index)
        json = ts.to_json()
        result = read_json(json, typ='series')
        assert_series_equal(result, ts)
Example #28
    def test_convert_dates(self):

        # frame
        df = self.tsframe.copy()
        df["date"] = Timestamp("20130101")

        json = df.to_json()
        result = read_json(json)
        assert_frame_equal(result, df)

        df["foo"] = 1.0
        json = df.to_json(date_unit="ns")
        result = read_json(json, convert_dates=False)
        expected = df.copy()
        expected["date"] = expected["date"].values.view("i8")
        expected["foo"] = expected["foo"].astype("int64")
        assert_frame_equal(result, expected)

        # series
        ts = Series(Timestamp("20130101"), index=self.ts.index)
        json = ts.to_json()
        result = read_json(json, typ="series")
        assert_series_equal(result, ts)
Example #29
 def _get_row_hash(row: pd.Series) -> str:
     """Get the SHA256 hash for the json equivalent of a pandas record"""
     row_bytes = row.to_json(orient='records').encode()
     digest = hashlib.sha256(row_bytes).hexdigest()
     return digest
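A short usage sketch (hypothetical frame; hashlib and pandas imports as in the helper):

import pandas as pd

df = pd.DataFrame({"id": [1, 2], "name": ["a", "b"]})
hashes = df.apply(_get_row_hash, axis=1)  # one SHA256 hex digest per row
print(hashes)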
Example #30
    def test_typ(self):

        s = Series(list(range(6)),  # lrange was a Python 2 compat helper
                   index=['a', 'b', 'c', 'd', 'e', 'f'], dtype='int64')
        result = read_json(s.to_json(), typ=None)
        assert_series_equal(result, s)
Example #31
 def test_series_from_json_precise_float(self):
     s = Series([4.56, 4.56, 4.56])
     result = read_json(s.to_json(), typ='series', precise_float=True)
     assert_series_equal(result, s)
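precise_float=True tells read_json to use a slower but more precise float parser, which matters when round-tripping values like 4.56 that are not exactly representable in binary. A standalone sketch of the same round trip (recent pandas, hence the StringIO wrapper):

from io import StringIO

import pandas as pd

s = pd.Series([4.56, 4.56, 4.56])
result = pd.read_json(StringIO(s.to_json()), typ="series", precise_float=True)
print((result == s).all())  # True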
Example #32
    async def run(
        self,
        symbol: str,
        position: int,
        minute_history: df,
        now: datetime,
        portfolio_value: float = None,
        trading_api: tradeapi = None,
        debug: bool = False,
        backtesting: bool = False,
    ) -> Tuple[bool, Dict]:
        data = minute_history.iloc[-1]
        prev_minute = minute_history.iloc[-2]
        if await self.is_buy_time(now) and not position:
            # Check for buy signals
            lbound = config.market_open
            ubound = lbound + timedelta(minutes=15)
            try:
                high_15m = minute_history[lbound:ubound]["high"].max(
                )  # type: ignore

                if data.vwap < high_15m:
                    return False, {}
            except Exception as e:
                # Because we're aggregating on the fly, sometimes the datetime
                # index can get messy until it's healed by the minute bars
                tlog(
                    f"[{self.name}] error aggregation {e} - maybe should use nearest?"
                )
                return False, {}

            back_time = ts(config.market_open)
            back_time_index = minute_history["close"].index.get_loc(
                back_time, method="nearest")
            close = (minute_history["close"]
                     [back_time_index:-1].dropna().between_time(
                         "9:30", "16:00").resample("5min").last()).dropna()
            open = (minute_history["open"]
                    [back_time_index:-1].dropna().between_time(
                        "9:30", "16:00").resample("5min").first()).dropna()
            high = (minute_history["high"]
                    [back_time_index:-1].dropna().between_time(
                        "9:30", "16:00").resample("5min").max()).dropna()
            low = (minute_history["low"]
                   [back_time_index:-1].dropna().between_time(
                       "9:30", "16:00").resample("5min").min()).dropna()
            volume = (minute_history["volume"]
                      [back_time_index:-1].dropna().between_time(
                          "9:30", "16:00").resample("5min").sum()).dropna()

            _df = concat(
                [
                    open.rename("open"),
                    high.rename("high"),
                    low.rename("low"),
                    close.rename("close"),
                    volume.rename("volume"),
                ],
                axis=1,
            )

            if not add_daily_vwap(_df):
                tlog(f"[{now}]failed add_daily_vwap")
                return False, {}

            if debug:
                tlog(
                    f"\n[{now}]{symbol} {tabulate(_df[-10:], headers='keys', tablefmt='psql')}"
                )
            vwap_series = _df["average"]

            if (
                    # data.vwap > close_series[-1] > close_series[-2]
                    # and round(data.average, 2) > round(vwap_series[-1], 2)
                    # and data.vwap > data.average
                    # and
                    data.low > data.average
                    and close[-1] > vwap_series[-1] > vwap_series[-2] > low[-2]
                    and close[-1] > high[-2]
                    and prev_minute.close > prev_minute.open
                    and data.close > data.open
                    and low[-2] < vwap_series[-2] - 0.2):
                stop_price = find_stop(
                    data.close if not data.vwap else data.vwap,
                    minute_history,
                    now,
                )
                # upperband, middleband, lowerband = BBANDS(
                #    minute_history["close"], timeperiod=20
                # )

                # stop_price = min(
                #    prev_minute.close,
                #    data.average - 0.01,
                #    lowerband[-1] - 0.03,
                # )
                target = (3 * (data.close - stop_price) + data.close
                          )  # upperband[-1]

                # if target - stop_price < 0.05:
                #    tlog(
                #        f"{symbol} target price {target} too close to stop price {stop_price}"
                #    )
                #    return False, {}
                # if target - data.close < 0.05:
                #    tlog(
                #        f"{symbol} target price {target} too close to close price {data.close}"
                #    )
                #    return False, {}

                stop_prices[symbol] = stop_price
                target_prices[symbol] = target

                patterns: Dict[ts, Dict[int, List[str]]] = {}
                pattern_functions = talib.get_function_groups(
                )["Pattern Recognition"]
                for pattern in pattern_functions:
                    pattern_value = getattr(talib, pattern)(open, high, low,
                                                            close)
                    result = pattern_value.to_numpy().nonzero()
                    if result[0].size > 0:
                        for timestamp, value in pattern_value.iloc[
                                result].items():
                            t = ts(timestamp)
                            if t not in patterns:
                                patterns[t] = {}
                            if value not in patterns[t]:
                                patterns[t][value] = [pattern]
                            else:
                                patterns[t][value].append(pattern)

                tlog(f"{symbol} found conditions for VWAP strategy now:{now}")
                candle_s = Series(patterns)
                candle_s = candle_s.sort_index()

                tlog(f"{symbol} 5-min VWAP {vwap_series}")
                tlog(f"{symbol} 5-min close values {close}")
                tlog(f"{symbol} {candle_s}")
                tlog(
                    f"\n{tabulate(minute_history[-10:], headers='keys', tablefmt='psql')}"
                )

                if candle_s.size > 0 and -100 in candle_s[-1]:
                    tlog(
                        f"{symbol} Bullish pattern does not exists -> should skip"
                    )
                    # return False, {}

                if portfolio_value is None:
                    if trading_api:
                        portfolio_value = float(
                            trading_api.get_account().portfolio_value)
                    else:
                        raise Exception(
                            "VWAPLong.run(): both portfolio_value and trading_api can't be None"
                        )

                shares_to_buy = (
                    portfolio_value * 20.0 * config.risk // data.close
                    # // (data.close - stop_prices[symbol])
                )
                print(
                    f"shares to buy {shares_to_buy} {data.close} {stop_prices[symbol]}"
                )
                if not shares_to_buy:
                    shares_to_buy = 1
                shares_to_buy -= position

                if shares_to_buy > 0:
                    tlog(
                        f"[{self.name}] Submitting buy for {shares_to_buy} shares of {symbol} at {data.close} target {target_prices[symbol]} stop {stop_prices[symbol]}"
                    )
                    buy_indicators[symbol] = {
                        # bbrand_lower": lowerband[-5:].tolist(),
                        # "bbrand_middle": middleband[-5:].tolist(),
                        # "bbrand_upper": upperband[-5:].tolist(),
                        "average": round(data.average, 2),
                        "vwap": round(data.vwap, 2),
                        "patterns": candle_s.to_json(),
                    }

                    return (
                        True,
                        {
                            "side": "buy",
                            "qty": str(shares_to_buy),
                            "type": "limit",
                            "limit_price": str(data.close),
                        },
                    )
            elif debug:
                tlog(f"[{now}]{symbol} failed vwap strategy")
                if not (data.low > data.average):
                    tlog(
                        f"[{now}]{symbol} failed data.low {data.low} > data.average {data.average}"
                    )
                if not (close[-1] > vwap_series[-1] > vwap_series[-2] >
                        low[-2]):
                    tlog(
                        f"[{now}]{symbol} failed close[-1] {close[-1]} > vwap_series[-1] {vwap_series[-1]} > vwap_series[-2]{ vwap_series[-2]} > low[-2] {low[-2]}"
                    )
                if not (prev_minute.close > prev_minute.open):
                    tlog(
                        f"[{now}]{symbol} failed prev_minute.close {prev_minute.close} > prev_minute.open {prev_minute.open}"
                    )
                if not (close[-1] > high[-2]):
                    tlog(
                        f"[{now}]{symbol} failed close[-1] {close[-1]} > high[-2] {high[-2]}"
                    )
                if not (data.close > data.open):
                    tlog(
                        f"[{now}]{symbol} failed data.close {data.close} > data.open {data.open}"
                    )
                if not low[-2] < vwap_series[-2] - 0.2:
                    tlog(
                        f"[{now}]{symbol} failed low[-2] {low[-2]} < vwap_series[-2] {vwap_series[-2] } - 0.2"
                    )

        elif (await super().is_sell_time(now) and position > 0
              and symbol in latest_cost_basis
              and last_used_strategy[symbol].name == self.name):
            if open_orders.get(symbol) is not None:
                tlog(f"vwap_long: open order for {symbol} exists, skipping")
                return False, {}

            if data.vwap <= data.average - 0.02:
                sell_indicators[symbol] = {
                    "reason": "below VWAP",
                    "average": data.average,
                    "vwap": data.vwap,
                }
                return (
                    True,
                    {
                        "side": "sell",
                        "qty": str(position),
                        "type": "market"
                    },
                )

        return False, {}
Example #33
def assert_series_and_dict_equal(left: pd.Series, right: dict):
    left_json = left.to_json(orient="index")
    left_dict = json.loads(left_json)
    assert left_dict == pytest.approx(right)
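A minimal pytest-style usage sketch for the helper above (json, pandas, and pytest imported as in the snippet):

import json

import pandas as pd
import pytest

def test_series_matches_dict():
    s = pd.Series({"a": 1.0, "b": 2.0})
    assert_series_and_dict_equal(s, {"a": 1.0, "b": 2.0})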
Example #34
    async def run(
        self,
        symbol: str,
        position: int,
        minute_history: df,
        now: datetime,
        portfolio_value: float = None,
        trading_api: tradeapi = None,
        debug: bool = False,
        backtesting: bool = False,
    ) -> Tuple[bool, Dict]:
        data = minute_history.iloc[-1]
        prev_min = minute_history.iloc[-2]

        morning_rush = (now - config.market_open).seconds // 60 < 30

        if (await super().is_buy_time(now) and not position
                and not await self.should_cool_down(symbol, now)):
            # Check for buy signals
            lbound = config.market_open
            ubound = lbound + timedelta(minutes=15)

            if debug:
                tlog(f"15 schedule {lbound}/{ubound}")
            try:
                high_15m = minute_history[lbound:ubound]["high"].max(
                )  # type: ignore
                if debug:
                    tlog(f"{minute_history[lbound:ubound]}")  # type: ignore
            except Exception as e:
                # Because we're aggregating on the fly, sometimes the datetime
                # index can get messy until it's healed by the minute bars
                tlog(
                    f"[{self.name}] error aggregation {e} - maybe should use nearest?"
                )
                return False, {}

            if debug:
                tlog(f"15 minutes high:{high_15m}")

            # Get the change since yesterday's market close
            if data.close > high_15m or (
                    hasattr(config, "bypass_market_schedule")
                    and config.bypass_market_schedule
            ):  # and volume_today[symbol] > 30000:
                if debug:
                    tlog(
                        f"[{now}]{symbol} {data.close} above 15 minute high {high_15m}"
                    )

                last_30_max_close = minute_history[-30:]["close"].max()
                last_30_min_close = minute_history[-30:]["close"].min()

                if ((now - config.market_open).seconds // 60 > 90 and
                    (last_30_max_close - last_30_min_close) / last_30_min_close
                        > 0.1
                        and (not hasattr(config, "bypass_market_schedule")
                             or not config.bypass_market_schedule)):
                    tlog(
                        f"[{self.name}][{now}] too sharp {symbol} increase in last 30 minutes, can't trust MACD, cool down for 15 minutes"
                    )
                    cool_down[symbol] = now.replace(
                        second=0, microsecond=0) + timedelta(minutes=15)
                    return False, {}

                serie = (minute_history["close"].dropna().between_time(
                    "9:30", "16:00"))

                if data.vwap:
                    serie[-1] = data.vwap
                macds = MACD(serie)
                sell_macds = MACD(serie, 13, 21)
                macd1 = macds[0]
                macd_signal = macds[1]
                round_factor = (2 if macd1[-1] >= 0.1 or macd_signal[-1] >= 0.1
                                else 3)

                minute_shift = 0 if morning_rush or debug else -1

                if debug:
                    if macd1[-1 + minute_shift].round(round_factor) > 0:
                        tlog(f"[{now}]{symbol} MACD > 0")
                    if (macd1[-3 + minute_shift].round(round_factor) <
                            macd1[-2 + minute_shift].round(round_factor) <
                            macd1[-1 + minute_shift].round(round_factor)):
                        tlog(f"[{now}]{symbol} MACD trending")
                    else:
                        tlog(
                            f"[{now}]{symbol} MACD NOT trending -> failed {macd1[-3 + minute_shift].round(round_factor)} {macd1[-2 + minute_shift].round(round_factor)} {macd1[-1 + minute_shift].round(round_factor)}"
                        )
                    if (macd1[-1 + minute_shift] >
                            macd_signal[-1 + minute_shift]):
                        tlog(f"[{now}]{symbol} MACD above signal")
                    else:
                        tlog(f"[{now}]{symbol} MACD BELOW signal -> failed")
                    if data.close >= data.open:
                        tlog(f"[{now}]{symbol} above open")
                    else:
                        tlog(
                            f"[{now}]{symbol} close {data.close} BELOW open {data.open} -> failed"
                        )
                if (macd1[-1 + minute_shift].round(round_factor) > 0
                        and macd1[-3 + minute_shift].round(round_factor) <
                        macd1[-2 + minute_shift].round(round_factor) <
                        macd1[-1 + minute_shift].round(round_factor)
                        and macd1[-1 + minute_shift] >
                        macd_signal[-1 + minute_shift]
                        and sell_macds[0][-1 + minute_shift] > 0
                        and data.vwap > data.open
                        and data.close > prev_min.close
                        and data.close > data.open):
                    if symbol in voi and voi[symbol][-1] < 0:
                        tlog(
                            f"[{self.name}][{now}] Don't buy {symbol} on negative voi {voi[symbol]}"
                        )
                        return False, {}
                    if symbol in voi and voi[symbol][-1] < voi[symbol][-2]:
                        tlog(
                            f"[{self.name}][{now}] Don't buy {symbol} if voi not trending up {voi[symbol]}"
                        )
                        return False, {}

                    if symbol in voi:
                        tlog(
                            f"[{self.name}][{now}] {symbol} voi {voi[symbol]}")
                    tlog(
                        f"[{self.name}][{now}] MACD(12,26) for {symbol} trending up!, MACD(13,21) trending up and above signals"
                    )

                    if False:  # not morning_rush:
                        back_time = ts(config.market_open)
                        back_time_index = minute_history[
                            "close"].index.get_loc(back_time, method="nearest")
                        close = (
                            minute_history["close"]
                            [back_time_index:].dropna().between_time(
                                "9:30",
                                "16:00").resample("5min").last()).dropna()
                        open = (
                            minute_history["open"]
                            [back_time_index:].dropna().between_time(
                                "9:30",
                                "16:00").resample("5min").first()).dropna()
                        if close[-1] > open[-1]:
                            tlog(
                                f"[{now}] {symbol} confirmed 5-min candle bull"
                            )

                            if debug:
                                tlog(
                                    f"[{now}] {symbol} open={open[-5:]} close={close[-5:]}"
                                )
                        else:
                            tlog(
                                f"[{now}] {symbol} did not confirm 5-min candle bull {close[-5:]} {open[-5:]}"
                            )
                            return False, {}

                    macd2 = MACD(serie, 40, 60)[0]
                    # await asyncio.sleep(0)
                    if (macd2[-1 + minute_shift] >= 0
                            and np.diff(macd2)[-1 + minute_shift] >= 0):
                        tlog(
                            f"[{self.name}][{now}] MACD(40,60) for {symbol} trending up!"
                        )
                        # check RSI does not indicate overbought
                        rsi = RSI(serie, 14)

                        if not (rsi[-1 + minute_shift] > rsi[-2 + minute_shift]
                                > rsi[-3 + minute_shift]):
                            tlog(
                                f"[{self.name}][{now}] {symbol} RSI counter MACD trend ({rsi[-1+minute_shift]},{rsi[-2+minute_shift]},{rsi[-3+minute_shift]})"
                            )
                            # return False, {}

                        # await asyncio.sleep(0)
                        tlog(
                            f"[{self.name}][{now}] {symbol} RSI={round(rsi[-1+minute_shift], 2)}"
                        )

                        rsi_limit = 71 if not morning_rush else 80
                        if rsi[-1 + minute_shift] <= rsi_limit:
                            tlog(
                                f"[{self.name}][{now}] {symbol} RSI {round(rsi[-1+minute_shift], 2)} <= {rsi_limit}"
                            )

                            enforce_resistance = (
                                False  # (True if not morning_rush else False)
                            )

                            if enforce_resistance:
                                resistances = await find_resistances(
                                    symbol,
                                    self.name,
                                    min(
                                        data.low, prev_min.close
                                    ),  # data.close if not data.vwap else data.vwap,
                                    minute_history,
                                    debug,
                                )

                                supports = await find_supports(
                                    symbol,
                                    self.name,
                                    min(
                                        data.low, prev_min.close
                                    ),  # data.close if not data.vwap else data.vwap,
                                    minute_history,
                                    debug,
                                )
                                if resistances is None or resistances == []:
                                    tlog(
                                        f"[{self.name}] no resistance for {symbol} -> skip buy"
                                    )
                                    cool_down[symbol] = now.replace(
                                        second=0, microsecond=0)
                                    return False, {}
                                if supports is None or supports == []:
                                    tlog(
                                        f"[{self.name}] no supports for {symbol} -> skip buy"
                                    )
                                    cool_down[symbol] = now.replace(
                                        second=0, microsecond=0)
                                    return False, {}

                                next_resistance = None
                                for potential_resistance in resistances:
                                    if potential_resistance > data.close:
                                        next_resistance = potential_resistance
                                        break

                                if not next_resistance:
                                    tlog(
                                        f"[{self.name}] did not find resistance above {data.close}"
                                    )
                                    return False, {}

                                if next_resistance - data.close < 0.05:
                                    tlog(
                                        f"[{self.name}] {symbol} at price {data.close} too close to resistance {next_resistance}"
                                    )
                                    return False, {}
                                if data.close - supports[-1] < 0.05:
                                    tlog(
                                        f"[{self.name}] {symbol} at price {data.close} too close to support {supports[-1]} -> trend not established yet"
                                    )
                                    return False, {}
                                if (next_resistance - data.close) / (
                                        data.close - supports[-1]) < 0.8:
                                    tlog(
                                        f"[{self.name}] {symbol} at price {data.close} missed entry point between support {supports[-1]} and resistance {next_resistance}"
                                    )
                                    cool_down[symbol] = now.replace(
                                        second=0, microsecond=0)
                                    return False, {}

                                tlog(
                                    f"[{self.name}] {symbol} at price {data.close} found entry point between support {supports[-1]} and resistance {next_resistance}"
                                )
                                # Stock has passed all checks; figure out how much to buy
                                stop_price = find_stop(
                                    data.close if not data.vwap else data.vwap,
                                    minute_history,
                                    now,
                                )
                                stop_prices[symbol] = min(
                                    stop_price, supports[-1] - 0.05)
                                target_prices[symbol] = (
                                    data.close +
                                    (data.close - stop_prices[symbol]) * 2)
                                symbol_resistance[symbol] = next_resistance

                                if next_resistance - data.vwap < 0.05:
                                    tlog(
                                        f"[{self.name}] {symbol} at price {data.close} too close to resistance {next_resistance}"
                                    )
                                    return False, {}
                                # if data.vwap - support < 0.05:
                                #    tlog(
                                #        f"[{self.name}] {symbol} at price {data.close} too close to support {support} -> trend not established yet"
                                #    )
                                #    return False, {}
                                if (next_resistance - data.vwap) / (
                                        data.vwap - stop_prices[symbol]) < 0.8:
                                    tlog(
                                        f"[{self.name}] {symbol} at price {data.close} missed entry point between support {stop_prices[symbol] } and resistance {next_resistance}"
                                    )
                                    cool_down[symbol] = now.replace(
                                        second=0, microsecond=0)
                                    return False, {}

                                tlog(
                                    f"[{self.name}] {symbol} at price {data.close} found entry point between support {stop_prices[symbol]} and resistance {next_resistance}"
                                )

                                resistance = next_resistance
                                support = target_prices[symbol]
                            else:
                                stop_price = find_stop(
                                    data.close if not data.vwap else data.vwap,
                                    minute_history,
                                    now,
                                )
                                target_price = (3 * (data.close - stop_price) +
                                                data.close)
                                target_prices[symbol] = target_price
                                stop_prices[symbol] = stop_price
                                resistance = target_price
                                support = stop_price
                                symbol_resistance[symbol] = target_price

                            if portfolio_value is None:
                                if trading_api:
                                    portfolio_value = float(
                                        trading_api.get_account(
                                        ).portfolio_value)
                                else:
                                    raise Exception(
                                        "MomentumLong.run(): both portfolio_value and trading_api can't be None"
                                    )

                            shares_to_buy = (
                                portfolio_value * config.risk //
                                (data.close - stop_prices[symbol]))
                            if not shares_to_buy:
                                shares_to_buy = 1
                            shares_to_buy -= position
                            if shares_to_buy > 0:
                                buy_price = max(data.close, data.vwap)
                                tlog(
                                    f"[{self.name}] Submitting buy for {shares_to_buy} shares of {symbol} at {buy_price} target {target_prices[symbol]} stop {stop_prices[symbol]}"
                                )

                                # await asyncio.sleep(0)
                                buy_indicators[symbol] = {
                                    "rsi":
                                    rsi[-1 + minute_shift].tolist(),
                                    "macd":
                                    macd1[-5 + minute_shift:].tolist(),
                                    "macd_signal":
                                    macd_signal[-5 + minute_shift:].tolist(),
                                    "slow macd":
                                    macd2[-5 + minute_shift:].tolist(),
                                    "sell_macd":
                                    sell_macds[0][-5 + minute_shift:].tolist(),
                                    "sell_macd_signal":
                                    sell_macds[1][-5 + minute_shift:].tolist(),
                                    "resistances": [resistance],
                                    "supports": [support],
                                    "vwap":
                                    data.vwap,
                                    "avg":
                                    data.average,
                                    "position_ratio":
                                    str(
                                        round(
                                            (resistance - data.vwap) /
                                            (data.vwap - support),
                                            2,
                                        )),
                                }
                                if symbol in voi:
                                    buy_indicators[symbol]["voi"] = voi[symbol]

                                return (
                                    True,
                                    {
                                        "side": "buy",
                                        "qty": str(shares_to_buy),
                                        "type": "limit",
                                        "limit_price": str(buy_price),
                                    } if not morning_rush else {
                                        "side": "buy",
                                        "qty": str(shares_to_buy),
                                        "type": "market",
                                    },
                                )

                    else:
                        tlog(f"[{self.name}] failed MACD(40,60) for {symbol}!")

        if (await super().is_sell_time(now) and position > 0
                and symbol in latest_cost_basis
                and last_used_strategy[symbol].name == self.name):
            if open_orders.get(symbol) is not None:
                tlog(
                    f"momentum_long: open order for {symbol} exists, skipping")
                return False, {}

            # Check for liquidation signals
            # Sell for a loss if it's fallen below our stop price
            # Sell for a loss if it's below our cost basis and MACD < 0
            # Sell for a profit if it's above our target price
            macds = MACD(
                minute_history["close"].dropna().between_time("9:30", "16:00"),
                13,
                21,
            )
            # await asyncio.sleep(0)
            macd = macds[0]
            macd_signal = macds[1]
            rsi = RSI(
                minute_history["close"].dropna().between_time("9:30", "16:00"),
                14,
            )
            movement = (data.close - latest_scalp_basis[symbol]
                        ) / latest_scalp_basis[symbol]
            macd_val = macd[-1]
            macd_signal_val = macd_signal[-1]

            round_factor = (2 if macd_val >= 0.1 or macd_signal_val >= 0.1 else
                            3)
            # await asyncio.sleep(0)
            if (symbol_resistance and symbol in symbol_resistance
                    and symbol_resistance[symbol]):
                scalp_threshold = (symbol_resistance[symbol] +
                                   latest_scalp_basis[symbol]) / 2.0
            else:
                scalp_threshold = (target_prices[symbol] +
                                   latest_scalp_basis[symbol]) / 2.0
            bail_threshold = (latest_scalp_basis[symbol] +
                              scalp_threshold) / 2.0
            macd_below_signal = round(macd_val, round_factor) < round(
                macd_signal_val, round_factor)
            open_rush = (now - config.market_open).seconds // 60 <= 45
            bail_out = (
                # movement > min(0.02, movement_threshold) and macd_below_signal
                (movement > 0.01 or data.vwap > bail_threshold
                 )  # or open_rush)
                and macd_below_signal and
                round(macd[-1], round_factor) < round(macd[-2], round_factor))
            bail_on_rsi = (movement > 0.01
                           or data.vwap > bail_threshold) and rsi[-2] < rsi[-3]

            if debug and not bail_out:
                tlog(
                    f"[{now}]{symbol} don't bail: data.vwap={data.vwap} bail_threshold={bail_threshold} macd_below_signal={macd_below_signal} macd[-1]={ macd[-1]} macd[-2]={macd[-2]}"
                )

            scalp = (movement > 0.02 or data.vwap > scalp_threshold) and (
                symbol not in voi
                or voi[symbol][-1] < voi[symbol][-2] < voi[symbol][-3])
            below_cost_base = data.vwap < latest_cost_basis[symbol]
            rsi_limit = 79 if not morning_rush else 85
            to_sell = False
            partial_sell = False
            limit_sell = False
            sell_reasons = []
            if data.close <= stop_prices[symbol]:
                to_sell = True
                sell_reasons.append("stopped")
            elif (below_cost_base and round(macd_val, 2) < 0
                  and rsi[-1] < rsi[-2]
                  and round(macd[-1], 2) < round(macd[-2], 2)):
                to_sell = True
                sell_reasons.append(
                    "below cost & macd negative & RSI trending down")
            elif data.close >= target_prices[symbol] and macd[-1] <= 0:
                to_sell = True
                sell_reasons.append("above target & macd negative")
            elif rsi[-1] >= rsi_limit:
                to_sell = True
                sell_reasons.append("rsi max, cool-down for 5 minutes")
                cool_down[symbol] = now.replace(
                    second=0, microsecond=0) + timedelta(minutes=5)
            elif bail_out:
                to_sell = True
                sell_reasons.append("bail")
            elif bail_on_rsi:
                to_sell = True
                sell_reasons.append("bail_on_rsi")
            elif scalp:
                partial_sell = True
                to_sell = True
                sell_reasons.append("scale-out")
            elif (symbol in voi and voi[symbol][-1] < 0
                  and voi[symbol][-1] < voi[symbol][-2] < voi[symbol][-3]):
                tlog(f"[{now}] {symbol} bail-on-voi identified but not acted")
                # to_sell = True
                # sell_reasons.append("bail on voi")
                # limit_sell = True

            # Check patterns
            if debug:
                tlog(
                    f"[{now}] {symbol} min-2 = {minute_history.iloc[-2].open} {minute_history.iloc[-2].high}, {minute_history.iloc[-2].low}, {minute_history.iloc[-2].close}"
                )

            if self.check_patterns:
                if (now - buy_time[symbol] > timedelta(minutes=1)
                        and gravestone_doji(
                            prev_min.open,
                            prev_min.high,
                            prev_min.low,
                            prev_min.close,
                        ) and data.close < data.open and data.vwap < data.open
                        and prev_min.close > latest_cost_basis[symbol]):
                    tlog(
                        f"[{now}]{symbol} identified gravestone doji {prev_min.open, prev_min.high, prev_min.low, prev_min.close}"
                    )
                    to_sell = True
                    partial_sell = False
                    sell_reasons.append("gravestone_doji")

                elif (now - buy_time[symbol] > timedelta(minutes=2)
                      and spinning_top_bearish_followup(
                          (
                              minute_history.iloc[-3].open,
                              minute_history.iloc[-3].high,
                              minute_history.iloc[-3].low,
                              minute_history.iloc[-3].close,
                          ),
                          (
                              minute_history.iloc[-2].open,
                              minute_history.iloc[-2].high,
                              minute_history.iloc[-2].low,
                              minute_history.iloc[-2].close,
                          ),
                      ) and data.vwap < data.open):
                    tlog(
                        f"[{now}] {symbol} identified bullish spinning top followed by bearish candle {(minute_history.iloc[-3].open, minute_history.iloc[-3].high,minute_history.iloc[-3].low, minute_history.iloc[-3].close), (minute_history.iloc[-2].open, minute_history.iloc[-2].high, minute_history.iloc[-2].low, minute_history.iloc[-2].close)}"
                    )
                    to_sell = True
                    partial_sell = False
                    sell_reasons.append("bull_spinning_top_bearish_followup")

                elif (now - buy_time[symbol] > timedelta(minutes=2)
                      and bullish_candle_followed_by_dragonfly(
                          (
                              minute_history.iloc[-3].open,
                              minute_history.iloc[-3].high,
                              minute_history.iloc[-3].low,
                              minute_history.iloc[-3].close,
                          ),
                          (
                              minute_history.iloc[-2].open,
                              minute_history.iloc[-2].high,
                              minute_history.iloc[-2].low,
                              minute_history.iloc[-2].close,
                          ),
                      ) and data.vwap < data.open):
                    tlog(
                        f"[{now}] {symbol} identified bullish candle followed by dragonfly candle {(minute_history.iloc[-3].open, minute_history.iloc[-3].high,minute_history.iloc[-3].low, minute_history.iloc[-3].close), (minute_history.iloc[-2].open, minute_history.iloc[-2].high, minute_history.iloc[-2].low, minute_history.iloc[-2].close)}"
                    )
                    to_sell = True
                    partial_sell = False
                    sell_reasons.append("bullish_candle_followed_by_dragonfly")
                elif (now - buy_time[symbol] > timedelta(minutes=2)
                      and morning_rush and bearish_candle(
                          minute_history.iloc[-3].open,
                          minute_history.iloc[-3].high,
                          minute_history.iloc[-3].low,
                          minute_history.iloc[-3].close,
                      ) and bearish_candle(
                          minute_history.iloc[-2].open,
                          minute_history.iloc[-2].high,
                          minute_history.iloc[-2].low,
                          minute_history.iloc[-2].close,
                      ) and minute_history.iloc[-2].close <
                      minute_history.iloc[-3].close):
                    tlog(
                        f"[{now}] {symbol} identified two consequtive bullish candles during morning rush{(minute_history.iloc[-3].open, minute_history.iloc[-3].high, minute_history.iloc[-3].low, minute_history.iloc[-3].close), (minute_history.iloc[-2].open, minute_history.iloc[-2].high, minute_history.iloc[-2].low, minute_history.iloc[-2].close)}"
                    )
                    # to_sell = True
                    # partial_sell = False
                    # sell_reasons.append("two_bears_in_the_morning")

            if to_sell:
                close = minute_history["close"][-10:].dropna()
                open_ = minute_history["open"][-10:].dropna()
                high = minute_history["high"][-10:].dropna()
                low = minute_history["low"][-10:].dropna()

                patterns: Dict[ts, Dict[int, List[str]]] = {}
                pattern_functions = talib.get_function_groups()[
                    "Pattern Recognition"]
                for pattern in pattern_functions:
                    pattern_value = getattr(talib, pattern)(
                        open_, high, low, close)
                    result = pattern_value.to_numpy().nonzero()
                    if result[0].size > 0:
                        for timestamp, value in pattern_value.iloc[
                                result].items():
                            t = ts(timestamp)
                            if t not in patterns:
                                patterns[t] = {}
                            if value not in patterns[t]:
                                patterns[t][value] = [pattern]
                            else:
                                patterns[t][value].append(pattern)
                candle_s = Series(patterns)
                candle_s = candle_s.sort_index()

                sell_indicators[symbol] = {
                    "rsi": rsi[-3:].tolist(),
                    "movement": movement,
                    "sell_macd": macd[-5:].tolist(),
                    "sell_macd_signal": macd_signal[-5:].tolist(),
                    "vwap": data.vwap,
                    "avg": data.average,
                    "reasons":
                    " AND ".join([str(elem) for elem in sell_reasons]),
                    "patterns":
                    candle_s.to_json() if candle_s.size > 0 else None,
                }

                if symbol in voi:
                    sell_indicators[symbol]["voi"] = voi[symbol]

                if not partial_sell:

                    if not limit_sell:
                        tlog(
                            f"[{self.name}] Submitting sell for {position} shares of {symbol} at market"
                        )
                        return (
                            True,
                            {
                                "side": "sell",
                                "qty": str(position),
                                "type": "market",
                            },
                        )
                    else:
                        tlog(
                            f"[{self.name}] Submitting sell for {position} shares of {symbol} at {data.close}"
                        )
                        return (
                            True,
                            {
                                "side": "sell",
                                "qty": str(position),
                                "type": "limit",
                                "limit_price": str(data.close),
                            },
                        )
                else:
                    qty = int(position / 2) if position > 1 else 1
                    tlog(
                        f"[{self.name}] Submitting sell for {str(qty)} shares of {symbol} at limit of {data.close}"
                    )
                    return (
                        True,
                        {
                            "side": "sell",
                            "qty": str(qty),
                            "type": "limit",
                            "limit_price": str(data.close),
                        },
                    )

        return False, {}
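The pattern scan above is worth a standalone look: it loops over every TA-Lib "Pattern Recognition" function, collects the timestamps where a pattern fires, and serializes the result with Series.to_json for the sell_indicators log. A minimal sketch of that loop, assuming TA-Lib is installed and using synthetic OHLC data (all names here are illustrative, not part of the strategy):

    import numpy as np
    import pandas as pd
    import talib

    # Synthetic minute bars, for illustration only.
    idx = pd.date_range("2021-01-04 09:30", periods=10, freq="min")
    rng = np.random.default_rng(0)
    close = pd.Series(100 + rng.standard_normal(10).cumsum(), index=idx)
    open_ = close.shift(1).fillna(100.0)
    high = pd.concat([open_, close], axis=1).max(axis=1) + 0.05
    low = pd.concat([open_, close], axis=1).min(axis=1) - 0.05

    patterns: dict = {}
    for name in talib.get_function_groups()["Pattern Recognition"]:
        # Each CDL* function returns 0 where nothing fired and +-100
        # flags where the candle pattern was detected.
        values = getattr(talib, name)(
            open_.to_numpy(), high.to_numpy(), low.to_numpy(), close.to_numpy()
        )
        for i in np.flatnonzero(values):
            patterns.setdefault(idx[i], {}).setdefault(
                int(values[i]), []).append(name)

    candle_s = pd.Series(patterns).sort_index()
    print(candle_s.to_json() if candle_s.size > 0 else None)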
Example #35
0
 def test_series_from_json_precise_float(self):
     s = Series([4.56, 4.56, 4.56])
     result = read_json(s.to_json(), typ='series', precise_float=True)
     assert_series_equal(result, s, check_index_type=False)
Example #36
0
 def test_series_from_json_precise_float(self):
     s = Series([4.56, 4.56, 4.56])
     result = read_json(s.to_json(), typ="series", precise_float=True)
     assert_series_equal(result, s, check_index_type=False)
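The precise_float flag is the point of these two tests: read_json's default fast double parser can be off by a few ULPs on values like 4.56, while precise_float=True switches to the slower but exact strtod conversion. A quick round-trip check, under that assumption:

    import pandas as pd

    s = pd.Series([4.56, 4.56, 4.56])
    fast = pd.read_json(s.to_json(), typ="series")
    exact = pd.read_json(s.to_json(), typ="series", precise_float=True)
    # fast may differ in the last bits; exact should match bit-for-bit
    print((fast.values == s.values).all(), (exact.values == s.values).all())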
Example #37
0
class GwSeries:
    """ 
    Groundwater heads time series management

    Methods
    -------
    from_dinogws(filepath)
        read heads series from dinoloket csv file

    from_json(filepath)
        read heads series from json file

    to_json(filepath)
        write heads series to json file

    to_csv(filepath)
        write heads series to csv file

    heads(ref,freq)
        return timeseries with measured heads

    name()
        return heads series name

    locprops(minimal)
        return location properties, optional minimal=True

    tubeprops(last)
        return tube properties, optional only last row (last=True)

    stats(ref)
        return descriptive statistics

    describe()
        return selection of properties and descriptive statistics

    gxg()
        return table with gxg (descriptive statistics for groundwater
        series used in the Netherlands)

    Examples
    --------
    To create a GwSeries object from file:
    >>> gw = GwSeries.from_dinogws(<filepath to dinocsv file>)
    >>> gw = GwSeries.from_json(<filepath to acequia json file>)

    To get GwSeries properties:
    >>> GwSeries.heads()
    >>> GwSeries.locprops()
    >>> GwSeries.name()
    >>> GwSeries.heads1428()

    To export GwSeries data:
    >>> GwSeries.to_csv(<filename>)
    >>> GwSeries.to_json(<filename>)

    Notes
    -----
    Head measurements are stored in meters relative to well top
    and served in several units: mwelltop, mref, msurfacelevel.

    Valid row names for locprops and column names for tubeprops are
    stored in class variables locprops_names and tubeprops_names:
    >>> print(acequia.GwSeries.locprops_names)
    >>> print(acequia.GwSeries.tubeprops_names)
    """
    _headprops_names = [
        'headdatetime','headmp','headnote','remarks'
        ]

    _locprops_names = [
        'locname','filname','alias','xcr','ycr','height_datum',
        'grid_reference'
        ]

    _locprops_minimal = [
        'locname','filname','alias','xcr','ycr'
        ]

    _tubeprops_names = [
        'startdate','mplevel','filtop','filbot','surfacedate',
        'surfacelevel'
        ]

    _tubeprops_minimal = [
        'mplevel','surfacelevel','filbot',
        ]

    _tubeprops_numcols = [
        'mplevel','surfacelevel','filtop','filbot'
        ]

    _reflevels = [
        'datum','surface','mp',
        ]

    _mapping_dinoheadprops = OrderedDict([
        ("headdatetime","peildatum"),("headmp","standcmmp"),
        ("headnote","bijzonderheid"),("remarks","opmerking"),
        ])

    _mapping_dinolocprops = OrderedDict([
        ('locname','nitgcode'),
        ('filname','filter'),
        ('alias','tnocode'),
        ('xcr','xcoor'),
        ('ycr','ycoor'),
        ('height_datum','NAP'),
        ('grid_reference','RD'),
        ])

    _mapping_dinotubeprops = OrderedDict([
        ('startdate','startdatum'),
        ('mplevel','mpcmnap'),
        ('filtop','filtopcmnap'),
        ('filbot','filbotcmnap'),
        ('surfacedate','mvdatum'),
        ('surfacelevel','mvcmnap'),
        ])


    def __repr__(self):
        return (f'{self.name()} (n={len(self._heads)})')


    def __init__(self,heads=None,locprops=None,tubeprops=None):
        """
        Parameters
        ----------
        heads : pandas.DataFrame
            timeseries with groundwater heads
        locprops : pandas.Series
            series with location properties
        tubeprops : pandas.DataFrame
            dataframe with tube properties in time

        """

        if locprops is None:
            self._locprops = Series(index=self._locprops_names,
                dtype='object')
        elif isinstance(locprops,pd.Series):
            self._locprops = locprops
        else:
            raise TypeError(f'locprops is not a pandas Series but '
                f'{type(locprops)}')

        if tubeprops is None:
            self._tubeprops = DataFrame(columns=self._tubeprops_names)
        elif isinstance(tubeprops,pd.DataFrame):
            self._tubeprops = tubeprops
        else:
            raise TypeError(f'tubeprops is not a pandas DataFrame '
                f'but {type(tubeprops)}')

        if heads is None: 
            self._heads = pd.DataFrame(columns=self._headprops_names)
            self._heads_original = self._heads.copy()

        elif isinstance(heads,pd.DataFrame):
            self._heads = heads
            self._heads_original = self._heads.copy()

        else:
            raise TypeError(f'heads is not a pandas DataFrame but {type(heads)}')


    def _validate_reference(self,ref):

        if ref is None:
            return self._reflevels[0]

        if ref not in self._reflevels:
            warnings.warn((f'Reference level {ref} is not valid. '
                f'Reference level {self._reflevels[0]} is assumed.'),
                stacklevel=2)
            return self._reflevels[0]

        return ref


    @classmethod
    def from_dinogws(cls,filepath):
        """ 
        Read tno dinoloket csvfile with groundwater measurements and return data as gwseries object

        Parameters
        ----------
        filepath : str
            path to dinocsv file with measured groundwater heads

        Returns
        -------
        result : GwSeries object

        Example
        -------
        gw = GwSeries.from_dinogws(<filepath>)
        jsondict = gw.to_json(<filepath>)
        gw.from_json(<filepath>)
        
        """

        # read dinofile to DinoGws object
        dn = DinoGws(filepath=filepath)

        dinoprops = list(dn.header().columns)

        # get location metadata
        locprops = Series(index=cls._locprops_names,dtype='object')

        for propname in cls._locprops_names:
            dinoprop = cls._mapping_dinolocprops[propname]
            if dinoprop in dinoprops:
                locprops[propname] = dn.header().at[0,dinoprop]

        locprops['grid_reference'] = 'RD'
        locprops['height_datum'] = 'mNAP'
        locprops = Series(locprops)

        # get piezometer metadata
        tubeprops = DataFrame(columns=cls._tubeprops_names)
        for prop in cls._tubeprops_names:
            dinoprop = cls._mapping_dinotubeprops[prop]
            if dinoprop in dinoprops:
                tubeprops[prop] = dn.header()[dinoprop]

        for col in cls._tubeprops_numcols:
            tubeprops[col] = pd.to_numeric(tubeprops[col],
                errors='coerce')/100.

        # get head measurements
        dinoprops = list(dn.headdata().columns)
        heads = DataFrame(columns=cls._headprops_names)
        for prop in cls._headprops_names:
            dinoprop = cls._mapping_dinoheadprops[prop]
            if dinoprop in dinoprops:
                heads[prop] = dn.headdata()[dinoprop]
        heads['headmp'] = heads['headmp']/100.

        return cls(heads=heads,locprops=locprops,tubeprops=tubeprops)

    @classmethod
    def from_json(cls,filepath=None):
        """ Read gwseries object from json file """

        with open(filepath) as json_file:
            json_dict = json.load(json_file)

        locprops = DataFrame.from_dict(json_dict['locprops'],
                                        orient='index')
        locprops = Series(data=locprops[0],index=locprops.index,
                                        name='locprops')

        tubeprops = DataFrame.from_dict(json_dict['tubeprops'],
                    orient='index')
        tubeprops.name = 'tubeprops'
        tubeprops['startdate'] = pd.to_datetime(tubeprops['startdate'])

        heads = DataFrame.from_dict(json_dict['heads'],orient='index')

        return cls(heads=heads,locprops=locprops,tubeprops=tubeprops)


    def to_json(self,dirpath=None):
        """ 
        Create json string from GwWeries object and optionally write 
        to file.

        Parameters
        ----------
        dirpath : str
           directory json file will be written to
           (if dirpath is not given no textfile will be written and 
           only OrderedDict with valid JSON wil be retruned)

        Returns
        -------
        OrderedDict with valid json

        Notes
        -----
        If no value for dirpath is given, a valid json string is
        returned. If a value for dirpath is given, nothing is returned 
        and a json file will be written to a file with the series name
        in dirpath.
        """
        json_locprops = json.loads(
            self._locprops.to_json()
            )
        json_tubeprops = json.loads(
            self._tubeprops.to_json(date_format='iso',orient='index')
            )
        json_heads = json.loads(
            self._heads.to_json(date_format='iso',orient='index',
            date_unit='s')
            )

        json_dict = OrderedDict()
        json_dict['locprops'] = json_locprops
        json_dict['tubeprops'] = json_tubeprops
        json_dict['heads'] = json_heads
        json_formatted_str = json.dumps(json_dict, indent=2)

        if isinstance(dirpath,str):
            try:
                filepath = os.path.join(dirpath,self.name()+'.json')
                with open(filepath,"w") as f:
                    f.write(json_formatted_str)
            except FileNotFoundError:
                print("Filepath {} does not exist".format(filepath))
                return None

        return json_dict
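    # Usage sketch (hypothetical paths): to_json writes
    # '<dirpath>/<series name>.json', which from_json can read back.
    #
    #   gw = GwSeries.from_dinogws('data/B28A0475002_1.csv')
    #   gw.to_json('output/')                  # also returns the json dict
    #   gw2 = GwSeries.from_json('output/' + gw.name() + '.json')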


    def to_csv(self,path=None,ref=None):
        """
        Export groundwater heads series to simple csv file.

        Parameters
        ----------
        path : str
            csv file will be exported to path; if path is a directory,
            the series will be saved as <path><name>.csv; if path is
            not given, the file is saved in the present directory.

        Examples
        --------
        Save heads to simple csv:

        >>> aq.GwSeries.to_csv(<dirpath>)

        Read back with standard Pandas:

        >>> pd.read_csv(<filepath>, parse_dates=['date'],
        ...     index_col='date', squeeze=True)

        """
        self._csvpath = path
        self._csvref = ref

        if self._csvpath is None:
            self._csvpath = f'{self.name()}.csv'

        if os.path.isdir(self._csvpath):
            filename = f'{self.name()}.csv'
            self._csvpath = os.path.join(self._csvpath,filename)

        try:
            sr = self.heads(ref=self._csvref)
            sr.to_csv(self._csvpath,index=True,index_label='datetime',
                header=['head'])
        except FileNotFoundError:
            msg = f'Filepath {self._csvpath} not found'
            warnings.warn(msg)
            result = None
        else:
            result = sr

        return result


    def name(self):
        """ Return groundwater series name """
        location = str(self._locprops['locname'])
        filname = str(self._locprops['filname'])
        return location + '_' + filname


    def locname(self):
        """Return series location name"""
        srname = self.locprops().index[0]
        locname = self.locprops().loc[srname,'locname']
        return locname


    def locprops(self,minimal=False):
        """
        Return location properties as pd.DataFrame.

        Parameters
        ----------
        minimal : bool, default=False
            return only minimal selection of columns

        Returns
        -------
        pd.DataFrame
        """

        sr = self._locprops
        sr.name = self.name()
        locprops = DataFrame(sr).T
        if minimal:
            locprops = locprops[self._locprops_minimal]
        return locprops


    def tubeprops(self,last=False,minimal=False):
        """
        Return tube properties.

        Parameters
        ----------
        last : boolean, default False
            return only the last row of tube properties without date

        minimal : bool, default False
            return only minimal selection of columns

        Returns
        -------
        pd.DataFrame
        """
        tps = DataFrame(self._tubeprops[self._tubeprops_names]).copy()
        tps['startdate'] = pd.to_datetime(tps['startdate'], errors='coerce')

        if minimal:
            tps = tps[self._tubeprops_minimal]

        tps.insert(0,'series',self.name())

        if last:
            tps = tps.tail(1)

        return tps

    def surface(self):
        """Return last known surface level"""
        return self._tubeprops['surfacelevel'].iat[-1]


    def heads(self,ref='datum',freq=None):
        """ 
        Return groundwater head measurements.

        Parameters
        ----------
        ref  : {'mp','datum','surface'}, default 'datum'
               chosen reference for groundwater heads
        freq : None or any valid Pandas Offset Alias
               determines frequency of time series

        Returns
        -------
        result : pandas time Series

        Notes
        -----
        Parameter 'ref' determines the reference level for the heads:
        'mp'   : relative to well top ('measurement point')
        'datum': relative to chosen level (would be meter +NAP for the
                 Netherlands, or TAW for Belgium)
        'surface' : relative to surface level (meter min maaiveld)

        Parameter 'freq' determines the time series frequency by setting
        the Pandas Offset Alias. When 'freq' is None, no resampling is
        applied.
        Valid values for 'freq' would be:
        'H' : hourly frequency
        'D' : calendar day frequency
        'W' : weekly frequency
        'M' : month end frequency
        'MS': month start frequency
        'Q' : quarter end frequency
        'QS': quarter start frequency
        'A' : year end frequency
        'AS': year start frequency
        """
        if not ref:
            ref = 'datum'

        if ref not in self._reflevels:
            msg = f'{ref} is not a valid reference level name'
            raise ValueError(msg)

        heads = self._heads[['headdatetime','headmp']]
        heads = heads.set_index('headdatetime', drop=True)
        heads.name = self.name()

        headscopy = heads.copy()

        if ref in ['datum','surface']:

            srvals = headscopy.values.flatten()
            srvals2 = headscopy.values.flatten()
            for index,props in self._tubeprops.iterrows():

                mask = heads.index>=props['startdate']
                if ref=='datum':

                    if not pd.api.types.is_number(props['mplevel']):
                        msg = f'{self.name()} tubeprops mplevel is None.'
                        warnings.warn(msg)
                        mp = 0
                    else:
                        mp = props['mplevel']

                    srvals2 = np.where(mask,mp-srvals,srvals2)


                if ref=='surface':
                    if not pd.isnull(props['surfacelevel']):
                        surfref = round(props['mplevel']-props['surfacelevel'],2)
                        srvals2 = np.where(mask,srvals-surfref,srvals2)

                    else:
                        msg = f'{self.name()} surface level is None'
                        warnings.warn(msg)
                        srvals2 = np.where(mask,srvals,srvals)

            heads = Series(srvals2,index=heads.index)
            heads.name = self.name()

        if freq is not None:
            heads = heads.resample(freq).mean()
            heads.index = heads.index.tz_localize(None)

        return heads
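    # Usage sketch (hypothetical): heads relative to surface level,
    # resampled to weekly means with the Pandas offset alias 'W'.
    #
    #   weekly = gw.heads(ref='surface', freq='W')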


    def timestats(self,ref=None):
        """
        Return descriptive statistics for heads time series.

        Parameters
        ----------
        ref  : {'mp','datum','surface'}, default 'datum'
            reference level for groundwater heads

        Returns
        -------
        pd.Series
        """

        self._ref = self._validate_reference(ref)

        ts = self.heads(ref=ref)
        gwstats = GwTimeStats(ts)

        return gwstats.stats()


    def describe(self,ref='datum',gxg=False,minimal=True):
        """
        Return selection of properties and descriptive statistics.

        Parameters
        ----------
        ref  : {'mp','datum','surface'}, default 'datum'
            chosen reference level for groundwater heads
        gxg : bool, default False
            add GxG descriptive statistics
        minimal : bool, default True
            return minimal selection of statistics

        Returns
        -------
        pd.Series
        """

        self._ref = self._validate_reference(ref)

        srlist = []

        srlist.append(self._locprops[self._locprops_minimal])

        tubeprops = (self._tubeprops[self._tubeprops_minimal].tail(1
            ).iloc[0,:])
        srlist.append(tubeprops)

        timestats = self.timestats(ref=self._ref)
        srlist.append(timestats)

        if gxg==True:
            gxg = self.gxg(ref=self._ref,minimal=minimal)
            srlist.append(gxg)

        sr = pd.concat(srlist,axis=0)
        sr.name = self.name()

        if self._ref=='surface':

            for key in ['filbot']:
                sr[key] = (sr['surfacelevel']-sr[key])*100

            for key in ['mean','median','q05','q95','dq0595']:
                sr[key] = sr[key]*100

            for key in ['filbot','mean','median','q05','q95',
                'dq0595','n1428']:
                if not np.isnan(sr[key]):
                    sr[key] = math.floor(sr[key])

        """
        locprops = self.locprops(minimal=minimal)
        tubeprops = self.tubeprops(last=True,minimal=True)
        tubeprops = tubeprops.set_index('series')

        tbl = pd.merge(locprops,tubeprops,left_index=True,right_index=True,how='outer')

        srstats = self.timestats(ref=ref)
        tbl = pd.merge(tbl,srstats,left_index=True,right_index=True,how='outer')

        if gxg==True:
            gxg = self.gxg()
            tbl = pd.merge(tbl,gxg,left_index=True,right_index=True,how='left')
        """
        return sr


    def tubeprops_changes(self,proptype='mplevel'):
        """
        Return timeseries with tubeprops changes.

        Parameters
        ----------
        proptype : {'mplevel','surfacelevel','filtop','filbot'}, default 'mplevel'
            Tube property that is shown in the reference change graph.

        Returns
        -------
        pd.Series
        """
        if proptype in ['mplevel','surfacelevel','filtop','filbot']:
            mps = self._tubeprops[proptype].values
        else:
            mps = self._tubeprops['mplevel']
            warnings.warn(f'{proptype} is not a valid tube property, '
                'showing mplevel instead.', stacklevel=2)

        idx = pd.to_datetime(self._tubeprops['startdate'])
        sr1 = Series(mps,index=idx)

        idx = sr1.index[1:]-pd.Timedelta(days=1)
        lastdate = self.heads().index[-1]
        idx = idx.append(pd.to_datetime([lastdate]))
        sr2 = Series(mps,index=idx)

        sr12 = pd.concat([sr1,sr2]).sort_index()
        sr12 = sr12 - sr12[0]

        return sr12


    def plotheads(self,proptype=None,filename=None):
        """
        Plot groundwater heads time series.

        Parameters
        ----------
        proptype : {'mplevel','surfacelevel','filtop','filbot'}
            Tube property that is shown in the reference change graph.
            If not given, no reference plot will be shown.
        """
        if proptype in ['mplevel','surfacelevel','filtop','filbot']:
            mps = self.tubeprops_changes(proptype=proptype)
            self.headsplot = PlotHeads(ts=[self.heads()],mps=mps)

        if proptype is None:
            self.headsplot = PlotHeads(ts=[self.heads()])

        if filename is not None:
            self.headsplot.save(filename)


    def gxg(self,ref='datum',minimal=True,name=True):
        """
        Return tables with descriptive statistics GxG and xG.

        Parameters
        ----------
        ref : {'datum','surface'}, default 'datum'
            Reference level for gxg statistics.
        minimal : bool, default True
            Return minimal set of statistics.
        name : bool, default True
            Include series name in multiindex of xg.

        Returns
        -------
        gxg : pd.Series
            gxg descriptive statistics
        """
        if not hasattr(self,'_gxg'):
            self._gxg = GxgStats(self)            

        gxg = self._gxg.gxg(reference=ref,minimal=minimal)
        
        return gxg


    def xg(self,ref='datum',name=True):
        """
        Return tables with xg descriptive statistics for each year.

        Parameters
        ----------
        ref : {'datum','surface'}, default 'datum'
            Reference level for gxg statistics.
        name : bool, default True
            Include series name in multiindex of xg.

        Returns
        -------
        xg : pd.DataFrame
        """
        if not hasattr(self,'_gxg'):
            self._gxg = GxgStats(self)            

        xg = self._gxg.xg(reference=ref,name=name)

        return xg
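A hedged end-to-end sketch of how this class round-trips through JSON (the input file path is hypothetical; only methods defined above are used):

    # Assumes a dinoloket csv export is available at this path.
    gw = GwSeries.from_dinogws('data/B28A0475002_1.csv')

    json_dict = gw.to_json('output/')   # returns dict, writes output/<name>.json
    gw2 = GwSeries.from_json(f'output/{gw.name()}.json')

    # The head values should survive the round trip (datetimes are
    # serialized at second precision, see date_unit='s' in to_json).
    print(gw.heads().equals(gw2.heads()))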
Example #38
0
 def test_series_with_dtype(self):
     # GH 21986
     s = Series([4.56, 4.56, 4.56])
     result = read_json(s.to_json(), typ='series', dtype=np.int64)
     expected = Series([4] * 3)
     assert_series_equal(result, expected)
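As Example #38 (GH 21986) shows, passing dtype to read_json coerces the parsed values after reading, so 4.56 becomes 4 by truncation rather than rounding. The same coercion in isolation:

    import numpy as np
    import pandas as pd

    s = pd.Series([4.56, 4.56, 4.56])
    result = pd.read_json(s.to_json(), typ="series", dtype=np.int64)
    print(result.tolist())  # [4, 4, 4] -- truncated toward zero, not rounded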
Example #39
0
    def test_typ(self):

        s = Series(lrange(6), index=["a", "b", "c", "d", "e", "f"], dtype="int64")
        result = read_json(s.to_json(), typ=None)
        assert_series_equal(result, s)