Example #1
from loguru import logger
from ta_scanner.data.data import load_and_cache
from ta_scanner.data.ib import IbDataFetcher
import datetime

# IbDataFetcher pulls historical bars from Interactive Brokers
ib_data_fetcher = IbDataFetcher()

# symbols = ["/MES", "/MNQ", "/MGC"]
symbols = ["/MES"]

get_last_n_days = 5
sd = datetime.date.today() - datetime.timedelta(days=get_last_n_days)
ed = datetime.date.today() - datetime.timedelta(days=1)

for symbol in symbols:
    params = dict(start_date=sd, end_date=ed, use_rth=False, groupby_minutes=1)
    # fetch from IB (or the local cache) and return a DataFrame of 1-minute bars
    df = load_and_cache(symbol, ib_data_fetcher, **params)

logger.info("Done")
Example #2
# assumes instrument_symbol, ib_data_fetcher, and a gen_params helper
# (returning a params dict like Example #1's) are defined elsewhere in the file
def fetch_data():
    sd = datetime.date(2020, 7, 1)
    ed = datetime.date(2020, 8, 15)
    load_and_cache(instrument_symbol, ib_data_fetcher, **gen_params(sd, ed))
Example #3
from datetime import date
from loguru import logger
from ta_scanner.data.data import load_and_cache
from ta_scanner.data.ib import IbDataFetcher

ib_data_fetcher = IbDataFetcher()

symbols = ["SPY", "QQQ", "AAPL"]

for symbol in symbols:
    df = load_and_cache(
        symbol,
        ib_data_fetcher,
        start_date=date(2020, 6, 1),
        end_date=date(2020, 6, 4),
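        # use_rth=False includes bars outside regular trading hours;
        # groupby_minutes=15 aggregates the raw bars into 15-minute bars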
        use_rth=False,
        groupby_minutes=15,
    )
    logger.info(f"{symbol} - {len(df.index)}")
Example #4
import datetime
import sys

from loguru import logger

from ta_scanner.data.data import load_and_cache
# assumed module for these two helpers, based on the neighboring imports
from ta_scanner.data.data import db_data_fetch_between, aggregate_bars
from ta_scanner.data.ib import IbDataFetcher
from ta_scanner.signals import Signal
from ta_scanner.filters import FilterCumsum, FilterOptions, FilterNames
from ta_scanner.reports import BasicReport
from ta_scanner.models import gen_engine

# mute the noisy data debug statements
logger.remove()
logger.add(sys.stderr, level="INFO")

ib_data_fetcher = IbDataFetcher()

symbol = "/MGC"

df_original = load_and_cache(
    symbol,
    ib_data_fetcher,
    start_date=datetime.date(2020, 8, 1),
    end_date=datetime.date(2020, 8, 23),
)


def query_data(engine, symbol, sd, ed, groupby_minutes):
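    # fetch cached bars for `symbol` between sd and ed from the db,
    # index them by timestamp, and resample into groupby_minutes-sized bars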
    df = db_data_fetch_between(engine, symbol, sd, ed)
    df.set_index("ts", inplace=True)
    df = aggregate_bars(df, groupby_minutes=groupby_minutes)
    df["ts"] = df.index
    return df


engine = gen_engine()
sd, ed = datetime.date(2020, 8, 1), datetime.date(2020, 8, 23)
interval = 1
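# a plausible next step (not shown in the source): re-query the cached bars,
# resampled to `interval`-minute bars, via the helper defined above
df_resampled = query_data(engine, symbol, sd, ed, groupby_minutes=interval)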
Example #5
from datetime import datetime, date
from loguru import logger

from ta_scanner.data.data import load_and_cache
from ta_scanner.data.ib import IbDataFetcher
from ta_scanner.indicators import IndicatorSmaCrossover, IndicatorParams
from ta_scanner.signals import Signal
from ta_scanner.filters import FilterCumsum, FilterOptions, FilterNames
from ta_scanner.reports import BasicReport


ib_data_fetcher = IbDataFetcher()
df = load_and_cache(
    "/MES",
    ib_data_fetcher,
    start_date=date(2020, 7, 10),
    end_date=date(2020, 7, 20),
    use_rth=True,
)

# store signals in this field
field_name = "moving_avg_cross"

# Moving Average Crossover, 30 vs 60
indicator_params = {
    IndicatorParams.fast_sma: 30,
    IndicatorParams.slow_sma: 60,
}

# init the indicator with the chosen params
indicator_sma_cross = IndicatorSmaCrossover(
    field_name=field_name,
    params=indicator_params,
)
Example #6
import sys

from datetime import date
from loguru import logger

from ta_scanner.data.data import load_and_cache
from ta_scanner.data.ib import IbDataFetcher
from ta_scanner.indicators import IndicatorSmaCrossover, IndicatorParams
from ta_scanner.signals import Signal
from ta_scanner.filters import FilterCumsum, FilterOptions, FilterNames
from ta_scanner.reports import BasicReport

# mute the noisy data debug statements
logger.remove()
logger.add(sys.stderr, level="INFO")

# get SPY data
ib_data_fetcher = IbDataFetcher()
df_original = load_and_cache(
    "SPY",
    ib_data_fetcher,
    start_date=date(2020, 7, 1),
    end_date=date(2020, 7, 20),
    use_rth=True,
)

# store signals in this field
field_name = "moving_avg_cross"
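# downstream filter/report steps are expected to write per-signal pnl here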
result_field_name = f"{field_name}_pnl"


def run_cross(fast_sma: int, slow_sma: int):
    df = df_original.copy()

    indicator_sma_cross = IndicatorSmaCrossover(
        field_name=field_name,
        params={
            IndicatorParams.fast_sma: fast_sma,
            IndicatorParams.slow_sma: slow_sma,
        },
    )