def test_time_slices_1():
    dt_from = DateTime(2020, 8, 28)
    dt_to = DateTime(2020, 8, 28, 10, 12, 10)
    slices = [(slice_from.format(), slice_to.format())
              for slice_from, slice_to in tool.time_slices(dt_from, dt_to, tool.INTERVAL_1H, 14)]
    assert slices == [('2020-08-28 01:00:00', '2020-08-28 10:12:10')]
def test_time_slices_0():
    dt_from = DateTime(2020, 8, 28)
    dt_to = DateTime(2020, 8, 28, 10, 12, 10)
    slices = [(slice_from.format(), slice_to.format())
              for slice_from, slice_to in tool.time_slices(dt_from, dt_to, tool.INTERVAL_1D, 14)]
    assert slices == []
def find_gaps():
    # weekend gap: Friday close (2020-07-10) vs Monday open (2020-07-13) for every NYSE stock
    dt_close = DateTime(2020, 7, 10, 21, 0, 0)
    dt_open = DateTime(2020, 7, 13, 14, 31, 0)
    pprint(f'datetime: {dt_open.isoformat()}')
    with requests.Session() as session:
        session.auth = AUTH
        url_nyse = f'{URL_EXCHANGES}/NYSE'
        response = session.get(url=url_nyse)
        assert response.status_code == 200, response.text
        stock = [s['id'] for s in response.json() if '/' not in s['id']]
        gaps = {}
        for symbol in stock:
            result_close = query_stock_by_time(session, symbol, dt_close)
            if result_close:
                result_open = query_stock_by_time(session, symbol, dt_open)
                if result_open:
                    price_close = float(result_close['close'])
                    price_open = float(result_open['open'])
                    gap = abs(price_open - price_close) / price_close
                    gaps[symbol] = gap
                    print(f'{symbol}: {gap}')
    print('RESULTS')
    pprint(sorted(gaps.items(), key=operator.itemgetter(1), reverse=True))
def run_scheduled_tasks():
    for task in TASKS:
        utc_now = DateTime.now()
        task.next_run = utc_now.replace(hour=task.hour, minute=task.minute, second=0, microsecond=0)
        if task.next_run < utc_now:
            task.next_run += task.interval

    while flow.wait(60.0):
        for task in list(TASKS):  # iterate over a copy; one-shot tasks are removed below
            if task.next_run < DateTime.now():
                try:
                    LOG.info(f'Task: {task.function.__name__} has started')
                    task.running = True
                    task.function()
                except Exception:
                    LOG.fatal(f'Task: {task.function.__name__} should always catch exceptions')
                finally:
                    LOG.info(f'Task: {task.function.__name__} has finished')
                    if 'interval' in task:
                        task.next_run += task.interval
                        task.last_run = DateTime.now()
                        task.running = False
                    else:
                        TASKS.remove(task)
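# Hedged sketch (not part of the original module): run_scheduled_tasks() reads `hour`, `minute`,
# `interval` and `function` from every entry in TASKS, so a recurring task could be registered
# roughly like this; the 08:00 UTC schedule and the task_daily callable are assumptions.
TASKS.append(Clazz(hour=8, minute=0, interval=timedelta(days=1), function=task_daily, running=False))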
def read_data_source(engine: Any) -> List:
    symbol = 'SIE.XETRA'
    interval = tool.INTERVAL_1D
    dt_from = DateTime(2020, 8, 3)
    dt_to = DateTime(2020, 8, 11)
    with engine.Session() as session:
        return session.series(symbol, dt_from, dt_to, interval)
def execute():
    symbol = 'ABC.NYSE'
    interval = tool.INTERVAL_1D
    begin = DateTime(2014, 11, 18).to_timestamp()
    end = DateTime.now().to_timestamp()
    show_vma(symbol, interval, begin, end)
    show_valid_swings(symbol, interval, begin, end)
    show_candidate_swings(symbol, interval, begin, end)
    show_strategy(symbol, interval, begin, end)
def test_time_slices():
    dt_from = DateTime(2020, 2, 1)
    dt_to = DateTime(2020, 2, 4)
    slices = [(start.format(), stop.format())
              for start, stop in tool.time_slices(dt_from, dt_to, tool.INTERVAL_1H, 21)]
    assert slices == [('2020-02-01 01:00:00', '2020-02-01 21:00:00'),
                      ('2020-02-01 22:00:00', '2020-02-02 18:00:00'),
                      ('2020-02-02 19:00:00', '2020-02-03 15:00:00'),
                      ('2020-02-03 16:00:00', '2020-02-04 00:00:00')]
def test_series():
    interval = tool.INTERVAL_1D
    dt_from = DateTime(2020, 2, 2)
    dt_to = DateTime(2020, 2, 4)
    with yahoo.Session() as session:
        series = session.series('XOM.NYSE', dt_from, dt_to, interval)
    closing_prices = [(DateTime.from_timestamp(s.timestamp).format(), s.close, s.volume) for s in series]
    assert closing_prices == [('2020-02-03 00:00:00', 60.73, 27397300),
                              ('2020-02-04 00:00:00', 59.970001, 31922100)]
def test_series():
    interval = tool.INTERVAL_1D
    dt_from = DateTime(2020, 2, 2)
    dt_to = DateTime(2020, 2, 4)
    with stooq.Session({'WSE': [interval]}) as session:
        series = session.series('KGH.WSE', dt_from, dt_to, interval)
    closing_prices = [(DateTime.from_timestamp(s.timestamp).format(), s.close, s.volume) for s in series]
    assert closing_prices == [('2020-02-03 00:00:00', 92.6, 484464),
                              ('2020-02-04 00:00:00', 96.44, 708829)]
def time_range(self) -> Dict[str, Clazz]:
    query = '''
        FOR datum IN @@collection
            COLLECT symbol = datum.symbol
            AGGREGATE ts_from = MIN(datum.timestamp), ts_to = MAX(datum.timestamp)
            RETURN {symbol, ts_from, ts_to}
    '''
    records = self.tnx_db.aql.execute(query, bind_vars={'@collection': self.name})
    return {
        r['symbol']: Clazz(dt_from=DateTime.from_timestamp(r['ts_from']),
                           dt_to=DateTime.from_timestamp(r['ts_to']))
        for r in records
    }
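# Hedged illustration (symbol and dates are made up): the AQL query above collapses the collection
# to one MIN/MAX timestamp pair per symbol, so time_range() yields something like
# {'XOM.NYSE': Clazz(dt_from=DateTime(2020, 2, 3), dt_to=DateTime(2020, 8, 28)), ...};
# callers such as security_update_by_interval() resume their updates from dt_to, e.g.:
# dt_from = time_range.get(symbol, Clazz(dt_to=config.datetime_from())).dt_to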
def test_series():
    symbol = 'XOM.NYSE'
    interval = tool.INTERVAL_1H
    dt_from = DateTime(2020, 2, 2, 23)
    dt_to = DateTime(2020, 2, 4, 19)
    time_ranges = []
    closing_prices = []
    with exante.Session() as session:
        for slice_from, slice_to in tool.time_slices(dt_from, dt_to, interval, 15):
            series = session.series(symbol, slice_from, slice_to, interval)
            time_ranges += [(slice_from.format(), slice_to.format())]
            closing_prices += [(DateTime.from_timestamp(s.timestamp).format(), s.close, s.volume)
                               for s in series]
    assert time_ranges == [('2020-02-03 00:00:00', '2020-02-03 14:00:00'),
                           ('2020-02-03 15:00:00', '2020-02-04 05:00:00'),
                           ('2020-02-04 06:00:00', '2020-02-04 19:00:00')]
    assert closing_prices == [('2020-02-03 09:00:00', 61.53, 11021),
                              ('2020-02-03 10:00:00', 61.55, 700),
                              ('2020-02-03 11:00:00', 61.61, 7100),
                              ('2020-02-03 12:00:00', 61.69, 9668),
                              ('2020-02-03 13:00:00', 61.53, 64526),
                              ('2020-02-03 14:00:00', 61.011, 4168908),
                              ('2020-02-03 15:00:00', 60.97, 3386157),
                              ('2020-02-03 16:00:00', 60.715, 3654783),
                              ('2020-02-03 17:00:00', 60.665, 2188951),
                              ('2020-02-03 18:00:00', 60.565, 2081716),
                              ('2020-02-03 19:00:00', 60.44, 1960787),
                              ('2020-02-03 20:00:00', 60.72, 4368450),
                              ('2020-02-03 21:00:00', 60.65, 2567304),
                              ('2020-02-03 22:00:00', 60.65, 2450),
                              ('2020-02-03 23:00:00', 60.68, 4489),
                              ('2020-02-04 00:00:00', 60.6899, 1866),
                              ('2020-02-04 09:00:00', 61.63, 1649),
                              ('2020-02-04 10:00:00', 61.79, 4457),
                              ('2020-02-04 11:00:00', 61.57, 3021),
                              ('2020-02-04 12:00:00', 61.59, 19546),
                              ('2020-02-04 13:00:00', 61.61, 124873),
                              ('2020-02-04 14:00:00', 61.0894, 2839556),
                              ('2020-02-04 15:00:00', 60.805, 3411238),
                              ('2020-02-04 16:00:00', 60.715, 2728185),
                              ('2020-02-04 17:00:00', 60.37, 3187416),
                              ('2020-02-04 18:00:00', 60.13, 2743043),
                              ('2020-02-04 19:00:00', 60.06, 2958939)]
def series(self, symbol: str, dt_from: DateTime, dt_to: DateTime, interval: timedelta) -> List[Clazz]:
    short_symbol, exchange = tool.symbol_split(symbol)
    if exchange not in ('NYSE', 'NASDAQ'):
        return []

    flow.wait(max(0.6 - config.loop_delay(), 0))  # sleep at least 0.6s in total, including the loop delay in the flow module

    yahoo_symbol = short_symbol.replace('.', '-')
    yahoo_from = dt_from.to_timestamp()
    yahoo_to = (dt_to + interval).to_timestamp()
    yahoo_interval = interval_to_yahoo(interval)

    url = SYMBOL_URL.format(symbol=yahoo_symbol)
    params = dict(period1=yahoo_from, period2=yahoo_to, interval=yahoo_interval, events='history', crumb=self.crumb)
    response = self.get(url, params=params)
    if response.status_code in (400, 404):
        return []
    assert response.status_code == 200, f'url: {url} params: {params} reply: {response.text}'

    data = [datum_from_yahoo(item, symbol) for item in csv.DictReader(StringIO(response.text))]
    if len(data) == 2 and data[0].timestamp == data[1].timestamp:
        data = data[0:1]  # yahoo sometimes returns 2 rows with duplicated values
    return [datum for datum in data if datum and yahoo_from <= datum.timestamp <= yahoo_to]
def convert_custom_data(date: str, datum: Dict) -> List[Dict]:
    result = []
    for k, v in datum.items():
        if k in ('_id', '_key', '_rev', 'timestamp'):
            pass
        elif k == 'open_timestamp':
            result += [{'date': date, 'key': 'open-date', 'value': DateTime.from_timestamp(v).format()}]
        elif k == 'open_long':
            result += [{'date': date, 'key': 'open-long', 'value': round(v, FLOAT_PRECISION)}]
        elif isinstance(v, float):
            result += [{'date': date, 'key': k, 'value': round(v, FLOAT_PRECISION)}]
        elif isinstance(v, dict):
            result += convert_custom_data(date, v)
        else:
            result += [{'date': date, 'key': k, 'value': v}]
    return result
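# Hedged illustration (the input and FLOAT_PRECISION == 4 are assumptions): convert_custom_data()
# flattens a nested document into date/key/value rows and skips the Arango bookkeeping fields, e.g.
# convert_custom_data('2020-08-28', {'_key': 'x', 'open_long': 12.345678, 'stats': {'note': 'ok'}})
# -> [{'date': '2020-08-28', 'key': 'open-long', 'value': 12.3457},
#     {'date': '2020-08-28', 'key': 'note', 'value': 'ok'}]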
def security_update_by_interval(engine: Any, interval: timedelta):
    LOG.info(f'>> {security_update.__name__} source: {tool.source_name(engine, interval)}')
    default_range = Clazz(dt_to=config.datetime_from())
    with engine.SecuritySeries(interval) as security_series:
        time_range = security_series.time_range()
    LOG.debug(f'Time range entries: {len(time_range)}')
    for exchange_name in config.EXCHANGES:
        with store.ExchangeSeries() as exchange_series:
            securities = exchange_series[exchange_name]
        with engine.Session() as session:
            with flow.Progress(f'security-update: {exchange_name}', securities) as progress:
                for security in securities:
                    progress(security.symbol)
                    dt_from = time_range.get(security.symbol, default_range).dt_to
                    dt_to = tool.last_session(exchange_name, interval, DateTime.now())
                    for slice_from, slice_to in tool.time_slices(dt_from, dt_to, interval, 4096):
                        time_series = session.series(security.symbol, slice_from, slice_to, interval)
                        with engine.SecuritySeries(interval, editable=True) as security_series:
                            security_series += time_series
        LOG.info(f'Securities: {len(securities)} updated in the exchange: {exchange_name}')
def series(self, symbol: str, dt_from: DateTime, dt_to: DateTime, interval: timedelta) -> List[Clazz]:
    short_symbol, exchange = tool.symbol_split(symbol)
    ts_from = dt_from.to_timestamp()
    ts_to = dt_to.to_timestamp()
    zip_path = stooq_zip_path(interval, exchange)
    with zipfile.ZipFile(zip_path) as zip_io:
        relative_path = find_symbol_path(short_symbol, interval, exchange, zip_io.namelist())
        if relative_path:
            content = zip_io.read(relative_path).decode('utf-8')
            data = [datum_from_stooq(dt, symbol) for dt in csv.DictReader(StringIO(content))]
            return [datum for datum in data if datum and ts_from <= datum.timestamp <= ts_to]
    return []
def security_verify(engine: Any):
    interval = tool.INTERVAL_1D
    source_name = tool.source_name(engine, interval)
    health_name = tool.health_name(engine, interval)
    LOG.info(f'>> {security_verify.__name__} source: {source_name}')
    with engine.SecuritySeries(interval) as security_series:
        time_range = security_series.time_range()
    with store.File(health_name, editable=True) as health:
        for exchange_name in config.EXCHANGES:
            health[exchange_name] = {}
            last_session = tool.last_session(exchange_name, interval, DateTime.now())
            with store.ExchangeSeries() as exchange_series:
                securities = exchange_series[exchange_name]
            entries = []
            with flow.Progress(health_name, securities) as progress:
                for security in securities:
                    progress(security.symbol)
                    result = Clazz()
                    symbol_range = time_range.get(security.symbol)
                    if symbol_range:
                        overlap, missing = time_series_verify(engine,
                                                              security.symbol,
                                                              symbol_range.dt_from,
                                                              last_session,
                                                              interval)
                        if overlap:
                            result.overlap = overlap
                        if missing:
                            result.missing = missing
                            if len(missing) > config.HEALTH_MISSING_LIMIT:
                                result.message = f'The missing limit reached: {len(missing)}'
                            if last_session in missing:
                                result.message = f'The last session {symbol_range.dt_to} < {last_session}'
                    else:
                        result.message = 'There is no time series for this symbol'
                    if result:
                        short_symbol, _ = tool.symbol_split(security.symbol)
                        health[exchange_name][short_symbol] = result
                    entry = security.entry(health_name)
                    entry[health_name] = 'message' not in result
                    entries += [entry]
            with store.ExchangeSeries(editable=True) as exchange_series:
                exchange_series |= entries
            LOG.info(f'Securities: {len(securities)} verified in the exchange: {exchange_name}')
def schedule_endpoint():
    if request.method == 'POST':
        LOG.info(f'Scheduling function {task_daily.__name__}')
        task = Clazz(next_run=DateTime.now().replace(microsecond=0), running=False, function=task_daily)
        TASKS.append(task)

    LOG.info('Listing threads and tasks')
    threads = [{'name': thread.name, 'daemon': thread.daemon, 'alive': thread.is_alive()}
               for thread in threading.enumerate()]
    content = dict(threads=threads, tasks=TASKS)
    return json.dumps(content, option=json.OPT_INDENT_2, default=tool.json_default).decode('utf-8')
def time_series_verify(engine: Any,
                       symbol: str,
                       dt_from: DateTime,
                       dt_to: DateTime,
                       interval: timedelta) -> Tuple[List[DateTime], List[DateTime]]:
    with engine.SecuritySeries(interval) as security_series:
        time_series = security_series[symbol]
    _, exchange = tool.symbol_split(symbol)
    dates = [DateTime.from_timestamp(s.timestamp) for s in time_series]
    holidays = tool.exchange_holidays(exchange)

    overlap = [d for d in dates if d in holidays]

    missing = []
    start = dt_from
    while start <= dt_to:
        if start.weekday() in (0, 1, 2, 3, 4):  # Monday to Friday
            if not (start in dates or start in holidays):
                missing.append(start)
        start += interval
    return overlap, missing
def format_date(timestamp, step=0):
    if step is None:
        return DateTime.from_timestamp(timestamp).strftime('%Y-%m-%d %H:%M:%S')
    else:
        return DateTime.from_timestamp(timestamp).strftime('%Y-%m-%d')
def execute():
    symbol = 'ABC.NYSE'
    begin = DateTime(2017, 11, 1).to_timestamp()
    end = DateTime.now().to_timestamp()
    show_swings(symbol, begin, end)
def test_last_workday():
    dt = DateTime(2020, 1, 21)
    assert tool.last_workday('NYSE', dt) == DateTime(2020, 1, 17)
def test_last_sunday():
    dt = DateTime(2020, 9, 1)
    assert tool.last_sunday(dt) == DateTime(2020, 8, 30)
def cb_series_graph(d_from, engine_name, interval_name, env_name, score, selected_security, xaxis_range):
    if d_from and engine_name and interval_name and env_name and selected_security:
        interval = {'1h': tool.INTERVAL_1H, '1d': tool.INTERVAL_1D}[interval_name]  # TODO: support 1h
        symbol = selected_security['symbol']
        if score:
            description = f"{selected_security['description']} [{100 * swings.limit_ratio(score)}%]"
        else:
            description = selected_security['description']

        # engine series
        engine = ENGINES[engine_name]
        dt_from = DateTime.parse_date(d_from)
        with engine.SecuritySeries(interval, dt_from=dt_from) as security_series:
            time_series = security_series[symbol]

        if time_series:
            # customize static data
            fields = ('timestamp', 'vma-50', 'vma-100', 'vma-200', 'volume', env_name)
            ts, vma_50, vma_100, vma_200, volume, trade = tool.transpose(time_series, fields)
            dts = [datetime.utcfromtimestamp(t) for t in ts]

            # customize swing data
            score_series = swings.display(time_series, score)
            ts, score_values = tool.transpose(score_series, ('timestamp', 'value'))
            score_dts = [datetime.utcfromtimestamp(t) for t in ts]
            score_custom = [s.to_dict() for s in score_series]

            # customize trade data
            fields = ('long', 'short', 'profit')
            long, short, profit = tool.transpose(trade, fields)
            trade_custom = [t.to_dict() for t in trade]

            # create traces
            score_trace = go.Scatter(x=score_dts, y=score_values, customdata=score_custom, name='Score',
                                     mode='lines', line=dict(width=1.0), showlegend=False)
            vma_50_trace = go.Scattergl(x=dts, y=vma_50, name='VMA-50', mode='lines',
                                        line=dict(width=1.0), visible='legendonly')
            vma_100_trace = go.Scattergl(x=dts, y=vma_100, name='VMA-100', mode='lines',
                                         line=dict(width=1.0))
            vma_200_trace = go.Scattergl(x=dts, y=vma_200, name='VMA-200', mode='lines',
                                         line=dict(width=1.0), visible='legendonly')
            long_trace = go.Scattergl(x=dts, y=long, customdata=trade_custom, name='Long',
                                      mode='markers', visible='legendonly')
            short_trace = go.Scattergl(x=dts, y=short, customdata=trade_custom, name='Short',
                                       mode='markers', visible='legendonly')
            profit_trace = go.Scattergl(x=dts, y=profit, customdata=trade_custom, name='Profit',
                                        mode='lines+markers', connectgaps=True, line=dict(width=1.0),
                                        showlegend=False)
            volume_trace = go.Bar(x=dts, y=volume, name='Volume', showlegend=False)

            # create a graph
            figure = make_subplots(rows=3, cols=1, shared_xaxes=True, vertical_spacing=0.03,
                                   row_heights=[0.6, 0.2, 0.2])
            figure.add_trace(score_trace, row=1, col=1)
            figure.add_trace(vma_50_trace, row=1, col=1)
            figure.add_trace(vma_100_trace, row=1, col=1)
            figure.add_trace(vma_200_trace, row=1, col=1)
            figure.add_trace(long_trace, row=1, col=1)
            figure.add_trace(short_trace, row=1, col=1)
            figure.add_trace(profit_trace, row=2, col=1)
            figure.add_trace(volume_trace, row=3, col=1)
            figure.update_xaxes(tickformat=XAXIS_FORMAT)
            figure.update_layout(margin=GRAPH_MARGIN, legend=LEGEND, title_text=description,
                                 hovermode='closest', xaxis=SPIKE, yaxis=SPIKE, plot_bgcolor=PLOT_BGCOLOR)
            figure.update_xaxes(range=xaxis_range or [dts[0], dts[-1]])
            return figure

    return go.Figure(data=[], layout=dict(margin=GRAPH_MARGIN, plot_bgcolor=PLOT_BGCOLOR))
                               value=config.EXCHANGES[0],
                               placeholder='exchange',
                               className='choice',
                               persistence=True)
engine_choice = dcc.Dropdown(id='engine-choice',
                             options=[{'label': s, 'value': s} for s in ENGINES],
                             value=list(ENGINES.keys())[0],
                             placeholder='engine',
                             className='choice',
                             persistence=True)
datetime_from = DateTime.now() - timedelta(days=3 * 365)
date_choice = dcc.DatePickerSingle(id='date-from',
                                   date=datetime_from.date(),
                                   display_format=DATE_PICKER_FORMAT,
                                   className='choice',
                                   persistence=True)
score_choice = dcc.Input(id='score-choice',
                         type='number',
                         min=1, max=8, step=1,
                         value=3,
                         className='score',
                         persistence=True)
def timestamp_from_stooq(date: str):
    dt = DateTime.strptime(date, DT_FORMAT)
    return dt.replace(tzinfo=timezone.utc).to_timestamp()
def format_date(timestamp, step):
    return DateTime.from_timestamp(timestamp).strftime('%Y-%m-%d')
from datetime import datetime

from iexfinance.stocks import get_historical_data

from src import config, store
from src.tool import DateTime

TOKEN = config.iex_auth()

start = DateTime(2019, 1, 1)
end = datetime.today()
stocks = ['AAPL', 'AMZN']
data = get_historical_data(stocks, start, end, token=TOKEN)

with store.File('iex_test', editable=True) as series:
    series.update(data)
def datetime_to_exante(dt: DateTime) -> int:
    return dt.to_timestamp() * 1000
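# Hedged example (epoch value computed for UTC): DateTime(2020, 2, 3).to_timestamp() == 1580688000,
# so datetime_to_exante(DateTime(2020, 2, 3)) == 1580688000000; the Exante API is assumed to take
# timestamps in epoch milliseconds, hence the multiplication by 1000.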