def add_wave_prediction_data_to_wave_records(self, symbol='', ts_start=0, ts_end=0, scheduled_job=False):
    """
    Compute and persist the Fibonacci prediction data for all wave records
    (daily and intraday) that do not carry prediction data yet.
    :param symbol: restrict the processing to this symbol ('' = all symbols)
    :param ts_start: lower timestamp bound for the wave selection (0 = open)
    :param ts_end: upper timestamp bound for the wave selection (0 = open)
    :param scheduled_job: when True the progress is written to the scheduler log
    """
    print('Add prediction data to wave records..')
    access_layer = AccessLayer4Wave(self.db_stock)
    df_to_process = access_layer.get_wave_data_without_prediction_data(symbol, ts_start, ts_end)
    processed = 0
    for _, row in df_to_process.iterrows():
        entity = WaveEntity(row)
        prediction_dict = self.sys_config.fibonacci_predictor.get_prediction_as_dict(
            entity.data_list_for_prediction_x_data)
        # FibonacciWave maps the raw prediction dict onto the wave table columns
        update_dict = {}
        FibonacciWave.update_dict_by_prediction_dict(prediction_dict, update_dict)
        access_layer.update_record(entity.row_id, update_dict)
        processed += 1
    if scheduled_job:
        self.sys_config.file_log.log_scheduler_process(
            log_message='Updated: {}/{}'.format(processed, df_to_process.shape[0]),
            process='Update wave records',
            process_step='add_wave_prediction')
def __init__(self, db_stock: StockDatabase):
    """Set up the wave access layer and the index configuration; the retrospection state starts empty."""
    index_list = INDICES.get_index_list_for_waves_tab()
    self._access_layer_wave = AccessLayer4Wave(db_stock)
    self._index_list = index_list
    self._index_config = IndexConfiguration(db_stock, index_list)
    # retrospection state - filled later by the retrospection handling
    self._tick_key_list_for_retrospection = []
    self._period_for_retrospection = ''
    self._aggregation_for_retrospection = 1
def __init__(self, db_stock: StockDatabase):
    """Initialize the fetch parameters, the index configuration and the caches for the wave data."""
    self._access_layer_wave = AccessLayer4Wave(db_stock)
    # fetch parameters - set by the concrete fetch calls later on
    self._period_list = []
    self._period = ''
    self._aggregation = 0
    self._limit = 0
    self._wave_types = []
    index_list = INDICES.get_index_list_for_waves_tab()
    self._index_list = index_list
    self._index_config = IndexConfiguration(db_stock, index_list)
    self._seconds_unit = 0
    self._data_last_fetched_time_stamp = 0
    # either date_str from Daily or timestamp for Intraday
    self._tick_key_list_for_retrospection = []
    # the next two dicts contain themselves dictionaries: {date: number, ..} resp. {ts: number, ..}
    self._daily_index_wave_type_number_dict = {}
    self._intraday_index_wave_type_number_dict = {}
    self._df_wave_dict = {}
def __init__(self, app: Dash, sys_config: SystemConfiguration):
    """Create the database tab: drop-down handler, access layers and grid table defaults."""
    MyPatternDashBaseTab.__init__(self, app, sys_config)
    self.sys_config = sys_config
    self._db = self.sys_config.db_stock
    self._dd_handler = DBTabDropDownHandler()
    # one access layer per table shown in this tab
    db_stock = self.sys_config.db_stock
    self._access_layer_process = AccessLayer4Process(db_stock)
    self._access_layer_wave = AccessLayer4Wave(db_stock)
    self._access_layer_pattern = AccessLayer4Pattern(db_stock)
    # current selection state of the tab
    self._selected_table_name = STBL.PROCESS
    self._selected_limit = 10
    self._selected_date_range = DTRG.TODAY
    self._where_clause_entered = ''
    self._db_table = self._db.get_table_by_name(self._selected_table_name)
    # grid table objects - created when data is fetched
    self._df_for_grid_table = None
    self._db_grid_table = None
def __init__(self, app: Dash, sys_config: SystemConfiguration):
    """Create the log tab: adjusted sys_config copy, access layers, log data frames and table defaults."""
    MyPatternDashBaseTab.__init__(self, app, sys_config)
    self.sys_config = self.__get_adjusted_sys_config_copy__(sys_config)
    self.exchange_config = self.sys_config.exchange_config
    self._dd_handler = LogTabDropDownHandler()
    self._log_data_frame_dict = {}
    # access layers used to read the log-related tables
    db_stock = self.sys_config.db_stock
    self._access_layer_process = AccessLayer4Process(db_stock)
    self._access_layer_wave = AccessLayer4Wave(db_stock)
    self._access_layer_pattern = AccessLayer4Pattern(db_stock)
    self.__fill_log_data_frame_dict__()
    # current selection state of the tab
    self._selected_log_type = LOGT.MESSAGE_LOG
    self._selected_process = ''
    self._selected_process_step = ''
    self._selected_date_range = DTRG.TODAY
    self._refresh_button_clicks = 0
    self._log_table = LogTable(self._log_data_frame_dict, self._selected_log_type, self._selected_date_range)
def delete_inconsistent_wave_records(self, scheduled_job=False) -> int:
    """
    We have to take care that the waves are reasonable to avoid problems
    with the Fibonacci predictions - inconsistent wave records are removed.
    :param scheduled_job: when True the result is written to the scheduler log
    :return: number of deleted wave records
    """
    print('Delete inconsistent wave records..')
    access_layer = AccessLayer4Wave(self.db_stock)
    df_inconsistent = access_layer.get_inconsistent_waves_as_data_frame()
    deleted = 0
    for _, row in df_inconsistent.iterrows():
        access_layer.delete_record_by_rowid(row[DC.ROWID])
        deleted += 1
    if scheduled_job:
        self.sys_config.file_log.log_scheduler_process(
            log_message='Deleted: {}/{}'.format(deleted, df_inconsistent.shape[0]),
            process='Update wave records',
            process_step='delete_inconsistent_wave_records')
    return deleted
def __init__(self, db_stock: StockDatabase, compression_classes=4):
    """
    Build the Fibonacci predictor: load the waves used for training, derive
    the train data and fit the classifier/regression models per label.
    :param db_stock: stock database the access layers work on
    :param compression_classes: number of classes used for compression (default: 4)
    """
    print('Initializing FibonacciPredictor')
    self._db_stock = db_stock
    self._compression_classes = compression_classes
    # labels the models are trained for
    self._labels = [DC.WAVE_END_FLAG, DC.WAVE_MAX_RETR_TS_PCT, DC.WAVE_MAX_RETR_PCT]
    self._access_layer_wave = AccessLayer4Wave(db_stock)
    self._access_layer_metric = AccessLayer4Metric(db_stock)
    self._access_layer_prediction = AccessLayerPrediction(db_stock)
    self._classifier_model_dict = LearningMachineFactory.get_classifier_learning_machine_dict(False)
    self._regression_model_dict = LearningMachineFactory.get_regression_learning_machine_dict()
    # train data - has to be available before the models can be trained
    self._df_waves_for_prediction = self.__get_df_waves_for_prediction__()
    self._x_train = self.__get_x_train__()
    self._y_train_dict = self.__get_y_train_dict__()
    self._trained_model_dict = {}
    self.__fill_trained_model_dict__()
    # best model per label - selected from the trained models above
    self._best_trained_classifier_model_dict = {}
    self._best_trained_regression_model_dict = {}
    self.__fill_best_classifier_model_dict__()
    self.__fill_best_regression_model_dict__()
"""
Description: This module contains test cases for access layer methods
Author: Josef Sertl
Copyright: SERTL Analytics, https://sertl-analytics.com
Date: 2018-05-14
"""
from pattern_database.stock_access_layer import AccessLayer4Wave, AccessLayer4Stock
from pattern_index_configuration import IndexConfiguration
from pattern_database.stock_database import StockDatabase
from pandas import ExcelWriter
from sertl_analytics.constants.pattern_constants import DC, INDICES

# Module-level fixtures: one database connection and the wave access layer.
# NOTE: these run at import time and open a real database connection.
db_stock = StockDatabase()
access_layer_wave = AccessLayer4Wave(db_stock)
# Commented-out experiment: export the multiple-wave data frame to Excel.
# access_layer_stock = AccessLayer4Stock(db_stock)
# writer = ExcelWriter('PythonExport.xlsx')
#
# df_return = access_layer_wave.get_wave_data_frame_with_corresponding_daily_wave_data(
#     period_id=1, days=30)
# print(df_return.describe())
# df_return.to_excel(writer,'MultipleWave')
# writer.save()

index_config = IndexConfiguration(db_stock, [INDICES.CRYPTO_CCY, INDICES.DOW_JONES, INDICES.NASDAQ100])


def get_index_for_symbol(ticker_id: str):
    # Print the ticker and return the index it belongs to according to index_config.
    print(ticker_id)
    return index_config.get_index_for_symbol(ticker_id)
def add_wave_end_data_to_wave_records(self, symbol='', ts_start=0, ts_end=0, scheduled_job=False) -> int:
    """
    Some attributes have to be calculated AFTER the waves completes:
    DC.WAVE_END_FLAG, DC.WAVE_MAX_RETR_PCT, DC.WAVE_MAX_RETR_TS_PCT
    :param symbol: restrict the processing to this symbol ('' = all symbols)
    :param ts_start: lower timestamp bound for the wave selection (0 = open)
    :param ts_end: upper timestamp bound for the wave selection (0 = open)
    :param scheduled_job: when True the progress is written to the scheduler log
    :return: number of updated wave records
    """
    print('Add wave end data to wave records..')
    access_layer_wave = AccessLayer4Wave(self.db_stock)
    access_layer_stocks = AccessLayer4Stock(self.db_stock)
    df_wave_to_process = access_layer_wave.get_wave_data_frame_without_end_data(
        symbol, ts_start, ts_end)
    # the stock data for wave completion starts at the later of the two bounds
    ts_start_stocks = ts_start if ts_end == 0 else ts_end
    df_stocks = access_layer_stocks.get_stocks_data_frame_for_wave_completing(
        symbol=symbol, ts_start=ts_start_stocks)
    tolerance = 0.01  # 1% tolerance when checking whether the wave end value was broken
    update_counter = 0
    ts_now = MyDate.time_stamp_now()
    for wave_index, wave_row in df_wave_to_process.iterrows():
        wave_entity = WaveEntity(wave_row)
        # NOTE(review): the ts_start/ts_end parameters are shadowed here by the
        # wave's own check period - harmless since ts_start_stocks was computed above
        ts_start, ts_end = wave_entity.get_ts_start_end_for_check_period()
        ts_start_dt = MyDate.get_date_from_epoch_seconds(ts_start)
        ts_end_dt = MyDate.get_date_from_epoch_seconds(ts_end)  # NOTE(review): unused - debugging leftover?
        wave_end_value, wave_value_range = wave_entity.wave_end_value, wave_entity.wave_value_range
        if ts_end < ts_now:  # only waves whose check period is already over can be completed
            wave_end_reached = 1
            max_retracement = 0
            max_retracement_ts = ts_start
            max_retracement_dt = ts_start_dt  # NOTE(review): never read after the loop
            # all stock ticks of this symbol within the wave's check period
            df_filtered_stocks = df_stocks[np.logical_and(
                df_stocks[DC.SYMBOL] == wave_entity.symbol,
                np.logical_and(df_stocks[DC.TIMESTAMP] > ts_start,
                               df_stocks[DC.TIMESTAMP] <= ts_end))]
            for index_stocks, row_stocks in df_filtered_stocks.iterrows():
                row_low, row_high, row_ts = row_stocks[DC.LOW], row_stocks[
                    DC.HIGH], row_stocks[DC.TIMESTAMP]
                if wave_entity.wave_type == FD.ASC:
                    # ascending wave: a high clearly above the wave end value voids the wave end
                    if wave_end_value < row_high * (1 - tolerance):
                        wave_end_reached = 0
                        break
                    else:
                        retracement = wave_end_value - row_low
                else:
                    # descending wave: a low clearly below the wave end value voids the wave end
                    if wave_end_value > row_low * (1 + tolerance):
                        wave_end_reached = 0
                        break
                    else:
                        retracement = row_high - wave_end_value
                if retracement > max_retracement:
                    max_retracement = round(retracement, 2)
                    max_retracement_ts = row_ts
                    max_retracement_dt = row_stocks[DC.DATE]
            # maximal retracement expressed in % of the wave's value range resp. time range
            max_retracement_pct = round(
                max_retracement / wave_value_range * 100, 2)
            max_retracement_ts_pct = round(
                (max_retracement_ts - ts_start) / wave_entity.wave_ts_range * 100, 2)
            data_dict = {
                DC.WAVE_END_FLAG: wave_end_reached,
                DC.WAVE_MAX_RETR_PCT: max_retracement_pct,
                DC.WAVE_MAX_RETR_TS_PCT: max_retracement_ts_pct
            }
            access_layer_wave.update_record(wave_entity.row_id, data_dict)
            update_counter += 1
    if scheduled_job:
        self.sys_config.file_log.log_scheduler_process(
            log_message='Updated: {}/{}'.format(
                update_counter, df_wave_to_process.shape[0]),
            process='Update wave records',
            process_step='add_wave_end_data')
    return update_counter
"""
# NOTE(review): the """ above most likely closes a module docstring whose opening
# lies before this chunk - confirm against the full file.
from pattern_database.stock_database import StockDatabase
from pattern_database.stock_access_layer import AccessLayer4Wave
from sertl_analytics.constants.pattern_constants import PRD, INDICES, LOGT, LOGDC, DC, PRDC, PSC, WPDT
from sertl_analytics.mydates import MyDate
import pandas as pd


def change_to_date_str(value):
    # Keep only the first 10 characters, i.e. the 'YYYY-MM-DD' part of a date string.
    # NOTE(review): defined but not used within this chunk.
    return str(value)[:10]


# Export script: write the grouped wave-peak data to an Excel workbook,
# one sheet per period/aggregation combination.
writer = pd.ExcelWriter('Wave_Grouped.xlsx', engine='xlsxwriter')
db_stock = StockDatabase()
access_layer = AccessLayer4Wave(db_stock)
print(MyDate.time_stamp_now())  # timing: before the full wave fetch
df = access_layer.get_all_as_data_frame()  # NOTE(review): df only fetched for timing, not used below
print(MyDate.time_stamp_now())  # timing: after the full wave fetch
offset_date = '2018-05-01'
df_grouped_direct_daily = access_layer.get_grouped_by_for_wave_peak_plotting(WPDT.DAILY_DATE, 1, offset_date)
df_grouped_direct_daily.to_excel(writer, sheet_name='Daily')
print(MyDate.time_stamp_now())
df_grouped_direct_intraday = access_layer.get_grouped_by_for_wave_peak_plotting(WPDT.INTRADAY_DATE, 1, offset_date)
df_grouped_direct_intraday.to_excel(writer, sheet_name='Intraday Date')
df_grouped_direct_intraday = access_layer.get_grouped_by_for_wave_peak_plotting(WPDT.INTRADAY_15_TS, 15)
df_grouped_direct_intraday.to_excel(writer, sheet_name='INTRADAY_15_TS')
df_grouped_direct_intraday = access_layer.get_grouped_by_for_wave_peak_plotting(WPDT.INTRADAY_30_TS, 30)
df_grouped_direct_intraday.to_excel(writer, sheet_name='INTRADAY_30_TS')
writer.save()