def init_tick_key_list_for_retrospection(self, retrospective_ticks: int):
    # build one date key per seconds-unit step, starting retrospective_ticks days back
    offset_time_stamp = MyDate.get_offset_timestamp(days=-retrospective_ticks)
    self._tick_key_list_for_retrospection = []
    while offset_time_stamp < MyDate.time_stamp_now():
        offset_time_stamp += self._seconds_unit
        date_str = str(MyDate.get_date_from_epoch_seconds(offset_time_stamp))
        self._tick_key_list_for_retrospection.append(date_str)

def get_status_message(old_message='') -> str:
    if MyHttpClient.do_we_have_internet_connection():
        return 'OK ({})'.format(MyDate.get_time_from_datetime())
    if 'NOT' in old_message:  # keep the time stamp of the first failure
        return old_message
    return 'NOT ok since {}'.format(MyDate.get_time_from_datetime())

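# Behavior sketch (illustrative values only): while offline, an existing 'NOT' message
# is returned unchanged, so the time stamp of the first failure is preserved:
#   get_status_message()                         -> 'NOT ok since 09:15:00'
#   get_status_message('NOT ok since 09:15:00')  -> 'NOT ok since 09:15:00' (unchanged)
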
def __init_loop_list_for_ticker__(self):
    self._loop_list_ticker = LoopList4Dictionaries()
    if self.sys_config.from_db and self._excel_file_with_test_data is not None:
        for ind, rows in self._df_test_data.iterrows():
            if self._loop_list_ticker.counter >= self._excel_file_with_test_data.row_start:
                self.sys_config.config.ticker_dic[rows[PSC.TICKER]] = rows[PSC.NAME]
                start_date = MyDate.get_date_from_datetime(rows[PSC.BEGIN_PREVIOUS])
                date_end = MyDate.get_date_from_datetime(
                    rows[PSC.END] + timedelta(days=rows[PSC.T_NEEDED] + 20))
                and_clause = "Date BETWEEN '{}' AND '{}'".format(start_date, date_end)
                self._loop_list_ticker.append({LL.TICKER: rows[PSC.TICKER], LL.AND_CLAUSE: and_clause})
            if self._loop_list_ticker.counter >= self._excel_file_with_test_data.row_end:
                break
    else:
        for ticker in self.sys_config.ticker_dict:
            and_clause = self.sys_config.data_provider.and_clause
            self._loop_list_ticker.append({LL.TICKER: ticker, LL.AND_CLAUSE: and_clause})

def __init_by_pattern_id__(self, pattern_id_str: str):
    # Example: 1_1_1_AAPL_12_2015-12-03_00:00_2016-01-07_00:00
    parts = pattern_id_str.split('_')
    self.equity_type_id = int(parts[0])
    self.period_id = int(parts[1])
    self.period = PRD.get_period(self.period_id)
    self.aggregation = int(parts[2])
    self.ticker_id = parts[3]
    self.pattern_type_id = int(parts[4])
    self.pattern_type = FT.get_pattern_type(self.pattern_type_id)
    self.range_start_date_str = parts[5]
    self.range_start_time_str = parts[6]
    self.range_end_date_str = parts[7]
    self.range_end_time_str = parts[8]
    self.date_start = MyDate.get_datetime_object(self.range_start_date_str)
    self.date_end = MyDate.get_datetime_object(self.range_end_date_str)
    self.range_length = (self.date_end - self.date_start).days
    self.range_id = '{}_{}_{}_{}'.format(
        self.range_start_date_str, self.range_start_time_str,
        self.range_end_date_str, self.range_end_time_str)
    self.ts_from = MyDate.get_epoch_seconds_from_datetime(parts[5])
    self.ts_to = MyDate.get_epoch_seconds_from_datetime(parts[7])
    self.range_length_days = int((self.ts_to - self.ts_from) / (60 * 60 * 24))
    self.and_clause = self.__get_search_and_clause__()

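# Worked example for the ID format above (values taken from the example comment):
#   '1_1_1_AAPL_12_2015-12-03_00:00_2016-01-07_00:00'.split('_')
#   -> ['1', '1', '1', 'AAPL', '12', '2015-12-03', '00:00', '2016-01-07', '00:00']
# i.e. equity_type_id=1, period_id=1, aggregation=1, ticker_id='AAPL',
# pattern_type_id=12, range 2015-12-03 00:00 .. 2016-01-07 00:00 (range_length_days=35).
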
def __run_job__(self):
    process_step = 'End'
    self.__init_run_parameters__()
    print("{}: Thread started at {}...(scheduled: {})".format(
        self.job_name, MyDate.time_now_str(), self._scheduled_start_time))
    if not self._for_test:
        self._file_log.log_message(self.job_name, process='Scheduler', process_step='Start')
    try:
        self.__perform_task__()
        self._last_run_end_date_time = MyDate.get_datetime_object()
    except Exception:  # was a bare except - errors should be logged, not swallowed silently
        if self._for_test:
            print("{}: Error at {}".format(self.job_name, MyDate.time_now_str()))
        else:
            self._file_log.log_error()
        process_step = 'End with error'
    finally:
        if not self._for_test:
            self._file_log.log_message(self.job_name, process='Scheduler', process_step=process_step)
        self._is_running = False
        self._job_runtime.stop()
        self._executor.shutdown(wait=False)
        self._process.__end_process__()
        if not self._for_test:
            self.__write_statistics_to_database__()
        print("{}: Thread shutdown at {}".format(self.job_name, MyDate.time_now_str()))
        self.__schedule_next_time__()

def __get_offset_time_stamp__(ticks: int, period: str, aggregation: int):
    offset_time_stamp = MyDate.get_offset_timestamp_for_period_aggregation(ticks, period, aggregation)
    if period == PRD.INTRADAY:
        offset_time_stamp = MyDate.get_time_stamp_rounded_to_previous_hour(offset_time_stamp)
    return offset_time_stamp

def __get_log_entry_numbers_for_log_type__(self, log_type: str, actual_day=True):
    today_str = MyDate.get_date_as_string_from_date_time()
    if log_type not in self._log_data_frame_dict:
        return 0
    df = self._log_data_frame_dict[log_type]
    if actual_day:
        if DC.WAVE_END_TS in df.columns:
            today_ts = MyDate.get_epoch_seconds_for_date() - MyDate.get_seconds_for_period(days=1)  # minus one day
            df = df[df[DC.WAVE_END_TS] >= today_ts]
            # print('max ts = {}, midnight={}'.format(df[DC.WAVE_END_TS].max(), today_ts))
        elif DC.TS_PATTERN_TICK_LAST in df.columns:
            today_ts = MyDate.get_epoch_seconds_for_date() - MyDate.get_seconds_for_period(days=1)  # minus one day
            df = df[df[DC.TS_PATTERN_TICK_LAST] >= today_ts]
        elif PRDC.START_DT in df.columns:
            df = df[df[PRDC.START_DT] == today_str]
        elif LOGDC.DATE in df.columns:
            df = df[df[LOGDC.DATE] == today_str]
    if log_type == LOGT.TRADES:
        add_number = df[df[LOGDC.PROCESS_STEP] == 'Add'].shape[0]
        buy_number = df[df[LOGDC.PROCESS_STEP] == 'Buy'].shape[0]
        return '{}/{}'.format(add_number, buy_number)
    return df.shape[0]

def __get_graph__(self, ticker_id: str, refresh_interval: int, limit: int = 0):
    period = self.sys_config.period
    aggregation = self.sys_config.period_aggregation
    graph_cache_id = self.sys_config.graph_cache.get_cache_id(ticker_id, period, aggregation, limit)
    graph = self.sys_config.graph_cache.get_cached_object_by_key(graph_cache_id)
    if graph is not None:
        return graph, graph_cache_id
    # daily non-crypto data are loaded from the database, anything else from the online provider
    self.sys_config.data_provider.from_db = (
        period == PRD.DAILY and self._recommender_table.selected_index != INDICES.CRYPTO_CCY)
    date_start = MyDate.adjust_by_days(MyDate.get_datetime_object().date(), -limit)
    and_clause = "Date > '{}'".format(date_start)
    graph_title = self.sys_config.graph_cache.get_cache_title(ticker_id, period, aggregation, limit)
    detector = self._pattern_controller.get_detector_for_fibonacci_and_pattern(
        self.sys_config, ticker_id, and_clause, limit)
    graph_api = DccGraphApi(graph_cache_id, graph_title)
    graph_api.ticker_id = ticker_id
    graph_api.df = detector.pdh.pattern_data.df
    graph = self.__get_dcc_graph_element__(detector, graph_api)
    cache_api = self.sys_config.graph_cache.get_cache_object_api(
        graph_cache_id, graph, period, refresh_interval)
    self.sys_config.graph_cache.add_cache_object(cache_api)
    return graph, graph_cache_id

def __print_breakout_details__(print_id: str, breakout_ts: float, time_stamp_since: float):
    brk_t = MyDate.get_time_from_epoch_seconds(int(breakout_ts))
    data_t = MyDate.get_time_from_epoch_seconds(int(time_stamp_since))
    print('{}: breakout since last data update: breakout={}/{}=last_data_update'.format(
        print_id, brk_t, data_t))

def is_ready(self, last_run_time_stamp: int):
    if self._is_running or not self._is_active:
        return False
    if MyDate.weekday() in self._scheduled_weekdays:
        now_time_stamp = MyDate.time_stamp_now()
        start_time_stamp = MyDate.get_epoch_seconds_for_current_day_time(self._scheduled_start_time)
        # ready exactly once per slot: the scheduled start lies after the last run and is already reached
        return last_run_time_stamp < start_time_stamp <= now_time_stamp
    return False

def print_order_book(self, prefix=''):
    if prefix != '':
        print('\n{}:'.format(prefix))
    print('Bids: {}\nBids_price: {}\nBids_amount: {}\nBids_time: {}'
          '\nAsks: {}\nAsks_price: {}\nAsks_amount: {}\nAsks_time: {}'.format(
              self.bids, self.bids_price, self.bids_amount,
              MyDate.get_date_time_from_epoch_seconds(self.bids_ts),
              self.asks, self.asks_price, self.asks_amount,
              MyDate.get_date_time_from_epoch_seconds(self.asks_ts)))

def __get_search_and_clause__(self):
    date_from = MyDate.get_datetime_object(self.range_start_date_str)
    date_to = MyDate.get_datetime_object(self.range_end_date_str)
    date_from_adjusted = MyDate.adjust_by_days(date_from, -self.range_length_days)
    date_to_adjusted = MyDate.adjust_by_days(date_to, self.range_length_days)
    return "Date BETWEEN '{}' AND '{}'".format(date_from_adjusted, date_to_adjusted)

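# Illustrative example (pattern ID from above, range_length_days=35): the search window
# is widened by the range length on both sides of the pattern range:
#   "Date BETWEEN '2015-10-29' AND '2016-02-11'"
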
def init_and_clause(self):
    if self.period == PRD.INTRADAY:
        minutes = self.aggregation * self.limit
        days = int(minutes / (60 * 24)) + 1
        dt_start = MyDate.get_date_from_datetime()
        dt_start = MyDate.adjust_by_days(dt_start, -days)
        dt_end = MyDate.get_date_from_datetime()
        dt_end = MyDate.adjust_by_days(dt_end, 1)
        self._and_clause = self.get_and_clause(dt_start, dt_end)

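# Worked example (assumed values): a 15-minute aggregation with limit=400 ticks needs
# 15 * 400 = 6000 minutes of history; days = int(6000 / (60 * 24)) + 1 = 4 + 1 = 5,
# so the AND clause covers the last 5 days up to and including tomorrow.
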
def update_equity_records(self) -> SalesmanDatabaseUpdateJobResult:
    result_obj = SalesmanDatabaseUpdateJobResult()
    access_layer = AccessLayer4Sale(self.db_salesman)
    dt_today = MyDate.get_date_from_datetime()
    # dt_today = MyDate.adjust_by_days(dt_today, 40)
    dt_valid_until = MyDate.adjust_by_days(dt_today, 30)
    dt_today_str = str(dt_today)
    dt_valid_until_str = str(dt_valid_until)
    return result_obj

def __get_date_range_for_index_calculation__(self) -> list:
    date_range_values = self._access_layer.get_date_range_for_index(self._index)
    if self._df_index.shape[0] == 0:
        return date_range_values
    # continue one day after the last date already present in the index data frame
    date_start = self._df_index.iloc[-1][CN.DATE]
    date_start = MyDate.get_date_str_from_datetime(MyDate.adjust_by_days(date_start, 1))
    return [date_start, date_range_values[1]]

def get_cache_object_api(key: str, cache_object: object, period: str, refresh_interval: int) -> MyCacheObjectApi:
    api = MyCacheObjectApi()
    api.key = key
    api.object = cache_object
    if period == PRD.INTRADAY:
        api.valid_until_ts = MyDate.time_stamp_now() + refresh_interval
    else:
        api.valid_until_ts = MyDate.time_stamp_now() + 10 * 60 * 60  # 10 hours
    return api

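# Example: an intraday object cached with refresh_interval=300 expires after 5 minutes,
# while any non-intraday object stays valid for the next 10 hours.
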
def update_dict_by_prediction_dict(prediction_dict: dict, target_dict: dict):
    # {DC.WAVE_END_FLAG: [1, 0.78], DC.WAVE_MAX_RETR_PCT: [53, 54.79], DC.WAVE_MAX_RETR_TS_PCT: [40, 25.97]}
    prefix_pos_dict = {'FC_C_': 0, 'FC_R_': 1}
    target_dict[DC.FC_TS] = MyDate.time_stamp_now()
    target_dict[DC.FC_DT] = MyDate.date_time_now_str()
    for label, value_list in prediction_dict.items():
        for prefix, position in prefix_pos_dict.items():
            target_dict['{}{}'.format(prefix, label)] = value_list[position]

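# Illustrative fan-out for the sample dict in the comment above (the prefixes presumably
# separate classifier and regressor forecast values - an assumption from the naming):
#   prediction_dict = {DC.WAVE_END_FLAG: [1, 0.78]}
#   -> target_dict['FC_C_WAVE_END_FLAG'] = 1     # position 0
#      target_dict['FC_R_WAVE_END_FLAG'] = 0.78  # position 1
# plus the FC_TS / FC_DT time stamps of the forecast itself.
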
def __save_balances_to_database__(self):
    if len(self._balance_saving_times) == 0:  # already all done
        return
    processed_list = []
    ts_now = MyDate.get_epoch_seconds_from_datetime()
    for ts_saving in self._balance_saving_times:
        if ts_saving <= ts_now:
            dt_saving = MyDate.get_date_time_from_epoch_seconds_as_string(ts_saving)
            self.__save_balances__(ts_saving, dt_saving)
            processed_list.append(ts_saving)
    for values in processed_list:
        self._balance_saving_times.remove(values)

def __print_time_stamps__(tick_end_time_stamp: float, last_refresh_time_stamp: float):
    flag = tick_end_time_stamp >= last_refresh_time_stamp
    if not flag:
        return
    date_time_end = MyDate.get_date_time_from_epoch_seconds(tick_end_time_stamp)
    date_time_last_refresh = MyDate.get_date_time_from_epoch_seconds(last_refresh_time_stamp)
    print('was_any_wave_finished_since_time_stamp = {}: tick_end = {} / {} = last_refresh'.format(
        flag, date_time_end, date_time_last_refresh))

def get_trade_test_api_by_selected_trade_row(row, test_process: str) -> TradeTestApi:
    api = TradeTestApi()
    api.trade_id = row[DC.TRADE_ID] if DC.TRADE_ID in row else row[DC.ID]
    api.pattern_id = row[DC.PATTERN_ID] if DC.PATTERN_ID in row else ''
    api.test_process = test_process  # e.g. TP.TRADE_REPLAY
    api.pattern_type = row[DC.PATTERN_TYPE]
    # api.buy_trigger = row[DC.BUY_TRIGGER]
    api.trade_strategy = row[DC.TRADE_STRATEGY]
    api.symbol = row[DC.TICKER_ID]
    api.dt_start = MyDate.adjust_by_days(row[DC.PATTERN_RANGE_BEGIN_DT], -30)
    api.dt_end = MyDate.adjust_by_days(row[DC.PATTERN_RANGE_END_DT], 30)  # we need this correction for a smooth cont.
    api.and_clause = PatternDataProvider.get_and_clause(api.dt_start, api.dt_end)
    api.and_clause_unlimited = PatternDataProvider.get_and_clause(api.dt_start)
    return api

def get_historical_data_by_client(self):
    self.wrapper.init_historical_data_queue()
    date_from = MyDate.adjust_by_days(MyDate.get_datetime_object(), -180)
    date_from_str = MyDate.get_date_time_as_string_from_date_time(date_from, '%Y%m%d %H:%M:%S')
    print(date_from_str)
    qqq = Contract()
    qqq.symbol = 'MMEN'
    qqq.secType = 'STK'
    qqq.exchange = 'CSE'
    qqq.currency = 'CAD'
    # request one month of daily MIDPOINT bars ending at date_from for the MMEN contract
    self.reqHistoricalData(4001, qqq, date_from_str, "1 M", "1 day", "MIDPOINT", 1, 1, False, [])
    return self.wrapper.get_historical_data()

def add_buy_order_status_data_to_pattern_data_dict(self, order_status: OrderStatus, trade_strategy: str):
    self._data_dict[DC.BUY_ORDER_ID] = order_status.order_id
    self._data_dict[DC.BUY_ORDER_TPYE] = order_status.type
    self._data_dict[DC.BUY_ORDER_TPYE_ID] = OT.get_id(order_status.type)
    self._data_dict[DC.BUY_TIME_STAMP] = int(order_status.time_stamp)
    self._data_dict[DC.BUY_DT] = MyDate.get_date_from_epoch_seconds(order_status.time_stamp)
    self._data_dict[DC.BUY_TIME] = str(MyDate.get_time_from_epoch_seconds(order_status.time_stamp))
    self._data_dict[DC.BUY_AMOUNT] = order_status.original_amount
    self._data_dict[DC.BUY_PRICE] = order_status.avg_execution_price
    self._data_dict[DC.BUY_TOTAL_COSTS] = order_status.value_total
    self._data_dict[DC.BUY_TRIGGER] = order_status.order_trigger
    self._data_dict[DC.BUY_TRIGGER_ID] = BT.get_id(order_status.order_trigger)
    self._data_dict[DC.BUY_COMMENT] = order_status.order_comment
    self._data_dict[DC.TRADE_STRATEGY] = trade_strategy
    self._data_dict[DC.TRADE_STRATEGY_ID] = TSTR.get_id(trade_strategy)

def get_balance_as_asset_data_frame(self):
    if self.balances is None:
        return None
    ts = MyDate.get_epoch_seconds_from_datetime()
    dt_str = MyDate.get_date_time_from_epoch_seconds_as_string(ts)
    dict_for_data_frame = {}
    for balance in self.balances:
        data_dict = AssetDataDictionary().get_data_dict_for_target_table_for_balance(balance, ts, dt_str)
        for key, value in data_dict.items():
            if key not in dict_for_data_frame:
                dict_for_data_frame[key] = [value]
            else:
                dict_for_data_frame[key].append(value)
    return pd.DataFrame.from_dict(dict_for_data_frame)

def __adjust_log_df_to_selected_items__(self):
    if self._process_column != '' and self._selected_log_process != '':
        self._log_df = self._log_df[self._log_df[self._process_column] == self._selected_log_process]
    if self._process_step_column != '' and self._selected_log_process_step != '':
        self._log_df = self._log_df[self._log_df[self._process_step_column] == self._selected_log_process_step]
    if self._log_df.shape[0] > 0 and self._selected_date_range != DTRG.ALL:
        date_obj = MyDate.get_offset_date_for_date_range(self._selected_date_range)
        offset_ts = MyDate.get_offset_time_stamp_for_date_range(self._selected_date_range)
        if self._date_column in (DC.WAVE_END_TS, DC.TS_PATTERN_TICK_LAST):
            self._log_df = self._log_df[self._log_df[self._date_column] >= offset_ts]
        else:
            self._log_df = self._log_df[self._log_df[self._date_column] >= str(date_obj)]

def process_optimize_log_files(self):
    log_types_for_processing = self.__get_log_types_for_process_optimize_log_files__()
    # keep only valid log lines that are at most 7 days old
    date_compare = MyDate.get_date_str_from_datetime(MyDate.adjust_by_days(None, -7))
    for log_type in log_types_for_processing:
        file_path = self.get_file_path_for_log_type(log_type)
        line_to_keep_list = []
        with open(file_path, 'r') as file:
            for line in file.readlines():
                log_line = FileLogLine(line)
                if log_line.is_valid:
                    if log_line.date >= date_compare:
                        line_to_keep_list.append(line)
                else:
                    print('{}: Line not valid in log file: {}'.format(file_path, line))
        MyFile(file_path).replace_file_when_changed(line_to_keep_list)

def get_xy_parameters_for_wave_peak(self, tick: WaveTick, wave_type: str, period: str, aggregation: int):
    number_waves = tick.get_wave_number_for_wave_type(wave_type)
    height, head_length, head_width = self.get_arrow_size_parameter_for_wave_peaks(wave_type, number_waves)
    ts_period = MyDate.get_seconds_for_period(days=1) if period == PRD.DAILY \
        else MyDate.get_seconds_for_period(min=aggregation)
    x = [tick.f_var - ts_period, tick.f_var, tick.f_var + ts_period]
    if wave_type in [WAVEST.DAILY_ASC, WAVEST.INTRADAY_ASC]:
        y = [tick.high + height, tick.high, tick.high + height]
    else:
        y = [tick.low - height, tick.low, tick.low - height]
    x.append(x[0])  # close the triangle
    y.append(y[0])  # close the triangle
    return list(zip(x, y))

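# Illustrative result (assumed daily ascending tick: f_var=ts, high=100, height=2,
# ts_period=86400 seconds):
#   x = [ts - 86400, ts, ts + 86400, ts - 86400]
#   y = [102, 100, 102, 102]
# -> zip(x, y) yields the closed triangle polygon drawn above the wave peak.
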
def __init__(self, app: Dash, sys_config: SystemConfiguration, trade_handler_online: PatternTradeHandler):
    MyPatternDashBaseTab.__init__(self, app, sys_config)
    self.exchange_config = self.sys_config.exchange_config
    self._trade_handler_online = trade_handler_online
    self._df_trade = self.sys_config.db_stock.get_trade_records_for_replay_as_dataframe()
    self._df_trade_for_replay = self._df_trade[TradeTable.get_columns_for_replay()]
    self._trade_rows_for_data_table = MyDCC.get_rows_from_df_for_data_table(self._df_trade_for_replay)
    self._df_pattern = self.sys_config.db_stock.get_pattern_records_for_replay_as_dataframe()
    self._df_pattern_for_replay = self._df_pattern[PatternTable.get_columns_for_replay()]
    self._pattern_rows_for_data_table = MyDCC.get_rows_from_df_for_data_table(self._df_pattern_for_replay)
    self.__init_selected_row__()
    self.__init_replay_handlers__()
    self._selected_pattern_trade = None
    self._selected_buy_trigger = None
    self._selected_trade_strategy = None
    self._n_click_restart = 0
    self._n_click_cancel_trade = 0
    self._n_click_reset = 0
    self._replay_speed = 4
    self._trades_stored_number = 0
    self._trades_online_active_number = 0
    self._trades_online_all_number = 0
    self._pattern_stored_number = 0
    self._cached_trade_table = None
    self._time_stamp_last_ticker_refresh = MyDate.time_stamp_now()
    self._check_actual_trades_for_trade_handler_online_n_intervals = -1
    self._print_callback_start_details = False

def is_wave_indicator_for_dash(self, period_aggregation: int) -> bool:
    ts_last_tick = self.w_5.tick_end.time_stamp
    ts_now = MyDate.get_epoch_seconds_from_datetime()
    number_ticks = int(30 / period_aggregation)  # we want to be reminded for 1/2 hour
    # number_ticks = 100
    return ts_now - ts_last_tick < period_aggregation * 60 * number_ticks

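# Worked example: period_aggregation=5 -> number_ticks = int(30 / 5) = 6,
# threshold = 5 * 60 * 6 = 1800 s, i.e. the wave indicator stays active for
# 30 minutes after the wave's last tick.
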
def test_write_sales_after_checks_to_db(self):
    sales, sale_master = self.__get_sales_and_master_sale_for_test_run__()
    self.write_sales_after_checks_to_db(sales, sale_master, enforce_writing=True)
    self.print_test_results()
    print('\nTest: Does the child sale disappear in v_sale when End_Date is set in the relation?')
    self.update_sale_relation_end_date(sales[1].sale_id, sale_master.sale_id, MyDate.today_str())
    self.print_test_results(['V_SALE', 'SALE_RELATION'])
    print('\nTest: Is the relation set back to active after loading again?')
    self.write_sales_after_checks_to_db(sales, sale_master, enforce_writing=True)
    self.print_test_results()
    print('\nTest: Do we get new versions for master and child_01?')
    sales, sale_master = self.__get_sales_and_master_sale_for_test_run__()  # we have to start with new ones ...
    sale_master.set_value(SLDC.TITLE, 'New title for Master')
    sales[1].set_value(SLDC.TITLE, 'New title for child_02')
    self.write_sales_after_checks_to_db(sales, sale_master, enforce_writing=True)
    self.print_test_results()

def get_date_values_as_number_for_date_time_array(date_values: list) -> np.array:
    number_list = [MyDate.get_number_for_date_time(date_value) for date_value in date_values]
    return np.array(number_list).reshape(-1, 1)

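# Illustrative use (hypothetical input): two dates become a (2, 1) column vector,
# the shape scikit-learn estimators expect for a single feature:
#   get_date_values_as_number_for_date_time_array(['2016-01-01', '2016-01-02'])
#   -> array([[n], [n + 1]]) where n is the numeric value of 2016-01-01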