Example #1
0
 def __init_by_pattern_id__(self, pattern_id_str: str):
     """Decode an underscore-separated pattern id into this object's attributes.

     Token layout (example: 1_1_1_AAPL_12_2015-12-03_00:00_2016-01-07_00:00):
     equity type, period, aggregation, ticker, pattern type,
     start date, start time, end date, end time.
     """
     token = pattern_id_str.split('_')
     self.equity_type_id = int(token[0])
     self.period_id = int(token[1])
     self.period = PRD.get_period(self.period_id)
     self.aggregation = int(token[2])
     self.ticker_id = token[3]
     self.pattern_type_id = int(token[4])
     self.pattern_type = FT.get_pattern_type(self.pattern_type_id)
     self.range_start_date_str = token[5]
     self.range_start_time_str = token[6]
     self.range_end_date_str = token[7]
     self.range_end_time_str = token[8]
     self.date_start = MyDate.get_datetime_object(self.range_start_date_str)
     self.date_end = MyDate.get_datetime_object(self.range_end_date_str)
     self.range_length = (self.date_end - self.date_start).days
     # The range id is simply the four date/time tokens glued back together.
     self.range_id = '_'.join([self.range_start_date_str,
                               self.range_start_time_str,
                               self.range_end_date_str,
                               self.range_end_time_str])
     self.ts_from = MyDate.get_epoch_seconds_from_datetime(token[5])
     self.ts_to = MyDate.get_epoch_seconds_from_datetime(token[7])
     seconds_per_day = 60 * 60 * 24
     self.range_length_days = int((self.ts_to - self.ts_from) / seconds_per_day)
     self.and_clause = self.__get_search_and_clause__()
Example #2
0
 def __get_search_and_clause__(self):
     """Build a SQL WHERE fragment spanning the pattern range, padded on
     both sides by the range length in days."""
     start = MyDate.get_datetime_object(self.range_start_date_str)
     end = MyDate.get_datetime_object(self.range_end_date_str)
     padding_days = self.range_length_days
     lower_bound = MyDate.adjust_by_days(start, -padding_days)
     upper_bound = MyDate.adjust_by_days(end, padding_days)
     return "Date BETWEEN '{}' AND '{}'".format(lower_bound, upper_bound)
 def __run_job__(self):
     """Execute the scheduled job once, with logging and guaranteed cleanup.

     The work itself is delegated to __perform_task__. Start/end (or error)
     is written to the file log unless running in test mode, and the finally
     block always releases the executor, stops the runtime clock, ends the
     process and schedules the next run.
     """
     process_step = 'End'
     self.__init_run_parameters__()
     print("{}: Thread started at {}...(scheduled: {})".format(
         self.job_name, MyDate.time_now_str(), self._scheduled_start_time))
     if not self._for_test:
         self._file_log.log_message(self.job_name, process='Scheduler', process_step='Start')
     try:
         self.__perform_task__()
         self._last_run_end_date_time = MyDate.get_datetime_object()
     except Exception:  # narrowed from bare except: let SystemExit/KeyboardInterrupt propagate
         if self._for_test:
             print("{}: Error at {}".format(self.job_name, MyDate.time_now_str()))
         else:
             self._file_log.log_error()
             process_step = 'End with error'
     finally:
         if not self._for_test:
             self._file_log.log_message(self.job_name, process='Scheduler', process_step=process_step)
         self._is_running = False
         self._job_runtime.stop()
         # wait=False: don't block this thread on any outstanding futures
         self._executor.shutdown(wait=False)
         self._process.__end_process__()
         if not self._for_test:
             self.__write_statistics_to_database__()
         print("{}: Thread shutdown at {}".format(self.job_name, MyDate.time_now_str()))
         self.__schedule_next_time__()
    def __get_graph__(self,
                      ticker_id: str,
                      refresh_interval: int,
                      limit: int = 0):
        """Return (graph, cache_id) for the ticker, serving from the graph cache when possible.

        On a cache miss the pattern detector is run over the last `limit`
        days and the resulting Dash graph element is added to the cache
        (with `refresh_interval`) before being returned.
        """
        period = self.sys_config.period
        aggregation = self.sys_config.period_aggregation
        graph_cache_id = self.sys_config.graph_cache.get_cache_id(
            ticker_id, period, aggregation, limit)
        graph = self.sys_config.graph_cache.get_cached_object_by_key(
            graph_cache_id)
        if graph is not None:
            return graph, graph_cache_id

        # Daily, non-crypto data is read from the database; everything else live.
        if period == PRD.DAILY and self._recommender_table.selected_index != INDICES.CRYPTO_CCY:
            self.sys_config.data_provider.from_db = True
        else:
            self.sys_config.data_provider.from_db = False
        date_start = MyDate.adjust_by_days(MyDate.get_datetime_object().date(),
                                           -limit)
        and_clause = "Date > '{}'".format(date_start)
        graph_title = self.sys_config.graph_cache.get_cache_title(
            ticker_id, period, aggregation, limit)
        detector = self._pattern_controller.get_detector_for_fibonacci_and_pattern(
            self.sys_config, ticker_id, and_clause, limit)
        graph_api = DccGraphApi(graph_cache_id, graph_title)
        graph_api.ticker_id = ticker_id
        graph_api.df = detector.pdh.pattern_data.df
        graph = self.__get_dcc_graph_element__(detector, graph_api)
        cache_api = self.sys_config.graph_cache.get_cache_object_api(
            graph_cache_id, graph, period, refresh_interval)
        self.sys_config.graph_cache.add_cache_object(cache_api)
        return graph, graph_cache_id
 def update_wave_records_for_daily_period(self, ticker_id: str, limit: int,
                                          last_days: int):
     """Detect and persist wave data for `ticker_id` over a sliding daily
     window of `limit` days, repeated for each of the last `last_days` days."""
     self.sys_config.config.save_wave_data = True
     self.sys_config.data_provider.period = PRD.DAILY
     self.sys_config.data_provider.from_db = True
     for day_offset in range(last_days + 1):
         window_end = MyDate.adjust_by_days(
             MyDate.get_datetime_object().date(), -day_offset)
         window_start = MyDate.adjust_by_days(
             MyDate.get_datetime_object().date(), -limit - day_offset)
         and_clause = "Date > '{}' AND Date <= '{}'".format(
             window_start, window_end)
         print('update_wave_records_for_daily_period: {} for {}'.format(
             ticker_id, and_clause))
         if not self.sys_config.db_stock.is_symbol_loaded(
                 ticker_id, and_clause=and_clause):
             print('No data available for {} and {}'.format(
                 ticker_id, and_clause))
             continue
         detector = self.pattern_controller.get_detector_for_fibonacci(
             self.sys_config, ticker_id, and_clause, limit)
         detector.save_wave_data()
 def get_historical_data_by_client(self):
     """Request ~6 months of daily MIDPOINT bars for MMEN (CSE, CAD) via the
     IB client and return the collected historical data."""
     self.wrapper.init_historical_data_queue()
     six_months_back = MyDate.adjust_by_days(MyDate.get_datetime_object(), -180)
     end_date_time = MyDate.get_date_time_as_string_from_date_time(
         six_months_back, '%Y%m%d %H:%M:%S')
     print(end_date_time)
     contract = Contract()
     contract.symbol = 'MMEN'
     contract.secType = 'STK'
     contract.exchange = 'CSE'
     contract.currency = 'CAD'
     self.reqHistoricalData(4001, contract, end_date_time, "1 M", "1 day",
                            "MIDPOINT", 1, 1, False, [])
     return self.wrapper.get_historical_data()
 def __add_calculated_graph_to_cache__(self, graph_cache_id: str,
                                       ticker_id: str, period: str,
                                       aggregation: int, limit: int,
                                       indicator: str,
                                       refresh_interval: int):
     """Compute the Dash graph for the ticker over the last `limit` days,
     store it in the graph cache under `graph_cache_id` and return it."""
     today = MyDate.get_datetime_object().date()
     and_clause = "Date > '{}'".format(MyDate.adjust_by_days(today, -limit))
     title = self.sys_config.graph_cache.get_cache_title(
         ticker_id, period, aggregation, limit)
     detector = self._pattern_controller.get_detector_for_fibonacci_and_pattern(
         self.sys_config, ticker_id, and_clause, limit)
     api = DccGraphApi(graph_cache_id, title)
     api.ticker_id = ticker_id
     # INDI.NONE means "no indicator overlay" on the graph.
     api.indicator = indicator if indicator != INDI.NONE else None
     api.df = detector.pdh.pattern_data.df
     graph = self.__get_dcc_graph_element__(detector, api)
     cache_api = self.sys_config.graph_cache.get_cache_object_api(
         graph_cache_id, graph, period, refresh_interval)
     self.sys_config.graph_cache.add_cache_object(cache_api)
     return graph
Example #8
0
 def __get_start_time_for_testing__():
     """Return an 'HH:MM:SS' string ten seconds from now (leaves the
     scheduler time to start before the slot is reached)."""
     start_at = MyDate.adjust_by_seconds(MyDate.get_datetime_object(), 10)
     return str(start_at.time())[:8]
 def __init_run_parameters__(self):
     """Reset the per-run bookkeeping just before a job run starts."""
     self._last_run_start_date_time = MyDate.get_datetime_object()
     self._last_run_end_date_time = None  # filled in on successful completion
     self._last_run_runtime_seconds = 0
     self._last_run_processed_details = ''
Author: Josef Sertl
Copyright: SERTL Analytics, https://sertl-analytics.com
Date: 2019-01-23
"""

from sertl_analytics.constants.pattern_constants import PRD, PPR
from pattern_process_manager import PatternProcessManager, PatternProcess
from pattern_scheduling.pattern_job import MyPatternJob, MySecondJob
from pattern_scheduling.pattern_scheduler import MyPatternScheduler
from pattern_dash.my_dash_job_handler import MyDashJobHandler
from time import sleep
from sertl_analytics.mydates import MyDate

process_manager = PatternProcessManager()
# Seconds between scheduler polls; also the spacing between candidate start times.
scheduler_run_interval_sec = 10
dt_now = MyDate.get_datetime_object()
# First candidate start: ~10s from now; second: one scheduler interval later.
dt_start_01 = MyDate.adjust_by_seconds(dt_now, 10)
dt_start_02 = MyDate.adjust_by_seconds(dt_start_01, scheduler_run_interval_sec)
start_time_01 = str(dt_start_01.time())[:8]  # 'HH:MM:SS'
start_time_02 = str(dt_start_02.time())[:8]  # 'HH:MM:SS'
# start_time_list = [start_time_01, start_time_02]
start_time_list = [start_time_02]
weekday_list = [0, 1, 2, 3, 4, 5, 6]  # run on every day of the week

undefined_process = process_manager.get_process_by_name(
    PPR.RUN_UNDEFINED_PROCESS)
update_trade_process = process_manager.get_process_by_name(
    PPR.UPDATE_TRADE_RECORDS)


@undefined_process.process_decorator