Example #1
    def __init__(self,
                 buy_type='current_day_open_price',
                 min_risk=0.01,
                 max_risk=0.15,
                 gain_loss_ratio=3,
                 max_days_per_operation=90):

        if buy_type not in ['current_day_open_price', 'last_day_close_price']:
            raise Exception("'buy_type' parameter options: " \
                "'current_day_open_price', 'last_day_close_price'.")
        self._buy_type = buy_type

        self._min_risk = min_risk
        self._max_risk = max_risk
        self._gain_loss_ratio = gain_loss_ratio
        self._max_days_per_operation = max_days_per_operation
        self.out_file_path_prefix = Path(__file__).parent / "out_csv_files"

        self._risk_step = 0.002
        self._risk_thresholds = [round(i, 3) for i in \
            np.arange(self.min_risk, self.max_risk + self._risk_step, self._risk_step)]

        cfg_path = Path(__file__).parent / "config.json"

        config_reader = cr.ConfigReader(config_file_path=cfg_path)
        self._tickers_and_dates = config_reader.tickers_and_dates
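For reference, the risk-threshold grid built in this constructor can be reproduced on its own. A minimal sketch, assuming the default min_risk, max_risk, and step values shown above (the enclosing class itself is not part of the snippet):

import numpy as np

min_risk, max_risk, risk_step = 0.01, 0.15, 0.002
# Inclusive grid of risk levels, rounded to 3 decimals, mirroring the
# np.arange construction in the example above (float() keeps plain Python floats).
risk_thresholds = [round(float(r), 3) for r in np.arange(min_risk, max_risk + risk_step, risk_step)]
print(risk_thresholds[:5])  # [0.01, 0.012, 0.014, 0.016, 0.018]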
Example #2
    def __init__(self, min_date_filter=None, max_date_filter=None):

        self._min_date_filter = min_date_filter
        self._max_date_filter = max_date_filter

        try:
            if min_date_filter is not None:
                pd.Timestamp(min_date_filter)
            if max_date_filter is not None:
                pd.Timestamp(max_date_filter)
        except Exception as error:
            logger.error("\'min_date_filter\' and \'max_date_filter\' " \
                "formats are \'YYYY-MM-DD\', error:\n{}".format(error))
            sys.exit(c.MODEL_CREATION_ERR)

        cfg_path = Path(__file__).parent / 'config.json'
        config_reader = cr.ConfigReader(config_file_path=cfg_path)

        self._tickers_and_dates = config_reader.tickers_and_dates
        self.datasets_path_prefix = Path(__file__).parent / 'datasets'
        self.models_path_prefix = Path(__file__).parent / 'models'

        self.supported_models = ('KNeighborsClassifier',
                                 'RandomForestClassifier')
        self.models_folder = ('kneighbors_classifier',
                              'random_forest_classifier')
        self.model_type_folder = 'ticker_oriented_models'

        self.feature_columns = [
            'risk', 'peak_1', 'day_1', 'peak_2', 'day_2', 'peak_3', 'day_3',
            'peak_4', 'day_4', 'ema_17_day', 'ema_72_day', 'ema_72_week'
        ]
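The date-filter validation above simply checks that each string parses as a pandas Timestamp. A standalone sketch of the same check (the function name and return convention are illustrative, not from the original module):

import pandas as pd

def is_valid_date_filter(date_str):
    """Return True if date_str is None or parses as a pandas Timestamp (e.g. 'YYYY-MM-DD')."""
    if date_str is None:
        return True
    try:
        pd.Timestamp(date_str)
        return True
    except (ValueError, TypeError):
        return False

print(is_valid_date_filter('2021-03-15'))   # True
print(is_valid_date_filter('not-a-date'))   # False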
Example #3
    def __init__(self,
                 buy_type='current_day_open_price',
                 gain_loss_ratio=3,
                 peaks_pairs_number=2,
                 risk_option='fixed',
                 fixed_risk=0.03,
                 start_range_risk=0.01,
                 step_range_risk=0.002,
                 end_range_risk=0.12,
                 max_days_per_operation=45):

        try:
            if buy_type not in ('current_day_open_price',
                                'last_day_close_price'):
                raise Exception(
                    "'buy_type' parameter options: 'current_day_open_price', 'last_day_close_price'."
                )

            if risk_option not in ('fixed', 'range'):
                raise Exception(
                    "'risk_option' parameter options: 'fixed', 'range'.")

            self._buy_type = buy_type
            self._gain_loss_ratio = gain_loss_ratio

            self._risk_option = risk_option
            self._fixed_risk = fixed_risk
            self._risks = []

            if self._risk_option == 'range':
                self._risks = tuple(
                    round(i, 3) for i in tuple(
                        np.arange(start_range_risk, end_range_risk +
                                  step_range_risk, step_range_risk)))

            # Peaks identification variables
            self._peaks_pairs_number = peaks_pairs_number  # Number of past max and min peaks pairs
            self._peak_delay_days = 9

            cfg_path = Path(__file__).parent
            cfg_path = cfg_path / 'config.json'

            cfg = cr.ConfigReader(config_file_path=cfg_path)
            self._tickers_and_dates = cfg.tickers_and_dates

            self._max_days_per_operation = max_days_per_operation

            self.out_file_path_prefix = Path(__file__).parent / "datasets"

        except Exception as error:
            logger.exception(f"Error processing operations, error:\n{error}")
            sys.exit(c.PROCESSING_OPERATIONS_ERR)
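A minimal invocation sketch; the enclosing class name is not part of the snippet, so OperationsBuilder below is purely hypothetical:

builder = OperationsBuilder(risk_option='range',
                            start_range_risk=0.01,
                            step_range_risk=0.002,
                            end_range_risk=0.12)
# With the default risk_option='fixed', only fixed_risk is used and _risks stays empty.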
Example #4
    def on_job_change(self, job):
        """Called whenever the job combo box is changed"""
        if job is not None:
            self.current_job_path = os.path.join(self.jobs_dir, job)
            self.configReader = config_reader.ConfigReader(self.current_job_path)
            self.path_label.setText(self.current_job_path)

            # check software support for current job
            if self.configReader.check_software_support(self.software):
                self.extensions = self.get_extensions()
                self.populate_profiles()
            else:
                self.clear_window()
                self.current_software_tools.debug_msg("Project does not include " 
                    "support for this software")
        else:
            self.clear_window()
            self.current_software_tools.debug_msg("No jobs in this directory")
Example #5
def update_tickers():
    logger.info('Program started.')

    # Read Config File
    config_file = Path(__file__).parent.parent / c.CONFIG_PATH / 'config.json'
    config = cr.ConfigReader(config_file)
    ticker_managers = []

    # Create TickerManager objects to update and process them
    for ticker, date in config.tickers_and_dates.items():
        ticker_managers.append(
            TickerManager(ticker, date['start_date'], date['end_date']))

    ticker_managers.append(
        TickerManager('^BVSP',
                      config.min_start_date,
                      config.max_end_date,
                      ordinary_ticker=False))  # IBOVESPA Index
    ticker_managers.append(
        TickerManager('BRL=X',
                      config.min_start_date,
                      config.max_end_date,
                      ordinary_ticker=False))  # USD/BRL

    # Update and generate features
    for tm in ticker_managers:
        tm.holidays = config.holidays
        tm.min_risk = config.min_risk_features
        tm.max_risk = config.max_risk_features

        update_ok = tm.update()
        if update_ok:
            features_ok = tm.generate_features()
            # Remove inconsistent tickers from all strategies
            if features_ok is False:
                for index in range(len(config.strategies)):
                    if tm.ticker in list(
                            config.strategies[index]['tickers'].keys()):
                        config.strategies[index]['tickers'].pop(tm.ticker)

    return config.strategies
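A minimal usage sketch; how this function is actually invoked is not shown in the snippet, so the guard below is illustrative:

if __name__ == '__main__':
    strategies = update_tickers()
    for strategy in strategies:
        # Only the 'tickers' key is referenced above; any other keys are not shown in the snippet.
        print(len(strategy['tickers']), 'tickers remain after the consistency checks')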
Example #6
    def populate_recents(self):
        try:
            recents_list = self.read_local_config()[self.software]
            # If it's not a list, don't try to load
            if not isinstance(recents_list, list):
                return
        except Exception:
            recents_list = []

        recents_str_list = []
        for recent_option in recents_list:
            recent_config_reader = config_reader.ConfigReader(
                os.path.join(recent_option['jobs_dir'], recent_option['job']))
            template_string = recent_config_reader.get_profile_template(
                self.software, recent_option['profile'])
            token_list = recent_config_reader.get_tokens(template_string)
            recent_str = recent_option['job']
            for token in token_list:
                recent_str = recent_str + " / " + recent_option['tokens'][token]
            recents_str_list.append(recent_str)

        self.recents_combo.addItems(recents_str_list)
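For context, the keys read from each recents entry above imply a per-entry structure roughly like the following; the concrete paths and token names are purely illustrative:

recent_option = {
    'jobs_dir': '/path/to/jobs',
    'job': 'my_job',
    'profile': 'default_profile',
    'tokens': {'shot': '010', 'task': 'comp'},  # must cover every token in the profile template
}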
Example #7
def get_config_reader():
    job_path = os.path.join(get_jobs_dir(), get_job())
    env_config_reader = config_reader.ConfigReader(job_path)
    return env_config_reader
Example #8
    def read_config(self, filereader):
        self.config = config_reader.ConfigReader(filereader)
        self.configMap = self.config.get_map()
Example #9
    def read_config(self):
        configfilename = os.path.join(os.getenv('HOME'), '.rssdown/config.txt')
        # Use a context manager so the file is closed even if ConfigReader raises.
        with open(configfilename, 'r') as config_file:
            config = config_reader.ConfigReader(config_file)
        return config
Example #10
    args = parser.parse_args()

    # data dir
    data_dir = args.tiles

    # tiles
    tile_list = os.listdir(data_dir)

    # yaml
    config_file = os.path.dirname(os.path.realpath(__file__)) + os.sep + 'config.yml'

    # geojson
    aoi_file = os.path.dirname(os.path.realpath(__file__)) + os.sep + 'aoi.geojson'

    # extract image metadata
    ard_settings = cfg.ConfigReader(config_file, aoi_file)

    # working directories
    work_dir = "/work"
    output_dir = "/output"
    mosaic_dir = "/output/mosaic"
    average_dir = "/output/average"

    # L1C --> L2A name updates (there might be a better way to track these)
    l2a_names = {}

    # PROCESS TILES
    for image_config in ard_settings.image_list:
        input_tile = data_dir + os.sep + image_config.tile_name
        if os.path.isdir(input_tile):
            print('\n----------------------------------------------------------------------\n')
Example #11
chrome_options = webdriver.ChromeOptions()
chrome_options.add_argument('--disable-cache')
# chrome_options.add_argument('--incognito')
chrome_options.add_argument("--headless")
chrome_options.add_argument("--window-size=1920x1080")
driver = webdriver.Chrome(chrome_options=chrome_options, executable_path=chrome_driver_path)

hostname = platform.node()

# db = mysql_db.DbConnector("root", "", "response_time")

f = open('report.txt', 'a') # to be deleted in prod
f.write("\n\nRunning the offline version without db..\n\n") # to be deleted in prod

config = config_reader.ConfigReader(config_path)
sources = config.get_urls()

for each in sources:

    driver.get(each['url'])

    navigationStart = driver.execute_script("return window.performance.timing.navigationStart")
    loadEventEnd = driver.execute_script("return window.performance.timing.loadEventEnd")
    requestStart = driver.execute_script("return window.performance.timing.requestStart")
    responseStart = driver.execute_script("return window.performance.timing.responseStart")
    responseEnd = driver.execute_script("return window.performance.timing.responseEnd")
    domLoading = driver.execute_script("return window.performance.timing.domLoading")
    domComplete = driver.execute_script("return window.performance.timing.domComplete")

    pageLoadTime = loadEventEnd - navigationStart
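    # Sketch (not part of the original script): one way the measurement above could be
    # appended to the report file opened earlier; the exact report format is not shown.
    f.write("{} - {}: page load {} ms\n".format(hostname, each['url'], pageLoadTime))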
Example #12
                description=
                "Unable to save content due to SQlite3 integrity error. Is the content a duplicate?"
            )
        except Exception as ex:
            logger.exception(ex)
            raise falcon.HTTPInternalServerError()


def auth_tuple_to_middleware(auth_tuple):
    def user_loader(username, password):
        return username if (username, password) == auth_tuple else None

    auth_backend = BasicAuthBackend(user_loader)
    return FalconAuthMiddleware(auth_backend)


config = config_reader.ConfigReader()
middleware = []

if config.auth:
    middleware.append(auth_tuple_to_middleware(config.auth))

app = falcon.API(middleware=middleware)
# TODO: configure logger instance, add timestamps
logging.basicConfig(format='[%(asctime)s] [%(levelname)s] %(message)s',
                    level=logging.INFO,
                    datefmt='%Y-%m-%d %H:%M:%S')
logger = logging.getLogger()
handler = translation_handler.TranslationHandler()

app.add_route('/content', NewContentRouting())
app.add_route('/content/{source}/{content_id}/', ContentRouting())
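Since a falcon.API instance is a standard WSGI application, the app object above can be served by any WSGI server. A minimal sketch, assuming the module is saved as main.py (the actual module name is not shown in the snippet):

gunicorn main:app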