def _init_transactions_log_csv_file(self) -> Tuple[Optional[TextIOWrapper], Optional[csv.writer]]:
    """
    Creates the transactions csv file for the current backtest run, writes the header
    row (only when the file is new or empty) and returns the file handler together
    with a csv writer bound to it.

    The file is opened in append mode and its name has minute resolution, so a
    second initialisation within the same minute reopens the same file; the header
    guard prevents a duplicated header row in that case.

    Returns
    -------
    Tuple[Optional[TextIOWrapper], Optional[csv.writer]]
        (file handler, csv writer) if transaction logging is enabled,
        (None, None) otherwise
    """
    if self._monitor_settings.issue_transaction_log:
        output_dir = path.join(get_starting_dir_abs_path(), self._settings.output_directory, self._report_dir)
        if not path.exists(output_dir):
            makedirs(output_dir)

        csv_filename = datetime.now().strftime("%Y_%m_%d-%H%M Transactions.csv")
        file_path = path.expanduser(path.join(output_dir, csv_filename))

        fieldnames = [
            "Timestamp", "Contract symbol", "Security type", "Exchange", "Contract size",
            "Quantity", "Price", "Commission"
        ]

        # Write the header only for a new/empty file - appending always rewriting the
        # header would duplicate it when the file already exists
        write_header = not path.exists(file_path) or path.getsize(file_path) == 0

        file_handler = open(file_path, 'a', newline='')
        csv_writer = csv.writer(file_handler)
        if write_header:
            csv_writer.writerow(fieldnames)
        return file_handler, csv_writer

    return None, None
def _inner_setup_logging(logger, level, console_logging, log_dir, log_file_base_name): formatter = logging.Formatter( fmt='%(asctime)s %(levelname)s [%(name)s]: %(message)s', datefmt='%Y-%m-%d %H:%M:%S') logger.setLevel(level) # If not already exists a streamhandler, add one if not any( isinstance(handle, logging.StreamHandler) for handle in logger.handlers): # config logging to console (stdout) if console_logging: stream_handler = logging.StreamHandler(sys.stdout) stream_handler.setFormatter(formatter) logger.addHandler(stream_handler) # config logging to file if log_dir is not None: abs_log_dir = join(get_starting_dir_abs_path(), log_dir) log_file = get_formatted_filename(log_file_base_name, datetime.now(), "txt") if not exists(abs_log_dir): makedirs(abs_log_dir) # If not already exists a filehandler, add one if not any( isinstance(handle, logging.FileHandler) for handle in logger.handlers): file_handler = logging.FileHandler(join(abs_log_dir, log_file)) file_handler.setFormatter(formatter) logger.addHandler(file_handler)
def _issue_config_log(self):
    """
    Exports the current configuration to a timestamped yaml file in the report
    output directory, if config logging is enabled in the monitor settings.
    """
    if self._monitor_settings.issue_config_log:
        filename = datetime.now().strftime("%Y_%m_%d-%H%M Config.yml")
        output_dir = path.join(get_starting_dir_abs_path(), self._settings.output_directory, self._report_dir)

        # Ensure the report directory exists (consistent with the csv log
        # initialisation) - open(..., "w") does not create directories
        if not path.exists(output_dir):
            makedirs(output_dir)

        file_path = path.join(output_dir, filename)
        with open(file_path, "w") as file:
            ConfigExporter.print_config(file)
def _get_demo_data_provider(frequency: Frequency):
    """
    Builds a PresetDataProvider backed by the bundled demo data file that matches
    the requested frequency (1-minute or daily bars).
    """
    data_files = {
        Frequency.MIN_1: "intraday_data.csv",
        Frequency.DAILY: "daily_data.csv",
    }
    demo_file_path = join(get_starting_dir_abs_path(), "input", data_files[frequency])
    data, start_date, end_date = _acquire_data(demo_file_path)
    return PresetDataProvider(data, start_date, end_date, frequency)
def export_container(
        self, container: Union[Series, DataFrame], file_path: str,
        write_mode: WriteMode = WriteMode.CREATE_IF_DOESNT_EXIST, starting_cell: str = 'A1',
        sheet_name: str = None, include_index: bool = True, include_column_names: bool = False,
        remove_old_file=False) -> Union[bytes, str]:
    """
    Exports the container (Series, DataFrame) to the excel file.
    Returns the absolute file path of the exported file.

    Parameters
    ----------
    container
        container with data to be exported
    file_path
        path (relative to the output root directory) to the file to which data should be exported
    write_mode
        mode in which the file should be opened; default: WriteMode.CREATE_IF_DOESNT_EXIST
    starting_cell
        the address of the cell which should be the top left corner of the exporter container
        default: 'A1'
    sheet_name
        the name of the sheet to which the container should be exported. If a sheet of this name doesn't exist
        it will be created. If it does: it will be edited (but not cleared). If no sheet_name is specified,
        then the currently active one will be picked
    include_index
        determines whether the index should be written together with the data.
    include_column_names
        determines whether the column names should be written together with the data. For series containers the
        column names are always "Index" and "Values".
    remove_old_file
        if true it first deletes the old file before creating new
    """
    top_row, left_column = row_and_column(starting_cell)
    file_path = join(get_starting_dir_abs_path(), self.settings.output_directory, file_path)

    # Optionally drop a stale copy of the file so the export starts from scratch
    if remove_old_file and path.exists(file_path):
        remove(file_path)

    workbook = self.get_workbook(file_path, write_mode)
    worksheet = self.get_worksheet(workbook, sheet_name)
    self.write_to_worksheet(
        container, worksheet, top_row, left_column, include_index, include_column_names)
    workbook.save(file_path)

    return file_path
def __init__(self, settings: Settings):
    """
    Initialises the instance, resolving the document css directory either from the
    settings (if `document_css_directory` is configured) or from the default css
    directory shipped next to this module.
    """
    super().__init__(settings)

    # EAFP: use the configured css directory when present, otherwise fall back
    # to the package default located relative to this file
    try:
        css_directory = settings.document_css_directory
    except AttributeError:
        self._document_css_dir = join(abspath(dirname(__file__)), self.DEFAULT_CSS_DIR_NAME)
    else:
        self._document_css_dir = join(get_starting_dir_abs_path(), css_directory)

    self.logger = qf_logger.getChild(self.__class__.__name__)
def add_file_handler(logger: logging.Logger, logging_level, log_dir: str, log_file_base_name: Optional[str] = ""):
    """ Adds a FileHandler to the logger instance.

    Important Note: the function only saves the level on the FileHandler, not on the logger. If you set
    your logger to the level WARNING, then adding FileHandler with logging_level = DEBUG will still
    include only logs, which severity is >= WARNING. If you want the DEBUG logs to be tracked by the
    FileHandler call on your logger object: logger.setLevel(logging.DEBUG).

    Parameters
    -----------
    logger: logging.Logger
        logger instance
    logging_level:
        minimum logging level, above which all logs will be tracked by the FileHandler and saved to the txt file
    log_dir: str
        directory in which all the log files should be stored
    log_file_base_name: str
        base name of the file. All log files will be of the form "<current time>_<log_file_base_name>.txt"
    """
    abs_log_dir = Path(get_starting_dir_abs_path()) / log_dir
    abs_log_dir.mkdir(parents=True, exist_ok=True)

    # Skip entirely when a FileHandler is already attached - only one is kept per logger
    if any(isinstance(handler, logging.FileHandler) for handler in logger.handlers):
        return

    log_file = get_formatted_filename(log_file_base_name, datetime.now(), "txt")
    file_handler = logging.FileHandler(abs_log_dir / log_file)
    file_handler.setFormatter(logging.Formatter(
        fmt='%(asctime)s %(levelname)s [%(name)s]: %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S'))
    file_handler.setLevel(logging_level)
    logger.addHandler(file_handler)
def _init_csv_file(self, file_name_template: str) -> TextIOWrapper:
    """
    Creates (or reopens) the trades csv file for the current backtest run, writes
    the header row only when the file is new or empty, and returns the open file
    handler (not the path).

    Parameters
    ----------
    file_name_template: str
        base name of the csv file (".csv" extension is appended)

    Returns
    -------
    TextIOWrapper
        file handler of the csv file, opened in append mode
    """
    output_dir = path.join(get_starting_dir_abs_path(), self._settings.output_directory, self._report_dir, "trades")
    if not path.exists(output_dir):
        makedirs(output_dir)

    csv_filename = "{}.csv".format(file_name_template)
    file_path = path.expanduser(path.join(output_dir, csv_filename))

    fieldnames = ["Timestamp", "Contract", "Quantity", "Price", "Commission"]

    # Write the header only for a new/empty file - 'a' mode reopens an existing file
    # and an unconditional header write would duplicate the header row
    write_header = not path.exists(file_path) or path.getsize(file_path) == 0

    file_handler = open(file_path, 'a', newline='')
    if write_header:
        csv.writer(file_handler).writerow(fieldnames)
    return file_handler
def _add_header(self):
    """Prepends a page header element (logo, company name, document title) to the document."""
    logo_path = join(get_starting_dir_abs_path(), self.settings.logo_path)
    header = PageHeaderElement(logo_path, self.settings.company_name, self.title)
    self.document.add_element(header)
def __init__(self, settings: Settings):
    """Resolves and stores the absolute output root directory from the settings."""
    starting_dir = get_starting_dir_abs_path()
    self._output_root_dir = join(starting_dir, settings.output_directory)
def __init__(self, settings: Settings):
    """
    Initialises the instance from the settings: keeps the smtp configuration,
    resolves the absolute path to the email templates directory and creates a
    child logger named after the concrete class.
    """
    self.logger = qf_logger.getChild(self.__class__.__name__)
    self.smtp_settings = settings.smtp
    templates_dir = settings.email_templates_directory
    self.templates_path = path.join(get_starting_dir_abs_path(), templates_dir)