from algos.hybrid_algo.hybrid_config import config
import argparse
import os
import pickle
import time
from pathlib import Path
from sqlalchemy import create_engine

# stop loss non addition limit set to 15 days
stop_loss_prevention_days = 15
# max exposure per sector set to 21%
max_sector_exposure = 0.21

logger = setup_logging("hybrid_algo")


def initialize(context):
    attach_pipeline(make_pipeline(), 'my_pipeline')
    context.stop_loss_list = pd.Series()
    context.sector_wise_exposure = dict()
    context.sector_stocks = {}
    context.turnover_count = 0
    # etf stock
    context.shorting_on = False
    if context.live_trading is False:
        schedule_function(
            rebalance,
from PyQt5.QtCore import QUrl
from jinja2 import Environment, FileSystemLoader
import os
from utils.log_utils import setup_logging, get_results_path
try:
    from PyQt5.QtWebKitWidgets import QWebView
except ImportError:
    from PyQt5.QtWebEngineWidgets import QWebEngineView as QWebView
from PyQt5.QtPrintSupport import QPrinter

logger = setup_logging("exporter_logging")
results_path = get_results_path()


class PdfGenerator(object):
    def __init__(self, tabs, analysis_data, app):
        self.analysis_data = analysis_data
        self.pdf_file_name = 'backtest_report.pdf'
        self.html_file_name = 'backtest_report.html'
        self.tabs = tabs
        self.template_path = os.path.join(os.path.dirname(__file__), "templates")
        self.jinja_env = Environment(
            loader=FileSystemLoader(self.template_path))
        self.app = app

    def generate(self):
        try:
            data = {}
            for key, tab in self.tabs.items():
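# --- Usage sketch (illustrative, not part of the repository) -----------------
# Assumptions: 'tabs' is a dict of report tab widgets keyed by tab name and
# 'analysis_data' is the backtest results object; both are placeholders here,
# inferred only from the constructor signature above.
from PyQt5.QtWidgets import QApplication

app = QApplication([])
tabs = {}            # e.g. {'overview': overview_tab, 'returns': returns_tab}
analysis_data = {}   # results collected by the backtest analyzer

generator = PdfGenerator(tabs, analysis_data, app)
generator.generate()  # renders the Jinja2 template and writes backtest_report.pdf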
# import custom trading criteria
sys.path.append(r'C:\zipline_algo\long_term_low_risk')
from long_term_low_risk.fnTradingCritera import setPandas, linreg, \
    fnFilterInsiderTransactions, fnGetSpyReturns
# ----------------------------------------------------------------------------------

# stop loss non addition limit set to 15 days
stop_loss_prevention_days = 15
# max exposure per sector set to 21%
max_sector_exposure = 0.21

logger = setup_logging("long_term_low_risk")


def initialize(context):
    attach_pipeline(make_pipeline(), 'my_pipeline')
    context.stop_loss_list = pd.Series()
    context.sector_wise_exposure = dict()
    context.sector_stocks = {}
    context.turnover_count = 0
    if context.live_trading is False:
        schedule_function(rebalance, date_rule=date_rules.month_start())


def before_trading_start(context, data):
    context.pipeline_data = pipeline_output('my_pipeline')
from zipline.api import (attach_pipeline, order_target_percent, order_target,
                         pipeline_output, schedule_function)
from utils.log_utils import setup_logging
from algos.long_term_low_risk_with_daily_SL.ltlr_config import config
import argparse
import os
import pickle
import time

# stop loss non addition limit set to 15 days
stop_loss_prevention_days = 15
# max exposure per sector set to 21%
max_sector_exposure = 0.21

logger = setup_logging("long_term_low_risk_with_daily_SL")


def initialize(context):
    attach_pipeline(make_pipeline(), 'my_pipeline')
    context.stop_loss_list = pd.Series()
    context.sector_wise_exposure = dict()
    context.sector_stocks = {}
    context.turnover_count = 0
    if context.live_trading is False:
        schedule_function(
            rebalance,
            date_rule=date_rules.month_start()
        )
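# --- Sketch of the stop-loss cooldown (illustrative, not the repository's code) ---
# Assumption: context.stop_loss_list maps each stopped-out security to the number
# of days left before it may be bought again; the helper names are hypothetical.
def register_stop_loss(context, security):
    # Start the cooldown when a position is closed by the stop loss.
    context.stop_loss_list[security] = stop_loss_prevention_days


def update_stop_loss_list(context):
    # Called once per day: count down and drop securities whose cooldown expired.
    context.stop_loss_list -= 1
    context.stop_loss_list = context.stop_loss_list[context.stop_loss_list > 0]


def can_buy(context, security):
    # A security is eligible again only after it leaves the cooldown list.
    return security not in context.stop_loss_list.index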
from utils.log_utils import setup_logging
from algos.lowrisk_algo.lowrisk_config import config
import argparse
import os
import pickle
import time
from pathlib import Path
from sqlalchemy import create_engine

# stop loss non addition limit set to 15 days
stop_loss_prevention_days = 15
# max exposure per sector set to 25%
max_sector_exposure = 0.25

logger = setup_logging("lowrisk_algo")


def initialize(context):
    attach_pipeline(make_pipeline(), 'my_pipeline')
    context.stop_loss_list = pd.Series()
    context.sector_wise_exposure = dict()
    context.sector_stocks = {}
    context.turnover_count = 0
    # etf stock
    context.shorting_on = False
    if context.live_trading is False:
        schedule_function(rebalance, date_rule=date_rules.month_start())
import logging
import json
from utils.log_utils import setup_logging
import time
from main import state_init
from main import manager

"""
TODO:
    logger module to clean logger code for each file
    logging level
    asynchronize
"""

logger = logging.getLogger(__name__)
logging.getLogger("requests").setLevel(logging.ERROR)
logging.getLogger("urllib3").setLevel(logging.ERROR)
setup_logging(default_path='utils/logging.json')

connected = set()


async def ws_server(ws, path):
    connected.add(ws)
    print('current: ')
    print(connected)
    while True:
        try:
            in_data = await ws.recv()
            logging.info(in_data)
            state_init(in_data)
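# --- Illustrative entry point (assumption): serve ws_server with the 'websockets'
# package on a placeholder host/port; the real host, port, and startup code are
# not shown in this excerpt.
import asyncio
import websockets

start_server = websockets.serve(ws_server, 'localhost', 8765)
asyncio.get_event_loop().run_until_complete(start_server)
asyncio.get_event_loop().run_forever()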
currentdir = os.path.dirname(
    os.path.abspath(inspect.getfile(inspect.currentframe())))
parentdir = os.path.dirname(currentdir)
sys.path.insert(0, parentdir)

import pandas as pd
from strategy import Strategy
from zipline.utils.events import date_rules
from zipline.api import (order_target_percent, order_target,
                         schedule_function, symbol)
from utils.log_utils import setup_logging
from algos.beta.beta_config import config
import argparse
import os
import time

logger = setup_logging("beta_algo")


def initialize(context):
    # attach_pipeline(make_pipeline(), 'my_pipeline')
    context.turnover_count = 0
    # etf stock
    context.shorting_on = True
    context.longStock = symbol('IVV')
    context.shortStock = symbol('SH')
    if context.live_trading is False:
        schedule_function(monthly_rebalance, date_rule=date_rules.month_start())
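# --- Illustrative sketch of the scheduled rebalance (not the algorithm's actual
# rules): allocate the long ETF and hedge with the inverse ETF when shorting is
# enabled; the 70/30 split is a placeholder weight.
def monthly_rebalance(context, data):
    if context.shorting_on:
        order_target_percent(context.longStock, 0.7)
        order_target_percent(context.shortStock, 0.3)
    else:
        order_target_percent(context.longStock, 1.0)
        order_target_percent(context.shortStock, 0.0)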
import boto3
import os
from os import environ
import sys
from utils.log_utils import setup_logging
import yaml
import logging
from datetime import datetime

CONFIG_FILE = os.path.join(sys.path[0], './resources/config.yml')
setup_logging(CONFIG_FILE)

sns = yaml.load(open(CONFIG_FILE), Loader=yaml.BaseLoader)['sns']
sns_arn = environ.get('sns_arn') or sns['arn']
send_notification = eval(environ.get('send_notification')) if environ.get(
    'send_notification') else eval(sns['send_notification'])


def send_sns_message(subject, message):
    try:
        sns = boto3.client('sns')
        response = sns.publish(TopicArn=sns_arn, Subject=subject, Message=message)
        logging.debug("SNS response: {}".format(
            response['ResponseMetadata']['HTTPStatusCode']))
    except Exception as ex:
        logging.error("Error Sending SNS message {}".format(ex))
        exit(1)
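# --- Usage sketch (illustrative): subject and message are placeholder strings;
# the guard mirrors the send_notification flag resolved above.
if send_notification:
    send_sns_message(
        subject="Job finished",
        message="Run completed at {}".format(datetime.now().isoformat()))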
import datetime
from zipline.utils.events import date_rules
from zipline.api import (attach_pipeline, order_target_percent, order_target,
                         pipeline_output, schedule_function)
from utils.log_utils import setup_logging
from algos.long_term_high_risk.lthr_config import config
import argparse
import pickle
import time

# stop loss non addition limit set to 15 days
stop_loss_prevention_days = 15
# max exposure per sector set to 15%
max_sector_exposure = 0.15

logger = setup_logging("long_term_high_risk")


def initialize(context):
    attach_pipeline(make_pipeline(), 'my_pipeline')
    context.stop_loss_list = pd.Series()
    context.sector_wise_exposure = dict()
    context.sector_stocks = {}
    context.turnover_count = 0
    if context.live_trading is False:
        schedule_function(rebalance, date_rule=date_rules.month_start())


def before_trading_start(context, data):
    context.pipeline_data = pipeline_output('my_pipeline')
from zipline.api import (attach_pipeline, pipeline_output, schedule_function)
from utils.order_controller import (order_target_percent, order_target)
from utils.log_utils import setup_logging
from utils.algo_utils import get_run_mode
from algos.virtual_broker_sample_ltlr_algo.vb_sample_config import config
import argparse
import pickle
import time

# stop loss non addition limit set to 15 days
stop_loss_prevention_days = 15
# max exposure per sector set to 21%
max_sector_exposure = 0.21

logger = setup_logging("virtual_broker_sample")


def initialize(context):
    attach_pipeline(make_pipeline(), 'my_pipeline')
    # context.broker = BacktestBroker()
    context.stop_loss_list = pd.Series()
    context.sector_wise_exposure = dict()
    context.sector_stocks = {}
    context.turnover_count = 0
    if context.live_trading is False:
        schedule_function(rebalance, date_rule=date_rules.month_start())


def before_trading_start(context, data):
# stop loss non addition limit set to 15 days
stop_loss_prevention_days = 15

"""
The maximum amount of exposure allowed per sector is configured using max_sector_exposure.
All the stocks are divided into 11 sectors, with -1 used when sector info is not available.
This limit ensures that a new stock is not added to the portfolio if the current exposure
for that sector exceeds max_sector_exposure * 100 %.
The sector exposure is updated each day so that exposure changes due to price changes
are taken into account.
"""
# max exposure per sector set to 21%
max_sector_exposure = 0.21
initial_exposure = 0.07
dma = 200

logger = setup_logging("mid_term_low_risk")


def initialize(context):
    """
    :param context: global variable used throughout the backtest for carrying forward
        parameter values to the next day
    The initialize method is called only once, at the start of the backtest.
    It initializes all the parameters required for running the backtest.
    :return: None
    """
    # attach_pipeline attaches the data pipeline from quandl/quantopian
    attach_pipeline(make_pipeline(), 'my_pipeline')
    # initializing variables as part of the context variable, so they can be easily
    # accessed throughout the backtest
    # stop_loss_list: recently removed stocks (stop loss); stocks in this list are not bought
    context.stop_loss_list = pd.Series()
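# --- Sketch of the sector-exposure rule described above (illustrative, not the
# repository's implementation). Assumption: context.sector_wise_exposure maps a
# sector id (-1 = unknown) to that sector's current fraction of the portfolio.
def sector_has_capacity(context, sector_id, position_weight):
    # Allow a new position only if adding its weight keeps the sector at or
    # below max_sector_exposure.
    current = context.sector_wise_exposure.get(sector_id, 0.0)
    return current + position_weight <= max_sector_exposure


def refresh_sector_exposure(context, portfolio_value, sector_position_values):
    # Recompute each sector's share of the portfolio daily, so price moves are
    # reflected in the exposure figures.
    for sector_id, value in sector_position_values.items():
        context.sector_wise_exposure[sector_id] = value / portfolio_value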
    logger.info(
        str(dataset_config.get("base_directory")) + ';' +
        str(negative_class_subdir) + ';' +
        str(positive_class_subdir) + ';' +
        str(elapsed) + ';' +
        str(stats.compile(pop)) + ';' +
        str(hof[0]) + ';' +
        str(negative_class_correctly_classified_stat) + ';' +
        str(positive_class_correctly_classified_stat) + ';' +
        str(both_class_correctly_classified_stat))


if __name__ == "__main__":
    DATASET_CONFIG_FILE = 'config/dataset_config.ini'
    PARAMETERS_CONFIG_FILE = 'config/parameters_config.ini'
    DATASET_PROFILE = ["MOTION_TRACKING"]
    PARAMETERS_PROFILE = ["MOTION_TRACKING_PARAMETERS"]

    setup_logging()
    positionGenerator = PositionGenerator()
    sizeGenerator = SizeGenerator()
    bcb.prepare_genetic_tree_structure(positionGenerator, sizeGenerator)

    for (dataset_profile, parameters_profile) in zip(DATASET_PROFILE, PARAMETERS_PROFILE):
        (dataset_config, parameters_config) = acquire_configuration(
            DATASET_CONFIG_FILE, PARAMETERS_CONFIG_FILE, dataset_profile,
            parameters_profile)
        logger.info(
            "Starting computations with following parameters configuration: " +
            str(PARAMETERS_PROFILE))
        logger.info(" and following dataset configuration: " + str(DATASET_PROFILE))
from alpha.alpha_config import config
import argparse
import os
import pickle
import time

# stop loss non addition limit set to 15 days
stop_loss_prevention_days = 15
# max exposure per sector set to 21%
max_sector_exposure = 0.21
# months in which quarterly rebalance will execute
quarter_months = [1, 4, 7, 10]

logger = setup_logging("alpha")


def initialize(context):
    attach_pipeline(make_pipeline(), 'my_pipeline')
    context.sector_wise_exposure = dict()
    context.sector_stocks = {}
    context.turnover_count = 0
    if context.live_trading is False:
        schedule_function(stop_loss, date_rule=date_rules.month_start())
        schedule_function(rebalance, date_rule=date_rules.month_start())


def before_trading_start(context, data):
    context.pipeline_data = pipeline_output('my_pipeline')
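# --- Illustrative sketch: the rebalance is scheduled monthly, but quarter_months
# gates it to January, April, July and October. get_datetime comes from
# zipline.api; the function body below is a placeholder, not the actual logic.
from zipline.api import get_datetime


def rebalance(context, data):
    if get_datetime().month not in quarter_months:
        return
    # ... quarterly rebalance logic would follow here ...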
from utils.log_utils import setup_logging
from algos.highrisk_algo.highrisk_config import config
import argparse
import os
import pickle
import time
from pathlib import Path
from sqlalchemy import create_engine

# stop loss non addition limit set to 15 days
stop_loss_prevention_days = 15
# max exposure per sector set to 25%
max_sector_exposure = 0.25

logger = setup_logging("highrisk_algo")


def initialize(context):
    attach_pipeline(make_pipeline(), 'my_pipeline')
    context.stop_loss_list = pd.Series()
    context.sector_wise_exposure = dict()
    context.sector_stocks = {}
    context.turnover_count = 0
    # etf stock
    context.shorting_on = False
    if context.live_trading is False:
        schedule_function(rebalance, date_rule=date_rules.month_start())
import datetime
from zipline.utils.events import date_rules
from zipline.api import (attach_pipeline, order_target_percent, order_target,
                         pipeline_output, schedule_function)
from utils.log_utils import setup_logging
from algos.mid_term_high_risk.mthr_config import config
import argparse
import pickle
import time

# stop loss non addition limit set to 25 days
stop_loss_prevention_days = 25
# max exposure per sector set to 17%
max_sector_exposure = 0.17

logger = setup_logging("mid_term_high_risk")


def initialize(context):
    attach_pipeline(make_pipeline(), 'my_pipeline')
    context.stop_loss_list = pd.Series()
    context.count = 0
    context.sector_wise_exposure = dict()
    context.sector_stocks = {}
    context.turnover_count = 0
    if context.live_trading is False:
        schedule_function(rebalance, date_rule=date_rules.week_start())


def before_trading_start(context, data):