def __init__(self):
    """Load process-status labels from configuration and open a DB connection.

    Also resets the error flag used to track whether the process failed.
    """
    self.conf = Config()
    self.dbconn = DbConnection()
    # Bind the lookup once; all four labels come from the same section.
    get_cfg = self.conf.get_config
    self.RUNNING = get_cfg('process_params', 'status_running')
    self.WAITING = get_cfg('process_params', 'status_waiting')
    self.SUCCESS = get_cfg('process_params', 'status_success')
    self.ERROR = get_cfg('process_params', 'status_error')
    self.IsError = False
def __init__(self, param_pct_order_placed, stop_loss_pct):
    """Set up the Binance trading API wrapper.

    :param param_pct_order_placed: pct offset used when placing orders
    :param stop_loss_pct: stop-loss threshold (pct)
    """
    conf = Config()
    # Caller-supplied trading parameters.
    self.param_pct_order_placed = param_pct_order_placed
    self.stop_loss_pct = stop_loss_pct
    # Binance credentials and API settings from configuration.
    self.API_KEY = conf.get_config('binance', 'api_key')
    self.API_SECRET = conf.get_config('binance', 'api_secret')
    self.MAX_DIFF_DATE_HOUR = int(
        conf.get_config('trading_module_params', 'max_diff_date_hour'))
    self.precision = int(conf.get_config('binance', 'api_amount_precision'))
    # Authenticated client from the python-binance library.
    self.client = Client(self.API_KEY, self.API_SECRET)
def __calcul_signals(self):
    """Compute signal probabilities per trading pair and keep those above threshold.

    Fills ``self.signals`` (all probabilities) and ``self.all_signals``
    (only rows whose ``signal_prob`` exceeds the configured threshold).
    """
    threshold = float(
        Config().get_config('trading_module_params', 'threshold'))  # %
    for pair_name in self.trading_pairs:
        probs = calcul_signals_for_crypto(self.model, self.X_tests[pair_name])
        # Keep the full probability frame for inspection/debugging.
        self.signals[pair_name] = probs
        # Only rows strong enough to trigger a trade.
        self.all_signals[pair_name] = probs[probs.signal_prob > threshold]
def __init__(self):
    """Load CryptoCompare credentials, endpoint URLs and defaults from config."""
    self.conf = Config()

    def _cc(key):
        # Every setting below lives in the same config section.
        return self.conf.get_config('cryptocompare_params', key)

    # API key and auth header.
    self.API_KEY = _cc('api_key_cryptocompare')
    self.HEADERS = {"authorization": "Apikey " + self.API_KEY}
    # API endpoint URLs.
    self.URL_COIN_LIST = _cc('url_coin_list')
    self.URL_PRICE = _cc('url_price')
    self.URL_HIST_PRICE = _cc('url_hist_price')
    self.URL_SOCIAL_STATS = _cc('url_social_stats')
    self.URL_TRADING_PAIRS = _cc('url_trading_pairs')
    self.URL_HISTO_HOUR_PAIR = _cc('url_histo_hour_pair')
    self.URL_HISTO_DAY_PAIR = _cc('url_histo_day_pair')
    # Default quote currency.
    self.CURR = _cc('default_currency')
def __init__(self):
    """Open a psycopg2 connection using settings from the [db] config section.

    On an EC2-external machine (matched by hostname), the external DB port is
    used instead of the default one.  On connection failure the error is
    logged (with the password masked) and ``self.conn`` is left unset, as in
    the original best-effort behavior.
    """
    self.conf = Config()
    dbhost = self.conf.get_config('db', 'dbhost')
    dbname = self.conf.get_config('db', 'dbname')
    dbuser = self.conf.get_config('db', 'dbuser')
    dbpassword = self.conf.get_config('db', 'dbpassword')
    dbport = str(self.conf.get_config('db', 'dbport'))

    # Manage when not on server AWS EC2: use the externally exposed port.
    machine_name = socket.gethostname()
    if machine_name in ['CSA-Server-ML']:  # 'DESKTOP-RTOK6M3'
        dbport = self.conf.get_config('db', 'dbport_ext')

    try:
        self.conn = psycopg2.connect(host=dbhost,
                                     dbname=dbname,
                                     user=dbuser,
                                     password=dbpassword,
                                     port=dbport)
    except Exception as e:
        # Security fix: the original message embedded the plaintext password
        # in the log; mask it.  Lazy %-style args avoid building the string
        # unless the record is actually emitted.
        logging.error(
            "Error : %s - Error while connecting to DB : postgresql://%s:***@%s:%s/%s",
            e, dbuser, dbhost, dbport, dbname)
from commons.config import Config
from commons.utils import utils
import logging
import pandas.io.sql as psql
import pandas as pd
from datetime import datetime, timedelta, date
from sqlalchemy import create_engine

# Module-level config instance shared by the KPI functions.
conf = Config()


# region Subscribers
def calcul_kpi_subscribers_trend():
    """Load the last 90 days of Reddit subscriber counts and group them per crypto.

    Pulls ``social_stats_reddit_histo`` rows (joined against ``coins``) from
    the database, indexes them, and groups by ``id_cryptocompare``.
    NOTE(review): no return or persistence step is visible in this chunk —
    the function may continue beyond the visible source.
    """
    logging.warning("calcul_kpi_subscribers_trend - start")
    # SQLAlchemy engine built from the project's connection string.
    connection = create_engine(utils.get_connection_string())
    # get data with query
    squery = 'select so.id_cryptocompare, so.reddit_subscribers, so.timestamp from social_stats_reddit_histo so\n'
    squery += 'inner join coins co on (co.id_cryptocompare = so.id_cryptocompare)\n'
    squery += 'where so.timestamp > CURRENT_TIMESTAMP - interval \'90 days\';'
    df = psql.read_sql_query(squery, connection)
    # set index on column timestamp
    # NOTE(review): the second positional argument of DataFrame.set_index is
    # `drop`, not a second key — 'id_cryptocompare' is being interpreted as
    # drop=True here, so only 'timestamp' becomes the index.  If a MultiIndex
    # was intended, this should be set_index(['timestamp', 'id_cryptocompare']);
    # confirm against downstream usage before changing.
    df.set_index('timestamp', 'id_cryptocompare', inplace=True)
    # group by crypto
    df2 = df.groupby('id_cryptocompare')
import os import sys import re import os from commons.config import Config from commons.validations import Validations from commons.database import Database from commons.dns import DNS def error(message): print message sys.exit(-1) if __name__ == "__main__": config = Config() dns = DNS(config.get("domain").get("dns")) db = Database(config.get("database")) validations = Validations(db, config.get("domain")) parser = argparse.ArgumentParser(description="Manage RED redirections with command-line utility") parser.add_argument("action", choices=['add', 'del', 'list', 'check'], help="Action") parser.add_argument("domain", help="Domain name", nargs='?') parser.add_argument("url", nargs='?', help="Destination URL") args = parser.parse_args() domain = args.domain action = args.action if action == 'list': for red in db.get_redirections():
def __init__(self,
             model,
             model_term,
             init_date,
             end_date,
             X_tests,
             close_price,
             target,
             thresholds,
             trading_pairs,
             cash_asset,
             trace=True,
             param_stop_loss_pct=None):
    """Set up a backtesting run: load parameters, compute signals, build the
    fake trading API and the trading module.

    :param model: trained model used to compute trading signals
    :param model_term: term/horizon identifier of the model
    :param init_date: simulation start date
    :param end_date: simulation end date (shifted back by 1h internally)
    :param X_tests: per-trading-pair feature frames fed to the model
    :param close_price: close price series used for order simulation
    :param target: prediction target
    :param thresholds: per-pair decision thresholds
    :param trading_pairs: dict of trading pairs (values expose ``base_asset``)
    :param cash_asset: asset used as cash (e.g. the quote currency)
    :param trace: enable verbose tracing in the trading module
    :param param_stop_loss_pct: stop-loss pct; falls back to config when None
    """
    conf = Config()
    # NOTE(review): 'backtesting_stragegy_params' is misspelled but presumably
    # matches the config file section name — do not "fix" without updating config.
    self.param_init_amount_cash = float(
        conf.get_config('backtesting_stragegy_params',
                        'init_amount_cash'))  # $
    self.param_fees = float(
        conf.get_config('backtesting_stragegy_params', 'fees'))  # $
    self.param_bet_size = float(
        conf.get_config('trading_module_params', 'bet_size'))  # %
    self.param_min_bet_size = float(
        conf.get_config('trading_module_params', 'min_bet_size'))  # $
    self.param_pct_order_placed = float(
        conf.get_config('trading_module_params',
                        'pct_order_placed'))  # 1% up/down
    self.param_nb_periods_to_hold_position = int(
        conf.get_config('trading_module_params',
                        'nb_periods_to_hold_position'))  # 1d
    # Explicit argument wins; otherwise read the stop-loss pct from config.
    self.param_stop_loss_pct = param_stop_loss_pct
    if self.param_stop_loss_pct is None:
        self.param_stop_loss_pct = float(
            conf.get_config('trading_module_params', 'stop_loss_pct'))
    self.signals = {}
    self.all_signals = {}
    self.model = model
    self.model_term = model_term
    self.init_date = init_date
    self.end_date = end_date - timedelta(
        hours=1
    )  # to avoid getting a price unknown at the end of simulation
    self.X_tests = X_tests
    self.close_price = close_price
    self.target = target
    self.thresholds = thresholds
    self.trading_pairs = trading_pairs
    self.cash_asset = cash_asset
    self.trace = trace
    # Populates self.signals / self.all_signals from the model predictions.
    self.__calcul_signals()
    # set init positions: all cash, zero units of every traded base asset.
    init_positions = {self.cash_asset: self.param_init_amount_cash}
    for key, value in trading_pairs.items():
        init_positions[value.base_asset] = 0.0
    # trading API (fake one for simulation)
    trading_api = TradingApiFake(self.param_pct_order_placed,
                                 self.param_stop_loss_pct)
    trading_api.init_from_backtesting_strategy(init_positions,
                                               self.param_fees,
                                               self.close_price)
    # trading module driving the simulated orders.
    self.trading_module = TradingModule(
        trading_api, self.param_bet_size, self.param_min_bet_size,
        self.param_pct_order_placed, self.param_nb_periods_to_hold_position,
        self.trading_pairs, self.cash_asset, self.thresholds, self.trace,
        self.param_stop_loss_pct)
def __init__(self):
    """Load the CoinMarketCap (new API) price-list URL and API key from config."""
    self.conf = Config()
    section = 'cmc_params_new'
    # API urls
    self.URL_PRICE_LIST = self.conf.get_config(section, 'url_prices')
    self.API_KEY = self.conf.get_config(section, 'api_key')
def __init__(self):
    """Load the legacy CoinMarketCap endpoint URLs from config."""
    self.conf = Config()
    section = 'cmc_params'
    # API urls
    self.URL_PRICE_LIST = self.conf.get_config(section, 'url_prices')
    self.URL_GLOBAL_DATA = self.conf.get_config(section, 'url_global_data')
def get_global_dataset_for_crypto(connection,
                                  id_cryptocompare_crypto,
                                  older_date=None):
    """Build the full hourly feature dataset for one crypto.

    Loads OHLCV (crypto, Tether, Bitcoin), Reddit subscribers, market-wide
    aggregates and Google-Trend series, applies feature engineering, joins
    everything into one frame, interpolates gaps and drops NaN rows.

    :param connection: DB connection/engine passed through to the loaders
    :param id_cryptocompare_crypto: CryptoCompare id of the target crypto
    :param older_date: oldest date to retrieve; config default when None
    :return: DataFrame indexed by (timestamp, id_cryptocompare)
    """
    # ------------------ PRE-PROCESSING : Retrieve data and prepare ------------------ #
    id_cryptocompare_crypto = str(id_cryptocompare_crypto)
    conf = Config()
    # Tether and Bitcoin series are always loaded as market references.
    id_cryptocompare_tether = str(
        conf.get_config('cryptocompare_params', 'id_cryptocompare_tether'))
    id_cryptocompare_bitcoin = str(
        conf.get_config('cryptocompare_params', 'id_cryptocompare_bitcoin'))
    if older_date is None:
        older_date = str(
            conf.get_config('data_params', 'older_date_to_retrieve'))

    # --------------------------------
    # OHLCV
    # --------------------------------
    df_ohlcv = PreprocLoad.get_dataset_ohlcv(connection,
                                             id_cryptocompare_crypto,
                                             older_date)
    df_ohlcv = PreprocPrepare.clean_dataset_ohlcv_spe(df_ohlcv)
    # First real data point; used at the end to trim warm-up rows added only
    # for indicator computation.
    min_date = df_ohlcv.index.min()
    df_ohlcv = PreprocPrepare.get_ohlcv_1h_plus_missing_infos(
        connection, df_ohlcv, id_cryptocompare_crypto, older_date)

    df_ohlcv_tether = PreprocLoad.get_dataset_ohlcv(
        connection, id_cryptocompare_tether, older_date)
    df_ohlcv_tether = PreprocPrepare.clean_dataset_ohlcv_spe(
        df_ohlcv_tether)
    df_ohlcv_tether = PreprocPrepare.get_ohlcv_1h_plus_missing_infos(
        connection, df_ohlcv_tether, id_cryptocompare_tether, older_date)

    df_ohlcv_bitcoin = PreprocLoad.get_dataset_ohlcv(
        connection, id_cryptocompare_bitcoin, older_date)
    df_ohlcv_bitcoin = PreprocPrepare.clean_dataset_ohlcv_spe(
        df_ohlcv_bitcoin)
    df_ohlcv_bitcoin = PreprocPrepare.get_ohlcv_1h_plus_missing_infos(
        connection, df_ohlcv_bitcoin, id_cryptocompare_bitcoin, older_date)
    #df_ohlcv = PreprocPrepare.add_ohlcv_missing_infos(connection, df_ohlcv, id_cryptocompare_crypto, older_date)

    # --------------------------------
    # REDDIT SUBSCRIBERS
    # --------------------------------
    df_reddit = PreprocLoad.get_dataset_reddit(connection,
                                               id_cryptocompare_crypto,
                                               older_date)
    df_reddit = df_reddit[df_reddit.reddit_subscribers.notnull()]
    df_reddit = PreprocPrepare.do_timestamp_tasks(df_reddit)
    # Upsample to hourly and interpolate the subscriber counts.
    df_reddit = df_reddit.resample('1H').interpolate()
    df_reddit['reddit_subscribers'] = df_reddit[
        'reddit_subscribers'].astype(int)

    # --------------------------------
    # ALL CRYPTOS
    # --------------------------------
    df_all_cryptos = PreprocLoad.get_dataset_all_cryptos(
        connection, older_date)
    df_all_cryptos = PreprocPrepare.clean_dataset_ohlcv_std(
        df_all_cryptos,
        columns_name=['global_volume_usd_1h', 'global_market_cap_usd'])

    # --------------------------------
    # GOOGLE TREND
    # --------------------------------
    # crypto - last month => Need to import and keep old data
    df_google_trend_crypto_1m = PreprocLoad.get_dataset_google_trend(
        connection, id_cryptocompare_crypto, '_1m', older_date)
    df_google_trend_crypto_1m = PreprocPrepare.clean_dataset_google_trend(
        df_google_trend_crypto_1m)
    # crypto - 5 years
    df_google_trend_crypto_5y = PreprocLoad.get_dataset_google_trend(
        connection, id_cryptocompare_crypto, '', older_date)
    df_google_trend_crypto_5y = PreprocPrepare.clean_dataset_google_trend(
        df_google_trend_crypto_5y)
    # bitcoin - last month
    df_google_trend_bitcoin_1m = PreprocLoad.get_dataset_google_trend(
        connection, id_cryptocompare_bitcoin, '_1m', older_date)
    df_google_trend_bitcoin_1m = PreprocPrepare.clean_dataset_google_trend(
        df_google_trend_bitcoin_1m)
    # bitcoin - 5 years
    df_google_trend_bitcoin_5y = PreprocLoad.get_dataset_google_trend(
        connection, id_cryptocompare_bitcoin, '', older_date)
    df_google_trend_bitcoin_5y = PreprocPrepare.clean_dataset_google_trend(
        df_google_trend_bitcoin_5y)
    # merge data: overlay the fresher 1-month series onto the 5-year ones.
    df_google_trend_crypto_5y = PreprocPrepare.merge_google_trend_data(
        df_google_trend_crypto_1m, df_google_trend_crypto_5y)
    df_google_trend_bitcoin_5y = PreprocPrepare.merge_google_trend_data(
        df_google_trend_bitcoin_1m, df_google_trend_bitcoin_5y)

    # ------------------ PRE-PROCESSING : Feature engineering ------------------ #
    df_reddit = PreprocFeatureEngineering.feature_engineering_reddit(
        df_reddit)
    df_ohlcv_fe = PreprocFeatureEngineering.feature_engineering_ohlcv(
        df_ohlcv)
    df_ohlcv_tether_fe = PreprocFeatureEngineering.feature_engineering_ohlcv(
        df_ohlcv_tether)
    df_ohlcv_bitcoin_fe = PreprocFeatureEngineering.feature_engineering_ohlcv(
        df_ohlcv_bitcoin)
    df_technical_analysis = PreprocFeatureEngineering.feature_engineering_technical_analysis(
        df_ohlcv)
    df_all_cryptos = PreprocFeatureEngineering.feature_engineering_ohlcv_all_cryptos(
        df_all_cryptos)
    df_google_trend_crypto_5y = PreprocFeatureEngineering.feature_engineering_google_trend(
        df_google_trend_crypto_5y, 'y')
    df_google_trend_bitcoin_5y = PreprocFeatureEngineering.feature_engineering_google_trend(
        df_google_trend_bitcoin_5y, 'y')

    # Join dfs: everything is aligned on the hourly timestamp index.
    df_ohlcv_fe = df_ohlcv_fe.join(df_ohlcv_tether_fe, rsuffix='_tether')
    df_ohlcv_fe = df_ohlcv_fe.join(df_ohlcv_bitcoin_fe, rsuffix='_bitcoin')
    df_global = df_ohlcv_fe.join(df_technical_analysis)
    df_global = df_global.join(df_reddit)
    df_global = df_global.join(df_all_cryptos)
    df_global = df_global.join(df_google_trend_crypto_5y,
                               rsuffix='_crypto_5y')
    df_global = df_global.join(df_google_trend_bitcoin_5y,
                               rsuffix='_bitcoin_5y')
    # NOTE(review): the result of this resample/interpolate is discarded —
    # likely intended to be assigned back to df_global; confirm before fixing.
    df_global.resample('1H').interpolate()
    df_global.reddit_subscribers = df_global.reddit_subscribers.interpolate(
        method='linear', limit_area='outside')
    # remove data added only to be able to calcul indicators, etc. => we don't want to take it into account
    df_global = df_global[min_date:df_global.index.max()]
    # remove 24 first hours (some things can't be extrapolated well)
    df_global = df_global.iloc[24:]
    df_global = df_global.interpolate(method='nearest', axis=0).ffill()
    # drop na if exist
    df_final = df_global.dropna(axis='rows')
    diff = df_global.shape[0] - df_final.shape[0]
    if diff > 0:
        print(str(diff) + ' rows containing Nan dropped')
    # index with id_crypto + date
    df_final['id_cryptocompare'] = id_cryptocompare_crypto
    df_final.reset_index(drop=False, inplace=True)
    df_final.set_index(['timestamp', 'id_cryptocompare'], inplace=True)
    return df_final