def __init__(self):
    self.conf = Config()
    self.dbconn = DbConnection()
    self.RUNNING = self.conf.get_config('process_params', 'status_running')
    self.WAITING = self.conf.get_config('process_params', 'status_waiting')
    self.SUCCESS = self.conf.get_config('process_params', 'status_success')
    self.ERROR = self.conf.get_config('process_params', 'status_error')
    self.IsError = False
class DbConnection:
    conn = None
    conf = None

    def __init__(self):
        self.conf = Config()
        dbhost = self.conf.get_config('db', 'dbhost')
        dbname = self.conf.get_config('db', 'dbname')
        dbuser = self.conf.get_config('db', 'dbuser')
        dbpassword = self.conf.get_config('db', 'dbpassword')
        dbport = str(self.conf.get_config('db', 'dbport'))

        # manage when not on server AWS EC2
        machine_name = socket.gethostname()
        if machine_name in ['CSA-Server-ML']:  # 'DESKTOP-RTOK6M3'
            dbport = self.conf.get_config('db', 'dbport_ext')

        try:
            self.conn = psycopg2.connect(host=dbhost, dbname=dbname, user=dbuser,
                                         password=dbpassword, port=dbport)
        except Exception as e:
            logging.error("Error : " + str(e) + " - Error while connecting to DB : "
                          + 'postgresql://' + dbuser + ':' + dbpassword + '@'
                          + dbhost + ':' + dbport + '/' + dbname)

    def get_query_result(self, query):
        cursor = self.conn.cursor()
        try:
            cursor.execute(query)
        except Exception as e:
            logging.error("Error : " + str(e) + " - Error while executing query : " + query)
            return None
        rows = cursor.fetchall()
        cursor.close()
        return rows

    def execute_query(self, query):
        cursor = self.conn.cursor()
        try:
            cursor.execute(query)
            self.conn.commit()
            cursor.close()
            return 0
        except Exception as e:
            logging.error("Error : " + str(e) + " - Error while executing query : " + query)
            raise

    def __del__(self):
        # conn may be None if the connection attempt failed in __init__
        if self.conn is not None:
            self.conn.close()
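# Minimal usage sketch (an assumption, not from the source): DbConnection reads
# its settings from Config, so a caller only instantiates it and runs SQL.
# The query is illustrative; the 'coins' table appears elsewhere in this code base.
db = DbConnection()
rows = db.get_query_result('SELECT id_cryptocompare FROM coins LIMIT 5')
if rows is not None:  # None means the query failed and was logged
    for row in rows:
        print(row)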
class CoinMarketCap:
    conf = None
    URL_PRICE_LIST = None
    URL_GLOBAL_DATA = None

    def __init__(self):
        self.conf = Config()

        # API urls
        self.URL_PRICE_LIST = self.conf.get_config('cmc_params', 'url_prices')
        self.URL_GLOBAL_DATA = self.conf.get_config('cmc_params', 'url_global_data')

    # region Get prices
    def get_price_list(self, format_response=False):
        response = self.query_coinmarketcap(self.URL_PRICE_LIST, False)
        if format_response:
            return list(response.keys())
        else:
            return response
    # endregion

    # region Get global data
    def get_global_data(self, format_response=False):
        response = self.query_coinmarketcap(self.URL_GLOBAL_DATA, False)
        if format_response:
            return list(response.keys())
        else:
            return response
    # endregion

    # region Utils
    @staticmethod
    def query_coinmarketcap(url, error_check=True):
        try:
            response = requests.get(url).json()
        except Exception as e:
            logging.error("Error getting prices information from CMC. " + str(e))
            return None
        if error_check and 'Response' in response.keys():
            logging.warning(response['Message'])
            return None
        return response

    @staticmethod
    def format_parameter(parameter):
        if isinstance(parameter, list):
            return ','.join(parameter)
        else:
            return parameter
    # endregion
def __calcul_signals(self):
    conf = Config()
    threshold = float(conf.get_config('trading_module_params', 'threshold'))  # %

    for trading_pair, value in self.trading_pairs.items():
        df_probs = calcul_signals_for_crypto(self.model, self.X_tests[trading_pair])
        self.signals[trading_pair] = df_probs

        # all signals
        signal = df_probs.signal_prob > threshold
        self.all_signals[trading_pair] = df_probs[signal]
def __init__(self):
    self.conf = Config()

    # API Key
    self.API_KEY = self.conf.get_config('cryptocompare_params', 'api_key_cryptocompare')
    self.HEADERS = {"authorization": "Apikey " + self.API_KEY}

    # API urls
    self.URL_COIN_LIST = self.conf.get_config('cryptocompare_params', 'url_coin_list')
    self.URL_PRICE = self.conf.get_config('cryptocompare_params', 'url_price')
    self.URL_HIST_PRICE = self.conf.get_config('cryptocompare_params', 'url_hist_price')
    self.URL_SOCIAL_STATS = self.conf.get_config('cryptocompare_params', 'url_social_stats')
    self.URL_TRADING_PAIRS = self.conf.get_config('cryptocompare_params', 'url_trading_pairs')
    self.URL_HISTO_HOUR_PAIR = self.conf.get_config('cryptocompare_params', 'url_histo_hour_pair')
    self.URL_HISTO_DAY_PAIR = self.conf.get_config('cryptocompare_params', 'url_histo_day_pair')

    # DEFAULTS
    self.CURR = self.conf.get_config('cryptocompare_params', 'default_currency')
def check_sensors(self):
    """
    Checks whether the registered sensors are still active.
    """
    db = None
    try:
        db = mysql.connector.Connect(**Config.dbinfo())
        cursor = db.cursor()
        cursor.execute("SELECT sensorID, address, port FROM Sensors")
        rows = cursor.fetchall()
    except Exception as e:
        print(e)
def __init__(self):
    self.conf = Config()
    dbhost = self.conf.get_config('db', 'dbhost')
    dbname = self.conf.get_config('db', 'dbname')
    dbuser = self.conf.get_config('db', 'dbuser')
    dbpassword = self.conf.get_config('db', 'dbpassword')
    dbport = str(self.conf.get_config('db', 'dbport'))

    # manage when not on server AWS EC2
    machine_name = socket.gethostname()
    if machine_name in ['CSA-Server-ML']:  # 'DESKTOP-RTOK6M3'
        dbport = self.conf.get_config('db', 'dbport_ext')

    try:
        self.conn = psycopg2.connect(host=dbhost, dbname=dbname, user=dbuser,
                                     password=dbpassword, port=dbport)
    except Exception as e:
        logging.error("Error : " + str(e) + " - Error while connecting to DB : "
                      + 'postgresql://' + dbuser + ':' + dbpassword + '@'
                      + dbhost + ':' + dbport + '/' + dbname)
class CoinMarketCapNew:
    conf = None
    URL_PRICE_LIST = None
    API_KEY = None

    def __init__(self):
        self.conf = Config()

        # API urls
        self.URL_PRICE_LIST = self.conf.get_config('cmc_params_new', 'url_prices')
        self.API_KEY = self.conf.get_config('cmc_params_new', 'api_key')

    # region Get prices
    def get_price_list(self):
        return self.get_dataframe_from_response(self.URL_PRICE_LIST)

    def get_dataframe_from_response(self, url):
        df = pd.DataFrame()
        try:
            headers = {
                'Accept': 'application/json',
                'Accept-Encoding': 'deflate, gzip',
                'X-CMC_PRO_API_KEY': self.API_KEY,
            }
            r = requests.get(url, headers=headers)
            if r.status_code == 200:
                response = json.loads(r.text)
                df = pd.DataFrame(response['data'])
            else:
                logging.error("Error getting prices information from CMC. Response status_code= " + str(r.status_code))
                logging.error("Message= " + str(r.text))
        except Exception as e:
            logging.error("Error getting prices information from CMC. " + str(e))
        return df
    # endregion
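# Usage sketch (an assumption, not from the source): get_price_list returns an
# empty DataFrame on any failure, so emptiness is the error signal rather than
# an exception.
cmc = CoinMarketCapNew()
df_prices = cmc.get_price_list()
if df_prices.empty:
    logging.warning("No price data returned from CMC")
else:
    print(df_prices.head())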
def m_register(self, address, port, uuid):
    """
    Adds information about a new monitor to the directory.
    """
    db = None
    try:
        db = mysql.connector.Connect(**Config.dbinfo())
        cursor = db.cursor()
        # cursor.execute("INSERT INTO Monitors(address, port, uuid) VALUES(SUBSTRING_INDEX((SELECT host FROM information_schema.processlist WHERE ID=CONNECTION_ID()), ':', 1), %s, %s)", (self.port, MonitorHTTP.monitor.get_id()))
        cursor.execute("INSERT INTO Monitors(address, port, uuid) VALUES(%s, %s, %s)", (address, port, uuid))
        db.commit()
    except Exception as e:
        print("Database Error: %s" % e)
def del_sensor(self, sid):
    """
    Removes a sensor from the database.
    sid - sensor ID number.
    """
    db = None
    try:
        db = mysql.connector.Connect(**Config.dbinfo())
        cursor = db.cursor()
        cursor.execute("DELETE FROM Sensors WHERE sensorID = %s", (sid,))
        db.commit()
    except Exception as e:
        print(e)
def del_sensor(self, host, port, uuid):
    """
    Removes a sensor from the database.
    host - sensor address.
    port - sensor port.
    uuid - UUID of the owning monitor.
    """
    db = None
    try:
        db = mysql.connector.Connect(**Config.dbinfo())
        cursor = db.cursor()
        cursor.execute("DELETE FROM Sensors WHERE address = %s AND port = %s AND monitorUUID = %s", (host, port, uuid))
        db.commit()
    except Exception as e:
        print(e)
def __init__(self, param_pct_order_placed, stop_loss_pct):
    conf = Config()
    self.param_pct_order_placed = param_pct_order_placed
    self.API_KEY = conf.get_config('binance', 'api_key')
    self.API_SECRET = conf.get_config('binance', 'api_secret')
    self.MAX_DIFF_DATE_HOUR = int(conf.get_config('trading_module_params', 'max_diff_date_hour'))
    self.stop_loss_pct = stop_loss_pct
    self.client = Client(self.API_KEY, self.API_SECRET)  # lib python-binance
    self.precision = int(conf.get_config('binance', 'api_amount_precision'))
def del_monitor(self, mid, uuid):
    """
    Removes a monitor from the database.
    mid - monitor ID number.
    uuid - monitor UUID.
    """
    db = None
    try:
        db = mysql.connector.Connect(**Config.dbinfo())
        cursor = db.cursor()
        cursor.execute("DELETE FROM Sensors WHERE monitorUUID = %s", (uuid,))
        cursor.execute("DELETE FROM Monitors WHERE monitorID = %s", (mid,))
        db.commit()
    except Exception as e:
        print(e)
def get_monitors(self):
    """
    Returns full information about all monitors stored in the database.
    """
    db = None
    monitors = []
    try:
        db = mysql.connector.Connect(**Config.dbinfo())
        cursor = db.cursor()
        cursor.execute("SELECT * FROM Monitors")
        for row in cursor.fetchall():
            monitor = {"monitorID": str(row[0]), "address": str(row[1]), "port": str(row[2])}
            monitors.append(monitor)
    except Exception as e:
        print(e)
def m_register(self, address, port, uuid):
    """
    Adds information about a new monitor to the directory.
    """
    db = None
    try:
        db = mysql.connector.Connect(**Config.dbinfo())
        cursor = db.cursor()

        # Remove possible duplicates
        cursor.execute("SELECT monitorID, uuid FROM Monitors WHERE address = %s AND port = %s", (address, port))
        for row in cursor.fetchall():
            cursor.execute("DELETE FROM Monitors WHERE monitorID = %s", (row[0],))
            cursor.execute("DELETE FROM Sensors WHERE monitorUUID = %s", (row[1],))

        # cursor.execute("INSERT INTO Monitors(address, port, uuid) VALUES(SUBSTRING_INDEX((SELECT host FROM information_schema.processlist WHERE ID=CONNECTION_ID()), ':', 1), %s, %s)", (self.port, MonitorHTTP.monitor.get_id()))
        cursor.execute("INSERT INTO Monitors(address, port, uuid) VALUES(%s, %s, %s)", (address, port, uuid))
        db.commit()
    except Exception as e:
        print("Database Error: %s" % e)
def s_register(self, uuid, host, port, hostname, cpu=True, ram=True, hdd=True):
    """
    Registers a new sensor.
    uuid - monitor UUID
    host - sensor address
    port - sensor port
    hostname - sensor name
    cpu - whether the sensor monitors CPU load
    ram - whether the sensor monitors RAM usage
    hdd - whether the sensor monitors disk data
    """
    db = None
    try:
        db = mysql.connector.Connect(**Config.dbinfo())
        cursor = db.cursor()
        cursor.execute("INSERT INTO Sensors(monitorUUID, name, address, port, cpu, ram, hdd) VALUES(%s, %s, %s, %s, %s, %s, %s)",
                       (uuid, hostname, host, port, cpu, ram, hdd))
        db.commit()
    except Exception as e:
        print(e)
def get_sensors(self, mid=None):
    """
    Returns full information about all sensors stored in the database.
    """
    db = None
    sensors = []
    try:
        db = mysql.connector.Connect(**Config.dbinfo())
        cursor = db.cursor()
        if mid:
            cursor.execute("SELECT * FROM Sensors WHERE monitorUUID = (SELECT uuid FROM Monitors WHERE monitorID = %s)", (mid,))
        else:
            cursor.execute("SELECT * FROM Sensors")
        for row in cursor.fetchall():
            sensor = {"sensorID": str(row[0]), "name": str(row[2]), "address": str(row[3]),
                      "port": str(row[4]), "cpu": str(row[5]), "ram": str(row[6]), "hdd": str(row[7])}
            sensors.append(sensor)
    except Exception as e:
        print(e)
def __init__(self):
    self.conf = Config()

    # API urls
    self.URL_PRICE_LIST = self.conf.get_config('cmc_params', 'url_prices')
    self.URL_GLOBAL_DATA = self.conf.get_config('cmc_params', 'url_global_data')
def __init__(self):
    super().__init__()
    self.url = 'https://reverse.geocoder.api.here.com/6.2/'
    self.app_id, self.app_code = Config.get_here_info()
    self.radius = 10
import flask
from flask_socketio import SocketIO
from routes.deploy import deploy_app
from routes.configuration import conf_app
from routes.properties import properties_app
from routes.api import api_app
from commons.config import Config
from commons.flask_lsm_auth import LSM
import logging
import sys
import io
import time
from ship.logger import ShipLogger
from commons.log_emitter import LogEmitter

config = Config()
webserver_config = config.get('webserver')

app = flask.Flask("shipui")
app.config['SECRET_KEY'] = webserver_config.get('websockets_secret_key')
socketio = SocketIO(app)

app.register_blueprint(api_app, url_prefix='/api')
app.register_blueprint(deploy_app, url_prefix='/deploy')
app.register_blueprint(conf_app, url_prefix='/conf')
app.register_blueprint(properties_app, url_prefix='/properties')

@app.route("/", methods=["GET"])
def index():
    return flask.redirect("/deploy")
from commons.config import Config
from slackclient import SlackClient

conf = Config()
slack_token = conf.get_config('slack', 'slack_api_token')
sc = SlackClient(slack_token)

def post_message(channel_name, message_content, attachments=''):
    if attachments == '':
        sc.api_call("chat.postMessage", channel=channel_name, text=message_content)
    else:
        sc.api_call("chat.postMessage", channel=channel_name, text=message_content, attachments=attachments)

def post_message_to_bot_alert(message):
    post_message('bot_alerts', message)

def post_message_to_alert_error_import(message):
    post_message('alert_error_import', message)

def post_message_to_alert_error_trading(message):
    post_message('alert_error_trading', message)
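# Usage sketch (an assumption, mirroring how other scripts in this code base
# import the module): the helpers post to fixed channels, while post_message
# takes any channel. The attachment payload is hypothetical; Slack expects a
# JSON-encoded list for attachments.
post_message_to_bot_alert('daily import finished')
post_message('bot_alerts', 'BTC alert', attachments='[{"text": "volume spike detected"}]')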
import flask
from commons.apps_configuration import AppsConfiguration
from commons.config import Config

conf_app = flask.Blueprint("conf_app", __name__, template_folder="../templates")
config = Config()
deploy_config = config.get("deploy")

@conf_app.route("/")
def page_apps():
    apps_conf = AppsConfiguration(deploy_config.get("etcd_environment_url"))
    conf = apps_conf.get()
    return flask.render_template("configuration.html", section="conf", conf=conf, user=flask.g.get("user"))

@conf_app.route("/<app>", methods=["GET"])
def app_config(app):
    apps_conf = AppsConfiguration(deploy_config.get("etcd_environment_url"))
    return apps_conf.serialize_app(app)

@conf_app.route("/<app>", methods=["PUT"])
def app_config_save(app):
    props = flask.request.form.get("props")
    apps_conf = AppsConfiguration(deploy_config.get("etcd_environment_url"))
    apps_conf.save_app(app, props)
    return props
from commons.utils import utils
import pandas.io.sql as psql
import pandas as pd
import numpy as np
from sqlalchemy import create_engine
from commons.config import Config
import matplotlib
matplotlib.use('Agg')  # Set matplotlib use in backend mode
import matplotlib.pyplot as plt
import logging
from commons.processmanager import ProcessManager
import os

# Configuration
conf = Config()
local_images_path = utils.get_path_for_system(
    conf.get_config('s3_bucket', 'local_generated_images_path_linux'),
    conf.get_config('s3_bucket', 'local_generated_images_path'))

def generate_prices_volumes_images():
    logging.warning("generate_prices_volumes_images - start")
    connection = create_engine(utils.get_connection_string())

    # get data with query
    squery = 'select hi.id_cryptocompare, close_price, hi.volume_aggregated, hi.timestamp from histo_ohlcv hi\n'
    squery += 'inner join coins co on (co.id_cryptocompare = hi.id_cryptocompare)\n'
    squery += 'where timestamp > CURRENT_TIMESTAMP - interval \'7 days\''
    df = psql.read_sql_query(squery, connection)
import flask
from commons.sqlite import Database
from commons.nginx import NGINX
from commons.config import Config
from commons.validations import Validations
from commons.dns import DNS
from commons.oracle import OracleDatabase

config = Config()
api_app = flask.Blueprint("api_app", __name__, template_folder="../templates")
db = Database(config.get("database"))
nginx = NGINX(db, config.get("nginx"))
validations = Validations(db, config.get("domain"))
oracle_conn = config.get("domain").get("db_conn")
dns = DNS(config.get("domain"), OracleDatabase(oracle_conn))

@api_app.route("/api/red/local_domain", methods=["GET"])
def local_domain():
    return flask.jsonify({"domain": config.get("domain").get("name", "")})

@api_app.route("/api/red", methods=["GET"])
def index():
    return flask.jsonify({"rows": db.get_redirections()})

@api_app.route("/api/red", methods=["POST"])
def add():
    domain = flask.request.json["domain"].lower().strip()
    url = flask.request.json["url"].strip()
def __init__(self, model, model_term, init_date, end_date, X_tests, close_price,
             target, thresholds, trading_pairs, cash_asset, trace=True,
             param_stop_loss_pct=None):
    conf = Config()
    self.param_init_amount_cash = float(conf.get_config('backtesting_stragegy_params', 'init_amount_cash'))  # $
    self.param_fees = float(conf.get_config('backtesting_stragegy_params', 'fees'))  # $
    self.param_bet_size = float(conf.get_config('trading_module_params', 'bet_size'))  # %
    self.param_min_bet_size = float(conf.get_config('trading_module_params', 'min_bet_size'))  # $
    self.param_pct_order_placed = float(conf.get_config('trading_module_params', 'pct_order_placed'))  # 1% up/down
    self.param_nb_periods_to_hold_position = int(conf.get_config('trading_module_params', 'nb_periods_to_hold_position'))  # 1d
    self.param_stop_loss_pct = param_stop_loss_pct
    if self.param_stop_loss_pct is None:
        self.param_stop_loss_pct = float(conf.get_config('trading_module_params', 'stop_loss_pct'))

    self.signals = {}
    self.all_signals = {}
    self.model = model
    self.model_term = model_term
    self.init_date = init_date
    self.end_date = end_date - timedelta(hours=1)  # to avoid getting a price unknown at the end of simulation
    self.X_tests = X_tests
    self.close_price = close_price
    self.target = target
    self.thresholds = thresholds
    self.trading_pairs = trading_pairs
    self.cash_asset = cash_asset
    self.trace = trace

    self.__calcul_signals()

    # set init positions
    init_positions = {self.cash_asset: self.param_init_amount_cash}
    for key, value in trading_pairs.items():
        init_positions[value.base_asset] = 0.0

    # trading API (fake one for simulation)
    trading_api = TradingApiFake(self.param_pct_order_placed, self.param_stop_loss_pct)
    trading_api.init_from_backtesting_strategy(init_positions, self.param_fees, self.close_price)

    # trading module
    self.trading_module = TradingModule(
        trading_api, self.param_bet_size, self.param_min_bet_size,
        self.param_pct_order_placed, self.param_nb_periods_to_hold_position,
        self.trading_pairs, self.cash_asset, self.thresholds, self.trace,
        self.param_stop_loss_pct)
import sys
import logging
import argparse
from datetime import datetime
from commons.config import Config
from commons.processmanager import ProcessManager
from sqlalchemy import create_engine
from commons.utils import utils
from commons.slack import slack
from ml.utils_ml import get_last_dates_per_trading_pair, calcul_signals_for_crypto, load_obj
from trading.trading_api_binance import TradingApiBinance
from trading.trading_module import TradingModule
from trading.trading_pair import TradingPair
from ml.preproc_prepare import PreprocPrepare

# region config / process manager / logging / sql connection

# Configuration
conf = Config()

# Process manager
procM = ProcessManager()

# Logging params
today = datetime.now().strftime("%Y-%m-%d")
logging.basicConfig(filename='algo_' + today + '.log', format=conf.get_config('log_params', 'log_format'))

# If process can't start because other processes running
IdCurrentProcess = conf.get_config('process_params', 'algo_process_id')
if not procM.start_process(IdCurrentProcess, 'Algo', sys.argv):
    sys.exit(1)

# connection DB
from commons.config import Config
from commons.utils import utils
import logging
import pandas.io.sql as psql
import pandas as pd
from datetime import datetime, timedelta, date
from sqlalchemy import create_engine

conf = Config()

# region Subscribers
def calcul_kpi_subscribers_trend():
    logging.warning("calcul_kpi_subscribers_trend - start")
    connection = create_engine(utils.get_connection_string())

    # get data with query
    squery = 'select so.id_cryptocompare, so.reddit_subscribers, so.timestamp from social_stats_reddit_histo so\n'
    squery += 'inner join coins co on (co.id_cryptocompare = so.id_cryptocompare)\n'
    squery += 'where so.timestamp > CURRENT_TIMESTAMP - interval \'90 days\';'
    df = psql.read_sql_query(squery, connection)

    # set index on timestamp and id_cryptocompare (the original passed the
    # second column name as set_index's 'drop' argument by mistake)
    df.set_index(['timestamp', 'id_cryptocompare'], inplace=True)

    # group by crypto (grouping by index level name)
    df2 = df.groupby('id_cryptocompare')
from commons.config import Config
from commons.utils import utils
import logging
import pandas.io.sql as psql
import pandas as pd
from sqlalchemy import create_engine
from datetime import datetime, timedelta, date
import numpy as np
import matplotlib
matplotlib.use('Agg')  # Set matplotlib use in backend mode
import matplotlib.pyplot as plt
from pytz import timezone

conf = Config()
local_images_path = utils.get_path_for_system(
    conf.get_config('s3_bucket', 'local_generated_images_path_linux'),
    conf.get_config('s3_bucket', 'local_generated_images_path'))

def calcul_kpi_volumes_trend():
    logging.warning("calcul_kpi_volumes_trend - start")

    # region Retrieve data from database
    connection = create_engine(utils.get_connection_string())

    # get data with query
    squery = 'select co.id_cryptocompare, volume_aggregated as volume_mean_last_30d, timestamp from histo_ohlcv hi\n'
    squery += 'inner join coins co on (co.id_cryptocompare = hi.id_cryptocompare)\n'
    squery += 'where hi.timestamp > CURRENT_TIMESTAMP - interval \'30 days\'\n'
    squery += 'and hi.volume_aggregated is not null\n'
    squery += 'order by hi.timestamp'
# -*- coding: utf-8 -*-
import os
import json
import sys
sys.path.append('../')
from commons.cosmosdb import AssetDB, UserDB, PhotoDB
from commons.config import Config

config = Config()

if __name__ == "__main__":
    argvs = sys.argv
    argc = len(argvs)
    if argc != 2:
        print('Usage: # python %s <local.settings.json>' % argvs[0])
        quit()

    print('The content of %s ...\n' % argvs[1])
    local_settings_json = argvs[1]

    with open(local_settings_json) as json_file:
        data = json.load(json_file)
        os.environ['COSMOSDB_ENDPOINT'] = data['Values']['COSMOSDB_ENDPOINT']
        os.environ['COSMOSDB_KEY'] = data['Values']['COSMOSDB_KEY']
        os.environ['COSMOSDB_DATABASE_NAME'] = data['Values']['COSMOSDB_DATABASE_NAME']
        os.environ['COSMOSDB_ASSET_COLLECTION_NAME'] = data['Values']['COSMOSDB_ASSET_COLLECTION_NAME']
        os.environ['COSMOSDB_USER_COLLECTION_NAME'] = data['Values']['COSMOSDB_USER_COLLECTION_NAME']
        os.environ['COSMOSDB_PHOTO_COLLECTION_NAME'] = data['Values']['COSMOSDB_PHOTO_COLLECTION_NAME']
class CryptoCompare:
    conf = None

    # region Params / Constructor
    URL_COIN_LIST = None
    URL_PRICE = None
    URL_HIST_PRICE = None
    URL_SOCIAL_STATS = None
    URL_TRADING_PAIRS = None
    URL_HISTO_HOUR_PAIR = None
    URL_HISTO_DAY_PAIR = None
    CURR = None

    def __init__(self):
        self.conf = Config()

        # API Key
        self.API_KEY = self.conf.get_config('cryptocompare_params', 'api_key_cryptocompare')
        self.HEADERS = {"authorization": "Apikey " + self.API_KEY}

        # API urls
        self.URL_COIN_LIST = self.conf.get_config('cryptocompare_params', 'url_coin_list')
        self.URL_PRICE = self.conf.get_config('cryptocompare_params', 'url_price')
        self.URL_HIST_PRICE = self.conf.get_config('cryptocompare_params', 'url_hist_price')
        self.URL_SOCIAL_STATS = self.conf.get_config('cryptocompare_params', 'url_social_stats')
        self.URL_TRADING_PAIRS = self.conf.get_config('cryptocompare_params', 'url_trading_pairs')
        self.URL_HISTO_HOUR_PAIR = self.conf.get_config('cryptocompare_params', 'url_histo_hour_pair')
        self.URL_HISTO_DAY_PAIR = self.conf.get_config('cryptocompare_params', 'url_histo_day_pair')

        # DEFAULTS
        self.CURR = self.conf.get_config('cryptocompare_params', 'default_currency')
    # endregion

    # region Retrieve infos from CryptoCompare
    def get_coin_list(self, format_response=False):
        response = self.query_cryptocompare(self.URL_COIN_LIST, False)['Data']
        if format_response:
            return list(response.keys())
        else:
            return response

    @rate_limited(4, 1)
    def get_socialstats(self, coin_id):
        return self.query_cryptocompare(self.URL_SOCIAL_STATS.format(coin_id))['Data']

    @rate_limited(5, 1)
    def get_trading_pairs(self, symbol, max_trading_pairs):
        url = self.URL_TRADING_PAIRS.format(symbol, max_trading_pairs)
        data = self.query_cryptocompare(url)
        return self.__get_data_manage_errors(data, url)

    @rate_limited(5, 1)
    def get_histo_hour_pair(self, symbol1, symbol2, limit):
        if limit > 2000:
            limit = 2000
        url = self.URL_HISTO_HOUR_PAIR.format(symbol1, symbol2, limit)
        data = self.query_cryptocompare(url)
        return self.__get_data_manage_errors(data, url)

    @rate_limited(7, 1)
    def get_histo_day_pair(self, symbol1):
        url = self.URL_HISTO_DAY_PAIR.format(symbol1, self.CURR, 2000)
        data = self.query_cryptocompare(url, False, False)
        return data.content

    def __get_data_manage_errors(self, data, url):
        # Retry once after a pause when the first call failed outright
        if data is None:
            time.sleep(10)
            data = self.query_cryptocompare(url)
            if data is None:
                return None
        # Retry once more when the payload came back without its 'Data' key
        if 'Data' not in data.keys():
            time.sleep(5)
            data = self.query_cryptocompare(url)
            if 'Data' not in data.keys():
                return None
        return data['Data']
    # endregion

    # region Utils
    def query_cryptocompare(self, url, error_check=True, json_format=True):
        try:
            # Pass the API key as headers (the original passed the dict
            # positionally, which requests.get treats as query params)
            response = requests.get(url, headers=self.HEADERS)
            if json_format:
                response = response.json()
        except Exception as e:
            logging.error("Error getting information from cryptocompare. " + str(e))
            return None
        if error_check and 'Response' in response.keys() and response['Response'] != 'Success':
            logging.warning(response['Message'] + ' | url=' + url)
            return None
        return response

    @staticmethod
    def format_parameter(parameter):
        if isinstance(parameter, list):
            return ','.join(parameter)
        else:
            return parameter
    # endregion
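# Usage sketch (an assumption, not from the source): the @rate_limited
# decorators throttle the calls, so a naive loop over symbols stays within the
# API limits. The symbols are illustrative.
cc = CryptoCompare()
for symbol in ['BTC', 'ETH']:
    candles = cc.get_histo_hour_pair(symbol, 'USD', 168)  # up to 168 hourly candles, capped at 2000
    if candles is None:
        logging.warning('No hourly data for ' + symbol)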
def __init__(self):
    self.conf = Config()

    # API urls
    self.URL_PRICE_LIST = self.conf.get_config('cmc_params_new', 'url_prices')
    self.API_KEY = self.conf.get_config('cmc_params_new', 'api_key')
import sys
import datetime
import extractdata
import logging
import argparse
from commons.config import Config
from commons.processmanager import ProcessManager
from commons.s3bucket import s3bucket
from commons.utils import utils
from commons.slack import slack

# Configuration
conf = Config()

# Process manager
procM = ProcessManager()

# Logging params
today = datetime.datetime.now().strftime("%Y-%m-%d")
logging.basicConfig(filename='dataimporter_' + today + '.log', format=conf.get_config('log_params', 'log_format'))

# If process can't start because other processes running
IdCurrentProcess = conf.get_config('process_params', 'data_importer_process_id')
if not procM.start_process(IdCurrentProcess, 'DataImporter', sys.argv):
    sys.exit(1)

try:
    if __name__ == '__main__':
        parser = argparse.ArgumentParser(
import flask
import flask_lsm_auth
from routes.api import api_app
import yaml
import os
from commons.config import Config

config = Config()
auth_config = config.get("auth")
authorizedUsers = auth_config.get("users")

app = flask.Flask("redirect")
app.register_blueprint(api_app)

@app.route("/", methods=["GET"])
def index():
    lsm = flask_lsm_auth.LSM(auth_config)
    return flask.render_template("redirection.html", section="redirection", user=lsm.get_login())

@app.route("/logout", methods=["GET"])
def logout():
    lsm = flask_lsm_auth.LSM(auth_config)
    lsm.logout(flask.request.url_root)
    return lsm.compose_response()

@app.after_request
def after_request(res):
    lsm = flask_lsm_auth.LSM(auth_config)
    user = lsm.get_login()
    if not user:
        lsm.login()
# -*- coding: utf-8 -*-
import glob, os, io
import sys
sys.path.append('../')
from commons.config import Config
from commons.faceapi import AzureCognitiveFaceAPI
from commons.blockblob import AzureStorageBlockBlob

config = Config()

# Face API
# pip install azure-cognitiveservices-vision-face
# FaceAPI Python SDK
# https://docs.microsoft.com/en-us/azure/cognitive-services/face/quickstarts/python-sdk
# https://azure.microsoft.com/en-us/services/cognitive-services/face/
# https://github.com/Azure-Samples/cognitive-services-quickstart-code/blob/master/python/Face/FaceQuickstart.py

if __name__ == "__main__":
    storage_info = AzureStorageBlockBlob.parse_storage_conn_string(
        config.get_value('AzureWebJobsStorage'))
    api = AzureCognitiveFaceAPI(config.get_value('FACEAPI_ENDPOINT'),
                                config.get_value('FACEAPI_SUBKEY'),
                                storage_info['AccountName'],
                                storage_info['AccountKey'])

    # person group id should be lowercase and alphanumeric (dash is ok)
    person_group_id = "my-unique-person-group00"
def __init__(self):
    super().__init__()
    self.url = 'https://maps.googleapis.com/maps/api/geocode/json'
    self.key = Config.get_gogole_key()  # sic: method name as defined on Config
from commons.config import Config
from datetime import datetime
import tzlocal
import decimal
import platform
import socket

conf = Config()
DATE_FORMAT = conf.get_config('cryptocompare_params', 'date_format')

# create a new context for this task
ctx = decimal.Context()
ctx.prec = 20

# Format a unix timestamp ex : 1515926107 to timestamp format for database PostgreSQL
def format_linux_timestamp_to_db(integer_timestamp):
    return format_linux_timestamp_to_datetime(integer_timestamp).strftime(DATE_FORMAT)

# Format a unix timestamp ex : 1515926107 to datetime
def format_linux_timestamp_to_datetime(integer_timestamp):
    unix_timestamp = float(integer_timestamp)
    local_timezone = tzlocal.get_localzone()  # get pytz timezone
    return datetime.fromtimestamp(unix_timestamp, local_timezone)

# Convert the given float to a string, without resorting to scientific notation
def float_to_str(f):
    d1 = ctx.create_decimal(repr(f))
    return format(d1, 'f')

def get_connection_string():
    dbhost = conf.get_config('db', 'dbhost')
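# Worked example for float_to_str (the values are illustrative): repr() of a
# small float falls back to scientific notation, which the 20-digit Decimal
# context avoids.
print(repr(0.0000001))          # '1e-07'
print(float_to_str(0.0000001))  # '0.0000001'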
import flask
from commons.sqlite import Database
from commons.nginx import NGINX
from commons.config import Config
from commons.validations import Validations
from commons.dns import DNS

config = Config()
api_app = flask.Blueprint("api_app", __name__, template_folder="../templates")
db = Database(config.get("database"))
nginx = NGINX(db, config.get("nginx"))
validations = Validations(db, config.get("domain"))
oracle_conn = config.get("domain").get("db_conn")
dns = DNS(config.get("domain"), oracle_conn)

@api_app.route("/api/red/local_domain", methods=["GET"])
def local_domain():
    return flask.jsonify({"domain": config.get('domain').get('name', '')})

@api_app.route("/api/red", methods=["GET"])
def index():
    return flask.jsonify({"rows": db.get_redirections()})

@api_app.route("/api/red", methods=["POST"])
def add():
    domain = flask.request.json["domain"].lower().strip()
    url = flask.request.json["url"].strip()
    alternative = flask.request.json["alt"]
    status = True
    message = ""
# -*- coding: utf-8 -*-
import os, io
import sys
sys.path.append('../')
from commons.config import Config
from commons.blockblob import AzureStorageBlockBlob

config = Config()

# Azure Storage Block blob
# pip install azure-storage-blob
# pip install azure-storage-common

if __name__ == "__main__":
    storage_info = AzureStorageBlockBlob.parse_storage_conn_string(
        config.get_value('AzureWebJobsStorage'))
    blobclient = AzureStorageBlockBlob(storage_info['AccountName'],
                                       storage_info['AccountKey'])

    asset_id_for_train = "imageslocal"

    photo_files = []
    try:
        photo_files = blobclient.list_blob(asset_id_for_train)
        for photo_file in photo_files:
            print(photo_file)
    except Exception as e:
        print(str(e))
        quit()
import flask
from commons.apps_configuration import AppsConfiguration
from commons.config import Config

api_app = flask.Blueprint('api_app', __name__, template_folder='../templates')
config = Config()
deploy_config = config.get('deploy')

@api_app.route("/apps", methods=['GET'])
def all_apps():
    apps_conf = AppsConfiguration(deploy_config.get("etcd_environment_url"))
    return flask.jsonify(apps_conf.get())
import os
import sys
import re
import argparse
from commons.config import Config
from commons.validations import Validations
from commons.database import Database
from commons.dns import DNS

def error(message):
    print(message)
    sys.exit(-1)

if __name__ == "__main__":
    config = Config()
    dns = DNS(config.get("domain").get("dns"))
    db = Database(config.get("database"))
    validations = Validations(db, config.get("domain"))

    parser = argparse.ArgumentParser(description="Manage RED redirections with command-line utility")
    parser.add_argument("action", choices=['add', 'del', 'list', 'check'], help="Action")
    parser.add_argument("domain", help="Domain name", nargs='?')
    parser.add_argument("url", nargs='?', help="Destination URL")
    args = parser.parse_args()

    domain = args.domain
    action = args.action

    if action == 'list':
        for red in db.get_redirections():
import datetime
import logging
from kpi_googletrend import calcul_googletrend_kpi
from kpi_reddit import calcul_reddit_kpi
from kpi_market import calcul_volumes_kpi
from alerts import generate_alerts
import argparse
import sys
from commons.config import Config
from commons.processmanager import ProcessManager

# Configuration
conf = Config()

# Process manager
procM = ProcessManager()

# Logging params
today = datetime.datetime.now().strftime("%Y-%m-%d")
logging.basicConfig(filename='algokpi_' + today + '.log', format=conf.get_config('log_params', 'log_format'))

# slack.post_message_to_bot_alert('lol Steven')

# If process can't start because other processes running
IdCurrentProcess = conf.get_config('process_params', 'algokpi_process_id')
if not procM.start_process(IdCurrentProcess, 'AlgoKPI', sys.argv):
    sys.exit(1)

try:
    if __name__ == '__main__':
from urllib import request
from commons.config import Config
import requests
from ratelimit import rate_limited
import logging

conf = Config()
URL_REDDITMETRIC = conf.get_config('reddit_params', 'url_redditmetric')
URL_REDDIT_START = conf.get_config('reddit_params', 'url_reddit_start')
URL_REDDIT_END = conf.get_config('reddit_params', 'url_reddit_end')

# region Scraping https://www.reddit.com/r/###SUBREDDIT_NAME###/about.json
@rate_limited(5, 1)
def get_reddit_infos_real_time(subreddit):
    url = URL_REDDIT_START + subreddit + URL_REDDIT_END
    try:
        response = requests.get(url, headers={'User-agent': 'algocryptos'}).json()
    except Exception as e:
        logging.error("Error getting information from get_reddit_infos_real_time." + str(e))
        return None

    if response.get('data'):
        return response['data']
    else:
        return None
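# Usage sketch (an assumption, not from the source): Reddit's about.json 'data'
# object includes fields like 'subscribers'; the subreddit name is illustrative.
infos = get_reddit_infos_real_time('Bitcoin')
if infos:
    print(infos.get('subscribers'))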
class ProcessManager:
    dbconn = None
    conf = None
    RUNNING = None
    WAITING = None
    SUCCESS = None
    ERROR = None
    IsError = None

    def __init__(self):
        self.conf = Config()
        self.dbconn = DbConnection()
        self.RUNNING = self.conf.get_config('process_params', 'status_running')
        self.WAITING = self.conf.get_config('process_params', 'status_waiting')
        self.SUCCESS = self.conf.get_config('process_params', 'status_success')
        self.ERROR = self.conf.get_config('process_params', 'status_error')
        self.IsError = False

    # When starting a process
    def start_process(self, process_id, name, args, retry_count=0):
        # If no args => Error => Kill process
        if args is None or len(args) < 2:
            logging.error("start_process - no args")
            return False

        # if help, ok
        if args[1] == '-h':
            return True

        # check if processid should be updated regarding args (specific behaviour for a specific arg)
        process_id = self.conf.get_config('process_params', args[1].replace('-', ''), fallback=process_id)

        concatname = name + " " + str(args[1])
        logging.warning("------------------------------")
        logging.warning("START PROCESS - " + concatname)

        self.__delete_old_processes()

        blockingprocesses = self.conf.get_config('process_params', str(process_id))

        # Check if blocking processes are running (SQL perspective, not linux processes - should be equivalent btw)
        rows = self.dbconn.get_query_result(
            'Select * from process_params where (process_id IN (' + str(blockingprocesses) + ') and status = '
            + "'" + self.RUNNING + "')"
            + ' OR (process_name = \'' + concatname + '\' and status = \'' + self.RUNNING + '\')')

        if rows is not None and len(rows) > 0:
            logging.warning('Blocking info : ' + str(rows))

            # Check if process should be placed in Waiting
            if retry_count == 0:
                if self.__should_be_waiting(process_id, concatname):
                    self.__insert_process(process_id, concatname, self.WAITING)
                else:
                    logging.error("START PROCESS - blocking processes running and should not wait : " + concatname)
                    self.setIsError()
                    return False

            # Try n retries before stopping current process
            if retry_count < int(self.conf.get_config('process_params', 'max_nb_retries')):
                logging.warning("START PROCESS - process placed in queue : " + concatname)
                time.sleep(int(self.conf.get_config('process_params', 'waiting_time_for_retry')))
                return self.start_process(process_id, name, args, retry_count + 1)
            else:
                logging.error("START PROCESS - blocking processes running : " + concatname)
                self.setIsError()
                self.stop_process(process_id, name, args, self.WAITING)
                return False
        else:
            if retry_count > 0:
                self.__update_process(process_id, concatname)
            else:
                self.__insert_process(process_id, concatname, self.RUNNING)

        # If not
        return True

    # When ending a process
    def stop_process(self, process_id, name, args, status=None):
        concatname = name + " " + str(args[1])
        logging.warning("STOP PROCESS - " + concatname)
        logging.warning("------------------------------")

        if status is None:
            status = self.RUNNING

        # Save process info into historic
        self.__insert_process(process_id, concatname, self.ERROR if self.IsError else self.SUCCESS, True)

        squery = 'Delete from process_params where process_id = ' + str(process_id)
        squery += ' and status = ' + "'" + status + "'" + ' and process_name = ' + "'" + concatname + "'" + ';'

        slack.post_message_to_alert_importer_jobs(
            'Job *' + concatname + '* :' + str(self.ERROR if self.IsError else self.SUCCESS))

        return self.dbconn.execute_query(squery) == 0

    # If process there for too long (shouldn't be), delete process from table
    def __delete_old_processes(self):
        max_duration = self.conf.get_config('process_params', 'max_duration_for_process')
        self.dbconn.execute_query(
            "Delete from process_params where timestamp < CURRENT_TIMESTAMP - interval '" + max_duration + "';")

    # If same process already in status running / waiting => Kill
    def __should_be_waiting(self, process_id, name):
        squeryselect = 'Select * from process_params where process_id = ' + str(process_id) + '\n'
        squeryselect += 'and process_name = ' + "'" + name + "'"
        rows = self.dbconn.get_query_result(squeryselect)
        return rows is None or len(rows) == 0

    def __insert_process(self, process_id, name, status, is_histo=False):
        sql_table = 'process_params'
        if is_histo:
            sql_table = 'process_params_histo'
        squeryinsert = 'INSERT INTO ' + sql_table + ' (process_id, process_name, status, timestamp)\n'
        squeryinsert += 'VALUES('
        squeryinsert += str(process_id) + ','
        squeryinsert += "'" + name + "',"
        squeryinsert += "'" + status + "',"
        squeryinsert += 'current_timestamp)'
        self.dbconn.execute_query(squeryinsert)

    def __update_process(self, process_id, name):
        squeryupdate = 'UPDATE process_params SET status = ' + "'" + self.RUNNING + "',\n"
        squeryupdate += 'timestamp = current_timestamp\n'
        squeryupdate += 'WHERE process_id = ' + str(process_id) + ' AND process_name = ' + "'" + name + "'"
        self.dbconn.execute_query(squeryupdate)

    def setIsError(self):
        self.IsError = True
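# Usage sketch mirroring the entry-point scripts above (DataImporter, Algo,
# AlgoKPI); run_job() is a hypothetical placeholder for the actual work, and
# conf is the Config instance those scripts create.
procM = ProcessManager()
process_id = conf.get_config('process_params', 'data_importer_process_id')
if not procM.start_process(process_id, 'DataImporter', sys.argv):
    sys.exit(1)
try:
    run_job()  # hypothetical job body
except Exception:
    procM.setIsError()
    raise
finally:
    procM.stop_process(process_id, 'DataImporter', sys.argv)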
import flask
import flask_lsm_auth
from flask_assets import Environment
from routes.api import api_app
import yaml
import os
from commons.config import Config

config = Config()
auth_config = config.get("auth")
authorizedUsers = auth_config.get("users")

app = flask.Flask("redirect")
app.register_blueprint(api_app)
assets = Environment(app)

@app.route("/", methods=["GET"])
def index():
    lsm = flask_lsm_auth.LSM(auth_config)
    return flask.render_template("redirection.html", section="redirection", user=lsm.get_login())

@app.route("/logout", methods=["GET"])
def logout():
    lsm = flask_lsm_auth.LSM(auth_config)
    lsm.logout(flask.request.url_root)
    return lsm.compose_response()