def populate_queue(self):
    logger.info('populate_queue')
    lst: List[Action] = self.action_connector.get_action_list()
    self.actions = sorted(lst, key=lambda x: x.time)
    logger.info('Get action list : ' + str(self.actions))
    for elem in self.actions:
        self.queue.put(elem)
def __post_init__(self, asset_values: pd.DataFrame, initial_capital: float,
                  ptf_type: PortfolioType):
    logger.info('Portfolio service init...')
    if ptf_type is None:
        raise Exception('ptf_type cannot be None')
    if asset_values is not None:
        for i in asset_values.index:
            if i == 0:
                # First row: seed the portfolio with the initial capital.
                ptf = PortfolioShortAllowed(
                    as_of_date=asset_values.at[i, app_config.AS_OF_DATE],
                    money=initial_capital,
                    asset_value=asset_values.at[i, app_config.SPOT],
                    open_value=asset_values.at[i, app_config.OPEN],
                    high_value=asset_values.at[i, app_config.HIGH],
                    low_value=asset_values.at[i, app_config.LOW],
                    close_value=asset_values.at[i, app_config.CLOSE],
                    turnover=asset_values.at[i, app_config.TURNOVER],
                    volatility=asset_values.at[i, app_config.VOLATILITY])
            else:
                # Subsequent rows: derive each portfolio from the previous one.
                ptf = portfolio_factory(
                    self.portfolio[-1],
                    asset_values.at[i, app_config.SPOT],
                    asset_values.at[i, app_config.AS_OF_DATE],
                    ptf_type,
                    open_value=asset_values.at[i, app_config.OPEN],
                    high_value=asset_values.at[i, app_config.HIGH],
                    low_value=asset_values.at[i, app_config.LOW],
                    close_value=asset_values.at[i, app_config.CLOSE],
                    turnover=asset_values.at[i, app_config.TURNOVER],
                    volatility=asset_values.at[i, app_config.VOLATILITY])
            self.portfolio.append(ptf)
def make_operation(self, side: Side, qty: float, transaction_fee: float,
                   take_profit: float = None, stop_loss: float = None):
    if side is Side.OTHER:
        return
    if len(self.positions) > 0:
        self.in_buy_position = True
    if side is Side.SELL and not self.in_buy_position:
        raise PortfolioException(
            'Cannot Sell something not in the portfolio')
    elif side is Side.SELL and self.in_buy_position:
        logger.info('Closing all buy positions because of selling signal')
        self.close_position(force=True)
    else:
        notional = qty * self.asset_value
        if notional > self.money:
            raise NotEnoughMoneyException(
                f'Cannot {side} {qty} because {notional} > {self.money}')
        # Pay the notional plus the transaction fee out of available cash.
        self.money -= notional + transaction_fee
        pos = Position(as_of_date=self.as_of_date,
                       qty=qty,
                       side=side,
                       pos_value=notional,
                       take_profit=take_profit,
                       stop_loss=stop_loss)
        self.positions.append(pos)
async def login_for_access_token(
        form_data: OAuth2PasswordRequestForm = Depends(),
        token_service: TokenService = Depends(get_token_service)):
    credentials: Credentials = Credentials(form_data.username,
                                           form_data.password)
    logger.info(f'Creating token for {form_data.username}')
    access_token_expires = timedelta(
        minutes=app_config.ACCESS_TOKEN_EXPIRE_MINUTES)
    try:
        access_token = token_service.create_access_token(
            form_data.username,
            credentials=credentials,
            expires_delta=access_token_expires,
            scopes=form_data.scopes)
    except (UserNotFound, UserServiceConnectionError):
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Incorrect username or password",
            headers={"WWW-Authenticate": "Bearer"},
        )
    except ScopeNotAllowedException as e:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail=str(e),
            headers={"WWW-Authenticate": "Bearer"},
        )
    return {
        "access_token": access_token,
        "token_type": "bearer",
        "expire_in": app_config.ACCESS_TOKEN_EXPIRE_MINUTES
    }
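# Usage sketch (illustrative, not part of the service): requesting a token from
# the handler above with the standard OAuth2 password form. The host and the
# '/token' route path are assumptions; only the response shape comes from the
# handler above.
import requests

def fetch_token(base_url: str, username: str, password: str) -> str:
    r = requests.post(f'{base_url}/token',
                      data={'username': username, 'password': password})
    r.raise_for_status()
    payload = r.json()  # {'access_token': ..., 'token_type': 'bearer', 'expire_in': ...}
    return payload['access_token']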
def compute_vol(self) -> float:
    if self.vol_annu is None:
        logger.info('ReportingService computing annual volatility....')
        df: pd.DataFrame = self.ptf_service.ptf_to_df()
        df['perf'] = df[app_config.SPOT].pct_change()
        # Annualise the daily standard deviation with sqrt(252) trading days.
        self.vol_annu = df['perf'].std() * sqrt(252)
    return self.vol_annu
def put(self, message: str, cron: str) -> Cron:
    with transaction_context() as session:
        entry = Cron(message=message, cron=cron)
        session.add(entry)
        session.commit()
        logger.info('Cron added {}'.format(entry))
        return entry
def evaluate_all(self):
    logger.info('Portfolio service evaluate_all...')
    previous_positions: List[Position] = []
    previous_money = None
    for ptf in self.portfolio:
        self.evaluate(ptf, previous_positions, previous_money)
        previous_positions = ptf.positions
        previous_money = ptf.money
async def populate_queue():
    logger.info("populate_queue")
    while True:
        try:
            alert = out_sync_queue.get_nowait()
            await async_queue.put(alert)
        except queue.Empty:
            await asyncio.sleep(1)
def get_by_name(self, name: str, credentials: Credentials) -> User:
    logger.info(f'GET {app_config.USER_SERVICE_URL}')
    # TODO: aiohttp
    r = requests.get(app_config.USER_SERVICE_URL,
                     auth=(credentials.username, credentials.password))
    if r.status_code != 200:
        raise UserServiceConnectionError(
            f'Status code {r.status_code} for GET {app_config.USER_SERVICE_URL}')
    user: User = User(**r.json())
    return user
def compute_sharpe_ratio(self) -> float:
    logger.info('ReportingService computing sharpe ratio....')
    if self.sharpe_ratio is None:
        self.compute_perf_annu()
        self.compute_vol()
        self.sharpe_ratio = (self.perf_annu -
                             self.risk_free_rate) / self.vol_annu
    return self.sharpe_ratio
def put(self, alert: Alert) -> Alert:
    with transaction_context() as session:
        entry = DbAlert(**asdict(alert))
        session.add(entry)
        session.commit()
        alert = Alert(**entry.serialize)
        logger.info('Alert added in DB : {}'.format(alert))
        return alert
def delete_action(self, action: Action, action_type: ActionType):
    logger.info('Deleting {}'.format(action))
    if action_type is ActionType.FIXED_ACTION:
        self.fixed_action_service.delete_action(action)
    elif action_type is ActionType.REPETITIVE_ACTION:
        self.repetitive_action_service.delete_action(action)
    else:
        raise Exception('Action Type not implemented')
async def process_messages(self):
    logger.info("processing messages : {}".format(self.pending.qsize()))
    while not self.pending.empty():
        r = await self.pending.get()
        # r is a (time_spec, message) pair; cron expressions are persisted
        # so the alert can be rescheduled after it fires.
        if self.cron_service.is_cron(r[0]):
            self.cron_service.put(r[1], r[0])
        time = self.parser.parse(r[0])
        self.create_alert(time, r[1])
def create_app(config=app_config) -> Flask:
    app = Flask(__name__)
    app.config.from_object(config)
    CORS(app)
    from .api import api
    api.init_app(app)
    logger.info('start up')
    return app
def _clean_tasks(self):
    while True:
        # Drop tasks that have been dead for more than five minutes.
        self.tasks = {
            _id: task
            for _id, task in self.tasks.items()
            if not task.is_dead_for_more_than_5_minutes()
        }
        time.sleep(60)
        logger.info(f'Keys : {[i for i in self.tasks.keys()]}')
def create_user(name: str, pwd: str, scopes: List[str] = None) -> User:
    logger.info(f'Creating user {name}')
    user = DbUser(nickname=name,
                  password=pwd,
                  scopes=' '.join(scopes) if scopes else '')
    with transaction_context() as session:
        session.add(user)
        session.commit()
        ret = User(**user.serialize)
    return ret
def get_token_service() -> TokenService:
    connector = ApiUserConnector()
    r = redis.Redis()
    try:
        r.ping()
        logger.info('Redis connection available')
        service = RedisTokenService(connector)
    except redis.exceptions.ConnectionError:
        # Fall back to the plain token service when Redis is unreachable.
        service = TokenService(connector)
    return service
def get_cache_service() -> CacheService:
    connector = MockDBConnector()
    r = redis.Redis()
    try:
        r.ping()
        logger.info('Redis connection available')
        service = RedisCacheService(connector)
    except redis.exceptions.ConnectionError:
        # Fall back to the plain cache service when Redis is unreachable.
        service = CacheService(connector)
    return service
def go(self):
    logger.info('FixedActionService go !')
    self.populate_queue()
    self.start_time = time.time()
    while not self.queue.empty():
        action: Action = self.queue.get()
        logger.info(f'Getting action : {action}')
        # Busy-wait until the action's scheduled offset has elapsed.
        while self.elapsed_time < action.time:
            self.elapsed_time = time.time() - self.start_time
        self.shared_queue.put(action)
def _evalute_take_profit(self) -> bool:
    if self.take_profit is None:
        return False
    perf = self._evaluate_perf()
    if perf > self.take_profit:
        logger.info(
            f'Closing position due to take profit : {self.serialize}')
        return True
    else:
        return False
def _evalute_stop_loss(self) -> bool:
    if self.stop_loss is None:
        return False
    perf = self._evaluate_perf()
    if perf < self.stop_loss:
        logger.info(
            f'Closing position due to stop loss : {self.serialize}')
        return True
    else:
        return False
def update_scope(name: str, scopes: List[str]) -> User:
    logger.info(f'Updating scope of user {name} with {scopes}')
    with transaction_context() as session:
        user = session.query(DbUser).filter_by(nickname=name).first()
        if user is None:
            raise Exception(f'No user named {name}')
        user.scopes = ' '.join(scopes)
        session.commit()
        ret = User(**user.serialize)
    return ret
def create_alert(self, time: dt.datetime, message: str):
    alert: Alert = Alert(None, message,
                         time.strftime(app_config.TIME_FORMAT))
    alert = self.task_store.put(alert)
    logger.info("Creating alert {}".format(alert))
    # Notify the UI and schedule the alert to be displayed at its due time.
    self.sync_queue.put(alert)
    self.loop.create_task(
        self.run_at(
            dt.datetime.strptime(alert.time, app_config.TIME_FORMAT),
            self.display_alert(alert)))
async def display_alert(self, alert: Alert):
    logger.info('Displaying alert {}'.format(alert))
    # Remove from DB
    self.task_store.remove_one(alert.id)
    # Send a message to the queue to remove it from the main UI
    self.sync_queue.put(alert)
    # Start the GUI for the alert
    AlertWindow(alert).start()
    if self.cron_service.cron_in_db(alert.message):
        logger.info("Alert is cron, rescheduling")
        time = self.cron_service.get_next_time(alert.message)
        self.create_alert(time, alert.message)
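# run_at is called by create_alert above but is not shown in this section.
# A minimal sketch of its assumed behaviour: sleep until the target datetime,
# then await the scheduled coroutine (here, display_alert(alert)).
import asyncio
import datetime as dt

async def run_at(when: dt.datetime, coro):
    delay = (when - dt.datetime.now()).total_seconds()
    if delay > 0:
        await asyncio.sleep(delay)
    await coro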
def compute_base_100(self) -> pd.DataFrame:
    logger.info('ReportingService computing base 100....')
    if self.base_100 is None:
        df: pd.DataFrame = self.ptf_service.ptf_to_df()
        df['perf'] = df[app_config.SPOT].pct_change()
        df['base_100'] = 100 * np.nan_to_num(1 + df['perf'].cumsum(), nan=1)
        df['perf_asset_value'] = df['asset_value'].pct_change()
        df['asset_value_base_100'] = 100 * np.nan_to_num(
            1 + df['perf_asset_value'].cumsum(), nan=1)
        self.base_100 = df
    return self.base_100
def compute(self):
    logger.info('MovingAverageStrategyService computing signals...')
    self.df = self.data_service.df
    self.df = self.df.set_index(app_config.AS_OF_DATE)
    # Moving averages over short_term_trend days and long_term_trend days
    self.df['short_term_trend'] = np.round(
        self.df[app_config.CLOSE].rolling(f'{self.short_term_trend}D').mean())
    self.df['long_term_trend'] = np.round(
        self.df[app_config.CLOSE].rolling(f'{self.long_term_trend}D').mean())
    self.df['short_term_trend - long_term_trend'] = (
        self.df['short_term_trend'] - self.df['long_term_trend'])
    # Regime: +1 when the spread exceeds the threshold, -1 below -threshold, 0 otherwise
    self.df['Regime'] = np.where(
        self.df['short_term_trend - long_term_trend'] > self.signal_threshold,
        1, 0)
    self.df['Regime'] = np.where(
        self.df['short_term_trend - long_term_trend'] < -self.signal_threshold,
        -1, self.df['Regime'])
    logger.info('MovingAverageStrategyService computation done...')
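# Standalone sketch of the crossover signal above on a toy price series, with
# hard-coded column names and threshold in place of app_config constants and
# the service's configured parameters.
import numpy as np
import pandas as pd

dates = pd.date_range('2021-01-01', periods=10, freq='D')
toy = pd.DataFrame({'close': [100, 101, 103, 102, 105, 107, 106, 108, 110, 109]},
                   index=dates)
toy['short'] = toy['close'].rolling('3D').mean()
toy['long'] = toy['close'].rolling('7D').mean()
spread = toy['short'] - toy['long']
threshold = 0.5  # assumed signal_threshold
toy['Regime'] = np.where(spread > threshold, 1, 0)
toy['Regime'] = np.where(spread < -threshold, -1, toy['Regime'])
print(toy[['close', 'short', 'long', 'Regime']])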
def compute_max_draw_down(self) -> float:
    logger.info('ReportingService computing max_drawdown....')
    if self.max_draw_down is None:
        df: pd.DataFrame = self.ptf_service.ptf_to_df()
        roll_max = df[app_config.SPOT].cummax()
        df['roll_max'] = roll_max
        daily_dd = df[app_config.SPOT] / roll_max - 1.0
        df['daily_dd'] = daily_dd
        max_dd = df['daily_dd'].cummin()
        df['max_daily_dd'] = max_dd
        self.max_draw_down = df['max_daily_dd'].iloc[-1]
    return self.max_draw_down
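# Standalone sketch of the running-max drawdown computed above, on a toy spot
# series: track the running peak, measure each value against it, and keep the
# worst (most negative) ratio.
import pandas as pd

spot = pd.Series([100.0, 110.0, 105.0, 120.0, 90.0, 95.0])
roll_max = spot.cummax()            # highest value seen so far
daily_dd = spot / roll_max - 1.0    # drawdown relative to that peak
max_dd = daily_dd.cummin().iloc[-1]
print(max_dd)                       # -0.25: the drop from 120 to 90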
def _log(self):
    while True:
        msg = self.queue_log.get()
        if msg['level'] == 'info':
            logger.info(msg['message'])
        elif msg['level'] == 'error':
            logger.error(msg['message'])
        else:
            logger.debug(msg['message'])
        if msg.get('tid'):
            # Messages tied to a task also carry its latest ttl and result.
            tid = msg['tid']
            self.tasks[tid].ttl = msg['ttl']
            self.tasks[tid].result = msg['result']
def compute_perf_annu(self) -> float:
    logger.info('ReportingService computing annual return....')
    if self.perf_annu is None:
        df: pd.DataFrame = self.ptf_service.ptf_to_df()
        df = df.fillna(0)
        lst = df[app_config.SPOT].tolist()
        try:
            # Total return over the whole period.
            self.perf_periode = (lst[-1] - lst[0]) / lst[0]
        except ZeroDivisionError:
            self.perf_periode = 0
        nb_days = len(df[app_config.AS_OF_DATE])
        # Convert the period return to a daily rate, then compound over 365 days.
        perf_daily = (1 + self.perf_periode)**(1 / nb_days) - 1
        self.perf_annu = (1 + perf_daily)**365 - 1
    return self.perf_annu
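# Worked example of the annualisation above (illustrative numbers): a +6%
# return observed over a 126-day window is converted to a daily rate, then
# compounded over 365 days.
perf_periode = 0.06
nb_days = 126
perf_daily = (1 + perf_periode) ** (1 / nb_days) - 1   # ~0.000463
perf_annu = (1 + perf_daily) ** 365 - 1                # ~0.184
print(perf_daily, perf_annu)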
def df(self):
    if self._df is None:
        logger.info('DataService get data...')
        self._df = self.connector.get_df()
        logger.info(
            f'DataService filtering between {self.start_date} and {self.end_date}')
        if self.start_date is not None and self.end_date is not None:
            self._df = self._df.set_index(
                app_config.AS_OF_DATE).loc[self.start_date:self.end_date].reset_index()
        if self._df.empty:
            raise NoDataFoundException(
                f'DataFrame is empty for dates {self.start_date} - {self.end_date}')
    return self._df