# This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at https://mozilla.org/MPL/2.0/. import requests import json from abc import ABC, abstractmethod from typing import Dict from attrdict import AttrDict from hub.routes.pipeline import RoutesPipeline, AllRoutes from shared.cfg import CFG from shared.log import get_logger logger = get_logger() class AbstractStripeHubEvent(ABC): def __init__(self, payload) -> None: self.payload = AttrDict(payload) @property def is_active_or_trialing(self) -> bool: return self.payload.data.object.status in ("active", "trialing") @staticmethod def send_to_routes(report_routes, message_to_route) -> None: logger.info( "send to routes", report_routes=report_routes, message_to_route=message_to_route,
import os
import errno
# Fix: `sleep` is awaited in fifo() below but was never imported, which
# raised NameError on the first loop iteration at runtime.
from asyncio import sleep

from shared.config import get_settings
from shared.log import get_logger
from shared.fifo import mkfifo

# Path of the named pipe this module reads from.
FIFO = 'fifo'


async def fifo():
    """Async generator that tails the FIFO named pipe.

    Creates the pipe if it does not already exist, then loops forever:
    yields any non-empty stripped payload read from the pipe, pauses for
    5 seconds, and yields an empty string each cycle.
    """
    try:
        os.mkfifo(FIFO)
    except OSError as oe:
        # EEXIST just means the pipe is already there; anything else is fatal.
        if oe.errno != errno.EEXIST:
            raise
    # NOTE(review): open() and read() are blocking calls and will stall the
    # event loop while no writer is attached — confirm this is acceptable
    # for the consumer of this generator.
    with open(FIFO) as fifo_in:
        while True:
            data = fifo_in.read().strip()
            if len(data) > 0:
                yield data
            await sleep(5)
            yield ""


if __name__ == "__main__":
    print("A", __name__)
else:
    print("B", __name__)

logger = get_logger('fifo')
config = get_settings('fifo')
def main():
    """Build the overnight-calendar-spread (ocs) sheet, reconcile it with
    currently open ocs strategies, and hand everything to the IB algo app.

    NOTE(review): relies on project modules (algo, dn, su, msu, cu, exp,
    ocs, ts, pm, cmi, aup, lg, tpm) plus np/mth aliases imported elsewhere
    in this file.
    """
    app = algo.Algo()

    # Bet size comes from the first line of the admin risk-parameter file.
    admin_dir = dn.get_directory_name(ext='admin')
    risk_file_out = su.read_text_file(file_name=admin_dir + '/RiskParameter.txt')
    app.bet_size = float(risk_file_out[0])

    con = msu.get_my_sql_connection()
    date_now = cu.get_doubledate()
    report_date = exp.doubledate_shift_bus_days()

    # Sheets for the report dates shifted 1..9 business days back.
    report_date_list = [
        exp.doubledate_shift_bus_days(shift_in_days=x) for x in range(1, 10)
    ]

    overnight_calendars_list = []

    for i in range(len(report_date_list)):
        ocs_output = ocs.generate_overnight_spreads_sheet_4date(
            date_to=report_date_list[i])
        overnight_calendars = ocs_output['overnight_calendars']

        # Restrict to the traded ticker heads.
        overnight_calendars = \
            overnight_calendars[overnight_calendars['tickerHead'].isin(
                ['CL', 'HO', 'NG', 'C', 'W', 'KW', 'S', 'SM', 'BO', 'LC',
                 'LN', 'FC'])]

        # Drop rows that lack a valid front-spread leg pair.
        overnight_calendars = overnight_calendars[
            (overnight_calendars['ticker1L'] != '') &
            (overnight_calendars['ticker2L'] != '')]

        # Price columns, filled later from live market data.
        overnight_calendars['back_spread_price'] = np.nan
        overnight_calendars['front_spread_price'] = np.nan
        overnight_calendars['mid_ticker_price'] = np.nan

        overnight_calendars['back_spread_ticker'] = [
            overnight_calendars['ticker1'].iloc[x] + '-' +
            overnight_calendars['ticker2'].iloc[x]
            for x in range(len(overnight_calendars.index))
        ]
        overnight_calendars['front_spread_ticker'] = [
            overnight_calendars['ticker1L'].iloc[x] + '-' +
            overnight_calendars['ticker2L'].iloc[x]
            for x in range(len(overnight_calendars.index))
        ]

        # Size by dollar noise, capped at the user-confirmation volume limit.
        overnight_calendars['target_quantity'] = [
            min(mth.ceil(app.bet_size / x),
                app.total_traded_volume_max_before_user_confirmation)
            for x in overnight_calendars['dollarNoise100']
        ]
        overnight_calendars['alias'] = [
            overnight_calendars['ticker1'].iloc[x] + '_' +
            overnight_calendars['ticker2'].iloc[x] + '_ocs'
            for x in range(len(overnight_calendars.index))
        ]

        # Position bookkeeping columns, populated during reconciliation below.
        overnight_calendars['total_quantity'] = 0
        overnight_calendars['total_risk'] = 0
        overnight_calendars['holding_period'] = 0
        overnight_calendars.reset_index(drop=True, inplace=True)
        overnight_calendars_list.append(overnight_calendars)

    # Work off the first (most recent) sheet; earlier sheets remain available
    # for positions whose spread no longer appears on it.
    overnight_calendars = overnight_calendars_list.pop(0)

    open_strategy_frame = ts.get_filtered_open_strategies(
        strategy_class_list=['ocs'], as_of_date=date_now)

    for i in range(len(open_strategy_frame.index)):
        position_manager_output = pm.get_ocs_position(
            alias=open_strategy_frame['alias'].iloc[i],
            as_of_date=date_now,
            con=con)
        trades_frame = ts.get_trades_4strategy_alias(
            alias=open_strategy_frame['alias'].iloc[i], con=con)
        datetime_now = cu.convert_doubledate_2datetime(date_now)
        holding_period = (datetime_now - trades_frame['trade_date'].min()).days

        if (not position_manager_output['empty_position_q']) & (
                not position_manager_output['correct_position_q']):
            # Non-empty but inconsistent position: flag for manual review.
            print('Check ' + open_strategy_frame['alias'].iloc[i] +
                  ' ! Position may be incorrect')
        elif position_manager_output['correct_position_q']:
            ticker_head = cmi.get_contract_specs(
                position_manager_output['sorted_position']
                ['ticker'].iloc[0])['ticker_head']

            # Sign of 'scale' determines direction of the theme name.
            position_name = ''
            if position_manager_output['scale'] > 0:
                position_name = ticker_head + '_long'
            else:
                position_name = ticker_head + '_short'

            # Mirror the existing position into the per-ticker-head portfolio.
            app.ocs_portfolio.order_send(
                ticker=position_name,
                qty=abs(position_manager_output['scale']))
            app.ocs_portfolio.order_fill(
                ticker=position_name,
                qty=abs(position_manager_output['scale']))

            ticker1 = position_manager_output['sorted_position'][
                'ticker'].iloc[0]
            ticker2 = position_manager_output['sorted_position'][
                'ticker'].iloc[1]

            selection_indx = overnight_calendars[
                'back_spread_ticker'] == ticker1 + '-' + ticker2

            if sum(selection_indx) == 1:
                # Spread is on today's sheet: record the live position there.
                overnight_calendars.loc[
                    selection_indx,
                    'total_quantity'] = position_manager_output['scale']
                overnight_calendars.loc[
                    selection_indx, 'total_risk'] = position_manager_output[
                        'scale'] * overnight_calendars.loc[selection_indx,
                                                           'dollarNoise100']
                overnight_calendars.loc[
                    selection_indx, 'alias'] = open_strategy_frame['alias'].iloc[i]
                overnight_calendars.loc[selection_indx,
                                        'holding_period'] = holding_period
                app.ocs_risk_portfolio.order_send(
                    ticker=position_name,
                    qty=abs(position_manager_output['scale'] *
                            overnight_calendars.loc[selection_indx,
                                                    'dollarNoise100']))
                app.ocs_risk_portfolio.order_fill(
                    ticker=position_name,
                    qty=abs(position_manager_output['scale'] *
                            overnight_calendars.loc[selection_indx,
                                                    'dollarNoise100']))
            else:
                # Spread fell off today's sheet: find it on an earlier sheet
                # and pull that row into today's frame.
                for j in range(len(overnight_calendars_list)):
                    overnight_calendars_past = overnight_calendars_list[j]
                    selection_indx = overnight_calendars_past[
                        'back_spread_ticker'] == ticker1 + '-' + ticker2
                    if sum(selection_indx) == 1:
                        overnight_calendars_past.loc[
                            selection_indx,
                            'total_quantity'] = position_manager_output[
                                'scale']
                        overnight_calendars_past.loc[
                            selection_indx,
                            'total_risk'] = position_manager_output[
                                'scale'] * overnight_calendars_past.loc[
                                    selection_indx, 'dollarNoise100']
                        overnight_calendars_past.loc[
                            selection_indx,
                            'alias'] = open_strategy_frame['alias'].iloc[i]
                        overnight_calendars_past.loc[
                            selection_indx, 'holding_period'] = holding_period
                        app.ocs_risk_portfolio.order_send(
                            ticker=position_name,
                            qty=abs(position_manager_output['scale'] *
                                    overnight_calendars_past.loc[
                                        selection_indx, 'dollarNoise100']))
                        app.ocs_risk_portfolio.order_fill(
                            ticker=position_name,
                            qty=abs(position_manager_output['scale'] *
                                    overnight_calendars_past.loc[
                                        selection_indx, 'dollarNoise100']))
                        if j > 1:
                            # Butterfly stats from sufficiently old sheets are
                            # blanked rather than carried forward.
                            overnight_calendars_past.loc[
                                selection_indx, 'butterflyMean'] = np.nan
                            overnight_calendars_past.loc[
                                selection_indx, 'butterflyNoise'] = np.nan
                        overnight_calendars = overnight_calendars.append(
                            overnight_calendars_past[selection_indx])
                        break

    overnight_calendars.reset_index(drop=True, inplace=True)
    overnight_calendars['working_order_id'] = np.nan

    spread_ticker_list = list(
        set(overnight_calendars['back_spread_ticker']).union(
            overnight_calendars['front_spread_ticker']))
    back_spread_ticker_list = list(overnight_calendars['back_spread_ticker'])
    theme_name_list = set([
        x + '_long' for x in back_spread_ticker_list
    ]).union(set([x + '_short' for x in back_spread_ticker_list]))

    ocs_alias_portfolio = aup.portfolio(ticker_list=theme_name_list)

    # Replay current net quantities into the per-spread alias portfolio.
    for i in range(len(overnight_calendars.index)):
        if overnight_calendars.loc[i, 'total_quantity'] > 0:
            position_name = overnight_calendars.loc[
                i, 'back_spread_ticker'] + '_long'
            ocs_alias_portfolio.order_send(
                ticker=position_name,
                qty=overnight_calendars.loc[i, 'total_quantity'])
            ocs_alias_portfolio.order_fill(
                ticker=position_name,
                qty=overnight_calendars.loc[i, 'total_quantity'])
        elif overnight_calendars.loc[i, 'total_quantity'] < 0:
            position_name = overnight_calendars.loc[
                i, 'back_spread_ticker'] + '_short'
            ocs_alias_portfolio.order_send(
                ticker=position_name,
                qty=-overnight_calendars.loc[i, 'total_quantity'])
            ocs_alias_portfolio.order_fill(
                ticker=position_name,
                qty=-overnight_calendars.loc[i, 'total_quantity'])

    # Hand everything to the algo app and start it.
    app.price_request_dictionary['spread'] = spread_ticker_list
    app.price_request_dictionary['outright'] = overnight_calendars[
        'ticker1'].values
    app.overnight_calendars = overnight_calendars
    app.open_strategy_list = list(open_strategy_frame['alias'])
    app.ocs_alias_portfolio = ocs_alias_portfolio
    app.ticker_list = list(
        set(overnight_calendars['ticker1']).union(
            overnight_calendars['ticker2']).union(
                set(overnight_calendars['ticker1L'])).union(
                    set(overnight_calendars['ticker2L'])))
    app.output_dir = ts.create_strategy_output_dir(strategy_class='ocs',
                                                   report_date=report_date)
    app.log = lg.get_logger(file_identifier='ib_ocs', log_level='INFO')
    app.con = con
    app.pnl_frame = tpm.get_daily_pnl_snapshot(as_of_date=report_date)
    print('Emre')
    app.connect(client_id=2)
    app.run()
#!/usr/bin/env python3.8 # pylint: disable=no-member from asyncio import sleep from shared.config import get_settings from shared.fmt import fmt from shared.http import get_request from shared.log import get_logger from json.decoder import JSONDecodeError logger = get_logger("weather") settings = get_settings("weather") async def openweathermap(): URL = "https://api.openweathermap.org/data/2.5/weather?id={}&appid={}&units=metric".format( settings["city_id"], settings["openweathermap_api_key"]) while True: try: data = await get_request(URL) except JSONDecodeError as e: print(e) if data: try: temperature = data["main"]["temp"] summary = data["weather"][0]["description"].title() except (NameError, KeyError) as e:
# NOTE(review): this chunk begins mid-coroutine — the enclosing `async def`
# and the `try:` blocks matched by the `except` clauses below are outside
# the visible source, so the nesting shown here is a best guess.
status = await client.status()
# Colour the output by MPD playback state (green while playing, grey otherwise).
if status["state"] == "play":
    colors = {'fg': 'green'}
elif status["state"] == "pause":
    colors = {'fg': 'grey'}
elif status["state"] == "stop":
    colors = {'fg': 'grey'}
currentsong = await client.currentsong()
if "title" in currentsong:
    title = currentsong["title"]
    yield fmt("{}".format(title), colors=colors)  # <<<<<
# NOTE(review): `except Exception: raise e` re-raises unchanged — these
# handlers are effectively no-ops; confirm whether cleanup was intended here.
except Exception as e:
    raise e
except Exception as e:
    raise e

if __name__ == "__main__":
    print("A", __name__)
else:
    print("B", __name__)

logger = get_logger("mpd")
config = get_settings("mpd")
client = MPDClient()
#tracemalloc.start()
import warnings

# h5py emits noisy warnings at import time; suppress FutureWarning just for
# the duration of the import.
# NOTE(review): the original formatting is ambiguous about whether the
# second filterwarnings call sat inside catch_warnings(); it is placed at
# module level here so the suppression stays in effect — confirm intent.
with warnings.catch_warnings():
    warnings.filterwarnings("ignore", category=FutureWarning)
    import h5py
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")

import shared.log as lg

# Module-level logger for the evening batch job.
log = lg.get_logger(file_identifier='evening_job', log_level='INFO')

import shared.directory_names as dn
import shared.downloads as sd
import shared.calendar_utilities as cu
import pickle as pickle
import my_sql_routines.futures_price_loader as fpl
import my_sql_routines.options_price_loader as opl
import my_sql_routines.options_greek_loader as ogl
import my_sql_routines.options_signal_loader as osl
import my_sql_routines.my_sql_utilities as msu
import get_price.presave_price as pp
import opportunity_constructs.vcs as vcs
import formats.options_strategy_formats as osf
import formats.futures_strategy_formats as fsf
import formats.risk_pnl_formats as rpf
import ta.email_reports as er
import ta.prepare_daily as prep
import my_sql_routines.options_pnl_loader as opnl
import contract_utilities.expiration as exp
def main():
    """Build the vcs (options calendar-spread) sheet and hand it to the IB
    algo app.

    NOTE(review): relies on project modules (algo, exp, cu, msu, vcs, of,
    omu, cmi, gfp, up, grfs, dn, su, lg) plus np and the sibling helper
    generate_vcs_alias, all defined/imported elsewhere in this file.
    """
    app = algo.Algo()
    report_date = exp.doubledate_shift_bus_days()
    todays_date = cu.get_doubledate()
    con = msu.get_my_sql_connection()

    vcs_output = vcs.generate_vcs_sheet_4date(date_to=report_date)
    vcs_pairs = vcs_output['vcs_pairs']

    # Keep only the 'long2'/'short2' filtered candidates.
    filter_out = of.get_vcs_filters(data_frame_input=vcs_pairs,
                                    filter_list=['long2', 'short2'])
    vcs_pairs = filter_out['selected_frame']

    # Both risk numbers must be available.
    vcs_pairs = vcs_pairs[vcs_pairs['downside'].notnull()
                          & vcs_pairs['upside'].notnull()]

    # At least 50 days to front expiry; exclude Metal/FX/Energy classes.
    vcs_pairs = vcs_pairs[(vcs_pairs['trDte1'] >= 50)
                          & (vcs_pairs.tickerClass != 'Metal')
                          & (vcs_pairs.tickerClass != 'FX')
                          & (vcs_pairs.tickerClass != 'Energy')]

    # Keep only pairs at the extremes of the Q / fwdVolQ percentile bands.
    vcs_pairs = vcs_pairs[((vcs_pairs['Q'] <= 30)
                           & (vcs_pairs['fwdVolQ'] >= 30))
                          | ((vcs_pairs['Q'] >= 70)
                             & (vcs_pairs['fwdVolQ'] <= 70))]
    vcs_pairs.reset_index(drop=True, inplace=True)

    vcs_pairs['underlying_ticker1'] = [
        omu.get_option_underlying(ticker=x) for x in vcs_pairs['ticker1']
    ]
    vcs_pairs['underlying_ticker2'] = [
        omu.get_option_underlying(ticker=x) for x in vcs_pairs['ticker2']
    ]
    vcs_pairs['underlying_tickerhead'] = [
        cmi.get_contract_specs(x)['ticker_head']
        for x in vcs_pairs['underlying_ticker1']
    ]

    # Preload futures price history once per ticker head.
    futures_data_dictionary = {
        x: gfp.get_futures_price_preloaded(ticker_head=x)
        for x in vcs_pairs['underlying_tickerhead'].unique()
    }

    proxy_output_list1 = [
        up.get_underlying_proxy_ticker(
            ticker=x,
            settle_date=report_date,
            futures_data_dictionary=futures_data_dictionary)
        for x in vcs_pairs['underlying_ticker1']
    ]
    vcs_pairs['proxy_ticker1'] = [x['ticker'] for x in proxy_output_list1]
    vcs_pairs['add_2_proxy1'] = [x['add_2_proxy'] for x in proxy_output_list1]

    proxy_output_list2 = [
        up.get_underlying_proxy_ticker(
            ticker=x,
            settle_date=report_date,
            futures_data_dictionary=futures_data_dictionary)
        for x in vcs_pairs['underlying_ticker2']
    ]
    vcs_pairs['proxy_ticker2'] = [x['ticker'] for x in proxy_output_list2]
    vcs_pairs['add_2_proxy2'] = [x['add_2_proxy'] for x in proxy_output_list2]

    # Option expiration dates as yyyymmdd ints, read from the database.
    vcs_pairs['expiration_date1'] = [
        int(
            exp.get_expiration_from_db(instrument='options',
                                       ticker=x,
                                       con=con).strftime('%Y%m%d'))
        for x in vcs_pairs['ticker1']
    ]
    vcs_pairs['expiration_date2'] = [
        int(
            exp.get_expiration_from_db(instrument='options',
                                       ticker=x,
                                       con=con).strftime('%Y%m%d'))
        for x in vcs_pairs['ticker2']
    ]

    # Simple interest rates out to each leg's expiration.
    vcs_pairs['interest_date1'] = [
        grfs.get_simple_rate(as_of_date=report_date, date_to=x)['rate_output']
        for x in vcs_pairs['expiration_date1']
    ]
    vcs_pairs['interest_date2'] = [
        grfs.get_simple_rate(as_of_date=report_date, date_to=x)['rate_output']
        for x in vcs_pairs['expiration_date2']
    ]

    vcs_pairs['exercise_type'] = [
        cmi.get_option_exercise_type(ticker_head=x)
        for x in vcs_pairs['tickerHead']
    ]

    # Risk budget scaled from the admin risk-parameter file.
    admin_dir = dn.get_directory_name(ext='admin')
    risk_file_out = su.read_text_file(file_name=admin_dir + '/RiskParameter.txt')
    vcs_risk_parameter = 5 * 2 * float(risk_file_out[0])

    # Quantities sized so each side carries roughly vcs_risk_parameter.
    vcs_pairs['long_quantity'] = vcs_risk_parameter / abs(
        vcs_pairs['downside'])
    vcs_pairs['short_quantity'] = vcs_risk_parameter / vcs_pairs['upside']
    vcs_pairs['long_quantity'] = vcs_pairs['long_quantity'].round()
    vcs_pairs['short_quantity'] = vcs_pairs['short_quantity'].round()

    vcs_pairs['alias'] = [
        generate_vcs_alias(vcs_row=vcs_pairs.iloc[x])
        for x in range(len(vcs_pairs.index))
    ]

    # Live-market columns, filled in once prices arrive.
    vcs_pairs['call_mid_price1'] = np.nan
    vcs_pairs['put_mid_price1'] = np.nan
    vcs_pairs['call_mid_price2'] = np.nan
    vcs_pairs['put_mid_price2'] = np.nan
    vcs_pairs['call_iv1'] = np.nan
    vcs_pairs['put_iv1'] = np.nan
    vcs_pairs['call_iv2'] = np.nan
    vcs_pairs['put_iv2'] = np.nan
    vcs_pairs['underlying_mid_price1'] = np.nan
    vcs_pairs['underlying_mid_price2'] = np.nan
    vcs_pairs['proxy_mid_price1'] = np.nan
    vcs_pairs['proxy_mid_price2'] = np.nan
    vcs_pairs['current_strike1'] = np.nan
    vcs_pairs['current_strike2'] = np.nan

    ta_folder = dn.get_dated_directory_extension(folder_date=todays_date,
                                                 ext='ta')

    # Hand everything to the algo app and start it.
    app.vcs_pairs = vcs_pairs
    app.con = con
    app.futures_data_dictionary = futures_data_dictionary
    app.report_date = report_date
    app.todays_date = todays_date
    app.log = lg.get_logger(file_identifier='vcs', log_level='INFO')
    app.trade_file = ta_folder + '/trade_dir.csv'
    app.vcs_risk_parameter = vcs_risk_parameter
    app.connect(client_id=3)
    app.run()
import json
from asyncio import sleep
import httpx
from h2.exceptions import StreamClosedError
from shared.log import get_logger

logger = get_logger('httpx')

#async with httpx.AsyncClient() as client:
#    r = await client.get('https://www.example.com/')
#r


async def get_request(url: str, oksleep=90, errsleep=180):
    """GET *url* and return the response body parsed as JSON.

    On an httpx timeout the error is logged at debug level and the
    coroutine sleeps for *errsleep* seconds before returning.
    NOTE(review): this chunk is truncated after the WriteTimeout handler —
    the remaining handlers (and any use of oksleep / StreamClosedError)
    lie outside the visible source.
    """
    async with httpx.AsyncClient() as client:
        try:
            res = await client.get(url)
            return json.loads(res.read())
        # httpx exceptions
        except httpx._exceptions.ConnectTimeout as e:
            logger.debug(e)
            await sleep(errsleep)
        except httpx._exceptions.ReadTimeout as e:
            logger.debug(e)
            await sleep(errsleep)
        except httpx._exceptions.WriteTimeout as e:
import shared.log as lg

# Morning batch job #2: refresh futures prices and regenerate data files.
log = lg.get_logger(file_identifier='morning_job_2', log_level='INFO')

import my_sql_routines.my_sql_utilities as msu
import my_sql_routines.futures_price_loader as fpl
import get_price.presave_price as pp
import opportunity_constructs.spread_carry as sc
import opportunity_constructs.overnight_calendar_spreads as ocs
import formats.futures_strategy_formats as fsf
import formats.strategy_followup_formats as sff
import contract_utilities.expiration as exp
import formats.risk_pnl_formats as rpf
import ta.prepare_daily as prep
import ta.email_reports as er
import save_ib_data.program as sib
import math as m

con = msu.get_my_sql_connection()
report_date = exp.doubledate_shift_bus_days()

try:
    log.info('update_futures_price_database...')
    fpl.update_futures_price_database(con=con)
except Exception:
    # A failed price load invalidates everything downstream: log and stop.
    log.error('update_futures_price_database failed', exc_info=True)
    quit()

try:
    log.info('generate_and_update_futures_data_files...')
    pp.generate_and_update_futures_data_files(ticker_head_list='butterfly')
    # NOTE(review): chunk is truncated here — the matching except clause
    # lies outside the visible source.
from shared.args import getopt
from shared.config import config
from shared.log import get_logger
from typing import NamedTuple

logger = get_logger('blocks2')


class Action(NamedTuple):
    # button: mouse button number that triggers the action (default 1).
    # command: shell command to run when triggered (default empty).
    button: int = 1
    command: str = ""


class Block:
    """One status-bar block parsed from its configuration dict.

    NOTE(review): the class may continue beyond this chunk.
    """

    def __init__(self, name: str, block: dict):
        self.name = name
        # Presumably the block's rendered text, set elsewhere — verify.
        self.line = ""
        # The block's kind is inferred from which key the config provides.
        if "cmd" in block:
            self.type = "cmd"
        elif "func" in block:
            self.type = "func"
        elif "static" in block:
            self.type = "static"
        else:
            # NOTE(review): bare Exception with no message — a more specific
            # error type/message would aid debugging misconfigured blocks.
            raise Exception
        # Optional configuration, absent keys default to None.
        self.actions = block.get("actions", None)
        self.colors = block.get("colors", None)
        self.pad = block.get("pad", None)