def __init__(self):
    """Initialise store state, authenticate against IG and start streaming."""
    super(IGStore, self).__init__()

    self.notifs = collections.deque()  # store notifications for cerebro
    self._env = None  # reference to cerebro for general notifications
    self.broker = None  # broker instance
    self.datas = list()  # datas that have registered over start

    self._orders = collections.OrderedDict()  # map order.ref to oid
    self._ordersrev = collections.OrderedDict()  # map oid to order.ref
    self._transpend = collections.defaultdict(collections.deque)

    # Choose the demo or live environment based on the 'practice' param.
    self._oenv = self._ENVPRACTICE if self.p.practice else self._ENVLIVE

    self.igapi = IGService(self.p.usr, self.p.pwd, self.p.token, self._oenv)
    self.igapi.create_session()

    # Lightstreamer session for live price/account updates.
    self.igss = Streamer(ig_service=self.igapi)
    self.ig_session = self.igss.create_session()
    self.igss.connect(self.p.account)

    # Work with JSON rather than Pandas for better backtrader integration
    self.igapi.return_dataframe = False

    self._cash = 0.0
    self._value = 0.0
    self.pull_cash_and_value()

    self._evt_acct = threading.Event()
class Ayam:
    """Thin convenience wrapper around IGService for opening positions."""

    def __init__(self):
        # Authenticate one REST session at construction time.
        self.ig_service = IGService(username=config2.username,
                                    password=config2.password,
                                    api_key=config2.api_key,
                                    acc_type=config2.acc_type)
        self.ig_service.create_session()

    def create_position(self, epic, direction, objectif, currency_info, qty):
        """Open a MARKET position on ``epic`` and return the deal response.

        ``objectif`` is used as the limit (take-profit) level; no stop is
        attached.
        """
        return self.ig_service.create_open_position(
            direction=direction,
            currency_code=currency_info,
            order_type="MARKET",
            size=qty,
            force_open=True,
            expiry="-",
            guaranteed_stop=False,
            epic=epic,
            limit_level=objectif,
            level=None,
            limit_distance=None,
            quote_id=None,
            stop_distance=None,
            stop_level=None,
        )
def main():
    """Fetch IG accounts and cached minute prices for a fixed date range."""
    # One-hour SQLite response cache (expire_after=None -> never expire,
    # expire_after=0 -> do not cache at all).
    session = requests_cache.CachedSession(cache_name='cache',
                                           backend='sqlite',
                                           expire_after=timedelta(hours=1))

    # Uncached service; pass `session` to IGService to cache every query
    # globally instead of per call.
    ig_service = IGService(config.username, config.password,
                           config.api_key, config.acc_type)
    ig_service.create_session()

    accounts = ig_service.fetch_accounts()
    print("accounts:\n%s" % accounts)
    print("")
    print("")

    epic = 'IX.D.ASX.IFM.IP'  # US (SPY) - mini
    resolution = '1Min'  # see pandas.tseries.frequencies.to_offset

    (start_date, end_date) = ('2015-09-15', '2015-09-28')
    # Trailing `session` argument caches just this query.
    response = ig_service.fetch_historical_prices_by_epic_and_date_range(
        epic, resolution, start_date, end_date, session)
    df_ask = response['prices']['ask']
    print("ask prices:\n%s" % df_ask)
def __init__(self, config):
    """Create a one-hour-cached IG REST session from ``config``."""
    CACHE_NAME = 'igcache'

    # Set up logging
    self.logger = logging.getLogger(__name__)
    self.logger.info("Establishing cached session with IG Markets")

    # SQLite-backed HTTP cache; entries expire after one hour.
    session_cached = requests_cache.CachedSession(
        cache_name=CACHE_NAME,
        backend='sqlite',
        expire_after=timedelta(hours=1))

    self.ig_service = IGService(config.IG_USERNAME, config.IG_PASSWORD,
                                config.IG_API_KEY, config.IG_ACC_TYPE,
                                session_cached)

    # Creates REST session
    self.ig_session = self.ig_service.create_session()
def getTimeseries(self, start_date, end_date):
    """Fetch historical prices from the IG API and return a flattened,
    sorted DataFrame for ``self.epic`` at ``self.resolution``."""
    self.start_date = start_date
    self.end_date = end_date

    ig_service = IGService(config.username, config.password,
                           config.api_key, config.acc_type)
    ig_service.create_session()

    # NOTE(review): attribute keeps the original spelling 'ig_reponse'
    # because other code may already read it.
    self.ig_reponse = ig_service.fetch_historical_prices_by_epic_and_date_range(
        epic=self.epic,
        resolution=self.resolution,
        start_date=self.start_date,
        end_date=self.end_date)

    self.dataframe = flatten_df(self.ig_reponse['prices'])
    self.sort_df()
    return self.dataframe
def main():
    """Dump account details and fetch the last 10 daily bars for one epic."""
    logging.basicConfig(level=logging.DEBUG)

    # One-hour SQLite response cache (expire_after=None -> never expire,
    # expire_after=0 -> do not cache at all).
    session = requests_cache.CachedSession(cache_name='cache',
                                           backend='sqlite',
                                           expire_after=timedelta(hours=1))

    # Uncached service; pass `session` to IGService to cache every query
    # globally instead of per call.
    ig_service = IGService(config.username, config.password,
                           config.api_key, config.acc_type)
    ig_service.create_session()

    accounts = ig_service.fetch_accounts()
    print("accounts:\n%s" % accounts)
    print("")
    print("")

    epic = 'IX.D.ASX.IFM.IP'  # US (SPY) - mini
    resolution = 'D'  # see pandas.tseries.frequencies.to_offset
    num_points = 10
    response = ig_service.fetch_historical_prices_by_epic_and_num_points(
        epic, resolution, num_points)
def main():
    """Stream one instrument's prices and the account cash until Enter."""
    logging.basicConfig(level=logging.INFO)

    ig_service = IGService(config.username, config.password,
                           config.api_key, config.acc_type)
    ig_stream_service = IGStreamService(ig_service)
    ig_session = ig_stream_service.create_session()

    # Ensure configured account is selected (for/else: else runs only if
    # the loop completes without a break, i.e. the account was not found).
    accounts = ig_session[u'accounts']
    for account in accounts:
        if account[u'accountId'] == config.acc_number:
            accountId = account[u'accountId']
            break
    else:
        print('Account not found: {0}'.format(config.acc_number))
        # NOTE(review): a None accountId will fail below in
        # 'ACCOUNT:' + accountId — confirm intended behaviour.
        accountId = None
    ig_stream_service.connect(accountId)

    # MERGE-mode subscription for one instrument's quote fields.
    subscription_prices = Subscription(
        mode="MERGE",
        items=['QB.D.FT1605UD.03.IP/MINUTE'],
        fields=["UPDATE_TIME", "BID", "OFFER", "CHANGE", "MARKET_STATE"],
    )
    subscription_prices.addlistener(on_prices_update)
    sub_key_prices = ig_stream_service.ls_client.subscribe(subscription_prices)

    # Second MERGE-mode subscription for the account's available cash.
    subscription_account = Subscription(
        mode="MERGE",
        items=['ACCOUNT:' + accountId],
        fields=["AVAILABLE_CASH"],
    )
    subscription_account.addlistener(on_account_update)
    sub_key_account = ig_stream_service.ls_client.subscribe(
        subscription_account)

    input("{0:-^80}\n".format("HIT CR TO UNSUBSCRIBE AND DISCONNECT FROM \
LIGHTSTREAMER"))

    # Disconnecting
    ig_stream_service.disconnect()
def connect(self, accountId=None):
    """Open the IG streaming session; default to the login's first account."""
    logger.debug("Connecting to IG Streaming API...")
    rest_service = IGService(config.username, config.password,
                             config.api_key, config.acc_type)
    stream_service = IGStreamService(rest_service)
    session = stream_service.create_session()
    if accountId is None:
        # Fall back to the first account attached to this login.
        accountId = session[u'accounts'][0][u'accountId']
    stream_service.connect(accountId)
    self.ig_stream_service = stream_service
def run(config, testing, tickers, filename, n, n_window):
    """Wire up a backtest fed by IG streaming ticks, run it and save stats."""
    events_queue = queue.Queue()

    # Open the IG streaming session on the login's first account.
    ig_service = IGService(config.IG.USERNAME, config.IG.PASSWORD,
                           config.IG.API_KEY, config.IG.ACCOUNT.TYPE)
    ig_stream_service = IGStreamService(ig_service)
    ig_session = ig_stream_service.create_session()
    accountId = ig_session[u'accounts'][0][u'accountId']
    ig_stream_service.connect(accountId)

    initial_equity = PriceParser.parse(500000.00)

    # Components: live IG tick prices, display strategy, fixed sizing,
    # example risk/compliance, simulated execution, simple statistics.
    price_handler = IGTickPriceHandler(events_queue, ig_stream_service,
                                       tickers)
    strategy = DisplayStrategy(n=n, n_window=n_window)
    position_sizer = FixedPositionSizer()
    risk_manager = ExampleRiskManager()
    portfolio_handler = PortfolioHandler(initial_equity, events_queue,
                                         price_handler, position_sizer,
                                         risk_manager)
    compliance = ExampleCompliance(config)
    execution_handler = IBSimulatedExecutionHandler(events_queue,
                                                    price_handler,
                                                    compliance)
    statistics = SimpleStatistics(config, portfolio_handler)

    # Assemble and run the backtest, then persist the statistics.
    backtest = Backtest(price_handler, strategy, portfolio_handler,
                        execution_handler, position_sizer, risk_manager,
                        statistics, initial_equity)
    results = backtest.simulate_trading(testing=testing)
    statistics.save(filename)
    return results
def main():
    """Fetch IG accounts and cached minute prices for a January date range."""
    # One-hour SQLite response cache (expire_after=None -> never expire,
    # expire_after=0 -> do not cache at all).
    session = requests_cache.CachedSession(cache_name='cache',
                                           backend='sqlite',
                                           expire_after=timedelta(hours=1))

    # Uncached service; pass `session` to IGService to cache every query
    # globally instead of per call.
    ig_service = IGService(config.username, config.password,
                           config.api_key, config.acc_type)
    ig_service.create_session()

    accounts = ig_service.fetch_accounts()
    print("accounts:\n%s" % accounts)
    print("")
    print("")

    epic = 'IX.D.ASX.IFM.IP'  # US (SPY) - mini
    resolution = '1Min'  # see pandas.tseries.frequencies.to_offset

    (start_date, end_date) = ('2015-01-15', '2015-01-28')
    # Trailing `session` argument caches just this query.
    response = ig_service.fetch_historical_prices_by_epic_and_date_range(
        epic, resolution, start_date, end_date, session)
    df_ask = response['prices']['ask']
    print("ask prices:\n%s" % df_ask)
def main():
    """Stream spreadbet prices and account cash until Enter is pressed."""
    logging.basicConfig(level=logging.INFO)

    ig_service = IGService(config.username, config.password,
                           config.api_key, config.acc_type,
                           acc_number=config.acc_number)
    ig_stream_service = IGStreamService(ig_service)
    ig_stream_service.create_session()
    # ig_stream_service.create_session(version='3')

    # MERGE-mode subscription for two sample spreadbet epics
    # (CFD equivalents: L1:CS.D.GBPUSD.CFD.IP, L1:CS.D.USDJPY.CFD.IP).
    subscription_prices = Subscription(
        mode="MERGE",
        items=["L1:CS.D.GBPUSD.TODAY.IP", "L1:IX.D.FTSE.DAILY.IP"],
        fields=["UPDATE_TIME", "BID", "OFFER", "CHANGE", "MARKET_STATE"],
    )
    subscription_prices.addlistener(on_prices_update)
    sub_key_prices = ig_stream_service.ls_client.subscribe(subscription_prices)

    # Second MERGE-mode subscription for the account's available cash.
    subscription_account = Subscription(
        mode="MERGE",
        items=["ACCOUNT:" + config.acc_number],
        fields=["AVAILABLE_CASH"],
    )
    subscription_account.addlistener(on_account_update)
    sub_key_account = ig_stream_service.ls_client.subscribe(
        subscription_account)

    input("{0:-^80}\n".format("HIT CR TO UNSUBSCRIBE AND DISCONNECT FROM \
LIGHTSTREAMER"))

    # Disconnecting
    ig_stream_service.disconnect()
def main():
    """Stream two CFD epics and the account cash until the user hits CR."""
    logging.basicConfig(level=logging.INFO)

    ig_service = IGService(config.username, config.password,
                           config.api_key, config.acc_type)
    ig_stream_service = IGStreamService(ig_service)
    ig_session = ig_stream_service.create_session()

    # Connect on the first account attached to this login.
    accountId = ig_session[u'accounts'][0][u'accountId']
    ig_stream_service.connect(accountId)

    # MERGE-mode price subscription for two CFD epics.
    price_sub = Subscription(
        mode="MERGE",
        items=['L1:CS.D.GBPUSD.CFD.IP', 'L1:CS.D.USDJPY.CFD.IP'],
        fields=["UPDATE_TIME", "BID", "OFFER", "CHANGE", "MARKET_STATE"],
    )
    price_sub.addlistener(on_prices_update)
    sub_key_prices = ig_stream_service.ls_client.subscribe(price_sub)

    # Second MERGE-mode subscription for the account's available cash.
    # NOTE(review): items is a plain string here (not a list) — confirm
    # the client accepts that form.
    account_sub = Subscription(
        mode="MERGE",
        items='ACCOUNT:' + accountId,
        fields=["AVAILABLE_CASH"],
    )
    account_sub.addlistener(on_account_update)
    sub_key_account = ig_stream_service.ls_client.subscribe(account_sub)

    compat.wait_for_input(
        "{0:-^80}\n".format("HIT CR TO UNSUBSCRIBE AND DISCONNECT FROM \
LIGHTSTREAMER"))

    # Disconnecting
    ig_stream_service.disconnect()
def main():
    """Subscribe to many price items in batches and persist them on a timer.

    Fix: the original raised ``NameError`` on ``accountId`` if the
    configured account number was not present in the session's account
    list; we now fail fast with a clear message instead.
    """
    global priceObj

    # logging.basicConfig(level=logging.INFO)
    config = IGServiceConfig()
    ig_service = IGService(config.username, config.password,
                           config.api_key, config.acc_type)
    ig_stream_service = IGStreamService(ig_service)
    ig_session = ig_stream_service.create_session()

    # Select the configured account; bail out early if it is unknown.
    accountId = None
    for account in ig_session[u'accounts']:
        if account[u'accountId'] == config.acc_number:
            accountId = account[u'accountId']
            break
    if accountId is None:
        print('Account not found: {0}'.format(config.acc_number))
        return
    ig_stream_service.connect(accountId)

    # Persist accumulated prices to the DB every 5 seconds.
    sched = BackgroundScheduler()
    sched.add_job(saveToDB, 'interval', seconds=5)
    sched.start()

    # Lightstreamer caps items per subscription, so subscribe in
    # batches of up to 38 items.
    subscriptionCap = 38
    i = 0
    listSubscription = []
    while i < len(priceObj):
        items = list(priceObj.keys())[i:i + subscriptionCap]
        subscription_prices = Subscription(mode="MERGE",
                                           items=items,
                                           fields=["BID", "OFFER"])
        subscription_prices.addlistener(onPriceUpdate)
        sub_key_prices = ig_stream_service.ls_client.subscribe(
            subscription_prices)
        listSubscription.append(sub_key_prices)
        time.sleep(2)  # pace the subscriptions to avoid flooding the server
        print(items)
        i += subscriptionCap

    input("{0:-^80}\n".format("Press Enter to close"))

    # Disconnecting
    ig_stream_service.disconnect()
def main():
    """Dump accounts and fetch 10 daily bars, with a one-hour HTTP cache."""
    logging.basicConfig(level=logging.DEBUG)

    # One-hour SQLite response cache (expire_after=None -> never expire,
    # expire_after=0 -> do not cache at all).
    session = requests_cache.CachedSession(cache_name="cache",
                                           backend="sqlite",
                                           expire_after=timedelta(hours=1))

    # Uncached service; pass `session` to IGService to cache every query
    # globally instead of per call.
    ig_service = IGService(config.username, config.password,
                           config.api_key, config.acc_type,
                           acc_number=config.acc_number)
    ig_service.create_session()

    accounts = ig_service.fetch_accounts()
    print("accounts:\n%s" % accounts)
    print("")
    print("")

    # epic = "CS.D.GBPUSD.CFD.IP"  # sample CFD epic
    epic = "IX.D.ASX.IFM.IP"  # US (SPY) - mini
    resolution = "D"  # see pandas.tseries.frequencies.to_offset
    num_points = 10
    response = ig_service.fetch_historical_prices_by_epic_and_num_points(
        epic, resolution, num_points)
def get_candles(instruments, params):
    """Print account, open positions and the last 10 daily EURUSD ask bars.

    NOTE(review): ``instruments`` and ``params`` are currently unused;
    the epic and resolution are hard-coded below — confirm intent.
    """
    ig_service = IGService(config.username, config.password,
                           config.api_key, config.acc_type)
    ig_service.create_session()

    account_info = ig_service.switch_account(config.acc_number,
                                             False)  # not necessary
    print(account_info)

    open_positions = ig_service.fetch_open_positions()
    print("open_positions:\n%s" % open_positions)
    print("")

    epic = 'CS.D.EURUSD.MINI.IP'
    resolution = 'D'
    num_points = 10
    response = ig_service.fetch_historical_prices_by_epic_and_num_points(
        epic, resolution, num_points)
    df_ask = response['prices']['ask']
    print("ask prices:\n%s" % df_ask)
def create_ig_service(credentials):
    """Build an IGService from a credentials mapping (password never logged)."""
    LOGGER.info("Creating service with user:%s, api_key:%s, password:<hidden>",
                credentials["username"], credentials["api_key"])
    return IGService(credentials["username"],
                     credentials["password"],
                     credentials["api_key"])
store.initialize_library('NASDAQ') # Access the library library = store['NASDAQ'] # Get some data from IG # Get environment variables for IG acc_number = os.environ['IG_SERVICE_ACC_NUMBER'] password = os.environ['IG_SERVICE_PASSWORD'] api_key = os.environ['IG_SERVICE_API_KEY'] username = os.environ['IG_SERVICE_USERNAME'] acc_type = os.environ['IG_SERVICE_ACC_TYPE'] # Create IG Session ig_service = IGService(username, password, api_key, acc_type) ig_service.create_session() # Get account info account_info = ig_service.switch_account(acc_number, False) # not necessary print(account_info) # get position info open_positions = ig_service.fetch_open_positions() print("open_positions:\n%s" % open_positions) print() epic = 'CS.D.EURUSD.MINI.IP' resolution = 'D' num_points = 10 response = ig_service.fetch_historical_prices_by_epic_and_num_points(
def main():
    """Exercise several IG REST endpoints with a one-hour HTTP cache.

    Fix: credentials were hard-coded in source (a leaked demo login and
    API key); they now come from the shared ``config`` object, matching
    the other examples in this project.
    """
    logging.basicConfig(level=logging.DEBUG)

    # One-hour SQLite response cache (expire_after=None -> never expire,
    # expire_after=0 -> do not cache at all).
    session = requests_cache.CachedSession(cache_name='cache',
                                           backend='sqlite',
                                           expire_after=timedelta(hours=1))

    # SECURITY: never commit usernames/passwords/API keys to source;
    # load them from configuration. Pass `session` to IGService to cache
    # every query globally instead of per call.
    ig_service = IGService(config.username, config.password,
                           config.api_key, config.acc_type)
    ig_service.create_session()

    accounts = ig_service.fetch_accounts()
    print("accounts:\n%s" % accounts)

    open_positions = ig_service.fetch_open_positions()
    print("open_positions:\n%s" % open_positions)
    print("")

    working_orders = ig_service.fetch_working_orders()
    print("working_orders:\n%s" % working_orders)
    print("")

    epic = 'IX.D.AEX.IFM.IP'
    resolution = 'D'  # see pandas.tseries.frequencies.to_offset
    num_points = 120
    # May raise: error.public-api.exceeded-account-historical-data-allowance
    response = ig_service.fetch_historical_prices_by_epic_and_num_points(
        epic, resolution, num_points)

    result_1 = ig_service.search_markets("Treasury Bond Decimalised")
    market = ig_service.fetch_market_by_epic("CC.D.CC.UMA.IP")
    print(result_1)

    epic = "IX.D.SPTRD.IFA.IP"
    market_info = ig_service.fetch_market_by_epic(epic, session)  # cached
    print(market_info)

    # Trailing `session` argument caches just this query.
    response = ig_service.fetch_historical_prices_by_epic_and_num_points(
        epic, resolution, num_points, session)
def start_session(self) -> IGService:
    """Validate credentials, log in to IG and return the live session."""
    self.check_required_credentials()
    svc = IGService(self.login, self.password, self.apiKey, self.accType)
    svc.create_session()
    # Remember which account the freshly created session is bound to.
    self.current_account = svc.ig_session['currentAccountId']
    return svc
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Print open positions and the last 10 minute bars for USDJPY mini."""

from trading_ig import IGService
from utils_igmarkets.trading_ig_config import Config as config

ig_service = IGService(config.username, config.password,
                       config.api_key, config.acc_type)
ig_service.create_session()

open_positions = ig_service.fetch_open_positions()
print("open_positions:\n%s" % open_positions)
print("")

# telstra: AA.D.TLS.CASH.IP
epic = 'CS.D.USDJPY.MINI.IP'
resolution = 'min'
num_points = 10
response = ig_service.fetch_historical_prices_by_epic_and_num_points(
    epic, resolution, num_points)
df_ask = response['prices']['ask']
print("ask prices:\n%s" % df_ask)
class IGStore(with_metaclass(MetaSingleton, object)):
    """Singleton store bridging backtrader with the IG Markets REST and
    Lightstreamer APIs.

    Fix applied: in ``_t_order_create`` a rejected or failed order used
    ``return``, which terminated the worker thread permanently so no
    later orders were ever processed; those paths now ``continue``.
    """

    BrokerCls = None  # broker class will autoregister
    DataCls = None  # data class will auto register

    params = (
        ('token', ''),
        ('account', ''),
        ('usr', ''),
        ('pwd', ''),
        ('currency_code', 'GBP'),  # The currency code of the account
        ('practice', True),
        ('account_tmout', 10.0),  # account balance refresh timeout
    )

    _ENVPRACTICE = 'DEMO'
    _ENVLIVE = 'LIVE'

    # Map backtrader execution types to IG order type strings.
    _ORDEREXECS = {
        bt.Order.Market: 'MARKET',
        bt.Order.Limit: 'LIMIT',
        bt.Order.Stop: 'STOP',
        bt.Order.StopLimit: 'TODO',
    }

    _GRANULARITIES = 'TODO - NEEDED FOR HISTORICAL'

    @classmethod
    def getdata(cls, *args, **kwargs):
        '''Returns ``DataCls`` with args, kwargs'''
        return cls.DataCls(*args, **kwargs)

    @classmethod
    def getbroker(cls, *args, **kwargs):
        '''Returns broker with *args, **kwargs from registered ``BrokerCls``'''
        return cls.BrokerCls(*args, **kwargs)

    def __init__(self):
        """Initialise state, authenticate against IG and start streaming."""
        super(IGStore, self).__init__()

        self.notifs = collections.deque()  # store notifications for cerebro
        self._env = None  # reference to cerebro for general notifications
        self.broker = None  # broker instance
        self.datas = list()  # datas that have registered over start

        self._orders = collections.OrderedDict()  # map order.ref to oid
        self._ordersrev = collections.OrderedDict()  # map oid to order.ref
        self._transpend = collections.defaultdict(collections.deque)

        self._oenv = self._ENVPRACTICE if self.p.practice else self._ENVLIVE

        self.igapi = IGService(self.p.usr, self.p.pwd, self.p.token,
                               self._oenv)
        self.igapi.create_session()

        self.igss = Streamer(ig_service=self.igapi)
        self.ig_session = self.igss.create_session()
        self.igss.connect(self.p.account)

        # Work with JSON rather than Pandas for better backtrader integration
        self.igapi.return_dataframe = False

        self._cash = 0.0
        self._value = 0.0
        self.pull_cash_and_value()

        self._evt_acct = threading.Event()

    def broker_threads(self):
        """Set up daemon threads and queues for broker related
        notifications (account updates, order create, order cancel)."""
        self.q_account = queue.Queue()
        kwargs = {'q': self.q_account}
        self.q_account.put(True)  # force an immediate update

        t = threading.Thread(target=self._t_account)
        t.daemon = True
        t.start()

        t = threading.Thread(target=self._t_account_events, kwargs=kwargs)
        t.daemon = True
        t.start()

        self.q_ordercreate = queue.Queue()
        t = threading.Thread(target=self._t_order_create)
        t.daemon = True
        t.start()

        self.q_orderclose = queue.Queue()
        t = threading.Thread(target=self._t_order_cancel)
        t.daemon = True
        t.start()

        # Wait once for the values to be set
        self._evt_acct.wait(self.p.account_tmout)

    def pull_cash_and_value(self):
        """Set the initial cash and value before streaming updates start."""
        accounts = self.igapi.fetch_accounts()
        for account in accounts['accounts']:
            if self.p.account == account['accountId']:
                self._cash = account['balance']['available']
                self._value = account['balance']['balance']

    def get_cash(self):
        """Return the last known available cash balance."""
        # TODO - Check where we
        return self._cash

    def get_notifications(self):
        '''Return the pending "store" notifications'''
        self.notifs.append(None)  # put a mark / threads could still append
        return [x for x in iter(self.notifs.popleft, None)]

    def get_positions(self):
        """Return the list of currently open positions from IG."""
        # TODO - Get postion info from returned object.
        positions = self.igapi.fetch_open_positions()
        return positions['positions']

    def get_value(self):
        """Return the last known account equity value."""
        return self._value

    def put_notification(self, msg, *args, **kwargs):
        """Queue a (msg, args, kwargs) notification for cerebro."""
        self.notifs.append((msg, args, kwargs))

    def start(self, data=None, broker=None):
        """Register a data feed or a broker with the store."""
        # Datas require some processing to kickstart data reception
        if data is None and broker is None:
            self.cash = None
            return

        if data is not None:
            self._env = data._env
            # For datas simulate a queue with None to kickstart co
            self.datas.append(data)
            if self.broker is not None:
                self.broker.data_started(data)
        elif broker is not None:
            self.broker = broker
            self.streaming_events()
            self.broker_threads()

    def stop(self):
        """Signal the worker threads to end by queueing sentinels."""
        if self.broker is not None:
            self.q_ordercreate.put(None)
            self.q_orderclose.put(None)
            self.q_account.put(None)

    '''
    Loads of methods to add in-between
    '''

    def _t_account(self):
        """Worker thread: consume account update messages from q_account.

        The relevant account info arrives here after subscribing to
        account information through lightstreamer.
        """
        while True:
            try:
                msg = self.q_account.get(timeout=self.p.account_tmout)
                if msg is None:
                    break  # end of thread
                # Skip the bool True primer put on the queue at startup.
                # TODO improve this check
                elif type(msg) != bool:
                    try:
                        self._cash = float(msg["AVAILABLE_CASH"])
                        self._value = float(msg["EQUITY"])
                    except KeyError:
                        pass
            except queue.Empty:  # tmout -> time to refresh
                pass

            self._evt_acct.set()

    def order_create(self, order, stopside=None, takeside=None, **kwargs):
        """Translate a backtrader order into IG kwargs and queue it.

        Additional kwargs:
            expiry: String, default 'DFB'. Other examples could be
                'DEC-14'. Check the instrument details through IG to
                find out the correct expiry.
            guaranteed_stop: Bool, default False. Whether to use a
                guaranteed stop.
            time_in_force: String, 'GOOD_TILL_CANCELLED' or
                'GOOD_TILL_DATE'.
            good_till_date: Datetime. Required if time_in_force is
                'GOOD_TILL_DATE'.
        """
        okwargs = dict()
        okwargs['currency_code'] = self.p.currency_code
        okwargs['epic'] = order.data._dataname
        # Size must be positive for both buy and sell orders
        okwargs['size'] = abs(order.created.size)
        okwargs['direction'] = 'BUY' if order.isbuy() else 'SELL'
        okwargs['order_type'] = self._ORDEREXECS[order.exectype]
        # TODO FILL_OR_KILL
        okwargs['force_open'] = "false"
        # Filler - required arguments, updated later if a Limit order
        okwargs['level'] = order.created.price
        okwargs['limit_level'] = None
        okwargs['limit_distance'] = None
        okwargs['stop_level'] = None
        okwargs['stop_distance'] = None

        # Allow users to set the expiry through kwargs
        if 'expiry' in kwargs:
            okwargs['expiry'] = kwargs["expiry"]
        else:
            okwargs['expiry'] = 'DFB'

        # Allow users to set a guaranteed stop; IG expects the boolean
        # as a lowercase string.
        if 'guaranteed_stop' in kwargs:
            if kwargs['guaranteed_stop'] == True:
                okwargs['guaranteed_stop'] = "true"
            elif kwargs['guaranteed_stop'] == False:
                okwargs['guaranteed_stop'] = "false"
            else:
                raise ValueError(
                    'guaranteed_stop must be a boolean value: "{}" '
                    'was entered'.format(kwargs['guaranteed_stop']))
        else:
            okwargs['guaranteed_stop'] = "false"

        # Market orders use an 'order_type' keyword. Limit and stop
        # orders use 'type'
        if order.exectype == bt.Order.Market:
            okwargs['quote_id'] = None
            # IG does not allow a level to be set on market orders
            okwargs['level'] = None

        if order.exectype in [bt.Order.Stop, bt.Order.Limit]:
            # Allow passing of a timeInForce kwarg
            if 'time_in_force' in kwargs:
                okwargs['time_in_force'] = kwargs['time_in_force']
                if kwargs['time_in_force'] == 'GOOD_TILL_DATE':
                    if 'good_till_date' in kwargs:
                        # Trading_IG will do a datetime conversion
                        okwargs['good_till_date'] = kwargs['good_till_date']
                    else:
                        raise ValueError(
                            'If timeInForce == GOOD_TILL_DATE, a '
                            'goodTillDate datetime kwarg must be provided.')
            else:
                okwargs['time_in_force'] = 'GOOD_TILL_CANCELLED'

        if order.exectype == bt.Order.StopLimit:
            # TODO
            okwargs['lowerBound'] = order.created.pricelimit
            okwargs['upperBound'] = order.created.pricelimit

        if order.exectype == bt.Order.StopTrail:
            # TODO need to figure out how to get the stop distance and
            # increment from the trail amount.
            okwargs['stop_distance'] = order.trailamount
            # okwargs['trailingStopIncrement'] = 'TODO!'

        if stopside is not None:
            okwargs['stop_level'] = stopside.price

        if takeside is not None:
            okwargs['limit_level'] = takeside.price

        okwargs.update(**kwargs)  # anything from the user

        self.q_ordercreate.put((
            order.ref,
            okwargs,
        ))
        return order

    def order_cancel(self, order):
        """Queue an order for cancellation and return it."""
        self.q_orderclose.put(order.ref)
        return order

    def _t_order_cancel(self):
        """Worker thread: cancel working orders queued on q_orderclose."""
        while True:
            oref = self.q_orderclose.get()
            if oref is None:
                break

            oid = self._orders.get(oref, None)
            if oid is None:
                continue  # the order is no longer there

            try:
                o = self.igapi.delete_working_order(oid)
            except Exception as e:
                continue  # not cancelled - FIXME: notify

            self.broker._cancel(oref)

    def _t_order_create(self):
        """Worker thread: submit orders queued on q_ordercreate to IG."""
        while True:
            msg = self.q_ordercreate.get()
            if msg is None:
                break

            oref, okwargs = msg

            # Market orders have an 'order_type' kwarg. Working orders
            # use the 'type' kwarg for setting stop or limit
            if okwargs['order_type'] == 'MARKET':
                try:
                    # NOTE The IG API confirms the deal automatically
                    # with the create_open_position call; if no error is
                    # returned here the deal was accepted and is open.
                    o = self.igapi.create_open_position(**okwargs)
                except Exception as e:
                    self.put_notification(e)
                    self.broker._reject(oref)
                    continue  # keep serving later orders (was: return)
            else:
                try:
                    o = self.igapi.create_working_order(**okwargs)
                except Exception as e:
                    print(e)
                    self.put_notification(e)
                    self.broker._reject(oref)
                    continue  # keep serving later orders (was: return)

            # Ids are delivered in different fields and all must be
            # fetched to match them (as executions) to the order here
            oids = list()
            oids.append(o['dealId'])

            if o['dealStatus'] == 'REJECTED':
                self.broker._reject(oref)
                self.put_notification(o['reason'])

            if not oids:
                self.broker._reject(oref)
                continue  # keep serving later orders (was: return)

            self._orders[oref] = oids[0]

            # Send the submission notification
            # TODO Shouldn't this come earlier????
            self.broker._submit(oref)

            if okwargs['order_type'] == 'MARKET':
                self.broker._accept(oref)  # taken immediately
                self.broker._fill(oref, o['size'], o['level'],
                                  okwargs['order_type'])

            for oid in oids:
                self._ordersrev[oid] = oref  # maps ids to backtrader order

    def streaming_account(self, tmout=None):
        """Create a subscription to account information such as balance,
        equity funds and margin; return the queue updates arrive on."""
        q = queue.Queue()
        kwargs = {'q': q, 'tmout': tmout}

        t = threading.Thread(target=self._t_account_listener, kwargs=kwargs)
        t.daemon = True
        t.start()

        t = threading.Thread(target=self._t_account_events, kwargs=kwargs)
        t.daemon = True
        t.start()
        return q

    def _t_account_events(self, q, tmout=None):
        """Thread target: create the MERGE-mode lightstreamer subscription
        for account events and route updates onto ``q``."""
        self.igss.set_account_q(q)

        subscription_account = Subscription(
            mode="MERGE",
            items=['ACCOUNT:' + self.p.account],
            fields=["AVAILABLE_CASH", "EQUITY"],
        )

        # Adding the "on_balance_update" function to Subscription
        subscription_account.addlistener(self.igss.on_account_update)

        # Registering the Subscription
        sub_key_account = self.igss.ls_client.subscribe(subscription_account)

    def streaming_events(self, tmout=None):
        """Placeholder for streaming event setup (not yet implemented)."""
        pass

    def streaming_prices(self, dataname, tmout=None):
        """Start a price streaming thread for ``dataname`` and return the
        queue ticks will arrive on."""
        q = queue.Queue()
        kwargs = {'q': q, 'dataname': dataname, 'tmout': tmout}
        t = threading.Thread(target=self._t_streaming_prices, kwargs=kwargs)
        t.daemon = True
        t.start()
        return q

    def _t_streaming_prices(self, dataname, q, tmout):
        """Thread target: subscribe to DISTINCT tick updates for
        ``dataname`` and route them onto ``q``."""
        if tmout is not None:
            _time.sleep(tmout)

        self.igss.set_price_q(q, dataname)

        epic = 'CHART:' + dataname + ':TICK'

        subcription_prices = Subscription(
            mode="DISTINCT",
            items=[epic],
            fields=["UTM", "BID", "OFR", "TTV", "LTV"],
        )

        # Adding the "on_price_update" function to Subscription
        subcription_prices.addlistener(self.igss.on_prices_update)
        sub_key_prices = self.igss.ls_client.subscribe(subcription_prices)
def test_ig_service():
    """Live integration walk-through of the IGService REST wrapper (v1).

    NOTE(review): hits the real IG API (credentials via IG_SERVICE_* env
    vars, network required). The wait() calls pace requests to avoid the
    'error.public-api.exceeded-account-allowance' rate limit, so statement
    order is significant.
    """
    DELAY = 30

    def wait(delay):
        # Sleep between request bursts to stay inside IG's API allowance.
        print(
            "Wait %s s to avoid error.public-api.exceeded-account-allowance"
            % delay)
        time.sleep(delay)

    session_cached = requests_cache.CachedSession(
        cache_name='cache', backend='sqlite',
        expire_after=timedelta(hours=1))
    session_not_cached = requests.Session()

    # Cached session twice, then uncached: the second cached pass should be
    # served from the sqlite cache.
    for session in [session_cached, session_cached, session_not_cached]:
        pp = pprint.PrettyPrinter(indent=4)  # NOTE(review): unused — TODO confirm

        assert (isinstance(trading_ig.__version__, six.string_types))

        config = ConfigEnvVar("IG_SERVICE")

        #ig_service = IGService(config.username, config.password, config.api_key, config.acc_type)
        ig_service = IGService(config.username, config.password,
                               config.api_key, config.acc_type, session)
        ig_service.create_session()

        print("fetch_accounts")
        response = ig_service.fetch_accounts()
        print(response)
        #assert(response['balance'][0]['available']>0)
        assert (response['balance'][0] > 0)
        print("")

        print("fetch_account_activity_by_period")
        response = ig_service.fetch_account_activity_by_period(10000)
        print(response)
        assert (isinstance(response, pd.DataFrame))
        print("")

        # NOTE(review): duplicate of the call above — kept as-is; possibly
        # exercising the request cache. TODO confirm intent.
        print("fetch_account_activity_by_period")
        response = ig_service.fetch_account_activity_by_period(10000)
        print(response)
        assert (isinstance(response, pd.DataFrame))
        print("")

        print("fetch_transaction_history_by_type_and_period")
        response = ig_service.fetch_transaction_history_by_type_and_period(
            10000, "ALL")
        print(response)
        assert (isinstance(response, pd.DataFrame))

        wait(DELAY)

        print("")

        print("fetch_open_positions")
        response = ig_service.fetch_open_positions()
        print(response)
        assert (isinstance(response, pd.DataFrame))
        print("")

        print("fetch_working_orders")
        response = ig_service.fetch_working_orders()
        print(response)
        assert (isinstance(response, pd.DataFrame))
        print("")

        print("fetch_top_level_navigation_nodes")
        response = ig_service.fetch_top_level_navigation_nodes()
        print(response)  # dict with nodes and markets
        assert (isinstance(response, dict))
        market_id = response['nodes']['id'].iloc[0]
        print("")

        print("fetch_client_sentiment_by_instrument")
        response = ig_service.fetch_client_sentiment_by_instrument(market_id)
        print(response)
        assert (isinstance(response, dict))
        print("")

        print("fetch_related_client_sentiment_by_instrument")
        response = ig_service.fetch_related_client_sentiment_by_instrument(
            market_id)
        print(response)
        assert (isinstance(response, pd.DataFrame))
        print("")

        print("fetch_sub_nodes_by_node")
        node = market_id  #?
        response = ig_service.fetch_sub_nodes_by_node(node)
        print(response)
        assert (isinstance(response['markets'], pd.DataFrame))
        assert (isinstance(response['nodes'], pd.DataFrame))
        print("")

        wait(DELAY)

        print("fetch_all_watchlists")
        response = ig_service.fetch_all_watchlists()
        print(response)
        assert (isinstance(response, pd.DataFrame))
        watchlist_id = response['id'].iloc[0]  # u'Popular Markets'
        #epic =
        print("")

        print("fetch_watchlist_markets")
        response = ig_service.fetch_watchlist_markets(watchlist_id)
        print(response)
        assert (isinstance(response, pd.DataFrame))
        epic = response['epic'].iloc[
            0]  # epic = 'CS.D.EURUSD.MINI.IP' # epic = u'IX.D.CAC.IDF.IP'
        print("")

        print("fetch_market_by_epic")
        response = ig_service.fetch_market_by_epic(epic)
        print(response)
        #pp.pprint(response)
        assert (isinstance(response, dict))
        print("")

        print("search_markets")
        search_term = 'EURUSD'
        #search_term = 'SPY'
        response = ig_service.search_markets(search_term)
        print(response)
        assert (isinstance(response, pd.DataFrame))
        print("")

        print("fetch_historical_prices_by_epic_and_num_points")
        #epic = 'CS.D.EURUSD.MINI.IP'
        #epic = 'IX.D.ASX.IFM.IP' # US 500 (SPY)
        #epic = 'IX.D.ASX.IFM.IP' # US (SPY) - mini
        #resolution = 'HOUR' # MINUTE, MINUTE_2, MINUTE_3, MINUTE_5, MINUTE_10, MINUTE_15, MINUTE_30, HOUR, HOUR_2, HOUR_3, HOUR_4, DAY, WEEK, MONTH
        resolution = 'H'  # http://pandas.pydata.org/pandas-docs/stable/timeseries.html#dateoffset-objects
        num_points = 10
        response = ig_service.fetch_historical_prices_by_epic_and_num_points(
            epic, resolution, num_points)
        print(response)
        #print(response['prices']['price'])
        #print(response['prices']['price']['ask'])
        #print(response['prices']['volume'])
        assert (isinstance(response['allowance'], dict))
        #assert(isinstance(response['prices']['volume'], pd.Series))
        #assert(isinstance(response['prices']['price'], pd.Panel))
        assert (isinstance(response['prices'], pd.DataFrame))
        print("")

        print("fetch_historical_prices_by_epic_and_date_range")
        start_date = datetime(2014, 12, 15)
        end_date = datetime(2014, 12, 20)
        response = ig_service.fetch_historical_prices_by_epic_and_date_range(
            epic, resolution, start_date, end_date)
        print(response)
        assert (isinstance(response['allowance'], dict))
        #assert(isinstance(response['prices']['volume'], pd.Series))
        #assert(isinstance(response['prices']['price'], pd.Panel))
        assert (isinstance(response['prices'], pd.DataFrame))

        wait(DELAY)

        print("")
def test_ig_service():
    """Live integration walk-through of the IGService REST wrapper (v2).

    NOTE(review): hits the real IG API (network + configured credentials).
    The wait() calls pace requests to avoid the
    'error.public-api.exceeded-account-allowance' rate limit, so statement
    order is significant. CACHE_NAME is a module-level constant defined
    elsewhere in this file.
    """
    delay_for_ig = 30

    def wait(delay):
        # Sleep between request bursts to stay inside IG's API allowance.
        print(
            "Wait %s s to avoid 'error.public-api.exceeded-account-allowance'"
            % delay)
        time.sleep(delay)

    session_cached = requests_cache.CachedSession(
        cache_name=CACHE_NAME, backend="sqlite",
        expire_after=timedelta(hours=1))
    session_not_cached = requests.Session()

    # Cached session twice, then uncached: the second cached pass should be
    # served from the sqlite cache.
    for i, session in enumerate(
            [session_cached, session_cached, session_not_cached]):
        # pp = pprint.PrettyPrinter(indent=4)

        assert isinstance(trading_ig.__version__, six.string_types)

        # ig_service = IGService(config.username, config.password,
        #                        config.api_key, config.acc_type)
        ig_service = IGService(config.username, config.password,
                               config.api_key, config.acc_type, session)
        ig_service.create_session()

        print("%d - fetch_accounts" % i)
        response = ig_service.fetch_accounts()
        print(response)
        # assert(response['balance'][0]['available']>0)
        assert response["balance"][0] > 0
        print("")

        print("fetch_account_activity_by_period")
        response = ig_service.fetch_account_activity_by_period(10000)
        print(response)
        assert isinstance(response, pd.DataFrame)
        print("")

        # NOTE(review): duplicate of the call above — kept as-is; possibly
        # exercising the request cache. TODO confirm intent.
        print("fetch_account_activity_by_period")
        response = ig_service.fetch_account_activity_by_period(10000)
        print(response)
        assert isinstance(response, pd.DataFrame)
        print("")

        print("fetch_transaction_history_by_type_and_period")
        response = ig_service.fetch_transaction_history_by_type_and_period(
            10000, "ALL")
        print(response)
        assert isinstance(response, pd.DataFrame)

        wait(delay_for_ig)

        print("")

        print("fetch_open_positions")
        response = ig_service.fetch_open_positions()
        print(response)
        assert isinstance(response, pd.DataFrame)
        print("")

        print("fetch_working_orders")
        response = ig_service.fetch_working_orders()
        print(response)
        assert isinstance(response, pd.DataFrame)
        print("")

        print("fetch_top_level_navigation_nodes")
        response = ig_service.fetch_top_level_navigation_nodes()
        print(response)  # dict with nodes and markets
        assert isinstance(response, dict)
        market_id = response["nodes"]["id"].iloc[0]
        print("")

        print("fetch_client_sentiment_by_instrument")
        response = ig_service.fetch_client_sentiment_by_instrument(market_id)
        print(response)
        assert isinstance(response, dict)
        print("")

        print("fetch_related_client_sentiment_by_instrument")
        response = ig_service.fetch_related_client_sentiment_by_instrument(
            market_id)
        print(response)
        assert isinstance(response, pd.DataFrame)
        print("")

        print("fetch_sub_nodes_by_node")
        node = market_id
        response = ig_service.fetch_sub_nodes_by_node(node)
        print(response)
        assert isinstance(response["markets"], pd.DataFrame)
        assert isinstance(response["nodes"], pd.DataFrame)
        print("")

        wait(delay_for_ig)

        print("fetch_all_watchlists")
        response = ig_service.fetch_all_watchlists()
        print(response)
        assert isinstance(response, pd.DataFrame)
        watchlist_id = response["id"].iloc[0]  # u'Popular Markets'
        print("")

        print("fetch_watchlist_markets")
        response = ig_service.fetch_watchlist_markets(watchlist_id)
        print(response)
        assert isinstance(response, pd.DataFrame)
        # epic = 'CS.D.EURUSD.MINI.IP'
        # epic = u'IX.D.CAC.IDF.IP'
        epic = response["epic"].iloc[0]
        print("")

        print("fetch_market_by_epic")
        response = ig_service.fetch_market_by_epic(epic)
        print(response)
        # pp.pprint(response)
        assert isinstance(response, dict)
        print("")

        print("search_markets")
        search_term = "EURUSD"
        # search_term = 'SPY'
        response = ig_service.search_markets(search_term)
        print(response)
        assert isinstance(response, pd.DataFrame)
        print("")

        wait(delay_for_ig)
        wait(delay_for_ig)

        print("fetch_historical_prices_by_epic_and_num_points")
        # epic = 'CS.D.EURUSD.MINI.IP'
        # epic = 'IX.D.ASX.IFM.IP' # US 500 (SPY)
        # epic = 'IX.D.ASX.IFM.IP' # US (SPY) - mini
        # MINUTE, MINUTE_2, MINUTE_3, MINUTE_5, MINUTE_10, MINUTE_15,
        # MINUTE_30, HOUR, HOUR_2, HOUR_3, HOUR_4, DAY, WEEK, MONTH
        # resolution = 'HOUR'
        # http://pandas.pydata.org/pandas-docs/stable/timeseries.html#dateoffset-objects
        resolution = "H"
        num_points = 10
        response = ig_service.fetch_historical_prices_by_epic_and_num_points(
            epic, resolution, num_points)
        print(response)
        # print(response['prices']['price'])
        # print(response['prices']['price']['ask'])
        # print(response['prices']['volume'])
        assert isinstance(response["allowance"], dict)
        # assert(isinstance(response['prices']['volume'], pd.Series))
        # assert(isinstance(response['prices']['price'], pd.Panel))
        assert isinstance(response["prices"], pd.DataFrame)
        print("")

        wait(delay_for_ig)

        print("fetch_historical_prices_by_epic_and_date_range")
        end_date = datetime.utcnow().replace(hour=0, minute=0, second=0,
                                             microsecond=0)
        start_date = end_date - timedelta(days=3)
        response = ig_service.fetch_historical_prices_by_epic_and_date_range(
            epic, resolution, start_date, end_date)
        print(response)
        assert isinstance(response["allowance"], dict)
        # assert(isinstance(response['prices']['volume'], pd.Series))
        # assert(isinstance(response['prices']['price'], pd.Panel))
        assert isinstance(response["prices"], pd.DataFrame)
        print("")

        wait(delay_for_ig)
def main():
    """Stream 1-minute candle data for a basket of epics plus account cash.

    Side effects: logs in to IG, opens a Lightstreamer connection,
    dispatches price updates to the module-level ``on_prices_update``
    listener, blocks until the user hits Enter, then disconnects.
    """
    # Channel names: CHART:<epic>:1MINUTE is the 1-minute candle stream.
    epics1 = [
        'CHART:CS.D.GBPEUR.MINI.IP:1MINUTE',
        'CHART:IR.D.10YEAR100.FWM2.IP:1MINUTE',
        'CHART:CC.D.LCO.UME.IP:1MINUTE',
        'CHART:CS.D.NZDUSD.MINI.IP:1MINUTE',
        'CHART:CS.D.USDCAD.MINI.IP:1MINUTE',
        'CHART:CS.D.USDJPY.MINI.IP:1MINUTE',
        'CHART:CO.D.RR.FWM1.IP:1MINUTE',
        'CHART:CO.D.O.FWM2.IP:1MINUTE',
        'CHART:IX.D.SPTRD.IFM.IP:1MINUTE',
        'CHART:IX.D.NASDAQ.IFE.IP:1MINUTE'
    ]
    # (removed dead locals epics2/epics3/epics4 — never read; epics1 is the
    # only list ever subscribed)

    logging.basicConfig(level=logging.INFO)
    # logging.basicConfig(level=logging.DEBUG)

    ig_service = IGService(config.username, config.password, config.api_key,
                           config.acc_type)

    ig_stream_service = IGStreamService(ig_service)
    ig_session = ig_stream_service.create_session()

    # Ensure configured account is selected
    accounts = ig_session[u'accounts']
    for account in accounts:
        if account[u'accountId'] == config.acc_number:
            accountId = account[u'accountId']
            break
    else:
        # BUGFIX: the original set accountId = None and carried on, which
        # later crashed with TypeError on 'ACCOUNT:' + accountId. Fail fast
        # with a clear message instead.
        raise SystemExit('Account not found: {0}'.format(config.acc_number))

    ig_stream_service.connect(accountId)

    # Making a new Subscription in MERGE mode
    subscription_prices = Subscription(
        mode="MERGE",
        items=epics1,
        fields=["UTM", "BID_OPEN", "BID_HIGH", "BID_LOW", "BID_CLOSE"],
    )
    # adapter="QUOTE_ADAPTER")

    # Adding the "on_price_update" function to Subscription
    subscription_prices.addlistener(on_prices_update)

    # Registering the Subscription
    sub_key_prices = ig_stream_service.ls_client.subscribe(subscription_prices)

    # Making an other Subscription in MERGE mode for the account balance
    subscription_account = Subscription(
        mode="MERGE",
        items=['ACCOUNT:' + accountId],
        fields=["AVAILABLE_CASH"],
    )
    # #adapter="QUOTE_ADAPTER")

    # Registering the Subscription
    sub_key_account = ig_stream_service.ls_client.subscribe(
        subscription_account)

    input("{0:-^80}\n".format("HIT CR TO UNSUBSCRIBE AND DISCONNECT FROM \
LIGHTSTREAMER"))

    # Disconnecting
    ig_stream_service.disconnect()
def __init__(self):
    """Build the IG REST client from config2 credentials and log in."""
    service = IGService(
        username=config2.username,
        password=config2.password,
        api_key=config2.api_key,
        acc_type=config2.acc_type,
    )
    # Authenticate immediately so the instance is ready for API calls.
    service.create_session()
    self.ig_service = service
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Small demo script: log in to IG, list open positions and print the last
few ask prices for USD/JPY."""

from trading_ig import IGService
from utils_igmarkets.trading_ig_config import Config as config

# Authenticate a REST session with the configured credentials.
ig_service = IGService(config.username, config.password, config.api_key,
                       config.acc_type)
ig_service.create_session()

# Show the currently open positions on the account.
open_positions = ig_service.fetch_open_positions()
print("open_positions:\n%s" % open_positions)
print("")

# telstra: AA.D.TLS.CASH.IP
# "epic": "CS.D.USDJPY.MINI.IP",
epic = 'CS.D.USDJPY.MINI.IP'
resolution = 'min'
num_points = 10

response = ig_service.fetch_historical_prices_by_epic_and_num_points(
    epic, resolution, num_points)
df_ask = response['prices']['ask']
print("ask prices:\n%s" % df_ask)
def test_ig_service():
    """Live integration walk-through of the IGService REST wrapper (v3).

    NOTE(review): hits the real IG API (credentials via IG_SERVICE_* env
    vars, network required). The wait() calls pace requests to avoid the
    'error.public-api.exceeded-account-allowance' rate limit, so statement
    order is significant.
    """
    DELAY = 30

    def wait(delay):
        # Sleep between request bursts to stay inside IG's API allowance.
        print("Wait %s s to avoid error.public-api.exceeded-account-allowance"
              % delay)
        time.sleep(delay)

    session_cached = requests_cache.CachedSession(
        cache_name="cache", backend="sqlite",
        expire_after=timedelta(hours=1))
    session_not_cached = requests.Session()

    # Cached session twice, then uncached: the second cached pass should be
    # served from the sqlite cache.
    for session in [session_cached, session_cached, session_not_cached]:
        pp = pprint.PrettyPrinter(indent=4)  # NOTE(review): unused — TODO confirm

        assert isinstance(trading_ig.__version__, six.string_types)

        config = ConfigEnvVar("IG_SERVICE")

        # ig_service = IGService(config.username, config.password, config.api_key, config.acc_type)
        ig_service = IGService(config.username, config.password,
                               config.api_key, config.acc_type, session)
        ig_service.create_session()

        print("fetch_accounts")
        response = ig_service.fetch_accounts()
        print(response)
        # assert(response['balance'][0]['available']>0)
        assert response["balance"][0] > 0
        print("")

        print("fetch_account_activity_by_period")
        response = ig_service.fetch_account_activity_by_period(10000)
        print(response)
        assert isinstance(response, pd.DataFrame)
        print("")

        # NOTE(review): duplicate of the call above — kept as-is; possibly
        # exercising the request cache. TODO confirm intent.
        print("fetch_account_activity_by_period")
        response = ig_service.fetch_account_activity_by_period(10000)
        print(response)
        assert isinstance(response, pd.DataFrame)
        print("")

        print("fetch_transaction_history_by_type_and_period")
        response = ig_service.fetch_transaction_history_by_type_and_period(
            10000, "ALL")
        print(response)
        assert isinstance(response, pd.DataFrame)

        wait(DELAY)

        print("")

        print("fetch_open_positions")
        response = ig_service.fetch_open_positions()
        print(response)
        assert isinstance(response, pd.DataFrame)
        print("")

        print("fetch_working_orders")
        response = ig_service.fetch_working_orders()
        print(response)
        assert isinstance(response, pd.DataFrame)
        print("")

        print("fetch_top_level_navigation_nodes")
        response = ig_service.fetch_top_level_navigation_nodes()
        print(response)  # dict with nodes and markets
        assert isinstance(response, dict)
        market_id = response["nodes"]["id"].iloc[0]
        print("")

        print("fetch_client_sentiment_by_instrument")
        response = ig_service.fetch_client_sentiment_by_instrument(market_id)
        print(response)
        assert isinstance(response, dict)
        print("")

        print("fetch_related_client_sentiment_by_instrument")
        response = ig_service.fetch_related_client_sentiment_by_instrument(
            market_id)
        print(response)
        assert isinstance(response, pd.DataFrame)
        print("")

        print("fetch_sub_nodes_by_node")
        node = market_id  # ?
        response = ig_service.fetch_sub_nodes_by_node(node)
        print(response)
        assert isinstance(response["markets"], pd.DataFrame)
        assert isinstance(response["nodes"], pd.DataFrame)
        print("")

        wait(DELAY)

        print("fetch_all_watchlists")
        response = ig_service.fetch_all_watchlists()
        print(response)
        assert isinstance(response, pd.DataFrame)
        watchlist_id = response["id"].iloc[0]  # u'Popular Markets'
        # epic =
        print("")

        print("fetch_watchlist_markets")
        response = ig_service.fetch_watchlist_markets(watchlist_id)
        print(response)
        assert isinstance(response, pd.DataFrame)
        epic = response["epic"].iloc[0]
        # epic = 'CS.D.EURUSD.MINI.IP'
        # epic = u'IX.D.CAC.IDF.IP'
        print("")

        print("fetch_market_by_epic")
        response = ig_service.fetch_market_by_epic(epic)
        print(response)
        # pp.pprint(response)
        assert isinstance(response, dict)
        print("")

        print("search_markets")
        search_term = "EURUSD"
        # search_term = 'SPY'
        response = ig_service.search_markets(search_term)
        print(response)
        assert isinstance(response, pd.DataFrame)
        print("")

        print("fetch_historical_prices_by_epic_and_num_points")
        # epic = 'CS.D.EURUSD.MINI.IP'
        # epic = 'IX.D.ASX.IFM.IP' # US 500 (SPY)
        # epic = 'IX.D.ASX.IFM.IP' # US (SPY) - mini
        # resolution = 'HOUR' # MINUTE, MINUTE_2, MINUTE_3, MINUTE_5, MINUTE_10, MINUTE_15, MINUTE_30, HOUR, HOUR_2, HOUR_3, HOUR_4, DAY, WEEK, MONTH
        resolution = "H"  # http://pandas.pydata.org/pandas-docs/stable/timeseries.html#dateoffset-objects
        num_points = 10
        response = ig_service.fetch_historical_prices_by_epic_and_num_points(
            epic, resolution, num_points)
        print(response)
        # print(response['prices']['price'])
        # print(response['prices']['price']['ask'])
        # print(response['prices']['volume'])
        assert isinstance(response["allowance"], dict)
        # assert(isinstance(response['prices']['volume'], pd.Series))
        # assert(isinstance(response['prices']['price'], pd.Panel))
        assert isinstance(response["prices"], pd.DataFrame)
        print("")

        print("fetch_historical_prices_by_epic_and_date_range")
        start_date = datetime(2014, 12, 15)
        end_date = datetime(2014, 12, 20)
        response = ig_service.fetch_historical_prices_by_epic_and_date_range(
            epic, resolution, start_date, end_date)
        print(response)
        assert isinstance(response["allowance"], dict)
        # assert(isinstance(response['prices']['volume'], pd.Series))
        # assert(isinstance(response['prices']['price'], pd.Panel))
        assert isinstance(response["prices"], pd.DataFrame)

        wait(DELAY)

        print("")
def main():
    """Stream EUR/USD 1-minute candles, account cash, and the heartbeat.

    Side effects: logs in to IG, opens a Lightstreamer connection, routes
    updates to the module-level on_prices_update / on_account_update /
    on_heartbeat_update listeners, blocks until the user hits Enter, then
    disconnects and flushes the module-level ``producer``.
    """
    logging.basicConfig(level=logging.INFO)
    # logging.basicConfig(level=logging.DEBUG)

    ig_service = IGService(config.username, config.password, config.api_key,
                           config.acc_type)

    ig_stream_service = IGStreamService(ig_service)
    ig_session = ig_stream_service.create_session()

    # Ensure configured account is selected
    accounts = ig_session[u"accounts"]
    for account in accounts:
        if account[u"accountId"] == config.acc_number:
            accountId = account[u"accountId"]
            break
    else:
        # BUGFIX: the original set accountId = None and carried on, which
        # later crashed with TypeError on "ACCOUNT:" + accountId. Fail fast
        # with a clear message instead.
        raise SystemExit("Account not found: {0}".format(config.acc_number))

    ig_stream_service.connect(accountId)

    # Making a new Subscription in MERGE mode for 1-minute candles
    subscription_prices = Subscription(
        mode="MERGE",
        items=["CHART:CS.D.EURUSD.MINI.IP:1MINUTE"],
        fields=[
            "LTV", "UTM", "DAY_OPEN_MID", "DAY_NET_CHG_MID",
            "DAY_PERC_CHG_MID", "DAY_HIGH", "DAY_LOW", "OFR_OPEN",
            "OFR_HIGH", "OFR_LOW", "OFR_CLOSE", "BID_OPEN", "BID_HIGH",
            "BID_LOW", "BID_CLOSE", "LTP_OPEN", "LTP_HIGH", "LTP_LOW",
            "LTP_CLOSE", "CONS_END", "CONS_TICK_COUNT"
        ],
    )
    # adapter="QUOTE_ADAPTER")

    # Adding the "on_price_update" function to Subscription
    subscription_prices.addlistener(on_prices_update)

    # Registering the Subscription
    sub_key_prices = ig_stream_service.ls_client.subscribe(subscription_prices)

    # Making an other Subscription in MERGE mode for the account balance
    subscription_account = Subscription(
        mode="MERGE",
        items=["ACCOUNT:" + accountId],
        fields=["AVAILABLE_CASH"],
    )
    # #adapter="QUOTE_ADAPTER")

    # Adding the "on_balance_update" function to Subscription
    subscription_account.addlistener(on_account_update)

    # Registering the Subscription
    sub_key_account = ig_stream_service.ls_client.subscribe(
        subscription_account)

    # Heartbeat keeps the connection observable even when markets are quiet.
    heartbeat_items = ["TRADE:HB.U.HEARTBEAT.IP"]
    heartbeat = Subscription(
        mode='MERGE',
        items=heartbeat_items,
        fields=["HEARTBEAT"],
    )
    heartbeat.addlistener(on_heartbeat_update)
    sub_heartbeat = ig_stream_service.ls_client.subscribe(heartbeat)

    input("{0:-^80}\n".format("HIT CR TO UNSUBSCRIBE AND DISCONNECT FROM \
LIGHTSTREAMER"))

    # Disconnecting
    ig_stream_service.disconnect()
    # NOTE(review): `producer` is a module-level object (presumably a Kafka
    # producer fed by the listeners) — flush any buffered messages on exit.
    producer.flush()