Example #1
 def setUp(self):
     r = rconfig.Read()
     self.broker = Broker("TestBroker")
     self.redis = redis.StrictRedis(host=r.host,
                                    port=r.port,
                                    db=r.db,
                                    password=r.password)
Example #2
    def __init__(self):
        self.live_trading = True  # set False for backtesting.
        self.log_level = logging.DEBUG
        self.logger = self.setup_logger()

        # Don't connect to live data feeds if backtesting.
        if self.live_trading:
            self.exchanges = self.load_exchanges(self.logger)

        # Connect to database.
        print("Connecting to database...")
        self.db_client = MongoClient(
            self.DB_URL, serverSelectionTimeoutMS=self.DB_TIMEOUT_MS)
        self.db = self.db_client[self.DB_NAME]
        self.check_db_connection()

        # Event queue and producer/consumer worker classes.
        self.events = queue.Queue(0)
        self.data = Datahandler(self.exchanges, self.logger, self.db,
                                self.db_client)
        self.strategy = Strategy(self.exchanges, self.logger, self.db,
                                 self.db_client)
        self.portfolio = Portfolio(self.logger)
        self.broker = Broker(self.exchanges, self.logger)

        # Processing performance variables.
        self.start_processing = None
        self.end_processing = None

        self.run()
Example #3
class BaseTask(abc.ABC):
    task_name = None

    def __init__(self):
        if not self.task_name:
            raise ValueError("task_name should set")
        self.broker = Broker()

    @abc.abstractmethod
    def run(self, *args, **kwargs):
        raise NotImplementedError("BaseTask run method must be implented.")

    def update_state(self, task_id, state, meta={}):
        _task = {"state": state, "meta": meta}
        serialized_task = json.dumps(_task)
        backend = Backend()
        backend.enqueue(queue_name=task_id, item=serialized_task)
        print(f"task {task_id} success queued")

    def delay(self, *args, **kwargs):
        try:
            self.task_id = str(uuid.uuid4())
            _task = {"task_id": self.task_id, "args": args, "kwargs": kwargs}
            serialized_task = json.dumps(_task)
            self.broker.enqueue(queue_name=self.task_name,
                                item=serialized_task)
            print(f"task {self.task_id} success queued")
        except Exception:
            raise Exception("unable to publish task to broker")
        return self.task_id
Example #4
def main():

    rounds = 1000
    broker = Broker(rounds)
    broker.setup()
    # Run the simulation once and return its results.
    results = broker.play()
    summary = broker.display_results()
    return results, summary
Example #5
    def setup_broker(self, cfg=None):
        """
        Currently only creates a dummy Broker object for registering Applications to.

        :param cfg:
        :return:
        """
        self.__broker = Broker()
Example #6
def test_broker_single_register():
    broker = Broker()
    data = {
        'pk': hexlify(b'this is a test').decode('utf-8'),
        'id': hexlify(b'someID').decode('utf-8')
    }
    broker.register(data, 'mix')
    cache = broker.fetch([], 'mix')
    assert len(cache) == 1
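The test above implies a small contract for Broker: register(data, channel) caches an entry under a channel, and fetch(exclude, channel) returns what is cached there. The project's real class isn't shown here, so the following is only a minimal in-memory sketch of that assumed interface, with hypothetical names:

class InMemoryBroker:
    """Hypothetical stand-in illustrating the register/fetch contract used above."""

    def __init__(self):
        self._channels = {}  # channel name -> list of registered dicts

    def register(self, data, channel):
        # Cache the entry under its channel so later fetches can return it.
        self._channels.setdefault(channel, []).append(data)

    def fetch(self, exclude_ids, channel):
        # Return every cached entry whose 'id' is not in the exclusion list.
        return [d for d in self._channels.get(channel, [])
                if d.get('id') not in exclude_ids]

With this stand-in, the test's assertions would hold: one entry registered under 'mix', one entry fetched back.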
Example #7
 def __init_broker_and_processors(self):
     self.__broker = Broker(self)
     self.__event_processor = EventProcessor(0, self)
     self.__event_processors = list()
     for i in range(self.config.processor_num):
         processor_id = i + 1
         event_channel = EventChannel()
         event_channel.register(self.__broker)
         event_processor = EventProcessor(processor_id, self, event_channel)
         self.__event_processors.append(event_processor)
Example #8
	def __init__(self, config, name, userOpt, itemName, discoverFun):
		Broker.__init__(self, config, name, userOpt, itemName, discoverFun)
		itemsUser = config.getList(userOpt, [], onChange = None)
		if not itemsUser:
			itemsUser = None
		itemsDisc = self._discover(discoverFun).keys()
		self._itemsStart = itemsDisc
		if itemsDisc and itemsUser:
			self._itemsStart = utils.filterBlackWhite(itemsDisc, itemsUser)
		elif not itemsDisc:
			self._itemsStart = utils.filterBlackWhite(itemsUser, itemsUser)
		self._nIndex = 0
Example #9
def RecordLSHFactory(pipeline, fields, filter_={}, field_weights = None):

    name = __LSHName__(fields, filter_)
    broker = Broker(pipeline)
    try:
        i = broker.download_obj(name)
        i.LSH = {}
        for key in i.fields:
            i.LSH[key] = broker.download_obj('{}_{}'.format(name,key))
        assert isinstance(i, RecordLSH)
        return i
    except AssertionError:
        return RecordLSH(pipeline, fields, filter_=filter_, field_weights = field_weights)
Example #10
    def __init__(self, data):
        """
        TODO: Abstract this from USD and BTC specific accounts

        :param data: dataframe to run backtest on
        """

        self.data = data
        self.strategies = []

        usd_account = Account('USD', 1000)
        btc_account = Account('BTC', 0, 'Close')
        self.broker = Broker(data, [usd_account, btc_account])
Example #11
  def __init__(self, instrument, strategies, start_date, end_date, opening_bal,time_stamp=None):
    '''
    constructs message queues
    initialises brokers and traders
    '''    
    self.instrument = instrument
    
    self.start_date = start_date
    self.end_date = end_date
    
    self.opening_bal = opening_bal
    
    self.datafeed = DataFeed(instrument)
    
    self.orderQ = MsgQ()    
    self.receiptQ = MsgQ()
    
    self.term_req_Q = MsgQ()
    self.term_notice_Q = MsgQ()
    
    self.broker = Broker(self.datafeed, self.orderQ, self.receiptQ, self.term_req_Q, self.term_notice_Q)   

    self.traders = []    
    for strategy in strategies:
      trader = Trader(self.datafeed, self.broker, self.opening_bal, self.instrument, strategy, self.start_date, self.end_date)
      self.traders.append(trader)
      
    self.time_stamp = time_stamp
Example #12
 def get_broker_by_name(self, broker_name):
     query = "SELECT * FROM brokers WHERE broker_name = %s"
     val = (broker_name,)
     self.cursor.execute(query, val)
     result = self.cursor.fetchone()
     if result is None:
         # No broker with that name was found.
         return None
     broker = Broker(result[0], result[1], result[2])
     return broker
Example #13
def test4():
    ip_add = "127.0.0.1"
    # initialize
    broke1 = Broker(ip_add)
    sub1 = Subscriber(ip_add)
    pub1 = Publisher(ip_add)

    # set up subscriber and publisher with broker
    stop = threading.Event()
    # subscriber
    sub1.register_sub("soccer, basketball")  # subscribe -- need no threading
    # publishers/broker
    T1 = Thread(target=broke1.run,
                args=(stop, ))  # open up broker for publisher to register with
    T2 = Thread(target=pub1.register_pub,
                args=("soccer", ))  # publisher registers
    T1.start()
    T2.start()
    stop.set()
    T1.join()
    T2.join()

    # open up broker, subscriber for listening and start publishing rapidly.
    stop.clear()
    T1 = Thread(target=broke1.run, args=(stop, ))  # open up broker
    T2 = Thread(target=sub1.notify, args=(stop, ))

    T1.start()
    T2.start()
    time.sleep(1)
    print("Time published: %.20f" % time.time())
    pub1.publish("hello world")
    # 1 second wait
    time.sleep(1)
    stop.set()
Example #14
    def __init__(self):

        # Set False for forward testing.
        self.live_trading = True

        self.log_level = logging.INFO
        self.logger = self.setup_logger()

        # Check DB state OK before connecting to any exchanges
        self.db_client = MongoClient(
            self.DB_URL,
            serverSelectionTimeoutMS=self.DB_TIMEOUT_MS)
        self.db_prices = self.db_client[self.DB_PRICES]
        self.db_other = self.db_client[self.DB_OTHER]
        self.check_db_status(self.VENUES)

        self.exchanges = self.exchange_wrappers(self.logger, self.VENUES)
        self.telegram = Telegram(self.logger)

        # Main event queue.
        self.events = queue.Queue(0)

        # Producer/consumer worker classes.
        self.data = Datahandler(self.exchanges, self.logger, self.db_prices,
                                self.db_client)

        self.strategy = Strategy(self.exchanges, self.logger, self.db_prices,
                                 self.db_other, self.db_client)

        self.portfolio = Portfolio(self.exchanges, self.logger, self.db_other,
                                   self.db_client, self.strategy.models,
                                   self.telegram)

        self.broker = Broker(self.exchanges, self.logger, self.portfolio,
                             self.db_other, self.db_client, self.live_trading,
                             self.telegram)

        self.portfolio.broker = self.broker

        # Start flask api in separate process
        # p = subprocess.Popen(["python", "api.py"])
        # self.logger.info("Started flask API.")

        # Processing performance tracking variables.
        self.start_processing = None
        self.end_processing = None
        self.cycle_count = 0
Example #15
 def get_broker(self, broker_id):
     query = "SELECT * FROM brokers WHERE broker_id = %s"
     val = (broker_id, )
     self.cursor.execute(query, val)
     for brokers in self.cursor:
         broker = Broker(brokers[0], brokers[1], brokers[2])
         self.brokers_list.append(broker)
         return broker
Example #16
    def __init__(self, symbol1, symbol2, contract1, contract2):

        self.symbol1 = symbol1
        self.symbol2 = symbol2
        self.contract1 = contract1
        self.contract2 = contract2

        sprint("菲利普准备行动。")

        self.broker = Broker()
        sprint("菲利普呼叫了交易商。")

        self.long_on_match1 = lambda amount: self.broker.long_on_match(
            symbol1, contract1, amount)
        self.long_on_match2 = lambda amount: self.broker.long_on_match(
            symbol2, contract2, amount)

        self.short_on_match1 = lambda amount: self.broker.short_on_match(
            symbol1, contract1, amount)
        self.short_on_match2 = lambda amount: self.broker.short_on_match(
            symbol2, contract2, amount)

        self.close_long_on_match1 = lambda amount: self.broker.close_long_on_match(
            symbol1, contract1, amount)
        self.close_long_on_match2 = lambda amount: self.broker.close_long_on_match(
            symbol2, contract2, amount)

        self.close_short_on_match1 = lambda amount: self.broker.close_short_on_match(
            symbol1, contract1, amount)
        self.close_short_on_match2 = lambda amount: self.broker.close_short_on_match(
            symbol2, contract2, amount)

        self.long_position_info1 = self.broker.long_position_info(
            symbol1, contract1)
        self.long_position_info2 = self.broker.long_position_info(
            symbol2, contract2)

        self.short_position_info1 = self.broker.short_position_info(
            symbol1, contract1)
        self.short_position_info2 = self.broker.short_position_info(
            symbol2, contract2)

        sprint("菲利普准备完毕。")

        self.__refresh_position_info()
        sprint("菲利普开始刷新仓位信息。")
Example #17
def test_case_two():
    print('\nTest_Case_Two')

    test_position = Broker()
    midpoint = 100.
    fee = .003

    # ========

    order_open = {'price': midpoint + (midpoint * fee), 'side': 'short'}
    test_position.add(order=order_open)

    assert test_position.short_inventory.position_count == 1
    print('SHORT Unrealized_pnl: %f' %
          test_position.short_inventory.get_unrealized_pnl())

    assert test_position.long_inventory.position_count == 0
    assert test_position.long_inventory.get_unrealized_pnl() == 0.

    order_close = {'price': midpoint * 0.95, 'side': 'short'}
    test_position.remove(order=order_close)
    assert test_position.short_inventory.position_count == 0
    print('SHORT Unrealized_pnl: %f' %
          test_position.short_inventory.get_unrealized_pnl())

    assert test_position.long_inventory.position_count == 0
    assert test_position.long_inventory.get_unrealized_pnl() == 0.
    print('SHORT Realized_pnl: %f' % test_position.get_realized_pnl())
Example #18
def test_case_one():
    print('\nTest_Case_One')

    test_position = Broker()
    midpoint = 100.
    fee = .003

    # ========

    order_open = {'price': midpoint + (midpoint * fee), 'side': 'long'}
    test_position.add(order=order_open)

    assert test_position.long_inventory.position_count == 1
    print('LONG Unrealized_pnl: %f' %
          test_position.long_inventory.get_unrealized_pnl())

    assert test_position.short_inventory.position_count == 0
    assert test_position.short_inventory.get_unrealized_pnl() == 0.

    order_close = {'price': (midpoint * fee) * 5. + midpoint, 'side': 'long'}
    test_position.remove(order=order_close)
    assert test_position.long_inventory.position_count == 0
    print('LONG Unrealized_pnl: %f' %
          test_position.long_inventory.get_unrealized_pnl())

    assert test_position.short_inventory.position_count == 0
    assert test_position.short_inventory.get_unrealized_pnl() == 0.
    print('LONG Realized_pnl: %f' % test_position.get_realized_pnl())
Example #19
    def __init__(self, env_config):
        account = Account(000000, env_config['initial_cash'], 0,
                          env_config['initial_cash'],
                          env_config['initial_cash'], dict(), False)
        max_limit = env_config['window']
        self.resized_image_width = 100
        self.resized_image_height = 100
        image_channel = 4
        self.log_every = env_config['log_every']
        self.broker = Broker(account, max_limit=max_limit)
        self.all_tickers = self.broker.all_tickers
        self.n_symbols = env_config['n_symbols']
        self.tech_indicators = env_config['tech_indicators']
        length = OHCLVV + len(self.tech_indicators.split())
        self.use_image = env_config['use_image']

        if self.use_image:
            self.observation_space = gym.spaces.Dict({
                'data':
                gym.spaces.Box(-np.inf, np.inf,
                               (self.n_symbols, max_limit, length)),
                'images':
                gym.spaces.Box(-np.inf, np.inf,
                               (self.n_symbols, self.resized_image_height,
                                self.resized_image_width, image_channel)),
                'privates':
                gym.spaces.Box(-np.inf, np.inf, (5 + self.n_symbols * 2, ))
            })
        else:
            self.observation_space = gym.spaces.Dict({
                'data':
                gym.spaces.Box(-np.inf, np.inf,
                               (self.n_symbols, max_limit, length)),
                'privates':
                gym.spaces.Box(-np.inf, np.inf, (5 + self.n_symbols * 2, ))
            })

        self.action_space = gym.spaces.MultiDiscrete([env_config['bins']] *
                                                     self.n_symbols)
        self.current_tickers = None
        self.qty_val = np.linspace(-env_config['max_shares'],
                                   env_config['max_shares'],
                                   env_config['bins'])
        self.images = None
        self.refresh_data = True
Example #20
class BaseTask(abc.ABC):
    """
    Example Usage:
        class AdderTask(BaseTask):
            task_name = "AdderTask"
            def run(self, a, b):
                result = a + b
                return result
        adder = AdderTask()
        adder.delay(9, 34)
    """

    task_name = None

    def __init__(self):
        if not self.task_name:
            raise ValueError("task_name should be set")
        self.broker = Broker()

    @abc.abstractmethod  # subclasses must override run() with their own logic
    def run(self, *args, **kwargs):
        # put your business logic here
        raise NotImplementedError("Task `run` method must be implemented.")

    # Update the task's state via the backend queue.
    def update_state(self, task_id, state, meta={}):
        _task = {"state": state, "meta": meta}
        serialized_task = json.dumps(_task)
        backend = Backend()
        backend.enqueue(queue_name=task_id, item=serialized_task)
        print(f"task info: {task_id} succesfully queued")

    # 异步执行
    def delay(self, *args, **kwargs):
        try:
            self.task_id = str(uuid.uuid4())
            _task = {"task_id": self.task_id, "args": args, "kwargs": kwargs}
            serialized_task = json.dumps(_task)
            # Push the serialized task onto the queue in Redis.
            self.broker.enqueue(queue_name=self.task_name,
                                item=serialized_task)
            print(f"task: {self.task_id} successfully queued")
        except Exception:
            traceback.print_exc()
        return self.task_id
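Neither BaseTask example shows the consuming side. Assuming the Broker also exposes a matching dequeue(queue_name) method (an assumption; only enqueue appears above) and that tasks arrive as the JSON payloads produced by delay(), a worker loop might look roughly like this sketch:

import json
import time

def worker_loop(task, broker):
    """Hypothetical consumer: pull serialized tasks for `task` and run them."""
    while True:
        serialized = broker.dequeue(queue_name=task.task_name)  # assumed API
        if serialized is None:
            time.sleep(0.1)  # nothing queued yet; back off briefly
            continue
        payload = json.loads(serialized)
        task.update_state(payload["task_id"], "RUNNING")
        result = task.run(*payload["args"], **payload["kwargs"])
        task.update_state(payload["task_id"], "SUCCESS", meta={"result": result})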
Example #21
    def __init__(self, broker_personality, nr_transporters, nr_cargo_owners,
                 max_iterations, max_displacement_from_estimated_price,
                 gauss_stdev):

        self.broker_personality = broker_personality
        self.nr_transporters = nr_transporters
        self.nr_cargo_owners = nr_cargo_owners
        self.new_transport_providers()
        self.new_cargo_owners()
        self.broker = Broker(self.broker_personality)
        self.stats = Statistics(self.transporters_icnet,
                                self.transporters_aicnet, self.cargo_owners)
        self.max_iterations = max_iterations
        self.max_displacement_from_estimated_price = max_displacement_from_estimated_price
        self.gauss_stdev = gauss_stdev
        self.global_bid_iterator = 0
        self.multiple_yes = 0
Example #22
    def setup_broker(self, cfg=None):
        """
        Currently only creates a dummy Broker object for registering Applications to.

        :param cfg:
        :return:
        """
        self.__broker = Broker()
        return self.broker
Example #23
def main():
    broker = Broker()
    subscriber = Subscriber()

    subscriber.connect_broker(broker)

    topics = ["1", "2", "3", "4", '5', '6', '7', '8', '9']

    subscriber.subscribe_to_topics(topics)
Example #24
	def _broker(self, reqs, items):
		result = Broker._broker(self, reqs, items)
		for (rType, rValue) in reqs:
			if (rType == WMS.STORAGE) and rValue:
				if result == None:
					result = []
				for rval in rValue:
					result.extend(self.storageDict.get(rval, []))
		return result
Example #25
def main():
    broker = Broker()
    publisher = Publisher()

    publisher.connect_broker(broker)

    topics = ['1']

    publisher.publish_on_topics(topics)
Example #26
    def __init__(self):
        self.client = MongoClient(config.DBURI + config.DBNAME)
        self.db = self.client[config.DBNAME]
        self.active_readers = dict()
        self.broker = Broker()
        self.no_exception = True
        self.stopped = False

        signal('reader_updated').connect(self.update_readers)
Example #27
 def __init__(self, host, port):
     socket = TSocket.TSocket(host, port)
     self.transport = TTransport.TBufferedTransport(socket)
     try:
         self.transport.open()
     except Exception:
         # Connection errors are swallowed; the transport simply stays unopened.
         pass
     protocol = TBinaryProtocol.TBinaryProtocol(self.transport)
     self.client = Broker.Client(protocol)
Example #28
    def __init__(self):

        # Set False for forward testing.
        self.live_trading = True

        self.log_level = logging.DEBUG
        self.logger = self.setup_logger()

        self.exchanges = self.load_exchanges(self.logger)

        # Database.
        self.db_client = MongoClient(
            self.DB_URL, serverSelectionTimeoutMS=self.DB_TIMEOUT_MS)

        # Price data database.
        self.db_prices = self.db_client[self.DB_PRICES]

        # Non-price data database.
        self.db_other = self.db_client[self.DB_OTHER]

        self.check_db_connection()

        # Main event queue.
        self.events = queue.Queue(0)

        # Producer/consumer worker classes.
        self.data = Datahandler(self.exchanges, self.logger, self.db_prices,
                                self.db_client)

        self.strategy = Strategy(self.exchanges, self.logger, self.db_prices,
                                 self.db_other, self.db_client)

        self.portfolio = Portfolio(self.exchanges, self.logger, self.db_other,
                                   self.db_client, self.strategy.models)

        self.broker = Broker(self.exchanges, self.logger, self.db_other,
                             self.db_client, self.live_trading)

        # Processing performance tracking variables.
        self.start_processing = None
        self.end_processing = None
        self.cycle_count = 0

        self.run()
Example #29
def run(config_file: str, password: str):
    """Reads config file, initializes configuration and creates Broker object that runs in a separate thread"""

    logging.info('Starting broker with config')

    logging.info("Loading config file '%s'", config_file)
    try:
        config = ConfigParser()
        config.read(config_file)
    except Exception as ex:
        logging.error("Error reading config: %s, exiting", type(ex))
        logging.error(ex.args)
        return

    try:
        eth_section = config['Ethereum']
        eth_contracts = config['Contracts']
        eth_use = eth_section['use']
        eth_server = eth_section[eth_use]
        ipfs_section = config['IPFS']
        ipfs_use = config['IPFS.%s' % ipfs_section['use']]
        broker = Broker(eth_server=eth_server,
                        abi_path=eth_contracts['abi_path'],
                        pandora=eth_contracts['pandora'],
                        node=eth_contracts['worker_node'],
                        vault=config['Account']['vault'],
                        data_dir=ipfs_section['store_in'],
                        ipfs_server=ipfs_use['server'],
                        ipfs_port=int(ipfs_use['port']),
                        use_hooks=eth_contracts.getboolean('hooks'))
    except Exception as ex:
        logging.error("Error reading config: %s, exiting", type(ex))
        logging.error(ex.args)
        return

    if password is None:
        password = getpass(
            'Please provide password for unlocking private key: ')

    if broker.connect(password) is False:
        return

    # Remove the following line in order to put the app into a daemon mode (running on the background)
    broker.join()
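The keys read above imply a particular config layout. The sketch below reconstructs it purely from those lookups; every value is an illustrative placeholder, not taken from the project:

from configparser import ConfigParser

EXAMPLE_CONFIG = """
[Ethereum]
use = local
local = http://127.0.0.1:8545

[Contracts]
abi_path = ./abi
pandora = 0x0000000000000000000000000000000000000000
worker_node = 0x0000000000000000000000000000000000000000
hooks = no

[IPFS]
use = local
store_in = ./ipfs_data

[IPFS.local]
server = 127.0.0.1
port = 5001

[Account]
vault = ./vault
"""

config = ConfigParser()
config.read_string(EXAMPLE_CONFIG)
# The 'use' option selects which server entry in [Ethereum] is read.
assert config['Ethereum'][config['Ethereum']['use']] == 'http://127.0.0.1:8545'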
Example #30
    def start(self):
        broker = Broker(self.rxq, self.txq)
        broker.find_plugins()
        irc = Irc(self.rxq, self.txq)
        self.irc_p = Process(target=irc.start)
        self.broker_p = Process(target=broker.start)
        self.irc_p.start()
        self.broker_p.start()

        for input in settings.INPUTS:
            input_path = path.join(settings.INPUTS_DIR, "%s.py" % input)
            if path.isfile(input_path):
                module = load_source(input, input_path)
                p = Process(target=module.input, args=(self.rxq, ))
                self.inputs.append(p)
                p.start()
            else:
                # warning
                pass
Example #31
 def __init_broker_and_processors(self):
     self.__broker = Broker(self)
     self.__event_processor = EventProcessor(0, self)
     self.__event_processors = list()
     for i in range(self.config.processor_num):
         processor_id = i + 1
         event_channel = EventChannel()
         event_channel.register(self.__broker)
         event_processor = EventProcessor(processor_id, self, event_channel)
         self.__event_processors.append(event_processor)
Example #32
 def start(self):
     broker = Broker(self.rxq, self.txq)
     broker.find_plugins()
     irc = Irc(self.rxq, self.txq)
     self.irc_p = Process(target=irc.start)
     self.broker_p = Process(target=broker.start)
     self.irc_p.start()
     self.broker_p.start()
     
     for input in settings.INPUTS:
         input_path = path.join(settings.INPUTS_DIR, "%s.py" % input)
         if path.isfile(input_path):
             module = load_source(input, input_path)
             p = Process(target=module.input, args=(self.rxq,))
             self.inputs.append(p)
             p.start()
         else:
             # warning
             pass
Example #33
    def get_brokers(self):
        query = ("SELECT * FROM brokers ORDER BY broker_name ASC")
        self.cursor.execute(query)
        self.brokers_list = []

        for brokers in self.cursor:
            broker = Broker(brokers[0], brokers[1], brokers[2])
            self.brokers_list.append(broker)

        return self.brokers_list
Example #34
 def update(self):
     """Returns: void
     Replace the candles with the most recent ones available.
     Algorithm for minimizing number of updated candles:
         Get the time difference from chart end to now.
         If the time difference is greater than the width of the chart,
             request <chart size> candles.
         else,
             request candles from end of chart to now.
     """
     new_history = None
     if self.get_lag() > self.get_time_span():
         # replace all candles
         new_history = Broker.get_instrument_history(
             instrument=self._instrument,
             granularity=self._granularity,
             count=self.get_size(), 
             to=datetime.datetime.utcnow()
         )
     else:
         # request new candles starting from end of chart
         # TODO verify candleFormat is same as existing chart
         new_history = Broker.get_instrument_history(
             instrument=self._instrument,
             granularity=self._granularity,
             from_time=self.get_end_timestamp()
         )
     if new_history is None:
         Log.write('chart.py update(): Failed to get new candles.')
         raise Exception
     else:
         # Got new candles. Stow them.
         new_candles = new_history['candles']
         # Iterate forwards from last candle. The last candle is probably
         # non-complete, so overwrite it. This thereby fills in the missing
         # gap between (end of chart) and (now). If the gap is smaller 
         # than the size of the chart, only the beginning of the chart is
         # overwritten. If the gap is bigger than the chart, all candles
         # get overwritten. 
         for i in range(0, len(self._candles)):
             # TODO assuming bid/ask candles
             new_candle = new_candles[i]
             self._candles[self._get_end_index()].timestamp    = util_date.string_to_date(new_candle['time'])
             self._candles[self._get_end_index()].volume       = float(new_candle['volume'])
             self._candles[self._get_end_index()].complete     = bool(new_candle['complete'])
             self._candles[self._get_end_index()].open_bid     = float(new_candle['bid']['o'])
             self._candles[self._get_end_index()].open_ask     = float(new_candle['ask']['o'])
             self._candles[self._get_end_index()].high_bid     = float(new_candle['bid']['h'])
             self._candles[self._get_end_index()].high_ask     = float(new_candle['ask']['h'])
             self._candles[self._get_end_index()].low_bid      = float(new_candle['bid']['l'])
             self._candles[self._get_end_index()].low_ask      = float(new_candle['ask']['l'])
             self._candles[self._get_end_index()].close_bid    = float(new_candle['bid']['c'])
             self._candles[self._get_end_index()].close_ask    = float(new_candle['ask']['c'])
             if i < len(self._candles) - 1:
                 self._increment_start_index() # increments end index too
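The overwrite loop above assumes the chart behaves like a fixed-size circular buffer: _get_end_index() points at the slot just behind the start index, and _increment_start_index() advances both ends together. Those helpers aren't shown in the snippet, so the following is only a guess at the indexing they imply:

class _RingIndexSketch:
    """Hypothetical illustration of the circular indexing Chart appears to rely on."""

    def __init__(self, size):
        self._candles = [None] * size  # fixed-size storage
        self._start_index = 0          # slot holding the oldest candle

    def _get_end_index(self):
        # The newest slot sits immediately before the start slot, modulo the size.
        return (self._start_index + len(self._candles) - 1) % len(self._candles)

    def _increment_start_index(self):
        # Advancing the start implicitly advances the end too; the size stays fixed.
        self._start_index = (self._start_index + 1) % len(self._candles)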
Example #35
def main():
    portEnum = PortEnum
    broker = Broker()
    status = StatusChecker(broker)
    try:
        status.start()
        KeyListener('0.0.0.0', portEnum.broker.value, broker)
        loop_thread = Thread(target=asyncore.loop, name="Asyncore Loop")
        loop_thread.start()
    except Exception as e:
        print(e)
        status.kill()
Example #36
 def close_position(self):
     """
     close the position when current size of position is not zero
     """
     if position() != 0:
         # print("in close", position_list[-1])
         Broker(self.init_capital).make_order(unit=-1 * position(),
                                              limit_price=None,
                                              stop_loss=None,
                                              stop_profit=None)
Example #37
 def _curses_refresh(cls, stdcsr):
     ch = stdcsr.getch() # get one char
     if ch == 113: # q == quit
         stdcsr.addstr('\nInitiating shutdown...\n')
         stdcsr.refresh() # redraw
         curses.nocbreak()
         stdcsr.keypad(False)
         #curses.echo()   
         curses.endwin() # restore terminal
         cls.shutdown()
     elif ch == 109: # m == monitor
         account_id = Config.account_id
         balance = Broker.get_balance(account_id)
         msg = cls.msg_base.format(balance)
         stdcsr.clear()
         stdcsr.addstr(msg)
         stdcsr.refresh() # redraw
Example #38
    def test___init__(self):
        """
        Case:     count, no start, no end
        """
        COUNT = 100
        GRANULARITY = 'S5'
        NUM_EXTRA_SECONDS = 300
        sample_instrument = Instrument(4) # USD_JPY
        # Initialize a sample chart.
        chart = Chart(
            in_instrument=sample_instrument,
            count=COUNT
        )
        # check success
        self.assertNotEqual(chart._granularity, None)
        # check indices
        self.assertTrue(
            chart._start_index == 0 and chart._get_end_index() == COUNT - 1
        )
        # check instrument
        self.assertEqual(sample_instrument.get_id(), chart._instrument.get_id())
        self.assertEqual(sample_instrument.get_name(), chart._instrument.get_name())
        # check granularity
        self.assertEqual(chart._granularity, GRANULARITY) # Oanda's default (S5)
        # check count
        self.assertEqual(chart.get_size(), COUNT)

        # check candle format
        # If 'bidask', then the midpoints will be None, and vice-versa
        self.assertNotEqual(chart[0].open_bid, None) # Oanda's default

        start = None
        chart = None
        

        """
        Case:     count, start, no end
        """
        COUNT = 100
        GRANULARITY = 'M1'
        WIGGLE_MINUTES = 5 # no price change -> no candle
        ADJUSTMENT = 120
        sample_instrument = Instrument(4) # USD_JPY

        # Initialize a sample chart with no market close gaps.
        # start = now - time since close - chart size 
        #   - (adjustment to busy time to avoid gaps) - skipped candle slack
        start = datetime.utcnow() \
            - Broker.get_time_since_close() \
            - timedelta(minutes = COUNT + ADJUSTMENT + WIGGLE_MINUTES)
        chart = Chart(
            in_instrument=sample_instrument,
            count=COUNT,
            start=start,
            granularity=GRANULARITY
        )

        # check success
        self.assertNotEqual(chart._granularity, None)
        # check indices
        self.assertTrue(
            chart._start_index == 0 and chart._get_end_index() == COUNT - 1
        )
        # check instrument
        self.assertEqual(sample_instrument.get_id(), chart._instrument.get_id())
        self.assertEqual(sample_instrument.get_name(), chart._instrument.get_name())
        # check granularity
        self.assertEqual(chart._granularity, GRANULARITY)
        # check count
        self.assertEqual(chart.get_size(), COUNT)
        # check start time
        self.assertTrue(
            # Candles gap if there were no ticks, so allow some wiggle room.
            abs(start - chart.get_start_timestamp()) < timedelta(minutes=WIGGLE_MINUTES)
        )
        # check end time
        end_expected = start + timedelta(minutes=COUNT)
        end_real = chart.get_end_timestamp()
        self.assertTrue(
            # Candles gap if there were no ticks, so allow some wiggle room.
            abs(end_expected - end_real) < timedelta(minutes=WIGGLE_MINUTES)
        )
        # check candle format
        self.assertNotEqual(chart[0].open_bid, None)


        """
        count, no start, end
        """
        COUNT = 100
        GRANULARITY = 'H2'

        sample_instrument = Instrument(4) # USD_JPY

        # Initialize a sample chart.
        chart = Chart(
            in_instrument=sample_instrument,
            count=COUNT,
            end=datetime.utcnow(),
            granularity=GRANULARITY
        )

        # check success
        self.assertNotEqual(chart._granularity, None)
        # check indices
        self.assertTrue(
            chart._start_index == 0 and chart._get_end_index() == COUNT - 1
        )
        # check instrument
        self.assertEqual(sample_instrument.get_id(), chart._instrument.get_id())
        self.assertEqual(sample_instrument.get_name(), chart._instrument.get_name())
        # check granularity
        self.assertEqual(chart._granularity, GRANULARITY)
        # check count
        self.assertEqual(chart.get_size(), COUNT)

        # check start time
        """self.assertTrue(
            # Candles gap if there were no ticks, so allow some wiggle room.
            abs(start - chart.get_start_timestamp()) < timedelta(minutes=5)
        )"""
        # check end time
        end_expected = datetime.utcnow()
        end_real = chart.get_end_timestamp()
        if Broker.get_time_until_close() == timedelta():
            self.assertTrue(
                abs(end_expected - end_real) < timedelta(days=3)
            )
        else:
            self.assertTrue(
                # Candles gap if there were no ticks, so allow some wiggle room.
                abs(end_expected - end_real) < timedelta(hours=5)
            )
        # check candle format
        # If 'bidask', then the midpoints will be None, and vice-versa
        self.assertNotEqual(chart[0].open_bid, None) # Oanda's default


        """
        no count, start, no end
        """
        COUNT = 24
        GRANULARITY = 'M' # month (Oanda)
        sample_instrument = Instrument(4) # USD_JPY

        # Initialize a sample chart.
        # start = now - 2 years
        start = datetime.utcnow() - timedelta(days=365*2)
        chart = Chart(
            in_instrument=sample_instrument,
            start=start,
            granularity=GRANULARITY
        )

        # check success
        self.assertNotEqual(chart._granularity, None)
        # check indices
        self.assertTrue(
            chart._start_index == 0 and abs(chart._get_end_index() - COUNT) <= 1
        )
        # check instrument
        self.assertEqual(sample_instrument.get_id(), chart._instrument.get_id())
        self.assertEqual(sample_instrument.get_name(), chart._instrument.get_name())
        # check granularity
        self.assertEqual(chart._granularity, GRANULARITY)
        # check count
        self.assertTrue( abs(chart.get_size() - COUNT) <= 1 )

        # check start time
        self.assertTrue(
            # allow wiggle room.
            abs(start - chart.get_start_timestamp()) < timedelta(days=32)
        )
        # check end time
        end_expected = datetime.utcnow()
        end_real = chart.get_end_timestamp()
        self.assertTrue(
            # Allow wiggle room for market close.
            abs(end_expected - end_real) < timedelta(days=32)
        )
        # check candle format
        # If 'bidask', then the midpoints will be None, and vice-versa
        self.assertNotEqual(chart[0].open_bid, None) # Oanda's default
Example #39
    def test_update(self):
        """
        test: Chart.update()
        Constraints to verify:
            - Data is as recent as possible
            - start index has earliest timestamp
            - end index has latest timestamp
            - timestamps from start to end are sequential
        Cases:
            - old chart (complete update)
            - somewhat outdated chart (partially updated)
            - new chart (no updates other than last (incomplete) candle)
        """

        """
        case: old chart that gets completely updated
        """
        # initial "outdated" chart
        chart = Chart(
            in_instrument=Instrument(4),
            granularity='M1',
            count=4999,
            end=datetime(year=2017, month=12, day=5)
        )
        # Update chart
        chart.update()

        # Verify data is most recent
        time_since_close = Broker.get_time_since_close()
        now = datetime.utcnow()
        end_timestamp = chart.get_end_timestamp()       
        if (Broker.get_time_until_close() == timedelta()):
            # Time since last candle should be close to time since market
            # close. The leniency is high to allow for periods of no new
            # candles.
            self.assertTrue(
                abs((now - end_timestamp) - (time_since_close))
                     < timedelta(minutes=62)
            )
        else:
            # Time since last candle should be close to now.
            self.assertTrue(abs(now - end_timestamp) < timedelta(minutes=2))
        # verify candle at start index has earliest timestamp.
        earliest_timestamp = datetime.utcnow()
        for i in range(0, chart.get_size()):
            if chart[i].timestamp < earliest_timestamp:
                earliest_timestamp = chart[i].timestamp
        self.assertTrue(chart.get_start_timestamp() == earliest_timestamp)
        # verify candle at end index has latest timestamp.
        latest_timestamp = datetime(year=1999, month=1, day=1)
        for i in range(0, chart.get_size()):
            if chart[i].timestamp > latest_timestamp:
                latest_timestamp = chart[i].timestamp
        self.assertTrue(chart.get_end_timestamp() == latest_timestamp)
        # Verify sequential timestamps
        for i in range(0, chart.get_size() - 1):
            self.assertTrue(chart[i].timestamp < chart[i + 1].timestamp)
        """
        Chart that gets partially updated
        """
        # TODO
        """
Example #40
    def _babysit(self):
        """ (See strategy.py for documentation) """
        Log.write('fifty.py babysit(): _open_trade_ids: {}'.format(self.open_trade_ids))

        for open_trade_id in self.open_trade_ids:
            is_closed = Broker.is_trade_closed(open_trade_id)
            if is_closed[0]:
                Log.write('"fifty.py" _babysit(): Trade ({}) has closed with reason: {}'
                    .format( open_trade_id, str(is_closed[1]) )
                )
                # If SL hit, reverse direction.
                if is_closed[1] == TradeClosedReason.STOP_LOSS_ORDER:
                    if self.go_long:
                        self.go_long = False
                    else:
                        self.go_long = True
                self.open_trade_ids.remove(open_trade_id)
            else:
                trade = Broker.get_trade(open_trade_id)
                instrument = trade.instrument
                sl = round( float(trade.stop_loss), 2 )
                go_long = int(trade.units) > 0

                if go_long: # currently long
                    cur_bid = Broker.get_bid(instrument)
                    if cur_bid != None:
                        if cur_bid - sl > self.sl_price_diff:
                            new_sl = cur_bid - self.sl_price_diff
                            resp = Broker.modify_trade(
                                trade_id=open_trade_id,
                                stop_loss_price=str(round(new_sl, 2))
                            )
                            if resp == None:
                                Log.write('"fifty.py" _babysit(): Modify failed. Checking if trade is closed.')
                                closed = Broker.is_trade_closed(open_trade_id)
                                Log.write('fifty.py babysit(): is_trade_closed returned:\n{}'.format(closed))
                                if closed[0]:
                                    Log.write('"fifty.py" _babysit(): BUY trade has closed. (BUY)')
                                    self.open_trade_ids.remove(open_trade_id)
                                    # If SL hit, reverse direction.
                                    if closed[1] == TradeClosedReason.STOP_LOSS_ORDER:
                                        self.go_long = False
                                else:
                                    Log.write('"fifty.py" _babysit(): Failed to modify BUY trade.')
                                    raise Exception
                            else:                                       
                                Log.write('"fifty.py" _babysit(): Modified BUY trade with ID (',\
                                     open_trade_id, ').')
                    else:
                        Log.write('"fifty.py" _babysit(): Failed to get bid while babysitting.')
                        raise Exception
                else: # currently short
                    cur_bid = Broker.get_bid(instrument)
                    if cur_bid != None:
                        if sl - cur_bid > self.sl_price_diff:
                            new_sl = cur_bid + self.sl_price_diff
                            resp = Broker.modify_trade(
                                trade_id=open_trade_id, 
                                stop_loss_price=str(round(new_sl, 2))
                            )
                            if resp == None:
                                closed = Broker.is_trade_closed(open_trade_id)
                                Log.write('fifty.py babysit(): is_trade_closed returned:\n{}'.format(closed))
                                if closed[0]:
                                    Log.write('"fifty.py" _babysit(): SELL trade has closed. (BUY)')
                                    self.open_trade_ids.remove(open_trade_id)
                                    # If SL hit, reverse direction.
                                    if closed[1] == TradeClosedReason.STOP_LOSS_ORDER:
                                        self.go_long = True
                                else:
                                    Log.write('"fifty.py" in _babysit(): Failed to modify SELL trade.')
                                    raise Exception
                            else:
                                Log.write('"fifty.py" _babysit(): Modified SELL trade with ID (',\
                                    open_trade_id, ').')
                    else:
                        Log.write('"fifty.py" _babysit(): Failed to get ask while babysitting.')
                        raise Exception
Example #41
    def run(self):
        # 1. Start Broker
        broker = Broker()
        broker.setDaemon(True)
        broker.start()

        # 2. Start Senders
        try:
            local_logger = LocalLogger(
                queue_size=4096,
                config=self._config['sender']['local_logger'])
            local_logger.setDaemon(True)
        except Exception as e:
            print e
            print "ERROR: Can\'t create local looger"
        else:
            broker.register_sender(local_logger)
            local_logger.start()

        try:
            mac_tracker_server = MacTrackerServer(
                queue_size=4096,
                config=self._config['sender']['mac_tracker_server']
            )
            mac_tracker_server.setDaemon(True)
        except Exception as e:
            print e
            print "ERROR: Can\'t create MAC tracker server"
        else:
            broker.register_sender(mac_tracker_server)
            mac_tracker_server.start()

        # 3. Start Reader
        try:
            reader = Reader(broker, config=self._config['pipe_file'])
            reader.setDaemon(True)
        except Exception as e:
            #sys.stdout.write("Error: Can\'t create PipeReader\n")
            print e
            print "Error: Can\'t create PipeReader\n"
        else:
            reader.start()

        broker.join()
        reader.join()


        #Agent Thread Start Idle Here
        while (True):
            time.sleep(1)
            pass
Example #42
	def __init__(self, config, name, userOpt, itemName, discoverFun):
		Broker.__init__(self, config, name, userOpt, itemName, discoverFun)
		self.storageDict = config.getDict('%s storage access' % userOpt, {}, onChange = None,
			parser = lambda x: utils.parseList(x, ' '))[0]
Example #43
    def __init__(
        self,
        in_instrument,              # <Instrument>    
        granularity='S5',           # string - See Oanda's documentation
        count=None,                 # int - number of candles
        start=None,                 # datetime - UTC
        end=None,                   # datetime - UTC
        price='MBA',                # string
        include_first=None,         # bool
        daily_alignment=None,       # int
        alignment_timezone=None,    # string - timezone
        weekly_alignment=None
    ):
        self._candles = []
        # verify instance of <Instrument> by accessing a member.
        if in_instrument.get_id() == 0:
            pass            

        if count != 0: # a count of None deliberately falls through to the broker call
            # get candles from broker
            instrument_history = Broker.get_instrument_history(
                instrument=in_instrument,
                granularity=granularity,
                count=count,
                from_time=start,
                to=end,
                price=price,
                include_first=include_first,
                daily_alignment=daily_alignment,
                alignment_timezone=alignment_timezone,
                weekly_alignment=weekly_alignment
            )
            if instrument_history == None:
                Log.write('chart.py __init__(): Failed to get instrument history.')
                raise Exception
            else:
                candles_raw = instrument_history['candles']
                for c_r in candles_raw:
                    new_candle = Candle(
                        timestamp=util_date.string_to_date(c_r['time']),
                        volume=float(c_r['volume']),
                        complete=bool(c_r['complete']),
                        open_bid=float(c_r['bid']['o']),
                        high_bid=float(c_r['bid']['h']),
                        low_bid=float(c_r['bid']['l']),
                        close_bid=float(c_r['bid']['c']),
                        open_ask=float(c_r['ask']['o']),
                        high_ask=float(c_r['ask']['h']),
                        low_ask=float(c_r['ask']['l']),
                        close_ask=float(c_r['ask']['c'])
                    )
                    self._candles.append(new_candle)

        self._instrument = in_instrument
        self._granularity = granularity
        self._start_index = 0 # start
        self._price = price
        self.include_first = include_first
        self.daily_alignment = daily_alignment
        self._alignment_timezone = alignment_timezone
        self.weekly_alignment = weekly_alignment
Example #44
    def recover_trades(cls):
        """Returns: None on failure, any value on success
        See if there are any open trades, and resume babysitting.
        -
        If trades are opened without writing their info to the db,
        the trade cannot be distributed back to the strategy that opened
        it, because it is unknown what strategy placed the order.
        This could be solved by writing to the db before placing the order,
        synchronously. However if placing the order failed, then the database
        record would have to be deleted, and this would be messy.
        Instead, designate a backup strategy that adopts orphan trades.
        """

        # Get trades from broker.
        open_trades_broker = Broker.get_open_trades() # instance of <Trades>
        if open_trades_broker == None:
            Log.write('daemon.py recover_trades(): Broker.get_open_trades() failed.')
            return None 

        # Delete any trades from the database that are no longer open.
        #   First, ignore trades that the broker has open.
        db_trades = DB.execute('SELECT trade_id FROM open_trades_live')
        Log.write('"daemon.py" recover_trades():\ndb open trades: {}\nbroker open trades: {}'
            .format(db_trades, open_trades_broker))
        broker_trade_ids = set(str(otb.trade_id) for otb in open_trades_broker)
        db_trades = [dbt for dbt in db_trades
            if str(dbt[0]) not in broker_trade_ids] # keep only trades the broker no longer lists as open
        #   The remaining trades are in the "open trades" db table, but 
        #   the broker is not listing them as open.
        #   They may have closed since the daemon last ran; confirm this.
        #   Another cause is that trades are automatically removed from
        #   Oanda's history after much time passes.
        for dbt in db_trades:
            if Broker.is_trade_closed(dbt[0])[0]:
                # Trade is definitely closed; update db.
                Log.write('"daemon.py" recover_trades(): Trade {} is closed. Deleting from db.'
                    .format(dbt[0]))
            else:
                # Trade is in "open trades" db table and the broker
                # says the trade is neither open nor closed.
                DB.bug('Trade w/ID ({}) is neither open nor closed.'
                    .format(dbt[0]))
                Log.write('"daemon.py" recover_trades(): Trade w/ID (',
                    '{}) is neither open nor closed.'.format(dbt[0]))
            DB.execute('DELETE FROM open_trades_live WHERE trade_id="{}"'
                .format(dbt[0]))
            
        """
        Fill in info not provided by the broker, e.g.
        the name of the strategy that opened the trade.

        It's possible that a trade will be opened and then the system is
        unexpectedly terminated before info about the trade can be saved to
        the database. Thus an open trade may not have a corresponding record in the database.
        """
        recovered_trades = [] # broker trades that have a matching db record
        for broker_trade in open_trades_broker:
            db_trade_info = DB.execute(
                'SELECT strategy, broker FROM open_trades_live WHERE trade_id="{}"'
                .format(broker_trade.trade_id)
            )
            if len(db_trade_info) > 0:
                # Verify broker's info and database info match, just to be safe.
                # - broker name
                if db_trade_info[0][1] != broker_trade.broker_name:
                    Log.write('"daemon.py" recover_trades(): ERROR: "{}" != "{}"'
                        .format(db_trade_info[0][1], broker_trade.broker_name))
                    raise Exception
                # set strategy
                broker_trade.strategy = None # TODO: use a different default?
                for s in cls.strategies:
                    if s.get_name() == db_trade_info[0][0]:
                        broker_trade.strategy = s # reference to class instance
                recovered_trades.append(broker_trade)
            else:
                # Trade exists at the broker but not in the db.
                # Maybe the trade was opened manually. Ignore it by leaving
                # it out of recovered_trades.
                pass

        # Distribute trades to their respective strategy modules
        for broker_trade in recovered_trades:
            if broker_trade.strategy != None:
                # Find the strategy that made this trade and notify it.
                for s in cls.strategies:
                    if broker_trade.strategy.get_name() == s.get_name():
                        s.adopt(broker_trade.trade_id)
                        break
            else:
                # It is not known what strategy opened this trade.
                # One possible reason is that the strategy that opened the
                # trade is no longer open.
                # Assign it to the backup strategy.
                Log.write('"daemon.py" recover_trades(): Assigning trade ',
                    ' ({}) to backup strategy ({}).'
                    .format(broker_trade.trade_id, cls.backup_strategy.get_name()))
                cls.backup_strategy.adopt(broker_trade.trade_id)
        return 0 # success
Example #45
if DEBUG:
    print "Currencies I'm looking for:"
    pp.pprint(wanted_currencies)


# Set up the record keeper to store our history
recorder = False
if Config.has_section("RecordKeeper"):
    from recordkeeper import RecordKeeper

    recorder = RecordKeeper("./price-watcher.cfg")


# Set up the broker object for dealing with the money
broker = Broker("./price-watcher.cfg", wanted_currencies, base_currency, DEBUG)

current_portfolio = broker.get_portfolio()
if DEBUG:
    print "Current portfolio:"
    pp.pprint(current_portfolio)


# Find the new currency equilibrium
money_maker = MoneyMaker("./price-watcher.cfg", base_currency, wanted_currencies, DEBUG)
transactions = money_maker.shake(current_portfolio)


# Get the notifier ready
notifier = False
if Config.has_section("Notifier"):
Example #46
	def __init__(self, config, name, userOpt, itemName, discoverFun):
		Broker.__init__(self, config, name, userOpt, itemName, discoverFun)
		self._itemsStart = self._discover(discoverFun)
Example #47
	def __init__(self, config, name, userOpt, itemName, discoverFun):
		Broker.__init__(self, config, name, userOpt, itemName, discoverFun)
		self._itemsStart = config.getList(userOpt, [], onChange = None)
		if not self._itemsStart:
			self._itemsStart = None
Example #48
class Simulator(object):
  '''
  simulation manager
  '''  
  def __init__(self, instrument, strategies, start_date, end_date, opening_bal,time_stamp=None):
    '''
    constructs message queues
    initialises brokers and traders
    '''    
    self.instrument = instrument
    
    self.start_date = start_date
    self.end_date = end_date
    
    self.opening_bal = opening_bal
    
    self.datafeed = DataFeed(instrument)
    
    self.orderQ = MsgQ()    
    self.receiptQ = MsgQ()
    
    self.term_req_Q = MsgQ()
    self.term_notice_Q = MsgQ()
    
    self.broker = Broker(self.datafeed, self.orderQ, self.receiptQ, self.term_req_Q, self.term_notice_Q)   

    self.traders = []    
    for strategy in strategies:
      trader = Trader(self.datafeed, self.broker, self.opening_bal, self.instrument, strategy, self.start_date, self.end_date)
      self.traders.append(trader)
      
    self.time_stamp = time_stamp
 
  def run(self):    
    '''
    simulate event series
    '''   
    current_date = date(self.start_date.year, self.start_date.month, self.start_date.day)
    
    length = self.end_date - self.start_date
    d_total = length.days
    display_int = d_total / 10
      
    while (current_date <= self.end_date):    
      
      # PROCESS TRADING DAYS
      if (self.datafeed.date_is_trading_day(current_date) == True):        

        self.broker.open_manage_and_close(current_date)                
        
        # book keeping
        for trader in self.traders:
          trader.ac.tally_individual_open_positions(current_date)
          trader.ac.record_net_end_of_day_pos(current_date)
          trader.ac.record_end_of_day_balances(current_date)            
        for trader in self.traders:
          trader.execute_strategy(current_date)          

        #self.broker.log_closed_positions()
        self.broker.log_all_positions(current_date)

      # IGNORE NON-TRADING DAYS
      else:
        pass
    
      current_date = current_date + timedelta(days=1)  
      
      elapsed = (self.end_date - current_date)
      d_elapsed = elapsed.days
      progress = (float(d_total) - float(d_elapsed)) / float(d_total) * 100.0
      if (d_elapsed % display_int == 0):
        print('%i/100' % int(progress))
      
    self.traders[0].strategy.log_self()

  def plot(self):
    '''
    analyse & report on simulation path and outcome
    '''        
    d = date(self.start_date.year, self.start_date.month, self.start_date.day)
      
    dates = []
    prices = []
   
    cash_bal = []
    margin_bal = []
    net_booked_position = []
    net_open_position = []
    
    daily_high = []
    daily_low = []
    
    mavg_band_ceiling = []
    mavg_band_floor = []
    
    trader = self.broker.traders[0]   
    ac = trader.ac
   
    df = self.datafeed
      
    pMin = None
    pMax = None
      
    while (d <= self.end_date):          
      # TRADING DAYS
      if (self.datafeed.date_is_trading_day(d) == True):        
        dates.append(d) 
          
        mavg_top = df.n_day_moving_avg(None, d, 'high', Strategy.n)
        mavg_bottom = df.n_day_moving_avg(None, d, 'low', Strategy.n)        
          
        mavg_band_ceiling.append(mavg_top)
        mavg_band_floor.append(mavg_bottom)
        
        pinfo = df.get_price_info(None, d)        
        prices.append(pinfo['close'])
        daily_high.append(pinfo['high'])
        daily_low.append(pinfo['low'])
        
        s = str(d) + ',' + str(mavg_band_ceiling[len(mavg_band_ceiling) - 1]) + ',' + str(mavg_band_floor[len(mavg_band_floor) - 1]) + ',' + str(pinfo['close'])
        logging.info(s)
        
        cash_bal.append(ac.d_cash_bal[d])
        margin_bal.append(ac.d_margin_bal[d])
        net_booked_position.append(ac.d_net_booked_position[d])
        net_open_position.append(ac.net_open_position[d])
        
        if pMin is None:
          pMin = pinfo['low']
          pMax = pinfo['high']
        else:
          if pinfo['low'] < pMin:
            pMin = pinfo['low']
          if pinfo['high'] > pMax: 
            pMax = pinfo['high']       
                
      # NON-TRADING DAYS
      else:
        pass    
      d = d + timedelta(days=1)  

    aDate = np.array(dates)
    aPrice = np.array(prices)
    
    fig = plt.figure(figsize=(20, 20))
    
    ax = fig.add_subplot(111)
   
    #ax.plot(aDate, aPrice, color='blue')        
    
    for series in [mavg_band_ceiling, mavg_band_floor]:
      y = np.array(series)
      t = np.array(dates)
      ax.plot(t, y, color='red')  
    
    for series in [daily_high, daily_low]:
      y = np.array(series)
      t = np.array(dates)
      ax.plot(t, y, color='blue')      
    
    plt.ylim([float(pMin), float(pMax)])    

    for series in [net_booked_position]:
      y = np.array(series)
      t = np.array(dates)
      ax2 = ax.twinx()   
      ax2.plot(t, y, color='green')  
    
    ax.grid(False)
    fig.autofmt_xdate(rotation=90)
    
    fname = 'plot/plot_' + self.time_stamp
    fig.savefig(fname) 
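
A minimal driver for the Simulator above might look like the sketch below. Only the constructor signature, run(), and plot() come from the example; the instrument symbol, date range, opening balance, and the single-element strategy list are placeholder assumptions.

from datetime import date, datetime

# Hypothetical inputs -- substitute whatever your DataFeed and Strategy expect.
strategies = [Strategy()]                          # one Trader is created per strategy
stamp = datetime.now().strftime('%Y%m%d_%H%M%S')   # used for the plot filename

sim = Simulator(instrument='CL',                   # placeholder instrument symbol
                strategies=strategies,
                start_date=date(2012, 1, 1),
                end_date=date(2012, 12, 31),
                opening_bal=100000,
                time_stamp=stamp)
sim.run()     # steps day by day, skipping non-trading days
sim.plot()    # writes plot/plot_<time_stamp>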
Example #49
0
	def _broker(self, reqs, items):
		items = Broker._broker(self, reqs, items)
		if items:
			items = [items[self._nIndex % len(items)]]
			self._nIndex += 1
		return items
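
Example #49 overrides _broker so that successive calls hand back the candidate items one at a time, in rotation. Detached from the surrounding Broker machinery, the rotation logic reduces to the following self-contained sketch (not the project's class):

class RoundRobinPicker:
    """Return a single-item list per call, cycling through the pool."""
    def __init__(self, items):
        self._items = list(items)
        self._n_index = 0

    def pick(self):
        if not self._items:
            return []
        item = self._items[self._n_index % len(self._items)]
        self._n_index += 1
        return [item]

picker = RoundRobinPicker(['site_a', 'site_b', 'site_c'])
print([picker.pick()[0] for _ in range(5)])   # site_a, site_b, site_c, site_a, site_b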
Example #50
0
class ScaleClient(object):
    """This class parses a configuration file and subsequently creates, configures, and manages each component of the
    overall Scale Client software.
    """
    def __init__(self, quit_time=None, raise_errors=False):
        super(ScaleClient, self).__init__()

        self._quit_time = quit_time
        self._raise_errors = raise_errors

        self.__broker = None
        self.__reporter = None
        self.__sensors = None
        self.__applications = None
        self.__networks = None

    def setup_reporter(self, cfg=None):

        if self.__broker is None:
            self.setup_broker(cfg)

        self.__reporter = EventReporter(self.__broker)

    @property
    def event_reporter(self):
        """
        :rtype: EventReporter
        """
        return self.__reporter

    def setup_broker(self, cfg=None):
        """
        Currently only creates a dummy Broker object for registering Applications to.

        :param cfg:
        :return:
        """
        self.__broker = Broker()
        return self.broker

    @property
    def broker(self):
        """
        :rtype: Broker
        """
        return self.__broker

    def schedule_quit_time(self, quit_time):
        """
        Terminate the client and all Applications at the given time.
        :param quit_time: time to quit after in seconds
        :return:
        """
        # HACK: create a dummy app that just calls Broker.stop() at the requested quit_time.
        # We need to do it in an app like this to get the self variables bound in correctly.
        # This is circuits-specific!
        class QuitApp(Application):
            def _quitter(self):
                log.info("Stopping client...")
                self._broker.stop()

                # TODO: put this in a thread? that comes back and finishes quitting in case it hangs?
                # if you REALLY can't get the client to quit, do this:
                # os._exit(0)

        quit_app = QuitApp(self.__broker)
        quit_app.timed_call(quit_time, QuitApp._quitter)

    def run(self):
        """Currently just loop forever to allow the other threads to do their work."""

        if self._quit_time is not None:
            self.schedule_quit_time(self._quit_time)

        # Run the broker until it, and thus the whole scale client, have a stop event fully propagated
        self.__broker.run()

    def setup_sensors(self, configs):
        """
        Configure the sensors expressed in the name-keyed configs dict.
        :param configs:
        """
        self.__sensors = self.setup_components(configs, 'scale_client.sensors')

    def setup_networks(self, configs):
        """
        Configure the network components expressed in the name-keyed configs dict.
        :param configs:
        """
        self.__networks = self.setup_components(configs, 'scale_client.networks')

    def setup_applications(self, configs):
        """
        Configure the applications expressed in the name-keyed configs dict.
        :param configs:
        """
        self.__applications = self.setup_components(configs, 'scale_client.applications')

    def setup_components(self, configs, package_name, helper_fun=None, *args):
        """
        Iterate over each component configuration in configs, import the specified class
        (possibly using package_name as the root for the import statement), and then call
        helper_fun with the class and requested configuration to finish its setup.  If
        helper_fun isn't specified, the default simply calls the class's constructor
        with the given arguments parsed from the configuration.
        :param configs: a dict where each entry is a component to set up; each component has a unique
        name as its key and the value is a dict of parameters, which should at least include
         'class' for importing the class using python's import
        :type configs: dict
        :param package_name: root package_name for the class paths specified
         (these will be classes in the scale_client package e.g. sensors, event_sinks)
        :param helper_fun: responsible for creating the component in question and doing any bookkeeping
        :param args: these positional args will be passed to helper_fun
        :return: list of constructed classes
        """

        if helper_fun is None:
            def helper_fun(_class, broker, **kwargs):
                return _class(broker, *args, **kwargs)

        results = []

        for comp_name, cfg in list(configs.items()):
            # need to get class definition to call constructor
            if 'class' not in cfg:
                log.warn("Skipping %s config with no class definition: %s" % (comp_name, cfg))
                continue

            # try importing the specified class extended by package_name first, then just 'class' if error
            cls_name = '.'.join([package_name, cfg['class']])
            other_cls_name = cfg['class']
            try:
                cls = _get_class_by_name(cls_name)

            except ImportError as e:
                try:
                    cls = _get_class_by_name(other_cls_name)
                except ImportError as e2:
                    log.error("ImportErrors while creating %s class: %s\n"
                              "Did you remember to put the repository in your PYTHONPATH??? "
                              "skipping import..." % (other_cls_name, cfg))
                    log.debug("Errors were: %s (import %s)\n%s (import %s)" % (e, cls_name, e2, other_cls_name))
                    continue
            try: # building the class
                # copy the config so we can tweak it as necessary to expose only correct kwargs
                new_config = cfg.copy()
                new_config.pop('class')
                if 'name' not in new_config:
                    new_config['name'] = comp_name

                res = helper_fun(cls, self.__broker, *args, **new_config)
                results.append(res)
                log.info("%s created from config: %s" % (comp_name, cfg))

            except Exception as e:
                log.error("Unexpected error while creating %s class: %s\nError: %s" % (cls.__name__, cfg, e))
                if self._raise_errors:
                    raise

        return results

    @classmethod
    def build_from_configuration_parameters(cls, config_filename, args=None):
        """
        Builds an instance using the (optionally) specified configuration file.  If any args are specified
        (e.g. from parse_args()), they may overwrite the configurations in the file.  However,
        such args that create sensors, apps, etc. will be interpreted as additional
        parameters for components sharing the same names: IT'S UP TO YOU to ensure there aren't conflicts!
        If config_filename is None, we just build using the specified args.
        :param config_filename: optional filename to read config parameters from
        :param args: optional additional configuration arguments as parsed from command line
        :return:
        """

        # XXX: in case the user doesn't specify a name,
        # this will help auto-generate unique ones in a sequence.
        global __scale_client_n_anon_apps_added__
        __scale_client_n_anon_apps_added__ = 0

        if config_filename is None and args is None:
            raise ValueError("can't build from configuration parameters when both filename and args are None!")

        # Dummy config dict in case no config file
        cfg = {'eventsinks': {}, 'sensors': {}, 'applications': {}, 'networks': {}}

        if config_filename is not None:
            try:
                cfg = cls.load_configuration_file(config_filename)
                # log.debug("Final configuration: %s" % cfg)
            except IOError as e:
                log.error("Error reading config file: %s" % e)
                exit(1)

        def __make_event_sink(_class, broker, event_reporter, **config):
            res = _class(broker, **config)
            event_reporter.add_sink(res)
            return res

        ### BEGIN ACTUAL CONFIG FILE USAGE
        # We call appropriate handlers for each section in the appropriate order,
        # starting by getting any relevant command line parameters to create the client.

        client = cls(quit_time=args.quit_time, raise_errors=args.raise_errors)

        # TODO: include command line arguments when some are added
        if 'main' in cfg:
            client.setup_broker(cfg['main'])
            client.setup_reporter(cfg['main'])
        else:  # use defaults
            client.setup_broker({})
            client.setup_reporter({})

        # These components are all handled almost identically.

        # EventSinks
        configs = cls.__join_configs_with_args(cfg.get('eventsinks', {}), args.event_sinks \
            if args is not None and args.event_sinks is not None else [])
        client.setup_components(configs, 'scale_client.event_sinks', __make_event_sink, client.__reporter)

        # Set defaults if none were made
        if len(client.__reporter.get_sinks()) == 0:
            log.info("No event_sinks loaded: adding default LogEventSink")
            LogEventSink = None
            try:
                from ..event_sinks.log_event_sink import LogEventSink
            except ValueError:
                # relative import error when this script called directly (isn't a package)
                try:
                    from scale_client.event_sinks.log_event_sink import LogEventSink
                except ImportError as e:
                    log.error("can't import LogEventSink! Error: %s" % e)
                    exit(1)
            default_sink = LogEventSink(client.__broker)
            client.__reporter.add_sink(default_sink)

        # Sensors
        log.info("Setting up Sensors...")
        configs = cls.__join_configs_with_args(cfg.get('sensors', {}), args.sensors \
            if args is not None and args.sensors is not None else [], "anon_vs")
        client.setup_sensors(configs)

        # Networks
        log.info("Setting up Networks...")
        configs = cls.__join_configs_with_args(cfg.get('networks', {}), args.networks \
            if args is not None and args.networks is not None else [], "anon_network_app")
        client.setup_networks(configs)

        # Applications
        log.info("Setting up other Applications...")
        configs = cls.__join_configs_with_args(cfg.get('applications', {}), args.applications \
            if args is not None and args.applications is not None else [])
        client.setup_applications(configs)

        # TODO: set some defaults if no applications, sensors, or networking components are enabled (heartbeat?)

        return client

    @staticmethod
    def __join_configs_with_args(configs, relevant_args, anon_component_prefix="anon_app"):
        """
        Join the command-line arguments with the configuration file's parsed args in order to add to
        or even modify the file-specified configuration, if there even was one!
        :param configs:
        :type configs: dict
        :param relevant_args: list of individual YAML-encoded dict-like args, possibly without a top-level key for the name
        :param anon_component_prefix: for anonymous components (no name as config dict key), use this string with
        a unique sequential number appended
        :return:
        """
        new_configs = ScaleClient._parse_yaml_configs(relevant_args, anon_component_prefix)
        return ScaleClient.__merge_configs(new_configs, configs)

    @staticmethod
    def _parse_yaml_configs(args, anon_component_prefix="anon_app"):
        """
        Parses the given list of YAML-encoded config dicts and returns all of them as a dict.
        If one of the args doesn't have a top-level key for the name, a unique one is generated
        for you prefixed with the given parameter
        :param args:
        :param anon_component_prefix: prefix for 'anonymous' component configs with no name
        :return:
        """
        # Configuration files are basically nested dictionaries and the command-line arguments
        # are a list with each element being a dictionary. If the dict in the args has the key
        # 'class', then it is anonymous and we should just give it a sequential unique name to
        # ensure it is run.  If, however, it does not, then we should assume that it's a NAMED
        # configuration and so we can actually use that to overwrite/modify the configurations
        # pulled in from a file.

        new_configs = {}
        for arg in args:
            try:
                arg = yaml.safe_load(arg)  # safe_load also satisfies newer PyYAML's explicit-Loader requirement
            except (yaml.parser.ParserError, yaml.scanner.ScannerError) as e:
                raise ValueError("error parsing manual configuration: %s\nError:%s" % (arg, e))

            # If this config is anonymous, give it a unique name and add it to configs
            # since it couldn't possibly overwrite another config entry.
            # NOTE: if user specified a 'name' entry directly, we will still take that later on...
            if 'class' in arg:
                # TODO: perhaps register these names somewhere to ensure uniqueness?
                global __scale_client_n_anon_apps_added__
                unique_key = anon_component_prefix + str(__scale_client_n_anon_apps_added__)
                __scale_client_n_anon_apps_added__ += 1
                new_configs[unique_key] = arg
            else:
                try:
                    new_configs.update(arg)
                except TypeError as e:
                    raise ValueError("error in your manual configuration: %s\n"
                                     "couldn't be interpreted as a dict due to error: %s" % (arg, e))

        return new_configs

    @classmethod
    def __merge_configs(cls, a, b, path=None):
        """Merges values found in b into a if a didn't have them. It does this
        recursively so that dicts as values will be merged too.  If they're lists
        they are merged in a similar way except that the top-level key of each contained
        dict is assumed to be the unique id so that, for example, two entries of
        'TestApp' will take the first one even if the parameters are different."""

        # This function is modeled after:
        # https://stackoverflow.com/questions/7204805/dictionaries-of-dictionaries-merge

        if path is None: path = []

        for key in b:
            if key in a:
                if isinstance(a[key], dict) and isinstance(b[key], dict):
                    cls.__merge_configs(a[key], b[key], path + [str(key)])
                elif isinstance(a[key], list) and isinstance(b[key], list):
                    # Easy enough to merge lists of non-dict items...
                    try:
                        al = set(a[key])
                        bl = set(b[key])
                        bl.update(al)
                        a[key] = list(bl)
                    except (TypeError, AttributeError) as e:
                        log.warning('problem merging lists when merging configurations '
                                    '(are there dict args in one at path %s?):'
                                    '\n%s\n%s\nKeeping the first one due to error %s'
                                    % (path, a[key], b[key], e))
                else:
                    # Same key, but different value type.  This is difficult to
                    # handle so we just keep the value of the first one.
                    pass
            else:
                a[key] = b[key]
        return a

    @classmethod
    def load_configuration_file(cls, config_filename):
        """
        Reads the YAML-based configuration file specified and optionally recurses on itself
        to read other config files that were included in this one.
        :param config_filename:
        :return: the dict-like configuration
        """
        log.info("Reading config file: %s" % config_filename)
        with open(config_filename) as cfile:
            cfg = yaml.safe_load(cfile)  # safe_load also satisfies newer PyYAML's explicit-Loader requirement
            # lower-case all top-level headings to tolerate different capitalizations
            # also filter out any sections that didn't include anything
            cfg = {k.lower(): v for k, v in cfg.items() if v is not None}

            # Recursively call this function and merge the results back together
            if 'main' in cfg and 'include_config_files' in cfg['main']:
                # We want to do the update in reverse order so that the top-level config file
                # overwrites the lower-level ones (same with left-most sub config).
                sub_cfgs = [cfg]

                for sub_fname in cfg['main']['include_config_files']:
                    log.debug("recursively loading config file %s" % sub_fname)
                    sub_fname = cls._build_config_file_path(sub_fname)
                    sub_cfg = cls.load_configuration_file(sub_fname)
                    sub_cfgs.append(sub_cfg)

                cfg = reduce(cls.__merge_configs, sub_cfgs)

        return cfg

    @classmethod
    def _build_config_file_path(cls, filename):
        """Returns the complete path to the given config filename,
        assuming it's been placed in the proper 'config' directory
        or the filename is an absolute path."""
        if os.path.exists(filename):
            return filename
        res = os.path.join(os.path.dirname(__file__), '..', 'config', filename)
        if not os.path.exists(res):
            raise ValueError("requested config file %s does not exist!" % filename)
        return res

    @classmethod
    def parse_args(cls, args=None):
        """
        Parses the given arguments (formatted like sys.argv) or returns default configuration if None specified.
        :param args:
        :return:
        """
        # ArgumentParser.add_argument(name or flags...[, action][, nargs][, const][, default][, type][, choices][, required][, help][, metavar][, dest])
        # action is one of: store[_const,_true,_false], append[_const], count
        # nargs is one of: N, ?(defaults to const when no args), *, +, argparse.REMAINDER
        # help supports %(var)s: help='default value is %(default)s'

        test_default_quit_time = 20
        test_config_filename = 'test_config.yml'
        default_config_filename = cls._build_config_file_path('default_config.yml')

        parser = argparse.ArgumentParser(description='''Scale Client main process.  You can specify a configuration
        file for generating the client's components that will run and/or manually configure them using command
        line arguments (NOTE: these may overwrite parameters in the configuration file if there are conflicts,
        but things like sensors, apps, etc. will be interpreted as additional components).''')

        # Configure what components run
        config_group = parser.add_mutually_exclusive_group()
        config_group.add_argument('--config-file', '-f', type=str, dest='config_filename',
                            default=None,
                            help='''file from which to read configuration (NOTE: if you don't
                            specify an absolute path, SCALE assumes you're referring to a relative
                            path within the 'config' directory).  Default is %s when no
                            manual configurations specified.  If manual configurations are in use,
                            no configuration file is used unless you explicitly set one.''' % default_config_filename)
        config_group.add_argument('--test', '-t', action='store_true',
                            help='''run client with simple test configuration found in file %s
                            (publishes dummy sensor data to simple logging sink).
                            It also quits after %d seconds if you don't specify --quit-time'''
                                 % (test_config_filename, test_default_quit_time))

        # Manually configure components
        parser.add_argument('--sensors', '-s', type=str, nargs='+', default=None,
                            help='''manually specify sensors (and their configurations) to run.
                            Arguments should be in YAML format (JSON is a subset of YAML!)
                            e.g. can specify two sensors using:
                            --sensors '{class: "network.heartbeat_virtual_sensor.HeartbeatSensor",
                            interval: 5}' '{class:
                             "dummy.dummy_gas_virtual_sensor.DummyGasPhysicalSensor",
                             name: "gas0", interval: 3}'

                            Alternatively, you can also assign a name to your custom component, which
                            can be used to overwrite or modify one of the same name in your configuration
                            file such as the following to change the actual class definition used:
                            '{TempSensor: {class: "environment.usb_temperature_virtual_sensor.UsbTemperaturePhysicalSensor"}'
                            ''')
        parser.add_argument('--applications', '-a', type=str, nargs='+', default=None,
                            help='''manually specify applications (and their configurations) to run.
                            See --sensors help description for example.''')
        parser.add_argument('--event-sinks', '-e', type=str, nargs='+', default=None, dest='event_sinks',
                            help='''manually specify event sinks (and their configurations) to run.
                            See --sensors help description for example.''')
        parser.add_argument('--networks', '-n', type=str, nargs='+', default=None,
                            help='''manually specify network components (and their configurations) to run.
                            See --sensors help description for example.''')

        # Configure logging
        parser.add_argument('--log-level', type=str, default='WARNING', dest='log_level',
                            help='''one of debug, info, error, warning''')
        parser.add_argument('--enable-log-modules', dest='enable_log_modules', nargs='+', default=(),
                            help='''explicitly enable logging for the specified modules
                            (by default all of %s are disabled)''' % DEFAULT_DISABLED_LOG_MODULES)
        parser.add_argument('--disable-log-modules', dest='disable_log_modules', nargs='+', default=(),
                            help='''explicitly disable logging for the specified modules''')
        parser.add_argument('--format-logging', type=str, default=DEFAULT_LOG_FORMAT, dest='log_format',
                            help='''formats logging as per the given argument (default=%(default)s).
                            NOTE: add timestamp by doing %%(asctime)s''')

        # Misc config params
        parser.add_argument('--quit-time', '-q', type=int, default=None, dest='quit_time',
                            help='''quit the client after specified number of seconds
                             (default is to never quit)''')
        parser.add_argument('--raise-errors', action='store_true', dest='raise_errors',
                            help='''when constructing a component, raise the (non-import) errors to allow printing
                            a stack trace rather than trying to gracefully skip it and logging the error''')

        parsed_args = parser.parse_args(args if args is not None else [])

        # Correct configuration filename
        if parsed_args.test:
            parsed_args.config_filename = cls._build_config_file_path(test_config_filename)
        elif parsed_args.config_filename is not None:
            parsed_args.config_filename = cls._build_config_file_path(parsed_args.config_filename)
        # Set default config file if no files or manual configurations are specified
        elif parsed_args.config_filename is None and not any((parsed_args.sensors, parsed_args.applications,
                                                             parsed_args.event_sinks, parsed_args.networks)):
            parsed_args.config_filename = default_config_filename

        # Testing configuration quits after a time
        if parsed_args.test and parsed_args.quit_time is None:
            parsed_args.quit_time = test_default_quit_time

        return parsed_args
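
A typical entry point for Example #50 simply chains the classmethods shown above: parse the command line, build a client from the resulting namespace, and run it. The main() wrapper below is an assumption; parse_args, build_from_configuration_parameters, and run are the calls defined in the example.

import sys

def main():
    args = ScaleClient.parse_args(sys.argv[1:])
    client = ScaleClient.build_from_configuration_parameters(
        args.config_filename, args)
    client.run()    # blocks until the broker propagates a stop event

if __name__ == '__main__':
    main()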
Example #51
0
 def _scan(self):
     Log.write('fifty.py scan()')
     """ (see strategy.py for documentation) """
     # If we're babysitting a trade, don't open a new one.
     if len(self.open_trade_ids) > 0:
         Log.write('fifty.py _scan(): Trades open; no suggestions.')
         return None
     instrument = Instrument(Instrument.get_id_from_name('USD_JPY'))
     spreads = Broker.get_spreads([instrument])
     if spreads is None:
         Log.write('"fifty.py" in _scan(): Failed to get spread of {}.'
             .format(instrument.get_name())) 
         raise Exception
     elif len(spreads) < 1:
         Log.write('"fifty.py" in _scan(): len(spreads) == {}.'
             .format(len(spreads))) 
         raise Exception
     # This only checks for one instrument.
     elif not spreads[0]['tradeable']:
             Log.write('"fifty.py" in _scan(): Instrument {} not tradeable.'
                 .format(instrument.get_name())) 
             return None
     else:
         spread = spreads[0]['spread']
         if spread < 3:
             Log.write('fifty.py _scan(): spread = {}'.format(spread))
             if self.go_long: # buy
                 Log.write('"fifty.py" _scan(): Going long.') 
                 cur_bid = Broker.get_bid(instrument)
                 if cur_bid != None:
                     # Rounding the raw bid didn't prevent float inaccuracy
                     # cur_bid = round(cur_bid_raw, 2)
                     tp = round(cur_bid + self.tp_price_diff, 2)
                     sl = round(cur_bid - self.sl_price_diff, 2)
                 else:
                     Log.write('"fifty.py" in _scan(): Failed to get bid.')
                     raise Exception
             else: # sell
                 Log.write('"fifty.py" _scan(): Shorting.') 
                 self.go_long = False
                 cur_bid = Broker.get_bid(instrument)
                 if cur_bid != None:
                     tp = round(cur_bid - self.tp_price_diff, 2)
                     sl = round(cur_bid + self.sl_price_diff, 2)
                 else:
                     Log.write('"fifty.py" in _scan(): Failed to get ask.') 
                     raise Exception
             # Prepare the order and sent it back to daemon.
             units = 1 if self.go_long else -1
             confidence = 50
             order = Order(
                 instrument=instrument,
                 order_type="MARKET", # matches Oanda's OrderType definition
                 stop_loss={ "price" : str(sl) },
                 take_profit={ "price" : str(tp) },
                 units=units
             )
             reason = 'happy day'
             opp = Opportunity(order, confidence, self, reason)
             Log.write('"fifty.py" _scan(): Returning opportunity with \
                 order:\n{}'.format(opp))
             return opp
         else:
             Log.write('fifty.py _scan(): Spread is high; no suggestions.')
             return None
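
The long branch of Example #51 places the take-profit above and the stop-loss below the current bid, rounding both to two decimal places as the code does. A quick arithmetic check with illustrative numbers (the price and offsets are not from the example):

cur_bid = 147.532          # illustrative USD_JPY bid
tp_price_diff = 0.10
sl_price_diff = 0.05

tp = round(cur_bid + tp_price_diff, 2)   # 147.63
sl = round(cur_bid - sl_price_diff, 2)   # 147.48
print(tp, sl)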
Example #52
0
class App(object):

    def __init__(self, name, config=None):
        self.name = name
        self.config = Configure(config or {})  # avoid a shared mutable default argument
        self.__init_local_and_global()
        self.__init_logger()
        self.__init_broker_and_processors()
        self.__init_runtime_tree_storage()

    def __init_local_and_global(self):
        local_defaults = {
            'event_handler_map': dict,
        }
        self.__local = Local(**local_defaults)
        self.__global = self
        self.__global.event_handler_map = {}

    def __init_logger(self):
        self.logger = logging.getLogger(self.name)
        self.logger.propagate = 0
        formatter = logging.Formatter(
            '[%(asctime)s: %(levelname)s] %(message)s')
        if self.config.debug:
            ch = logging.StreamHandler(sys.stdout)
            ch.setFormatter(formatter)
            ch.setLevel(logging.DEBUG)
            self.logger.addHandler(ch)
            self.logger.setLevel(logging.DEBUG)
        else:
            fh = logging.FileHandler(self.config.log_path)
            fh.setFormatter(formatter)
            fh.setLevel(logging.INFO)
            self.logger.addHandler(fh)
            self.logger.setLevel(logging.INFO)

    def __init_broker_and_processors(self):
        self.__broker = Broker(self)
        self.__event_processor = EventProcessor(0, self)
        self.__event_processors = list()
        for i in range(self.config.processor_num):
            id = i + 1
            event_channel = EventChannel()
            event_channel.register(self.__broker)
            event_processor = EventProcessor(id, self, event_channel)
            self.__event_processors.append(event_processor)

    def __init_runtime_tree_storage(self):
        self.runtime_tree_storage = RuntimeTreeStorage(self.config.runtime_db)

    def __load_include_modules(self):
        pass

    def start(self):
        self.__broker.start()
        for event_processor in self.__event_processors:
            event_processor.start()

    def stop(self):
        self.__broker.stop()
        for event_processor in self.__event_processors:
            event_processor.stop()

    def find_handlers(self, event_name):
        handlers = []
        if event_name in self.__global.event_handler_map:
            handlers.extend(self.__global.event_handler_map[event_name])
        if event_name in self.__local.event_handler_map:
            handlers.extend(self.__local.event_handler_map[event_name])
        return handlers

    def on(self, event_name, handler, local=False):
        if local:
            if event_name not in self.__local.event_handler_map:
                self.__local.event_handler_map[event_name] = []
            self.__local.event_handler_map[event_name].append(handler)
        else:
            if event_name not in self.__global.event_handler_map:
                self.__global.event_handler_map[event_name] = []
            self.__global.event_handler_map[event_name].append(handler)

    def fire(self, event, background=True):
        if background:
            self.__broker.put(event)
        else:
            return self.__event_processor.process_event(event)

    def handler(self, event_name, throws_events=list(), local=False):
        def decorator(func):
            self.on(event_name, Handler(func, throws_events), local)
            def wrapper(*args, **kw):
                return func(*args, **kw)
            return wrapper
        return decorator
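
To tie Example #52 together, a usage sketch might register a handler with the decorator and fire an event through the broker. The config keys mirror those read in __init_logger and __init_broker_and_processors; how Configure exposes them, the Event type and its constructor, and the handler call signature are assumptions.

app = App('demo', config={'debug': True,
                          'processor_num': 2,
                          'runtime_db': 'runtime.db'})

@app.handler('order.created')            # registered globally via App.on()
def on_order_created(event):
    app.logger.info('handling %s', event)

app.start()
app.fire(Event('order.created'), background=True)   # Event type assumed; queued on the broker
app.stop()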
Example #53
0
    def run(cls, stdcsr):
        """Returns: void
        This is the main program loop.
        """
        # initialize user interface
        cls._curses_init(stdcsr)

        # Read in existing trades
        while not cls.stopped and cls.recover_trades() is None:
            Log.write('"daemon.py" run(): Recovering trades...')
            cls._curses_refresh(stdcsr)

        # logging
        if Config.live_trading:
            Log.write('"daemon.py" start(): Using live account.')
        else:
            Log.write('"daemon.py" start(): Using practice mode.')

        """
        Main loop:
        1. Gather opportunities from each strategy.
        2. Decide which opportunities to execute.
        3. Clear the opportunity list.
        """
        while not cls.stopped:
            # refresh user interface
            cls._curses_refresh(stdcsr)

            # Let each strategy suggest an order
            for s in cls.strategies:
                new_opp = s.refresh()
                if new_opp is None:
                    Log.write('daemon.py run(): {} has nothing to offer now.'
                        .format(s.get_name()))
                    pass
                else:
                    cls.opportunities.push(new_opp)
        
            # Decide which opportunity (or opportunities) to execute
            Log.write('"daemon.py" run(): Picking best opportunity...')
            best_opp = cls.opportunities.pick()
            if best_opp is None:
                # Nothing is being suggested.
                pass
            else:
                # An order was suggested by a strategy, so place the order.
                #   Don't use all the money available.
                SLIPPAGE_WIGGLE = 0.95
                ###available_money = Broker.get_margin_available(Config.account_id) * SLIPPAGE_WIGGLE
                available_money = 100 # USD - testing
                #   Get the current price of one unit.
                instrument_price = 0
                Log.write('best opp: {}'.format(best_opp))
                go_long = best_opp.order.units > 0
                if go_long:
                    instrument_price = Broker.get_ask(best_opp.order.instrument)
                else:
                    instrument_price = Broker.get_bid(best_opp.order.instrument)
                #   How much leverage available:
                margin_rate = Broker.get_margin_rate(best_opp.order.instrument) 
                #   TODO: A bit awkward, but overwrite the existing value that was used to 
                #   determine long/short.
                units = available_money
                units /= cls.num_strategies_with_no_positions() # save money for other strategies
                units /= margin_rate
                units = int(units) # floor
                if units <= 0: # verify
                    Log.write('daemon.py run(): units <= 0')
                    raise Exception # abort
                if not go_long: # negative means short
                    units = -units
                best_opp.order.units = units
                Log.write('daemon.py run(): Executing opportunity:\n{}'.format(best_opp))
                order_result = Broker.place_order(best_opp.order)
                # Notify the strategies.
                if 'orderFillTransaction' in order_result:
                    try:
                        opened_trade_id = order_result['orderFillTransaction']['tradeOpened']['tradeID']
                        best_opp.strategy.trade_opened(trade_id=opened_trade_id)
                    except Exception:
                        Log.write(
                            'daemon.py run(): Failed to extract opened trade from order result:\n{}'
                            .format(order_result) )
                        raise Exception
                elif 'tradesClosed' in order_result:
                    try:
                        for trade in order_result['orderFillTransaction']['tradesClosed']:
                            best_opp.strategy.trade_closed(trade_id=trade['tradeID'])
                    except Exception:
                        Log.write(
                            'daemon.py run(): Failed to extract closed trades from order result:\n{}'
                            .format(order_result) )
                        raise Exception
                elif 'tradeReduced' in order_result:
                    try:
                        closed_trade_id = order_result['orderFillTransaction']['tradeReduced']['tradeID']
                        best_opp.strategy.trade_reduced(
                            closed_trade_id,
                            instrument_id=Instrument.get_id_from_name(order_result['instrument'])
                        )
                    except Exception:
                        Log.write(
                            'daemon.py run(): Failed to extract reduced trades from order result:\n{}'
                            .format(order_result) )
                        raise Exception
                else:
                    Log.write(
                        '"daemon.py" run(): Unrecognized order result:\n{}'
                        .format(order_result) )
                    raise Exception

            """
            Clear opportunity list.
            Opportunities should be considered to exist only in the moment,
            so there is no need to save them for later.
            """
            cls.opportunities.clear()
        """
        Shutdown stuff. This runs after shutdown() is called, and is the
        last code that runs before returning to algo.py.
        """
        DB.shutdown()  # atexit() used in db.py, but call to be safe.
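
The unit-sizing block in Example #53 splits the available margin evenly across the strategies that currently hold no position, converts the currency amount into instrument units via the margin rate, floors the result, and negates it for shorts. As a standalone check of that arithmetic (all numbers illustrative):

def size_units(available_money, idle_strategies, margin_rate, go_long):
    """Mirrors the sizing arithmetic from Example #53's main loop."""
    units = available_money / idle_strategies / margin_rate
    units = int(units)                  # floor, as in the example
    if units <= 0:
        raise ValueError('units <= 0 -- not enough margin to place an order')
    return units if go_long else -units

# 100 USD split across 2 idle strategies at a 2% margin rate -> 2500 units long
print(size_units(100, 2, 0.02, go_long=True))   # 2500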