Example #1
    def handle_data(self, data, *args, **kwargs):
        """
        Point of entry. Process an event frame.
        """
        # extract dates
        dts = [event.dt for event in itervalues(data._data)]
        # we have to provide the event with a dt. This is only for
        # checking if the event is outside the window or not so a
        # couple of seconds shouldn't matter. We don't add it to
        # the data parameter, because it would mix dt with the
        # sid keys.
        event = Event()
        event.dt = max(dts)
        event.data = {k: v.__dict__ for k, v in iteritems(data._data)
                      # Need to check if data has a 'length' to filter
                      # out sids without trade data available.
                      # TODO: expose more of 'no trade available'
                      # functionality to zipline
                      if len(v)}

        # only modify the trailing window if this is
        # a new event. This is intended to make handle_data
        # idempotent.
        if self.last_dt < event.dt:
            self.updated = True
            self._append_to_window(event)
        else:
            self.updated = False

        # return newly computed or cached value
        return self.get_transform_value(*args, **kwargs)
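
The guard around _append_to_window is what makes handle_data safe to replay. Below is a minimal standalone sketch of that "only append when the event is newer" idea; TrailingWindowStub and maybe_append are illustrative names, not part of zipline.

import pandas as pd

class TrailingWindowStub(object):
    """Illustrative stand-in: append a frame only when its dt is newer."""
    def __init__(self):
        self.last_dt = None
        self.window = []
        self.updated = False

    def maybe_append(self, event_dt, payload):
        # Mirrors the guard above: replayed frames leave the window untouched.
        if self.last_dt is None or self.last_dt < event_dt:
            self.updated = True
            self.window.append((event_dt, payload))
            self.last_dt = event_dt
        else:
            self.updated = False

stub = TrailingWindowStub()
dt = pd.Timestamp('2012-07-30 09:31', tz='UTC')
stub.maybe_append(dt, {'price': 3.0})
stub.maybe_append(dt, {'price': 3.0})   # same dt replayed: ignored
print(len(stub.window), stub.updated)   # 1 False
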
Example #2
    def handle_data(self, data, *args, **kwargs):
        """
        Point of entry. Process an event frame.
        """
        # extract dates
        dts = [event.datetime for event in data.itervalues()]
        # we have to provide the event with a dt. This is only for
        # checking if the event is outside the window or not so a
        # couple of seconds shouldn't matter. We don't add it to
        # the data parameter, because it would mix dt with the
        # sid keys.
        event = Event()
        event.dt = max(dts)
        event.data = {k: v.__dict__ for k, v in data.iteritems()
                      # Need to check if data has a 'length' to filter
                      # out sids without trade data available.
                      # TODO: expose more of 'no trade available'
                      # functionality to zipline
                      if len(v)}

        # only modify the trailing window if this is
        # a new event. This is intended to make handle_data
        # idempotent.
        if self.last_dt < event.dt:
            self.updated = True
            self._append_to_window(event)
        else:
            self.updated = False

        # return newly computed or cached value
        return self.get_transform_value(*args, **kwargs)
Example #3
    def handle_data(self, data, *args, **kwargs):
        """
        New method to handle a data frame as sent to the algorithm's
        handle_data method.
        """
        # extract dates
        # dts = [data[sid].datetime for sid in self.sids]
        dts = [event.datetime for event in data.itervalues()]
        # we have to provide the event with a dt. This is only for
        # checking if the event is outside the window or not so a
        # couple of seconds shouldn't matter. We don't add it to
        # the data parameter, because it would mix dt with the
        # sid keys.
        event = Event()
        event.dt = max(dts)
        event.data = {
            k: v.__dict__
            for k, v in data.iteritems()
            # Need to check if data has a 'length' to filter
            # out sids without trade data available.
            # TODO: expose more of 'no trade available'
            # functionality to zipline
            if len(v)
        }

        # append data frame to window. update() will call handle_add() and
        # handle_remove() appropriately
        self.update(event)

        # return newly computed or cached value
        return self.get_transform_value(*args, **kwargs)
Example #4
    def test_handle_data_on_market(self):
        """
        Ensure that handle_data is only called on market minutes.

        i.e. events that come in at midnight should be processed at market
        open.
        """
        from zipline.finance.trading import SimulationParameters
        sim_params = SimulationParameters(
            period_start=datetime(2012, 7, 30, tzinfo=pytz.utc),
            period_end=datetime(2012, 7, 30, tzinfo=pytz.utc),
            data_frequency='minute')
        algo = TestAlgo(self, sim_params=sim_params)

        midnight_custom_source = [
            Event({
                'custom_field': 42.0,
                'sid': 'custom_data',
                'source_id': 'TestMidnightSource',
                'dt': pd.Timestamp('2012-07-30', tz='UTC'),
                'type': DATASOURCE_TYPE.CUSTOM
            })
        ]
        minute_event_source = [
            Event({
                'volume': 100,
                'price': 200.0,
                'high': 210.0,
                'open_price': 190.0,
                'low': 180.0,
                'sid': 8229,
                'source_id': 'TestMinuteEventSource',
                'dt': pd.Timestamp('2012-07-30 9:31 AM',
                                   tz='US/Eastern').tz_convert('UTC'),
                'type': DATASOURCE_TYPE.TRADE
            })
        ]

        algo.set_sources([midnight_custom_source, minute_event_source])

        gen = algo.get_generator()
        # Consume the generator
        list(gen)

        # Though the events had different time stamps, handle data should
        # have only been called once, at the market open.
        self.assertEqual(algo.num_bars, 1)
Example #5
def create_split(sid, ratio, date):
    return Event({
        'sid': sid,
        'ratio': ratio,
        'dt': date.replace(hour=0, minute=0, second=0, microsecond=0),
        'type': DATASOURCE_TYPE.SPLIT
    })
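
A hedged usage sketch for the helper above, assuming create_split and its Event / DATASOURCE_TYPE imports are already in scope; the sid and ratio values are made up.

from datetime import datetime
import pytz

# Assumes create_split (above) plus its Event / DATASOURCE_TYPE imports are in scope.
split = create_split(
    sid=8229,                 # illustrative sid
    ratio=0.5,                # e.g. a 2-for-1 split expressed as a price ratio
    date=datetime(2012, 7, 30, 15, 45, tzinfo=pytz.utc),
)
print(split.sid, split.ratio, split.dt)   # dt is truncated to midnight of the same day
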
Example #6
 def create_close_position_event(asset):
     event = Event({
         'dt': dt,
         'type': DATASOURCE_TYPE.CLOSE_POSITION,
         'sid': asset.sid,
     })
     return event
Example #7
    def _create_data_generator(self, source_filter):
        """
        Create a merged data generator using the sources and
        transforms attached to this algorithm.

        ::source_filter:: is a method that receives events in date
        sorted order, and returns True for those events that should be
        processed by the zipline, and False for those that should be
        skipped.
        """
        benchmark_return_source = [
            Event({'dt': ret.date,
                   'returns': ret.returns,
                   'type': zipline.protocol.DATASOURCE_TYPE.BENCHMARK,
                   'source_id': 'benchmarks'})
            for ret in trading.environment.benchmark_returns
            if ret.date.date() >= self.sim_params.period_start.date()
            and ret.date.date() <= self.sim_params.period_end.date()
        ]

        date_sorted = date_sorted_sources(*self.sources)

        if source_filter:
            date_sorted = ifilter(source_filter, date_sorted)

        with_tnfms = sequential_transforms(date_sorted,
                                           *self.transforms)
        with_alias_dt = alias_dt(with_tnfms)

        with_benchmarks = date_sorted_sources(benchmark_return_source,
                                              with_alias_dt)

        # Group together events with the same dt field. This depends on the
        # events already being sorted.
        return groupby(with_benchmarks, attrgetter('dt'))
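
The final groupby call only works because the merged stream is already sorted by dt; groupby merges consecutive equal keys only. A minimal standard-library illustration, with a namedtuple standing in for zipline's Event:

from collections import namedtuple
from itertools import groupby
from operator import attrgetter

# Stand-in for an already date-sorted event stream (not zipline's Event class).
SimpleEvent = namedtuple('SimpleEvent', ['dt', 'sid'])
stream = [
    SimpleEvent(dt='2012-07-30 09:31', sid=1),
    SimpleEvent(dt='2012-07-30 09:31', sid=2),
    SimpleEvent(dt='2012-07-30 09:32', sid=1),
]

# groupby only merges *consecutive* items with equal keys, which is why the
# merged generator must already be sorted by dt before this call.
for dt, events in groupby(stream, attrgetter('dt')):
    print(dt, [e.sid for e in events])
# 2012-07-30 09:31 [1, 2]
# 2012-07-30 09:32 [1]
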
Example #8
    def test_volume_share_slippage(self):
        event = Event({
            'volume': 200,
            'type': 4,
            'price': 3.0,
            'datetime': datetime.datetime(2006, 1, 5, 14, 31, tzinfo=pytz.utc),
            'high': 3.15,
            'low': 2.85,
            'sid': 133,
            'source_id': 'test_source',
            'close': 3.0,
            'dt': datetime.datetime(2006, 1, 5, 14, 31, tzinfo=pytz.utc),
            'open': 3.0
        })

        slippage_model = VolumeShareSlippage()

        open_orders = [
            Order(
                **{
                    'dt': datetime.datetime(
                        2006, 1, 5, 14, 30, tzinfo=pytz.utc),
                    'amount': 100,
                    'filled': 0,
                    'sid': 133
                })
        ]

        orders_txns = list(slippage_model.simulate(event, open_orders))

        self.assertEquals(len(orders_txns), 1)
        _, txn = orders_txns[0]

        expected_txn = {
            'price': float(3.01875),
            'dt': datetime.datetime(2006, 1, 5, 14, 31, tzinfo=pytz.utc),
            'amount': int(50),
            'sid': int(133),
            'commission': None,
            'type': DATASOURCE_TYPE.TRANSACTION,
            'order_id': open_orders[0].id
        }

        self.assertIsNotNone(txn)

        # TODO: Make expected_txn a Transaction object and ensure there
        # is a __eq__ for that class.
        self.assertEquals(expected_txn, txn.__dict__)
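
The expected fill of 50 shares at 3.01875 can be reproduced by hand if the model uses what I understand to be VolumeShareSlippage's defaults (volume_limit=0.25, price_impact=0.1): the fill is capped at 25% of the bar's volume, and the price is pushed up by price_impact times the squared volume share. A sketch of that arithmetic, shown as an illustration rather than the library's implementation:

bar_volume = 200         # from the event above
order_amount = 100       # from the open order above
volume_limit = 0.25      # assumed default
price_impact = 0.1       # assumed default
bar_price = 3.0

filled = int(min(order_amount, bar_volume * volume_limit))    # 50 shares
volume_share = min(filled / float(bar_volume), volume_limit)  # 0.25
impact = price_impact * volume_share ** 2                     # 0.00625
fill_price = bar_price + bar_price * impact                   # ~3.01875 for a buy
print(filled, fill_price)
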
Example #9
def create_txn(sid, price, amount, datetime):
    txn = Event({
        'sid': sid,
        'amount': amount,
        'dt': datetime,
        'price': price,
    })
    return txn
Example #10
def create_commission(sid, value, datetime):
    txn = Event({
        'dt': datetime,
        'type': DATASOURCE_TYPE.COMMISSION,
        'cost': value,
        'sid': sid
    })
    return txn
Example #11
def create_txn(sid, price, amount, datetime):
    txn = Event({
        'sid': sid,
        'amount': amount,
        'dt': datetime,
        'price': price,
        'type': DATASOURCE_TYPE.TRANSACTION
    })
    return txn
Example #12
def create_txn(sid, price, amount, datetime):
    txn = Event({
        'sid': sid,
        'amount': amount,
        'dt': datetime,
        'price': price,
        'type': DATASOURCE_TYPE.TRANSACTION,
        'source_id': 'MockTransactionSource'
    })
    return txn
Example #13
def benchmark_events_in_range(sim_params):
    return [
        Event({'dt': dt,
               'returns': ret,
               'type':
               zipline.protocol.DATASOURCE_TYPE.BENCHMARK,
               'source_id': 'benchmarks'})
        for dt, ret in trading.environment.benchmark_returns.iterkv()
        if dt.date() >= sim_params.period_start.date()
        and dt.date() <= sim_params.period_end.date()
    ]
Example #14
    def handle_data(self, data, *args, **kwargs):
        """
        New method to handle a data frame as sent to the algorithm's
        handle_data method.
        """
        # extract dates
        #dts = [data[sid].datetime for sid in self.sids]
        dts = [event.datetime for event in data.itervalues()]
        # we have to provide the event with a dt. This is only for
        # checking if the event is outside the window or not so a
        # couple of seconds shouldn't matter. We don't add it to
        # the data parameter, because it would mix dt with the
        # sid keys.
        event = Event()
        event.dt = max(dts)
        event.data = {
            k: v.__dict__
            for k, v in data.iteritems()
            # Need to check if data has a 'length' to filter
            # out sids without trade data available.
            # TODO: expose more of 'no trade available'
            # functionality to zipline
            if len(v)
        }

        # only modify the trailing window if this is
        # a new event. This is intended to make handle_data
        # idempotent.
        if event not in self.ticks:
            # append data frame to window. update() will call handle_add() and
            # handle_remove() appropriately, and self.updated
            # will be modified based on the refresh_period
            self.update(event)
        else:
            # we are recalculating based on an old event, so
            # there is no change in the contents of the trailing
            # window
            self.updated = False

        # return newly computed or cached value
        return self.get_transform_value(*args, **kwargs)
Example #15
def create_dividend(sid, payment, declared_date, ex_date, pay_date):
    div = Event({
        'sid': sid,
        'gross_amount': payment,
        'net_amount': payment,
        'dt': declared_date.replace(hour=0, minute=0, second=0),
        'ex_date': ex_date.replace(hour=0, minute=0, second=0),
        'pay_date': pay_date.replace(hour=0, minute=0, second=0),
        'type': DATASOURCE_TYPE.DIVIDEND
    })

    return div
Example #16
def benchmark_events_in_range(sim_params):
    return [
        Event({
            'dt': dt,
            'returns': ret,
            'type': zp.DATASOURCE_TYPE.BENCHMARK,
            # We explicitly rely on the behavior that benchmarks sort before
            # any other events.
            'source_id': '1Abenchmarks'
        }) for dt, ret in trading.environment.benchmark_returns.iterkv()
        if dt.date() >= sim_params.period_start.date()
        and dt.date() <= sim_params.period_end.date()
    ]
Example #17
    def handle_data(self, data, *args, **kwargs):
        """
        New method to handle a data frame as sent to the algorithm's
        handle_data method.
        """
        # extract dates
        #dts = [data[sid].datetime for sid in self.sids]
        dts = [event.datetime for event in data.itervalues()]
        # we have to provide the event with a dt. This is only for
        # checking if the event is outside the window or not so a
        # couple of seconds shouldn't matter. We don't add it to
        # the data parameter, because it would mix dt with the
        # sid keys.
        event = Event()
        event.dt = max(dts)
        event.data = {k: v.__dict__ for k, v in data.iteritems()
                      # Need to check if data has a 'length' to filter
                      # out sids without trade data available.
                      # TODO: expose more of 'no trade available'
                      # functionality to zipline
                      if len(v)}

        # only modify the trailing window if this is
        # a new event. This is intended to make handle_data
        # idempotent.
        if event not in self.ticks:
            # append data frame to window. update() will call handle_add() and
            # handle_remove() appropriately, and self.updated
            # will be modified based on the refresh_period
            self.update(event)
        else:
            # we are recalculating based on an old event, so
            # there is no change in the contents of the trailing
            # window
            self.updated = False

        # return newly computed or cached value
        return self.get_transform_value(*args, **kwargs)
Example #18
    def _create_data_generator(self, source_filter, sim_params=None):
        """
        Create a merged data generator using the sources attached to this
        algorithm.

        ::source_filter:: is a method that receives events in date
        sorted order, and returns True for those events that should be
        processed by the zipline, and False for those that should be
        skipped.
        """
        if sim_params is None:
            sim_params = self.sim_params

        if self.benchmark_return_source is None:
            env = trading.environment
            if sim_params.data_frequency == 'minute' or \
               sim_params.emission_rate == 'minute':

                def update_time(date):
                    return env.get_open_and_close(date)[1]
            else:

                def update_time(date):
                    return date

            benchmark_return_source = [
                Event({
                    'dt': update_time(dt),
                    'returns': ret,
                    'type': zipline.protocol.DATASOURCE_TYPE.BENCHMARK,
                    'source_id': 'benchmarks'
                }) for dt, ret in
                trading.environment.benchmark_returns.iteritems()
                if dt.date() >= sim_params.period_start.date()
                and dt.date() <= sim_params.period_end.date()
            ]
        else:
            benchmark_return_source = self.benchmark_return_source

        date_sorted = date_sorted_sources(*self.sources)

        if source_filter:
            date_sorted = filter(source_filter, date_sorted)

        with_benchmarks = date_sorted_sources(benchmark_return_source,
                                              date_sorted)

        # Group together events with the same dt field. This depends on the
        # events already being sorted.
        return groupby(with_benchmarks, attrgetter('dt'))
Example #19
    def handle_data(self, data, *args, **kwargs):
        """
        New method to handle a data frame as sent to the algorithm's
        handle_data method.
        """
        # extract dates
        #dts = [data[sid].datetime for sid in self.sids]
        dts = [event.datetime for event in data.itervalues()]
        # we have to provide the event with a dt. This is only for
        # checking if the event is outside the window or not so a
        # couple of seconds shouldn't matter. We don't add it to
        # the data parameter, because it would mix dt with the
        # sid keys.
        event = Event()
        event.dt = max(dts)
        event.data = data

        # append data frame to window. update() will call handle_add() and
        # handle_remove() appropriately
        self.update(event)

        # return newly computed or cached value
        return self.get_transform_value(*args, **kwargs)
Example #20
def create_stock_dividend(sid, payment_sid, ratio, declared_date, ex_date,
                          pay_date):
    return Event({
        'sid': sid,
        'payment_sid': payment_sid,
        'ratio': ratio,
        'net_amount': None,
        'gross_amount': None,
        'dt': pd.tslib.normalize_date(declared_date),
        'ex_date': pd.tslib.normalize_date(ex_date),
        'pay_date': pd.tslib.normalize_date(pay_date),
        'type': DATASOURCE_TYPE.DIVIDEND,
        'source_id': 'MockDividendSource'
    })
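
pd.tslib.normalize_date comes from older pandas releases. On current pandas the same truncation to midnight is available as Timestamp.normalize(); a small sketch, shown only as a possible substitution:

import pandas as pd

declared = pd.Timestamp('2006-01-05 14:31', tz='UTC')
print(declared.normalize())   # 2006-01-05 00:00:00+00:00
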
Example #21
 def intersperse_custom_events(self, events):
     """
     Take a stream of events and return the same stream with a minimal event
     of type CUSTOM following each trade event.  Used to test graceful
     handling of CUSTOM events that are missing required transform fields.
     """
     return list(
         chain.from_iterable((event,
                              Event(
                                  initial_values={
                                      'dt': event.dt,
                                      'sid': event.sid,
                                      'source_id': "fake_custom_source",
                                      'type': DATASOURCE_TYPE.CUSTOM
                                  })) for event in events))
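
The interleaving relies on chain.from_iterable flattening (trade, custom) pairs back into a single stream. A minimal standard-library illustration with strings standing in for events:

from itertools import chain

trades = ['trade-1', 'trade-2', 'trade-3']
interspersed = list(
    chain.from_iterable((event, 'custom-after-' + event) for event in trades))
print(interspersed)
# ['trade-1', 'custom-after-trade-1', 'trade-2', 'custom-after-trade-2',
#  'trade-3', 'custom-after-trade-3']
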
Example #22
    def test_orders_stop(self, name, order_data, event_data, expected):
        order = Order(**order_data)
        event = Event(initial_values=event_data)

        slippage_model = VolumeShareSlippage()

        try:
            _, txn = slippage_model.simulate(event, [order]).next()
        except StopIteration:
            txn = None

        if expected['transaction'] is None:
            self.assertIsNone(txn)
        else:
            self.assertIsNotNone(txn)

            for key, value in expected['transaction'].items():
                self.assertEquals(value, txn[key])
Example #23
 def raw_data_gen(self):
     for row in self.rows:
         event_dict = {}
         for i, col in enumerate(self.cols):
             row_type = type(row[i])
             if row_type == datetime.datetime:
                 # localize any datetimes
                 dt = row[i]
                 dt = self.time_zone.localize(dt).astimezone(utc)
                 event_dict[col] = dt
             elif row_type == decimal.Decimal:
                 # cast any Decimal types into floats
                 event_dict[col] = float(row[i])
             else:
                 event_dict[col] = row[i]
         event_dict['type'] = self.datasource_type
         event = Event(event_dict)
         yield event
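
The datetime branch above hinges on localizing a naive timestamp and then converting it to UTC. A small pytz sketch of that conversion; the example timestamp is made up:

import datetime
import pytz

eastern = pytz.timezone('US/Eastern')
naive = datetime.datetime(2012, 7, 30, 9, 31)   # naive wall-clock time from a DB row
localized = eastern.localize(naive)             # attach the source time zone
as_utc = localized.astimezone(pytz.utc)         # convert to UTC for the event stream
print(as_utc)                                   # 2012-07-30 13:31:00+00:00
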
Example #24
    def zmq_event_gen(self, port=5555):
        context = zmq.Context()
        price_socket = context.socket(zmq.REQ)
        price_socket.connect('tcp://localhost:%s' % port)

        orders_socket = context.socket(zmq.PUB)
        orders_socket.bind('tcp://*:%s' % (port+1))

        for dt in self.sim_params.trading_days:
            prices = []

            # Investigate the asset_finder class. How might data be stored?
            for sid_id in self.trading_environment.asset_finder.sids:
                prompt = "{}  [{}]".format(dt, sid(sid_id))
                price_socket.send_string(prompt)
                data = price_socket.recv_string()
                price = float(data)

                # Look at DataFrameSource to see that the dataframe input
                # quickly gets turned into a series of events (that are yielded)
                event = {
                    'dt': dt,
                    'sid': sid(sid_id),
                    'price': float(price),
                    'volume': 1e9,
                    'type': DATASOURCE_TYPE.TRADE,
                }
                event = Event(event)
                prices.append(event)

            # We return a generator. Zipline makes heavy use of `yield` and
            # generators to build an event-driven model that runs synchronously.
            # How could we modify an algorithm to run async?
            yield dt, prices

            # What columns are being displayed?
            # Why might some orders have a commission and others are NAN?
            # 'orders' is a collection of all orders placed.
            # How could we change this to include open_orders?
            show_orders(self.blotter.orders, orders_socket)
Example #25
    def auto_close_position_events(self, next_trading_day):
        """
        Generates CLOSE_POSITION events for any SIDs whose auto-close date is
        before or equal to the given date.

        Parameters
        ----------
        next_trading_day : pandas.Timestamp
            The time before which certain Assets need to be closed

        Yields
        ------
        Event
            A close position event for any sids that should be closed before
            the next_trading_day parameter
        """
        past_asset_end_dates = set()

        # Check the auto_close_position_dates dict for SIDs to close
        for date, sids in self._auto_close_position_sids.items():
            if date > next_trading_day:
                continue
            past_asset_end_dates.add(date)

            for sid in sids:
                # Yield a CLOSE_POSITION event
                event = Event({
                    'dt': date,
                    'type': DATASOURCE_TYPE.CLOSE_POSITION,
                    'sid': sid,
                })
                yield event

        # Clear out past dates
        while past_asset_end_dates:
            self._auto_close_position_sids.pop(past_asset_end_dates.pop())
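
A hedged sketch of the mapping this generator appears to expect, from auto-close date to a set of sids; the dates and sids below are made up, and the comprehension simply mirrors the date comparison above:

import pandas as pd

# Illustrative shape only: {auto_close_date: set_of_sids}
auto_close_position_sids = {
    pd.Timestamp('2012-07-27', tz='UTC'): {1, 2},
    pd.Timestamp('2012-08-01', tz='UTC'): {3},
}

next_trading_day = pd.Timestamp('2012-07-30', tz='UTC')
due = sorted(sid
             for date, sids in auto_close_position_sids.items()
             if date <= next_trading_day
             for sid in sids)
print(due)   # [1, 2] -- each of these sids would get a CLOSE_POSITION event
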
Example #26
    def transaction_sim(self, **params):
        """ This is a utility method that asserts expected
        results for conversion of orders to transactions given a
        trade history"""

        trade_count = params['trade_count']
        trade_interval = params['trade_interval']
        order_count = params['order_count']
        order_amount = params['order_amount']
        order_interval = params['order_interval']
        expected_txn_count = params['expected_txn_count']
        expected_txn_volume = params['expected_txn_volume']
        # optional parameters
        # ---------------------
        # if present, alternate between long and short sales
        alternate = params.get('alternate')
        # if present, expect transaction amounts to match orders exactly.
        complete_fill = params.get('complete_fill')

        sid = 1
        sim_params = factory.create_simulation_parameters()
        blotter = Blotter()
        price = [10.1] * trade_count
        volume = [100] * trade_count
        start_date = sim_params.first_open

        generated_trades = factory.create_trade_history(
            sid, price, volume, trade_interval, sim_params)

        if alternate:
            alternator = -1
        else:
            alternator = 1

        order_date = start_date
        for i in range(order_count):

            blotter.set_date(order_date)
            blotter.order(sid, order_amount * alternator**i, MarketOrder())

            order_date = order_date + order_interval
            # move after-market orders to just after the next
            # market open.
            if order_date.hour >= 21:
                if order_date.minute >= 00:
                    order_date = order_date + timedelta(days=1)
                    order_date = order_date.replace(hour=14, minute=30)

        # there should now be one open order list stored under the sid
        oo = blotter.open_orders
        self.assertEqual(len(oo), 1)
        self.assertTrue(sid in oo)
        order_list = oo[sid][:]  # make copy
        self.assertEqual(order_count, len(order_list))

        for i in range(order_count):
            order = order_list[i]
            self.assertEqual(order.sid, sid)
            self.assertEqual(order.amount, order_amount * alternator**i)

        tracker = PerformanceTracker(sim_params)

        benchmark_returns = [
            Event({
                'dt': dt,
                'returns': ret,
                'type': zipline.protocol.DATASOURCE_TYPE.BENCHMARK,
                'source_id': 'benchmarks'
            })
            for dt, ret in trading.environment.benchmark_returns.iteritems()
            if dt.date() >= sim_params.period_start.date()
            and dt.date() <= sim_params.period_end.date()
        ]

        generated_events = date_sorted_sources(generated_trades,
                                               benchmark_returns)

        # this approximates the loop inside TradingSimulationClient
        transactions = []
        for dt, events in itertools.groupby(generated_events,
                                            operator.attrgetter('dt')):
            for event in events:
                if event.type == DATASOURCE_TYPE.TRADE:

                    for txn, order in blotter.process_trade(event):
                        transactions.append(txn)
                        tracker.process_transaction(txn)
                elif event.type == DATASOURCE_TYPE.BENCHMARK:
                    tracker.process_benchmark(event)
                elif event.type == DATASOURCE_TYPE.TRADE:
                    tracker.process_trade(event)

        if complete_fill:
            self.assertEqual(len(transactions), len(order_list))

        total_volume = 0
        for i in range(len(transactions)):
            txn = transactions[i]
            total_volume += txn.amount
            if complete_fill:
                order = order_list[i]
                self.assertEqual(order.amount, txn.amount)

        self.assertEqual(total_volume, expected_txn_volume)
        self.assertEqual(len(transactions), expected_txn_count)

        cumulative_pos = tracker.cumulative_performance.positions[sid]
        self.assertEqual(total_volume, cumulative_pos.amount)

        # the open orders should not contain sid.
        oo = blotter.open_orders
        self.assertNotIn(sid, oo, "Entry is removed when no open orders")
Example #27
def create_trade(sid, price, amount, datetime, source_id="test_factory"):

    trade = Event()

    trade.source_id = source_id
    trade.type = DATASOURCE_TYPE.TRADE
    trade.sid = sid
    trade.dt = datetime
    trade.price = price
    trade.close_price = price
    trade.open_price = price
    trade.low = price * .95
    trade.high = price * 1.05
    trade.volume = amount

    return trade
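
A hedged usage sketch for the helper above, assuming create_trade and its Event / DATASOURCE_TYPE imports are in scope; the values mirror the trade fixtures used elsewhere in these examples.

from datetime import datetime
import pytz

# Assumes create_trade (above) plus its Event / DATASOURCE_TYPE imports are in scope.
trade = create_trade(
    sid=133,
    price=3.0,
    amount=2000,
    datetime=datetime(2006, 1, 5, 14, 31, tzinfo=pytz.utc),
)
print(trade.price, trade.volume)   # 3.0 2000; low/high are +/-5% around price
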
Example #28
def to_dt(msg):
    return Event({'dt': msg})
Example #29
    def test_daily_buy_and_hold(self):

        start_date = datetime.datetime(year=2006,
                                       month=1,
                                       day=3,
                                       hour=0,
                                       minute=0,
                                       tzinfo=pytz.utc)
        end_date = datetime.datetime(year=2006,
                                     month=1,
                                     day=5,
                                     hour=0,
                                     minute=0,
                                     tzinfo=pytz.utc)

        sim_params = SimulationParameters(period_start=start_date,
                                          period_end=end_date,
                                          emission_rate='daily')

        algo = BuyAndHoldAlgorithm(sim_params=sim_params,
                                   data_frequency='daily')

        first_date = datetime.datetime(2006, 1, 3, tzinfo=pytz.utc)
        second_date = datetime.datetime(2006, 1, 4, tzinfo=pytz.utc)
        third_date = datetime.datetime(2006, 1, 5, tzinfo=pytz.utc)

        trade_bar_data = [
            Event({
                'open_price': 10,
                'close_price': 15,
                'price': 15,
                'volume': 1000,
                'sid': 1,
                'dt': first_date,
                'source_id': 'test-trade-source',
                'type': DATASOURCE_TYPE.TRADE
            }),
            Event({
                'open_price': 15,
                'close_price': 20,
                'price': 20,
                'volume': 2000,
                'sid': 1,
                'dt': second_date,
                'source_id': 'test_list',
                'type': DATASOURCE_TYPE.TRADE
            }),
            Event({
                'open_price': 20,
                'close_price': 15,
                'price': 15,
                'volume': 1000,
                'sid': 1,
                'dt': third_date,
                'source_id': 'test_list',
                'type': DATASOURCE_TYPE.TRADE
            }),
        ]
        benchmark_data = [
            Event({
                'returns': 0.1,
                'dt': first_date,
                'source_id': 'test-benchmark-source',
                'type': DATASOURCE_TYPE.BENCHMARK
            }),
            Event({
                'returns': 0.2,
                'dt': second_date,
                'source_id': 'test-benchmark-source',
                'type': DATASOURCE_TYPE.BENCHMARK
            }),
            Event({
                'returns': 0.4,
                'dt': third_date,
                'source_id': 'test-benchmark-source',
                'type': DATASOURCE_TYPE.BENCHMARK
            }),
        ]

        algo.benchmark_return_source = benchmark_data
        algo.sources = list([trade_bar_data])
        gen = algo._create_generator(sim_params)

        # TODO: Hand derive these results.
        #       For now, the output captured at the time of this writing is
        #       kept to at least serve as an early warning against changes.
        expected_algorithm_returns = {
            first_date: 0.0,
            second_date: -0.000350,
            third_date: -0.050018
        }

        # TODO: Hand derive these results.
        #       For now, the output captured at the time of this writing is
        #       kept to at least serve as an early warning against changes.
        expected_sharpe = {
            first_date: np.nan,
            second_date: -31.56903265,
            third_date: -11.459888981,
        }

        for bar in gen:
            current_dt = algo.datetime
            crm = algo.perf_tracker.cumulative_risk_metrics

            np.testing.assert_almost_equal(
                crm.algorithm_returns[current_dt],
                expected_algorithm_returns[current_dt],
                decimal=6)

            np.testing.assert_almost_equal(crm.metrics.sharpe[current_dt],
                                           expected_sharpe[current_dt],
                                           decimal=6,
                                           err_msg="Mismatch at %s" %
                                           (current_dt, ))
Example #30
    def test_minute_buy_and_hold(self):
        with trading.TradingEnvironment():
            start_date = datetime.datetime(year=2006,
                                           month=1,
                                           day=3,
                                           hour=0,
                                           minute=0,
                                           tzinfo=pytz.utc)
            end_date = datetime.datetime(year=2006,
                                         month=1,
                                         day=5,
                                         hour=0,
                                         minute=0,
                                         tzinfo=pytz.utc)

            sim_params = SimulationParameters(period_start=start_date,
                                              period_end=end_date,
                                              emission_rate='daily',
                                              data_frequency='minute')

            algo = BuyAndHoldAlgorithm(sim_params=sim_params,
                                       data_frequency='minute')

            first_date = datetime.datetime(2006, 1, 3, tzinfo=pytz.utc)
            first_open, first_close = \
                trading.environment.get_open_and_close(first_date)

            second_date = datetime.datetime(2006, 1, 4, tzinfo=pytz.utc)
            second_open, second_close = \
                trading.environment.get_open_and_close(second_date)

            third_date = datetime.datetime(2006, 1, 5, tzinfo=pytz.utc)
            third_open, third_close = \
                trading.environment.get_open_and_close(third_date)

            benchmark_data = [
                Event({
                    'returns': 0.1,
                    'dt': first_close,
                    'source_id': 'test-benchmark-source',
                    'type': DATASOURCE_TYPE.BENCHMARK
                }),
                Event({
                    'returns': 0.2,
                    'dt': second_close,
                    'source_id': 'test-benchmark-source',
                    'type': DATASOURCE_TYPE.BENCHMARK
                }),
                Event({
                    'returns': 0.4,
                    'dt': third_close,
                    'source_id': 'test-benchmark-source',
                    'type': DATASOURCE_TYPE.BENCHMARK
                }),
            ]

            trade_bar_data = [
                Event({
                    'open_price': 10,
                    'close_price': 15,
                    'price': 15,
                    'volume': 1000,
                    'sid': 1,
                    'dt': first_open,
                    'source_id': 'test-trade-source',
                    'type': DATASOURCE_TYPE.TRADE
                }),
                Event({
                    'open_price': 10,
                    'close_price': 15,
                    'price': 15,
                    'volume': 1000,
                    'sid': 1,
                    'dt': first_open + datetime.timedelta(minutes=10),
                    'source_id': 'test-trade-source',
                    'type': DATASOURCE_TYPE.TRADE
                }),
                Event({
                    'open_price': 15,
                    'close_price': 20,
                    'price': 20,
                    'volume': 2000,
                    'sid': 1,
                    'dt': second_open,
                    'source_id': 'test-trade-source',
                    'type': DATASOURCE_TYPE.TRADE
                }),
                Event({
                    'open_price': 15,
                    'close_price': 20,
                    'price': 20,
                    'volume': 2000,
                    'sid': 1,
                    'dt': second_open + datetime.timedelta(minutes=10),
                    'source_id': 'test-trade-source',
                    'type': DATASOURCE_TYPE.TRADE
                }),
                Event({
                    'open_price': 20,
                    'close_price': 15,
                    'price': 15,
                    'volume': 1000,
                    'sid': 1,
                    'dt': third_open,
                    'source_id': 'test-trade-source',
                    'type': DATASOURCE_TYPE.TRADE
                }),
                Event({
                    'open_price': 20,
                    'close_price': 15,
                    'price': 15,
                    'volume': 1000,
                    'sid': 1,
                    'dt': third_open + datetime.timedelta(minutes=10),
                    'source_id': 'test-trade-source',
                    'type': DATASOURCE_TYPE.TRADE
                }),
            ]

            algo.benchmark_return_source = benchmark_data
            algo.sources = list([trade_bar_data])
            gen = algo._create_generator(sim_params)

            crm = algo.perf_tracker.cumulative_risk_metrics

            first_msg = gen.next()

            self.assertIsNotNone(first_msg,
                                 "There should be a message emitted.")

            # Protects against bug where the positions appeared to be
            # a day late, because benchmarks were triggering
            # calculations before the events for the day were
            # processed.
            self.assertEqual(
                1, len(algo.portfolio.positions), "There should "
                "be one position after the first day.")

            self.assertEquals(
                0, crm.metrics.algorithm_volatility[algo.datetime.date()],
                "On the first day algorithm volatility does not exist.")

            second_msg = gen.next()

            self.assertIsNotNone(second_msg, "There should be a message "
                                 "emitted.")

            self.assertEqual(1, len(algo.portfolio.positions),
                             "Number of positions should stay the same.")

            # TODO: Hand derive. Current value is just a canary to
            # detect changes.
            np.testing.assert_almost_equal(0.050022510129558301,
                                           crm.algorithm_returns[-1],
                                           decimal=6)

            third_msg = gen.next()

            self.assertEqual(1, len(algo.portfolio.positions),
                             "Number of positions should stay the same.")

            self.assertIsNotNone(third_msg, "There should be a message "
                                 "emitted.")

            # TODO: Hand derive. Current value is just a canary to
            # detect changes.
            np.testing.assert_almost_equal(-0.047639464532418657,
                                           crm.algorithm_returns[-1],
                                           decimal=6)
Example #31
 def mapped_data(self):
     for row in self.raw_data:
         yield Event(self.apply_mapping(row))
Example #32
def create_trade(sid, price, amount, datetime, source_id="test_factory"):

    trade = Event()

    trade.source_id = source_id
    trade.type = DATASOURCE_TYPE.TRADE
    trade.sid = sid
    trade.dt = datetime
    trade.price = price
    trade.close_price = price
    trade.open_price = price
    trade.low = price * .95
    trade.high = price * 1.05
    trade.volume = amount

    return trade
Example #33
 def gen_trades(self):
     # create a sequence of trades
     events = [
         Event({
             'volume': 2000,
             'TRANSACTION': None,
             'type': 4,
             'price': 3.0,
             'datetime': datetime.datetime(
                 2006, 1, 5, 14, 31, tzinfo=pytz.utc),
             'high': 3.15,
             'low': 2.85,
             'sid': 133,
             'source_id': 'test_source',
             'close': 3.0,
             'dt': datetime.datetime(2006, 1, 5, 14, 31, tzinfo=pytz.utc),
             'open': 3.0
         }),
         Event({
             'volume': 2000,
             'TRANSACTION': None,
             'type': 4,
             'price': 3.5,
             'datetime': datetime.datetime(
                 2006, 1, 5, 14, 32, tzinfo=pytz.utc),
             'high': 3.15,
             'low': 2.85,
             'sid': 133,
             'source_id': 'test_source',
             'close': 3.5,
             'dt': datetime.datetime(2006, 1, 5, 14, 32, tzinfo=pytz.utc),
             'open': 3.0
         }),
         Event({
             'volume': 2000,
             'TRANSACTION': None,
             'type': 4,
             'price': 4.0,
             'datetime': datetime.datetime(
                 2006, 1, 5, 14, 33, tzinfo=pytz.utc),
             'high': 3.15,
             'low': 2.85,
             'sid': 133,
             'source_id': 'test_source',
             'close': 4.0,
             'dt': datetime.datetime(2006, 1, 5, 14, 33, tzinfo=pytz.utc),
             'open': 3.5
         }),
         Event({
             'volume': 2000,
             'TRANSACTION': None,
             'type': 4,
             'price': 3.5,
             'datetime': datetime.datetime(
                 2006, 1, 5, 14, 34, tzinfo=pytz.utc),
             'high': 3.15,
             'low': 2.85,
             'sid': 133,
             'source_id': 'test_source',
             'close': 3.5,
             'dt': datetime.datetime(2006, 1, 5, 14, 34, tzinfo=pytz.utc),
             'open': 4.0
         }),
         Event({
             'volume': 2000,
             'TRANSACTION': None,
             'type': 4,
             'price': 3.0,
             'datetime': datetime.datetime(
                 2006, 1, 5, 14, 35, tzinfo=pytz.utc),
             'high': 3.15,
             'low': 2.85,
             'sid': 133,
             'source_id': 'test_source',
             'close': 3.0,
             'dt': datetime.datetime(2006, 1, 5, 14, 35, tzinfo=pytz.utc),
             'open': 3.5
         })
     ]
     return events
Example #34
    def test_minute_tracker(self):
        """ Tests minute performance tracking."""
        with trading.TradingEnvironment():
            start_dt = trading.environment.exchange_dt_in_utc(
                datetime.datetime(2013, 3, 1, 9, 31))
            end_dt = trading.environment.exchange_dt_in_utc(
                datetime.datetime(2013, 3, 1, 16, 0))

            sim_params = SimulationParameters(
                period_start=start_dt,
                period_end=end_dt,
                emission_rate='minute'
            )
            tracker = perf.PerformanceTracker(sim_params)

            foo_event_1 = factory.create_trade('foo', 10.0, 20, start_dt)
            order_event_1 = Order(sid=foo_event_1.sid,
                                  amount=-25,
                                  dt=foo_event_1.dt)
            bar_event_1 = factory.create_trade('bar', 100.0, 200, start_dt)
            txn_event_1 = Transaction(sid=foo_event_1.sid,
                                      amount=-25,
                                      dt=foo_event_1.dt,
                                      price=10.0,
                                      commission=0.50,
                                      order_id=order_event_1.id)
            benchmark_event_1 = Event({
                'dt': start_dt,
                'returns': 0.01,
                'type': DATASOURCE_TYPE.BENCHMARK
            })

            foo_event_2 = factory.create_trade(
                'foo', 11.0, 20, start_dt + datetime.timedelta(minutes=1))
            bar_event_2 = factory.create_trade(
                'bar', 11.0, 20, start_dt + datetime.timedelta(minutes=1))
            benchmark_event_2 = Event({
                'dt': start_dt + datetime.timedelta(minutes=1),
                'returns': 0.02,
                'type': DATASOURCE_TYPE.BENCHMARK
            })

            events = [
                foo_event_1,
                order_event_1,
                benchmark_event_1,
                txn_event_1,
                bar_event_1,
                foo_event_2,
                benchmark_event_2,
                bar_event_2,
            ]

            grouped_events = itertools.groupby(
                events, operator.attrgetter('dt'))

            messages = {}
            for date, group in grouped_events:
                tracker.set_date(date)
                for event in group:
                    tracker.process_event(event)
                tracker.handle_minute_close(date)
                msg = tracker.to_dict()
                messages[date] = msg

            self.assertEquals(2, len(messages))

            msg_1 = messages[foo_event_1.dt]
            msg_2 = messages[foo_event_2.dt]

            self.assertEquals(1, len(msg_1['minute_perf']['transactions']),
                              "The first message should contain one "
                              "transaction.")
            # Check that transactions aren't emitted for previous events.
            self.assertEquals(0, len(msg_2['minute_perf']['transactions']),
                              "The second message should have no "
                              "transactions.")

            self.assertEquals(1, len(msg_1['minute_perf']['orders']),
                              "The first message should contain one orders.")
            # Check that orders aren't emitted for previous events.
            self.assertEquals(0, len(msg_2['minute_perf']['orders']),
                              "The second message should have no orders.")

            # Ensure that period_close moves through time.
            # Also, ensure that the period_closes are the expected dts.
            self.assertEquals(foo_event_1.dt,
                              msg_1['minute_perf']['period_close'])
            self.assertEquals(foo_event_2.dt,
                              msg_2['minute_perf']['period_close'])

            # Ensure that a Sharpe value for cumulative metrics is being
            # created.
            self.assertIsNotNone(msg_1['cumulative_risk_metrics']['sharpe'])
            self.assertIsNotNone(msg_2['cumulative_risk_metrics']['sharpe'])