Example #1
def _fetch_candles(params):
    """Fetch the given URL from OANDA and return a list of (utc-time, price).

    Args:
      params: A dict of URL params values.
    Returns:
      A sorted list of (time, price) points.
    """

    url = '?'.join((URL, parse.urlencode(sorted(params.items()))))
    logging.info("Fetching '%s'", url)

    # Fetch the data.
    response = net_utils.retrying_urlopen(url)
    if response is None:
        return None
    data_string = response.read().decode('utf-8')

    # Parse it.
    data = json.loads(data_string, parse_float=D)
    try:
        # Convert each candle to a (UTC datetime, open price) pair, in time order.
        time_prices = []
        candles = sorted(data['candles'], key=lambda candle: candle['time'])
        for candle in candles:
            candle_dt_utc = datetime.datetime.strptime(
                candle['time'],
                r"%Y-%m-%dT%H:%M:%S.%fZ").replace(tzinfo=tz.tzutc())
            candle_price = D(candle['openMid'])
            time_prices.append((candle_dt_utc, candle_price))
    except KeyError:
        logging.error("Unexpected response data: %s", data)
        return None
    return sorted(time_prices)
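A minimal usage sketch for the helper above; the parameter names below ('instrument', 'granularity', 'count') are hypothetical stand-ins for whatever the endpoint behind the URL constant actually expects:

# Hypothetical parameters; the real keys depend on the endpoint behind URL.
params = {
    'instrument': 'EUR_USD',
    'granularity': 'M1',
    'count': 10,
}
time_prices = _fetch_candles(params)
if time_prices is not None:
    for candle_dt_utc, candle_price in time_prices:
        print(candle_dt_utc.isoformat(), candle_price)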
Example #2
    def get_ft_symbol(self, security_type, exchange, ticker):
        template = 'http://beta.morningstar.com/{}/{}/{}/quote.html'
        url = template.format(security_type, exchange, ticker)
        try:
            response = net_utils.retrying_urlopen(url)
            if response is None:
                return None
            response = response.read().decode('utf-8').strip()
        except error.HTTPError:
            return None

        soup = BeautifulSoup(response, 'html.parser')

        def make_finder(name):
            def meta_finder(a):
                return (a.name == 'meta' and 'name' in a.attrs
                        and a['name'] == name)
            return meta_finder

        def get_meta(name):
            attr = soup.find_all(make_finder(name))[0]
            return attr['content']

        fetched_exchange_id = get_meta('exchangeId')
        fetched_ticker = get_meta('ticker')
        sec_id = get_meta('secId')

        return sec_id
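The finder above matches <meta> tags by their name attribute. A small, self-contained sketch of the markup shape it expects, with invented attribute values:

from bs4 import BeautifulSoup

# Invented markup mirroring the meta tags the scraper above looks for.
sample_html = """
<head>
  <meta name="exchangeId" content="XNAS">
  <meta name="ticker" content="AAPL">
  <meta name="secId" content="0P000000GY">
</head>
"""
soup = BeautifulSoup(sample_html, 'html.parser')
print(soup.find('meta', attrs={'name': 'secId'})['content'])  # -> 0P000000GY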
Example #3
    def test_timeout_once(self):
        response = http.client.HTTPResponse(mock.MagicMock())
        response.status = 200
        with mock.patch('urllib.request.urlopen',
                        side_effect=[None, response]):
            self.assertIs(net_utils.retrying_urlopen('http://nowhere.com'),
                          response)
Example #4
    def get_historical_price(self, compound_ticker, date):
        """See contract in beancount.prices.source.Source."""

        # security_type, exchange, ticker = compound_ticker.lower().split(':')
        # symbol = self.get_ft_symbol(security_type, exchange, ticker)
        symbol = compound_ticker

        if not symbol:
            logging.info("Could not find secId for %s" % compound_ticker)
            return None

        # Look back some number of days in the past in order to make sure we hop
        # over national holidays.
        begin_date = date - datetime.timedelta(days=5)
        end_date = date

        # template = 'http://mschart.morningstar.com/chartweb/defaultChart?type=getcc&secids={}&dataid={}&startdate={}&enddate={}&currency=&format=1'
        template = 'https://markets.ft.com/data/equities/ajax/get-historical-prices?startDate={}&endDate={}&symbol={}'

        def fmt(d):
            return d.strftime('%Y/%m/%d')

        # symbol = 19753923
        url = template.format(fmt(begin_date), fmt(end_date), symbol)
        logging.info("Fetching %s", url)

        try:
            response = net_utils.retrying_urlopen(url)
            if response is None:
                return None
            response = response.read().decode('utf-8').strip()
            response = json.loads(response)
            if 'status' in response:
                status = response['status']
                if status['code'] != 200:
                    logging.info("HTTP Status: [%s] %s" %
                                 (status['code'], status['message']))
                    return None
            soup = BeautifulSoup(response['html'], 'html.parser')
        except error.HTTPError:
            return None

        try:
            entries = soup.find_all('td')
            trade_date = entries[0].find_all('span')[0].contents[0]
            trade_date = datetime.datetime.strptime(trade_date,
                                                    '%A, %B %d, %Y')
            trade_date = trade_date.replace(tzinfo=pytz.UTC)
            price = D(entries[4].contents[0])

            return source.SourcePrice(price, trade_date, None)
        except Exception:
            logging.exception("Error parsing data.")
            return None
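The parsing step assumes the returned 'html' fragment holds a table row whose first cell wraps the trade date in a <span> and whose fifth cell is the close. A sketch with invented values:

import datetime
from bs4 import BeautifulSoup

# Invented fragment in the shape the parser above walks.
sample_html = (
    "<tr>"
    "<td><span>Friday, September 01, 2017</span></td>"
    "<td>100.00</td><td>101.00</td><td>99.50</td>"
    "<td>100.50</td>"  # entries[4]: the close price
    "</tr>"
)
entries = BeautifulSoup(sample_html, 'html.parser').find_all('td')
trade_date = datetime.datetime.strptime(
    entries[0].find_all('span')[0].contents[0], '%A, %B %d, %Y')
print(trade_date.date(), entries[4].contents[0])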
Example #5
    def get_csv(self, date):
        template = 'https://firststatesuper.com.au/content/dam/ftc/superunitprices/super-{0:0>2}-{1}.csv'
        url = template.format(date.month, date.year)
        logging.info("Fetching %s", url)

        try:
            response = net_utils.retrying_urlopen(url)
            if response is None:
                return None
            else:
                return response
        except error.HTTPError:
            return None
Example #6
    def get_historical_price(self, ticker, date):
        """See contract in beancount.prices.source.Source."""

        # Look back some number of days in the past in order to make sure we hop
        # over national holidays.
        begin_date = date - datetime.timedelta(days=5)
        end_date = date

        # Make the query.
        params = parse.urlencode(
            sorted({
                'q': ticker,
                'startdate': begin_date.strftime('%b+%d,%Y'),
                'enddate': end_date.strftime('%b+%d,%Y'),
                'num': 5,
                'output': 'csv',
            }.items()))
        url = 'http://www.google.com/finance/historical?{}'.format(params)
        try:
            response = net_utils.retrying_urlopen(url)
            if response is None:
                return None
            data = response.read()
        except socket.timeout:
            logging.error("Connection timed out")
            return None
        except error.HTTPError:
            # When the instrument is incorrect, you will get a 404.
            return None

        # Note: utf-8-sig automatically skips the BOM here.
        data = data.decode('utf-8-sig').strip()

        lines = data.splitlines()
        assert len(lines) >= 2, "Too few lines in returned data: {}".format(
            len(lines))

        # Parse the header, find the column for the adjusted close.
        columns = lines[0].split(',')
        index_price = columns.index('Close')
        assert index_price >= 0, "Could not find 'Close' data column."
        index_date = columns.index('Date')
        assert index_date >= 0, "Could not find 'Date' data column."

        # Get the latest data returned.
        most_recent_data = lines[1].split(',')
        close_price = D(most_recent_data[index_price])
        date = datetime.datetime.strptime(most_recent_data[index_date],
                                          '%d-%b-%y')

        return source.SourcePrice(close_price, date, None)
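A worked sketch of the CSV layout the code above assumes (a header with 'Date' and 'Close' columns, newest row first); the figures are invented and decimal.Decimal stands in for the module's D:

import datetime
from decimal import Decimal as D

# Invented CSV payload mirroring the parsing steps above.
data = "Date,Open,High,Low,Close,Volume\n1-Sep-17,100.00,101.00,99.50,100.50,123456"
lines = data.splitlines()
columns = lines[0].split(',')
most_recent_data = lines[1].split(',')
close_price = D(most_recent_data[columns.index('Close')])
date = datetime.datetime.strptime(most_recent_data[columns.index('Date')], '%d-%b-%y')
print(date.date(), close_price)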
Example #7
    def get_historical_price(self, ticker, date):
        """See contract in beancount.prices.source.Source."""

        # Look back some number of days in the past in order to make sure we hop
        # over national holidays.
        begin_date = date - datetime.timedelta(days=5)
        end_date = date

        # Make the query.
        params = parse.urlencode(
            sorted({
                's': ticker,
                'a': begin_date.month - 1,
                'b': begin_date.day,
                'c': begin_date.year,
                'd': end_date.month - 1,
                'e': end_date.day,
                'f': end_date.year,
                'g': 'd',
                'ignore': '.csv',
            }.items()))
        url = 'http://ichart.yahoo.com/table.csv?{}'.format(params)
        try:
            response = net_utils.retrying_urlopen(url)
            if response is None:
                return None
            data = response.read().decode('utf-8').strip()
        except error.HTTPError:
            return None

        lines = data.splitlines()
        assert len(lines) >= 2, "Too few lines in returned data: {}".format(
            len(lines))

        # Parse the header, find the column for the adjusted close.
        columns = lines[0].split(',')
        index_price = columns.index('Adj Close')
        assert index_price >= 0, "Could not find 'Adj Close' data column."
        index_date = columns.index('Date')
        assert index_date >= 0, "Could not find 'Date' data column."

        # Get the latest data returned.
        most_recent_data = lines[1].split(',')
        close_price = D(most_recent_data[index_price])
        date = datetime.datetime.strptime(most_recent_data[index_date],
                                          '%Y-%m-%d')

        return source.SourcePrice(close_price, date, None)
Example #8
    def get_latest_price(self, ticker):
        commodity, currency = ticker.split(':')
        url = 'https://min-api.cryptocompare.com/data/price?fsym={}&tsyms={}'.format(
            commodity, currency)
        logging.info("Fetching %s", url)
        try:
            response = net_utils.retrying_urlopen(url)
            if response is None:
                return None
            response = response.read().decode('utf-8').strip()
            response = json.loads(response)
        except error.HTTPError:
            return None
        price = D(response[currency]).quantize(D('1.000000000000000000'))
        trade_date = utils.default_tzinfo(datetime.now(), tz.gettz())
        return source.SourcePrice(
            D('0') if price == 0 else price, trade_date, currency)
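A usage sketch, assuming an instance of the enclosing source class (the instance name below is hypothetical) and a colon-separated 'COIN:QUOTE' ticker; the fields printed assume the usual SourcePrice layout (price, time, quote_currency):

# Hypothetical instance name; the class definition is not shown above.
result = crypto_source.get_latest_price('BTC:USD')
if result is not None:
    print(result.price, result.time, result.quote_currency)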
Example #9
    def get_latest_price(self, ticker):
        """
        Fetch the current price from the Realt API.

        Args:
          ticker: The ticker symbol of the cryptocurrency.

        Returns:
          A SourcePrice object, or None on failure.
        """

        url = "https://api.realt.community/v1/token/{}".format(ticker)
        logging.info("Fetching %s", url)
        try:
            response = net_utils.retrying_urlopen(url)
            if response is None:
                return None
            response = response.read().decode("utf-8").strip()
            response = json.loads(response)
            logging.info("Reponse: %s", response)
        except error.HTTPError:
            return None
        logging.info("Price: %s", response["tokenPrice"])
        logging.info("updatedate: %s", response["lastUpdate"]["date"])
        trade_date = datetime.now()
        trade_date = trade_date.replace(tzinfo=pytz.UTC)
        logging.info("trade_date: %s", trade_date)
        try:
            price = D(response["tokenPrice"])
            return (
                None
                if price == 0
                else source.SourcePrice(
                    price, trade_date, response["currency"]
                )
            )
        except Exception as exc:
            raise RealtError("No price available?") from exc
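An invented payload sketching the fields the method above reads (tokenPrice, lastUpdate.date, currency); the values are illustrative only:

# Invented response; field values are made up for illustration.
response = {
    "tokenPrice": "52.50",
    "currency": "USD",
    "lastUpdate": {"date": "2021-06-01 12:00:00"},
}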
Example #10
    def get_historical_price(self, ticker, date):
        commodity, currency = ticker.split(':')
        trade_date = datetime.combine(date, datetime.max.time())
        trade_date = trade_date.replace(tzinfo=pytz.UTC)
        ts = int(time.mktime(trade_date.timetuple()))
        url = 'https://min-api.cryptocompare.com/data/pricehistorical?fsym={}&tsyms={}&ts={}'.format(
            commodity, currency, ts)
        logging.info("Fetching %s", url)
        try:
            response = net_utils.retrying_urlopen(url)
            if response is None:
                return None
            response = response.read().decode('utf-8').strip()
            response = json.loads(response)
        except error.HTTPError:
            return None

        price = D(response[commodity][currency]).quantize(
            D('1.000000000000000000'))
        return source.SourcePrice(
            D('0') if price == 0 else price, trade_date, currency)
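A worked example of the timestamp computation above (the date is arbitrary); note that time.mktime interprets the tuple in local time, mirroring the code above:

import datetime
import time
import pytz

# Arbitrary date, used only to show how the 'ts' query parameter is derived.
date = datetime.date(2017, 9, 1)
trade_date = datetime.datetime.combine(date, datetime.datetime.max.time())
trade_date = trade_date.replace(tzinfo=pytz.UTC)
ts = int(time.mktime(trade_date.timetuple()))
print(ts)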
Example #11
    def get_url(self, url_template, ticker):
        app_id, currencies = ticker.split(':')
        from_currency, to_currency = currencies.split('_')

        url = url_template.format(app_id, from_currency, to_currency)
        logging.info("Fetching %s", url)
        try:
            response = net_utils.retrying_urlopen(url)
            if response is None:
                return None
            response = response.read().decode('utf-8').strip()
            response = json.loads(response)
        except error.HTTPError:
            return None

        # We use quantize() because otherwise the conversion from a float to a
        # Decimal leaves tons of cruft (i.e. dozens of digits of meaningless
        # precision) that just clutters up the price file.
        price = D(response['rates'][to_currency]).quantize(D('1.000000'))
        trade_date = datetime.datetime.fromtimestamp(response['timestamp'], datetime.timezone.utc)
        return source.SourcePrice(price, trade_date, from_currency)
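A usage sketch; the URL template and app id below are invented placeholders for an Open Exchange Rates-style endpoint with three format slots (app id, base currency, target currency), and the instance name is hypothetical:

# Hypothetical template and credentials, for illustration only.
template = ('https://example.com/api/latest.json'
            '?app_id={}&base={}&symbols={}')
result = rates_source.get_url(template, 'MY_APP_ID:USD_EUR')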
Example #12
    def get_latest_price(self, ticker):
        """See contract in beancount.prices.source.Source."""

        # Try "realtime" and just regular bid/ask pairs.
        for fields, num_prices in [('l1d1', 1), ('b3b2d2', 2), ('b0a0d2', 2),
                                   ('p0d2', 1)]:
            url = 'http://finance.yahoo.com/d/quotes.csv?s={}&f=c4{}'.format(
                ticker, fields)
            logging.info("Fetching %s", url)
            try:
                response = net_utils.retrying_urlopen(url)
                if response is None:
                    return None
                data = response.read().decode('utf-8').strip()
            except error.HTTPError:
                return None
            if data and not re.match('N/A', data):
                break
        else:
            return None
        components = data.split(',')

        # Get the currency.
        currency = components[0].strip('"')

        # Get the price (or the mid-point of the bid/ask pair).
        if num_prices == 1:
            # Process just a price.
            price = D(components[1])
        else:
            # Process separate bid/offer.
            bid = D(components[1])
            ask = D(components[2])
            price = (bid + ask) / 2

        # Get the trade date for that price.
        trade_date = datetime.datetime.strptime(components[-1], '"%m/%d/%Y"')

        return source.SourcePrice(price, trade_date, currency)
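The 'l1d1' branch above expects a one-line CSV of currency, last price, and quoted trade date. A sketch of that parsing with an invented line; decimal.Decimal stands in for the module's D:

import datetime
from decimal import Decimal as D

# Invented quote line in the single-price shape handled above.
data = '"USD",100.50,"9/1/2017"'
components = data.split(',')
currency = components[0].strip('"')
price = D(components[1])
trade_date = datetime.datetime.strptime(components[-1], '"%m/%d/%Y"')
print(currency, price, trade_date.date())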
Example #13
    def test_max_retry(self):
        with mock.patch('urllib.request.urlopen',
                        side_effect=[None, None, None, None, None, None]):
            self.assertIsNone(net_utils.retrying_urlopen('http://nowhere.com'))
Example #14
    def test_success_other(self):
        response = http.client.HTTPResponse(mock.MagicMock())
        with mock.patch('urllib.request.urlopen', return_value=response):
            self.assertIsNone(net_utils.retrying_urlopen('http://nowhere.com'))
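Taken together, the tests here and in Examples #3 and #13 pin down the contract of net_utils.retrying_urlopen: it retries a bounded number of times, treats None or a non-200 response as failure, and only returns the response on success. A minimal sketch consistent with those tests; the retry count and signature are assumptions, not the real implementation:

import urllib.request

def retrying_urlopen(url, max_attempts=5):
    """Sketch only: retry urlopen until a response with HTTP status 200 arrives."""
    for _ in range(max_attempts):
        response = urllib.request.urlopen(url)
        if response is not None and getattr(response, 'status', None) == 200:
            return response
    return None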
Example #15
    def get_historical_price(self, compound_ticker, date):
        """See contract in beancount.prices.source.Source."""

        security_type, exchange, ticker = compound_ticker.lower().split(':')
        sec_id = self.get_morningstar_secid(security_type, exchange, ticker)

        if not sec_id:
            logging.info("Could not find secId for %s:%s:%s" %
                         (security_type, exchange, ticker))
            return None

        # Look back some number of days in the past in order to make sure we hop
        # over national holidays.
        begin_date = date - datetime.timedelta(days=5)
        end_date = date

        template = 'http://mschart.morningstar.com/chartweb/defaultChart?type=getcc&secids={}&dataid={}&startdate={}&enddate={}&currency=&format=1'

        def fmt(d):
            return d.strftime('%Y-%m-%d')

        # The data_id is a magic number that tells the Morningstar backend
        # exactly what type of data you want (i.e. price data, growth of
        # $10,000, and so on). For some reason it uses a different number for
        # mutual funds than for ETFs and stocks.
        if security_type == 'funds':
            data_id = 8217
        else:
            data_id = 8225

        url = template.format(sec_id, data_id, fmt(begin_date), fmt(end_date))
        logging.info("Fetching %s", url)

        current_tz = datetime.datetime.now(
            datetime.timezone.utc).astimezone().tzinfo

        def hook(dct):
            """ An ad-hoc parser for Morningstar's weird format. """
            if 'code' in dct:
                return {'code': int(dct['code']), 'message': dct['message']}
            elif set(dct.keys()) == set(('i', 'd')):
                return {'i': int(dct['i']), 'd': dct['d']}
            elif set(dct.keys()) == set(('i', 'v')):
                return {
                    'i':
                    datetime.datetime.strptime(
                        dct['i'], '%Y-%m-%d').replace(tzinfo=current_tz),
                    'v':
                    D(dct['v'])
                }
            elif set(dct.keys()) == set(('i', 't')):
                return dct
            elif set(dct.keys()) == set('r'):
                return dct
            elif set(dct.keys()) == set(('data', 'status')):
                return dct
            else:
                return dct

        try:
            response = net_utils.retrying_urlopen(url)
            if response is None:
                return None
            response = response.read().decode('utf-8').strip()
            response = json.loads(response, object_hook=hook)
            data = response['data']
            status = response['status']
            if status['code'] != 200:
                logging.info("HTTP Status: [%s] %s" %
                             (status['code'], status['message']))
                return None
        except error.HTTPError:
            return None

        try:
            last_price = data['r'][-1]['t'][-1]['d'][-1]
            price = last_price['v']
            trade_date = last_price['i']

            return source.SourcePrice(price, trade_date, None)
        except Exception:
            logging.exception("Error parsing data.")
            return None
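The object_hook above implies a deeply nested response shape. An invented payload in that shape, showing what data['r'][-1]['t'][-1]['d'][-1] pulls out:

# Invented payload mirroring the nesting the parser above walks.
data = {
    'r': [                            # one block per requested secId
        {'i': 0, 't': [               # 't': list of time-series segments
            {'i': 0, 'd': [           # 'd': list of dated points
                {'i': '2017-09-01', 'v': '100.50'},   # date / value pair
            ]},
        ]},
    ],
}
last_price = data['r'][-1]['t'][-1]['d'][-1]
print(last_price['i'], last_price['v'])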
Example #16
    def get_latest_price(self, ticker):
        """See contract in beancount.prices.source.Source."""

        if ':' in ticker:
            exchange, symbol = ticker.split(':')
        else:
            exchange = None
            symbol = ticker

        # Build the query.
        params_dict = {
            'q': symbol,
            'f': 'd,c',  # Date,Close
        }
        if exchange:
            params_dict['x'] = exchange

        # Always reach back 5 days in time because of long weekends.
        params_dict['p'] = '5d'
        if exchange not in ('MUTF', 'MUTF_CA'):
            params_dict['i'] = 300  # secs, to get the most recent.

        if exchange in ('TSE', 'MUTF_CA'):
            quote_currency = 'CAD'
        else:
            quote_currency = 'USD'

        url = 'http://www.google.com/finance/getprices?{}'.format(
            parse.urlencode(sorted(params_dict.items())))
        logging.info("Fetching %s", url)

        # Fetch the data.
        response = net_utils.retrying_urlopen(url)
        if response is None:
            return None
        try:
            data = response.read().decode('utf-8')
        except socket.timeout:
            logging.error("Connection timed out")
            return None
        data = parse.unquote(data).strip()

        # Process the meta-data.
        metadata = {}
        lines = data.splitlines()
        for index, line in enumerate(lines):
            match = re.match('([A-Z_+]+)=(.*)$', line)
            if not match:
                break
            metadata[match.group(1)] = match.group(2)
        else:
            # No data was found.
            return None

        # Initialize a custom timezone, if there was one.
        try:
            offset = int(metadata['TIMEZONE_OFFSET']) * 60
            zone = tz.tzoffset("Custom", offset)
        except KeyError:
            zone = None

        interval = int(metadata['INTERVAL'])
        data_lines = lines[index:]
        for line in data_lines:
            # Process an update on timezone (I'm not sure if this will ever be
            # seen, but we handle it).
            match = re.match('TIMEZONE_OFFSET=(.*)', line)
            if match:
                # Associate an appropriately defined timezone matching that of
                # the response. This is extra... we could just as well return a
                # UTC time.
                zone = tz.tzoffset("Custom", int(match.group(1)))
                continue

            time_str, price_str = line.split(',')

            match = re.match(r'a(\d+)', time_str)
            if match:
                # Create time from the UNIX timestamp. Note: This must be
                # initialized in UTC coordinates.
                time_marker = datetime.datetime.fromtimestamp(
                    int(match.group(1)), tz.tzutc())
                # Convert to the local timezone if required.
                if zone is not None:
                    time_marker = time_marker.astimezone(zone)
                time = time_marker
            else:
                # Add time as relative from previous timestamp.
                seconds = int(time_str) * interval
                time = time_marker + datetime.timedelta(seconds=seconds)

            price = D(price_str)

        return source.SourcePrice(price, time, quote_currency)
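An invented response in the layout this parser walks: KEY=VALUE metadata lines first, then rows whose time field is either 'a' plus an absolute UNIX timestamp or a multiple of INTERVAL seconds relative to the last absolute row:

# Invented payload; keys and values are illustrative only.
sample = "\n".join([
    "EXCHANGE=NASDAQ",
    "INTERVAL=300",
    "TIMEZONE_OFFSET=-240",
    "a1504276500,100.25",   # 'a' + UNIX timestamp, then the close price
    "1,100.40",             # 1 * INTERVAL seconds after the 'a' row
    "2,100.55",
])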