Example no. 1
(0 votes)
    def _request_paths_from_ds(self, data_source, path):
        """Does the work of retrieving the available paths from the specified
        data source.

        If path is falsey, all of the top-level paths that the data source
        can serve are returned. If a path is provided, a regular list_sources
        request is sent to the underlying data server.

        :param data_source: dict with 'secret_key', 'data_source_hash' and
            'data_server_url' keys describing the remote data server.
        :param path: list of path components to list under, or falsey for
            the top-level paths.
        :returns: decoded JSON payload from the server, or [] if the
            request fails or the response is not valid JSON.
        """
        token = util.generate_access_token(data_source['secret_key'])
        # The server expects the data source's hash as the first path
        # component of the request.
        ask_path = [data_source['data_source_hash']]

        if path:
            ask_path.extend(path)

        base_url = '/'.join(
            (data_source['data_server_url'].rstrip('/'), 'sources'))

        query_string = urlencode({
            'path': json.dumps(ask_path),
            'token': token
        })

        request_url = '?'.join((base_url, query_string))

        try:
            response = urlopen(request_url)
            return json.loads(response.read())
        except URLError:
            # Lazy %-args: logging formats only if the record is emitted,
            # and a bad format arg cannot raise at call time.
            self.logger.exception("Failed to fetch paths for %s from %s",
                                  path, data_source['data_server_url'])
            return []
        except ValueError:
            # json.loads raises ValueError on a malformed body.
            self.logger.exception("Invalid response received from %s",
                                  data_source['data_server_url'])
            return []
	def test_request_paths_from_ds_no_path(self):
		"""Tests the behavior of _request_paths_from_ds when no path is
		specified."""
		ds = self.data_source.data_sources[0]

		mock_paths = ['src.A', 'src.B', 'src.C']
		expected_ask_path = json.dumps([ds['data_source_hash']])
		expected_token = util.generate_access_token(ds['secret_key'])

		with self._patch_urlopen() as mock_urlopen:
			mock_urlopen.return_value = StringIO(json.dumps(mock_paths))
			actual_paths = self.data_source._request_paths_from_ds(ds, None)

			# Exactly one request, made with a single positional URL arg.
			T.assert_equal(mock_urlopen.call_count, 1)
			T.assert_equal(mock_paths, actual_paths)  # 'cause why not?
			T.assert_equal(len(mock_urlopen.call_args[0]), 1)

			parsed_url = urlparse.urlparse(mock_urlopen.call_args[0][0])
			query_params = urlparse.parse_qs(parsed_url.query)

			T.assert_equal(parsed_url.scheme, 'http')
			T.assert_equal(parsed_url.path, '/sources')
			T.assert_equal(parsed_url.fragment, '')

			# With no path given, only the data source hash is requested.
			T.assert_equal(query_params['path'], [expected_ask_path])
			T.assert_equal(query_params['token'], [expected_token])

			T.assert_equal(actual_paths, mock_paths)
    def _request_paths_from_ds(self, data_source, path):
        """Does the work of retrieving the available paths from the specified
        data source.

        If path is falsey, all of the top-level paths that the data source
        can serve are returned. If a path is provided, a regular list_sources
        request is sent to the underlying data server.

        :param data_source: dict with 'secret_key', 'data_source_hash' and
            'data_server_url' keys describing the remote data server.
        :param path: list of path components to list under, or falsey for
            the top-level paths.
        :returns: decoded JSON payload from the server, or [] if the
            request fails or the response is not valid JSON.
        """
        token = util.generate_access_token(data_source['secret_key'])
        # The server expects the data source's hash as the first path
        # component of the request.
        ask_path = [data_source['data_source_hash']]

        if path:
            ask_path.extend(path)

        base_url = '/'.join(
            (data_source['data_server_url'].rstrip('/'), 'sources'))

        query_string = urlencode({
            'path': json.dumps(ask_path),
            'token': token
        })

        request_url = '?'.join((base_url, query_string))

        try:
            response = urlopen(request_url)
            return json.loads(response.read())
        except URLError:
            # Lazy %-args: logging formats only if the record is emitted,
            # and a bad format arg cannot raise at call time.
            self.logger.exception("Failed to fetch paths for %s from %s",
                                  path, data_source['data_server_url'])
            return []
        except ValueError:
            # json.loads raises ValueError on a malformed body.
            self.logger.exception("Invalid response received from %s",
                                  data_source['data_server_url'])
            return []
    def test_request_paths_from_ds_no_path(self):
        """Tests the behavior of _request_paths_from_ds when no path is
        specified."""
        ds = self.data_source.data_sources[0]

        mock_paths = ['src.A', 'src.B', 'src.C']
        expected_ask_path = json.dumps([ds['data_source_hash']])
        expected_token = util.generate_access_token(ds['secret_key'])

        with self._patch_urlopen() as mock_urlopen:
            mock_urlopen.return_value = StringIO(json.dumps(mock_paths))
            actual_paths = self.data_source._request_paths_from_ds(ds, None)

            # Exactly one request, made with a single positional URL arg.
            T.assert_equal(mock_urlopen.call_count, 1)
            T.assert_equal(mock_paths, actual_paths)  # 'cause why not?
            T.assert_equal(len(mock_urlopen.call_args[0]), 1)

            parsed_url = urlparse.urlparse(mock_urlopen.call_args[0][0])
            query_params = urlparse.parse_qs(parsed_url.query)

            T.assert_equal(parsed_url.scheme, 'http')
            T.assert_equal(parsed_url.path, '/sources')
            T.assert_equal(parsed_url.fragment, '')

            # With no path given, only the data source hash is requested.
            T.assert_equal(query_params['path'], [expected_ask_path])
            T.assert_equal(query_params['token'], [expected_token])

            T.assert_equal(actual_paths, mock_paths)
Example no. 5
(0 votes)
    def _request_data_from_ds(self, data_source, sources, start, end, width):
        """Does the work of retrieving stats data from a given data source.

        Sources is a list of sources (which are lists of strings) to retrieve
        stats for; this allows callers to batch requests for data from the same
        data source into a single request. Data is returned in the same format
        that the data server returns it in, which is a list of dicts containing
        two keys: 't' and 'v'. 't' is always the timestamp of the data points,
        and 'v' is a list of values for time 't'. The values in 'v' are always
        in the order that they were requested; if sources was ['a', 'b'], 'v'
        will always be data for 'a', then data for 'b'. Overall, the returned
        structure looks like so:

        [
            {
                't': 10000000,
                'v': [500, 400, ...]
            },
            {
                't': 10000001,
                'v': [505, 391, ...]
            },
            ...
        ]

        `start`, `end`, and `width` are all passed along unmodified to the
        data source.

        :returns: the decoded JSON payload, or [] if the request fails or
            the response is not valid JSON.
        """
        token = util.generate_access_token(data_source['secret_key'])
        base_url = '/'.join(
            (data_source['data_server_url'].rstrip('/'), 'data'))

        ds_hash = data_source['data_source_hash']
        # Prefix each source with the hash of the data source we're talking
        # to, to complete the source specification.
        request_sources = [[ds_hash] + source for source in sources]

        data_params = {
            'sources': json.dumps(request_sources),
            'start': start,
            'end': end,
            'width': width,
            'token': token
        }
        encoded_data_params = urlencode(data_params)

        url = '?'.join((base_url, encoded_data_params))

        try:
            response = urlopen(url)
            return json.loads(response.read())
        except URLError:
            # Bug fix: this handler formerly referenced an undefined name
            # `source`, raising NameError instead of logging the failure.
            self.logger.exception("Failed to fetch data for %s from %s",
                                  sources, data_source['data_server_url'])
            return []
        except ValueError:
            # Consistent with _request_paths_from_ds: an undecodable body
            # is logged and treated as no data rather than propagating.
            self.logger.exception("Invalid response received from %s",
                                  data_source['data_server_url'])
            return []
    def _request_data_from_ds(self, data_source, sources, start, end, width):
        """Does the work of retrieving stats data from a given data source.

        Sources is a list of sources (which are lists of strings) to retrieve
        stats for; this allows callers to batch requests for data from the same
        data source into a single request. Data is returned in the same format
        that the data server returns it in, which is a list of dicts containing
        two keys: 't' and 'v'. 't' is always the timestamp of the data points,
        and 'v' is a list of values for time 't'. The values in 'v' are always
        in the order that they were requested; if sources was ['a', 'b'], 'v'
        will always be data for 'a', then data for 'b'. Overall, the returned
        structure looks like so:

        [
            {
                't': 10000000,
                'v': [500, 400, ...]
            },
            {
                't': 10000001,
                'v': [505, 391, ...]
            },
            ...
        ]

        `start`, `end`, and `width` are all passed along unmodified to the
        data source.

        :returns: the decoded JSON payload, or [] if the request fails.
        """
        token = util.generate_access_token(data_source['secret_key'])
        base_url = '/'.join(
            (data_source['data_server_url'].rstrip('/'), 'data'))

        ds_hash = data_source['data_source_hash']
        # Prefix each source with the hash of the data source we're talking
        # to, to complete the source specification.
        request_sources = [[ds_hash] + source for source in sources]

        data_params = {
            'sources': json.dumps(request_sources),
            'start': start,
            'end': end,
            'width': width,
            'token': token
        }
        encoded_data_params = urlencode(data_params)

        url = '?'.join((base_url, encoded_data_params))

        try:
            response = urlopen(url)
        except URLError:
            # Bug fixes: (1) this handler formerly referenced an undefined
            # name `source`, raising NameError instead of logging; (2) it
            # fell through to response.read() with `response` unbound,
            # raising UnboundLocalError. Return [] on failure instead.
            self.logger.exception("Failed to fetch data for %s from %s",
                                  sources, data_source['data_server_url'])
            return []

        return json.loads(response.read())
	def test_request_data_from_ds(self):
		"""Checks that _request_data_from_ds knows how to ask for data from
		other data servers correctly.

		TODO(fhats): This can certainly benefit from some integration tests.
		"""
		ds = self.data_source.data_sources[0]
		test_sources = [
			['src.EndpointTIming', 'stat.A', 'variant.logged_in'],
			['src.ErrorCount', 'stat.B', 'variant.logged_out']
		]
		test_start, test_end, test_width = 100, 200, 30

		# Each requested source should be prefixed with the data source hash.
		expected_sources = map(lambda x: [ds['data_source_hash']] + x,
			test_sources)
		expected_token = util.generate_access_token(ds['secret_key'])

		with self._patch_urlopen() as mock_urlopen:
			# Just sprinkle some mock data to return
			mock_data = [
				{
					't': 150,
					'v': [5, 6]
				},
				{
					't': 160,
					'v': [9, 10]
				}
			]
			mock_urlopen.return_value = StringIO(json.dumps(mock_data))
			returned_data = self.data_source._request_data_from_ds(
				ds, test_sources, test_start, test_end, test_width)

			# Exactly one request, made with a single positional URL arg.
			T.assert_equal(mock_urlopen.call_count, 1)
			T.assert_equal(mock_data, returned_data)  # 'cause why not?
			T.assert_equal(len(mock_urlopen.call_args[0]), 1)

			parsed_url = urlparse.urlparse(mock_urlopen.call_args[0][0])
			query_params = urlparse.parse_qs(parsed_url.query)

			T.assert_equal(parsed_url.scheme, 'http')
			T.assert_equal(parsed_url.path, '/data')
			T.assert_equal(parsed_url.fragment, '')

			T.assert_equal(query_params['sources'],
				[json.dumps(expected_sources)])
			T.assert_equal(query_params['start'], [str(test_start)])
			T.assert_equal(query_params['end'], [str(test_end)])
			T.assert_equal(query_params['width'], [str(test_width)])
			T.assert_equal(query_params['token'], [expected_token])
    def test_request_data_from_ds(self):
        """Checks that _request_data_from_ds knows how to ask for data from
        other data servers correctly.

        TODO(fhats): This can certainly benefit from some integration tests.
        """
        ds = self.data_source.data_sources[0]
        test_sources = [['src.EndpointTIming', 'stat.A', 'variant.logged_in'],
                        ['src.ErrorCount', 'stat.B', 'variant.logged_out']]
        test_start, test_end, test_width = 100, 200, 30

        # Each requested source should be prefixed with the data source hash.
        expected_sources = map(lambda x: [ds['data_source_hash']] + x,
                               test_sources)
        expected_token = util.generate_access_token(ds['secret_key'])

        with self._patch_urlopen() as mock_urlopen:
            # Just sprinkle some mock data to return
            mock_data = [{'t': 150, 'v': [5, 6]}, {'t': 160, 'v': [9, 10]}]
            mock_urlopen.return_value = StringIO(json.dumps(mock_data))
            returned_data = self.data_source._request_data_from_ds(
                ds, test_sources, test_start, test_end, test_width)

            # Exactly one request, made with a single positional URL arg.
            T.assert_equal(mock_urlopen.call_count, 1)
            T.assert_equal(mock_data, returned_data)  # 'cause why not?
            T.assert_equal(len(mock_urlopen.call_args[0]), 1)

            parsed_url = urlparse.urlparse(mock_urlopen.call_args[0][0])
            query_params = urlparse.parse_qs(parsed_url.query)

            T.assert_equal(parsed_url.scheme, 'http')
            T.assert_equal(parsed_url.path, '/data')
            T.assert_equal(parsed_url.fragment, '')

            T.assert_equal(query_params['sources'],
                           [json.dumps(expected_sources)])
            T.assert_equal(query_params['start'], [str(test_start)])
            T.assert_equal(query_params['end'], [str(test_end)])
            T.assert_equal(query_params['width'], [str(test_width)])
            T.assert_equal(query_params['token'], [expected_token])