def __init__(self, klass, values, meta):
    self.klass = klass
    if 'columns' in meta.keys():
        meta['column_types'] = Util.convert_to_columns_list(meta['columns'], 'type')
        meta['columns'] = Util.convert_to_columns_list(meta['columns'], 'name')
    # Since we are iterating over a list of data, compute the methodized
    # column names only once and pass them down to the objects being created.
    converted_column_names = Util.convert_column_names(meta)
    if hasattr(klass, 'get_code_from_meta'):
        self.values = [klass(klass.get_code_from_meta(x), x, meta=meta,
                             converted_column_names=converted_column_names)
                       for x in values]
    else:
        self.values = [klass(x, meta=meta,
                             converted_column_names=converted_column_names)
                       for x in values]
    self.meta = meta
def page(cls, datatable, **options):
    params = {'id': str(datatable.code)}
    path = Util.constructed_path(datatable.default_path(), params)
    r = Connection.request('get', path, **options)
    response_data = r.json()
    Util.convert_to_dates(response_data)
    resource = cls.create_datatable_list_from_response(response_data)
    return resource
def all(cls, **options):
    if 'params' not in options:
        options['params'] = {}
    path = Util.constructed_path(cls.list_path(), options['params'])
    r = Connection.request('get', path, **options)
    response_data = r.json()
    Util.convert_to_dates(response_data)
    resource = cls.create_list_from_response(response_data)
    return resource
def __get_raw_data__(self):
    # Lazily fetch and cache the raw payload; subsequent calls reuse the
    # cached response instead of issuing another HTTP request.
    if self._raw_data:
        return self._raw_data
    cls = self.__class__
    params = {'id': str(self.code)}
    options = Util.merge_options('params', params, **self.options)
    path = Util.constructed_path(cls.get_path(), options['params'])
    r = Connection.request('get', path, **options)
    response_data = r.json()
    Util.convert_to_dates(response_data)
    self._raw_data = response_data[singularize(cls.lookup_key())]
    return self._raw_data
def test_convert_options_get_request_with_dictionary_params_and_array_values(self):
    options = {'params': {'foo': {'bar': ['baz', 'bax']}}}
    expected_result = {'params': {'foo.bar[]': ['baz', 'bax']}}
    result = Util.convert_options(request_type='get', **options)
    self.assertEqual(cmp(result, expected_result), 0)
def test_convert_to_dates(self):
    d = '2015-04-09'
    dt = '2015-07-24T02:39:40.624Z'
    dic = {'foo': d, d: {'bar': dt}}
    result = Util.convert_to_dates(dic)
    self.assertIsInstance(result['foo'], datetime.date)
    self.assertIsInstance(result[d]['bar'], datetime.datetime)
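# A minimal usage sketch for convert_to_dates (the quandl.util import path is
# an assumption; the call shape and the ISO-8601 inputs come from the test
# above). Both the test and the model code suggest the dictionary is walked
# recursively and converted in place, with the same dictionary returned.
from quandl.util import Util

payload = {'start_date': '2015-04-09',
           'refreshed_at': '2015-07-24T02:39:40.624Z'}
converted = Util.convert_to_dates(payload)
# Date-only strings become datetime.date, full timestamps become
# datetime.datetime.
print(type(converted['start_date']), type(converted['refreshed_at']))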
def test_convert_options_get_request_all_param_types(self):
    options = {
        'params': {
            'foo': 'bar',
            'ticker': ['AAPL', 'MSFT'],
            'per_end_date': {'gte': '2015-01-01'},
            'qopts': {'columns': ['ticker', 'per_end_date'], 'per_page': 5}
        }
    }
    expected_result = {
        'params': {
            'foo': 'bar',
            'qopts.per_page': 5,
            'per_end_date.gte': '2015-01-01',
            'ticker[]': ['AAPL', 'MSFT'],
            'qopts.columns[]': ['ticker', 'per_end_date']
        }
    }
    result = Util.convert_options(request_type='get', **options)
    self.assertEqual(cmp(result, expected_result), 0)
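# A hedged illustration of the flattening rules exercised by these tests
# (the quandl.util import path is an assumption; the rules below are inferred
# from the expected results, not from library documentation):
#   * nested dicts collapse into dotted keys ('per_end_date.gte'),
#   * keys with list values gain a '[]' suffix ('ticker[]'),
#   * scalar params pass through unchanged,
#   * 'get' requests keep the result under 'params', 'post' requests move it
#     under 'json'.
from quandl.util import Util

query = {'params': {'ticker': ['AAPL', 'MSFT'],
                    'per_end_date': {'gte': '2015-01-01'}}}
flattened = Util.convert_options(request_type='get', **query)
# {'params': {'ticker[]': ['AAPL', 'MSFT'], 'per_end_date.gte': '2015-01-01'}}
print(flattened)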
def _request_file_info(self, file_or_folder_path, **options):
    url = self._download_request_path()
    code_name = self.code
    # Ask the API to generate a bulk-download export for this datatable.
    options['params']['qopts.export'] = 'true'
    request_type = RequestType.get_request_type(url, **options)
    updated_options = Util.convert_options(request_type=request_type, **options)
    r = Connection.request(request_type, url, **updated_options)
    response_data = r.json()
    file_info = response_data['datatable_bulk_download']['file']
    status = file_info['status']
    if status == 'fresh':
        # The export file is ready; download it to the requested location.
        file_link = file_info['link']
        self._download_file_with_link(file_or_folder_path, file_link, code_name)
        return True
    else:
        return False
def data(self, **options):
    # handle_not_found_error, if set to True, returns an empty DataList for a
    # non-existent dataset instead of raising an error; handle_column_not_found
    # does the same when a requested column is missing.
    handle_not_found_error = options.pop('handle_not_found_error', False)
    handle_column_not_found = options.pop('handle_column_not_found', False)
    # Default order to ascending, and respect whatever the user passes in.
    params = {
        'database_code': self.database_code,
        'dataset_code': self.dataset_code,
        'order': 'asc'
    }
    updated_options = Util.merge_options('params', params, **options)
    try:
        return Data.all(**updated_options)
    except NotFoundError:
        if handle_not_found_error:
            return DataList(
                Data, [], {'column_names': [six.u('None'), six.u('Not Found')]})
        raise
    except ColumnNotFound:
        if handle_column_not_found:
            return DataList(
                Data, [], {'column_names': [six.u('None'), six.u('Not Found')]})
        raise
def test_merge_options_when_key_doesnt_exist_in_options(self):
    params = {'foo': 'bar', 'foo2': 'bar2'}
    options = {'params': {'foo3': 'bar3'}}
    merged = Util.merge_options('params', params, **options)
    self.assertDictEqual(
        merged, {'params': {'foo': 'bar', 'foo2': 'bar2', 'foo3': 'bar3'}})
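# A hedged sketch of how the model code above combines library defaults with
# caller-supplied request options (import path assumed; the union semantics
# are taken from this test, which merges both dicts under the given key).
from quandl.util import Util

defaults = {'order': 'asc'}
caller_options = {'params': {'start_date': '2015-01-01'}}
merged = Util.merge_options('params', defaults, **caller_options)
print(merged)  # {'params': {'order': 'asc', 'start_date': '2015-01-01'}}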
def __init__(self, data, **options):
    self.meta = options['meta']
    self._raw_data = Util.convert_to_dates(data)
    # Optimization for when a list of data points is created from a dataset
    # (via the model_list class): reuse the column names that were already
    # methodized upstream.
    if 'converted_column_names' in options.keys():
        self._converted_column_names = options['converted_column_names']
def test_convert_options(self):
    options = {
        'params': {
            'ticker': ['AAPL', 'MSFT'],
            'per_end_date': {'gte': {'2015-01-01'}},
            'qopts': {'columns': ['ticker', 'per_end_date'], 'per_page': 5}
        }
    }
    expect_result = {
        'params': {
            'qopts.per_page': 5,
            'per_end_date.gte': set(['2015-01-01']),
            'ticker[]': ['AAPL', 'MSFT'],
            'qopts.columns[]': ['ticker', 'per_end_date']
        }
    }
    result = Util.convert_options(**options)
    self.assertEqual(cmp(result, expect_result), 0)

    options = {
        'params': {
            'ticker': 'AAPL',
            'per_end_date': {'gte': {'2015-01-01'}},
            'qopts': {'columns': ['ticker', 'per_end_date']}
        }
    }
    expect_result = {
        'params': {
            'per_end_date.gte': set(['2015-01-01']),
            'ticker': 'AAPL',
            'qopts.columns[]': ['ticker', 'per_end_date']
        }
    }
    result = Util.convert_options(**options)
    self.assertEqual(cmp(result, expect_result), 0)
def _get_dataset_data(self, dataset, **options):
    updated_options = options
    # If only one column index was requested, let the API do the column
    # filtering, since it supports filtering by a single column_index.
    if len(dataset.requested_column_indexes) == 1:
        params = {'column_index': dataset.requested_column_indexes[0]}
        # Copy so the extra param only applies to this request.
        updated_options = options.copy()
        updated_options = Util.merge_options('params', params, **updated_options)
    return dataset.data(**updated_options)
def test_convert_options_post_request_with_series_params(self):
    options = {'params': {'foo': pandas.Series(['bar', 'baz'])}}
    expected_result = {'json': options['params']}
    result = Util.convert_options(request_type='post', **options)
    self.assertEqual(cmp(result, expected_result), 0)
def test_constructed_path(self):
    path = '/hello/:foo/world/:id'
    params = {'foo': 'bar', 'id': 1, 'another': 'a'}
    result = Util.constructed_path(path, params)
    self.assertEqual(result, '/hello/bar/world/1')
    self.assertDictEqual(params, {'another': 'a'})
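# A hedged sketch of how request paths are built from the templates above
# (import path assumed; the template string below is illustrative, mirroring
# default_path() + '/data' in _bulk_download_path). Note that constructed_path
# consumes the keys it substitutes: 'id' is removed from params, so leftover
# entries such as 'order' remain available as ordinary query parameters.
from quandl.util import Util

params = {'id': '42', 'order': 'asc'}
path = Util.constructed_path('/datasets/:id/data', params)
print(path)    # /datasets/42/data
print(params)  # {'order': 'asc'}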
def _bulk_download_path(self):
    url = self.default_path() + '/data'
    url = Util.constructed_path(url, {'id': self.code})
    return url
def test_methodize(self):
    self.assertEqual(
        Util.methodize(six.u('Hello World...Foo-Bar')), 'hello_worldfoo_bar')
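# A hedged sketch of why methodize matters for the models above: the
# 'methodized column names' comment in the list __init__ suggests that column
# labels are run through it so each column can be exposed as an
# attribute-style accessor. From the test, the rules appear to be: lowercase,
# spaces and hyphens become underscores, other punctuation is dropped. The
# label below is an illustrative assumption, not taken from the tests.
import six
from quandl.util import Util

print(Util.methodize(six.u('Per End Date')))  # expected: 'per_end_date'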
def data_fields(self):
    # Methodized column names are computed lazily and cached; when the object
    # was built through a model list they were already passed in.
    if not self._converted_column_names and self.meta:
        self._converted_column_names = Util.convert_column_names(self.meta)
    return self._converted_column_names
def _download_request_path(self):
    url = self.default_path()
    url = Util.constructed_path(url, {'id': self.code})
    url += '.json'
    return url
def test_convert_options_get_request_with_array_params(self):
    options = {'params': {'foo': ['bar', 'baz']}}
    expected_result = {'params': {'foo[]': ['bar', 'baz']}}
    result = Util.convert_options(request_type='get', **options)
    self.assertEqual(cmp(result, expected_result), 0)
def test_convert_options_post_request_with_dictionary_params(self):
    options = {'params': {'foo': {'bar': 'baz'}}}
    expected_result = {'json': {'foo.bar': 'baz'}}
    result = Util.convert_options(request_type='post', **options)
    self.assertEqual(cmp(result, expected_result), 0)
def test_convert_options_get_request_with_simple_params(self):
    options = {'params': {'foo': 'bar'}}
    expected_result = options
    result = Util.convert_options(request_type='get', **options)
    self.assertEqual(cmp(result, expected_result), 0)
def test_convert_options_post_request_with_empty_params(self):
    options = {'params': {}}
    expected_result = {'json': {}}
    result = Util.convert_options(request_type='post', **options)
    self.assertEqual(cmp(result, expected_result), 0)
def data(self, **options):
    updated_options = Util.convert_options(**options)
    return Data.page(self, **updated_options)
def datasets(self, **options):
    params = {'database_code': self.code, 'query': '', 'page': 1}
    options = Util.merge_options('params', params, **options)
    return quandl.model.dataset.Dataset.all(**options)