def test_parse_input_params_success(self, mock_get_user_id, mock_is_authenticated, mock_get_input):
    """Parsed filters from a signed request include the user's OpenID.

    Date strings must be converted with the configured input format,
    and the remaining raw filters must pass through unchanged.
    """
    date_format = '%Y-%m-%d %H:%M:%S'
    self.CONF.set_override('input_date_format', date_format, 'api')
    raw_filters = {
        const.START_DATE: '2015-03-26 15:04:40',
        const.END_DATE: '2015-03-26 15:04:50',
        const.CPID: '12345',
        const.SIGNED: True
    }
    fake_params = mock.Mock()
    mock_get_input.return_value = raw_filters

    expected_result = {
        const.START_DATE: timeutils.parse_strtime(
            raw_filters[const.START_DATE], date_format),
        const.END_DATE: timeutils.parse_strtime(
            raw_filters[const.END_DATE], date_format),
        const.CPID: '12345',
        const.SIGNED: True,
        const.OPENID: 'fake_id',
    }

    result = api_utils.parse_input_params(fake_params)

    self.assertEqual(expected_result, result)
    mock_get_input.assert_called_once_with(fake_params)
def test_parse_input_params_success(
        self, mock_get_user_pubkeys, mock_get_user_id,
        mock_is_authenticated, mock_get_input):
    """Signed-request filters carry the OpenID and the user's pubkeys.

    Each pubkey record is flattened into "<format> <pubkey>" form in
    the parsed output.
    """
    date_format = "%Y-%m-%d %H:%M:%S"
    self.CONF.set_override("input_date_format", date_format, "api")
    raw_filters = {
        const.START_DATE: "2015-03-26 15:04:40",
        const.END_DATE: "2015-03-26 15:04:50",
        const.CPID: "12345",
        const.SIGNED: True,
    }
    mock_get_user_pubkeys.return_value = (
        {"format": "fake", "pubkey": "fake_pk"},
    )
    fake_params = mock.Mock()
    mock_get_input.return_value = raw_filters

    expected_result = {
        const.START_DATE: timeutils.parse_strtime(
            raw_filters[const.START_DATE], date_format),
        const.END_DATE: timeutils.parse_strtime(
            raw_filters[const.END_DATE], date_format),
        const.CPID: "12345",
        const.SIGNED: True,
        const.OPENID: "fake_id",
        const.USER_PUBKEYS: ["fake fake_pk"],
    }

    result = api_utils.parse_input_params(fake_params)

    self.assertEqual(result, expected_result)
    mock_get_input.assert_called_once_with(fake_params)
def test_parse_input_params_success(self, mock_get_input):
    """Date and cpid filters are parsed from the raw request input."""
    date_format = '%Y-%m-%d %H:%M:%S'
    self.CONF.set_override('input_date_format', date_format, 'api')
    raw_filters = {
        const.START_DATE: '2015-03-26 15:04:40',
        const.END_DATE: '2015-03-26 15:04:50',
        const.CPID: '12345',
    }
    fake_params = mock.Mock()
    mock_get_input.return_value = raw_filters

    expected_result = {
        const.START_DATE: timeutils.parse_strtime(
            raw_filters[const.START_DATE], date_format),
        const.END_DATE: timeutils.parse_strtime(
            raw_filters[const.END_DATE], date_format),
        const.CPID: '12345'
    }

    result = api_utils.parse_input_params(fake_params)

    self.assertEqual(result, expected_result)
    mock_get_input.assert_called_once_with(fake_params)
def get(self):
    """Get information of all uploaded test results.

    Get information of all uploaded test results in descending
    chronological order. Make it possible to specify some input
    parameters for filtering.
    For example: /v1/results?page=<page number>&cpid=1234.
    By default, page is set to page number 1,
    if the page parameter is not specified.
    """
    expected_input_params = [const.START_DATE, const.END_DATE,
                             const.CPID, const.SIGNED]
    filters = api_utils.parse_input_params(expected_input_params)
    records_count = db.get_test_records_count(filters)
    page_number, total_pages_number = \
        api_utils.get_page_number(records_count)
    try:
        results = db.get_test_records(page_number,
                                      CONF.api.results_per_page,
                                      filters)
        # The template only varies by result id, so build it once.
        url_template = parse.urljoin(CONF.ui_url,
                                     CONF.api.test_results_url)
        for result in results:
            result.update({"url": url_template % result["id"]})
        page = {"results": results,
                "pagination": {"current_page": page_number,
                               "total_pages": total_pages_number}}
    except Exception as ex:
        LOG.debug("An error occurred during "
                  "operation with database: %s" % ex)
        pecan.abort(400)
    return page
def get(self):
    """Return all uploaded test results, newest first.

    Input parameters (start date, end date, cpid) filter the records.
    For example: /v1/results?page=<page number>&cpid=1234.
    By default, page is set to page number 1, if the page parameter
    is not specified.
    """
    expected_input_params = [
        const.START_DATE,
        const.END_DATE,
        const.CPID,
    ]
    try:
        filters = api_utils.parse_input_params(expected_input_params)
        records_count = db.get_test_records_count(filters)
        page_number, total_pages_number = api_utils.get_page_number(
            records_count)
    except api_utils.ParseInputsError as ex:
        # Bad client input gets a 400 with the parse failure reason.
        pecan.abort(400, 'Reason: %s' % ex)
    except Exception as ex:
        LOG.debug('An error occurred: %s' % ex)
        pecan.abort(500)

    try:
        records = db.get_test_records(page_number,
                                      CONF.api.results_per_page,
                                      filters)
        results = [{'test_id': record.id,
                    'created_at': record.created_at,
                    'cpid': record.cpid,
                    'url': CONF.api.test_results_url % record.id}
                   for record in records]
        page = {
            'results': results,
            'pagination': {
                'current_page': page_number,
                'total_pages': total_pages_number
            }
        }
    except Exception as ex:
        LOG.debug('An error occurred during '
                  'operation with database: %s' % ex)
        pecan.abort(400)
    return page
def get(self):
    """List every uploaded test result in descending date order.

    Optional filtering parameters (start date, end date, cpid) may
    be supplied, e.g. /v1/results?page=<page number>&cpid=1234.
    Page defaults to 1 when the page parameter is absent.
    """
    expected_input_params = [
        const.START_DATE,
        const.END_DATE,
        const.CPID,
    ]
    try:
        filters = api_utils.parse_input_params(expected_input_params)
        records_count = db.get_test_records_count(filters)
        page_number, total_pages_number = api_utils.get_page_number(
            records_count)
    except api_utils.ParseInputsError as ex:
        pecan.abort(400, 'Reason: %s' % ex)
    except Exception as ex:
        LOG.debug('An error occurred: %s' % ex)
        pecan.abort(500)

    try:
        per_page = CONF.api.results_per_page
        db_records = db.get_test_records(page_number, per_page, filters)
        # Expose only the public fields of each stored record.
        results = [
            {
                'test_id': record.id,
                'created_at': record.created_at,
                'cpid': record.cpid,
                'url': CONF.api.test_results_url % record.id,
            }
            for record in db_records
        ]
        pagination = {'current_page': page_number,
                      'total_pages': total_pages_number}
        page = {'results': results, 'pagination': pagination}
    except Exception as ex:
        LOG.debug('An error occurred during '
                  'operation with database: %s' % ex)
        pecan.abort(400)
    return page
def get(self):
    """Get information of all uploaded test results.

    Get information of all uploaded test results in descending
    chronological order. Make it possible to specify some input
    parameters for filtering.
    For example: /v1/results?page=<page number>&cpid=1234.
    By default, page is set to page number 1,
    if the page parameter is not specified.
    """
    expected_input_params = [
        const.START_DATE,
        const.END_DATE,
        const.CPID,
        const.SIGNED
    ]
    filters = api_utils.parse_input_params(expected_input_params)
    records_count = db.get_test_records_count(filters)
    page_number, total_pages_number = \
        api_utils.get_page_number(records_count)
    try:
        per_page = CONF.api.results_per_page
        results = db.get_test_records(page_number, per_page, filters)
        for result in results:
            result['url'] = parse.urljoin(
                CONF.ui_url, CONF.api.test_results_url) % result['id']
            # Enrich the record with its cloud's name/description
            # when the cloud is known.
            cloud = db.get_cloud(result['cpid'])
            if cloud:
                result['cloud_name'] = cloud['name']
                result['cloud_description'] = cloud['description']
        page = {
            'results': results,
            'pagination': {
                'current_page': page_number,
                'total_pages': total_pages_number
            }
        }
    except Exception as ex:
        LOG.debug('An error occurred during '
                  'operation with database: %s' % ex)
        pecan.abort(400)
    return page
def get(self):
    """Get information of all products.

    Foundation admins see every product and can manage all of them.
    Other users see all public products (not manageable) plus the
    products they own (manageable); an owned product overrides the
    non-manageable public entry for the same id. The final list is
    sorted by product name.
    """
    filters = api_utils.parse_input_params(['organization_id'])
    allowed_keys = ['id', 'name', 'description', 'product_ref_id',
                    'type', 'product_type', 'public',
                    'organization_id']
    user = api_utils.get_user_id()
    is_admin = user in db.get_foundation_users()
    try:
        if is_admin:
            products = db.get_products(allowed_keys=allowed_keys,
                                       filters=filters)
            for product in products:
                product['can_manage'] = True
        else:
            result = {}
            # Public products are visible to everyone but not
            # manageable by default.
            filters['public'] = True
            products = db.get_products(allowed_keys=allowed_keys,
                                       filters=filters)
            for product in products:
                product['can_manage'] = False
                result[product['id']] = product
            filters.pop('public')
            # Products the user owns are manageable, whether or not
            # they already appeared in the public set.
            products = db.get_products_by_user(
                user, allowed_keys=allowed_keys, filters=filters)
            for product in products:
                _id = product['id']
                if _id not in result:
                    result[_id] = product
                result[_id]['can_manage'] = True
            products = list(result.values())
    except Exception as ex:
        # Lazy %-args let the logging framework do the formatting.
        LOG.exception('An error occurred during '
                      'operation with database: %s', ex)
        pecan.abort(400)
    products.sort(key=lambda x: x['name'])
    return {'products': products}
def get(self):
    """Get information for leader board.

    Requires 'version' and 'target' query parameters; for each shared
    cloud, computes the percentage of reference capability tests that
    the cloud's latest result passed ('N/A' when there are no
    reference tests).

    :raises api_exc.ValidationError: if 'version' or 'target' is
        missing from the input parameters.
    """
    expected_input_params = ['version', 'target']
    params = api_utils.parse_input_params(expected_input_params)
    if 'version' not in params:
        raise api_exc.ValidationError(
            'Version parameters can not be null.')
    if 'target' not in params:
        raise api_exc.ValidationError(
            'Target parameters can not be null.')
    ref_tests = caps_utils.get_capability_tests(params['target'],
                                                params['version'])
    ref_tests_count = len(ref_tests)
    LOG.debug("All tests count: %s" % float(len(ref_tests)))
    clouds = db.get_shared_clouds()
    for cloud in clouds:
        # No reference tests means the score is undefined, not zero.
        if ref_tests_count == 0:
            cloud.update({'coef': 'N/A'})
            continue
        result = db.get_cloud_last_results(cloud['id'])
        tests = result['tests']
        tests_count = 0
        for test in tests:
            if test['name'] in ref_tests:
                tests_count += 1
        # Fixed: previously logged len(tests) (all executed tests)
        # instead of the number that matched the reference set.
        LOG.debug("Passed tests count: %s" % tests_count)
        coef = int(100 * tests_count / float(ref_tests_count))
        cloud.update({'coef': coef,
                      'last_result_date': str(result['date'])})
    # TODO: add paging
    page = {
        'results': clouds,
        'pagination': {
            'current_page': 1,
            'total_pages': 1
        }
    }
    return page
def get(self):
    """Get information of all uploaded test results.

    Get information of all uploaded test results in descending
    chronological order. Make it possible to specify some input
    parameters for filtering.
    For example: /v1/results?page=<page number>&cpid=1234.
    By default, page is set to page number 1,
    if the page parameter is not specified.
    """
    expected_input_params = [
        const.START_DATE,
        const.END_DATE,
        const.CPID,
        const.SIGNED,
        const.VERIFICATION_STATUS,
        const.PRODUCT_ID
    ]
    filters = api_utils.parse_input_params(expected_input_params)
    if const.PRODUCT_ID in filters:
        product = db.get_product(filters[const.PRODUCT_ID])
        vendor_id = product['organization_id']
        # Foundation admins and the product's vendor admins may see
        # every test for the product, including non-public data.
        is_admin = (api_utils.check_user_is_foundation_admin() or
                    api_utils.check_user_is_vendor_admin(vendor_id))
        if is_admin:
            filters[const.ALL_PRODUCT_TESTS] = True
        elif not product['public']:
            # Non-admins may not query results of a private product.
            pecan.abort(403, 'Forbidden.')
    records_count = db.get_test_records_count(filters)
    page_number, total_pages_number = \
        api_utils.get_page_number(records_count)
    try:
        per_page = CONF.api.results_per_page
        results = db.get_test_records(page_number, per_page, filters)
        is_foundation = api_utils.check_user_is_foundation_admin()
        for result in results:
            # Redact fields for users who neither own the result nor
            # belong to the Foundation group.
            if not (api_utils.check_owner(result['id']) or
                    is_foundation):
                # Don't expose product info if the product
                # is not public.
                if (result.get('product_version') and
                        not result['product_version']
                        ['product_info']['public']):
                    result['product_version'] = None
                # Only show all metadata if the user is the owner or a
                # member of the Foundation group.
                result['meta'] = {
                    k: v for k, v in result['meta'].items()
                    if k in MetadataController.rw_access_keys
                }
            result.update({
                'url': parse.urljoin(CONF.ui_url,
                                     CONF.api.test_results_url)
                % result['id']
            })
        page = {
            'results': results,
            'pagination': {
                'current_page': page_number,
                'total_pages': total_pages_number
            }
        }
    except Exception as ex:
        LOG.debug('An error occurred during '
                  'operation with database: %s' % str(ex))
        pecan.abort(500)
    return page