def test_create(self, client, params):
    create_product(client, params)
    history = client.adapter.request_history
    # Requests 0 and 1 are GET requests for the scrapers:
    assert history[0].method == 'GET'
    assert history[1].method == 'GET'
    # This request creates the product:
    assert history[2].method == 'POST'
    assert history[2].url == 'https://errata.devel.redhat.com/products'
    body = parse_qs(history[2].text)
    expected = {
        'product[default_solution_id]': ['2'],
        'product[description]': ['Red Hat Ceph Storage'],
        'product[is_internal]': ['0'],
        'product[isactive]': ['1'],
        'product[move_bugs_on_qe]': ['0'],
        'product[name]': ['Red Hat Ceph Storage'],
        'product[short_name]': ['RHCEPH'],
        'product[state_machine_rule_set_id]': ['1'],
        'product[valid_bug_states][]': ['MODIFIED', 'VERIFIED'],
    }
    assert body == expected
    # GET requests for the scrapers again:
    assert history[4].method == 'GET'
    assert history[5].method == 'GET'
    # This request edits push_targets on the new product:
    assert history[6].method == 'POST'
    assert history[6].url == 'https://errata.devel.redhat.com/products/123'
    body = parse_qs(history[6].text)
    expected['_method'] = ['patch']
    expected['product[push_targets][]'] = ['8', '4']
    assert body == expected
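# Illustrative note (not part of the original tests): the request history entries
# above appear to come from requests_mock, whose .text attribute holds the raw
# form-encoded POST body; parse_qs turns repeated keys into lists, which is why
# every expected value is a list, e.g.
#
#   parse_qs('product[short_name]=RHCEPH'
#            '&product[valid_bug_states][]=MODIFIED'
#            '&product[valid_bug_states][]=VERIFIED')
#   # -> {'product[short_name]': ['RHCEPH'],
#   #     'product[valid_bug_states][]': ['MODIFIED', 'VERIFIED']}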
def update_url_query(url, **kwargs):
    parse_result = urlparse(url)
    query = parse_qs(parse_result.query)
    query.update({k: v for k, v in kwargs.items() if v is not None})
    query_string = urlencode(query, doseq=True)
    parse_result = parse_result._replace(query=query_string)
    return urlunparse(parse_result)
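# Illustrative usage sketch (not part of the original source; the URL is made up):
# update_url_query merges keyword arguments into the existing query string and
# skips any that are None.  The key ordering shown assumes Python 3.7+ dicts.
#
#   update_url_query('https://example.com/path?a=1', b='2', c=None)
#   # -> 'https://example.com/path?a=1&b=2'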
def test_with_exd_org_group(self, client, params):
    params['exd_org_group'] = 'Cloud'
    create_product(client, params)
    history = client.adapter.request_history
    # Requests 0 and 1 are GET requests for the scrapers.
    # This request creates the product:
    assert history[2].method == 'POST'
    assert history[2].url == 'https://errata.devel.redhat.com/products'
    body = parse_qs(history[2].text)
    assert body['product[exd_org_group_id]'] == ['2']
def test_with_docs_reviewer(self, client, params):
    params['default_docs_reviewer'] = '*****@*****.**'
    create_product(client, params)
    history = client.adapter.request_history
    # Requests 0 and 1 are GET requests for the scrapers,
    # request 2 is for the docs_reviewer user ID.
    # This request creates the product:
    assert history[3].method == 'POST'
    assert history[3].url == 'https://errata.devel.redhat.com/products'
    body = parse_qs(history[3].text)
    assert body['product[default_docs_reviewer_id]'] == ['1001']
def test_path_with_query(self, mocker, query):
    request_mock = mocker.patch.object(client, "Request").return_value
    raw_request = mocker.MagicMock(status=200)
    raw_request.read.return_value = "{}"
    c = client.Client("https://instance.com", "user", "pass")
    c.request("GET", "api/now/some/path", query=query)
    request_mock.open.assert_called_once()
    path_arg = request_mock.open.call_args.args[1]
    parsed_query = parse_qs(urlparse(path_arg).query)
    assert parsed_query == dict((k, [str(v)]) for k, v in query.items())
def _extract_query(url):
    query_index = url.find('?')
    fragment_index = url.find('#')
    if query_index > fragment_index and fragment_index > 0:
        query_index = -1
    if query_index < 0:
        return {}
    if fragment_index > 0:
        query = url[query_index + 1:fragment_index]
    else:
        query = url[query_index + 1:]
    return parse_qs(query, keep_blank_values=True)
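# Illustrative behaviour sketch (not part of the original source; the URLs are
# made up): a '?' that only appears after '#' belongs to the fragment, so no
# query is extracted, and keep_blank_values=True preserves empty parameters.
#
#   _extract_query('https://example.com/p?a=1&b=#frag')  # -> {'a': ['1'], 'b': ['']}
#   _extract_query('https://example.com/p#section?a=1')  # -> {}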
def _validate_form(self, call, data):
    form = {}
    if data is not None:
        form = parse_qs(data, keep_blank_values=True)
    for k in call.form_present:
        assert k in form
    for k, v in call.form_values.items():
        if len(v) == 0:
            assert k not in form
        else:
            assert form[k] == v
    for k, v in call.form_values_one.items():
        assert v <= set(form[k])
def _validate_form(call, data):
    '''
    Validate form contents.
    '''
    form = {}
    if data is not None:
        form = parse_qs(to_native(data), keep_blank_values=True)
    for k in call.form_present:
        assert k in form, 'Form key "{0}" not present'.format(k)
    for k, v in call.form_values.items():
        if len(v) == 0:
            assert k not in form, 'Form key "{0}" present, but should be absent'.format(k)
        else:
            assert form[k] == v, 'Form key "{0}" does not have values {1}, but {2}'.format(k, v, form[k])
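# Illustrative note (not part of the original source): keep_blank_values=True is
# what lets the assertions above distinguish a key submitted with an empty value
# from a key that was never submitted at all.
#
#   parse_qs('a=&b=1')                          # -> {'b': ['1']}
#   parse_qs('a=&b=1', keep_blank_values=True)  # -> {'a': [''], 'b': ['1']}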
def test_edit_live(self, client, params):
    params['description'] = 'Red Hat Ceph Storage Is Cool'
    client.adapter.register_uri(
        'POST',
        'https://errata.devel.redhat.com/products',
        status_code=302,
        headers={'Location': 'https://errata.devel.redhat.com/products/104'})
    client.adapter.register_uri(
        'POST',
        'https://errata.devel.redhat.com/products/104',
        status_code=302,
        headers={'Location': 'https://errata.devel.redhat.com/products/104'})
    client.adapter.register_uri(
        'GET',
        'https://errata.devel.redhat.com/products/104')
    result = ensure_product(client, params, check_mode=False)
    assert result['changed'] is True
    expected = 'changing description from Red Hat Ceph Storage ' \
               'to Red Hat Ceph Storage Is Cool'
    if PY2:
        expected = u'changing description from Red Hat Ceph Storage ' \
                   'to Red Hat Ceph Storage Is Cool'
    # XXX BUG, issue 129
    bz_name_hack = 'changing bugzilla_product_name from None to '
    assert set(result['stdout_lines']) == set([expected, bz_name_hack])
    history = client.adapter.request_history
    assert history[-2].method == 'POST'
    assert history[-2].url == \
        'https://errata.devel.redhat.com/products/104'
    body = parse_qs(history[-2].text)
    expected = {
        '_method': ['patch'],
        'product[default_solution_id]': ['2'],
        'product[description]': ['Red Hat Ceph Storage Is Cool'],
        'product[is_internal]': ['0'],
        'product[isactive]': ['1'],
        'product[move_bugs_on_qe]': ['0'],
        'product[name]': ['Red Hat Ceph Storage'],
        'product[push_targets][]': ['3', '5', '9', '8', '4'],
        'product[short_name]': ['RHCEPH'],
        'product[state_machine_rule_set_id]': ['1'],
        'product[valid_bug_states][]': [
            'VERIFIED', 'ON_QA', 'MODIFIED', 'ASSIGNED', 'NEW',
            'ON_DEV', 'POST'
        ],
    }
    assert body == expected
def _call_galaxy(self, url, args=None, headers=None, method=None, auth_required=False,
                 error_context_msg=None, cache=False):
    url_info = urlparse(url)
    cache_id = get_cache_id(url)
    query = parse_qs(url_info.query)
    if cache and self._cache:
        server_cache = self._cache.setdefault(cache_id, {})
        iso_datetime_format = '%Y-%m-%dT%H:%M:%SZ'

        valid = False
        if url_info.path in server_cache:
            expires = datetime.datetime.strptime(
                server_cache[url_info.path]['expires'], iso_datetime_format)
            valid = datetime.datetime.utcnow() < expires

        is_paginated_url = 'page' in query or 'offset' in query
        if valid and not is_paginated_url:
            # Got a hit on the cache and we aren't getting a paginated response
            path_cache = server_cache[url_info.path]
            if path_cache.get('paginated'):
                if '/v3/' in url_info.path:
                    res = {'links': {'next': None}}
                else:
                    res = {'next': None}

                # Technically some v3 paginated APIs return in 'data' but the caller checks the
                # keys for this so always returning the cache under results is fine.
                res['results'] = []
                for result in path_cache['results']:
                    res['results'].append(result)
            else:
                res = path_cache['results']

            return res

        elif not is_paginated_url:
            # The cache entry had expired or does not exist, start a new blank entry to be filled later.
            expires = datetime.datetime.utcnow()
            expires += datetime.timedelta(days=1)
            server_cache[url_info.path] = {
                'expires': expires.strftime(iso_datetime_format),
                'paginated': False,
            }

    headers = headers or {}
    self._add_auth_token(headers, url, required=auth_required)

    try:
        display.vvvv("Calling Galaxy at %s" % url)
        resp = open_url(to_native(url), data=args, validate_certs=self.validate_certs,
                        headers=headers, method=method, timeout=20, http_agent=user_agent(),
                        follow_redirects='safe')
    except HTTPError as e:
        raise GalaxyError(e, error_context_msg)
    except Exception as e:
        raise AnsibleError(
            "Unknown error when attempting to call Galaxy at '%s': %s" % (url, to_native(e)))

    resp_data = to_text(resp.read(), errors='surrogate_or_strict')
    try:
        data = json.loads(resp_data)
    except ValueError:
        raise AnsibleError(
            "Failed to parse Galaxy response from '%s' as JSON:\n%s" % (resp.url, to_native(resp_data)))

    if cache and self._cache:
        path_cache = self._cache[cache_id][url_info.path]

        # v3 can return data or results for paginated results. Scan the result so we can
        # determine what to cache.
        paginated_key = None
        for key in ['data', 'results']:
            if key in data:
                paginated_key = key
                break

        if paginated_key:
            path_cache['paginated'] = True
            results = path_cache.setdefault('results', [])
            for result in data[paginated_key]:
                results.append(result)
        else:
            path_cache['results'] = data

    return data
def __init__(self, url):
    parts = urlparse(url)
    _query = frozenset(parse_qs(parts.query))
    _path = unquote_plus(parts.path)
    self.parts = parts._replace(query=_query, path=_path)
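# Illustrative note (not part of the original source): iterating a dict yields its
# keys, so frozenset(parse_qs(...)) keeps only the query parameter names and discards
# their values, presumably to make the stored parts hashable and order-insensitive
# when URLs are compared.
#
#   parse_qs('a=1&b=2')             # -> {'a': ['1'], 'b': ['2']}
#   frozenset(parse_qs('a=1&b=2'))  # -> frozenset({'a', 'b'})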