def verify_link(user):
    """ Used for verifying an e-mail address when the user clicks the link in the verification mail.

    Expects ``code`` and ``email`` as query-string arguments; anything else is a 400.
    Every outcome (success or failure) redirects back to the dashboard 'emails' view
    with a ``msg`` query parameter describing the result.
    """
    proofing_user = ProofingUser.from_user(user, current_app.private_userdb)
    code = request.args.get('code')
    email = request.args.get('email')
    if code and email:
        current_app.logger.debug('Trying to save email address {} as verified for user {}'.format(email, proofing_user))
        # Base redirect URL; the query string is replaced below per outcome.
        url = urlappend(current_app.config['DASHBOARD_URL'], 'emails')
        scheme, netloc, path, query_string, fragment = urlsplit(url)
        try:
            state = current_app.proofing_statedb.get_state_by_eppn_and_email(proofing_user.eppn, email)
            # Timeout unit is presumably hours (default 24) — TODO confirm against is_expired().
            timeout = current_app.config.get('EMAIL_VERIFICATION_TIMEOUT', 24)
            if state.is_expired(timeout):
                # Expired state is removed so a new verification mail can be requested.
                current_app.logger.info("Verification code is expired. Removing the state")
                current_app.logger.debug("Proofing state: {}".format(state))
                current_app.proofing_statedb.remove_state(state)
                new_query_string = urlencode({'msg': ':ERROR:emails.code_invalid_or_expired'})
                url = urlunsplit((scheme, netloc, path, new_query_string, fragment))
                return redirect(url)
        except DocumentDoesNotExist:
            # No pending verification for this (eppn, email) pair.
            current_app.logger.info('Could not find proofing state for email {}'.format(email))
            new_query_string = urlencode({'msg': ':ERROR:emails.unknown_email'})
            url = urlunsplit((scheme, netloc, path, new_query_string, fragment))
            return redirect(url)
        if code == state.verification.verification_code:
            try:
                verify_mail_address(state, proofing_user)
                current_app.logger.info('Email successfully verified')
                current_app.logger.debug('Email address: {}'.format(email))
                new_query_string = urlencode({'msg': 'emails.verification-success'})
                url = urlunsplit((scheme, netloc, path, new_query_string, fragment))
                return redirect(url)
            except UserOutOfSync:
                # Central userdb has a newer version of the user than our private copy.
                current_app.logger.info('Could not confirm email, data out of sync')
                current_app.logger.debug('Mail address: {}'.format(email))
                new_query_string = urlencode({'msg': ':ERROR:user-out-of-sync'})
                url = urlunsplit((scheme, netloc, path, new_query_string, fragment))
                return redirect(url)
        # Code did not match the stored verification code.
        current_app.logger.info("Invalid verification code")
        current_app.logger.debug("Email address: {}".format(state.verification.email))
        new_query_string = urlencode({'msg': ':ERROR:emails.code_invalid_or_expired'})
        url = urlunsplit((scheme, netloc, path, new_query_string, fragment))
        return redirect(url)
    # Missing code and/or email query arguments.
    abort(400)
def authorize(user):
    """Start an ORCID OIDC authorization flow for *user*.

    If the user has no ORCID connected yet, (re)use or create an
    OrcidProofingState (holding the OAuth ``state`` and OIDC ``nonce``)
    and redirect the browser to the ORCID authorization endpoint.
    If an ORCID is already connected, redirect back to the dashboard
    account-linking view with an error message.
    """
    if user.orcid is None:
        proofing_state = current_app.proofing_statedb.get_state_by_eppn(user.eppn, raise_on_missing=False)
        if not proofing_state:
            current_app.logger.debug('No proofing state found for user {!s}. Initializing new proofing state.'.format(
                user))
            # state/nonce are random values later checked in the authorization response.
            proofing_state = OrcidProofingState({'eduPersonPrincipalName': user.eppn, 'state': get_unique_hash(),
                                                 'nonce': get_unique_hash()})
            current_app.proofing_statedb.save(proofing_state)

        # Request the ORCID id claim in the userinfo response.
        claims_request = ClaimsRequest(userinfo=Claims(id=None))
        oidc_args = {
            'client_id': current_app.oidc_client.client_id,
            'response_type': 'code',
            'scope': 'openid',
            'claims': claims_request.to_json(),
            'redirect_uri': url_for('orcid.authorization_response', _external=True),
            'state': proofing_state.state,
            'nonce': proofing_state.nonce,
        }
        authorization_url = '{}?{}'.format(current_app.oidc_client.authorization_endpoint, urlencode(oidc_args))
        current_app.logger.debug('Authorization url: {!s}'.format(authorization_url))
        current_app.stats.count(name='authn_request')
        return redirect(authorization_url)

    # Orcid already connected to user
    url = urlappend(current_app.config['DASHBOARD_URL'], 'accountlinking')
    scheme, netloc, path, query_string, fragment = urlsplit(url)
    new_query_string = urlencode({'msg': ':ERROR:orc.already_connected'})
    url = urlunsplit((scheme, netloc, path, new_query_string, fragment))
    return redirect(url)
def _build_url(self): url = list(urlsplit(self._path)) qs = parse_qs(url[3]) qs["count"] = self._count qs["page"] = self._page url[3] = urlencode(qs, doseq=True) return urlunsplit(url)
def redirect_action():
    """Run the pending action and then redirect to the actions-app authn view,
    keeping the original request's scheme, host, query string and fragment."""
    # Setup a redirect url to action app root
    scheme, netloc, path, query_string, fragment = urlsplit(request.url)
    path = url_for('actions.authn')
    return_url = urlunsplit((scheme, netloc, path, query_string, fragment))
    # TODO: Look in ret to figure out if we need to add a query string with a user message
    ret = _do_action()
    return redirect(return_url)
def redirect_action():
    """Run the pending action and then redirect to the actions-app authn view,
    keeping the original request's scheme, host, query string and fragment.

    The action's result is intentionally discarded for now (see TODO below).
    """
    # Setup a redirect url to action app root
    scheme, netloc, path, query_string, fragment = urlsplit(request.url)
    path = url_for('actions.authn')
    return_url = urlunsplit((scheme, netloc, path, query_string, fragment))
    # TODO: Look in ret to figure out if we need to add a query string with a user message
    _ = _do_action()
    return redirect(return_url)
def forward(apps, schema_editor):
    """Django data migration (forward direction).

    1. Strips embedded ``user@`` credentials from private formula URIs.
    2. Deduplicates formulas that now share a URI, repointing blueprint
       host components at the surviving formula before deleting dupes.
    3. Re-imports all public formulas from their repositories.
    4. Removes the obsolete on-disk formula directory.
    """
    # Use historical models, as required inside migrations.
    FormulaComponent = apps.get_model('formulas', 'FormulaComponent')
    Formula = apps.get_model('formulas', 'Formula')

    formulas = {}

    for formula in Formula.objects.all():
        # Get the username out of the URI if it's a private formula
        if formula.git_username:
            parse_res = urlsplit(formula.uri)
            if '@' in parse_res.netloc:
                # Keep only the host part after the last '@'.
                new_netloc = parse_res.netloc.split('@')[-1]
                formula.uri = urlunsplit((
                    parse_res.scheme,
                    new_netloc,
                    parse_res.path,
                    parse_res.query,
                    parse_res.fragment
                ))
                formula.save()

        # First formula seen for a URI becomes the canonical one.
        if formula.uri not in formulas:
            formulas[formula.uri] = formula
            continue

        # Otherwise we need to delete the formula and everything associated with it
        for component in FormulaComponent.objects.filter(formula=formula):
            for bhfc in component.blueprinthostformulacomponent_set.all():
                try:
                    # Repoint at the equivalent component on the canonical formula.
                    bhfc.component = FormulaComponent.objects.get(sls_path=bhfc.component.sls_path,
                                                                  formula=formulas[formula.uri])
                    bhfc.save()
                except FormulaComponent.DoesNotExist:
                    # No equivalent exists — move the component itself instead.
                    bhfc.component.formula = formulas[formula.uri]
                    bhfc.component.save()
            component.delete()
        formula.delete()

    # re-import all the formulas
    for formula in Formula.objects.all():
        # there's nothing we can do about private repos without having the password :(
        if not formula.git_username:
            try:
                import_formula(formula.id, '')
            except FormulaTaskException as e:
                if 'SPECFILE' in e.message:
                    print('Skipping import of formula: {0}'.format(formula.uri))
                else:
                    raise
        else:
            print('Please manually update this formula via the API: {0}'.format(formula.uri))

    # remove the old ones
    old_formula_dir = os.path.join(settings.STACKDIO_CONFIG['storage_root'], 'user_states')
    if os.path.isdir(old_formula_dir):
        shutil.rmtree(old_formula_dir)
def relurl(url, starturl):
    """Works like :py:func:`os.path.relpath`, but for urls

    >>> relurl("http://example.org/other/index.html", "http://example.org/main/index.html") == '../other/index.html'
    True
    >>> relurl("http://other.org/foo.html", "http://example.org/bar.html") == 'http://other.org/foo.html'
    True
    """
    target = urlsplit(url)
    start = urlsplit(starturl)
    # Compare only scheme + netloc (path/query/fragment blanked out).
    blank = tuple('' for _ in range(3))
    if urlunsplit(target[:2] + blank) != urlunsplit(start[:2] + blank):
        # different domain, no relative url possible
        return url
    rel = posixpath.relpath(target.path, posixpath.dirname(start.path))
    return urlunsplit(('', '', rel, target.query, target.fragment))
def set_query_param(url, name, value):
    """Return *url* with query parameter *name* set to *value*,
    replacing any existing values for that name."""
    parts = urlsplit(url)
    params = parse_qs(parts.query)
    params[name] = [value]
    encoded = urlencode(params, doseq=True)
    # urlencode always yields str; re-encode when the caller passed a bytes url
    # so urlunsplit gets components of one consistent type.
    if isinstance(parts.scheme, bytes):
        encoded = encoded.encode('utf-8')
    return urlunsplit((parts.scheme, parts.netloc, parts.path, encoded, parts.fragment))
def mfa_authentication_action(session_info: Mapping[str, Any], user: User) -> WerkzeugResponse:
    """Handle the SAML response of an eIDAS MFA authentication.

    Validates LoA, re-authentication freshness and that the asserted
    personalIdentityNumber matches one of the user's verified NINs, then
    marks the MFA action as successful in the session and redirects back
    to the actions app's redirect-action view.
    """
    relay_state = request.form.get('RelayState')
    current_app.logger.debug('RelayState: {}'.format(relay_state))
    redirect_url = None
    if 'eidas_redirect_urls' in session:
        # One-shot lookup: the stored redirect url is consumed here.
        redirect_url = session['eidas_redirect_urls'].pop(relay_state, None)
    if not redirect_url:
        # With no redirect url just redirect the user to dashboard for a new try to log in
        # TODO: This will result in a error 400 until we put the authentication in the session
        current_app.logger.error('Missing redirect url for mfa authentication')
        return redirect_with_msg(current_app.config.action_url, EidasMsg.no_redirect_url)

    # We get the mfa authentication views "next" argument as base64 to avoid our request
    # sanitation replacing '&' with '&amp;'.
    redirect_url = base64.b64decode(redirect_url).decode('utf-8')
    # TODO: Rename verify_relay_state to verify_redirect_url
    redirect_url = verify_relay_state(redirect_url)

    if not is_required_loa(session_info, 'loa3'):
        return redirect_with_msg(redirect_url, EidasMsg.authn_context_mismatch)

    if not is_valid_reauthn(session_info):
        return redirect_with_msg(redirect_url, EidasMsg.reauthn_expired)

    # Check that a verified NIN is equal to the asserted attribute personalIdentityNumber
    _personal_idns = get_saml_attribute(session_info, 'personalIdentityNumber')
    if _personal_idns is None:
        current_app.logger.error(
            'Got no personalIdentityNumber attributes. pysaml2 without the right attribute_converter?'
        )
        # TODO: change to reasonable redirect_with_msg when the ENUM work for that is merged
        raise RuntimeError('Got no personalIdentityNumber')

    asserted_nin = _personal_idns[0]
    user_nin = user.nins.verified.find(asserted_nin)
    if not user_nin:
        current_app.logger.error('Asserted NIN not matching user verified nins')
        current_app.logger.debug('Asserted NIN: {}'.format(asserted_nin))
        current_app.stats.count(name='mfa_auth_nin_not_matching')
        return redirect_with_msg(redirect_url, EidasMsg.nin_not_matching)

    # Record the successful MFA authentication in the session for the actions app.
    session.mfa_action.success = True
    session.mfa_action.issuer = session_info['issuer']
    session.mfa_action.authn_instant = session_info['authn_info'][0][2]
    session.mfa_action.authn_context = get_authn_ctx(session_info)
    current_app.stats.count(name='mfa_auth_success')
    current_app.stats.count(name=f'mfa_auth_{session_info["issuer"]}_success')

    # Redirect back to action app but to the redirect-action view
    resp = redirect_with_msg(redirect_url, EidasMsg.action_completed, error=False)
    scheme, netloc, path, query_string, fragment = urlsplit(resp.location)
    new_path = urlappend(path, 'redirect-action')
    new_url = urlunsplit((scheme, netloc, new_path, query_string, fragment))
    current_app.logger.debug(f'Redirecting to: {new_url}')
    return redirect(new_url)
def generate_link_timemap_from_cdxj_lines(cdxj_lines, original, tm_self, tg_uri):
    """Build an RFC 7089 application/link-format TimeMap string from CDXJ lines.

    Each CDXJ line is "<surt-uri> <14-digit-datetime> <json>"; first/last
    mementos get the corresponding rel markers.
    """
    tmurl = get_proxied_urit(tm_self)
    if app.proxy is not None:
        # Rewrite self/timegate URIs to point at the configured proxy.
        tm_self = urlunsplit(tmurl)
        tg_uri = urlunsplit(get_proxied_urit(tg_uri))

    # Extract and trim for host:port prepending
    tmurl[2] = ''  # Clear TM path
    host_and_port = f'{urlunsplit(tmurl)}/'

    # unsurted URI will never have a scheme, add one
    original_uri = f'http://{unsurt(original)}'

    tm_data = f'<{original_uri}>; rel="original",\n'
    tm_data += f'<{tm_self}>; rel="self timemap"; '
    tm_data += 'type="application/link-format",\n'
    cdxj_tm_uri = tm_self.replace('/timemap/link/', '/timemap/cdxj/')
    tm_data += f'<{cdxj_tm_uri}>; rel="timemap"; '
    tm_data += 'type="application/cdxj+ors",\n'
    tm_data += f'<{tg_uri}>; rel="timegate"'

    for i, line in enumerate(cdxj_lines):
        (surt_uri, datetime, json) = line.split(' ', 2)
        dt_rfc1123 = ipwb_utils.digits14_to_rfc1123(datetime)
        first_last_str = ''

        # Mark first/last mementos per RFC 7089 conventions.
        if len(cdxj_lines) > 1:
            if i == 0:
                first_last_str = 'first '
            elif i == len(cdxj_lines) - 1:
                first_last_str = 'last '
        elif len(cdxj_lines) == 1:
            first_last_str = 'first last '

        tm_data += (
            f',\n<{host_and_port}memento/{datetime}/{unsurt(surt_uri)}>; '
            f'rel="{first_last_str}memento"; datetime="{dt_rfc1123}"')

    return f'{tm_data}\n'
def generateCDXJTimeMapFromCDXJLines(cdxjLines, original, tmself, tgURI):
    """Build a CDXJ-format (application/cdxj+ors) TimeMap string from CDXJ lines.

    Legacy camelCase variant; emits !context/!id/!keys/!meta header lines
    followed by one record per memento.
    """
    tmurl = getProxiedURIT(tmself)
    if app.proxy is not None:
        # Rewrite self/timegate URIs to point at the configured proxy.
        tmself = urlunsplit(tmurl)
        tgURI = urlunsplit(getProxiedURIT(tgURI))

    # unsurted URI will never have a scheme, add one
    originalURI = 'http://{0}'.format(unsurt(original))

    tmData = '!context ["http://tools.ietf.org/html/rfc7089"]\n'
    tmData += '!id {{"uri": "{0}"}}\n'.format(tmself)
    tmData += '!keys ["memento_datetime_YYYYMMDDhhmmss"]\n'
    tmData += '!meta {{"original_uri": "{0}"}}\n'.format(originalURI)
    tmData += '!meta {{"timegate_uri": "{0}"}}\n'.format(tgURI)
    linkTMURI = tmself.replace('/timemap/cdxj/', '/timemap/link/')
    tmData += ('!meta {{"timemap_uri": {{'
               '"link_format": "{0}", '
               '"cdxj_format": "{1}"'
               '}}}}\n').format(linkTMURI, tmself)
    # Everything up to (and excluding) 'timemap/' is the scheme://host:port/ prefix.
    hostAndPort = tmself[0:tmself.index('timemap/')]

    for i, line in enumerate(cdxjLines):
        (surtURI, datetime, json) = line.split(' ', 2)
        dtRFC1123 = ipwbUtils.digits14ToRFC1123(datetime)
        firstLastStr = ''

        # Mark first/last mementos per RFC 7089 conventions.
        if len(cdxjLines) > 1:
            if i == 0:
                firstLastStr = 'first '
            elif i == len(cdxjLines) - 1:
                firstLastStr = 'last '
        elif len(cdxjLines) == 1:
            firstLastStr = 'first last '

        tmData += ('{1} {{'
                   '"uri": "{0}memento/{1}/{2}", '
                   '"rel": "{3}memento", '
                   '"datetime"="{4}"}}\n').format(
            hostAndPort, datetime, unsurt(surtURI), firstLastStr, dtRFC1123)

    return tmData
def generateLinkTimeMapFromCDXJLines(cdxjLines, original, tmself, tgURI):
    """Build an RFC 7089 application/link-format TimeMap string from CDXJ lines.

    Legacy camelCase variant of the link-format TimeMap generator.
    """
    tmurl = getProxiedURIT(tmself)
    if app.proxy is not None:
        # Rewrite self/timegate URIs to point at the configured proxy.
        tmself = urlunsplit(tmurl)
        tgURI = urlunsplit(getProxiedURIT(tgURI))

    # Extract and trim for host:port prepending
    tmurl[2] = ''  # Clear TM path
    hostAndPort = urlunsplit(tmurl) + '/'

    # unsurted URI will never have a scheme, add one
    originalURI = 'http://{0}'.format(unsurt(original))

    tmData = '<{0}>; rel="original",\n'.format(originalURI)
    tmData += '<{0}>; rel="self timemap"; '.format(tmself)
    tmData += 'type="application/link-format",\n'
    cdxjTMURI = tmself.replace('/timemap/link/', '/timemap/cdxj/')
    tmData += '<{0}>; rel="timemap"; '.format(cdxjTMURI)
    tmData += 'type="application/cdxj+ors",\n'
    tmData += '<{0}>; rel="timegate"'.format(tgURI)

    for i, line in enumerate(cdxjLines):
        (surtURI, datetime, json) = line.split(' ', 2)
        dtRFC1123 = ipwbUtils.digits14ToRFC1123(datetime)
        firstLastStr = ''

        # Mark first/last mementos per RFC 7089 conventions.
        if len(cdxjLines) > 1:
            if i == 0:
                firstLastStr = 'first '
            elif i == len(cdxjLines) - 1:
                firstLastStr = 'last '
        elif len(cdxjLines) == 1:
            firstLastStr = 'first last '

        tmData += ',\n<{0}memento/{1}/{2}>; rel="{3}memento"; datetime="{4}"' \
            .format(hostAndPort, datetime, unsurt(surtURI), firstLastStr, dtRFC1123)

    return tmData + '\n'
def generate_cdxj_timemap_from_cdxj_lines(cdxj_lines, original, tm_self, tg_uri):
    """Build a CDXJ-format (application/cdxj+ors) TimeMap string from CDXJ lines.

    Snake_case refactor of generateCDXJTimeMapFromCDXJLines; emits
    !context/!id/!keys/!meta header lines followed by one record per memento.
    """
    tmurl = get_proxied_urit(tm_self)
    if app.proxy is not None:
        # Rewrite self/timegate URIs to point at the configured proxy.
        tm_self = urlunsplit(tmurl)
        tg_uri = urlunsplit(get_proxied_urit(tg_uri))

    # unsurted URI will never have a scheme, add one
    original_uri = f'http://{unsurt(original)}'

    tm_data = '!context ["http://tools.ietf.org/html/rfc7089"]\n'
    tm_data += f'!id {{"uri": "{tm_self}"}}\n'
    tm_data += '!keys ["memento_datetime_YYYYMMDDhhmmss"]\n'
    tm_data += f'!meta {{"original_uri": "{original_uri}"}}\n'
    tm_data += f'!meta {{"timegate_uri": "{tg_uri}"}}\n'
    link_tm_uri = tm_self.replace('/timemap/cdxj/', '/timemap/link/')
    # FIX: restore the '", "' separator after link_format that the legacy
    # generator emitted (the refactor had dropped the space, changing output).
    tm_data += (f'!meta {{"timemap_uri": {{'
                f'"link_format": "{link_tm_uri}", '
                f'"cdxj_format": "{tm_self}"'
                f'}}}}\n')
    # Everything up to (and excluding) 'timemap/' is the scheme://host:port/ prefix.
    host_and_port = tm_self[0:tm_self.index('timemap/')]

    for i, line in enumerate(cdxj_lines):
        (surt_uri, datetime, json) = line.split(' ', 2)
        dt_rfc1123 = ipwb_utils.digits14_to_rfc1123(datetime)
        first_last_str = ''

        # Mark first/last mementos per RFC 7089 conventions.
        if len(cdxj_lines) > 1:
            if i == 0:
                first_last_str = 'first '
            elif i == len(cdxj_lines) - 1:
                first_last_str = 'last '
        elif len(cdxj_lines) == 1:
            first_last_str = 'first last '

        # FIX: memento URIs must be unsurted, matching the legacy generator
        # and the link-format generators; the refactor emitted the raw SURT.
        tm_data += (f'{datetime} {{'
                    f'"uri": "{host_and_port}memento/{datetime}/{unsurt(surt_uri)}", '
                    f'"rel": "{first_last_str}memento", '
                    f'"datetime"="{dt_rfc1123}"}}\n')

    return tm_data
def make_url(data):
    """Construct a URL from a `dict` of components.

    Uses `scheme`, `netloc`, `username`, `password`, `hostname`, `host`,
    `port` (see `make_netloc`), `path`, `query`, and `fragment`.
    """
    encoded_query = urlencode(data.get('query', {}))
    components = (
        data.get('scheme'),
        make_netloc(data),
        data.get('path'),
        encoded_query,
        data.get('fragment'),
    )
    # Falsy components (None, '') are normalized to '' for urlunsplit.
    return urlunsplit(part or '' for part in components)
def clone_to_temp(formula, git_password):
    """Clone the formula's git repo into a fresh temp dir and return the repo path.

    For private repos the password is temporarily embedded in the clone URI
    and scrubbed from the git config and reflogs afterwards.
    Raises FormulaTaskException when the clone fails.
    """
    # temporary directory to clone into so we can read the
    # SPECFILE and do some initial validation
    tmpdir = mkdtemp(prefix='stackdio-')

    reponame = formula.get_repo_name()
    repodir = os.path.join(tmpdir, reponame)

    origin = None
    repo = None

    uri = formula.uri
    # Add the password for a private repo
    # NOTE(review): username/password are not URL-quoted here — special
    # characters (e.g. '@', ':') in credentials would corrupt the URI; confirm.
    if formula.private_git_repo:
        parsed = urlsplit(uri)
        uri = urlunsplit((
            parsed.scheme,
            '{0}:{1}@{2}'.format(formula.git_username, git_password, parsed.netloc),
            parsed.path,
            parsed.query,
            parsed.fragment
        ))

    try:
        # Clone the repo into a temp dir
        repo = git.Repo.clone_from(uri, repodir)
        origin = repo.remotes.origin.name
    except git.GitCommandError:
        raise FormulaTaskException(
            formula,
            'Unable to clone provided URI. This is either not '
            'a git repository, or you don\'t have permission to clone it. '
            'Note that private repositories require the https protocol.')
    finally:
        # Cleanup runs even when the exception above propagates.
        if repo and not origin:
            origin = repo.remotes.origin.name

        if repo and formula and formula.private_git_repo:
            # remove the password from the config
            repo.git.remote('set-url', origin, formula.uri)

            # Remove the logs which also store the password
            log_dir = os.path.join(repodir, '.git', 'logs')
            if os.path.isdir(log_dir):
                shutil.rmtree(log_dir)

    # return the path where the repo is
    return repodir
def _SerializeRequest(self, request): """Convert a http_wrapper.Request object into a string. Args: request: A http_wrapper.Request to serialize. Returns: The request as a string in application/http format. """ # Construct status line parsed = urllib_parse.urlsplit(request.url) request_line = urllib_parse.urlunsplit( (None, None, parsed.path, parsed.query, None)) status_line = u' '.join(( request.http_method, request_line.decode('utf-8'), u'HTTP/1.1\n' )) major, minor = request.headers.get( 'content-type', 'application/json').split('/') msg = mime_nonmultipart.MIMENonMultipart(major, minor) # MIMENonMultipart adds its own Content-Type header. # Keep all of the other headers in `request.headers`. for key, value in request.headers.items(): if key == 'content-type': continue msg[key] = value msg['Host'] = parsed.netloc msg.set_unixfrom(None) if request.body is not None: msg.set_payload(request.body) # Serialize the mime message. str_io = six.StringIO() # maxheaderlen=0 means don't line wrap headers. gen = generator.Generator(str_io, maxheaderlen=0) gen.flatten(msg, unixfrom=False) body = str_io.getvalue() # Strip off the \n\n that the MIME lib tacks onto the end of the # payload. if request.body is None: body = body[:-2] return status_line + body
def _get_hash(self, url, checksum=None):
    """
    For Api V2, the cached downloads always have recipe and package
    REVISIONS in the URL, making them immutable, and perfect for cached
    downloads of artifacts. For V2 checksum will always be None.

    For ApiV1, the checksum is obtained from the server via
    "get_snapshot()" methods, but the URL in the apiV1 contains the
    signature=xxx for signed urls, but that can change, so better strip it
    from the URL before the hash.
    """
    if not self._user_download:
        # Strip query and fragment — removes ?signature=xxx from V1 signed urls.
        parts = urlsplit(url)
        url = urlunsplit((parts.scheme, parts.netloc, parts.path, "", ""))
    if checksum is not None:
        url += checksum
    return sha256_sum(url.encode())
def issues_callback(request):
    # Simulate a paginated issues endpoint: three pages of two bugs each,
    # with RFC 5988-style Link headers for next/last/first/prev.
    split = urlsplit(request.url)
    base_url = urlunsplit((split.scheme, split.netloc, split.path, "", ""))
    page = int(parse_qs(split.query).get("page", ["1"])[0])

    links = []
    if page != 3:
        # Not on the last page: advertise next and last.
        links.append('<%s?page=%d>; rel="next"' % (base_url, page + 1))
        links.append('<%s?page=3>; rel="last"' % base_url)
    if page != 1:
        # Not on the first page: advertise first and prev.
        links.append('<%s?page=1>; rel="first"' % base_url)
        links.append('<%s?page=%d>; rel="prev"' % (base_url, page - 1))

    window = slice((page - 1) * 2, page * 2)
    return (200, {"Link": ", ".join(links)}, json.dumps(self.sample_bugs[window]))
def __call__(self, value):
    """Validate *value* as a URL; on failure, retry once with the
    domain part IDNA-encoded (IDN -> ACE) before giving up."""
    try:
        super(URLValidator, self).__call__(value)
    except ValidationError as e:
        # Trivial case failed. Try for possible IDN domain.
        if not value:
            raise
        scheme, netloc, path, query, fragment = urlsplit(value)
        try:
            netloc = netloc.encode('idna').decode('ascii')  # IDN -> ACE
        except UnicodeError:
            # Domain part is not valid IDNA either; surface the original error.
            raise e
        retry_url = urlunsplit((scheme, netloc, path, query, fragment))
        super(URLValidator, self).__call__(retry_url)
def mfa_authentication_action(session_info, user):
    """Handle the SAML response of an eIDAS MFA authentication (legacy, untyped version).

    Validates LoA, re-authentication freshness and that the asserted
    personalIdentityNumber matches one of the user's verified NINs, then
    marks the MFA action as successful and redirects back to the actions
    app's redirect-action view.
    """
    relay_state = request.form.get('RelayState')
    current_app.logger.debug('RelayState: {}'.format(relay_state))
    redirect_url = None
    if 'eidas_redirect_urls' in session:
        # One-shot lookup: the stored redirect url is consumed here.
        redirect_url = session['eidas_redirect_urls'].pop(relay_state, None)
    if not redirect_url:
        # With no redirect url just redirect the user to dashboard for a new try to log in
        # TODO: This will result in a error 400 until we put the authentication in the session
        current_app.logger.error('Missing redirect url for mfa authentication')
        return redirect_with_msg(current_app.config['ACTION_URL'], ':ERROR:eidas.no_redirect_url')

    # We get the mfa authentication views "next" argument as base64 to avoid our request
    # sanitation replacing '&' with '&amp;'.
    redirect_url = base64.b64decode(redirect_url).decode('utf-8')
    # TODO: Rename verify_relay_state to verify_redirect_url
    redirect_url = verify_relay_state(redirect_url)

    if not is_required_loa(session_info, 'loa3'):
        return redirect_with_msg(redirect_url, ':ERROR:eidas.authn_context_mismatch')

    if not is_valid_reauthn(session_info):
        return redirect_with_msg(redirect_url, ':ERROR:eidas.reauthn_expired')

    # Check that a verified NIN is equal to the asserted attribute personalIdentityNumber
    # NOTE(review): get_saml_attribute may return None if the attribute is absent,
    # making this [0] a TypeError — the newer typed variant guards this; confirm.
    asserted_nin = get_saml_attribute(session_info, 'personalIdentityNumber')[0]
    user_nin = user.nins.verified.find(asserted_nin)
    if not user_nin:
        current_app.logger.error('Asserted NIN not matching user verified nins')
        current_app.logger.debug('Asserted NIN: {}'.format(asserted_nin))
        return redirect_with_msg(redirect_url, ':ERROR:eidas.nin_not_matching')

    # Record the successful MFA authentication in the session for the actions app.
    session.mfa_action.success = True
    session.mfa_action.issuer = session_info['issuer']
    session.mfa_action.authn_instant = session_info['authn_info'][0][2]
    session.mfa_action.authn_context = get_authn_ctx(session_info)

    # Redirect back to action app but to the redirect-action view
    resp = redirect_with_msg(redirect_url, 'actions.action-completed')
    scheme, netloc, path, query_string, fragment = urlsplit(resp.location)
    new_path = urlappend(path, 'redirect-action')
    new_url = urlunsplit((scheme, netloc, new_path, query_string, fragment))
    current_app.logger.debug(f'Redirecting to: {new_url}')
    return redirect(new_url)
def validate(self, attrs):
    """Serializer-level validation for formula import/update.

    On import (no instance) with a git username, requires exactly one of
    access_token / git_password. Also strips embedded 'user@' credentials
    from the repo URI. Raises serializers.ValidationError with per-field
    messages on failure; returns the (possibly modified) attrs.
    """
    git_username = attrs.get('git_username')

    errors = {}

    if git_username and not self.instance:
        # We only need validation if a non-empty username is provided
        # We only care about this if we're importing
        access_token = attrs.get('access_token')
        git_password = attrs.get('git_password')

        # Exactly one of token/password must be supplied.
        if not access_token and not git_password:
            err_msg = 'Your git password is required if you\'re not using an access token.'
            errors.setdefault('access_token', []).append(err_msg)
            errors.setdefault('git_password', []).append(err_msg)

        if access_token and git_password:
            err_msg = 'If you are using an access_token, you may not provide a password.'
            errors.setdefault('access_token', []).append(err_msg)
            errors.setdefault('git_password', []).append(err_msg)

    # On update, fall back to the stored URI when none was submitted.
    if self.instance:
        uri = attrs.get('uri') if 'uri' in attrs else self.instance.uri
    else:
        uri = attrs['uri']

    # Remove the git username from the uri if it's a private formula
    if git_username:
        parse_res = urlsplit(uri)
        if '@' in parse_res.netloc:
            # Keep only the host part after the last '@'.
            new_netloc = parse_res.netloc.split('@')[-1]
            attrs['uri'] = urlunsplit((
                parse_res.scheme,
                new_netloc,
                parse_res.path,
                parse_res.query,
                parse_res.fragment,
            ))

    if errors:
        raise serializers.ValidationError(errors)

    return attrs
def storageFromURL(url, read_only=None):
    """Open and return a ZODB storage described by *url* via zodburi.

    A bare path (no scheme) is treated as file://. When *read_only* is
    given, it is appended to the URL query so the resolved storage honors it.
    """
    # no schema -> file://
    if "://" not in url:
        url = "file://" + url

    # read_only -> url
    if read_only is not None:
        parts = urlsplit(url)
        # XXX this won't have effect with zconfig:// but for file:// neo://
        # zeo:// etc ... it works
        if parts.scheme != "zconfig":
            separator = "&" if len(parts.query) > 0 else ""
            amended_query = parts.query + separator + ("read_only=%s" % read_only)
            url = urlunsplit((parts.scheme, parts.netloc, parts.path, amended_query, parts.fragment))

    factory, dbkw = zodburi.resolve_uri(url)
    return factory()
def set_query_params(url, **kwargs):
    """
    Set or replace query parameters in a URL.

    >>> set_query_params('http://example.com?foo=bar&biz=baz', foo='stuff')
    'http://example.com?foo=stuff&biz=baz'

    :param url: URL
    :type url: str
    :param kwargs: Query parameters
    :type kwargs: dict
    :return: Modified URL
    :rtype: str
    """
    scheme, netloc, path, query_string, fragment = urlsplit(url)
    query_params = parse_qs(query_string)
    # BUG FIX: dict has no .populate() — the original raised AttributeError on
    # every call. update() merges/overwrites the parameters as intended.
    query_params.update(kwargs)
    new_query_string = urlencode(query_params, doseq=True)
    return urlunsplit((scheme, netloc, path, new_query_string, fragment))
def test(self, test_step):
    """
    Test the response by additionally testing that the response redirects
    to an expected route as defined by :py:attr:`expected_route_name`.
    """
    # Run the base validator's checks first (e.g. that a redirect happened).
    super(RedirectToRouteValidator, self).test(test_step)

    location = self.step.response['Location']
    # remove schema, query string and host from URL. Since query string need to be removed to properly resolve that url
    location = urlunsplit(('', '', urlsplit(location).path, '', ''))

    try:
        redirected_to_route = resolve(location).view_name
    except Resolver404:
        # Location path is not routable at all — fail with context.
        msg = '{} returned a redirect to "{}" which cannot be resolved'
        self.step.test.fail(
            msg.format(self._get_base_error_message(), location))

    self.step.test.assertEqual(
        redirected_to_route,
        self.expected_route_name,
        '{} returned redirect to route {} != {}'
        ''.format(self._get_base_error_message(),
                  redirected_to_route,
                  self.expected_route_name))
def authorization_response(user):
    """Handle the redirect back from ORCID after authorization.

    Exchanges the authorization code for tokens, validates nonce and sub,
    fetches userinfo, logs a proofing entry and saves the ORCID element on
    the user. Every outcome redirects to the dashboard account-linking view
    with a ``msg`` query parameter describing the result.
    """
    # Redirect url for user feedback
    url = urlappend(current_app.config['DASHBOARD_URL'], 'accountlinking')
    scheme, netloc, path, query_string, fragment = urlsplit(url)

    current_app.stats.count(name='authn_response')

    # parse authentication response
    query_string = request.query_string.decode('utf-8')
    current_app.logger.debug('query_string: {!s}'.format(query_string))

    authn_resp = current_app.oidc_client.parse_response(AuthorizationResponse, info=query_string,
                                                        sformat='urlencoded')
    current_app.logger.debug('Authorization response received: {!s}'.format(authn_resp))

    if authn_resp.get('error'):
        # NOTE(review): format string has three {!s} slots but four args —
        # error_description is silently dropped by str.format; confirm intent.
        current_app.logger.error('AuthorizationError {!s} - {!s} ({!s})'.format(
            request.host, authn_resp['error'], authn_resp.get('error_message'),
            authn_resp.get('error_description')))
        new_query_string = urlencode({'msg': ':ERROR:orc.authorization_fail'})
        url = urlunsplit((scheme, netloc, path, new_query_string, fragment))
        return redirect(url)

    user_oidc_state = authn_resp['state']
    proofing_state = current_app.proofing_statedb.get_state_by_oidc_state(user_oidc_state, raise_on_missing=False)
    if not proofing_state:
        current_app.logger.error('The \'state\' parameter ({!s}) does not match a user state.'.format(user_oidc_state))
        new_query_string = urlencode({'msg': ':ERROR:orc.unknown_state'})
        url = urlunsplit((scheme, netloc, path, new_query_string, fragment))
        return redirect(url)

    # do token request
    args = {
        'code': authn_resp['code'],
        'redirect_uri': url_for('orcid.authorization_response', _external=True),
    }
    current_app.logger.debug('Trying to do token request: {!s}'.format(args))
    token_resp = current_app.oidc_client.do_access_token_request(scope='openid', state=authn_resp['state'],
                                                                 request_args=args,
                                                                 authn_method='client_secret_basic')
    current_app.logger.debug('token response received: {!s}'.format(token_resp))
    id_token = token_resp['id_token']
    if id_token['nonce'] != proofing_state.nonce:
        # Nonce mismatch indicates a possible replayed/forged response.
        current_app.logger.error('The \'nonce\' parameter does not match for user')
        new_query_string = urlencode({'msg': ':ERROR:orc.unknown_nonce'})
        url = urlunsplit((scheme, netloc, path, new_query_string, fragment))
        return redirect(url)

    current_app.logger.info('ORCID authorized for user')

    # do userinfo request
    current_app.logger.debug('Trying to do userinfo request:')
    userinfo = current_app.oidc_client.do_user_info_request(
        method=current_app.config['USERINFO_ENDPOINT_METHOD'], state=authn_resp['state'])
    current_app.logger.debug('userinfo received: {!s}'.format(userinfo))
    if userinfo['sub'] != id_token['sub']:
        current_app.logger.error('The \'sub\' of userinfo does not match \'sub\' of ID Token for user {!s}.'.format(
            proofing_state.eppn))
        new_query_string = urlencode({'msg': ':ERROR:orc.sub_mismatch'})
        url = urlunsplit((scheme, netloc, path, new_query_string, fragment))
        return redirect(url)

    # Save orcid and oidc data to user
    current_app.logger.info('Saving ORCID data for user')
    proofing_user = ProofingUser.from_user(user, current_app.private_userdb)
    oidc_id_token = OidcIdToken(iss=id_token['iss'], sub=id_token['sub'], aud=id_token['aud'],
                                exp=id_token['exp'], iat=id_token['iat'], nonce=id_token['nonce'],
                                auth_time=id_token['auth_time'], application='orcid')
    oidc_authz = OidcAuthorization(access_token=token_resp['access_token'],
                                   token_type=token_resp['token_type'], id_token=oidc_id_token,
                                   expires_in=token_resp['expires_in'],
                                   refresh_token=token_resp['refresh_token'], application='orcid')
    orcid_element = Orcid(id=userinfo['id'], name=userinfo['name'], given_name=userinfo['given_name'],
                          family_name=userinfo['family_name'], verified=True, oidc_authz=oidc_authz,
                          application='orcid')
    orcid_proofing = OrcidProofing(proofing_user, created_by='orcid', orcid=orcid_element.id,
                                   issuer=orcid_element.oidc_authz.id_token.iss,
                                   audience=orcid_element.oidc_authz.id_token.aud, proofing_method='oidc',
                                   proofing_version='2018v1')

    # Only persist the ORCID on the user when the proofing log entry saved.
    if current_app.proofing_log.save(orcid_proofing):
        current_app.logger.info('ORCID proofing data saved to log')
        proofing_user.orcid = orcid_element
        save_and_sync_user(proofing_user)
        current_app.logger.info('ORCID proofing data saved to user')
        new_query_string = urlencode({'msg': 'orc.authorization_success'})
    else:
        current_app.logger.info('ORCID proofing data NOT saved, failed to save proofing log')
        new_query_string = urlencode({'msg': ':ERROR:Temporary technical problems'})

    # Clean up
    current_app.logger.info('Removing proofing state')
    current_app.proofing_statedb.remove_state(proofing_state)
    url = urlunsplit((scheme, netloc, path, new_query_string, fragment))
    return redirect(url)
def make_query(url, page_number, per_page):
    """Return *url* with its query string replaced by pagination parameters.

    Any existing query string is discarded; scheme, host, path and fragment
    are preserved.
    """
    parsed = urlparse(url)
    pagination = 'page={page}&per_page={per_page}'.format(
        page=page_number, per_page=per_page)
    components = (parsed.scheme, parsed.netloc, parsed.path, pagination, parsed.fragment)
    return urlunsplit(components)
def on_message(self, message):
    """Proxy one HTTP request described by a JSON WebSocket message.

    The message must contain the keys in ``self._REQUIRED_KEYS``; the code
    below reads 'method', 'path', 'message_id' and (optionally) 'body'.
    The request is re-sent to this server's own protocol/host, and the
    response is streamed back over the WebSocket through a
    ``_StreamingResponseEmitter``.

    NOTE(review): the ``yield`` below makes this a generator — presumably
    decorated as a Tornado coroutine outside this view; confirm.
    """
    try:
        contents = json.loads(message)
    except ValueError:
        # Malformed JSON: log the raw payload and answer with a 400.
        self.log.debug('Bad JSON: %r', message)
        self.log.error('Couldn\'t parse JSON', exc_info=True)
        self._write_error(status=400, msg='JSON input is required.')
        return
    # Reject requests that do not carry every required key.
    if not set(contents.keys()).issuperset(self._REQUIRED_KEYS):
        msg = (
            'Invalid request. The body must contain the following keys: '
            '{required}. Got: {got}').format(required=self._REQUIRED_KEYS,
                                            got=contents.keys())
        self._write_error(status=400, msg=msg)
        return
    method = str(contents['method']).upper()
    # Forward the XSRF token as a query argument for state-changing methods.
    query = ''
    if method in self._REQUIRE_XSRF_FORWARDING_METHODS:
        xsrf = self.get_cookie('_xsrf')
        if xsrf:
            query += '_xsrf={}'.format(xsrf)
    body = contents.get('body')
    # Some methods (per self._REQUIRE_NO_BODY_METHODS) must not carry a body.
    if method in self._REQUIRE_NO_BODY_METHODS and body is not None:
        msg = (
            'Invalid request: Body may not be specified for method "{}".'
        ).format(method)
        self._write_error(status=400, msg=msg)
        return
    # Rebuild the target URL against this server's own protocol and host.
    path = urlparse.urlunsplit(
        urlparse.SplitResult(scheme=self.request.protocol,
                             netloc=self.request.host,
                             path=contents['path'],
                             query=query,
                             fragment=''))
    emitter = _StreamingResponseEmitter(contents['message_id'],
                                        self.write_message)
    proxy_request = httpclient.HTTPRequest(
        url=path,
        method=method,
        headers=self.request.headers,
        body=contents.get('body'),
        ca_certs=self.ca_certs,
        header_callback=emitter.header_callback,
        streaming_callback=emitter.streaming_callback)
    _modify_proxy_request_test_only(proxy_request)
    http_client = self._get_http_client()
    # Since this channel represents a proxy, don't raise errors directly and
    # instead send them back in the response.
    # The response contents will normally be captured by
    # _StreamingResponseEmitter. However, if a programming error occurs with how
    # the proxy is set up, these callbacks will not be used.
    response = yield http_client.fetch(proxy_request, raise_error=False)
    if response.error and not isinstance(response.error,
                                         httpclient.HTTPError):
        with stack_context.ExceptionStackContext(self._log_fetch_error):
            # Rethrow the exception to capture the stack trace and write
            # an error message.
            response.rethrow()
        self._write_error(
            status=500,
            msg=('Uncaught server-side exception. Check logs for '
                 'additional details.'))
        return
    emitter.done()
def _normalize_url(url, charset='utf-8'): """ Normalizes a URL. Based on http://code.google.com/p/url-normalize.""" def _clean(string): string = str(unquote(string), 'utf-8', 'replace') return unicodedata.normalize('NFC', string).encode('utf-8') default_port = { 'ftp': 21, 'telnet': 23, 'http': 80, 'gopher': 70, 'news': 119, 'nntp': 119, 'prospero': 191, 'https': 443, 'snews': 563, 'snntp': 563, } # if there is no scheme use http as default scheme if url[0] not in ['/', '-'] and ':' not in url[:7]: url = 'http://' + url # shebang urls support url = url.replace('#!', '?_escaped_fragment_=') # splitting url to useful parts scheme, auth, path, query, fragment = urlsplit(url.strip()) (userinfo, host, port) = re.search('([^@]*@)?([^:]*):?(.*)', auth).groups() # Always provide the URI scheme in lowercase characters. scheme = scheme.lower() # Always provide the host, if any, in lowercase characters. host = host.lower() if host and host[-1] == '.': host = host[:-1] # take care about IDN domains host = host.decode(charset).encode('idna') # IDN -> ACE # Only perform percent-encoding where it is essential. # Always use uppercase A-through-F characters when percent-encoding. # All portions of the URI must be utf-8 encoded NFC from Unicode strings path = quote(_clean(path), "~:/?#[]@!$&'()*+,;=") fragment = quote(_clean(fragment), "~") # note care must be taken to only encode & and = characters as values query = "&".join(["=".join([quote(_clean(t), "~:/?#[]@!$'()*+,;=") \ for t in q.split("=", 1)]) for q in query.split("&")]) # Prevent dot-segments appearing in non-relative URI paths. 
if scheme in ["", "http", "https", "ftp", "file"]: output = [] for part in path.split('/'): if part == "": if not output: output.append(part) elif part == ".": pass elif part == "..": if len(output) > 1: output.pop() else: output.append(part) if part in ["", ".", ".."]: output.append("") path = '/'.join(output) # For schemes that define a default authority, use an empty authority if # the default is desired. if userinfo in ["@", ":@"]: userinfo = "" # For schemes that define an empty path to be equivalent to a path of "/", # use "/". if path == "" and scheme in ["http", "https", "ftp", "file"]: path = "/" # For schemes that define a port, use an empty port if the default is # desired if port and scheme in list(default_port.keys()): if port.isdigit(): port = str(int(port)) if int(port) == default_port[scheme]: port = '' # Put it all back together again auth = (userinfo or "") + host if port: auth += ":" + port if url.endswith("#") and query == "" and fragment == "": path += "#" return urlunsplit((scheme, auth, path, query, fragment))
def git_ssh_url(self):
    """The git+ssh:// URL for this branch, adjusted for this user.

    Takes the repository's git+ssh URL and rewrites its network location to
    ``<user>@<hostname>`` (hostname is lowercased, as ``SplitResult.hostname``
    reports it).
    """
    parts = urlsplit(self.context.repository.git_ssh_url)
    user_netloc = "{}@{}".format(self.user.name, parts.hostname)
    return urlunsplit(parts._replace(netloc=user_netloc))
def _relative(split_result, path): new_split = split_result._replace(path=urljoin(split_result.path, path)) return urlunsplit(new_split)
def method(self, *path_args):
    """Proxy the incoming request to the configured upstream URL.

    Generator coroutine shared by all HTTP verbs of this handler.  It:

    1. hands WebSocket upgrade requests over to ``WebSocketHandler.get``;
    2. builds outgoing headers from ``self.request_headers`` config
       ('*' forwards all incoming headers; ``True`` forwards a single one;
       any other value is a template formatted with ``handler=self``);
    3. merges query parameters from ``self.default``, the target URL's own
       query string and ``self.args`` (later sources win);
    4. fetches the upstream URL, runs the optional ``prepare``/``modify``
       hooks, and relays status, headers and body back to the client.
    """
    ws = self.request.headers.get('Upgrade', '') == 'websocket'
    if ws:
        return WebSocketHandler.get(self)
    # Construct HTTP headers: start from all incoming headers only when the
    # config has a '*' entry, then apply per-header overrides.
    headers = HTTPHeaders(self.request.headers
                          if self.request_headers.get('*', None) else {})
    for key, val in self.request_headers.items():
        if key == '*':
            continue
        if val is True:
            # True means "pass this header through unchanged, if present".
            if key in self.request.headers:
                headers[key] = self.request.headers[key]
        else:
            headers[key] = six.text_type(val).format(handler=self)
    # Update query parameters
    # TODO: use a named capture for path_args? This is not the right method
    parts = urlsplit(self.url.format(*path_args))
    params = {
        key: ([six.text_type(v).format(handler=self) for v in val]
              if isinstance(val, list)
              else six.text_type(val).format(handler=self))
        for key, val in self.default.items()
    }
    params.update(parse_qs(parts.query))
    params.update(self.args)
    query = urlencode(params, doseq=True)
    url = urlunsplit(
        (parts.scheme, parts.netloc, parts.path, query, parts.fragment))
    request = HTTPRequest(
        url=url,
        method=self.request.method,
        headers=headers,
        # GET requests must not carry a body.
        body=None if self.request.method == 'GET' else self.request.body,
        connect_timeout=self.connect_timeout,
        request_timeout=self.request_timeout,
    )
    if 'prepare' in self.info:
        self.info['prepare'](handler=self, request=request, response=None)
    app_log.debug('%s: proxying %s', self.name, url)
    response = yield self.browser.fetch(request, raise_error=False)
    if response.code in (MOVED_PERMANENTLY, FOUND):
        location = response.headers.get('Location', '')
        # TODO: check if Location: header MATCHES the url, not startswith
        # url: example.org/?x should match Location: example.org/?a=1&x
        # even though location does not start with url.
        if location.startswith(url):
            # BUG FIX: the original replaced the literal string 'url', which
            # can never rewrite the redirect target that was just matched
            # with startswith(url).  Replace the proxied URL prefix with this
            # handler's configured pattern so redirects point back at the
            # proxy.
            response.headers['Location'] = location.replace(
                url, self.conf.pattern)
    if 'modify' in self.info:
        self.info['modify'](handler=self, request=request, response=response)
    # Pass on the headers as-is, but override with the handler HTTP headers
    self.set_header('X-Proxy-Url', response.effective_url)
    for header_name, header_value in response.headers.items():
        # Hop-by-hop / length headers are recomputed below, not forwarded.
        if header_name not in {
                'Connection', 'Transfer-Encoding', 'Content-Length'}:
            self.set_header(header_name, header_value)
    # Proxies may send the wrong Content-Length. Correct it, else Tornado
    # raises an error
    if response.body is not None:
        self.set_header('Content-Length', len(response.body))
    for header_name, header_value in self.headers.items():
        self.set_header(header_name, header_value)
    # Pass on HTTP status code and response body as-is
    self.set_status(response.code, reason=response.reason)
    if response.body is not None:
        self.write(response.body)
def update_formula(formula_id, git_password, version, repodir=None, raise_exception=True):
    """Pull the latest changes for a formula's git repository and re-validate it.

    :param formula_id: primary key of the Formula to update.
    :param git_password: password used (with ``formula.git_username``) when the
        repository is private; injected into the remote URL for the pull only
        and scrubbed again in the ``finally`` block.
    :param version: git ref (branch/tag/commit) checked out before pulling.
    :param repodir: working directory of the clone; defaults to
        ``formula.get_repo_dir()``.
    :param raise_exception: when True, failures raise FormulaTaskException;
        otherwise they are only logged.
    :return: True on success (including the no-change case); None when an
        error occurred and ``raise_exception`` is False.
    """
    repo = None
    current_commit = None
    formula = None
    origin = None
    try:
        formula = Formula.objects.get(pk=formula_id)
        formula.set_status(Formula.IMPORTING, 'Updating formula.')
        if repodir is None:
            repodir = formula.get_repo_dir()
            repo = formula.repo
        else:
            repo = git.Repo(repodir)
        # Ensure that the proper version is active
        repo.git.checkout(version)
        # Remember where we were so a failed pull can be rolled back.
        current_commit = repo.head.commit
        origin = repo.remotes.origin.name
        # Add the username / password for a private repo
        if formula.private_git_repo:
            parsed = urlsplit(formula.uri)
            uri = urlunsplit((
                parsed.scheme,
                '{0}:{1}@{2}'.format(formula.git_username, git_password, parsed.netloc),
                parsed.path,
                parsed.query,
                parsed.fragment
            ))
            # add the password to the config
            repo.git.remote('set-url', origin, uri)
        result = repo.remotes.origin.pull()
        # A single fetch-info whose commit matches HEAD means nothing changed.
        if len(result) == 1 and result[0].commit == current_commit:
            formula.set_status(Formula.COMPLETE, 'There were no changes to the repository.')
            return True
        formula_title, formula_description, root_path, components = validate_specfile(formula, repodir)
        # Validate all the new components
        for component in components:
            validate_component(formula, repodir, component)
        # Everything was validated, update the database
        formula.title = formula_title
        formula.description = formula_description
        formula.root_path = root_path
        formula.save()
        formula.set_status(Formula.COMPLETE, 'Import complete. Formula is now ready to be used.')
        return True
    except Exception as e:
        # Roll back the pull
        if repo is not None and current_commit is not None:
            repo.git.reset('--hard', current_commit)
        if isinstance(e, FormulaTaskException):
            if raise_exception:
                # NOTE(review): ``e.message`` is a Python-2-only attribute;
                # on Python 3 this line itself raises AttributeError —
                # confirm the runtime this task targets.
                raise FormulaTaskException(
                    formula,
                    e.message + ' Your formula was not changed.'
                )
        logger.warning(e)
        if raise_exception:
            raise FormulaTaskException(
                formula,
                'An unhandled exception occurred. Your formula was not changed'
            )
    finally:
        # Always scrub the password from the remote URL, success or failure.
        if formula and origin and formula.private_git_repo:
            # remove the password from the config
            repo.git.remote('set-url', origin, formula.uri)
            # Remove the logs which also store the password
            log_dir = os.path.join(repodir, '.git', 'logs')
            if os.path.isdir(log_dir):
                shutil.rmtree(log_dir)
def on_message(self, message):
    """Proxy one HTTP request described by a JSON WebSocket message.

    The message must contain the keys in ``self._REQUIRED_KEYS``; the code
    below reads 'method', 'path', 'message_id' and an optional body (either
    plain 'body' or base64-encoded 'body_base64').  The request is re-sent
    to this server's own protocol/host and the response is streamed back
    through a ``_StreamingResponseEmitter``.

    NOTE(review): the ``yield`` statements make this a generator — presumably
    decorated as a Tornado ``gen`` coroutine outside this view (it exits via
    ``raise gen.Return()``); confirm.
    """
    try:
        contents = json.loads(message)
    except ValueError:
        # Malformed JSON: log the raw payload and answer with a 400.
        self.log.debug('Bad JSON: %r', message)
        self.log.error("Couldn't parse JSON", exc_info=True)
        self._write_error(status=400, msg='JSON input is required.')
        raise gen.Return()
    # Reject requests that do not carry every required key.
    if not set(contents.keys()).issuperset(self._REQUIRED_KEYS):
        msg = (
            'Invalid request. The body must contain the following keys: '
            '{required}. Got: {got}').format(required=self._REQUIRED_KEYS,
                                            got=contents.keys())
        self._write_error(status=400, msg=msg,
                          message_id=contents.get('message_id'))
        raise gen.Return()
    message_id = contents['message_id']
    # Attach auth cookies before proxying; failures are reported back on the
    # channel (tagged with message_id) rather than raised to the caller.
    try:
        yield self._attach_auth_cookies()
    except Exception:  # pylint:disable=broad-except
        self.log.error("Couldn't attach auth cookies")
        self._on_unhandled_exception(message_id)
        raise gen.Return()
    method = str(contents['method']).upper()
    # Forward the XSRF token as a query argument for state-changing methods.
    query = ''
    if method in self._REQUIRE_XSRF_FORWARDING_METHODS:
        xsrf = self._get_xsrf_cookie()
        if xsrf:
            query += '_xsrf={}'.format(xsrf)
    # Rebuild the target URL against this server's own protocol and host.
    path = urlparse.urlunsplit(  # pylint:disable=too-many-function-args
        urlparse.SplitResult(scheme=self.request.protocol,
                             netloc=self.request.host,
                             path=contents['path'],
                             query=query,
                             fragment=''))
    # The body may arrive base64-encoded (binary-safe transport) or as
    # plain text under 'body'.
    body = None
    if contents.get('body_base64'):
        body = base64.b64decode(
            contents.get('body_base64')).decode('utf-8')
    else:
        body = contents.get('body')
    emitter = _StreamingResponseEmitter(message_id, self.write_message)
    proxy_request = httpclient.HTTPRequest(
        url=path,
        method=method,
        headers=self.request.headers,
        body=body,
        ca_certs=self.ca_certs,
        header_callback=emitter.header_callback,
        streaming_callback=emitter.streaming_callback,
        allow_nonstandard_methods=True)
    _modify_proxy_request_test_only(proxy_request)
    http_client = self._get_http_client()
    # Since this channel represents a proxy, don't raise errors directly and
    # instead send them back in the response.
    # The response contents will normally be captured by
    # _StreamingResponseEmitter. However, if a programming error occurs with
    # how the proxy is set up, these callbacks will not be used.
    response = yield http_client.fetch(proxy_request, raise_error=False)
    if response.error and not isinstance(response.error,
                                         httpclient.HTTPError):
        try:
            response.rethrow()
        except Exception:  # pylint:disable=broad-except
            # Rethrow the exception to capture the stack trace and write
            # an error message.
            self.log.exception('Uncaught error when proxying request')
            self._on_unhandled_exception(message_id)
        raise gen.Return()
    emitter.done()