Code Example #1
File: aws.py Project: castedo/requestbuilder
def _remove_params_from_url(url):
    """
    Return a copy of a URL with its parameters, fragments, and query
    string removed.
    """
    parsed = urlparse.urlparse(url)
    return urlparse.urlunparse((parsed[0], parsed[1], parsed[2], '', '', ''))
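A minimal, self-contained sketch of the same helper with the Python 3 module layout, showing what it returns for a hypothetical URL:

from urllib.parse import urlparse, urlunparse

def remove_params_from_url(url):
    # Keep only scheme, netloc and path; blank out params, query and fragment.
    parsed = urlparse(url)
    return urlunparse((parsed[0], parsed[1], parsed[2], '', '', ''))

print(remove_params_from_url('https://example.com/a/b;rev=1?q=1#frag'))
# prints: https://example.com/a/b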
Code Example #2
    def get_kodi_header_formatted_url(self, url, options=None):

        if options is None:
            options = {}

        if url.startswith('http'):
            url_parts = urlparse(url)
            url = url_parts.path

            if url_parts.query:
                url = '?'.join([url, url_parts.query])

        access_path_dbl = '/%s/%s/' % \
                          (self.access_path.replace('/', ''), self.access_path.replace('/', ''))
        location = '/'.join([self.get_url_location().rstrip('/'), url.lstrip('/')])
        location = location.replace(access_path_dbl, self.access_path)
        url_parts = urlparse(location)

        query_args = parse_qsl(url_parts.query)
        query_args += options.items()

        if self.token is not None:
            query_args += {
                'X-Plex-Token': self.token
            }.items()

        new_query_args = urlencode(query_args, True)

        return '%s|%s' % (urlunparse((url_parts.scheme, url_parts.netloc,
                                      url_parts.path.replace('//', '/'), url_parts.params,
                                      new_query_args, url_parts.fragment)),
                          self.plex_identification_string)
Code Example #3
File: utils.py Project: Nakaner/postorius-debian
def reorder_request_params(request):
    def reorder_params(params):
        parsed = None
        if PY3:
            if isinstance(params, binary_type):
                params = params.decode("ascii")
            parsed = parse_qsl(params, encoding="utf-8")
        else:
            parsed = parse_qsl(params)
        if parsed:
            return urlencode(sorted(parsed, key=lambda kv: kv[0]))
        else:
            # Parsing failed, it may be a simple string.
            return params
        # sort the URL query-string by key names.
    uri_parts = urlparse(request.uri)
    if uri_parts.query:
        request.uri = urlunparse((
            uri_parts.scheme, uri_parts.netloc, uri_parts.path,
            uri_parts.params, reorder_params(uri_parts.query),
            uri_parts.fragment,
        ))
        # convert the request body to text and sort the parameters.
    if isinstance(request.body, binary_type):
        try:
            request._body = request._body.decode('utf-8')
        except UnicodeDecodeError:
            pass
    if isinstance(request.body, text_type):
        request._body = reorder_params(request._body.encode('utf-8'))
    return request
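The core of this helper is re-encoding a query with its key/value pairs sorted, so two requests that differ only in parameter order compare as equal. A stand-alone sketch of that step with a hypothetical query string:

from urllib.parse import parse_qsl, urlencode

query = "b=2&a=1&a=3"
pairs = parse_qsl(query)                                  # [('b', '2'), ('a', '1'), ('a', '3')]
print(urlencode(sorted(pairs, key=lambda kv: kv[0])))     # a=1&a=3&b=2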
Code Example #4
    def get_sample_data(self, meter_name, parse_url, params, cache):

        extractor = self._get_extractor(meter_name)
        if extractor is None:
            # Getting this meter is not implemented in this driver, or the
            # OpenDaylight REST API does not provide an endpoint for it.
            return None

        iter = self._get_iter(meter_name)
        if iter is None:
            # Getting this meter is not implemented in this driver, or the
            # OpenDaylight REST API does not provide an endpoint for it.
            return None

        parts = urlparse.ParseResult(
            params.get('scheme', ['http'])[0], parse_url.netloc,
            parse_url.path, None, None, None)
        endpoint = urlparse.urlunparse(parts)

        data = self._prepare_cache(endpoint, params, cache)

        samples = []
        if data:
            for sample in iter(extractor, data):
                if sample is not None:
                    # set controller name to resource_metadata
                    sample[2]['controller'] = 'OpenDaylight_V2'
                    samples.append(sample)

        return samples
Code Example #5
    def get_formatted_url(self, url, options=None):

        if options is None:
            options = {}

        url_options = self.plex_identification_header
        url_options.update(options)

        if url.startswith('http'):
            url_parts = urlparse(url)
            url = url_parts.path

            if url_parts.query:
                url = '?'.join([url, url_parts.query])

        access_path_dbl = '/%s/%s/' % \
                          (self.access_path.replace('/', ''), self.access_path.replace('/', ''))
        location = '/'.join([self.get_url_location().rstrip('/'), url.lstrip('/')])
        location = location.replace(access_path_dbl, self.access_path)
        url_parts = urlparse(location)

        query_args = parse_qsl(url_parts.query)
        query_args += url_options.items()

        new_query_args = urlencode(query_args, True)

        return urlunparse((url_parts.scheme, url_parts.netloc, url_parts.path.replace('//', '/'),
                           url_parts.params, new_query_args, url_parts.fragment))
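The query-merging step shared by this example and Code Example #2 amounts to decoding the existing query string, appending extra key/value pairs, and re-encoding with doseq enabled. A stand-alone sketch with a hypothetical Plex URL and token:

from urllib.parse import urlparse, urlunparse, parse_qsl, urlencode

url = 'http://plex.local:32400/library/sections?type=movie'
options = {'X-Plex-Token': 'abc123'}           # hypothetical values

parts = urlparse(url)
query_args = parse_qsl(parts.query) + list(options.items())
new_query = urlencode(query_args, True)
print(urlunparse((parts.scheme, parts.netloc, parts.path,
                  parts.params, new_query, parts.fragment)))
# prints: http://plex.local:32400/library/sections?type=movie&X-Plex-Token=abc123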
Code Example #6
File: x509.py Project: sotte/cryptography
    def __init__(self, value):
        if not isinstance(value, six.text_type):
            raise TypeError("value must be a unicode string")

        parsed = urllib_parse.urlparse(value)
        if not parsed.hostname:
            netloc = ""
        elif parsed.port:
            netloc = (
                idna.encode(parsed.hostname) +
                ":{0}".format(parsed.port).encode("ascii")
            ).decode("ascii")
        else:
            netloc = idna.encode(parsed.hostname).decode("ascii")

        # Note that building a URL in this fashion means it should be
        # semantically indistinguishable from the original but is not
        # guaranteed to be exactly the same.
        uri = urllib_parse.urlunparse((
            parsed.scheme,
            netloc,
            parsed.path,
            parsed.params,
            parsed.query,
            parsed.fragment
        )).encode("ascii")

        self._value = value
        self._encoded = uri
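The netloc rebuilding can be tried in isolation. A minimal sketch, assuming the third-party idna package is available and using a hypothetical URL:

import idna
from urllib.parse import urlparse, urlunparse

value = u"https://bücher.example:8443/path?q=1"
parsed = urlparse(value)
netloc = (idna.encode(parsed.hostname) +
          ":{0}".format(parsed.port).encode("ascii")).decode("ascii")
print(urlunparse((parsed.scheme, netloc, parsed.path, parsed.params,
                  parsed.query, parsed.fragment)))
# prints: https://xn--bcher-kva.example:8443/path?q=1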
Code Example #7
File: driver.py Project: openstack/networking-odl
    def get_sample_data(self, meter_name, parse_url, params, cache):

        extractor = self._get_extractor(meter_name)
        if extractor is None:
            # Getting this meter is not implemented in this driver, or the
            # OpenDaylight REST API does not provide an endpoint for it.
            return None

        iter = self._get_iter(meter_name)
        if iter is None:
            # Getting this meter is not implemented in this driver, or the
            # OpenDaylight REST API does not provide an endpoint for it.
            return None

        parts = urlparse.ParseResult(params.get('scheme', ['http'])[0],
                                     parse_url.netloc,
                                     parse_url.path,
                                     None,
                                     None,
                                     None)
        endpoint = urlparse.urlunparse(parts)

        data = self._prepare_cache(endpoint, params, cache)

        samples = []
        if data:
            for sample in iter(extractor, data):
                if sample is not None:
                    # set controller name to resource_metadata
                    sample[2]['controller'] = 'OpenDaylight_V2'
                    samples.append(sample)

        return samples
Code Example #8
File: views.py Project: ysf002/seahub
def logout(request, next_page=None):
    """Redirects to CAS logout page"""
    # try to find the ticket matching current session for logout signal
    try:
        st = SessionTicket.objects.get(session_key=request.session.session_key)
        ticket = st.ticket
    except SessionTicket.DoesNotExist:
        ticket = None
    # send logout signal
    cas_user_logout.send(
        sender="manual",
        user=request.user,
        session=request.session,
        ticket=ticket,
    )
    auth_logout(request)
    # clean current session ProxyGrantingTicket and SessionTicket
    ProxyGrantingTicket.objects.filter(
        session_key=request.session.session_key).delete()
    SessionTicket.objects.filter(
        session_key=request.session.session_key).delete()
    next_page = next_page or get_redirect_url(request)
    if settings.CAS_LOGOUT_COMPLETELY:
        protocol = get_protocol(request)
        host = request.get_host()
        redirect_url = urllib_parse.urlunparse(
            (protocol, host, next_page, '', '', ''), )
        client = get_cas_client(request=request)
        return HttpResponseRedirect(client.get_logout_url(redirect_url))
    else:
        # This is in most cases pointless unless CAS_RENEW is set: the user will
        # simply be logged in again on the next request that requires authorization.
        return HttpResponseRedirect(next_page)
Code Example #9
File: general_name.py Project: seebees/cryptography
    def value(self):
        warnings.warn(
            "UniformResourceIdentifier.bytes_value should be used instead of "
            "UniformResourceIdentifier.value; it contains the name as raw "
            "bytes, instead of as an idna-decoded unicode string. "
            "UniformResourceIdentifier.value will be removed in a future "
            "version.",
            utils.DeprecatedIn21,
            stacklevel=2)
        parsed = urllib_parse.urlparse(self.bytes_value)
        if not parsed.hostname:
            # There's no idna here so we can immediately return
            return self.bytes_value.decode("utf-8")
        elif parsed.port:
            netloc = idna.decode(parsed.hostname) + ":{0}".format(parsed.port)
        else:
            netloc = idna.decode(parsed.hostname)

        # Note that building a URL in this fashion means it should be
        # semantically indistinguishable from the original but is not
        # guaranteed to be exactly the same.
        return urllib_parse.urlunparse(
            (parsed.scheme.decode('utf8'), netloc, parsed.path.decode('utf8'),
             parsed.params.decode('utf8'), parsed.query.decode('utf8'),
             parsed.fragment.decode('utf8')))
Code Example #10
 def process_request(self, request):
     parts = urllib_parse.urlparse(request.build_absolute_uri())
     parts = parts._replace(path='', params='', query='', fragment='')
     request.uri = DotDict({
         'origin': urllib_parse.urlunparse(parts),  # or parts.geturl()
         'scheme': 'https' if request.is_secure() else 'http',
     })
     return None
Code Example #11
File: parser.py Project: shen-zc/pyraml-parser
def _build_network_relative_path(url):
    p = urlparse.urlparse(url)
    parse_result = urlparse.ParseResult(
        p.scheme,
        p.netloc,
        os.path.dirname(p.path) + '/',
        '', '', '')
    return urlparse.urlunparse(parse_result)
Code Example #12
File: parser.py Project: nfredrik/pyraml-parser
def _build_network_relative_path(url):
    p = urlparse.urlparse(url)
    parse_result = urlparse.ParseResult(
        p.scheme,
        p.netloc,
        os.path.dirname(p.path),
        '', '', '')
    return urlparse.urlunparse(parse_result)
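The two pyraml-parser variants above differ only in whether a trailing slash is appended to the directory part. A stand-alone sketch of the slash-appending variant, with a hypothetical spec URL:

import os.path
from urllib.parse import urlparse, urlunparse, ParseResult

url = 'http://example.com/api/spec/root.raml'
p = urlparse(url)
parse_result = ParseResult(p.scheme, p.netloc, os.path.dirname(p.path) + '/', '', '', '')
print(urlunparse(parse_result))
# prints: http://example.com/api/spec/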
Code Example #13
def iap_request(url, data=None, headers=None):
    if "localhost.newsdev.net" in url:
        resp = requests.post(url, headers=headers, data=data)

    else:
        base_url = urlparse.urlunparse(
            urlparse.urlparse(url)._replace(path='', query='', fragment=''))

        credentials = service_account.Credentials.from_service_account_file(
            GCP_SECRETS)
        bootstrap_credentials = credentials.with_scopes([IAM_SCOPE])

        if isinstance(bootstrap_credentials,
                      google.oauth2.credentials.Credentials):
            raise Exception(
                'make_iap_request is only supported for service accounts.')

        bootstrap_credentials.refresh(Request())

        signer_email = bootstrap_credentials.service_account_email

        if isinstance(bootstrap_credentials,
                      google.auth.compute_engine.credentials.Credentials):
            signer = google.auth.iam.Signer(Request(), bootstrap_credentials,
                                            signer_email)
        else:
            signer = bootstrap_credentials.signer

        service_account_credentials = google.oauth2.service_account.Credentials(
            signer,
            signer_email,
            token_uri=OAUTH_TOKEN_URI,
            additional_claims={'target_audience': base_url})

        google_open_id_connect_token = get_google_open_id_connect_token(
            service_account_credentials)

        # Add the Authorization header, creating the headers dict if none was supplied.
        if not headers:
            headers = {
                'Authorization':
                'Bearer {}'.format(google_open_id_connect_token)
            }
        else:
            headers['Authorization'] = 'Bearer {}'.format(
                google_open_id_connect_token)

        resp = requests.post(url, headers=headers, data=data)

        if resp.status_code == 403:
            raise Exception(
                'Service account {} does not have permission to '
                'access the IAP-protected application.'.format(signer_email))

    if resp.status_code != 200:
        return resp.text

    return resp.text
Code Example #14
 def _auth1(self):
     status = 0
     reason = 'Unknown'
     attempt = 0
     while attempt < self.attempts:
         attempt += 1
         self.verbose('Attempting auth v1 with %s', self.auth_url)
         parsed, conn = self._connect(self.auth_url)
         self.verbose('> GET %s', parsed.path)
         conn.request(
             'GET', parsed.path, '', {
                 'User-Agent': self.user_agent,
                 'X-Auth-User': quote(self.auth_user),
                 'X-Auth-Key': quote(self.auth_key)
             })
         try:
             resp = conn.getresponse()
             status = resp.status
             reason = resp.reason
             self.verbose('< %s %s', status, reason)
             hdrs = headers_to_dict(resp.getheaders())
             resp.read()
             resp.close()
             conn.close()
         except Exception as err:
             status = 0
             reason = str(err)
             hdrs = {}
         if status == 401:
             break
         if status // 100 == 2:
             try:
                 self.storage_url = hdrs['x-storage-url']
             except KeyError:
                 status = 0
                 reason = 'No x-storage-url header'
                 break
             if self.snet:
                 parsed = list(urlparse.urlparse(self.storage_url))
                 # Second item in the list is the netloc
                 parsed[1] = 'snet-' + parsed[1]
                 self.storage_url = urlparse.urlunparse(parsed)
             self.cdn_url = hdrs.get('x-cdn-management-url')
             self.auth_token = hdrs.get('x-auth-token')
             if not self.auth_token:
                 self.auth_token = hdrs.get('x-storage-token')
                 if not self.auth_token:
                     status = 500
                     reason = (
                         'No x-auth-token or x-storage-token header in '
                         'response')
                     break
             self._auth_save_cache()
             break
         elif status // 100 != 5:
             break
         self.client.sleep(2**attempt)
     return status, reason
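The service-net branch near the end simply prefixes the netloc, which is index 1 of the six-item list produced from the parse result. A stand-alone sketch with a made-up storage URL:

from urllib.parse import urlparse, urlunparse

storage_url = 'https://storage101.example.com/v1/AUTH_abc'   # hypothetical
parsed = list(urlparse(storage_url))
parsed[1] = 'snet-' + parsed[1]   # second item in the list is the netloc
print(urlunparse(parsed))
# prints: https://snet-storage101.example.com/v1/AUTH_abc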
Code Example #15
File: auth.py Project: dstufft/fastly-py
    def __init__(self, user, password, session):
        self.user = user
        self.password = password
        self.session = session

        self.cookies = {}
        self.login_url = urllib_parse.urlunparse(
            ["https", API_DOMAIN, "/login", "", "", ""],
        )
Code Example #16
File: utils.py Project: Finc3/social-core
def url_add_parameters(url, params):
    """Adds parameters to URL, parameter will be repeated if already present"""
    if params:
        fragments = list(urlparse(url))
        value = parse_qs(fragments[4])
        value.update(params)
        fragments[4] = urlencode(value)
        url = urlunparse(fragments)
    return url
Code Example #17
File: utils.py Project: tzwenn/social-core
def url_add_parameters(url, params):
    """Adds parameters to URL, parameter will be repeated if already present"""
    if params:
        fragments = list(urlparse(url))
        value = parse_qs(fragments[4])
        value.update(params)
        fragments[4] = urlencode(value)
        url = urlunparse(fragments)
    return url
Code Example #18
 def _next_server(self, cause=None):
     while True:
         url = super(Client, self)._next_server(cause)
         r = urlparse(url)
         ips = self._dns_resolver.resolve(r.hostname)
         if ips:
             netloc = '{0}:{1}'.format(random.choice(ips), r.port)
             self._base_uri_unresolved = url
             return urlunparse((r.scheme, netloc, r.path, r.params, r.query, r.fragment))
Code Example #19
 def _auth1(self):
     status = 0
     reason = 'Unknown'
     attempt = 0
     while attempt < self.attempts:
         attempt += 1
         self.verbose('Attempting auth v1 with %s', self.auth_url)
         parsed, conn = self._connect(self.auth_url)
         self.verbose('> GET %s', parsed.path)
         conn.request(
             'GET', parsed.path, '',
             {'User-Agent': self.user_agent,
              'X-Auth-User': quote(self.auth_user),
              'X-Auth-Key': quote(self.auth_key)})
         try:
             resp = conn.getresponse()
             status = resp.status
             reason = resp.reason
             self.verbose('< %s %s', status, reason)
             hdrs = headers_to_dict(resp.getheaders())
             resp.read()
             resp.close()
             conn.close()
         except Exception as err:
             status = 0
             reason = str(err)
             hdrs = {}
         if status == 401:
             break
         if status // 100 == 2:
             try:
                 self.storage_url = hdrs['x-storage-url']
             except KeyError:
                 status = 0
                 reason = 'No x-storage-url header'
                 break
             if self.snet:
                 parsed = list(urlparse.urlparse(self.storage_url))
                 # Second item in the list is the netloc
                 parsed[1] = 'snet-' + parsed[1]
                 self.storage_url = urlparse.urlunparse(parsed)
             self.cdn_url = hdrs.get('x-cdn-management-url')
             self.auth_token = hdrs.get('x-auth-token')
             if not self.auth_token:
                 self.auth_token = hdrs.get('x-storage-token')
                 if not self.auth_token:
                     status = 500
                     reason = (
                         'No x-auth-token or x-storage-token header in '
                         'response')
                     break
             self._auth_save_cache()
             break
         elif status // 100 != 5:
             break
         self.client.sleep(2 ** attempt)
     return status, reason
Code Example #20
def mantis_login_hook(response, *args, **kwargs):
    """requests hook to automatically log into Mantis anonymously if needed.

    The ALSA bug tracker is the only tested Mantis installation that
    actually needs this. For ALSA bugs, the dance is like so:

      1. We request bug 3301 ('jack sensing problem'):
           https://bugtrack.alsa-project.org/alsa-bug/view.php?id=3301

      2. Mantis redirects us to:
           .../alsa-bug/login_page.php?
                 return=%2Falsa-bug%2Fview.php%3Fid%3D3301

      3. We notice this, rewrite the query, and skip to login.php:
           .../alsa-bug/login.php?
                 return=%2Falsa-bug%2Fview.php%3Fid%3D3301&
                 username=guest&password=guest

      4. Mantis accepts our credentials then redirects us to the bug
         view page via a cookie test page (login_cookie_test.php)
    """
    if response.status_code not in (301, 302, 303, 307):
        return response
    if 'Location' not in response.headers:
        return response

    url = response.headers['Location']
    scheme, host, path, params, query, fragment = urlparse(url)

    # If we can, skip the login page and submit credentials directly.  The
    # query should contain a 'return' parameter which, if our credentials
    # are accepted, means we'll be redirected back whence we came.  In other
    # words, we'll end up back at the bug page we first requested.
    login_page = '/login_page.php'
    if path.endswith(login_page):
        path = path[:-len(login_page)] + '/login.php'
        query_list = [('username', 'guest'), ('password', 'guest')]
        query_list.extend(parse_qsl(query, True))
        if not any(name == 'return' for name, _ in query_list):
            raise BugTrackerConnectError(
                url, ("Mantis redirected us to the login page "
                      "but did not set a return path."))

        query = urlencode(query_list, True)
        url = urlunparse((scheme, host, path, params, query, fragment))

    # Previous versions of the Mantis external bug tracker fetched
    # login_anon.php in addition to the login.php method above, but none of
    # the Mantis installations tested actually needed this.  For example,
    # the ALSA bugtracker actually issues an error "Your account may be
    # disabled" when accessing this page.  For now it's better to *not* try
    # this page because we may end up annoying admins with spurious login
    # attempts.

    response.headers['Location'] = url
    return response
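As a requests response hook, this function is meant to be registered on a session so it runs for every response. A minimal sketch of that wiring (the surrounding session setup is assumed, not shown in the source):

import requests

session = requests.Session()
# requests keeps per-session hooks in a mapping of event name -> list of callables;
# the hook returns every response unchanged unless it is a redirect to login_page.php.
session.hooks['response'].append(mantis_login_hook)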
Code Example #21
def _build_general_name(backend, gn):
    if gn.type == backend._lib.GEN_DNS:
        data = backend._ffi.buffer(gn.d.dNSName.data, gn.d.dNSName.length)[:]
        return x509.DNSName(idna.decode(data))
    elif gn.type == backend._lib.GEN_URI:
        data = backend._ffi.buffer(
            gn.d.uniformResourceIdentifier.data,
            gn.d.uniformResourceIdentifier.length)[:].decode("ascii")
        parsed = urllib_parse.urlparse(data)
        hostname = idna.decode(parsed.hostname)
        if parsed.port:
            netloc = hostname + u":" + six.text_type(parsed.port)
        else:
            netloc = hostname

        # Note that building a URL in this fashion means it should be
        # semantically indistinguishable from the original but is not
        # guaranteed to be exactly the same.
        uri = urllib_parse.urlunparse(
            (parsed.scheme, netloc, parsed.path, parsed.params, parsed.query,
             parsed.fragment))
        return x509.UniformResourceIdentifier(uri)
    elif gn.type == backend._lib.GEN_RID:
        oid = _obj2txt(backend, gn.d.registeredID)
        return x509.RegisteredID(x509.ObjectIdentifier(oid))
    elif gn.type == backend._lib.GEN_IPADD:
        return x509.IPAddress(
            ipaddress.ip_address(
                backend._ffi.buffer(gn.d.iPAddress.data,
                                    gn.d.iPAddress.length)[:]))
    elif gn.type == backend._lib.GEN_DIRNAME:
        return x509.DirectoryName(_build_x509_name(backend,
                                                   gn.d.directoryName))
    elif gn.type == backend._lib.GEN_EMAIL:
        data = backend._ffi.buffer(gn.d.rfc822Name.data,
                                   gn.d.rfc822Name.length)[:].decode("ascii")
        name, address = parseaddr(data)
        parts = address.split(u"@")
        if name or len(parts) > 2 or not address:
            # parseaddr has found a name (e.g. Name <email>) or the split
            # has found more than 2 parts (which means more than one @ sign)
            # or the entire value is an empty string.
            raise ValueError("Invalid rfc822name value")
        elif len(parts) == 1:
            # Single label email name. This is valid for local delivery. No
            # IDNA decoding can be done since there is no domain component.
            return x509.RFC822Name(address)
        else:
            # A normal email of the form local@domain.com. Let's attempt to
            # decode the domain component and return the entire address.
            return x509.RFC822Name(parts[0] + u"@" + idna.decode(parts[1]))
    else:
        # otherName, x400Address or ediPartyName
        raise x509.UnsupportedGeneralNameType(
            "{0} is not a supported type".format(
                x509._GENERAL_NAMES.get(gn.type, gn.type)), gn.type)
Code Example #22
def _rebase_url(url, base):
    base = list(urlparse.urlparse(base))
    url = list(urlparse.urlparse(url))
    if not url[0]:  # fix up schema
        url[0] = base[0] or "http"
    if not url[1]:  # fix up hostname
        url[1] = base[1]
        if not url[2].startswith('/'):
            url[2] = re.sub(r'/[^/]+$', '/', base[2]) + url[2]
    return urlparse.urlunparse(url)
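A stand-alone sketch of the same rebasing logic with the Python 3 module layout and a hypothetical URL pair; for simple cases the standard library's urllib.parse.urljoin produces a similar result:

import re
from urllib.parse import urlparse, urlunparse

def rebase_url(url, base):
    base = list(urlparse(base))
    url = list(urlparse(url))
    if not url[0]:   # missing scheme: take it from the base (default http)
        url[0] = base[0] or "http"
    if not url[1]:   # missing host: take it from the base
        url[1] = base[1]
        if not url[2].startswith('/'):
            # relative path: resolve it against the base's directory
            url[2] = re.sub(r'/[^/]+$', '/', base[2]) + url[2]
    return urlunparse(url)

print(rebase_url('style.css', 'https://example.com/docs/index.html'))
# prints: https://example.com/docs/style.css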
Code Example #23
    def _traverse_request_path(self, level, path_only):
        parsed = list(urlparse(self.path_url))
        if level:
            parts = [x for x in parsed[2].split('/') if x]
            if abs(level) > len(parts):
                raise IndexError(abs(level))
            parts = parts[:level]
            parsed[2] = '/' + '/'.join(parts) if parts else ''

        return parsed[2] if path_only else urlunparse(parsed)
Code Example #24
def _rebase_url(url, base):
    base = list(urlparse.urlparse(base))
    url = list(urlparse.urlparse(url))
    if not url[0]:  # fix up schema
        url[0] = base[0] or "http"
    if not url[1]:  # fix up hostname
        url[1] = base[1]
        if not url[2].startswith("/"):
            url[2] = re.sub(r"/[^/]+$", "/", base[2]) + url[2]
    return urlparse.urlunparse(url)
Code Example #25
File: client.py Project: afoucret/site-search-python
 def sso_url(self, user_id):
     timestamp = self._get_timestamp()
     params = {
         'user_id': user_id,
         'client_id': self.client_id,
         'timestamp': timestamp,
         'token': self._sso_token(user_id, timestamp)
     }
     return urlunparse(
         ('https', 'swiftype.com', '/sso', '', urlencode(params), ''))
Code Example #26
 def __call__(self,
              member,
              method='GET',
              endpoint=None,
              data=None,
              **kwargs):
     url = member.api_url
     if endpoint:
         scheme, netloc, _, _, _, _ = urlparse(url)
         url = urlunparse((scheme, netloc, endpoint, '', '', ''))
     return self.request(method, url, data, **kwargs)
Code Example #27
 def get(self, ticket_id):
     if not ticket_id:
         raise HTTPBadRequest("Ticket id is required")
     try:
         ticket = tickets.get(ticket_id)
     except KeyError:
         raise HTTPNotFound("No such ticket %r" % ticket_id)
     ticket = ticket.copy()
     ticket["url"] = urllib_parse.urlunparse(ticket["url"])
     self.log.info("Retrieving ticket %s", ticket_id)
     return response(payload=ticket)
Code Example #28
 def __call__(self,
              member,
              method='GET',
              endpoint=None,
              data=None,
              **kwargs):
     url = member.api_url
     if endpoint:
         scheme, netloc, _, _, _, _ = urlparse(url)
         url = urlunparse((scheme, netloc, endpoint, '', '', ''))
     if data is not None:
         kwargs['body'] = json.dumps(data)
     return self._pool.request(method.upper(), url, **kwargs)
Code Example #29
File: util.py Project: BYERRORz/electrum
def create_URI(addr, amount, message):
    from . import bitcoin
    if not bitcoin.is_address(addr):
        return ""
    query = []
    if amount:
        query.append('amount=%s'%format_satoshis_plain(amount))
    if message:
        if six.PY2 and type(message) == unicode:
            message = message.encode('utf8')
        query.append('message=%s'%urllib.quote(message))
    p = urllib_parse.ParseResult(scheme='bitcoin', netloc='', path=addr, params='', query='&'.join(query), fragment='')
    return urllib_parse.urlunparse(p)
Code Example #30
    def get_mail_info(self):
        ucr.load()
        ADDRESS_VALUE = ucr.get('umc/sysinfo/mail/address',
                                '*****@*****.**')
        SUBJECT_VALUE = ucr.get('umc/sysinfo/mail/subject',
                                'Univention System Info')

        url = urlunparse(('mailto', '', ADDRESS_VALUE, '',
                          urlencode({
                              'subject': SUBJECT_VALUE,
                          }), ''))
        result = {}
        result['url'] = url.replace('+', '%20')
        return result
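urlunparse handles non-HTTP schemes such as mailto: the same way. A sketch of what the code above builds, with a hypothetical address and the default subject:

from urllib.parse import urlunparse, urlencode

url = urlunparse(('mailto', '', 'feedback@example.com', '',
                  urlencode({'subject': 'Univention System Info'}), ''))
print(url)                       # mailto:feedback@example.com?subject=Univention+System+Info
print(url.replace('+', '%20'))   # mailto:feedback@example.com?subject=Univention%20System%20Info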
Code Example #31
    def _idna_encode(self, value):
        parsed = urllib_parse.urlparse(value)
        if parsed.port:
            netloc = (
                idna.encode(parsed.hostname) +
                ":{0}".format(parsed.port).encode("ascii")).decode("ascii")
        else:
            netloc = idna.encode(parsed.hostname).decode("ascii")

        # Note that building a URL in this fashion means it should be
        # semantically indistinguishable from the original but is not
        # guaranteed to be exactly the same.
        return urllib_parse.urlunparse(
            (parsed.scheme, netloc, parsed.path, parsed.params, parsed.query,
             parsed.fragment))
Code Example #32
File: dcs.py Project: sean-/patroni
def parse_connection_string(value):
    """Original Governor stores connection strings for each cluster members if a following format:
        postgres://{username}:{password}@{connect_address}/postgres
    Since each of our patroni instances provides own REST API endpoint it's good to store this information
    in DCS among with postgresql connection string. In order to not introduce new keys and be compatible with
    original Governor we decided to extend original connection string in a following way:
        postgres://{username}:{password}@{connect_address}/postgres?application_name={api_url}
    This way original Governor could use such connection string as it is, because of feature of `libpq` library.

    This method is able to split connection string stored in DCS into two parts, `conn_url` and `api_url`"""

    scheme, netloc, path, params, query, fragment = urlparse(value)
    conn_url = urlunparse((scheme, netloc, path, params, '', fragment))
    api_url = ([v for n, v in parse_qsl(query) if n == 'application_name'] or [None])[0]
    return conn_url, api_url
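A usage sketch with a made-up member entry (it relies on the same urlparse/urlunparse/parse_qsl imports the function itself uses):

conn_url, api_url = parse_connection_string(
    'postgres://admin:secret@10.0.0.5:5432/postgres'
    '?application_name=http://10.0.0.5:8008/patroni')
print(conn_url)   # postgres://admin:secret@10.0.0.5:5432/postgres
print(api_url)    # http://10.0.0.5:8008/patroni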
Code Example #33
File: __init__.py Project: snooc/patroni
def parse_connection_string(value):
    """Original Governor stores connection strings for each cluster members if a following format:
        postgres://{username}:{password}@{connect_address}/postgres
    Since each of our patroni instances provides own REST API endpoint it's good to store this information
    in DCS among with postgresql connection string. In order to not introduce new keys and be compatible with
    original Governor we decided to extend original connection string in a following way:
        postgres://{username}:{password}@{connect_address}/postgres?application_name={api_url}
    This way original Governor could use such connection string as it is, because of feature of `libpq` library.

    This method is able to split connection string stored in DCS into two parts, `conn_url` and `api_url`"""

    scheme, netloc, path, params, query, fragment = urlparse(value)
    conn_url = urlunparse((scheme, netloc, path, params, '', fragment))
    api_url = ([v for n, v in parse_qsl(query) if n == 'application_name'] or [None])[0]
    return conn_url, api_url
Code Example #34
File: tickets.py Project: MuralidharB/ovirt-imageio
 def info(self):
     info = {
         "active": self.active(),
         "expires": self._expires,
         "ops": list(self._ops),
         "size": self._size,
         "timeout": self.expires - int(util.monotonic_time()),
         "url": urllib_parse.urlunparse(self._url),
         "uuid": self._uuid,
     }
     if self.filename:
         info["filename"] = self.filename
     transferred = self.transferred()
     if transferred is not None:
         info["transferred"] = transferred
     return info
Code Example #35
def _build_general_name(backend, gn):
    if gn.type == backend._lib.GEN_DNS:
        data = backend._ffi.buffer(gn.d.dNSName.data, gn.d.dNSName.length)[:]
        return x509.DNSName(idna.decode(data))
    elif gn.type == backend._lib.GEN_URI:
        data = backend._ffi.buffer(
            gn.d.uniformResourceIdentifier.data,
            gn.d.uniformResourceIdentifier.length
        )[:].decode("ascii")
        parsed = urllib_parse.urlparse(data)
        hostname = idna.decode(parsed.hostname)
        if parsed.port:
            netloc = hostname + u":" + six.text_type(parsed.port)
        else:
            netloc = hostname

        # Note that building a URL in this fashion means it should be
        # semantically indistinguishable from the original but is not
        # guaranteed to be exactly the same.
        uri = urllib_parse.urlunparse((
            parsed.scheme,
            netloc,
            parsed.path,
            parsed.params,
            parsed.query,
            parsed.fragment
        ))
        return x509.UniformResourceIdentifier(uri)
    elif gn.type == backend._lib.GEN_RID:
        oid = _obj2txt(backend, gn.d.registeredID)
        return x509.RegisteredID(x509.ObjectIdentifier(oid))
    elif gn.type == backend._lib.GEN_IPADD:
        return x509.IPAddress(
            ipaddress.ip_address(
                backend._ffi.buffer(
                    gn.d.iPAddress.data, gn.d.iPAddress.length
                )[:]
            )
        )
    else:
        # otherName, x400Address or ediPartyName
        raise x509.UnsupportedGeneralNameType(
            "{0} is not a supported type".format(
                x509._GENERAL_NAMES.get(gn.type, gn.type)
            ),
            gn.type
        )
Code Example #36
File: utils.py Project: ysf002/seahub
def get_redirect_url(request):
    """Redirects to referring page, or CAS_REDIRECT_URL if no referrer is
    set.
    """

    next_ = request.GET.get(REDIRECT_FIELD_NAME)
    if not next_:
        redirect_url = resolve_url(django_settings.CAS_REDIRECT_URL)
        if django_settings.CAS_IGNORE_REFERER:
            next_ = redirect_url
        else:
            next_ = request.META.get('HTTP_REFERER', redirect_url)
        prefix = urllib_parse.urlunparse(
            (get_protocol(request), request.get_host(), '', '', '', ''), )
        if next_.startswith(prefix):
            next_ = next_[len(prefix):]
    return next_
Code Example #37
    def resolve_url(self, service_type, url):
        """
        :returns: Resolved URL of a service using a service ID
        """
        # FIXME(mb): some tests don't put scheme, should fix tests
        if not url.startswith('http://'):
            url = "http://" + url

        parsed = urlparse(url)
        if parsed.port is not None:
            return url

        service_addr = self.resolve_service_id(service_type,
                                               parsed.hostname,
                                               check_format=False)
        return urlunparse((parsed.scheme, service_addr, parsed.path,
                           parsed.params, parsed.query, parsed.fragment))
Code Example #38
File: utils.py Project: ysf002/seahub
def get_service_url(request, redirect_to=None):
    """Generates application django service URL for CAS"""
    if hasattr(django_settings, 'CAS_ROOT_PROXIED_AS'):
        service = django_settings.CAS_ROOT_PROXIED_AS + '/' + request.path
    else:
        protocol = get_protocol(request)
        host = request.get_host()
        service = urllib_parse.urlunparse(
            (protocol, host, request.path, '', '', ''), )
    if not django_settings.CAS_STORE_NEXT:
        if '?' in service:
            service += '&'
        else:
            service += '?'
        service += urllib_parse.urlencode(
            {REDIRECT_FIELD_NAME: redirect_to or get_redirect_url(request)})
    return service
Code Example #39
File: auth.py Project: danielerez/ovirt-imageio
 def __repr__(self):
     return ("<Ticket "
             "active={active!r} "
             "expires={self.expires!r} "
             "filename={self.filename!r} "
             "idle_time={self.idle_time} "
             "ops={self.ops!r} "
             "size={self.size!r} "
             "sparse={self.sparse!r} "
             "transfer_id={self.transfer_id!r} "
             "transferred={transferred!r} "
             "url={url!r} "
             "uuid={self.uuid!r} "
             "at {addr:#x}>").format(active=self.active(),
                                     addr=id(self),
                                     self=self,
                                     transferred=self.transferred(),
                                     url=urllib_parse.urlunparse(self.url))
Code Example #40
File: urlhelpers.py Project: HoverHell/pyaux
def url_replace(url, **params):
    """ Replace some named parts in an url; See `urlparse.ParseResult` for the names """
    url_fields = urlparse.ParseResult._fields
    name_to_num = {field: idx for idx, field in enumerate(url_fields)}
    url_parts = list(urlparse.urlparse(url))  # Need a copy anyway
    for key, val in params.items():

        # Allow supplying various stuff as a query
        if key == 'query' and not isinstance(val, (bytes, unicode)):
            if isinstance(val, dict):
                val = val.items()
            val = [(to_bytes(query_key), to_bytes(query_val))
                   for query_key, query_val in val]
            val = urlencode(val)

        num = name_to_num[key]  # Will except here if supplied an unknown url param
        url_parts[num] = val
    return urlparse.urlunparse(url_parts)
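Where urlparse returns a ParseResult (any recent Python), the same named-part replacement can also be done with _replace() and geturl(). A minimal sketch with a hypothetical URL:

from urllib.parse import urlparse, urlencode

parts = urlparse('http://example.com/search?q=old#top')
print(parts._replace(query=urlencode({'q': 'new'})).geturl())
# prints: http://example.com/search?q=new#top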
Code Example #41
File: auth.py Project: oVirt/ovirt-imageio
 def info(self):
     info = {
         "active": self.active(),
         "expires": self._expires,
         "idle_time": self.idle_time,
         "ops": list(self._ops),
         "size": self._size,
         "sparse": self._sparse,
         "timeout": self._timeout,
         "url": urllib_parse.urlunparse(self._url),
         "uuid": self._uuid,
     }
     if self._transfer_id:
         info["transfer_id"] = self._transfer_id
     if self.filename:
         info["filename"] = self.filename
     transferred = self.transferred()
     if transferred is not None:
         info["transferred"] = transferred
     return info
Code Example #42
File: auth.py Project: oVirt/ovirt-imageio
 def __repr__(self):
     return ("<Ticket "
             "active={active!r} "
             "expires={self.expires!r} "
             "filename={self.filename!r} "
             "idle_time={self.idle_time} "
             "ops={self.ops!r} "
             "size={self.size!r} "
             "sparse={self.sparse!r} "
             "transfer_id={self.transfer_id!r} "
             "transferred={transferred!r} "
             "url={url!r} "
             "uuid={self.uuid!r} "
             "at {addr:#x}>"
             ).format(
                 active=self.active(),
                 addr=id(self),
                 self=self,
                 transferred=self.transferred(),
                 url=urllib_parse.urlunparse(self.url)
             )
Code Example #43
File: webtest.py Project: Southpaw-TACTIC/TACTIC
def strip_netloc(url):
    """
    Strip the scheme and host from the URL, returning the
    server-absolute portion.

    Useful for wrapping an absolute-URI for which only the
    path is expected (such as in calls to getPage).

    >>> strip_netloc('https://google.com/foo/bar?bing#baz')
    '/foo/bar?bing'

    >>> strip_netloc('//google.com/foo/bar?bing#baz')
    '/foo/bar?bing'

    >>> strip_netloc('/foo/bar?bing#baz')
    '/foo/bar?bing'
    """
    parsed = urllib_parse.urlparse(url)
    scheme, netloc, path, params, query, fragment = parsed
    stripped = '', '', path, params, query, ''
    return urllib_parse.urlunparse(stripped)
Code Example #44
File: general_name.py Project: alex/cryptography
    def _idna_encode(self, value):
        parsed = urllib_parse.urlparse(value)
        if parsed.port:
            netloc = (
                idna.encode(parsed.hostname) +
                ":{0}".format(parsed.port).encode("ascii")
            ).decode("ascii")
        else:
            netloc = idna.encode(parsed.hostname).decode("ascii")

        # Note that building a URL in this fashion means it should be
        # semantically indistinguishable from the original but is not
        # guaranteed to be exactly the same.
        return urllib_parse.urlunparse((
            parsed.scheme,
            netloc,
            parsed.path,
            parsed.params,
            parsed.query,
            parsed.fragment
        ))
Code Example #45
File: security.py Project: SUNET/eduid-webapp
def delete_account(user):
    """
    Terminate account view.
    It receives a POST request, checks the csrf token,
    schedules the account termination action,
    and redirects to the IdP.
    """
    current_app.logger.debug('Initiating account termination for user {}'.format(user))

    ts_url = current_app.config.get('TOKEN_SERVICE_URL')
    terminate_url = urlappend(ts_url, 'terminate')
    next_url = url_for('security.account_terminated')

    params = {'next': next_url}

    url_parts = list(urlparse(terminate_url))
    query = parse_qs(url_parts[4])
    query.update(params)

    url_parts[4] = urlencode(query)
    location = urlunparse(url_parts)
    return RedirectSchema().dump({'location': location}).data
Code Example #46
File: environ.py Project: ei-grad/django-environ
    def search_url_config(cls, url, engine=None):
        config = {}

        url = urlparse.urlparse(url) if not isinstance(url, cls.URL_CLASS) else url

        # Remove query strings.
        path = url.path[1:]
        path = path.split('?', 2)[0]

        if url.scheme in cls.SEARCH_SCHEMES:
            config["ENGINE"] = cls.SEARCH_SCHEMES[url.scheme]

        if path.endswith("/"):
            path = path[:-1]

        split = path.rsplit("/", 1)

        if len(split) > 1:
            path = split[:-1]
            index = split[-1]
        else:
            path = ""
            index = split[0]

        config.update({
            "URL": urlparse.urlunparse(("http",) + url[1:2] + (path,) + url[3:]),
            "INDEX_NAME": index,
        })

        if path:
            config.update({
                "PATH": path,
            })

        if engine:
            config['ENGINE'] = engine

        return config
Code Example #47
File: swiftype.py Project: swiftype/swiftype-py
 def sso_url(self, user_id):
   timestamp = self._get_timestamp()
   params = {'user_id': user_id, 'client_id': self.client_id, 'timestamp': timestamp, 'token': self._sso_token(user_id, timestamp)}
   return urlunparse(('https', 'swiftype.com', '/sso', '', urlencode(params), ''))
Code Example #48
File: url.py Project: justinvanwinkle/wextracto
 def update_fragment_dict(self, **kw):
     fragment_dict = dict(self.fragment_dict)
     fragment_dict.update(kw)
     fragment = encode_json(fragment_dict)
     return self.__class__(urlunparse(self.parsed._replace(fragment=fragment)))
Code Example #49
File: x509.py Project: sigmavirus24/cryptography
def _decode_general_name(backend, gn):
    if gn.type == backend._lib.GEN_DNS:
        data = backend._ffi.buffer(gn.d.dNSName.data, gn.d.dNSName.length)[:]
        if data.startswith(b"*."):
            # This is a wildcard name. We need to remove the leading wildcard,
            # IDNA decode, then re-add the wildcard. Wildcard characters should
            # always be left-most (RFC 2595 section 2.4).
            data = u"*." + idna.decode(data[2:])
        else:
            # Not a wildcard, decode away. If the string has a * in it anywhere
            # invalid this will raise an InvalidCodePoint
            data = idna.decode(data)

        return x509.DNSName(data)
    elif gn.type == backend._lib.GEN_URI:
        data = backend._ffi.buffer(
            gn.d.uniformResourceIdentifier.data,
            gn.d.uniformResourceIdentifier.length
        )[:].decode("ascii")
        parsed = urllib_parse.urlparse(data)
        hostname = idna.decode(parsed.hostname)
        if parsed.port:
            netloc = hostname + u":" + six.text_type(parsed.port)
        else:
            netloc = hostname

        # Note that building a URL in this fashion means it should be
        # semantically indistinguishable from the original but is not
        # guaranteed to be exactly the same.
        uri = urllib_parse.urlunparse((
            parsed.scheme,
            netloc,
            parsed.path,
            parsed.params,
            parsed.query,
            parsed.fragment
        ))
        return x509.UniformResourceIdentifier(uri)
    elif gn.type == backend._lib.GEN_RID:
        oid = _obj2txt(backend, gn.d.registeredID)
        return x509.RegisteredID(x509.ObjectIdentifier(oid))
    elif gn.type == backend._lib.GEN_IPADD:
        return x509.IPAddress(
            ipaddress.ip_address(
                backend._ffi.buffer(
                    gn.d.iPAddress.data, gn.d.iPAddress.length
                )[:]
            )
        )
    elif gn.type == backend._lib.GEN_DIRNAME:
        return x509.DirectoryName(
            _decode_x509_name(backend, gn.d.directoryName)
        )
    elif gn.type == backend._lib.GEN_EMAIL:
        data = backend._ffi.buffer(
            gn.d.rfc822Name.data, gn.d.rfc822Name.length
        )[:].decode("ascii")
        name, address = parseaddr(data)
        parts = address.split(u"@")
        if name or len(parts) > 2 or not address:
            # parseaddr has found a name (e.g. Name <email>) or the split
            # has found more than 2 parts (which means more than one @ sign)
            # or the entire value is an empty string.
            raise ValueError("Invalid rfc822name value")
        elif len(parts) == 1:
            # Single label email name. This is valid for local delivery. No
            # IDNA decoding can be done since there is no domain component.
            return x509.RFC822Name(address)
        else:
            # A normal email of the form local@domain.com. Let's attempt to
            # decode the domain component and return the entire address.
            return x509.RFC822Name(
                parts[0] + u"@" + idna.decode(parts[1])
            )
    else:
        # otherName, x400Address or ediPartyName
        raise x509.UnsupportedGeneralNameType(
            "{0} is not a supported type".format(
                x509._GENERAL_NAMES.get(gn.type, gn.type)
            ),
            gn.type
        )
Code Example #50
def encode_url_path(url, safe=SAFE_CHARS):
    from six.moves import urllib_parse  # pylint: disable=import-error
    url_parts = urllib_parse.urlparse(url)
    quoted_path = encode_for_url(url_parts.path, safe)
    return urllib_parse.urlunparse(url_parts[:2] + (quoted_path,) + url_parts[3:])
Code Example #51
File: x509.py Project: joernheissler/cryptography
def _decode_general_name(backend, gn):
    if gn.type == backend._lib.GEN_DNS:
        data = backend._asn1_string_to_bytes(gn.d.dNSName)
        if not data:
            decoded = u""
        elif data.startswith(b"*."):
            # This is a wildcard name. We need to remove the leading wildcard,
            # IDNA decode, then re-add the wildcard. Wildcard characters should
            # always be left-most (RFC 2595 section 2.4).
            decoded = u"*." + idna.decode(data[2:])
        else:
            # Not a wildcard, decode away. If the string has a * in it anywhere
            # invalid this will raise an InvalidCodePoint
            decoded = idna.decode(data)
            if data.startswith(b"."):
                # idna strips leading periods. Name constraints can have that
                # so we need to re-add it. Sigh.
                decoded = u"." + decoded

        return x509.DNSName(decoded)
    elif gn.type == backend._lib.GEN_URI:
        data = backend._asn1_string_to_ascii(gn.d.uniformResourceIdentifier)
        parsed = urllib_parse.urlparse(data)
        if parsed.hostname:
            hostname = idna.decode(parsed.hostname)
        else:
            hostname = ""
        if parsed.port:
            netloc = hostname + u":" + six.text_type(parsed.port)
        else:
            netloc = hostname

        # Note that building a URL in this fashion means it should be
        # semantically indistinguishable from the original but is not
        # guaranteed to be exactly the same.
        uri = urllib_parse.urlunparse((
            parsed.scheme,
            netloc,
            parsed.path,
            parsed.params,
            parsed.query,
            parsed.fragment
        ))
        return x509.UniformResourceIdentifier(uri)
    elif gn.type == backend._lib.GEN_RID:
        oid = _obj2txt(backend, gn.d.registeredID)
        return x509.RegisteredID(x509.ObjectIdentifier(oid))
    elif gn.type == backend._lib.GEN_IPADD:
        data = backend._asn1_string_to_bytes(gn.d.iPAddress)
        data_len = len(data)
        if data_len == 8 or data_len == 32:
            # This is an IPv4 or IPv6 Network and not a single IP. This
            # type of data appears in Name Constraints. Unfortunately,
            # ipaddress doesn't support packed bytes + netmask. Additionally,
            # IPv6Network can only handle CIDR rather than the full 16 byte
            # netmask. To handle this we convert the netmask to integer, then
            # find the first 0 bit, which will be the prefix. If another 1
            # bit is present after that the netmask is invalid.
            base = ipaddress.ip_address(data[:data_len // 2])
            netmask = ipaddress.ip_address(data[data_len // 2:])
            bits = bin(int(netmask))[2:]
            prefix = bits.find('0')
            # If no 0 bits are found it is a /32 or /128
            if prefix == -1:
                prefix = len(bits)

            if "1" in bits[prefix:]:
                raise ValueError("Invalid netmask")

            ip = ipaddress.ip_network(base.exploded + u"/{0}".format(prefix))
        else:
            ip = ipaddress.ip_address(data)

        return x509.IPAddress(ip)
    elif gn.type == backend._lib.GEN_DIRNAME:
        return x509.DirectoryName(
            _decode_x509_name(backend, gn.d.directoryName)
        )
    elif gn.type == backend._lib.GEN_EMAIL:
        data = backend._asn1_string_to_ascii(gn.d.rfc822Name)
        name, address = parseaddr(data)
        parts = address.split(u"@")
        if name or not address:
            # parseaddr has found a name (e.g. Name <email>) or the entire
            # value is an empty string.
            raise ValueError("Invalid rfc822name value")
        elif len(parts) == 1:
            # Single label email name. This is valid for local delivery. No
            # IDNA decoding can be done since there is no domain component.
            return x509.RFC822Name(address)
        else:
            # A normal email of the form local@domain.com. Let's attempt to
            # decode the domain component and return the entire address.
            return x509.RFC822Name(
                parts[0] + u"@" + idna.decode(parts[1])
            )
    elif gn.type == backend._lib.GEN_OTHERNAME:
        type_id = _obj2txt(backend, gn.d.otherName.type_id)
        value = backend._asn1_to_der(gn.d.otherName.value)
        return x509.OtherName(x509.ObjectIdentifier(type_id), value)
    else:
        # x400Address or ediPartyName
        raise x509.UnsupportedGeneralNameType(
            "{0} is not a supported type".format(
                x509._GENERAL_NAMES.get(gn.type, gn.type)
            ),
            gn.type
        )
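The trickiest part of this example is the GEN_IPADD branch, which turns a packed address-plus-netmask value into a network by finding the first zero bit of the netmask. A stand-alone sketch of just that conversion, using hypothetical IPv4 name-constraint bytes (address followed by netmask):

import ipaddress

data = bytes([192, 168, 0, 0, 255, 255, 255, 0])    # hypothetical 8-byte value
base = ipaddress.ip_address(data[:4])               # 192.168.0.0
netmask = ipaddress.ip_address(data[4:])            # 255.255.255.0
bits = bin(int(netmask))[2:]
prefix = bits.find('0')                             # first zero bit gives the prefix length
if prefix == -1:
    prefix = len(bits)                              # no zero bit: a /32 (or /128 for IPv6)
if '1' in bits[prefix:]:
    raise ValueError("Invalid netmask")
print(ipaddress.ip_network(base.exploded + u"/{0}".format(prefix)))
# prints: 192.168.0.0/24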
Code Example #52
File: madness_stuffstuff.py Project: HoverHell/pyaux
 def to_string(self):
     query_str = urlparse.urlencode(list(self.query.items()))
     return urlparse.urlunparse((
         self[key] if key != 'query' else query_str
         for key in self._base_components))
Code Example #53
def _database_url(request):
    from warehouse.application import Warehouse

    def _get_name():
        tag = "".join(
            random.choice(string.ascii_lowercase + string.digits)
            for x in range(7)
        )
        return "warehousetest_{}".format(tag)

    def _check_name(engine, name):
        with engine.connect() as conn:
            results = conn.execute(
                "SELECT datname FROM pg_database WHERE datistemplate = false"
            )
            return name not in [r[0] for r in results]

    database_url_default = 'postgresql://localhost/test_warehouse'
    database_url_environ = os.environ.get("WAREHOUSE_DATABASE_URL")
    database_url_option = request.config.getvalue("database_url")

    if (not database_url_default and not database_url_environ
            and not database_url_option):
        pytest.skip("No database provided")

    # Configure our engine so that we can create a database
    database_url = (
        database_url_option or database_url_environ or database_url_default
    )
    engine = sqlalchemy.create_engine(
        database_url,
        isolation_level="AUTOCOMMIT",
        poolclass=sqlalchemy.pool.NullPool
    )

    # Make a random database name that doesn't exist
    name = _get_name()
    while not _check_name(engine, name):
        name = _get_name()

    # Create the database
    with engine.connect() as conn:
        conn.execute("CREATE DATABASE {} ENCODING 'UTF8'".format(name))

    # Create a new database_url with the name replaced
    parsed = urllib_parse.urlparse(database_url)
    test_database_url = urllib_parse.urlunparse(
        parsed[:2] + ("/" + name,) + parsed[3:]
    )

    # Create the database schema
    test_engine = sqlalchemy.create_engine(
        test_database_url,
        poolclass=sqlalchemy.pool.NullPool,
    )
    app = Warehouse.from_yaml(
        override={
            "database": {"url": test_database_url},
            "search": {"hosts": []},
        },
        engine=test_engine,
        redis=False,
    )
    with app.engine.connect() as conn:
        conn.execute("CREATE EXTENSION IF NOT EXISTS citext")
    alembic_cfg = alembic.config.Config()
    alembic_cfg.set_main_option(
        "script_location",
        app.config.database.migrations,
    )
    alembic_cfg.set_main_option("url", app.config.database.url)
    alembic.command.upgrade(alembic_cfg, "head")
    test_engine.dispose()

    # Drop the database at the end of the session
    def _drop_database():
        with engine.connect() as conn:
            # Terminate all open connections to the test database
            conn.execute(
                """SELECT pg_terminate_backend(pid)
                   FROM pg_stat_activity
                   WHERE datname = %s
                """,
                [name],
            )
            conn.execute("DROP DATABASE {}".format(name))
    request.addfinalizer(_drop_database)

    return test_database_url
Code Example #54
File: upload.py Project: ncoghlan/twine
    def __call__(self, dists, repository, sign, identity, username, password,
            comment):
        # Check that a nonsensical option wasn't given
        if not sign and identity:
            raise CommandError("--sign must be given along with --identity")

        # Get our config from ~/.pypirc
        config = get_distutils_config(repository)

        parsed = urllib_parse.urlparse(config["repository"])
        if parsed.netloc == "pypi.python.org" and parsed.scheme == "http":
            config["repository"] = urllib_parse.urlunparse(
                ("https",) + parsed[1:]
            )

        logger.info("Uploading distributions to %s", config["repository"])

        session = requests.session()

        for filename in dists:
            # Sign the dist if requested
            if sign:
                logger.info("Signing %s", os.path.basename(filename))
                gpg_args = ["gpg", "--detach-sign", "-a", filename]
                if identity:
                    gpg_args[2:2] = ["--local-user", identity]
                distutils.spawn.spawn(gpg_args)

            # Extract the metadata from the package
            for ext, dtype in self.DIST_EXTENSIONS.items():
                if filename.endswith(ext):
                    meta = self.DIST_TYPES[dtype](filename)
                    break
            else:
                raise ValueError(
                    "Unknown distribution format: '%s'" %
                    os.path.basename(filename)
                )

            if dtype == "bdist_egg":
                pkgd = pkg_resources.Distribution.from_filename(filename)
                py_version = pkgd.py_version
            elif dtype == "bdist_wheel":
                py_version = meta.py_version
            else:
                py_version = None

            # Fill in the data - send all the meta-data in case we need to
            # register a new release
            data = {
                # action
                ":action": "file_upload",
                "protcol_version": "1",

                # identify release
                "name": meta.name,
                "version": meta.version,

                # file content
                "filetype": dtype,
                "pyversion": py_version,

                # additional meta-data
                "metadata_version": meta.metadata_version,
                "summary": meta.summary,
                "home_page": meta.home_page,
                "author": meta.author,
                "author_email": meta.author_email,
                "maintainer": meta.maintainer,
                "maintainer_email": meta.maintainer_email,
                "license": meta.license,
                "description": meta.description,
                "keywords": meta.keywords,
                "platform": meta.platforms,
                "classifiers": meta.classifiers,
                "download_url": meta.download_url,
                "supported_platform": meta.supported_platforms,
                "comment": comment,

                # PEP 314
                "provides": meta.provides,
                "requires": meta.requires,
                "obsoletes": meta.obsoletes,

                # Metadata 1.2
                "project_urls": meta.project_urls,
                "provides_dist": meta.provides_dist,
                "obsoletes_dist": meta.obsoletes_dist,
                "requires_dist": meta.requires_dist,
                "requires_external": meta.requires_external,
                "requires_python": meta.requires_python,

            }

            with open(filename, "rb") as fp:
                content = fp.read()
                filedata = {
                    "content": (os.path.basename(filename), content),
                }
                data["md5_digest"] = hashlib.md5(content).hexdigest()

            if sign:
                with open(filename + ".asc") as gpg:
                    sigdata = gpg.read()
                    filedata["gpg_signature"] = (
                        os.path.basename(filename) + ".asc",
                        sigdata,
                    )

            logger.info("Uploading %s", os.path.basename(filename))

            resp = session.post(
                config["repository"],
                data=dict((k, v) for k, v in data.items() if v),
                files=filedata,
                auth=(config.get("username"), config.get("password")),
            )
            resp.raise_for_status()

        logger.info("Finished")
Code example #55
File: url.py  Project: justinvanwinkle/wextracto
    def url_query_filter(obj):
        parsed = parse_url(obj)
        qsl = list(filter(pred, parse_qsl(parsed.query)))
        filtered_query = urlencode(qsl)
        return urlunparse(parsed._replace(query=filtered_query))
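
A hedged usage sketch of the same idea using only the standard library, assuming pred is a predicate over (key, value) pairs; wextracto's parse_url helper is replaced by urlparse here, and the function name is made up for illustration:

from urllib.parse import urlparse, urlunparse, parse_qsl, urlencode

def drop_query_params(url, pred):
    # Keep only the query parameters the predicate accepts, then rebuild the URL.
    parsed = urlparse(url)
    kept = [kv for kv in parse_qsl(parsed.query) if pred(kv)]
    return urlunparse(parsed._replace(query=urlencode(kept)))

# drop_query_params("http://example.com/p?a=1&utm_source=x",
#                   lambda kv: not kv[0].startswith("utm_"))
# -> "http://example.com/p?a=1"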
Code example #56
File: environ.py  Project: AliLozano/django-environ
    def search_url_config(cls, url, engine=None):
        config = {}

        url = urlparse.urlparse(url) if not isinstance(url, cls.URL_CLASS) else url

        # Remove query strings.
        path = url.path[1:]
        path = path.split('?', 2)[0]

        if url.scheme not in cls.SEARCH_SCHEMES:
            raise ImproperlyConfigured('Invalid search scheme %s' % url.scheme)
        config["ENGINE"] = cls.SEARCH_SCHEMES[url.scheme]

        # check commons params
        params = {}
        if url.query:
            params = urlparse.parse_qs(url.query)
            if 'EXCLUDED_INDEXES' in params.keys():
                config['EXCLUDED_INDEXES'] = params['EXCLUDED_INDEXES'][0].split(',')
            if 'INCLUDE_SPELLING' in params.keys():
                config['INCLUDE_SPELLING'] = cls.parse_value(params['INCLUDE_SPELLING'][0], bool)
            if 'BATCH_SIZE' in params.keys():
                config['BATCH_SIZE'] = cls.parse_value(params['BATCH_SIZE'][0], int)

        if url.scheme == 'simple':
            return config
        elif url.scheme in ['solr', 'elasticsearch']:
            if 'KWARGS' in params.keys():
                config['KWARGS'] = params['KWARGS'][0]

        # remove trailing slash
        if path.endswith("/"):
            path = path[:-1]

        if url.scheme == 'solr':
            config['URL'] = urlparse.urlunparse(('http',) + url[1:2] + (path,) + ('', '', ''))
            if 'TIMEOUT' in params.keys():
                config['TIMEOUT'] = cls.parse_value(params['TIMEOUT'][0], int)
            return config

        if url.scheme == 'elasticsearch':

            split = path.rsplit("/", 1)

            if len(split) > 1:
                path = "/".join(split[:-1])
                index = split[-1]
            else:
                path = ""
                index = split[0]

            config['URL'] = urlparse.urlunparse(('http',) + url[1:2] + (path,) + ('', '', ''))
            if 'TIMEOUT' in params.keys():
                config['TIMEOUT'] = cls.parse_value(params['TIMEOUT'][0], int)
            config['INDEX_NAME'] = index
            return config

        config['PATH'] = '/' + path

        if url.scheme == 'whoosh':
            if 'STORAGE' in params.keys():
                config['STORAGE'] = params['STORAGE'][0]
            if 'POST_LIMIT' in params.keys():
                config['POST_LIMIT'] = cls.parse_value(params['POST_LIMIT'][0], int)
        elif url.scheme == 'xapian':
            if 'FLAGS' in params.keys():
                config['FLAGS'] = params['FLAGS'][0]

        if engine:
            config['ENGINE'] = engine

        return config
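
The elasticsearch branch peels the last path segment off as the index name and rebuilds the rest of the URL; a standalone sketch of just that step, under the assumption that the input is a plain URL string:

from urllib.parse import urlparse, urlunparse

def split_search_url(url):
    parsed = urlparse(url)
    path = parsed.path[1:].rstrip("/")
    path, _, index = path.rpartition("/")
    # Rebuild a plain http URL on the same host, minus the index segment.
    base = urlunparse(("http", parsed.netloc, path, "", "", ""))
    return base, index

# split_search_url("elasticsearch://127.0.0.1:9200/search/my-index")
# -> ("http://127.0.0.1:9200/search", "my-index")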
Code example #57
File: utils.py  Project: openstack/networking-odl
def get_odl_url(path=''):
    '''Make a URL for some ODL resource (path)'''
    purl = urlparse.urlsplit(cfg.CONF.ml2_odl.url)
    features_url = urlparse.urlunparse((
        purl.scheme, purl.netloc, path, '', '', ''))
    return features_url
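
A hedged standalone equivalent, with the configured controller URL passed in explicitly instead of being read from oslo.config; the sample values are made up:

from urllib.parse import urlsplit, urlunparse

def odl_url(base_url, path=""):
    # Keep only scheme and host from the configured URL and splice in the new path.
    parts = urlsplit(base_url)
    return urlunparse((parts.scheme, parts.netloc, path, "", "", ""))

# odl_url("http://127.0.0.1:8087/controller/nb/v2/neutron", "restconf/operational")
# -> "http://127.0.0.1:8087/restconf/operational"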
Code example #58
File: service.py  Project: castedo/requestbuilder
    def send_request(self, method='GET', path=None, params=None, headers=None,
                     data=None, files=None, auth=None):
        url = self.__get_url_for_path(path)
        headers = dict(headers or {})  # headers may be None (the default)
        if 'host' not in [header.lower() for header in headers]:
            headers['Host'] = urlparse.urlparse(self.endpoint).netloc

        try:
            max_tries = self.max_retries + 1
            assert max_tries >= 1
            redirects_left = 5
            if isinstance(data, file) and hasattr(data, 'seek'):
                # If we're redirected we need to be able to reset the
                # body's file position before retrying
                data_file_offset = data.tell()
            else:
                data_file_offset = None
            while True:
                for attempt_no, delay in enumerate(
                        _generate_delays(max_tries), 1):
                    # Use exponential backoff if this is a retry
                    if delay > 0:
                        self.log.debug('will retry after %.3f seconds', delay)
                        time.sleep(delay)

                    self.log.info('sending request (attempt %i of %i)',
                                  attempt_no, max_tries)
                    p_request = self.__log_and_prepare_request(
                        method, url, params, data, files, headers, auth)
                    p_request.start_time = datetime.datetime.now()
                    proxies = requests.utils.get_environ_proxies(url)
                    for key, val in sorted(proxies.items()):
                        self.log.debug('request proxy:  %s=%s', key, val)
                    try:
                        response = self.session.send(
                            p_request, timeout=self.timeout, proxies=proxies,
                            allow_redirects=False)
                    except requests.exceptions.Timeout:
                        if attempt_no < max_tries:
                            self.log.debug('timeout', exc_info=True)
                            if data_file_offset is not None:
                                self.log.debug('re-seeking body to '
                                               'beginning of file')
                                # pylint: disable=E1101
                                data.seek(data_file_offset)
                                # pylint: enable=E1101
                                continue
                            elif not hasattr(data, 'tell'):
                                continue
                            # Fallthrough -- if it has a file pointer but not
                            # seek we can't retry because we can't rewind.
                        raise
                    if response.status_code not in (500, 503):
                        break
                    # If it *was* in that list, retry
                if (response.status_code in (301, 302, 307, 308) and
                        redirects_left > 0 and 'Location' in response.headers):
                    # Standard redirect -- we need to handle this ourselves
                    # because we have to re-sign requests when their URLs
                    # change.
                    redirects_left -= 1
                    parsed_rdr = urlparse.urlparse(
                        response.headers['Location'])
                    parsed_url = urlparse.urlparse(url)
                    new_url_bits = []
                    for rdr_bit, url_bit in zip(parsed_rdr, parsed_url):
                        new_url_bits.append(rdr_bit or url_bit)
                    if 'Host' in headers:
                        headers['Host'] = new_url_bits[1]  # netloc
                    url = urlparse.urlunparse(new_url_bits)
                    self.log.debug('redirecting to %s (%i redirect(s) '
                                   'remaining)', url, redirects_left)
                    if data_file_offset is not None:
                        self.log.debug('re-seeking body to beginning of file')
                        # pylint: disable=E1101
                        data.seek(data_file_offset)
                        # pylint: enable=E1101
                    continue
                elif response.status_code >= 300:
                    # We include 30x because we've handled the standard method
                    # of redirecting, but the server might still be trying to
                    # redirect another way for some reason.
                    self.handle_http_error(response)
                return response
        except requests.exceptions.Timeout as exc:
            self.log.debug('timeout', exc_info=True)
            raise TimeoutError('request timed out', exc)
        except requests.exceptions.ConnectionError as exc:
            self.log.debug('connection error', exc_info=True)
            return self.__handle_connection_error(exc)
        except requests.exceptions.HTTPError as exc:
            return self.handle_http_error(response)
        except requests.exceptions.RequestException as exc:
            self.log.debug('request error', exc_info=True)
            raise ClientError(exc)
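
The redirect handling above merges each component of the Location header with the original request URL before re-signing; a minimal sketch of just that merge, assuming Python 3's urllib.parse and a made-up function name:

from urllib.parse import urlparse, urlunparse

def merge_redirect(original_url, location):
    # For each URL component, prefer the redirect's value and fall back to the
    # original's, so relative Location headers still produce an absolute URL.
    merged = [rdr or orig for rdr, orig in zip(urlparse(location),
                                               urlparse(original_url))]
    return urlunparse(merged)

# merge_redirect("http://example.com/bucket/key?acl", "/other/key")
# -> "http://example.com/other/key?acl"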