def discharge(url, request):
     qs = parse_qs(request.body)
     if qs.get('token64') is None:
         return response(
             status_code=401,
             content={
                 'Code': httpbakery.ERR_INTERACTION_REQUIRED,
                 'Message': 'interaction required',
                 'Info': {
                     'InteractionMethods': {
                         'agent': {'login-url': '/login'},
                     },
                 },
             },
             headers={'Content-Type': 'application/json'})
     else:
         content = {q: qs[q][0] for q in qs}
         m = httpbakery.discharge(checkers.AuthContext(), content,
                                  discharge_key, None, alwaysOK3rd)
         return {
             'status_code': 200,
             'content': {
                 'Macaroon': m.to_dict()
             }
         }
Example #2
    def test_authorize_view(self):
        with self.app.test_client() as client:
            response = client.get('/oauth2authorize')
            location = response.headers['Location']
            q = urlparse.parse_qs(location.split('?', 1)[1])
            state = json.loads(q['state'][0])

            self.assertIn(GOOGLE_AUTH_URI, location)
            self.assertNotIn(self.oauth2.client_secret, location)
            self.assertIn(self.oauth2.client_id, q['client_id'])
            self.assertEqual(
                flask.session['google_oauth2_csrf_token'], state['csrf_token'])
            self.assertEqual(state['return_url'], '/')

        with self.app.test_client() as client:
            response = client.get('/oauth2authorize?return_url=/test')
            location = response.headers['Location']
            q = urlparse.parse_qs(location.split('?', 1)[1])
            state = json.loads(q['state'][0])
            self.assertEqual(state['return_url'], '/test')

        with self.app.test_client() as client:
            response = client.get('/oauth2authorize?extra_param=test')
            location = response.headers['Location']
            self.assertIn('extra_param=test', location)
Example #3
    def assertRedirectsNoFollow(self, response, expected_url, use_params=True,
                                status_code=302):
        """Checks response redirect without loading the destination page.

        Django's assertRedirects method loads the destination page, which
        requires that the page be renderable in the current test context
        (possibly requiring additional, unrelated setup).
        """
        # Assert that the response has the correct redirect code.
        self.assertEqual(
            response.status_code, status_code,
            "Response didn't redirect as expected: Response code was {0} "
            "(expected {1})".format(response.status_code, status_code))

        # Assert that the response redirects to the correct base URL.
        # Use force_text to force evaluation of anything created by
        # reverse_lazy.
        response_url = force_text(response['location'])
        expected_url = force_text(expected_url)
        parsed1 = urlparse(response_url)
        parsed2 = urlparse(expected_url)
        self.assertEquals(
            parsed1.path, parsed2.path,
            "Response did not redirect to the expected URL: Redirect "
            "location was {0} (expected {1})".format(parsed1.path, parsed2.path))

        # Optionally assert that the response redirect URL has the correct
        # GET parameters.
        if use_params:
            self.assertDictEqual(
                parse_qs(parsed1.query), parse_qs(parsed2.query),
                "Response did not have the GET parameters expected: GET "
                "parameters were {0} (expected "
                "{1})".format(parsed1.query or {}, parsed2.query or {}))
Example #4
def get_msg(hinfo, binding, response=False):
    if binding == BINDING_SOAP:
        msg = hinfo["data"]
    elif binding == BINDING_HTTP_POST:
        _inp = hinfo["data"][3]
        i = _inp.find(TAG1)
        i += len(TAG1) + 1
        j = _inp.find('"', i)
        msg = _inp[i:j]
    elif binding == BINDING_HTTP_ARTIFACT:
        # either by POST or by redirect
        if hinfo["data"]:
            _inp = hinfo["data"][3]
            i = _inp.find(TAG1)
            i += len(TAG1) + 1
            j = _inp.find('"', i)
            msg = _inp[i:j]
        else:
            parts = urlparse(hinfo["url"])
            msg = parse_qs(parts.query)["SAMLart"][0]
    else: # BINDING_HTTP_REDIRECT
        parts = urlparse(hinfo["headers"][0][1])
        msg = parse_qs(parts.query)["SAMLRequest"][0]

    return msg
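
As a rough, standalone sketch of the redirect branch above (assuming Python 3's urllib.parse; the URL and the truncated SAMLRequest value are made up):

from urllib.parse import parse_qs, urlparse

# Hypothetical redirect Location produced by an HTTP-Redirect binding.
location = "https://idp.example.org/sso?SAMLRequest=fZFBa8JA...&RelayState=xyz"
parts = urlparse(location)
msg = parse_qs(parts.query)["SAMLRequest"][0]
# msg still holds the deflated/base64-encoded request at this point.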
Example #5
    def _list_buckets_non_empty_helper(self, project, use_default=False):
        from six.moves.urllib.parse import parse_qs
        from six.moves.urllib.parse import urlencode
        from six.moves.urllib.parse import urlparse
        from gcloud._testing import _monkey_defaults as _base_monkey_defaults
        from gcloud.storage._testing import _monkey_defaults
        from gcloud.storage.connection import Connection
        BUCKET_NAME = 'bucket-name'
        conn = Connection()
        query_params = urlencode({'project': project, 'projection': 'noAcl'})
        BASE_URI = '/'.join([
            conn.API_BASE_URL,
            'storage',
            conn.API_VERSION,
        ])
        URI = '/'.join([BASE_URI, 'b?%s' % (query_params,)])
        http = conn._http = Http(
            {'status': '200', 'content-type': 'application/json'},
            '{{"items": [{{"name": "{0}"}}]}}'.format(BUCKET_NAME)
            .encode('utf-8'),
        )

        if use_default:
            with _base_monkey_defaults(project=project):
                with _monkey_defaults(connection=conn):
                    buckets = list(self._callFUT())
        else:
            buckets = list(self._callFUT(project=project, connection=conn))

        self.assertEqual(len(buckets), 1)
        self.assertEqual(buckets[0].name, BUCKET_NAME)
        self.assertEqual(http._called_with['method'], 'GET')
        self.assertTrue(http._called_with['uri'].startswith(BASE_URI))
        self.assertEqual(parse_qs(urlparse(http._called_with['uri']).query),
                         parse_qs(urlparse(URI).query))
Example #6
    def new_websocket_client(self):
        """Called after a new WebSocket connection has been established."""
        # Reopen the eventlet hub to make sure we don't share an epoll
        # fd with parent and/or siblings, which would be bad
        from eventlet import hubs
        hubs.use_hub()

        # Zun expects the token to be passed as a query
        # parameter of the GET request
        parse = urlparse.urlparse(self.path)
        if parse.scheme not in ('http', 'https'):
            # Due to a bug in urlparse in Python < 2.7.4 we cannot support
            # special schemes (cf: https://bugs.python.org/issue9374)
            if sys.version_info < (2, 7, 4):
                raise exception.ZunException(
                    _("We do not support scheme '%s' under Python < 2.7.4, "
                      "please use http or https") % parse.scheme)

        query = parse.query
        token = urlparse.parse_qs(query).get("token", [""]).pop()
        uuid = urlparse.parse_qs(query).get("uuid", [""]).pop()
        exec_id = urlparse.parse_qs(query).get("exec_id", [""]).pop()

        ctx = context.get_admin_context(all_projects=True)

        if uuidutils.is_uuid_like(uuid):
            container = objects.Container.get_by_uuid(ctx, uuid)
        else:
            container = objects.Container.get_by_name(ctx, uuid)

        if exec_id:
            self._new_exec_client(container, token, uuid, exec_id)
        else:
            self._new_websocket_client(container, token, uuid)
Example #7
    def test_authorize_view(self):
        with self.app.test_client() as c:
            rv = c.get('/oauth2authorize')
            location = rv.headers['Location']
            q = urlparse.parse_qs(location.split('?', 1)[1])
            state = json.loads(q['state'][0])

            self.assertTrue(GOOGLE_AUTH_URI in location)
            self.assertFalse(self.oauth2.client_secret in location)
            self.assertTrue(self.oauth2.client_id in q['client_id'])
            self.assertEqual(
                flask.session['google_oauth2_csrf_token'], state['csrf_token'])
            self.assertEqual(state['return_url'], '/')

        with self.app.test_client() as c:
            rv = c.get('/oauth2authorize?return_url=/test')
            location = rv.headers['Location']
            q = urlparse.parse_qs(location.split('?', 1)[1])
            state = json.loads(q['state'][0])
            self.assertEqual(state['return_url'], '/test')

        with self.app.test_client() as c:
            rv = c.get('/oauth2authorize?extra_param=test')
            location = rv.headers['Location']
            self.assertTrue('extra_param=test' in location)
Example #8
    def test_build_ga_params_for_campaign_tracking_params(self):
        '''
        Test that the correct GA campaign
        tracking params are tracked correctly
        '''
        request = self.make_fake_request(
            '/somewhere/?utm_campaign=campaign name&utm_term=campaign keyword')
        ga_dict_with_campaign_params = build_ga_params(
            request, 'ua-test-id', '/compaign/path/')
        self.assertEqual(
            parse_qs(ga_dict_with_campaign_params.get(
                'utm_url')).get('cn'), ['campaign name'])
        self.assertEqual(
            parse_qs(ga_dict_with_campaign_params.get(
                'utm_url')).get('ck'), ['campaign keyword'])

        # params that aren't in the request should be excluded from the utm_url
        self.assertEqual(
            parse_qs(
                ga_dict_with_campaign_params.get(
                    'utm_url')).get('cs'), None)
        self.assertEqual(
            parse_qs(
                ga_dict_with_campaign_params.get(
                    'utm_url')).get('cm'), None)
Example #9
    def _match_url(self, request):
        if self._url is ANY:
            return True

        # regular expression matching
        if hasattr(self._url, 'search'):
            return self._url.search(request.url) is not None

        if self._url_parts.scheme and request.scheme != self._url_parts.scheme:
            return False

        if self._url_parts.netloc and request.netloc != self._url_parts.netloc:
            return False

        if (request.path or '/') != (self._url_parts.path or '/'):
            return False

        # construct our own qs structure as we remove items from it below
        request_qs = urlparse.parse_qs(request.query)
        matcher_qs = urlparse.parse_qs(self._url_parts.query)

        for k, vals in six.iteritems(matcher_qs):
            for v in vals:
                try:
                    request_qs.get(k, []).remove(v)
                except ValueError:
                    return False

        if self._complete_qs:
            for v in six.itervalues(request_qs):
                if v:
                    return False

        return True
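
This matcher resembles requests_mock's URL matching, where the registered query string only needs to be a subset of the request's query unless complete_qs is set; a hedged usage sketch against requests_mock's public API:

import requests
import requests_mock

with requests_mock.Mocker() as m:
    m.get('http://example.com/path?a=1', text='ok')
    # Extra request parameters are tolerated by default.
    assert requests.get('http://example.com/path?a=1&b=2').text == 'ok'

    m.get('http://example.com/strict?a=1', text='ok', complete_qs=True)
    # With complete_qs=True the query strings must match exactly;
    # 'http://example.com/strict?a=1&b=2' would raise NoMockAddress here.
    assert requests.get('http://example.com/strict?a=1').text == 'ok'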
Example #10
    def _list_buckets_non_empty_helper(self, project, use_default=False):
        from six.moves.urllib.parse import parse_qs
        from six.moves.urllib.parse import urlencode
        from six.moves.urllib.parse import urlparse
        from gcloud._testing import _monkey_defaults as _base_monkey_defaults
        from gcloud.storage._testing import _monkey_defaults
        from gcloud.storage.connection import Connection

        BUCKET_NAME = "bucket-name"
        conn = Connection()
        query_params = urlencode({"project": project, "projection": "noAcl"})
        BASE_URI = "/".join([conn.API_BASE_URL, "storage", conn.API_VERSION])
        URI = "/".join([BASE_URI, "b?%s" % (query_params,)])
        http = conn._http = Http(
            {"status": "200", "content-type": "application/json"},
            '{{"items": [{{"name": "{0}"}}]}}'.format(BUCKET_NAME).encode("utf-8"),
        )

        if use_default:
            with _base_monkey_defaults(project=project):
                with _monkey_defaults(connection=conn):
                    buckets = list(self._callFUT())
        else:
            buckets = list(self._callFUT(project=project, connection=conn))

        self.assertEqual(len(buckets), 1)
        self.assertEqual(buckets[0].name, BUCKET_NAME)
        self.assertEqual(http._called_with["method"], "GET")
        self.assertTrue(http._called_with["uri"].startswith(BASE_URI))
        self.assertEqual(parse_qs(urlparse(http._called_with["uri"]).query), parse_qs(urlparse(URI).query))
Example #11
def test_publish_to_http():
    httpretty.HTTPretty.register_uri(
        method="POST",
        uri="http://example.com/foobar",
    )

    conn = boto.connect_sns()
    conn.create_topic("some-topic")
    topics_json = conn.get_all_topics()
    topic_arn = topics_json["ListTopicsResponse"]["ListTopicsResult"]["Topics"][0]['TopicArn']

    conn.subscribe(topic_arn, "http", "http://example.com/foobar")

    response = conn.publish(topic=topic_arn, message="my message", subject="my subject")
    message_id = response['PublishResponse']['PublishResult']['MessageId']

    last_request = httpretty.last_request()
    last_request.method.should.equal("POST")
    parse_qs(last_request.body.decode('utf-8')).should.equal({
        "Type": ["Notification"],
        "MessageId": [message_id],
        "TopicArn": ["arn:aws:sns:us-east-1:123456789012:some-topic"],
        "Subject": ["my subject"],
        "Message": ["my message"],
        "Timestamp": ["2013-01-01T00:00:00Z"],
        "SignatureVersion": ["1"],
        "Signature": ["EXAMPLElDMXvB8r9R83tGoNn0ecwd5UjllzsvSvbItzfaMpN2nk5HVSw7XnOn/49IkxDKz8YrlH2qJXj2iZB0Zo2O71c4qQk1fMUDi3LGpij7RCW7AW9vYYsSqIKRnFS94ilu7NFhUzLiieYr4BKHpdTmdD6c0esKEYBpabxDSc="],
        "SigningCertURL": ["https://sns.us-east-1.amazonaws.com/SimpleNotificationService-f3ecfb7224c7233fe7bb5f59f96de52f.pem"],
        "UnsubscribeURL": ["https://sns.us-east-1.amazonaws.com/?Action=Unsubscribe&SubscriptionArn=arn:aws:sns:us-east-1:123456789012:some-topic:2bcfbf39-05c3-41de-beaa-fcfcc21c8f55"],
    })
Example #12
    def test_collection_ok_by_state(
            self, f_users, f_coprs,
            f_mock_chroots_many,
            f_build_many_chroots,
            f_db,
            f_users_api):

        self.db.session.commit()
        for status in StatusEnum.vals.values():
            expected_chroots = set([
                name
                for name, chroot_status in
                self.status_by_chroot.items()
                if chroot_status == status
            ])

            href = "/api_2/build_tasks?state={}&limit=50".format(StatusEnum(status))

            r0 = self.tc.get(href)
            assert r0.status_code == 200
            obj = json.loads(r0.data.decode("utf-8"))
            assert len(obj["build_tasks"]) == len(expected_chroots)
            assert set(bt["build_task"]["chroot_name"]
                       for bt in obj["build_tasks"]) == expected_chroots

            assert parse_qs(urlparse(obj["_links"]["self"]["href"]).query) \
                == parse_qs(urlparse(href).query)
Example #13
    def test_signature_values(self):
        """Test signature generation and update"""

        body = """
        {"response": {
            "user_info": {}, "new_key": "yes", "result": "Success"}
        }
        """

        responses.add(responses.POST, self.url, body=body, status=200,
                      content_type="application/json")
        # original secret key
        self.api.user_get_info()
        # secret key is (1000000000 * 16807) % 2147483647 = 792978578
        self.api.user_get_info()

        query = responses.calls[0].request.body
        params = parse_qs(query)

        self.assertEqual(params['signature'][0], CALL_SIGNATURES[0])

        query = responses.calls[1].request.body
        params = parse_qs(query)

        self.assertEqual(params['signature'][0], CALL_SIGNATURES[1])
Example #14
    def test_add_extras(self):
        context = {}
        settings.ANALYTICS = {
            'GOOGLE': {
                'GTM_SITE_ID': 'gtm-site-id',
                'GA_SITE_ID': 'ga-site-id',
                },
            'DAP': {
                'AGENCY': 'agency',
                'SUBAGENCY': 'sub-agency',
            }
        }

        utils.add_extras(context)

        self.assertTrue('APP_PREFIX' in context)
        self.assertTrue('env' in context)

        self.assertEquals('gtm-site-id',
                          context['ANALYTICS']['GOOGLE']['GTM_SITE_ID'])
        self.assertEquals('ga-site-id',
                          context['ANALYTICS']['GOOGLE']['GA_SITE_ID'])
        self.assertEquals('agency', context['ANALYTICS']['DAP']['AGENCY'])
        self.assertEquals('sub-agency',
                          context['ANALYTICS']['DAP']['SUBAGENCY'])
        self.assertEquals(
            parse_qs('agency=agency&subagency=sub-agency'),
            parse_qs(context['ANALYTICS']['DAP']['DAP_URL_PARAMS']),
        )
Example #15
    def test_list_buckets_non_empty(self):
        from six.moves.urllib.parse import parse_qs
        from six.moves.urllib.parse import urlencode
        from six.moves.urllib.parse import urlparse
        PROJECT = 'PROJECT'
        CREDENTIALS = _Credentials()
        client = self._makeOne(project=PROJECT, credentials=CREDENTIALS)

        BUCKET_NAME = 'bucket-name'
        query_params = urlencode({'project': PROJECT, 'projection': 'noAcl'})
        BASE_URI = '/'.join([
            client.connection.API_BASE_URL,
            'storage',
            client.connection.API_VERSION,
        ])
        URI = '/'.join([BASE_URI, 'b?%s' % (query_params,)])
        http = client.connection._http = _Http(
            {'status': '200', 'content-type': 'application/json'},
            '{{"items": [{{"name": "{0}"}}]}}'.format(BUCKET_NAME)
            .encode('utf-8'),
        )
        buckets = list(client.list_buckets())
        self.assertEqual(len(buckets), 1)
        self.assertEqual(buckets[0].name, BUCKET_NAME)
        self.assertEqual(http._called_with['method'], 'GET')
        self.assertTrue(http._called_with['uri'].startswith(BASE_URI))
        self.assertEqual(parse_qs(urlparse(http._called_with['uri']).query),
                         parse_qs(urlparse(URI).query))
Example #16
def _unicode_parse_qs(qs, **kwargs):
    """
    A wrapper around ``urlparse.parse_qs`` that converts unicode strings to
    UTF-8 to prevent ``urlparse.unquote`` from performing its default decoding
    to latin-1; see http://hg.python.org/cpython/file/2.7/Lib/urlparse.py

    :param qs:       Percent-encoded query string to be parsed.
    :type qs:        ``str``

    :param kwargs:   Other keyword args passed onto ``parse_qs``.
    """
    if PY3 or isinstance(qs, bytes):
        # Nothing to do
        return parse_qs(qs, **kwargs)

    qs = qs.encode('utf-8', 'ignore')
    query = parse_qs(qs, **kwargs)
    unicode_query = {}
    for key in query:
        uni_key = key.decode('utf-8', 'ignore')
        if uni_key == '':
            # because we ignore decode errors and only support utf-8 right now,
            # we could end up with a blank string which we ignore
            continue
        unicode_query[uni_key] = [p.decode('utf-8', 'ignore') for p in query[key]]
    return unicode_query
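
A minimal usage sketch, assuming the helper and the PY3 flag it relies on are importable; the query string is an arbitrary example with a percent-encoded UTF-8 value:

encoded = 'name=%C3%A9clair&count=2'
result = _unicode_parse_qs(encoded)
# Expected on both Python 2 and 3: unicode keys and values, e.g.
# {u'name': [u'\xe9clair'], u'count': [u'2']}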
Example #17
    def test_build_ga_params_for_direct_referals(self):
        headers = {'HTTP_HOST': 'localhost:8000'}
        request = self.make_fake_request('/somewhere/', headers)
        ga_dict_without_referal = build_ga_params(
            request, 'ua-test-id', '/some/path/',)
        ga_dict_without_direct_referal = build_ga_params(
            request, 'ua-test-id', '/some/path/',
            referer='http://test.com/some/path/')

        ga_dict_with_direct_referal = build_ga_params(
            request, 'ua-test-id', '/some/path/',
            referer='http://localhost:8000/some/path/')

        # None: if the referral is not set
        self.assertEqual(
            parse_qs(ga_dict_without_referal.get('utm_url')).get('dr'), None)
        # Include referrals from another host
        self.assertEqual(
            parse_qs(
                ga_dict_without_direct_referal.get('utm_url')).get('dr'),
            ['http://test.com/some/path/'])
        # Exclude referrals from the same host
        self.assertEqual(
            parse_qs(
                ga_dict_with_direct_referal.get('utm_url')).get('dr'),
            ['/some/path/'])
Example #18
    def assert_setup_flow(self, team_id='TXXXXXXX1', authorizing_user_id='UXXXXXXX1'):
        responses.reset()

        resp = self.client.get(self.init_path)
        assert resp.status_code == 302
        redirect = urlparse(resp['Location'])
        assert redirect.scheme == 'https'
        assert redirect.netloc == 'slack.com'
        assert redirect.path == '/oauth/authorize'
        params = parse_qs(redirect.query)
        assert params['scope'] == [' '.join(self.provider.identity_oauth_scopes)]
        assert params['state']
        assert params['redirect_uri'] == ['http://testserver/extensions/slack/setup/']
        assert params['response_type'] == ['code']
        assert params['client_id'] == ['slack-client-id']
        # once we've asserted on it, switch to singular values to make life
        # easier
        authorize_params = {k: v[0] for k, v in six.iteritems(params)}

        responses.add(
            responses.POST, 'https://slack.com/api/oauth.token',
            json={
                'ok': True,
                'access_token': 'xoxp-xxxxxxxxx-xxxxxxxxxx-xxxxxxxxxxxx',
                'team_id': team_id,
                'team_name': 'Example',
                'authorizing_user_id': authorizing_user_id,
            }
        )

        responses.add(
            responses.GET, 'https://slack.com/api/team.info',
            json={
                'ok': True,
                'team': {
                    'domain': 'test-slack-workspace',
                    'icon': {'image_132': 'http://example.com/ws_icon.jpg'},
                },
            }
        )

        resp = self.client.get(u'{}?{}'.format(
            self.setup_path,
            urlencode({
                'code': 'oauth-code',
                'state': authorize_params['state'],
            })
        ))

        mock_request = responses.calls[0].request
        req_params = parse_qs(mock_request.body)
        assert req_params['grant_type'] == ['authorization_code']
        assert req_params['code'] == ['oauth-code']
        assert req_params['redirect_uri'] == ['http://testserver/extensions/slack/setup/']
        assert req_params['client_id'] == ['slack-client-id']
        assert req_params['client_secret'] == ['slack-client-secret']

        assert resp.status_code == 200
        self.assertDialogSuccess(resp)
Example #19
File: responses.py  Project: spulec/moto
    def setup_class(self, request, full_url, headers):
        querystring = {}
        if hasattr(request, 'body'):
            # Boto
            self.body = request.body
        else:
            # Flask server

            # FIXME: At least in Flask==0.10.1, request.data is an empty string
            # and the information we want is in request.form. Keeping self.body
            # definition for back-compatibility
            self.body = request.data

            querystring = {}
            for key, value in request.form.items():
                querystring[key] = [value, ]

        raw_body = self.body
        if isinstance(self.body, six.binary_type):
            self.body = self.body.decode('utf-8')

        if not querystring:
            querystring.update(
                parse_qs(urlparse(full_url).query, keep_blank_values=True))
        if not querystring:
            if 'json' in request.headers.get('content-type', []) and self.aws_service_spec:
                decoded = json.loads(self.body)

                target = request.headers.get(
                    'x-amz-target') or request.headers.get('X-Amz-Target')
                service, method = target.split('.')
                input_spec = self.aws_service_spec.input_spec(method)
                flat = flatten_json_request_body('', decoded, input_spec)
                for key, value in flat.items():
                    querystring[key] = [value]
            elif self.body:
                try:
                    querystring.update(parse_qs(raw_body, keep_blank_values=True))
                except UnicodeEncodeError:
                    pass  # ignore encoding errors, as the body may not contain a legitimate querystring
        if not querystring:
            querystring.update(headers)

        try:
            querystring = _decode_dict(querystring)
        except UnicodeDecodeError:
            pass  # ignore decoding errors, as the body may not contain a legitimate querystring

        self.uri = full_url
        self.path = urlparse(full_url).path
        self.querystring = querystring
        self.method = request.method
        self.region = self.get_region_from_url(request, full_url)
        self.uri_match = None

        self.headers = request.headers
        if 'host' not in self.headers:
            self.headers['host'] = urlparse(full_url).netloc
        self.response_headers = {"server": "amazon.com"}
Example #20
    def equals(self, rhs):
        lhsp = urlparse.urlparse(self.lhs)
        rhsp = urlparse.urlparse(rhs)

        return (lhsp.scheme == rhsp.scheme and
                lhsp.netloc == rhsp.netloc and
                lhsp.path == rhsp.path and
                urlparse.parse_qs(lhsp.query) == urlparse.parse_qs(rhsp.query))
Example #21
    def assertQueryStringIs(self, qs=''):
        """Verify the QueryString matches what is expected.

        The qs parameter should be of the format \'foo=bar&abc=xyz\'
        """
        expected = urlparse.parse_qs(qs, keep_blank_values=True)
        parts = urlparse.urlparse(self.requests.last_request.url)
        querystring = urlparse.parse_qs(parts.query, keep_blank_values=True)
        self.assertEqual(expected, querystring)
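
A hypothetical call from a test that has just issued a request through the mocked session (the endpoint and parameters below are illustrative); parameter order does not matter because both sides are normalized by parse_qs:

    def test_list_filters(self):
        self.client.get('/volumes?marker=abc&limit=10')
        self.assertQueryStringIs('limit=10&marker=abc')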
Example #22
 def test_github(self):
     """Test GitHub integration"""
     httpretty.register_uri(
         httpretty.POST,
         'https://github.com/login/oauth/access_token',
         body=json.dumps({
             'access_token': '123',
             'token_type': 'bearer',
         })
     )
     httpretty.register_uri(
         httpretty.GET,
         'https://api.github.com/user',
         body=json.dumps({
             'email': '*****@*****.**',
             'login': '******',
             'id': 1,
             'name': 'Weblate',
         }),
     )
     httpretty.register_uri(
         httpretty.GET,
         'https://api.github.com/user/emails',
         body=json.dumps([
             {
                 'email': '*****@*****.**',
                 'verified': False,
                 'primary': False,
             }, {
                 'email': '*****@*****.**',
                 'verified': True,
                 'primary': True
             }
         ])
     )
     response = self.client.get(reverse('social:begin', args=('github',)))
     self.assertEqual(response.status_code, 302)
     self.assertTrue(
         response['Location'].startswith(
             'https://github.com/login/oauth/authorize'
         )
     )
     query = parse_qs(urlparse(response['Location']).query)
     return_query = parse_qs(urlparse(query['redirect_uri'][0]).query)
     response = self.client.get(
         reverse('social:complete', args=('github',)),
         {
             'state': query['state'][0],
             'redirect_state': return_query['redirect_state'][0],
             'code': 'XXX'
         },
         follow=True
     )
     user = User.objects.get(username='******')
     self.assertEqual(user.first_name, 'Weblate')
     self.assertEqual(user.email, '*****@*****.**')
Example #23
 def same_urls(url_a, url_b):
     a = urlparse(url_a)
     b = urlparse(url_b)
     return (a.scheme == b.scheme and
             a.netloc == b.netloc and
             a.path == b.path and
             a.params == b.params and
             parse_qs(a.query) == parse_qs(b.query) and
             a.fragment == b.fragment
             )
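
Because only the query strings go through parse_qs, parameter order is ignored while scheme, netloc, path, params, and fragment must still match exactly; for example (URLs are illustrative):

assert same_urls('https://example.com/a?x=1&y=2',
                 'https://example.com/a?y=2&x=1')      # order ignored
assert not same_urls('https://example.com/a?x=1',
                     'https://example.com/a?x=1&y=2')  # differing query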
Example #24
    def verify_endpoint(self, request="", cookie=None, **kwargs):
        _req = urlparse.parse_qs(request)
        try:
            areq = urlparse.parse_qs(_req["query"][0])
        except KeyError:
            return BadRequest()

        authn, acr = self.pick_auth(areq=areq)
        kwargs["cookie"] = cookie
        return authn.verify(_req, **kwargs)
Example #25
    def assert_setup_flow(self):
        resp = self.client.get(self.init_path)
        assert resp.status_code == 302
        redirect = urlparse(resp['Location'])
        assert redirect.scheme == 'https'
        assert redirect.netloc == 'github.com'
        assert redirect.path == '/apps/sentry-test-app'

        # App installation ID is provided
        resp = self.client.get(u'{}?{}'.format(
            self.setup_path,
            urlencode({'installation_id': self.installation_id})
        ))

        redirect = urlparse(resp['Location'])

        assert resp.status_code == 302
        assert redirect.scheme == 'https'
        assert redirect.netloc == 'github.com'
        assert redirect.path == '/login/oauth/authorize'

        params = parse_qs(redirect.query)

        assert params['state']
        assert params['redirect_uri'] == ['http://testserver/extensions/github/setup/']
        assert params['response_type'] == ['code']
        assert params['client_id'] == ['github-client-id']

        # Compact list values into singular values, since there's only ever one.
        authorize_params = {k: v[0] for k, v in six.iteritems(params)}

        resp = self.client.get(u'{}?{}'.format(
            self.setup_path,
            urlencode({
                'code': 'oauth-code',
                'state': authorize_params['state'],
            })
        ))

        oauth_exchange = responses.calls[0]
        req_params = parse_qs(oauth_exchange.request.body)

        assert req_params['grant_type'] == ['authorization_code']
        assert req_params['code'] == ['oauth-code']
        assert req_params['redirect_uri'] == ['http://testserver/extensions/github/setup/']
        assert req_params['client_id'] == ['github-client-id']
        assert req_params['client_secret'] == ['github-client-secret']

        assert oauth_exchange.response.status_code == 200

        auth_header = responses.calls[2].request.headers['Authorization']
        assert auth_header == 'Bearer jwt_token_1'

        self.assertDialogSuccess(resp)
        return resp
Example #26
 def test_calls_request_with_correct_form_data(self):
     self._api.create_whitelabel_user('Expected Identifier')
     expected = JSONBodyProducer({
         'identifier': 'Expected Identifier',
         'app_key': 'Expected Rocket Key',
         'session': 'Expected Session Value',
         'signature': 'Base64 Encoded RSA Signed Value',
         'secret_key': 'RSA Encrypted Value'
     })
     actual = mockito.getCallArgument(self._agent, 'request', 4)
     self.assertEquals(parse_qs(actual.body), parse_qs(expected.body))
Example #27
 def test_calls_request_with_correct_form_data(self):
     self._api.authorize('Expected Username', 'Expected Session Value', 'Expected User Push ID Value')
     expected = JSONToURLEncodedFormDataBodyProducer({
         'username': '******',
         'user_push_id': 'Expected User Push ID Value',
         'app_key': 'Expected Rocket Key',
         'session': 'Expected Session Value',
         'signature': 'Base64 Encoded RSA Signed Value',
         'secret_key': 'RSA Encrypted Value'
     })
     actual = mockito.getCallArgument(self._agent, 'request', 4)
     self.assertEquals(parse_qs(actual.body), parse_qs(expected.body))
Example #28
    def assert_query_string_equal(self, expected_qs, actual_qs):
        """
        Compares two query strings to see if they are equivalent. Note that order of parameters is not significant.

        Args:
            expected_qs (str): The expected query string.
            actual_qs (str): The actual query string.

        Raises:
            AssertionError: If the two query strings are not equal.
        """
        self.assertDictEqual(parse_qs(expected_qs), parse_qs(actual_qs))
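
A short illustrative call; the assertion passes because parse_qs turns both strings into the same dict, although the relative order of repeated values for a single key is still preserved and compared:

        self.assert_query_string_equal('a=1&b=2&b=3', 'b=2&b=3&a=1')
        # Both sides parse to {'a': ['1'], 'b': ['2', '3']}, so this passes;
        # 'b=3&b=2&a=1' would not, since its 'b' list is ['3', '2'].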
Example #29
 def parse_response(self, response):
     if self.api == 'bpdailyreport2':
         self.response = response
     elif self.api == 'bp10emu':        
         query_string = urlparse(response)
         response = parse_qs(query_string.query)
         self.response = response
         self.assign_response_values()
     elif self.api == 'stq' or self.api == 'bp20rebadmin':
         response = parse_qs(response)
         self.response = response
         self.assign_response_values()
Example #30
 def test_ga_template_tag(self):
     rf = RequestFactory()
     post_request = rf.post('/submit/', {'foo': 'bar'})
     url = google_analytics(
         {'request': post_request},
         tracking_code='ua-test-id', debug=True)
     self.assertEqual(parse_qs(url).get('tracking_code'), ['ua-test-id'])
     self.assertEqual(parse_qs(url).get('utmdebug'), ['1'])
     url = google_analytics(
         {'request': post_request},
         tracking_code='ua-test-id', debug=False)
     self.assertEqual(parse_qs(url).get('utmdebug'), None)
Example #31
    def view_autocomplete(self, request, group, **kwargs):
        query = request.GET.get("autocomplete_query")
        field = request.GET.get("autocomplete_field")
        project = self.get_option("default_project", group.project)

        if field == "issue_id":
            client = self.get_jira_client(group.project)
            try:
                response = client.search_issues(project, query)
            except ApiError as e:
                return Response(
                    {
                        "error_type": "validation",
                        "errors": [{"__all__": self.message_from_error(e)}],
                    },
                    status=400,
                )
            else:
                issues = [
                    {"text": "(%s) %s" % (i["key"], i["fields"]["summary"]), "id": i["key"]}
                    for i in response.get("issues", [])
                ]
                return Response({field: issues})

        jira_url = request.GET.get("jira_url")
        if jira_url:
            jira_url = unquote_plus(jira_url)
            parsed = list(urlsplit(jira_url))
            jira_query = parse_qs(parsed[3])

            jira_client = self.get_jira_client(group.project)

            is_user_api = re.search("/rest/api/(latest|[0-9])/user/", jira_url)

            is_user_picker = "/rest/api/1.0/users/picker" in jira_url

            if is_user_api:  # it's the JSON version of the autocompleter
                is_xml = False
                jira_query["username"] = query.encode("utf8")
                jira_query.pop(
                    "issueKey", False
                )  # for some reason JIRA complains if this key is in the URL.
                jira_query["project"] = project.encode("utf8")
            elif is_user_picker:
                is_xml = False
                # for whatever reason, the create meta api returns an
                # invalid path, so let's just use the correct, documented one here:
                # https://docs.atlassian.com/jira/REST/cloud/#api/2/user
                # also, only pass path so saved instance url will be used
                parsed[0] = ""
                parsed[1] = ""
                parsed[2] = "/rest/api/2/user/picker"
                jira_query["query"] = query.encode("utf8")
            else:  # it's the stupid XML version of the API.
                is_xml = True
                jira_query["query"] = query.encode("utf8")
                if jira_query.get("fieldName"):
                    # for some reason its a list.
                    jira_query["fieldName"] = jira_query["fieldName"][0]

            parsed[3] = urlencode(jira_query)
            final_url = urlunsplit(parsed)

            autocomplete_response = jira_client.get_cached(final_url)

            if is_user_picker:
                autocomplete_response = autocomplete_response["users"]

            users = []

            if is_xml:
                for userxml in autocomplete_response.xml.findAll("users"):
                    users.append(
                        {"id": userxml.find("name").text, "text": userxml.find("html").text}
                    )
            else:
                for user in autocomplete_response:
                    if user.get("name"):
                        users.append(self._get_formatted_user(user))

            # if JIRA user doesn't have proper permission for user api,
            # try the assignee api instead
            if not users and is_user_api:
                try:
                    autocomplete_response = jira_client.search_users_for_project(
                        jira_query.get("project"), jira_query.get("username")
                    )
                except (ApiUnauthorized, ApiError) as e:

                    return Response(
                        {
                            "error_type": "validation",
                            "errors": [{"__all__": self.message_from_error(e)}],
                        },
                        status=400,
                    )

                for user in autocomplete_response:
                    if user.get("name"):
                        users.append(self._get_formatted_user(user))

            return Response({field: users})
Example #32
    def test_github(self, confirm=None, fail=False):
        """Test GitHub integration"""
        try:
            # psa creates copy of settings...
            orig_backends = social_django.utils.BACKENDS
            social_django.utils.BACKENDS = GH_BACKENDS

            httpretty.register_uri(
                httpretty.POST,
                'https://github.com/login/oauth/access_token',
                body=json.dumps({
                    'access_token': '123',
                    'token_type': 'bearer',
                }))
            httpretty.register_uri(
                httpretty.GET,
                'https://api.github.com/user',
                body=json.dumps({
                    'email': '*****@*****.**',
                    'login': '******',
                    'id': 1,
                    'name': 'Test Weblate Name',
                }),
            )
            httpretty.register_uri(httpretty.GET,
                                   'https://api.github.com/user/emails',
                                   body=json.dumps([{
                                       'email': '*****@*****.**',
                                       'verified': False,
                                       'primary': False,
                                   }, {
                                       'email': '*****@*****.**',
                                       'verified': True,
                                       'primary': True
                                   }]))
            response = self.client.post(
                reverse('social:begin', args=('github', )))
            self.assertEqual(response.status_code, 302)
            self.assertTrue(response['Location'].startswith(
                'https://github.com/login/oauth/authorize'))
            query = parse_qs(urlparse(response['Location']).query)
            return_query = parse_qs(urlparse(query['redirect_uri'][0]).query)
            response = self.client.get(
                reverse('social:complete', args=('github', )), {
                    'state': query['state'][0] or return_query['state'][0],
                    'code': 'XXX'
                },
                follow=True)
            if fail:
                self.assertContains(
                    response, 'is already associated with another account')
                return
            if confirm:
                self.assertContains(response, 'Confirm new association')
                response = self.client.post(reverse('confirm'),
                                            {'password': confirm},
                                            follow=True)
            self.assertContains(response, 'Test Weblate Name')
            user = User.objects.get(username='******')
            self.assertEqual(user.full_name, 'Test Weblate Name')
            self.assertEqual(user.email, '*****@*****.**')
        finally:
            social_django.utils.BACKENDS = orig_backends
Example #33
    def new_websocket_client(self):
        """Called after a new WebSocket connection has been established."""
        # Reopen the eventlet hub to make sure we don't share an epoll
        # fd with parent and/or siblings, which would be bad
        from eventlet import hubs
        hubs.use_hub()

        # Nova expects the token to be passed as a query
        # parameter of the GET request
        parse = urlparse.urlparse(self.path)
        if parse.scheme not in ('http', 'https'):
            # Due to a bug in urlparse in Python < 2.7.4 we cannot support
            # special schemes (cf: http://bugs.python.org/issue9374)
            if sys.version_info < (2, 7, 4):
                raise exception.NovaException(
                    _("We do not support scheme '%s' under Python < 2.7.4, "
                      "please use http or https") % parse.scheme)

        query = parse.query
        token = urlparse.parse_qs(query).get("token", [""]).pop()
        if not token:
            # NoVNC uses its own convention of forwarding the token
            # from the request to a cookie header; we should also
            # check for this behavior
            hcookie = self.headers.get('cookie')
            if hcookie:
                cookie = Cookie.SimpleCookie()
                for hcookie_part in hcookie.split(';'):
                    hcookie_part = hcookie_part.lstrip()
                    try:
                        cookie.load(hcookie_part)
                    except Cookie.CookieError:
                        # NOTE(stgleb): Do not print out cookie content
                        # for security reasons.
                        LOG.warning('Found malformed cookie')
                    else:
                        if 'token' in cookie:
                            token = cookie['token'].value

        ctxt = context.get_admin_context()
        connect_info = self._get_connect_info(ctxt, token)

        # Verify Origin
        expected_origin_hostname = self.headers.get('Host')
        if ':' in expected_origin_hostname:
            e = expected_origin_hostname
            if '[' in e and ']' in e:
                expected_origin_hostname = e.split(']')[0][1:]
            else:
                expected_origin_hostname = e.split(':')[0]
        expected_origin_hostnames = CONF.console.allowed_origins
        expected_origin_hostnames.append(expected_origin_hostname)
        origin_url = self.headers.get('Origin')
        # missing origin header indicates non-browser client which is OK
        if origin_url is not None:
            origin = urlparse.urlparse(origin_url)
            origin_hostname = origin.hostname
            origin_scheme = origin.scheme
            # If the console connection was forwarded by a proxy (example:
            # haproxy), the original protocol could be contained in the
            # X-Forwarded-Proto header instead of the Origin header. Prefer the
            # forwarded protocol if it is present.
            forwarded_proto = self.headers.get('X-Forwarded-Proto')
            if forwarded_proto is not None:
                origin_scheme = forwarded_proto
            if origin_hostname == '' or origin_scheme == '':
                detail = _("Origin header not valid.")
                raise exception.ValidationError(detail=detail)
            if origin_hostname not in expected_origin_hostnames:
                detail = _("Origin header does not match this host.")
                raise exception.ValidationError(detail=detail)
            if not self.verify_origin_proto(connect_info, origin_scheme):
                detail = _("Origin header protocol does not match this host.")
                raise exception.ValidationError(detail=detail)

        sanitized_info = copy.copy(connect_info)
        sanitized_info.token = '***'
        self.msg(_('connect info: %s'), sanitized_info)

        host = connect_info.host
        port = connect_info.port

        # Connect to the target
        self.msg(
            _("connecting to: %(host)s:%(port)s") % {
                'host': host,
                'port': port
            })
        tsock = self.socket(host, port, connect=True)

        # Handshake as necessary
        if 'internal_access_path' in connect_info:
            path = connect_info.internal_access_path
            if path:
                tsock.send(
                    encodeutils.safe_encode('CONNECT %s HTTP/1.1\r\n\r\n' %
                                            path))
                end_token = "\r\n\r\n"
                while True:
                    data = tsock.recv(4096, socket.MSG_PEEK)
                    token_loc = data.find(end_token)
                    if token_loc != -1:
                        if data.split("\r\n")[0].find("200") == -1:
                            raise exception.InvalidConnectionInfo()
                        # remove the response from recv buffer
                        tsock.recv(token_loc + len(end_token))
                        break

        if self.server.security_proxy is not None:
            tenant_sock = TenantSock(self)

            try:
                tsock = self.server.security_proxy.connect(tenant_sock, tsock)
            except exception.SecurityProxyNegotiationFailed:
                LOG.exception("Unable to perform security proxying, shutting "
                              "down connection")
                tenant_sock.close()
                tsock.shutdown(socket.SHUT_RDWR)
                tsock.close()
                raise

            tenant_sock.finish_up()

        # Start proxying
        try:
            self.do_proxy(tsock)
        except Exception:
            if tsock:
                tsock.shutdown(socket.SHUT_RDWR)
                tsock.close()
                self.vmsg(
                    _("%(host)s:%(port)s: "
                      "Websocket client or target closed") % {
                          'host': host,
                          'port': port
                      })
            raise
Example #34
def decode_state(state):
    return parse.parse_qs(state)
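
A tiny sketch of what this returns, assuming parse here is urllib.parse (or its six.moves equivalent):

state = decode_state('next=%2Fdashboard&csrf=abc123')
# {'next': ['/dashboard'], 'csrf': ['abc123']}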
Example #35
    def _update(self, req, id, body):
        context = req.environ['manila.context']
        project_id = id
        bad_keys = []
        force_update = False
        params = parse.parse_qs(req.environ.get('QUERY_STRING', ''))
        user_id = params.get('user_id', [None])[0]

        try:
            settable_quotas = QUOTAS.get_settable_quotas(context,
                                                         project_id,
                                                         user_id=user_id)
        except exception.NotAuthorized:
            raise webob.exc.HTTPForbidden()

        for key, value in body.get('quota_set', {}).items():
            if (key not in QUOTAS and key not in NON_QUOTA_KEYS):
                bad_keys.append(key)
                continue
            if key == 'force':
                force_update = strutils.bool_from_string(value)
            elif key not in NON_QUOTA_KEYS and value:
                try:
                    value = int(value)
                except (ValueError, TypeError):
                    msg = _("Quota '%(value)s' for %(key)s should be "
                            "integer.") % {
                                'value': value,
                                'key': key
                            }
                    LOG.warning(msg)
                    raise webob.exc.HTTPBadRequest(explanation=msg)

        LOG.debug("Force update quotas: %s.", force_update)

        if len(bad_keys) > 0:
            msg = _("Bad key(s) %s in quota_set.") % ",".join(bad_keys)
            raise webob.exc.HTTPBadRequest(explanation=msg)

        try:
            quotas = self._get_quotas(context,
                                      id,
                                      user_id=user_id,
                                      usages=True)
        except exception.NotAuthorized:
            raise webob.exc.HTTPForbidden()

        for key, value in body.get('quota_set', {}).items():
            if key in NON_QUOTA_KEYS or (not value and value != 0):
                continue
            # validate whether already used and reserved exceeds the new
            # quota, this check will be ignored if admin want to force
            # update
            try:
                value = int(value)
            except (ValueError, TypeError):
                msg = _("Quota '%(value)s' for %(key)s should be "
                        "integer.") % {
                            'value': value,
                            'key': key
                        }
                LOG.warning(msg)
                raise webob.exc.HTTPBadRequest(explanation=msg)

            if force_update is False and value >= 0:
                quota_value = quotas.get(key)
                if quota_value and quota_value['limit'] >= 0:
                    quota_used = (quota_value['in_use'] +
                                  quota_value['reserved'])
                    LOG.debug(
                        "Quota %(key)s used: %(quota_used)s, "
                        "value: %(value)s.", {
                            'key': key,
                            'quota_used': quota_used,
                            'value': value
                        })
                    if quota_used > value:
                        msg = (_("Quota value %(value)s for %(key)s are "
                                 "greater than already used and reserved "
                                 "%(quota_used)s.") % {
                                     'value': value,
                                     'key': key,
                                     'quota_used': quota_used
                                 })
                        raise webob.exc.HTTPBadRequest(explanation=msg)

            minimum = settable_quotas[key]['minimum']
            maximum = settable_quotas[key]['maximum']
            self._validate_quota_limit(value, minimum, maximum, force_update)
            try:
                db.quota_create(context,
                                project_id,
                                key,
                                value,
                                user_id=user_id)
            except exception.QuotaExists:
                db.quota_update(context,
                                project_id,
                                key,
                                value,
                                user_id=user_id)
            except exception.AdminRequired:
                raise webob.exc.HTTPForbidden()
        return self._view_builder.detail_list(
            self._get_quotas(context, id, user_id=user_id))
Example #36
def parse_query_url(url):
    base_url, query_params = url.split('?')
    return base_url, parse.parse_qs(query_params)
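
A minimal usage sketch (the URL is arbitrary); note that the bare split('?') assumes exactly one '?' in the URL:

base, params = parse_query_url('https://api.example.com/items?page=2&sort=name')
# base   == 'https://api.example.com/items'
# params == {'page': ['2'], 'sort': ['name']}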
Example #37
    def do_GET(self):
        addon = xbmcaddon.Addon('plugin.video.youtube')
        dash_proxy_enabled = addon.getSetting('kodion.mpd.proxy') == 'true'
        api_config_enabled = addon.getSetting(
            'youtube.api.config.page') == 'true'

        if not self.client_address[0].startswith(
                self.local_ranges
        ) and not self.client_address[0] in self.whitelist_ips:
            self.send_error(403)
        else:
            if dash_proxy_enabled and self.path.endswith('.mpd'):
                file_path = xbmc.translatePath(self.base_path + self.path)
                file_chunk = True
                xbmc.log(
                    '[plugin.video.youtube] HTTPServer: Request |{proxy_path}| -> |{file_path}|'
                    .format(proxy_path=self.path,
                            file_path=file_path), xbmc.LOGDEBUG)
                try:
                    with open(file_path, 'rb') as f:
                        self.send_response(200)
                        self.send_header('Content-Type',
                                         'application/xml+dash')
                        self.send_header('Content-Length',
                                         os.path.getsize(file_path))
                        self.end_headers()
                        while file_chunk:
                            file_chunk = f.read(self.chunk_size)
                            if file_chunk:
                                self.wfile.write(file_chunk)
                except IOError:
                    response = 'File Not Found: |{proxy_path}| -> |{file_path}|'.format(
                        proxy_path=self.path, file_path=file_path)
                    self.send_error(404, response)
            elif api_config_enabled and self.path == '/api':
                html = self.api_config_page()
                html = html.encode('utf-8')
                self.send_response(200)
                self.send_header('Content-Type', 'text/html; charset=utf-8')
                self.send_header('Content-Length', len(html))
                self.end_headers()
                for chunk in self.get_chunks(html):
                    self.wfile.write(chunk)
            elif api_config_enabled and self.path.startswith('/api_submit'):
                addon = xbmcaddon.Addon('plugin.video.youtube')
                i18n = addon.getLocalizedString
                xbmc.executebuiltin('Dialog.Close(addonsettings,true)')
                old_api_key = addon.getSetting('youtube.api.key')
                old_api_id = addon.getSetting('youtube.api.id')
                old_api_secret = addon.getSetting('youtube.api.secret')
                query = urlparse(self.path).query
                params = parse_qs(query)
                api_key = params.get('api_key', [None])[0]
                api_id = params.get('api_id', [None])[0]
                api_secret = params.get('api_secret', [None])[0]
                if api_key and api_id and api_secret:
                    footer = i18n(30638)
                else:
                    footer = u''
                if re.search(r'api_key=(?:&|$)', query):
                    api_key = ''
                if re.search(r'api_id=(?:&|$)', query):
                    api_id = ''
                if re.search(r'api_secret=(?:&|$)', query):
                    api_secret = ''
                updated = []
                if api_key is not None and api_key != old_api_key:
                    addon.setSetting('youtube.api.key', api_key)
                    updated.append(i18n(30201))
                if api_id is not None and api_id != old_api_id:
                    addon.setSetting('youtube.api.id', api_id)
                    updated.append(i18n(30202))
                if api_secret is not None and api_secret != old_api_secret:
                    updated.append(i18n(30203))
                    addon.setSetting('youtube.api.secret', api_secret)
                if addon.getSetting('youtube.api.key') and addon.getSetting('youtube.api.id') and \
                        addon.getSetting('youtube.api.secret'):
                    enabled = i18n(30636)
                    addon.setSetting('youtube.api.enable', 'true')
                else:
                    enabled = i18n(30637)
                    addon.setSetting('youtube.api.enable', 'false')
                if not updated:
                    updated = i18n(30635)
                else:
                    updated = i18n(30631) % u', '.join(updated)
                html = self.api_submit_page(updated, enabled, footer)
                html = html.encode('utf-8')
                self.send_response(200)
                self.send_header('Content-Type', 'text/html; charset=utf-8')
                self.send_header('Content-Length', len(html))
                self.end_headers()
                for chunk in self.get_chunks(html):
                    self.wfile.write(chunk)
            elif self.path == '/ping':
                self.send_error(204)
            else:
                self.send_error(403)
Example #38
    def view_autocomplete(self, request, group, **kwargs):
        query = request.GET.get('autocomplete_query')
        field = request.GET.get('autocomplete_field')
        project = self.get_option('default_project', group.project)

        if field == 'issue_id':
            client = self.get_jira_client(group.project)
            try:
                response = client.search_issues(project, query)
            except ApiError as e:
                return Response(
                    {
                        'error_type': 'validation',
                        'errors': [{
                            '__all__': self.message_from_error(e)
                        }]
                    },
                    status=400)
            else:
                issues = [{
                    'text': '(%s) %s' % (i['key'], i['fields']['summary']),
                    'id': i['key'],
                } for i in response.get('issues', [])]
                return Response({field: issues})

        jira_url = request.GET.get('jira_url')
        if jira_url:
            jira_url = unquote_plus(jira_url)
            parsed = list(urlsplit(jira_url))
            jira_query = parse_qs(parsed[3])

            jira_client = self.get_jira_client(group.project)

            is_user_api = '/rest/api/latest/user/' in jira_url

            is_user_picker = '/rest/api/1.0/users/picker' in jira_url

            if is_user_api:  # it's the JSON version of the autocompleter
                is_xml = False
                jira_query['username'] = query.encode('utf8')
                # for some reason JIRA complains if this key is in the URL.
                jira_query.pop('issueKey', False)
                jira_query['project'] = project.encode('utf8')
            elif is_user_picker:
                is_xml = False
                # for whatever reason, the create meta api returns an
                # invalid path, so let's just use the correct, documented one here:
                # https://docs.atlassian.com/jira/REST/cloud/#api/2/user
                # also, only pass path so saved instance url will be used
                parsed[0] = ''
                parsed[1] = ''
                parsed[2] = '/rest/api/2/user/picker'
                jira_query['query'] = query.encode('utf8')
            else:  # it's the stupid XML version of the API.
                is_xml = True
                jira_query['query'] = query.encode('utf8')
                if jira_query.get('fieldName'):
                    # for some reason it's a list.
                    jira_query['fieldName'] = jira_query['fieldName'][0]

            parsed[3] = urlencode(jira_query)
            final_url = urlunsplit(parsed)

            autocomplete_response = jira_client.get_cached(final_url)

            if is_user_picker:
                autocomplete_response = autocomplete_response['users']

            users = []

            if is_xml:
                for userxml in autocomplete_response.xml.findAll("users"):
                    users.append({
                        'id': userxml.find('name').text,
                        'text': userxml.find('html').text
                    })
            else:
                for user in autocomplete_response:
                    if user.get('name'):
                        users.append(self._get_formatted_user(user))

            # if JIRA user doesn't have proper permission for user api,
            # try the assignee api instead
            if not users and is_user_api:
                try:
                    autocomplete_response = jira_client.search_users_for_project(
                        jira_query.get('project'), jira_query.get('username'))
                except (ApiUnauthorized, ApiError) as e:
                    return Response(
                        {
                            'error_type': 'validation',
                            'errors': [{
                                '__all__': self.message_from_error(e)
                            }]
                        },
                        status=400)

                for user in autocomplete_response:
                    if user.get('name'):
                        users.append(self._get_formatted_user(user))

            return Response({field: users})
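
The URL rewriting above follows a common pattern: urlsplit the URL, edit the parsed query with parse_qs, re-encode it with urlencode, and urlunsplit the pieces. A minimal sketch of that round trip, using an invented Jira-style URL rather than anything from the plugin:

from urllib.parse import urlsplit, urlunsplit, parse_qs, urlencode

url = 'https://example.atlassian.net/rest/api/2/user/picker?maxResults=10'
parsed = list(urlsplit(url))

query = parse_qs(parsed[3])     # index 3 is the query component
query['query'] = 'bob'          # add or override a parameter

# doseq=True expands the list values produced by parse_qs into repeated key=value pairs.
parsed[3] = urlencode(query, doseq=True)
print(urlunsplit(parsed))
# https://example.atlassian.net/rest/api/2/user/picker?maxResults=10&query=bob
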
Example #39
    def assert_setup_flow(self, is_team=False):
        responses.reset()

        access_json = {
            "user_id": "my_user_id",
            "access_token": "my_access_token",
            "installation_id": "my_config_id",
        }

        if is_team:
            team_query = "?teamId=my_team_id"
            access_json["team_id"] = "my_team_id"
            responses.add(
                responses.GET,
                "https://api.vercel.com/v1/teams/my_team_id%s" % team_query,
                json={"name": "my_team_name"},
            )
        else:
            team_query = ""
            responses.add(
                responses.GET,
                "https://api.vercel.com/www/user",
                json={"user": {
                    "name": "my_user_name"
                }},
            )

        responses.add(responses.POST,
                      "https://api.vercel.com/v2/oauth/access_token",
                      json=access_json)

        responses.add(
            responses.GET,
            "https://api.vercel.com/v4/projects/%s" % team_query,
            json={"projects": []},
        )

        responses.add(
            responses.POST,
            "https://api.vercel.com/v1/integrations/webhooks%s" % team_query,
            json={"id": "webhook-id"},
        )

        resp = self.client.get(u"{}?{}".format(
            self.setup_path,
            urlencode({"code": "oauth-code"}),
        ))

        mock_request = responses.calls[0].request
        req_params = parse_qs(mock_request.body)
        assert req_params["grant_type"] == ["authorization_code"]
        assert req_params["code"] == ["oauth-code"]
        assert req_params["redirect_uri"] == [
            "http://testserver/extensions/vercel/configure/"
        ]
        assert req_params["client_id"] == ["vercel-client-id"]
        assert req_params["client_secret"] == ["vercel-client-secret"]

        assert resp.status_code == 200
        self.assertDialogSuccess(resp)

        integration = Integration.objects.get(provider=self.provider.key)

        external_id = "my_team_id" if is_team else "my_user_id"
        name = "my_team_name" if is_team else "my_user_name"
        installation_type = "team" if is_team else "user"

        assert integration.external_id == external_id
        assert integration.name == name
        assert integration.metadata == {
            "access_token": "my_access_token",
            "installation_id": "my_config_id",
            "installation_type": installation_type,
            "webhook_id": "webhook-id",
        }
        assert OrganizationIntegration.objects.get(
            integration=integration, organization=self.organization)
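
The assertions above compare against single-element lists because parse_qs always returns a list per key, even for parameters that appear once. A standalone sketch of the same kind of check, with a made-up token-exchange body:

from urllib.parse import parse_qs

body = 'grant_type=authorization_code&code=oauth-code&client_id=vercel-client-id'
params = parse_qs(body)

assert params['grant_type'] == ['authorization_code']  # a list, not a bare string
assert params['code'] == ['oauth-code']
assert params.get('client_secret') is None              # absent keys are simply missing
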
Example #40
File: tests.py  Project: hectorip/rapidpro
    def test_event_deliveries(self):
        sms = self.create_msg(contact=self.joe,
                              direction='I',
                              status='H',
                              text="I'm gonna pop some tags")

        with patch('requests.Session.send') as mock:
            now = timezone.now()
            mock.return_value = MockResponse(200, "Hello World")

            # trigger an event, shouldn't fire as we don't have a webhook
            WebHookEvent.trigger_sms_event(WebHookEvent.TYPE_SMS_RECEIVED, sms,
                                           now)
            self.assertFalse(WebHookEvent.objects.all())

        self.setupChannel()

        with patch('requests.Session.send') as mock:
            # clear out which events we listen for; we still shouldn't be notified even though we have a webhook
            self.channel.org.webhook_events = 0
            self.channel.org.save()

            now = timezone.now()
            mock.return_value = MockResponse(200, "Hello World")

            # trigger an event, shouldn't fire since no webhook events are enabled
            WebHookEvent.trigger_sms_event(WebHookEvent.TYPE_SMS_RECEIVED, sms,
                                           now)
            self.assertFalse(WebHookEvent.objects.all())

        self.setupChannel()

        with patch('requests.Session.send') as mock:
            # remove all the org users
            self.org.administrators.clear()
            self.org.editors.clear()
            self.org.viewers.clear()

            mock.return_value = MockResponse(200, "Hello World")

            # trigger an event
            WebHookEvent.trigger_sms_event(WebHookEvent.TYPE_SMS_RECEIVED, sms,
                                           now)
            event = WebHookEvent.objects.get()

            self.assertEqual('F', event.status)
            self.assertEqual(0, event.try_count)
            self.assertFalse(event.next_attempt)

            result = WebHookResult.objects.get()
            self.assertIn("No active user", result.message)
            self.assertEqual(0, result.status_code)

            self.assertFalse(mock.called)

            # what if they send weird json back?
            WebHookEvent.objects.all().delete()
            WebHookResult.objects.all().delete()

        # add the admin back in
        self.org.administrators.add(self.admin)
        self.admin.set_org(self.org)

        with patch('requests.Session.send') as mock:
            mock.return_value = MockResponse(200, "Hello World")

            # trigger an event
            WebHookEvent.trigger_sms_event(WebHookEvent.TYPE_SMS_RECEIVED, sms,
                                           now)
            event = WebHookEvent.objects.get()

            self.assertEqual('C', event.status)
            self.assertEqual(1, event.try_count)
            self.assertFalse(event.next_attempt)

            result = WebHookResult.objects.get()
            self.assertIn("Event delivered successfully", result.message)
            self.assertIn("not JSON", result.message)
            self.assertEqual(200, result.status_code)

            self.assertTrue(mock.called)

            WebHookEvent.objects.all().delete()
            WebHookResult.objects.all().delete()

        with patch('requests.Session.send') as mock:
            mock.side_effect = [MockResponse(500, "I am error")]

            # trigger an event
            WebHookEvent.trigger_sms_event(WebHookEvent.TYPE_SMS_RECEIVED, sms,
                                           now)
            event = WebHookEvent.objects.all().first()

            self.assertEqual('E', event.status)
            self.assertEqual(1, event.try_count)
            self.assertTrue(event.next_attempt)

            mock.return_value = MockResponse(200, "Hello World")
            # simulate missing channel
            event.channel = None
            event.save()

            # no exception should be raised
            event.deliver()

            self.assertTrue(mock.called)
            self.assertEqual(mock.call_count, 2)

            WebHookEvent.objects.all().delete()
            WebHookResult.objects.all().delete()

        with patch('requests.Session.send') as mock:
            # valid json, but not our format
            bad_json = '{ "thrift_shops": ["Goodwill", "Value Village"] }'
            mock.return_value = MockResponse(200, bad_json)

            WebHookEvent.trigger_sms_event(WebHookEvent.TYPE_SMS_RECEIVED, sms,
                                           now)
            event = WebHookEvent.objects.get()

            self.assertEqual('C', event.status)
            self.assertEqual(1, event.try_count)
            self.assertFalse(event.next_attempt)

            self.assertTrue(mock.called)

            result = WebHookResult.objects.get()
            self.assertIn("Event delivered successfully", result.message)
            self.assertIn("ignoring", result.message)
            self.assertEqual(200, result.status_code)
            self.assertEqual(bad_json, result.body)

            WebHookEvent.objects.all().delete()
            WebHookResult.objects.all().delete()

        with patch('requests.Session.send') as mock:
            mock.return_value = MockResponse(
                200, '{ "phone": "+250788123123", "text": "I am success" }')

            WebHookEvent.trigger_sms_event(WebHookEvent.TYPE_SMS_RECEIVED, sms,
                                           now)
            event = WebHookEvent.objects.get()

            self.assertEqual('C', event.status)
            self.assertEqual(1, event.try_count)
            self.assertFalse(event.next_attempt)

            result = WebHookResult.objects.get()
            self.assertEqual(200, result.status_code)

            self.assertTrue(mock.called)

            broadcast = Broadcast.objects.get()
            contact, urn_obj = Contact.get_or_create(self.org,
                                                     "tel:+250788123123",
                                                     self.channel,
                                                     user=self.admin)
            self.assertEqual(broadcast.text, {'base': "I am success"})
            self.assertIn(contact, broadcast.contacts.all())

            self.assertTrue(mock.called)
            args = mock.call_args_list[0][0]
            prepared_request = args[0]
            self.assertEqual(self.org.get_webhook_url(), prepared_request.url)

            data = parse_qs(prepared_request.body)
            self.assertEqual(
                self.joe.get_urn(TEL_SCHEME).path, data['phone'][0])
            self.assertEqual(six.text_type(self.joe.get_urn(TEL_SCHEME)),
                             data['urn'][0])
            self.assertEqual(self.joe.uuid, data['contact'][0])
            self.assertEqual(self.joe.name, data['contact_name'][0])
            self.assertEqual(sms.pk, int(data['sms'][0]))
            self.assertEqual(self.channel.pk, int(data['channel'][0]))
            self.assertEqual(WebHookEvent.TYPE_SMS_RECEIVED, data['event'][0])
            self.assertEqual("I'm gonna pop some tags", data['text'][0])
            self.assertIn('time', data)

            WebHookEvent.objects.all().delete()
            WebHookResult.objects.all().delete()

        with patch('requests.Session.send') as mock:
            mock.return_value = MockResponse(500, "I am error")

            next_attempt_earliest = timezone.now() + timedelta(minutes=4)
            next_attempt_latest = timezone.now() + timedelta(minutes=6)

            WebHookEvent.trigger_sms_event(WebHookEvent.TYPE_SMS_RECEIVED, sms,
                                           now)
            event = WebHookEvent.objects.get()

            self.assertEqual('E', event.status)
            self.assertEqual(1, event.try_count)
            self.assertTrue(event.next_attempt)
            self.assertTrue(next_attempt_earliest < event.next_attempt
                            and next_attempt_latest > event.next_attempt)

            result = WebHookResult.objects.get()
            self.assertIn("Error", result.message)
            self.assertEqual(500, result.status_code)
            self.assertEqual("I am error", result.body)

            # make sure things become failures after three retries
            event.try_count = 2
            event.deliver()
            event.save()

            self.assertTrue(mock.called)

            self.assertEqual('F', event.status)
            self.assertEqual(3, event.try_count)
            self.assertFalse(event.next_attempt)

            result = WebHookResult.objects.get()
            self.assertIn("Error", result.message)
            self.assertEqual(500, result.status_code)
            self.assertEqual("I am error", result.body)
            self.assertEqual("http://fake.com/webhook.php", result.url)
            self.assertTrue(result.data.find("pop+some+tags") > 0)

            # check out our api log
            response = self.client.get(reverse('api.log'))
            self.assertRedirect(response, reverse('users.user_login'))

            response = self.client.get(reverse('api.log_read',
                                               args=[event.pk]))
            self.assertRedirect(response, reverse('users.user_login'))

            WebHookEvent.objects.all().delete()
            WebHookResult.objects.all().delete()

        # add a webhook header to the org
        self.channel.org.webhook = {
            "url": "http://fake.com/webhook.php",
            "headers": {
                "X-My-Header": "foobar",
                "Authorization": "Authorization: Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ=="
            },
            "method": "POST"
        }
        self.channel.org.save()

        # check that our webhook settings have saved
        self.assertEqual('http://fake.com/webhook.php',
                         self.channel.org.get_webhook_url())
        self.assertDictEqual(
            {
                'X-My-Header': 'foobar',
                'Authorization': 'Authorization: Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ=='
            }, self.channel.org.get_webhook_headers())

        with patch('requests.Session.send') as mock:
            mock.return_value = MockResponse(200, "Boom")
            WebHookEvent.trigger_sms_event(WebHookEvent.TYPE_SMS_RECEIVED, sms,
                                           now)
            event = WebHookEvent.objects.get()

            result = WebHookResult.objects.get()
            # both headers should appear in the recorded request string
            self.assertIn('X-My-Header: foobar', result.request)
            self.assertIn(
                'Authorization: Authorization: Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==',
                result.request)
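
One detail the webhook assertions rely on: the payload is form-encoded, so parse_qs URL-decodes it ('+' becomes a space, '%2B' becomes '+'), while the raw stored body still contains 'pop+some+tags'. A small sketch of that behaviour with invented payload fields:

from urllib.parse import parse_qs, urlencode

payload = {'text': "I'm gonna pop some tags", 'phone': '+250788123123'}
body = urlencode(payload)       # text=I%27m+gonna+pop+some+tags&phone=%2B250788123123

data = parse_qs(body)
assert data['text'][0] == "I'm gonna pop some tags"  # '+' decoded back to spaces
assert data['phone'][0] == '+250788123123'           # %2B decoded back to '+'
assert 'pop+some+tags' in body                       # the raw body keeps the '+' encoding
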
Example #41
    def _pick_idp(self, came_from):
        """
        If more than one idp and if none is selected, I have to do wayf or
        disco
        """

        _cli = self.sp

        logger.debug("[_pick_idp] %s", self.environ)
        if "HTTP_PAOS" in self.environ:
            if self.environ["HTTP_PAOS"] == PAOS_HEADER_INFO:
                if 'application/vnd.paos+xml' in self.environ["HTTP_ACCEPT"]:
                    # Where should I redirect the user to
                    # entityid -> the IdP to use
                    # relay_state -> when back from authentication

                    logger.debug("- ECP client detected -")

                    _rstate = rndstr()
                    self.cache.relay_state[_rstate] = geturl(self.environ)
                    _entityid = _cli.config.ecp_endpoint(
                        self.environ["REMOTE_ADDR"])

                    if not _entityid:
                        return -1, ServiceError("No IdP to talk to")
                    logger.debug("IdP to talk to: %s", _entityid)
                    return ecp.ecp_auth_request(_cli, _entityid, _rstate)
                else:
                    return -1, ServiceError('Faulty Accept header')
            else:
                return -1, ServiceError('unknown ECP version')

        # Find all IdPs
        idps = self.sp.metadata.with_descriptor("idpsso")

        idp_entity_id = None

        kaka = self.environ.get("HTTP_COOKIE", '')
        if kaka:
            try:
                (idp_entity_id, _) = parse_cookie("ve_disco", "SEED_SAW", kaka)
            except ValueError:
                pass
            except TypeError:
                pass
            except AttributeError:
                pass

        # Any specific IdP specified in a query part
        query = self.environ.get("QUERY_STRING")
        if not idp_entity_id and query:
            try:
                _idp_entity_id = dict(parse_qs(query))[self.idp_query_param][0]
                if _idp_entity_id in idps:
                    idp_entity_id = _idp_entity_id
            except KeyError:
                logger.debug("No IdP entity ID in query: %s", query)
                pass

        if not idp_entity_id:
            if self.wayf:
                if query:
                    try:
                        wayf_selected = dict(
                            parse_qs(query))["wayf_selected"][0]
                    except KeyError:
                        return self._wayf_redirect(came_from)
                    idp_entity_id = wayf_selected
                else:
                    return self._wayf_redirect(came_from)
            elif self.discosrv:
                if query:
                    idp_entity_id = _cli.parse_discovery_service_response(
                        query=self.environ.get("QUERY_STRING"))
                if not idp_entity_id:
                    sid_ = sid()
                    self.cache.outstanding_queries[sid_] = came_from
                    logger.debug("Redirect to Discovery Service function")
                    eid = _cli.config.entityid
                    ret = _cli.config.getattr("endpoints",
                                              "sp")["discovery_response"][0][0]
                    ret += "?sid=%s" % sid_
                    loc = _cli.create_discovery_service_request(
                        self.discosrv, eid, **{"return": ret})
                    return -1, SeeOther(loc)
            elif len(idps) == 1:
                # idps is a dictionary; list() keeps this working on Python 3
                idp_entity_id = list(idps)[0]
            elif not len(idps):
                return -1, ServiceError('Misconfiguration')
            else:
                return -1, NotImplemented("No WAYF or DS present!")

        logger.info("Chosen IdP: '%s'", idp_entity_id)
        return 0, idp_entity_id
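
The query-string lookups above use the usual idiom of taking only the first value for a parameter. A minimal sketch with an invented parameter name; the real code keys off self.idp_query_param instead:

from urllib.parse import parse_qs

query_string = 'entityID=https%3A%2F%2Fidp.example.org%2Fsaml&lang=en'

params = parse_qs(query_string)
entity_id = params.get('entityID', [None])[0]  # first value, or None when absent
print(entity_id)  # https://idp.example.org/saml
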
Example #42
 def unpack_redirect(self):
     if "QUERY_STRING" in self.environ:
         _qs = self.environ["QUERY_STRING"]
         return dict([(k, v[0]) for k, v in parse_qs(_qs).items()])
     else:
         return None
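
unpack_redirect above flattens parse_qs's list values to a single value per key. A usage sketch with an invented SAML-style query string; note that any extra duplicate values are silently dropped:

from urllib.parse import parse_qs

qs = 'SAMLRequest=abc&RelayState=xyz&RelayState=ignored'

flat = {k: v[0] for k, v in parse_qs(qs).items()}
print(flat)  # {'SAMLRequest': 'abc', 'RelayState': 'xyz'}
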
Example #43
    def update(self, req, id, body):
        context = req.environ['patron.context']
        authorize_update(context)
        project_id = id

        bad_keys = []

        # By default, we can force update the quota if the extended
        # is not loaded
        force_update = True
        extended_loaded = False
        if self.ext_mgr.is_loaded('os-extended-quotas'):
            # force optional has been enabled, the default value of
            # force_update need to be changed to False
            extended_loaded = True
            force_update = False

        user_id = None
        if self.ext_mgr.is_loaded('os-user-quotas'):
            # Update user quotas only if the extended is loaded
            params = urlparse.parse_qs(req.environ.get('QUERY_STRING', ''))
            user_id = params.get('user_id', [None])[0]

        try:
            settable_quotas = QUOTAS.get_settable_quotas(context,
                                                         project_id,
                                                         user_id=user_id)
        except exception.Forbidden:
            raise webob.exc.HTTPForbidden()

        if not self.is_valid_body(body, 'quota_set'):
            msg = _("quota_set not specified")
            raise webob.exc.HTTPBadRequest(explanation=msg)
        quota_set = body['quota_set']

        # NOTE(dims): Pass #1 - In this loop for quota_set.items(), we figure
        # out if we have bad keys or if we need to forcibly set quotas or
        # if some of the values for the quotas can be converted to integers.
        for key, value in quota_set.items():
            if (key not in self.supported_quotas
                    and key not in NON_QUOTA_KEYS):
                bad_keys.append(key)
                continue
            if key == 'force' and extended_loaded:
                # only check the force optional when the extended has
                # been loaded
                force_update = strutils.bool_from_string(value)
            elif key not in NON_QUOTA_KEYS and value:
                try:
                    value = utils.validate_integer(value, key)
                except exception.InvalidInput as e:
                    raise webob.exc.HTTPBadRequest(
                        explanation=e.format_message())

        if bad_keys:
            msg = _("Bad key(s) %s in quota_set") % ",".join(bad_keys)
            raise webob.exc.HTTPBadRequest(explanation=msg)

        # NOTE(dims): Pass #2 - In this loop for quota_set.items(), based on
        # force_update flag we validate the quota limit. A loop just for
        # the validation of min/max values ensure that we can bail out if
        # any of the items in the set is bad.
        valid_quotas = {}
        for key, value in quota_set.items():
            if key in NON_QUOTA_KEYS or (not value and value != 0):
                continue
            # validate whether already used and reserved exceeds the new
            # quota, this check will be ignored if admin want to force
            # update
            value = int(value)
            if not force_update:
                minimum = settable_quotas[key]['minimum']
                maximum = settable_quotas[key]['maximum']
                self._validate_quota_limit(key, value, minimum, maximum)
            valid_quotas[key] = value

        # NOTE(dims): Pass #3 - At this point we know that all the keys and
        # values are valid and we can iterate and update them all in one
        # shot without having to worry about rolling back etc as we have done
        # the validation up front in the 2 loops above.
        for key, value in valid_quotas.items():
            try:
                objects.Quotas.create_limit(context,
                                            project_id,
                                            key,
                                            value,
                                            user_id=user_id)
            except exception.QuotaExists:
                objects.Quotas.update_limit(context,
                                            project_id,
                                            key,
                                            value,
                                            user_id=user_id)
            except exception.AdminRequired:
                raise webob.exc.HTTPForbidden()
        values = self._get_quotas(context, id, user_id=user_id)
        return self._format_quota_set(None, values)
Example #44
        def wrapper(*args, **kwargs):
            timeout = kwargs.pop('timeout', None)

            uri, params, method, body, headers, singleobject = func(
                *args, **kwargs)
            cert = getattr(self.credentials, 'client_cert', None)

            if headers is None:
                headers = {}

            # Use the JSON API by default, but remember we might request a PDF (application/pdf)
            # so don't force the Accept header.
            if 'Accept' not in headers:
                headers['Accept'] = 'application/json'

            # Set a user-agent so Xero knows the traffic is coming from pyxero
            # or individual user/partner
            headers['User-Agent'] = self.user_agent

            response = getattr(requests, method)(uri,
                                                 data=body,
                                                 headers=headers,
                                                 auth=self.credentials.oauth,
                                                 params=params,
                                                 cert=cert,
                                                 timeout=timeout)

            if response.status_code == 200:
                # If we haven't got XML or JSON, assume we're being returned a binary file
                if not response.headers['content-type'].startswith(
                        'application/json'):
                    return response.content

                return self._parse_api_response(response, self.name)

            elif response.status_code == 400:
                raise XeroBadRequest(response)

            elif response.status_code == 401:
                raise XeroUnauthorized(response)

            elif response.status_code == 403:
                raise XeroForbidden(response)

            elif response.status_code == 404:
                raise XeroNotFound(response)

            elif response.status_code == 500:
                raise XeroInternalError(response)

            elif response.status_code == 501:
                raise XeroNotImplemented(response)

            elif response.status_code == 503:
                # Two 503 responses are possible. Rate limit errors
                # return encoded content; offline errors don't.
                # If you parse the response text and there's nothing
                # encoded, it must be a not-available error.
                payload = parse_qs(response.text)
                if payload:
                    raise XeroRateLimitExceeded(response, payload)
                else:
                    raise XeroNotAvailable(response)
            else:
                raise XeroExceptionUnknown(response)
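
The 503 branch above works because a form-encoded rate-limit body parses into a non-empty dict, while a plain-text "not available" page yields an empty one. A sketch of that distinction with invented response bodies:

from urllib.parse import parse_qs

rate_limited = 'oauth_problem=rate%20limit%20exceeded&oauth_problem_advice=please%20wait'
offline = 'The API is currently offline for maintenance'

print(parse_qs(rate_limited))  # {'oauth_problem': ['rate limit exceeded'], 'oauth_problem_advice': ['please wait']}
print(parse_qs(offline))       # {} -> treated as "not available"
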
Example #45
    def _key_response_put(self, request, body, bucket_name, query, key_name,
                          headers):
        if query.get('uploadId') and query.get('partNumber'):
            upload_id = query['uploadId'][0]
            part_number = int(query['partNumber'][0])
            if 'x-amz-copy-source' in request.headers:
                src = request.headers.get("x-amz-copy-source")
                src_bucket, src_key = src.split("/", 1)
                src_range = request.headers.get('x-amz-copy-source-range',
                                                '').split("bytes=")[-1]

                try:
                    start_byte, end_byte = src_range.split("-")
                    start_byte, end_byte = int(start_byte), int(end_byte)
                except ValueError:
                    start_byte, end_byte = None, None

                key = self.backend.copy_part(bucket_name, upload_id,
                                             part_number, src_bucket, src_key,
                                             start_byte, end_byte)
                template = self.response_template(S3_MULTIPART_UPLOAD_RESPONSE)
                response = template.render(part=key)
            else:
                key = self.backend.set_part(bucket_name, upload_id,
                                            part_number, body)
                response = ""
            headers.update(key.response_dict)
            return 200, headers, response

        storage_class = request.headers.get('x-amz-storage-class', 'STANDARD')
        acl = self._acl_from_headers(request.headers)

        if 'acl' in query:
            key = self.backend.get_key(bucket_name, key_name)
            # TODO: Support the XML-based ACL format
            key.set_acl(acl)
            return 200, headers, ""

        if 'x-amz-copy-source' in request.headers:
            # Copy key
            src_key_parsed = urlparse(request.headers.get("x-amz-copy-source"))
            src_bucket, src_key = src_key_parsed.path.split("/", 1)
            src_version_id = parse_qs(src_key_parsed.query).get(
                'versionId', [None])[0]
            self.backend.copy_key(src_bucket,
                                  src_key,
                                  bucket_name,
                                  key_name,
                                  storage=storage_class,
                                  acl=acl,
                                  src_version_id=src_version_id)
            new_key = self.backend.get_key(bucket_name, key_name)
            mdirective = request.headers.get('x-amz-metadata-directive')
            if mdirective is not None and mdirective == 'REPLACE':
                metadata = metadata_from_headers(request.headers)
                new_key.set_metadata(metadata, replace=True)
            template = self.response_template(S3_OBJECT_COPY_RESPONSE)
            headers.update(new_key.response_dict)
            return 200, headers, template.render(key=new_key)
        streaming_request = hasattr(request, 'streaming') and request.streaming
        closing_connection = headers.get('connection') == 'close'
        if closing_connection and streaming_request:
            # Closing the connection of a streaming request. No more data
            new_key = self.backend.get_key(bucket_name, key_name)
        elif streaming_request:
            # Streaming request, more data
            new_key = self.backend.append_to_key(bucket_name, key_name, body)
        else:
            # Initial data
            new_key = self.backend.set_key(bucket_name,
                                           key_name,
                                           body,
                                           storage=storage_class)
            request.streaming = True
            metadata = metadata_from_headers(request.headers)
            new_key.set_metadata(metadata)
            new_key.set_acl(acl)

        template = self.response_template(S3_OBJECT_RESPONSE)
        headers.update(new_key.response_dict)
        return 200, headers, template.render(key=new_key)
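
The copy branch above pulls an optional versionId out of the x-amz-copy-source header by treating it as a URL. A standalone sketch of that extraction, with an invented header value:

from urllib.parse import urlparse, parse_qs

copy_source = 'source-bucket/path/to/key?versionId=3HL4kqtJvjVBH40Nrjfkd'

parsed = urlparse(copy_source)
src_bucket, src_key = parsed.path.split('/', 1)
version_id = parse_qs(parsed.query).get('versionId', [None])[0]

print(src_bucket, src_key, version_id)
# source-bucket path/to/key 3HL4kqtJvjVBH40Nrjfkd
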
Example #46
    def query_param(self, key):
        """
        Return value of parameter in query string.
        """

        return parse_qs(self.url_details().query)[key][0]
Example #47
 def _query_string(self):
     return parse_qs(self._protocol_info.query)
Example #48
def update_cloudformation(method,
                          path,
                          data,
                          headers,
                          response=None,
                          return_forward_info=False):
    req_data = None
    if method == 'POST' and path == '/':
        req_data = urlparse.parse_qs(data)
        action = req_data.get('Action')[0]

    if return_forward_info:
        if req_data:
            if action == 'CreateChangeSet':
                return create_change_set(req_data)
            elif action == 'DescribeChangeSet':
                return describe_change_set(req_data)
            elif action == 'ExecuteChangeSet':
                return execute_change_set(req_data)
            elif action == 'UpdateStack' and req_data.get('TemplateURL'):
                # Temporary fix until the moto CF backend can handle TemplateURL (currently fails)
                url = re.sub(r'https?://s3\.amazonaws\.com',
                             aws_stack.get_local_service_url('s3'),
                             req_data.get('TemplateURL')[0])
                req_data['TemplateBody'] = requests.get(url).content
                modified_data = urlparse.urlencode(req_data, doseq=True)
                return Request(data=modified_data,
                               headers=headers,
                               method=method)
        return True

    if req_data:
        if action == 'DescribeStackResources':
            if response.status_code < 300:
                response_dict = xmltodict.parse(
                    response.content)['DescribeStackResourcesResponse']
                resources = response_dict['DescribeStackResourcesResult'][
                    'StackResources']
                if not resources:
                    # Check if stack exists
                    stack_name = req_data.get('StackName')[0]
                    cloudformation_client = aws_stack.connect_to_service(
                        'cloudformation')
                    try:
                        cloudformation_client.describe_stacks(
                            StackName=stack_name)
                    except Exception as e:
                        return error_response(
                            'Stack with id %s does not exist' % stack_name,
                            code=404)
        if action == 'DescribeStackResource':
            if response.status_code >= 500:
                # fix an error in moto where it fails with 500 if the stack does not exist
                return error_response('Stack resource does not exist',
                                      code=404)
        elif action == 'CreateStack' or action == 'UpdateStack':
            # run the actual deployment
            template = template_deployer.template_to_json(
                req_data.get('TemplateBody')[0])
            template_deployer.deploy_template(template,
                                              req_data.get('StackName')[0])
            if response.status_code >= 400:
                return make_response(action)
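
The TemplateURL workaround above edits the parsed form body and re-encodes it with doseq=True so the list values produced by parse_qs serialize back into flat key=value pairs. A minimal round-trip sketch; the field names mirror the CloudFormation query protocol, the values are invented:

from urllib.parse import parse_qs, urlencode

body = 'Action=UpdateStack&StackName=demo&TemplateURL=https%3A%2F%2Fs3.amazonaws.com%2Fbucket%2Ftpl.json'

req_data = parse_qs(body)                   # every value is a list
req_data['TemplateBody'] = ['{"Resources": {}}']
del req_data['TemplateURL']

modified = urlencode(req_data, doseq=True)  # lists flattened back into key=value pairs
print(modified)
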
Example #49
 def _matches(self, request):
     args = urlparse.parse_qs(self._get_value(request))
     if self.exact_match:
         return args == self.expected
     return all(item in args.items() for item in self.expected.items())
Example #50
 def uri_with_query_matcher(r1, r2):
     "URI matcher that allows query params to appear in any order"
     p1, p2 = urlparse(r1.uri), urlparse(r2.uri)
     return (p1[:3] == p2[:3]
             and parse_qs(p1.query, True) == parse_qs(p2.query, True))
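
Comparing the parsed dictionaries, as the matcher above does, makes parameter order irrelevant, and passing keep_blank_values=True means a bare 'flag=' still has to appear on both sides. A small sketch:

from urllib.parse import parse_qs

a = 'page=2&q=tags&flag='
b = 'q=tags&flag=&page=2'
c = 'q=tags&page=2'

assert parse_qs(a, True) == parse_qs(b, True)  # same params, different order
assert parse_qs(a, True) != parse_qs(c, True)  # 'flag=' matters once blanks are kept
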
Example #51
 def responder(request):
     query = parse_qs(urlparse(request.url).query)
     assert "HSP" == query["project"][0]
     assert "bob" == query["query"][0]
     return (200, {}, SAMPLE_USER_SEARCH_RESPONSE)
Example #52
 def test_params(self):
     client = Client(['127.0.0.1:4200'], error_trace=True)
     from six.moves.urllib.parse import urlparse, parse_qs
     parsed = urlparse(client.path)
     params = parse_qs(parsed.query)
     self.assertEquals(params["error_trace"], ["1"])
Example #53
 def __init__(self, response):
     payload = parse_qs(response.text)
     self.problem = payload['oauth_problem'][0]
     super(XeroUnauthorized, self).__init__(response, payload['oauth_problem_advice'][0])
Example #54
    def update(self, req, id, body):
        context = req.environ['nova.context']
        authorize_update(context)
        project_id = id

        bad_keys = []

        # By default, we can force update the quota if the extended
        # is not loaded
        force_update = True
        extended_loaded = False
        if self.ext_mgr.is_loaded('os-extended-quotas'):
            # force optional has been enabled, the default value of
            # force_update need to be changed to False
            extended_loaded = True
            force_update = False

        user_id = None
        if self.ext_mgr.is_loaded('os-user-quotas'):
            # Update user quotas only if the extended is loaded
            params = urlparse.parse_qs(req.environ.get('QUERY_STRING', ''))
            user_id = params.get('user_id', [None])[0]

        try:
            settable_quotas = QUOTAS.get_settable_quotas(context,
                                                         project_id,
                                                         user_id=user_id)
        except exception.Forbidden:
            raise webob.exc.HTTPForbidden()

        if not self.is_valid_body(body, 'quota_set'):
            msg = _("quota_set not specified")
            raise webob.exc.HTTPBadRequest(explanation=msg)
        quota_set = body['quota_set']

        for key, value in quota_set.items():
            if (key not in self.supported_quotas
                    and key not in NON_QUOTA_KEYS):
                bad_keys.append(key)
                continue
            if key == 'force' and extended_loaded:
                # only check the force optional when the extended has
                # been loaded
                force_update = strutils.bool_from_string(value)
            elif key not in NON_QUOTA_KEYS and value:
                try:
                    value = utils.validate_integer(value, key)
                except exception.InvalidInput as e:
                    raise webob.exc.HTTPBadRequest(
                        explanation=e.format_message())

        LOG.debug("force update quotas: %s", force_update)

        if bad_keys:
            msg = _("Bad key(s) %s in quota_set") % ",".join(bad_keys)
            raise webob.exc.HTTPBadRequest(explanation=msg)

        for key, value in quota_set.items():
            if key in NON_QUOTA_KEYS or (not value and value != 0):
                continue
            # validate whether already used and reserved exceeds the new
            # quota, this check will be ignored if admin want to force
            # update
            value = int(value)
            if not force_update:
                minimum = settable_quotas[key]['minimum']
                maximum = settable_quotas[key]['maximum']
                self._validate_quota_limit(key, value, minimum, maximum)

            try:
                objects.Quotas.create_limit(context,
                                            project_id,
                                            key,
                                            value,
                                            user_id=user_id)
            except exception.QuotaExists:
                objects.Quotas.update_limit(context,
                                            project_id,
                                            key,
                                            value,
                                            user_id=user_id)
            except exception.AdminRequired:
                raise webob.exc.HTTPForbidden()
        values = self._get_quotas(context, id, user_id=user_id)
        return self._format_quota_set(None, values)
Example #55
 def parse_components(url):
     parsed = parse.urlsplit(url)
     query = parse.parse_qs(parsed.query)
     return parsed._replace(query=''), query
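
parse_components above hands back the URL with its query stripped plus the query as a dict, which is convenient when the two parts are consumed separately. A self-contained usage sketch that repeats the helper from the snippet, with an invented URL:

from urllib import parse

def parse_components(url):
    parsed = parse.urlsplit(url)
    query = parse.parse_qs(parsed.query)
    return parsed._replace(query=''), query

base, query = parse_components('https://example.com/search?q=tags&page=2')
print(base.geturl())  # https://example.com/search
print(query)          # {'q': ['tags'], 'page': ['2']}
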
Example #56
    def __call__(self, req):
        """
        The method is invoked on every request and shows the lifecycle of the request received from
        the middleware.

        Although some middleware may use parts of the API spec, it is safe to assume that if you're
        looking for the particular spec property handler, it's most likely a part of this method.

        At the time of writing, the only property being utilized by middleware was `x-log-result`.
        """
        LOG.debug("Received call with WebOb: %s", req)
        endpoint, path_vars = self.match(req)
        LOG.debug("Parsed endpoint: %s", endpoint)
        LOG.debug("Parsed path_vars: %s", path_vars)

        context = copy.copy(getattr(self, 'mock_context', {}))
        cookie_token = None

        # Handle security
        if 'security' in endpoint:
            security = endpoint.get('security')
        else:
            security = self.spec.get('security', [])

        if self.auth and security:
            try:
                security_definitions = self.spec.get('securityDefinitions', {})
                for statement in security:
                    declaration, options = statement.copy().popitem()
                    definition = security_definitions[declaration]

                    if definition['type'] == 'apiKey':
                        if definition['in'] == 'header':
                            token = req.headers.get(definition['name'])
                        elif definition['in'] == 'query':
                            token = req.GET.get(definition['name'])
                        elif definition['in'] == 'cookie':
                            token = req.cookies.get(definition['name'])
                        else:
                            token = None

                        if token:
                            _, auth_func = op_resolver(definition['x-operationId'])
                            auth_resp = auth_func(token)

                            # Include information on how user authenticated inside the context
                            if 'auth-token' in definition['name'].lower():
                                auth_method = 'authentication token'
                            elif 'api-key' in definition['name'].lower():
                                auth_method = 'API key'

                            context['user'] = User.get_by_name(auth_resp.user)
                            context['auth_info'] = {
                                'method': auth_method,
                                'location': definition['in']
                            }

                            # Also include token expiration time when authenticated via auth token
                            if 'auth-token' in definition['name'].lower():
                                context['auth_info']['token_expire'] = auth_resp.expiry

                            if 'x-set-cookie' in definition:
                                max_age = auth_resp.expiry - date_utils.get_datetime_utc_now()
                                cookie_token = cookies.make_cookie(definition['x-set-cookie'],
                                                                   token,
                                                                   max_age=max_age,
                                                                   httponly=True)

                            break

                if 'user' not in context:
                    raise auth_exc.NoAuthSourceProvidedError('One of Token or API key required.')
            except (auth_exc.NoAuthSourceProvidedError,
                    auth_exc.MultipleAuthSourcesError) as e:
                LOG.error(six.text_type(e))
                return abort_unauthorized(six.text_type(e))
            except auth_exc.TokenNotProvidedError as e:
                LOG.exception('Token is not provided.')
                return abort_unauthorized(six.text_type(e))
            except auth_exc.TokenNotFoundError as e:
                LOG.exception('Token is not found.')
                return abort_unauthorized(six.text_type(e))
            except auth_exc.TokenExpiredError as e:
                LOG.exception('Token has expired.')
                return abort_unauthorized(six.text_type(e))
            except auth_exc.ApiKeyNotProvidedError as e:
                LOG.exception('API key is not provided.')
                return abort_unauthorized(six.text_type(e))
            except auth_exc.ApiKeyNotFoundError as e:
                LOG.exception('API key is not found.')
                return abort_unauthorized(six.text_type(e))
            except auth_exc.ApiKeyDisabledError as e:
                LOG.exception('API key is disabled.')
                return abort_unauthorized(six.text_type(e))

            if cfg.CONF.rbac.enable:
                user_db = context['user']

                permission_type = endpoint.get('x-permissions', None)
                if permission_type:
                    resolver = resolvers.get_resolver_for_permission_type(permission_type)
                    has_permission = resolver.user_has_permission(user_db, permission_type)

                    if not has_permission:
                        raise rbac_exc.ResourceTypeAccessDeniedError(user_db,
                                                                     permission_type)

        # Collect parameters
        kw = {}
        for param in endpoint.get('parameters', []) + endpoint.get('x-parameters', []):
            name = param['name']
            argument_name = param.get('x-as', None) or name
            source = param['in']
            default = param.get('default', None)

            # Collecting params from different sources
            if source == 'query':
                kw[argument_name] = req.GET.get(name, default)
            elif source == 'path':
                kw[argument_name] = path_vars[name]
            elif source == 'header':
                kw[argument_name] = req.headers.get(name, default)
            elif source == 'formData':
                kw[argument_name] = req.POST.get(name, default)
            elif source == 'environ':
                kw[argument_name] = req.environ.get(name.upper(), default)
            elif source == 'context':
                kw[argument_name] = context.get(name, default)
            elif source == 'request':
                kw[argument_name] = getattr(req, name)
            elif source == 'body':
                content_type = req.headers.get('Content-Type', 'application/json')
                content_type = parse_content_type_header(content_type=content_type)[0]
                schema = param['schema']

                # NOTE: HACK: Workaround for eventlet wsgi server which sets Content-Type to
                # text/plain if Content-Type is not provided in the request.
                # All of our API endpoints except /v1/workflows/inspection and
                # /exp/validation/mistral expect application/json so we explicitly set it to that
                # if not provided (set to text/plain by the base http server) and if it's not
                # /v1/workflows/inspection and /exp/validation/mistral API endpoints.
                if not self.is_gunicorn and content_type == 'text/plain':
                    operation_id = endpoint['operationId']

                    if ('workflow_inspection_controller' not in operation_id and
                            'mistral_validation_controller' not in operation_id):
                        content_type = 'application/json'

                # Note: We also want to perform validation if no body is explicitly provided - in a
                # lot of POST, PUT scenarios, body is mandatory
                if not req.body and content_type == 'application/json':
                    req.body = b'{}'

                try:
                    if content_type == 'application/json':
                        data = req.json
                    elif content_type == 'text/plain':
                        data = req.body
                    elif content_type in ['application/x-www-form-urlencoded',
                                          'multipart/form-data']:
                        data = urlparse.parse_qs(req.body)
                    else:
                        raise ValueError('Unsupported Content-Type: "%s"' % (content_type))
                except Exception as e:
                    detail = 'Failed to parse request body: %s' % six.text_type(e)
                    raise exc.HTTPBadRequest(detail=detail)

                # Special case for Python 3
                if six.PY3 and content_type == 'text/plain' and isinstance(data, six.binary_type):
                    # Convert bytes to text type (string / unicode)
                    data = data.decode('utf-8')

                try:
                    CustomValidator(schema, resolver=self.spec_resolver).validate(data)
                except (jsonschema.ValidationError, ValueError) as e:
                    raise exc.HTTPBadRequest(detail=getattr(e, 'message', six.text_type(e)),
                                             comment=traceback.format_exc())

                if content_type == 'text/plain':
                    kw[argument_name] = data
                else:
                    class Body(object):
                        def __init__(self, **entries):
                            self.__dict__.update(entries)

                    ref = schema.get('$ref', None)
                    if ref:
                        with self.spec_resolver.resolving(ref) as resolved:
                            schema = resolved

                    if 'x-api-model' in schema:
                        input_type = schema.get('type', [])
                        _, Model = op_resolver(schema['x-api-model'])

                        if input_type and not isinstance(input_type, (list, tuple)):
                            input_type = [input_type]

                        # root attribute is not an object, we need to use wrapper attribute to
                        # make it work with **kwarg expansion
                        if input_type and 'array' in input_type:
                            data = {'data': data}

                        instance = self._get_model_instance(model_cls=Model, data=data)

                        # Call validate on the API model - note we should eventually move all
                        # those model schema definitions into openapi.yaml
                        try:
                            instance = instance.validate()
                        except (jsonschema.ValidationError, ValueError) as e:
                            raise exc.HTTPBadRequest(detail=getattr(e, 'message', six.text_type(e)),
                                                     comment=traceback.format_exc())
                    else:
                        LOG.debug('Missing x-api-model definition for %s, using generic Body '
                                  'model.' % (endpoint['operationId']))
                        model = Body
                        instance = self._get_model_instance(model_cls=model, data=data)

                    kw[argument_name] = instance

            # Making sure all required params are present
            required = param.get('required', False)
            if required and kw[argument_name] is None:
                detail = 'Required parameter "%s" is missing' % name
                raise exc.HTTPBadRequest(detail=detail)

            # Validating and casting param types
            param_type = param.get('type', None)
            if kw[argument_name] is not None:
                if param_type == 'boolean':
                    positive = ('true', '1', 'yes', 'y')
                    negative = ('false', '0', 'no', 'n')

                    if str(kw[argument_name]).lower() not in positive + negative:
                        detail = 'Parameter "%s" is not of type boolean' % argument_name
                        raise exc.HTTPBadRequest(detail=detail)

                    kw[argument_name] = str(kw[argument_name]).lower() in positive
                elif param_type == 'integer':
                    regex = r'^-?[0-9]+$'

                    if not re.search(regex, str(kw[argument_name])):
                        detail = 'Parameter "%s" is not of type integer' % argument_name
                        raise exc.HTTPBadRequest(detail=detail)

                    kw[argument_name] = int(kw[argument_name])
                elif param_type == 'number':
                    regex = r'^[+-]?(\d+(\.\d*)?|\.\d+)([eE][+-]?\d+)?$'

                    if not re.search(regex, str(kw[argument_name])):
                        detail = 'Parameter "%s" is not of type float' % argument_name
                        raise exc.HTTPBadRequest(detail=detail)

                    kw[argument_name] = float(kw[argument_name])
                elif param_type == 'array' and param.get('items', {}).get('type', None) == 'string':
                    if kw[argument_name] is None:
                        kw[argument_name] = []
                    elif isinstance(kw[argument_name], (list, tuple)):
                        # argument is already an array
                        pass
                    else:
                        kw[argument_name] = kw[argument_name].split(',')

        # Call the controller
        try:
            controller_instance, func = op_resolver(endpoint['operationId'])
        except Exception as e:
            LOG.exception('Failed to load controller for operation "%s": %s' %
                          (endpoint['operationId'], six.text_type(e)))
            raise e

        try:
            resp = func(**kw)
        except Exception as e:
            LOG.exception('Failed to call controller function "%s" for operation "%s": %s' %
                          (func.__name__, endpoint['operationId'], six.text_type(e)))
            raise e

        # Handle response
        if resp is None:
            resp = Response()

        if not hasattr(resp, '__call__'):
            resp = Response(json=resp)

        operation_id = endpoint['operationId']

        # Process the response removing attributes based on the exclude_attribute and
        # include_attributes query param filter values (if specified)
        include_attributes = kw.get('include_attributes', None)
        exclude_attributes = kw.get('exclude_attributes', None)
        has_include_or_exclude_attributes = bool(include_attributes) or bool(exclude_attributes)

        # NOTE: We do NOT want to process stream controller response
        is_streamming_controller = endpoint.get('x-is-streaming-endpoint',
                                                bool('st2stream' in operation_id))

        if not is_streamming_controller and resp.body and has_include_or_exclude_attributes:
            # NOTE: We need to check for response.body attribute since resp.json throws if JSON
            # response is not available
            mandatory_include_fields = getattr(controller_instance,
                                               'mandatory_include_fields_response', [])
            data = self._process_response(data=resp.json,
                                          mandatory_include_fields=mandatory_include_fields,
                                          include_attributes=include_attributes,
                                          exclude_attributes=exclude_attributes)
            resp.json = data

        responses = endpoint.get('responses', {})
        response_spec = responses.get(str(resp.status_code), None)
        default_response_spec = responses.get('default', None)

        if not response_spec and default_response_spec:
            LOG.debug('No custom response spec found for endpoint "%s", using a default one' %
                      (endpoint['operationId']))
            response_spec_name = 'default'
        else:
            response_spec_name = str(resp.status_code)

        response_spec = response_spec or default_response_spec

        if response_spec and 'schema' in response_spec and not has_include_or_exclude_attributes:
            # NOTE: We don't perform response validation when include or exclude attributes are
            # provided because this means partial response which likely won't pass the validation
            LOG.debug('Using response spec "%s" for endpoint %s and status code %s' %
                     (response_spec_name, endpoint['operationId'], resp.status_code))

            try:
                validator = CustomValidator(response_spec['schema'], resolver=self.spec_resolver)

                response_type = response_spec['schema'].get('type', 'json')
                if response_type == 'string':
                    validator.validate(resp.text)
                else:
                    validator.validate(resp.json)
            except (jsonschema.ValidationError, ValueError):
                LOG.exception('Response validation failed.')
                resp.headers.add('Warning', '199 OpenAPI "Response validation failed"')
        else:
            LOG.debug('No response spec found for endpoint "%s"' % (endpoint['operationId']))

        if cookie_token:
            resp.headerlist.append(('Set-Cookie', cookie_token))

        return resp
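
The attribute filtering above is delegated to the controller's `_process_response` helper, whose body is not part of this snippet. As a rough sketch only (the helper name, the flat list/dict shape of the JSON body and the top-level attribute names are assumptions, not the project's actual implementation), include/exclude filtering of this kind could look like:

# Illustrative sketch only -- not the real _process_response implementation.
def filter_attributes(data, mandatory_include_fields=None,
                      include_attributes=None, exclude_attributes=None):
    mandatory_include_fields = mandatory_include_fields or []
    include_attributes = include_attributes or []
    exclude_attributes = exclude_attributes or []

    def filter_item(item):
        if include_attributes:
            # Whitelist mode: keep only the requested plus mandatory attributes
            keep = set(include_attributes) | set(mandatory_include_fields)
            return {k: v for k, v in item.items() if k in keep}
        # Blacklist mode: drop the excluded attributes
        return {k: v for k, v in item.items() if k not in exclude_attributes}

    if isinstance(data, list):
        return [filter_item(item) for item in data]
    return filter_item(data)
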
예제 #57
from urllib.parse import parse_qs, urlparse


def responder(request):
    query = parse_qs(urlparse(request.url).query)
    assert 'id="hsp-1"' == query["jql"][0]
    return (200, {}, SAMPLE_SEARCH_RESPONSE)
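
This responder returns a `(status, headers, body)` tuple, which is the callback shape used by the `responses` HTTP-mocking library. A minimal usage sketch, assuming a placeholder JIRA search URL and a placeholder `SAMPLE_SEARCH_RESPONSE` payload:

import json

import requests
import responses

SAMPLE_SEARCH_RESPONSE = json.dumps({'issues': []})  # placeholder payload


@responses.activate
def test_search_passes_jql():
    # The URL is an assumption; only the jql query parameter matters to responder()
    responses.add_callback(
        responses.GET,
        'https://jira.example.com/rest/api/2/search',
        callback=responder)
    requests.get('https://jira.example.com/rest/api/2/search',
                 params={'jql': 'id="hsp-1"'})
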
예제 #58
# Assumption: Python 3 stand-in for the Python 2 ``urlparse`` module import.
from urllib import parse as urlparse


def _get_connect_params(query):
    params = urlparse.parse_qs(query)
    # parse_qs returns a list per key; more than one value means the key was repeated
    if any(len(v) > 1 for v in params.values()):
        raise ValueError('DB URI params list has duplicate keys: ' + query)
    return {k: v[0] for k, v in params.items()}
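
For example, a simple querystring is flattened into single string values, while a repeated key raises `ValueError` (the parameter names below are arbitrary):

params = _get_connect_params('charset=utf8&timeout=10')
assert params == {'charset': 'utf8', 'timeout': '10'}

# _get_connect_params('timeout=10&timeout=20')  # -> ValueError: duplicate keys
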
예제 #59
    def forward_request(self, method, path, data, headers):
        if method == 'OPTIONS':
            return 200

        # check region
        try:
            aws_stack.check_valid_region(headers)
            aws_stack.set_default_region_in_headers(headers)
        except Exception as e:
            return make_error(message=str(e), code=400)

        if method == 'POST' and path == '/':
            # parse payload and extract fields
            req_data = urlparse.parse_qs(to_str(data), keep_blank_values=True)
            req_action = req_data['Action'][0]
            topic_arn = req_data.get('TargetArn') or req_data.get(
                'TopicArn') or req_data.get('ResourceArn')

            if topic_arn:
                topic_arn = topic_arn[0]
                topic_arn = aws_stack.fix_account_id_in_arns(topic_arn)

            if req_action == 'SetSubscriptionAttributes':
                sub = get_subscription_by_arn(req_data['SubscriptionArn'][0])
                if not sub:
                    return make_error(
                        message='Unable to find subscription for given ARN',
                        code=400)

                attr_name = req_data['AttributeName'][0]
                attr_value = req_data['AttributeValue'][0]
                sub[attr_name] = attr_value
                return make_response(req_action)

            elif req_action == 'GetSubscriptionAttributes':
                sub = get_subscription_by_arn(req_data['SubscriptionArn'][0])
                if not sub:
                    return make_error(
                        message='Unable to find subscription for given ARN',
                        code=400)

                content = '<Attributes>'
                for key, value in sub.items():
                    content += '<entry><key>%s</key><value>%s</value></entry>\n' % (
                        key, value)
                content += '</Attributes>'
                return make_response(req_action, content=content)

            elif req_action == 'Subscribe':
                if 'Endpoint' not in req_data:
                    return make_error(
                        message='Endpoint not specified in subscription',
                        code=400)

            elif req_action == 'ConfirmSubscription':
                if 'TopicArn' not in req_data:
                    return make_error(
                        message=
                        'TopicArn not specified in confirm subscription request',
                        code=400)

                if 'Token' not in req_data:
                    return make_error(
                        message=
                        'Token not specified in confirm subscription request',
                        code=400)

                do_confirm_subscription(
                    req_data.get('TopicArn')[0],
                    req_data.get('Token')[0])

            elif req_action == 'Unsubscribe':
                if 'SubscriptionArn' not in req_data:
                    return make_error(
                        message=
                        'SubscriptionArn not specified in unsubscribe request',
                        code=400)

                do_unsubscribe(req_data.get('SubscriptionArn')[0])

            elif req_action == 'DeleteTopic':
                do_delete_topic(topic_arn)

            elif req_action == 'Publish':
                if req_data.get('Subject') == ['']:
                    return make_error(code=400,
                                      code_string='InvalidParameter',
                                      message='Subject')

                # No need to create a topic to send SMS or single push notifications with SNS,
                # but we cannot mock the actual delivery, so we simply return a success response
                if 'PhoneNumber' not in req_data and 'TargetArn' not in req_data:
                    if topic_arn not in SNS_SUBSCRIPTIONS.keys():
                        return make_error(code=404,
                                          code_string='NotFound',
                                          message='Topic does not exist')

                publish_message(topic_arn, req_data)

                # return response here because we do not want the request to be forwarded to SNS backend
                return make_response(req_action)

            elif req_action == 'ListTagsForResource':
                tags = do_list_tags_for_resource(topic_arn)
                content = '<Tags/>'
                if len(tags) > 0:
                    content = '<Tags>'
                    for tag in tags:
                        content += '<member>'
                        content += '<Key>%s</Key>' % tag['Key']
                        content += '<Value>%s</Value>' % tag['Value']
                        content += '</member>'
                    content += '</Tags>'
                return make_response(req_action, content=content)

            elif req_action == 'CreateTopic':
                topic_arn = aws_stack.sns_topic_arn(req_data['Name'][0])
                self._extract_tags(topic_arn, req_data)

            elif req_action == 'TagResource':
                self._extract_tags(topic_arn, req_data)
                return make_response(req_action)

            elif req_action == 'UntagResource':
                # Collect the tag keys to remove (TagKeys.member.N request parameters)
                tags_to_remove = [
                    value[0] for key, value in req_data.items()
                    if key.startswith('TagKeys.member.')
                ]
                do_untag_resource(topic_arn, tags_to_remove)
                return make_response(req_action)

            data = self._reset_account_id(data)
            return Request(data=data, headers=headers, method=method)

        return True
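
Since the SNS API sends form-encoded POST bodies, `parse_qs` yields a list per field, which is why the handler above indexes with `[0]` everywhere. A minimal illustration with placeholder values (Python 3 `urllib` used here for brevity):

from urllib.parse import parse_qs

body = ('Action=Publish'
        '&TopicArn=arn:aws:sns:us-east-1:000000000000:my-topic'
        '&Message=hello')
req_data = parse_qs(body, keep_blank_values=True)

assert req_data['Action'] == ['Publish']              # every value is a list
assert req_data['TopicArn'][0].endswith(':my-topic')  # hence the [0] indexing
assert req_data['Message'] == ['hello']
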
예제 #60
import warnings

from urllib.parse import parse_qs, urlparse

# NOTE: ImproperlyConfigured (raised below) is assumed to come from the
# surrounding project, e.g. django.core.exceptions.ImproperlyConfigured.


def parse_connection_kwargs(server, db=None, **kwargs):
    """
    Return a dict of connection kwargs parsed from the given URL (or host string).

    For example::

        redis://[:password]@localhost:6379/0
        rediss://[:password]@localhost:6379/0
        unix://[:password]@/path/to/socket.sock?db=0

    Three URL schemes are supported:
        redis:// creates a normal TCP socket connection
        rediss:// creates a SSL wrapped TCP socket connection
        unix:// creates a Unix Domain Socket connection

    There are several ways to specify a database number. The parse function
    will return the first specified option:
        1. A ``db`` querystring option, e.g. redis://localhost?db=0
        2. If using the redis:// scheme, the path argument of the url, e.g.
           redis://localhost/0
        3. The ``db`` argument to this function.

    If none of these options are specified, db=0 is used.

    Any additional querystring arguments and keyword arguments will be
    passed along to the ConnectionPool class's initializer. In the case
    of conflicting arguments, querystring arguments always win.

    NOTE: taken from `redis.ConnectionPool.from_url` in redis-py
    """
    kwargs['unix_socket_path'] = ''
    if '://' in server:
        url = server
        url_string = url
        url = urlparse(url)
        qs = ''

        # In Python 2.6, custom URL schemes don't recognize querystring values;
        # they're left as part of url.path.
        if '?' in url.path and not url.query:
            # chop the querystring including the ? off the end of the url
            # and reparse it.
            qs = url.path.split('?', 1)[1]
            url = urlparse(url_string[:-(len(qs) + 1)])
        else:
            qs = url.query

        url_options = {}

        for name, value in parse_qs(qs).items():
            if value:
                url_options[name] = value[0]

        # We support the redis://, rediss:// and unix:// schemes.
        if url.scheme == 'unix':
            url_options.update({
                'password': url.password,
                'unix_socket_path': url.path,
            })

        else:
            url_options.update({
                'host': url.hostname,
                'port': int(url.port or 6379),
                'password': url.password,
            })

            # If there's a path argument, use it as the db argument if a
            # querystring value wasn't specified
            if 'db' not in url_options and url.path:
                try:
                    url_options['db'] = int(url.path.replace('/', ''))
                except (AttributeError, ValueError):
                    pass

            if url.scheme == 'rediss':
                url_options['ssl'] = True

        # last shot at the db value
        url_options['db'] = int(url_options.get('db', db or 0))

        # update the arguments from the URL values
        kwargs.update(url_options)

        # backwards compatibility
        if 'charset' in kwargs:
            warnings.warn(
                DeprecationWarning(
                    '"charset" is deprecated. Use "encoding" instead'))
            kwargs['encoding'] = kwargs.pop('charset')
        if 'errors' in kwargs:
            warnings.warn(
                DeprecationWarning(
                    '"errors" is deprecated. Use "encoding_errors" instead'))
            kwargs['encoding_errors'] = kwargs.pop('errors')
    else:
        unix_socket_path = None
        if ':' in server:
            host, port = server.rsplit(':', 1)
            try:
                port = int(port)
            except (ValueError, TypeError):
                raise ImproperlyConfigured(
                    "{0} from {1} must be an integer".format(
                        repr(port), server))
        else:
            host, port = None, None
            unix_socket_path = server

        kwargs.update(
            host=host,
            port=port,
            unix_socket_path=unix_socket_path,
            db=db,
        )

    return kwargs
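
A short usage sketch (host, password and database numbers below are placeholders); note how the URL path supplies the db number when no `db` querystring is given, and how plain `host:port` strings are handled by the non-URL branch:

kwargs = parse_connection_kwargs('redis://:secret@localhost:6379/2')
assert kwargs['host'] == 'localhost'
assert kwargs['port'] == 6379
assert kwargs['password'] == 'secret'
assert kwargs['db'] == 2          # taken from the URL path

kwargs = parse_connection_kwargs('localhost:6380', db=1)
assert kwargs == {'host': 'localhost', 'port': 6380,
                  'unix_socket_path': None, 'db': 1}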