Example #1
    def fetch(self, url, **kwargs):
        # init HTTPRequest
        session = HTTPRequest('', follow_redirects=False)
        self.init_request(session, url, **kwargs)

        instance_parameters = copy.deepcopy(self._req_params)  # per-request parameters

        http_client = AsyncHTTPClient()


        while True:
            self.pre_request(session, url, **kwargs)
            try:
                response = yield http_client.fetch(session, **instance_parameters)
                break
            except HTTPError as httperr:
                # follow redirects manually (follow_redirects=False above)
                if 300 < httperr.code < 400:
                    self.post_request(session, httperr.response, url, **kwargs)
                    session.url = httperr.response.effective_url
                else:
                    # re-raise non-redirect errors instead of retrying forever
                    raise


        del instance_parameters
        self.post_request(session, response, url, **kwargs)

        raise gen.Return(response)
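
A minimal sketch of the same manual-redirect idea against the public Tornado API, assuming the hook methods above (init_request, pre_request, post_request) are project-specific; with follow_redirects=False a 3xx response surfaces as an HTTPError whose response carries the Location header. The helper name below is made up for illustration.

from tornado import gen
from tornado.httpclient import AsyncHTTPClient, HTTPError, HTTPRequest

@gen.coroutine
def fetch_following_redirects(url, max_hops=5):
    """Follow redirects by hand so every hop can be inspected."""
    client = AsyncHTTPClient()
    for _ in range(max_hops):
        try:
            response = yield client.fetch(HTTPRequest(url, follow_redirects=False))
            raise gen.Return(response)
        except HTTPError as err:
            if err.response is not None and 300 < err.code < 400:
                # assumes an absolute Location header for brevity
                url = err.response.headers['Location']
            else:
                raise
    raise HTTPError(599, 'too many redirects')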
Example #2
def open_root_url(step):
    request = HTTPRequest(ROOT_URL + 'user/state/')
    if world.cookie:
        request.headers["Cookie"] = world.cookie
    resp = world.browser.fetch(request)
    world.response = json_decode(resp.body)
    world.response_header = resp.headers
 def get_request(self, url, method='GET', body=None, headers=None):
     url = self.get_url(url)
     req = HTTPRequest(url, method=method, body=body)
     req.headers['Content-Type'] = 'application/json; charset=UTF-8'
     if headers:
         req.headers.update(headers)
     return req
 def fetch(self, request, callback, **kwargs):
     if not isinstance(request, HTTPRequest):
         request = HTTPRequest(url=request, **kwargs)
     if not isinstance(request.headers, HTTPHeaders):
         request.headers = HTTPHeaders(request.headers)
     callback = stack_context.wrap(callback)
     _HTTPConnection(self.io_loop, request, callback)
    def _create_http_request(self, method, host, port, path,
                             params=None, data=None, **kwargs):

        url = 'http://{host}:{port}{uri}'.format(host=host, port=port, uri=path)

        if params and isinstance(params, dict):
            url += '?' + urlencode(params)

        request = HTTPRequest(
            method=method,
            url=url,
            allow_nonstandard_methods=True,
            connect_timeout=self._connect_timeout,
            request_timeout=self._request_timeout,
            **kwargs
        )

        if data and method in ['POST', 'PUT', 'PATCH']:
            try:
                request.body = json.dumps(data)
            except TypeError as e:
                logging.error(str(e))
                raise DBApiError(e)

        return request
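
A hedged sketch of how a builder like the one above might be driven end to end; the host, port, path and payload are placeholders, 'api' stands for an instance exposing _create_http_request, and DBApiError handling is omitted.

import json

from tornado import gen
from tornado.httpclient import AsyncHTTPClient

@gen.coroutine
def create_item(api):
    # 'api' is assumed to expose _create_http_request as shown above
    request = api._create_http_request('POST', '127.0.0.1', 8888, '/api/items',
                                       data={'name': 'example'})
    response = yield AsyncHTTPClient().fetch(request)
    raise gen.Return(json.loads(response.body))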
Example #6
 def getData(self,url,method,data,cookie):
     try:
         client = HTTPClient()
         request = HTTPRequest(
                 url,
                 method=method,
                 headers={
                     'Cookie':cookie
                 }
             )
         if data and method=="GET":
             url = url_concat(url,data)
             url = url.replace("+","%20")
             request.url = url
         elif data and method=="POST":
             realData = {}
             for i in data:
                 realData[i[0]] = i[1]
             data = urllib.urlencode(realData)
             request.body = data
         response = client.fetch(request)
         return json.loads(response.body)
     except Exception as e:
         # print str(e)
         #traceback.print_exc()
         return str(e)
 def getData(self,url,method,data,cookie):
     try:
         client = HTTPClient()
         request = HTTPRequest(
                 url,
                 method=method,
                 headers={
                     'Cookie':cookie
                 }
             )
         if data and method=="GET":
             data = json.loads(data)
             url = url_concat(url,data)
             request.url = url
         elif data and method=="POST":
             data = json.loads(data)
             print data
             data = urllib.urlencode(data)
             request.body = data
         # print request.url
         response = client.fetch(request)
         return response.body
     except Exception as e:
         # print str(e)
         return None
Example #8
    def fetch(self, request, callback, **kwargs):
        if not isinstance(request, HTTPRequest):
            request = HTTPRequest(url=request, **kwargs)
        # We're going to modify this (to add Host, Accept-Encoding, etc),
        # so make sure we don't modify the caller's object.  This is also
        # where normal dicts get converted to HTTPHeaders objects.
        request.headers = httputil.HTTPHeaders(request.headers)
        callback = stack_context.wrap(callback)

        key = object()
        self.queue.append((key, request, callback))

        if not len(self.active) < self.max_clients:
            timeout_handle = self.io_loop.add_timeout(
                time.time() + min(request.connect_timeout,
                                  request.request_timeout),
                functools.partial(self._on_timeout, key))
        else:
            timeout_handle = None

        self.waiting[key] = (request, callback, timeout_handle)
        self._process_queue()
        if self.queue:
            logging.debug(
                'max_clients limit reached, request queued. '
                '%d active, %d queued requests.' % (
                    len(self.active), len(self.queue))
            )
    def perform_request(self, request, response, method):
        try:
            constants = request_consts[method]

            url = request[constants.URL]
            timeout = request[constants.TIMEOUT]

            http_request = HTTPRequest(url=url, method=method)
            http_request.request_timeout = float(timeout)/1000

            if method == 'POST':
                http_request.body = request[constants.BODY]

            #adds cookies to request
            params_num = len(request)
            if constants.COOKIES <= params_num - 1:
                cookies = request[constants.COOKIES]
                if len(cookies) > 0:
                    list_of_cookies = list('{0}={1}'.format(cookie, value) for cookie, value in cookies.iteritems())
                    cookies_str = '; '.join(list_of_cookies)

                    http_request.headers.add('Cookie', cookies_str)

            #adds headers to request
            if constants.HEADERS <= params_num - 1:
                for name, values_list in request[constants.HEADERS].iteritems():
                    for value in values_list:
                        http_request.headers.add(name, value)

            self.logger.info("Downloading {0}, headers {1}, method {2}".format(url, http_request.headers, method))
            http_response = yield self.http_client.fetch(http_request)

            response_headers = self._get_headers_from_response(http_response)
            response.write((True, http_response.body, http_response.code, response_headers,))

            response.close()
            self.logger.info("{0} has been successfuly downloaded".format(url))
        except HTTPError as e:
            self.logger.info("Error ({0}) occured while downloading {1}".format(e.message, url))

            if e.response is not None:
                http_response = e.response
                response_headers = self._get_headers_from_response(http_response)
                response.write((False, http_response.body, http_response.code, response_headers,))
            else:
                response.write((False, '', e.code, {},))

            response.close()
        except socket.gaierror as e:
            self.logger.info("Error ({0}) occured while downloading {1}".format(e.message, url))
            response.write((False, '', e.errno, {},))
            response.close()
        except Exception as e:
            self.logger.error("Unhandled error ({0}) occured in perform_request, report about this problem "
                          "to httpclient service developers. Method is {1}, stacktrace is: {2}".format(
                                e.message, method, traceback.format_exc()))

            response.write((False, '', 0, {},))
            response.close()
 def create_request(self):
     request = HTTPRequest(url=self.request_data['url'], headers=self.request_data['headers'])
     self.patch_request(request)
     request.headers['User-Agent'] = 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.102 Safari/537.36'
     #request.connect_timeout = 0
     request.request_timeout = 0
     request.header_only = False
     return request
 def fetch(self, path, body=None, **kwargs):
     kwargs['url'] = self.get_url(path)
     request = HTTPRequest(**kwargs)
     if body is not None:
         request.body = body
     request.allow_nonstandard_methods = True
     self.http_client.fetch(request, self.stop, method=None)
     return self.wait()
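
For reference, a minimal sketch of the stop/wait pattern such fetch helpers rely on, using tornado.testing.AsyncHTTPTestCase; the handler and route are made up for the example.

from tornado.testing import AsyncHTTPTestCase
from tornado.web import Application, RequestHandler

class PingHandler(RequestHandler):
    def get(self):
        self.write('pong')

class PingTest(AsyncHTTPTestCase):
    def get_app(self):
        return Application([('/ping', PingHandler)])

    def test_ping(self):
        # self.stop stores the response, self.wait() runs the IOLoop until it arrives
        self.http_client.fetch(self.get_url('/ping'), self.stop)
        response = self.wait()
        self.assertEqual(response.body, b'pong')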
Example #12
def send_logout_request(step):
    request = HTTPRequest(ROOT_URL + "logout/", validate_cert=False)
    if world.cookie:
        request.headers["Cookie"] = world.cookie
    world.response = world.browser.fetch(request)
    if world.response.code == 200:
        world.cookie = ""
        world.browser.cookie = ""
 def fetch(self, request, callback, **kwargs):
     if not isinstance(request, HTTPRequest):
         request = HTTPRequest(url=request, **kwargs)
     if not isinstance(request.headers, HTTPHeaders):
         request.headers = HTTPHeaders(request.headers)
     callback = stack_context.wrap(callback)
     self.queue.append((request, callback))
     self._process_queue()
Example #14
 def fetch(self, path, **kwargs):
     kwargs['url'] = self.get_url(path)
     body = kwargs.pop('body', None)
     request = HTTPRequest(**kwargs)
     request.body = body
     request.allow_nonstandard_methods = True
     request.request_timeout = TIMEOUT
     self.http_client.fetch(request, self.stop, **kwargs)
     return self.wait()
    def wrapper(self, request, callback, **kwargs):
        with HttpClientTransContext("tornado.simple_httpclient.SimpleAsyncHTTPClient.fetch") as context:
            if context:
                from tornado.httpclient import HTTPRequest

                if not isinstance(request, HTTPRequest):
                    request = HTTPRequest(url=request, **kwargs)
                request._td_httpclient_node = context
            return func(self, request, callback, **kwargs)
Example #16
    def post(self):
        # param from the client 
        message = {}
        message['status'] = 'error'
        message['returned_url'] = 'null'
        #url = self.get_argument('url', None)
        comic_id = self.get_argument('img', None)
        self.threash_hold = self.get_argument('threshold', 90)
        if not self.threash_hold:
            self.threash_hold = float(90)
        else:
            self.threash_hold =  float(self.threash_hold)
        if comic_id:
            comic_id = int(comic_id[5:])
        img_file = None
        if len(self.request.files.get('file', [])) > 0:
            img_file = self.request.files['file'][0]
        # check the client params
        if not img_file or comic_id <= 0:
            self.write(json.dumps(message))
            self.finish()
            return
        else:
            self.comic_picture_id = comic_id

        ######################################################
        # print self.client_cv_img 
        filename = img_file['filename']
        saved_path = self.IMAGE_PATH + filename 
        self.client_saved_img = saved_path 
        if not os.path.exists(saved_path):
            with open(saved_path, "wb") as fp:
                fp.write(img_file['body'])

        # save the client img 
        self.client_cv_img = scv.Image(r'%s' % str(saved_path)) 

        ######################################################
        # ok, we save the client image and gen the SimpleCV img 
        # we pass the client img url to oxford to get the params 
        # get parameters 
        get_params = urllib.urlencode({
                'analyzesFaceLandmarks': 'true',
                'analyzesAge': 'true',
                'analyzesGender': 'true',
                'analyzesHeadPose': 'true',
                })
        url = self.API_URL % get_params
        post_params = {}
        post_params['url'] = self.HOST + saved_path  
        #print post_params['url']
        # request 
        request = HTTPRequest(url, method='POST')
        request.body = json.dumps(post_params)
        request.headers['Content-Type'] = 'application/json'
        request.headers['Ocp-Apim-Subscription-key'] = self.TOKEN 
        # fetch 
        client = AsyncHTTPClient()
        client.fetch(request, self.handleResponse)
Example #17
 def fetch(self, request, **kwargs):
     headers = kwargs.pop('headers', default_headers)  # pop so 'headers' is not passed twice below
     if not isinstance(request, HTTPRequest):
         request = HTTPRequest(url=request, headers=headers, **kwargs)
     request.headers = HTTPHeaders(request.headers if request.headers else headers)
     try:
         return self.http.fetch(request)
     except (HTTPError, SyntaxError) as e:
         # TODO: log properly instead of printing
         print "[ERROR]", e
Example #18
    def fetch(self, url, headers=None, body=None, method="GET", callback=None, raise_error=True, cache=None, **kwargs):
        """Very similar to AsyncHTTPClient.fetch."""
        headers = headers or {}
        body = body or "{}"
        if self._closed:
            raise RuntimeError("fetch() called on closed AsyncHTTPClient")
        future = TracebackFuture()
        if isinstance(body, dict):
            # drop None values, then form-encode the body
            for k, v in list(body.items()):
                if v is None:
                    del body[k]
            body = urllib.urlencode(body)
        for k, v in list(headers.items()):  # header values must be plain str
            if v:
                headers[k] = str(headers[k])
            else:
                del headers[k]
        request = HTTPRequest(url=url, method=method, headers=headers, body=body,
                              allow_nonstandard_methods=True, request_timeout=600, **kwargs)
        # We may modify this (to add Host, Accept-Encoding, etc),
        # so make sure we don't modify the caller's object.  This is also
        # where normal dicts get converted to HTTPHeaders objects.
        request.headers = httputil.HTTPHeaders(request.headers)
        request = _RequestProxy(request, self.defaults)
        if callback is not None:
            callback = stack_context.wrap(callback)

            def handle_future(future):
                exc = future.exception()
                if isinstance(exc, HTTPError) and exc.response is not None:
                    response = exc.response
                elif exc is not None:
                    response = HTTPResponse(
                        request, 599, error=exc,
                        request_time=time.time() - request.start_time)
                else:
                    response = future.result()
                self.io_loop.add_callback(callback, response)
            future.add_done_callback(handle_future)

        def handle_response(response):
            if raise_error and response.error:
                future.set_exception(response.error)
            else:
                try:
                    resp = json.loads(str(response.body))
                    if resp.get("statusCode") and resp.get("statusCode")==800:
                        future.set_result(resp)
                        log.info(json.dumps({"response":resp,"body":body,"headers":headers,"url":url}))
                    else:
                        future.set_result({"error_type":"statusCode is not 800", "response":resp,"body":body,"headers":headers,"url":url})
                        log.error(json.dumps({"error_type":"statusCode is not 800", "response":resp,"body":body,"headers":headers,"url":url}))
                except Exception as e:
                    future.set_result({"error_type":"json.loads failed!","error":str(e),"response.body":response.body,"body":body,"headers":headers,"url":url})
                    log.error(json.dumps({"error_type":"json.loads failed!","error":str(e),"response.body":response.body,"body":body,"headers":headers,"url":url}))
Example #19
 def _request(self, extension, body, callback):
     """Wraps tornado syntax for sending an HTTP request."""
     request = HTTPRequest('http://{}/{}'.format(self.ip, extension),
                           connect_timeout=self.timeout,
                           request_timeout=self.timeout)
     if body:
         request.body = body
         request.method = 'POST'
     if extension == 'ToolWeb/Cmd':
         request.headers = {'Content-Type': 'text/xml'}
     self.client.fetch(request, callback)
Example #20
    def fetch(self, request, callback, **kwargs):
        if not isinstance(request, HTTPRequest):
            request = HTTPRequest(url=request, **kwargs)
        # We're going to modify this (to add Host, Accept-Encoding, etc),
        # so make sure we don't modify the caller's object.  This is also
        # where normal dicts get converted to HTTPHeaders objects.
        request.headers = httputil.HTTPHeaders(request.headers)

        # Early prepare
        request = _HTTP2Stream.prepare_request(request, self.host)
        super(SimpleAsyncHTTP2Client, self).fetch(request, callback, **kwargs)
Example #21
 def fetch(self, request, callback, **kwargs):
     if not isinstance(request, HTTPRequest):
         request = HTTPRequest(url=request, **kwargs)
     if not isinstance(request.headers, HTTPHeaders):
         request.headers = HTTPHeaders(request.headers)
     callback = stack_context.wrap(callback)
     self.queue.append((request, callback))
     self._process_queue()
     if self.queue:
         logging.debug("max_clients limit reached, request queued. "
                       "%d active, %d queued requests." % (
                 len(self.active), len(self.queue)))
Example #22
    def post(self, url, body):
        """
        Perform a POST request.
        """

        if hasattr(self, "root_url") and self.root_url:
            url = self.root_url + url

        request = HTTPRequest(url, method="POST", body=body, validate_cert=False)
        if hasattr(self, "cookie") and self.cookie:
            request.headers["Cookie"] = self.cookie
        return HTTPClient.fetch(self, request)
Example #23
    def delete(self, url):
        """
        Perform a DELETE request.
        """

        if hasattr(self, "root_url") and self.root_url:
            url = self.root_url + url

        request = HTTPRequest(url, method="DELETE", validate_cert=False)
        if self.cookie:
            request.headers["Cookie"] = self.cookie
        return HTTPClient.fetch(self, request)
Example #24
    def get(self, url):
        """
        Perform a GET request.
        """

        if hasattr(self, "root_url") and self.root_url:
            url = self.root_url + url

        request = HTTPRequest(url, validate_cert=False)
        if hasattr(self, "cookie") and self.cookie:
            request.headers["Cookie"] = self.cookie
        return HTTPClient.fetch(self, request)
Example #25
def do_send_sms(cell, data):
	http_client = HTTPClient()
	sms_url = "http://172.31.11.203:8080/notify/sms/"
	request = HTTPRequest(sms_url)
	request.headers["Content-Type"] = "application/json"
	request.headers["HTTP_HEAD_ENCODING"] = "utf-8"
	request.method = "POST"
	request.body = '{"id":"0","phones":"' + cell + '","content":"' + data + '"}'
	resp = http_client.fetch(request)
	if resp.code != 201:
		raise RuntimeError("SMS Gateway Error: " + str(resp.code))
	print resp.code, resp.body
Example #26
def post_a_new_picture_via_the_dedicated_resource(step):
    time.sleep(1)
    file = open("test.jpg", "r")
    
    (contentType, body) = encode_multipart_formdata([], 
                            [("picture", "test.jpg", file.read())])
    headers = {'Content-Type':contentType} 
    request = HTTPRequest(url=world.browser.root_url + "pictures/all/", 
                          method="POST", body=body, headers=headers,
                          validate_cert=False)
    if hasattr(world.browser, "cookie"):
            request.headers["Cookie"] = world.browser.cookie
    world.browser.fetch(request)
Example #27
    def send_request(self, method, url, data=None, data_type="json"):
        method = method.upper()

        has_payload = method == self.POST or method == self.PUT
        is_CUD = has_payload or method == self.DELETE

        full_url = urlparse.urlunparse((controller_address['scheme'],
                                        controller_address['host'] + ':' + str(controller_address['port']),
                                        url,
                                        None, None, None))

        headers = {
            'Content-Type': HttpClient.set_content_type(data_type)
        }
        request = HTTPRequest(url=full_url,
                              method=method,
                              headers=headers,
                              auth_username=controller_auth['username'],
                              auth_password=controller_auth['password'],
                              connect_timeout=http_client_settings.get("timeouts", {}).get("connect", 3),
                              request_timeout=http_client_settings.get("timeouts", {}).get("request", 10))

        if has_payload:
            if data_type == "json":
                request.body = json.dumps(data)

        if is_CUD:
            if self.dry_run:
                logger.info("\nDRY RUN")
            logger.debug("\n\nSending {} request.\nUrl: {}\nBody: {}\n".format(method, full_url, request.body))

        if is_CUD and self.dry_run:
            response = json.dumps({
                "status": "ok",
                "msg": "dry_run"
            })
        else:
            try:
                response = HTTPClient().fetch(request)
                if not self.fail_silently and not self.is_ok(response.code):
                    raise HttpClientException(response)

                logger.debug("\n\nResponse ({}).\nUrl: {}\nBody: {}\n".format(response.code, full_url, response.body))

                return response
            except HTTPError as e:
                logger.debug("HttpClient error: {}".format(e.message))
                if not self.fail_silently:
                    raise HttpClientException(e)
                return None
        return response
Example #28
    def fetch(self, request, callback=None, raise_error=True, **kwargs):
        """Executes a request, asynchronously returning an `HTTPResponse`.

        The request may be either a string URL or an `HTTPRequest` object.
        If it is a string, we construct an `HTTPRequest` using any additional
        kwargs: ``HTTPRequest(request, **kwargs)``

        This method returns a `.Future` whose result is an
        `HTTPResponse`.  By default, the ``Future`` will raise an `HTTPError`
        if the request returned a non-200 response code. Instead, if
        ``raise_error`` is set to False, the response will always be
        returned regardless of the response code.

        If a ``callback`` is given, it will be invoked with the `HTTPResponse`.
        In the callback interface, `HTTPError` is not automatically raised.
        Instead, you must check the response's ``error`` attribute or
        call its `~HTTPResponse.rethrow` method.
        """
        if self._closed:
            raise RuntimeError("fetch() called on closed AsyncHTTPClient")
        if not isinstance(request, HTTPRequest):
            request = HTTPRequest(url=request, **kwargs)
        # We may modify this (to add Host, Accept-Encoding, etc),
        # so make sure we don't modify the caller's object.  This is also
        # where normal dicts get converted to HTTPHeaders objects.
        request.headers = httputil.HTTPHeaders(request.headers)
        request = _RequestProxy(request, self.defaults)
        future = TracebackFuture()
        if callback is not None:
            callback = stack_context.wrap(callback)

            def handle_future(future):
                exc = future.exception()
                if isinstance(exc, HTTPError) and exc.response is not None:
                    response = exc.response
                elif exc is not None:
                    response = HTTPResponse(
                        request, 599, error=exc,
                        request_time=time.time() - request.start_time)
                else:
                    response = future.result()
                self.io_loop.add_callback(callback, response)
            future.add_done_callback(handle_future)

        def handle_response(response):
            if raise_error and response.error:
                future.set_exception(response.error)
            else:
                future.set_result(response)
        self.fetch_impl(request, handle_response)
        return future
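
A short, hedged sketch of the two calling styles the docstring above describes, Future-based and callback-based; the URL is a placeholder.

from tornado import gen
from tornado.httpclient import AsyncHTTPClient

@gen.coroutine
def future_style():
    # Future interface: non-2xx raises HTTPError unless raise_error=False
    response = yield AsyncHTTPClient().fetch('http://example.com/', raise_error=False)
    raise gen.Return(response.code)

def callback_style(on_response):
    # callback interface: errors are not raised; inspect response.error instead
    AsyncHTTPClient().fetch('http://example.com/', callback=on_response)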
Example #29
    def _make_proxy_request(self, request_data):
        timeout = float(request_data.get("timeout", DEFAULT_TIMEOUT))
        validate_cert = bool(request_data.get("validate_cert", True))
        max_redirects = request_data.get("max_http_redirects") or 0
        follow_redirects = max_redirects > 0  # 0 means do not follow redirects

        url = request_data.get("url")
        params = request_data.get("data")
        post_type = request_data.get("post_type")
        if params and post_type is None:
            url = "%s?%s" % (url, urlencode(params))

        logger.info("[%s]agent request url: %s", self.id, url)

        proxy_request = HTTPRequest(
            url, validate_cert=validate_cert,
            headers=self._get_proxy_request_headers(request_data),
            method=request_data.get("method", "GET"),
            allow_nonstandard_methods=True,
            connect_timeout=timeout,
            request_timeout=timeout,
            streaming_callback=self._streaming_callback,
            header_callback=self._header_callback,
            follow_redirects=follow_redirects,
            max_redirects=max_redirects,
            prepare_curl_callback=self.prepare_curl_callback,
        )

        role_name = request_data.get("role")
        if role_name:
            InterfaceRoleManager.set_curl_interface_role(
                proxy_request, role_name,
            )

        keystone_auth_info = request_data.get("keystone")
        if keystone_auth_info:
            logger.warning(
                "[%s]agent request requires keystone token", self.id,
            )
            auth_headers = yield self._get_keystone_auth_headers(
                keystone_auth_info, validate_cert=validate_cert,
            )
            if not auth_headers:
                raise gen.Return()
            proxy_request.headers.update(auth_headers)

        body = self._get_request_body(request_data)
        if body:
            proxy_request.body = body

        raise gen.Return(proxy_request)
 def fetch(self, request, callback, **kwargs):
     if not isinstance(request, HTTPRequest):
         request = HTTPRequest(url=request, **kwargs)
     # We're going to modify this (to add Host, Accept-Encoding, etc),
     # so make sure we don't modify the caller's object.  This is also
     # where normal dicts get converted to HTTPHeaders objects.
     request.headers = HTTPHeaders(request.headers)
     callback = stack_context.wrap(callback)
     self.queue.append((request, callback))
     self._process_queue()
     if self.queue:
         logging.debug("max_clients limit reached, request queued. "
                       "%d active, %d queued requests." % (
                 len(self.active), len(self.queue)))
Example #31
    def handle_user(user):
        """Handle one user.

        Create a list of their servers, and async exec them.  Wait for
        that to be done, and if all servers are stopped, possibly cull
        the user.
        """
        # shutdown servers first.
        # Hub doesn't allow deleting users with running servers.
        # named servers contain the 'servers' dict
        if 'servers' in user:
            servers = user['servers']
        # Otherwise, server data is intermingled in with the user
        # model
        else:
            servers = {}
            if user['server']:
                servers[''] = {
                    'started': user.get('started'),
                    'last_activity': user['last_activity'],
                    'pending': user['pending'],
                    'url': user['server'],
                }
        server_futures = [
            handle_server(user, server_name, server)
            for server_name, server in servers.items()
        ]
        results = yield multi(server_futures)
        if not cull_users:
            return
        # some servers are still running, cannot cull users
        still_alive = len(results) - sum(results)
        if still_alive:
            app_log.debug("Not culling user %s with %i servers still alive",
                          user['name'], still_alive)
            return False

        should_cull = False
        if user.get('created'):
            age = now - parse_date(user['created'])
        else:
            # created may be undefined on jupyterhub < 0.9
            age = None

        # check last activity
        # last_activity can be None in 0.9
        if user['last_activity']:
            inactive = now - parse_date(user['last_activity'])
        else:
            # no activity yet, use start date
            # last_activity may be None with jupyterhub 0.9,
            # which introduces the 'created' field which is never None
            inactive = age

        should_cull = (inactive is not None
                       and inactive.total_seconds() >= inactive_limit)
        if should_cull:
            app_log.info("Culling user %s (inactive for %s)", user['name'],
                         inactive)

        if max_age and not should_cull:
            # only check created if max_age is specified
            # so that we can still be compatible with jupyterhub 0.8
            # which doesn't define the 'started' field
            if age is not None and age.total_seconds() >= max_age:
                app_log.info("Culling user %s (age: %s, inactive for %s)",
                             user['name'], format_td(age), format_td(inactive))
                should_cull = True

        if not should_cull:
            app_log.debug("Not culling user %s (created: %s, last active: %s)",
                          user['name'], format_td(age), format_td(inactive))
            return False

        req = HTTPRequest(
            url=url + '/users/%s' % user['name'],
            method='DELETE',
            headers=auth_header,
        )
        yield fetch(req)
        return True
Example #32
def cull_idle(
    url,
    api_token,
    inactive_limit,
    cull_users=False,
    remove_named_servers=False,
    max_age=0,
    concurrency=10,
):
    """Shutdown idle single-user servers

    If cull_users, inactive *users* will be deleted as well.
    """
    auth_header = {
        'Authorization': 'token %s' % api_token,
    }
    req = HTTPRequest(
        url=url + '/users',
        headers=auth_header,
    )
    now = datetime.now(timezone.utc)
    client = AsyncHTTPClient()

    if concurrency:
        semaphore = Semaphore(concurrency)

        @coroutine
        def fetch(req):
            """client.fetch wrapped in a semaphore to limit concurrency"""
            yield semaphore.acquire()
            try:
                return (yield client.fetch(req))
            finally:
                yield semaphore.release()
    else:
        fetch = client.fetch

    resp = yield fetch(req)
    users = json.loads(resp.body.decode('utf8', 'replace'))
    futures = []

    @coroutine
    def handle_server(user, server_name, server):
        """Handle (maybe) culling a single server

        Returns True if server is now stopped (user removable),
        False otherwise.
        """
        log_name = user['name']
        if server_name:
            log_name = '%s/%s' % (user['name'], server_name)
        if server.get('pending'):
            app_log.warning("Not culling server %s with pending %s", log_name,
                            server['pending'])
            return False

        if server.get('started'):
            age = now - parse_date(server['started'])
        else:
            # started may be undefined on jupyterhub < 0.9
            age = None

        # check last activity
        # last_activity can be None in 0.9
        if server['last_activity']:
            inactive = now - parse_date(server['last_activity'])
        else:
            # no activity yet, use start date
            # last_activity may be None with jupyterhub 0.9,
            # which introduces the 'started' field which is never None
            # for running servers
            inactive = age

        should_cull = (inactive is not None
                       and inactive.total_seconds() >= inactive_limit)
        if should_cull:
            app_log.info("Culling server %s (inactive for %s)", log_name,
                         format_td(inactive))

        if max_age and not should_cull:
            # only check started if max_age is specified
            # so that we can still be compatible with jupyterhub 0.8
            # which doesn't define the 'started' field
            if age is not None and age.total_seconds() >= max_age:
                app_log.info("Culling server %s (age: %s, inactive for %s)",
                             log_name, format_td(age), format_td(inactive))
                should_cull = True

        if not should_cull:
            app_log.debug("Not culling server %s (age: %s, inactive for %s)",
                          log_name, format_td(age), format_td(inactive))
            return False

        body = None
        if server_name:
            # culling a named server
            # A named server can be stopped and kept available to the user
            # for starting again or stopped and removed. To remove the named
            # server we have to pass an additional option in the body of our
            # DELETE request.
            delete_url = url + "/users/%s/servers/%s" % (
                quote(user['name']),
                quote(server['name']),
            )
            if remove_named_servers:
                body = json.dumps({"remove": True})
        else:
            delete_url = url + '/users/%s/server' % quote(user['name'])

        req = HTTPRequest(
            url=delete_url,
            method='DELETE',
            headers=auth_header,
            body=body,
            allow_nonstandard_methods=True,
        )
        resp = yield fetch(req)
        if resp.code == 202:
            app_log.warning(
                "Server %s is slow to stop",
                log_name,
            )
            # return False to prevent culling user with pending shutdowns
            return False
        return True

    @coroutine
    def handle_user(user):
        """Handle one user.

        Create a list of their servers, and async exec them.  Wait for
        that to be done, and if all servers are stopped, possibly cull
        the user.
        """
        # shutdown servers first.
        # Hub doesn't allow deleting users with running servers.
        # named servers contain the 'servers' dict
        if 'servers' in user:
            servers = user['servers']
        # Otherwise, server data is intermingled in with the user
        # model
        else:
            servers = {}
            if user['server']:
                servers[''] = {
                    'started': user.get('started'),
                    'last_activity': user['last_activity'],
                    'pending': user['pending'],
                    'url': user['server'],
                }
        server_futures = [
            handle_server(user, server_name, server)
            for server_name, server in servers.items()
        ]
        results = yield multi(server_futures)
        if not cull_users:
            return
        # some servers are still running, cannot cull users
        still_alive = len(results) - sum(results)
        if still_alive:
            app_log.debug("Not culling user %s with %i servers still alive",
                          user['name'], still_alive)
            return False

        should_cull = False
        if user.get('created'):
            age = now - parse_date(user['created'])
        else:
            # created may be undefined on jupyterhub < 0.9
            age = None

        # check last activity
        # last_activity can be None in 0.9
        if user['last_activity']:
            inactive = now - parse_date(user['last_activity'])
        else:
            # no activity yet, use start date
            # last_activity may be None with jupyterhub 0.9,
            # which introduces the 'created' field which is never None
            inactive = age

        should_cull = (inactive is not None
                       and inactive.total_seconds() >= inactive_limit)
        if should_cull:
            app_log.info("Culling user %s (inactive for %s)", user['name'],
                         inactive)

        if max_age and not should_cull:
            # only check created if max_age is specified
            # so that we can still be compatible with jupyterhub 0.8
            # which doesn't define the 'started' field
            if age is not None and age.total_seconds() >= max_age:
                app_log.info("Culling user %s (age: %s, inactive for %s)",
                             user['name'], format_td(age), format_td(inactive))
                should_cull = True

        if not should_cull:
            app_log.debug("Not culling user %s (created: %s, last active: %s)",
                          user['name'], format_td(age), format_td(inactive))
            return False

        req = HTTPRequest(
            url=url + '/users/%s' % user['name'],
            method='DELETE',
            headers=auth_header,
        )
        yield fetch(req)
        return True

    for user in users:
        futures.append((user['name'], handle_user(user)))

    for (name, f) in futures:
        try:
            result = yield f
        except Exception:
            app_log.exception("Error processing %s", name)
        else:
            if result:
                app_log.debug("Finished culling %s", name)
Example #33
    def authenticate(self, handler, data=None):
        code = handler.get_argument("code")
        # TODO: Configure the curl_httpclient for tornado
        http_client = AsyncHTTPClient()

        params = dict(redirect_uri=self.get_callback_url(handler),
                      code=code,
                      grant_type='authorization_code')
        params.update(self.extra_params)

        if self.token_url:
            url = self.token_url
        else:
            raise ValueError(
                "Please set the OAUTH2_TOKEN_URL environment variable")

        b64key = base64.b64encode(
            bytes("{}:{}".format(self.client_id, self.client_secret), "utf8"))

        headers = {
            "Accept": "application/json",
            "User-Agent": "JupyterHub",
            "Authorization": "Basic {}".format(b64key.decode("utf8"))
        }
        req = HTTPRequest(
            url,
            method="POST",
            headers=headers,
            validate_cert=self.tls_verify,
            body=urllib.parse.urlencode(
                params)  # Body is required for a POST...
        )

        resp = yield http_client.fetch(req)

        resp_json = json.loads(resp.body.decode('utf8', 'replace'))

        access_token = resp_json['access_token']
        refresh_token = resp_json.get('refresh_token', None)
        token_type = resp_json['token_type']
        scope = (resp_json.get('scope', '')).split(' ')

        # Determine who the logged in user is
        headers = {
            "Accept": "application/json",
            "User-Agent": "JupyterHub",
            "Authorization": "{} {}".format(token_type, access_token)
        }
        if self.userdata_url:
            url = url_concat(self.userdata_url, self.userdata_params)
        else:
            raise ValueError(
                "Please set the OAUTH2_USERDATA_URL environment variable")

        req = HTTPRequest(url,
                          method=self.userdata_method,
                          headers=headers,
                          validate_cert=self.tls_verify,
                          body=urllib.parse.urlencode(
                              {'access_token': access_token}))
        resp = yield http_client.fetch(req)
        resp_json = json.loads(resp.body.decode('utf8', 'replace'))

        if not resp_json.get(self.username_key):
            self.log.error("OAuth user contains no key %s: %s",
                           self.username_key, resp_json)
            return

        return {
            'name': resp_json.get(self.username_key),
            'auth_state': {
                'access_token': access_token,
                'refresh_token': refresh_token,
                'oauth_user': resp_json,
                'scope': scope,
            }
        }
Example #34
async def test_facts_paging(server, client, order_by_column, order,
                            env_with_facts):
    """Test querying facts with paging, using different sorting parameters."""
    env, _, _ = env_with_facts
    result = await client.get_all_facts(
        env,
        filter={"name": "res"},
    )
    assert result.code == 200
    assert len(result.result["data"]) == 6
    all_facts_in_expected_order = sorted(result.result["data"],
                                         key=itemgetter(order_by_column, "id"),
                                         reverse=order == "DESC")
    all_fact_ids_in_expected_order = fact_ids(all_facts_in_expected_order)

    result = await client.get_all_facts(env,
                                        limit=2,
                                        sort=f"{order_by_column}.{order}",
                                        filter={"name": "res"})
    assert result.code == 200
    assert len(result.result["data"]) == 2
    assert fact_ids(
        result.result["data"]) == all_fact_ids_in_expected_order[:2]

    assert result.result["metadata"] == {
        "total": 6,
        "before": 0,
        "after": 4,
        "page_size": 2
    }
    assert result.result["links"].get("next") is not None
    assert result.result["links"].get("prev") is None

    port = get_bind_port()
    base_url = "http://localhost:%s" % (port, )
    http_client = AsyncHTTPClient()

    # Test link for next page
    url = f"""{base_url}{result.result["links"]["next"]}"""
    assert "limit=2" in url
    assert "filter.name=res" in url
    request = HTTPRequest(
        url=url,
        headers={"X-Inmanta-tid": env},
    )
    response = await http_client.fetch(request, raise_error=False)
    assert response.code == 200
    response = json.loads(response.body.decode("utf-8"))
    assert fact_ids(response["data"]) == all_fact_ids_in_expected_order[2:4]
    assert response["links"].get("prev") is not None
    assert response["links"].get("next") is not None
    assert response["metadata"] == {
        "total": 6,
        "before": 2,
        "after": 2,
        "page_size": 2
    }

    # Test link for next page
    url = f"""{base_url}{response["links"]["next"]}"""
    # The filters should be present for the links as well
    assert "limit=2" in url
    assert "filter.name=res" in url
    request = HTTPRequest(
        url=url,
        headers={"X-Inmanta-tid": env},
    )
    response = await http_client.fetch(request, raise_error=False)
    assert response.code == 200
    response = json.loads(response.body.decode("utf-8"))
    next_page_ids = fact_ids(response["data"])
    assert next_page_ids == all_fact_ids_in_expected_order[4:]
    assert response["links"].get("prev") is not None
    assert response["links"].get("next") is None
    assert response["metadata"] == {
        "total": 6,
        "before": 4,
        "after": 0,
        "page_size": 2
    }

    # Test link for previous page
    url = f"""{base_url}{response["links"]["prev"]}"""
    assert "limit=2" in url
    assert "filter.name=res" in url
    request = HTTPRequest(
        url=url,
        headers={"X-Inmanta-tid": env},
    )
    response = await http_client.fetch(request, raise_error=False)
    assert response.code == 200
    response = json.loads(response.body.decode("utf-8"))
    prev_page_ids = fact_ids(response["data"])
    assert prev_page_ids == all_fact_ids_in_expected_order[2:4]
    assert response["links"].get("prev") is not None
    assert response["links"].get("next") is not None
    assert response["metadata"] == {
        "total": 6,
        "before": 2,
        "after": 2,
        "page_size": 2
    }
Example #35
 def test_defaults_none(self):
     proxy = _RequestProxy(HTTPRequest('http://example.com/'), None)
     self.assertIs(proxy.auth_username, None)
    def post(self):
        retjson = {'code': 200, 'content': ''}
        cardnum = self.get_argument('cardnum', default=None)
        password = self.get_argument('password', default=None)
        if not (cardnum and password):
            retjson['code'] = 400
            retjson['content'] = 'params lack'
        else:
            # read from cache
            try:
                status = self.db.query(PeDetailCache).filter(
                    PeDetailCache.cardnum == cardnum).one()
                if int(strftime('%H', localtime(
                        time()))) < 8 or (status.date > int(time()) - 10000
                                          and status.text != '*'):
                    if status.text == '*':
                        retjson['content'] = []
                        self.write(
                            json.dumps(retjson, ensure_ascii=False, indent=2))
                        self.finish()
                        return
                    self.write(base64.b64decode(status.text))
                    self.finish()
                    return
            except NoResultFound:
                status = PeDetailCache(cardnum=cardnum,
                                       text='*',
                                       date=int(time()))
                self.db.add(status)
                try:
                    self.db.commit()
                except Exception:
                    self.db.rollback()

            try:
                client = AsyncHTTPClient()
                data = {
                    "Login.Token1": cardnum,
                    "Login.Token2": password,
                    'goto': "http://mynew.seu.edu.cn/loginSuccess.portal",
                    'gotoOnFail': "http://mynew.seu.edu.cn/loginFailure.portal"
                }
                data1 = {
                    'IDToken0': '',
                    'IDToken1': cardnum,
                    'IDToken2': password,
                    'IDButton': 'Submit',
                    'goto': 'http://zccx.seu.edu.cn/',
                    'gx_charset': 'gb2312'
                }

                cookie1 = ''
                request = HTTPRequest(loginurl1,
                                      method='POST',
                                      body=urllib.urlencode(data1),
                                      follow_redirects=False)
                initcookie = ''
                try:
                    response = yield client.fetch(request)
                except HTTPError as e:
                    initcookie = e.response.headers['Set-Cookie']
                init_cookie1 = initcookie.split(';')[4].split(',')[
                    1]  #+initcookie.split(';')[0]
                header = {
                    'Host': 'zccx.seu.edu.cn',
                    'Accept':
                    'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
                    'User-Agent':
                    'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.101 Safari/537.36',
                    'Referer': 'http://zccx.seu.edu.cn/',
                    'Connection': 'Keep-alive',
                    'Accept-Encoding': 'gzip, deflate',
                    'Accept-Language': 'zh-CN,zh;q=0.8',
                    'Cookie':
                    init_cookie1 + ';' + cookie1 + ';' + ';amblcookie=02'
                }
                request = HTTPRequest(runurl, method='GET', headers=header)

                response = yield client.fetch(request)
                cookie1 = response.headers['Set-Cookie']
                header[
                    'Cookie'] = init_cookie1 + ';' + cookie1 + ';' + ';amblcookie=02'
                getpeurl = "http://zccx.seu.edu.cn/SportWeb/gym/gymExercise/gymExercise_query_result_2.jsp?xh=%s" % (
                    cardnum)
                request = HTTPRequest(getpeurl,
                                      headers=header,
                                      request_timeout=8)
                response = yield client.fetch(request)
                spider = RunningParser()
                spider.getRunningTable(response.body)
                retjson['content'] = spider.table
            except Exception as e:
                retjson['code'] = 500
                retjson['content'] = str(e)
Example #37
    def _curl_setup_request(
        self,
        curl: pycurl.Curl,
        request: HTTPRequest,
        buffer: BytesIO,
        headers: httputil.HTTPHeaders,
    ) -> None:
        curl.setopt(pycurl.URL, native_str(request.url))

        # libcurl's magic "Expect: 100-continue" behavior causes delays
        # with servers that don't support it (which include, among others,
        # Google's OpenID endpoint).  Additionally, this behavior has
        # a bug in conjunction with the curl_multi_socket_action API
        # (https://sourceforge.net/tracker/?func=detail&atid=100976&aid=3039744&group_id=976),
        # which increases the delays.  It's more trouble than it's worth,
        # so just turn off the feature (yes, setting Expect: to an empty
        # value is the official way to disable this)
        if "Expect" not in request.headers:
            request.headers["Expect"] = ""

        # libcurl adds Pragma: no-cache by default; disable that too
        if "Pragma" not in request.headers:
            request.headers["Pragma"] = ""

        curl.setopt(
            pycurl.HTTPHEADER,
            [
                "%s: %s" % (native_str(k), native_str(v))
                for k, v in request.headers.get_all()
            ],
        )

        curl.setopt(
            pycurl.HEADERFUNCTION,
            functools.partial(self._curl_header_callback, headers,
                              request.header_callback),
        )
        if request.streaming_callback:

            def write_function(b: Union[bytes, bytearray]) -> int:
                assert request.streaming_callback is not None
                self.io_loop.add_callback(request.streaming_callback, b)
                return len(b)

        else:
            write_function = buffer.write
        curl.setopt(pycurl.WRITEFUNCTION, write_function)
        curl.setopt(pycurl.FOLLOWLOCATION, request.follow_redirects)
        curl.setopt(pycurl.MAXREDIRS, request.max_redirects)
        assert request.connect_timeout is not None
        curl.setopt(pycurl.CONNECTTIMEOUT_MS,
                    int(1000 * request.connect_timeout))
        assert request.request_timeout is not None
        curl.setopt(pycurl.TIMEOUT_MS, int(1000 * request.request_timeout))
        if request.user_agent:
            curl.setopt(pycurl.USERAGENT, native_str(request.user_agent))
        else:
            curl.setopt(pycurl.USERAGENT, "Mozilla/5.0 (compatible; pycurl)")
        if request.network_interface:
            curl.setopt(pycurl.INTERFACE, request.network_interface)
        if request.decompress_response:
            curl.setopt(pycurl.ENCODING, "gzip,deflate")
        else:
            curl.setopt(pycurl.ENCODING, None)
        if request.proxy_host and request.proxy_port:
            curl.setopt(pycurl.PROXY, request.proxy_host)
            curl.setopt(pycurl.PROXYPORT, request.proxy_port)
            if request.proxy_username:
                assert request.proxy_password is not None
                credentials = httputil.encode_username_password(
                    request.proxy_username, request.proxy_password)
                curl.setopt(pycurl.PROXYUSERPWD, credentials)

            if request.proxy_auth_mode is None or request.proxy_auth_mode == "basic":
                curl.setopt(pycurl.PROXYAUTH, pycurl.HTTPAUTH_BASIC)
            elif request.proxy_auth_mode == "digest":
                curl.setopt(pycurl.PROXYAUTH, pycurl.HTTPAUTH_DIGEST)
            else:
                raise ValueError("Unsupported proxy_auth_mode %s" %
                                 request.proxy_auth_mode)
        else:
            curl.setopt(pycurl.PROXY, "")
            curl.unsetopt(pycurl.PROXYUSERPWD)
        if request.validate_cert:
            curl.setopt(pycurl.SSL_VERIFYPEER, 1)
            curl.setopt(pycurl.SSL_VERIFYHOST, 2)
        else:
            curl.setopt(pycurl.SSL_VERIFYPEER, 0)
            curl.setopt(pycurl.SSL_VERIFYHOST, 0)
        if request.ca_certs is not None:
            curl.setopt(pycurl.CAINFO, request.ca_certs)
        else:
            # There is no way to restore pycurl.CAINFO to its default value
            # (Using unsetopt makes it reject all certificates).
            # I don't see any way to read the default value from python so it
            # can be restored later.  We'll have to just leave CAINFO untouched
            # if no ca_certs file was specified, and require that if any
            # request uses a custom ca_certs file, they all must.
            pass

        if request.allow_ipv6 is False:
            # Curl behaves reasonably when DNS resolution gives an ipv6 address
            # that we can't reach, so allow ipv6 unless the user asks to disable.
            curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_V4)
        else:
            curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_WHATEVER)

        # Set the request method through curl's irritating interface which makes
        # up names for almost every single method
        curl_options = {
            "GET": pycurl.HTTPGET,
            "POST": pycurl.POST,
            "PUT": pycurl.UPLOAD,
            "HEAD": pycurl.NOBODY,
        }
        custom_methods = set(["DELETE", "OPTIONS", "PATCH"])
        for o in curl_options.values():
            curl.setopt(o, False)
        if request.method in curl_options:
            curl.unsetopt(pycurl.CUSTOMREQUEST)
            curl.setopt(curl_options[request.method], True)
        elif request.allow_nonstandard_methods or request.method in custom_methods:
            curl.setopt(pycurl.CUSTOMREQUEST, request.method)
        else:
            raise KeyError("unknown method " + request.method)

        body_expected = request.method in ("POST", "PATCH", "PUT")
        body_present = request.body is not None
        if not request.allow_nonstandard_methods:
            # Some HTTP methods nearly always have bodies while others
            # almost never do. Fail in this case unless the user has
            # opted out of sanity checks with allow_nonstandard_methods.
            if (body_expected and not body_present) or (body_present
                                                        and not body_expected):
                raise ValueError(
                    "Body must %sbe None for method %s (unless "
                    "allow_nonstandard_methods is true)" %
                    ("not " if body_expected else "", request.method))

        if body_expected or body_present:
            if request.method == "GET":
                # Even with `allow_nonstandard_methods` we disallow
                # GET with a body (because libcurl doesn't allow it
                # unless we use CUSTOMREQUEST). While the spec doesn't
                # forbid clients from sending a body, it arguably
                # disallows the server from doing anything with them.
                raise ValueError("Body must be None for GET request")
            request_buffer = BytesIO(utf8(request.body or ""))

            def ioctl(cmd: int) -> None:
                if cmd == curl.IOCMD_RESTARTREAD:  # type: ignore
                    request_buffer.seek(0)

            curl.setopt(pycurl.READFUNCTION, request_buffer.read)
            curl.setopt(pycurl.IOCTLFUNCTION, ioctl)
            if request.method == "POST":
                curl.setopt(pycurl.POSTFIELDSIZE, len(request.body or ""))
            else:
                curl.setopt(pycurl.UPLOAD, True)
                curl.setopt(pycurl.INFILESIZE, len(request.body or ""))

        if request.auth_username is not None:
            assert request.auth_password is not None
            if request.auth_mode is None or request.auth_mode == "basic":
                curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_BASIC)
            elif request.auth_mode == "digest":
                curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_DIGEST)
            else:
                raise ValueError("Unsupported auth_mode %s" %
                                 request.auth_mode)

            userpwd = httputil.encode_username_password(
                request.auth_username, request.auth_password)
            curl.setopt(pycurl.USERPWD, userpwd)
            curl_log.debug(
                "%s %s (username: %r)",
                request.method,
                request.url,
                request.auth_username,
            )
        else:
            curl.unsetopt(pycurl.USERPWD)
            curl_log.debug("%s %s", request.method, request.url)

        if request.client_cert is not None:
            curl.setopt(pycurl.SSLCERT, request.client_cert)

        if request.client_key is not None:
            curl.setopt(pycurl.SSLKEY, request.client_key)

        if request.ssl_options is not None:
            raise ValueError("ssl_options not supported in curl_httpclient")

        if threading.active_count() > 1:
            # libcurl/pycurl is not thread-safe by default.  When multiple threads
            # are used, signals should be disabled.  This has the side effect
            # of disabling DNS timeouts in some environments (when libcurl is
            # not linked against ares), so we don't do it when there is only one
            # thread.  Applications that use many short-lived threads may need
            # to set NOSIGNAL manually in a prepare_curl_callback since
            # there may not be any other threads running at the time we call
            # threading.active_count().
            curl.setopt(pycurl.NOSIGNAL, 1)
        if request.prepare_curl_callback is not None:
            request.prepare_curl_callback(curl)
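
The thread-safety comment above notes that applications with many short-lived threads may have to set NOSIGNAL themselves through prepare_curl_callback. A minimal sketch of doing so, assuming the curl-based client is configured and using a placeholder URL:

import pycurl
from tornado.httpclient import AsyncHTTPClient, HTTPRequest

def _force_nosignal(curl):
    # Disable libcurl's signal use unconditionally, independent of how many
    # threads happen to be alive when the client checks threading.active_count().
    curl.setopt(pycurl.NOSIGNAL, 1)

AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient")
request = HTTPRequest("http://example.com/",  # placeholder URL
                      prepare_curl_callback=_force_nosignal)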
Beispiel #38
0
 def test_neither_set(self):
     proxy = _RequestProxy(HTTPRequest('http://example.com/'), dict())
     self.assertIs(proxy.auth_username, None)
    async def request(self, method, url, data=None, params=dict()) -> object:
        client = AsyncHTTPClient()
        done = False
        retries = 0
        if data is not None:
            data = json_encode(data)
        try:

            def handle_response(response):
                if 'status' in response and \
                        'code' in response['status'] and \
                        200 <= int(response['status']['code']) <= 299:

                    return response

                else:
                    error = response['status'].get('err', {}) or \
                            response['status'].get('error', {}) or \
                            response['status'].get('message', {}) or \
                            url + " request failed with " + str(response['status'].get('code', 400)) + " response"

                    raise APIException(error)

            url = url_concat(url, params)
            headers = {'Content-Type': 'application/json'}
            req = HTTPRequest(url=url,
                              headers=headers,
                              method=method,
                              body=data)

            async def fetch():
                nonlocal retries, req, done
                retries += 1
                with async_timeout.timeout(120):

                    resp = await client.fetch(req)

                    if resp.headers['Content-Type'] == 'application/json':
                        body = json_decode(resp.body)
                    else:
                        body = resp.body

                    done = True
                    return handle_response(body)

            return await fetch()

        except asyncio.TimeoutError as ex:
            if retries <= MAX_RETRY:
                return await fetch()
            else:
                traceback.print_exc()
                raise ex
        except HTTPError as ex:
            if retries <= 3:
                return await fetch()
            else:
                traceback.print_exc()
                raise ex

        except Exception as ex:
            traceback.print_exc()
            raise ex
        finally:
            if done:
                client.close()
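
The retry handling above re-enters fetch() from inside the exception handlers, which makes the control flow hard to follow. A minimal alternative sketch with an explicit loop; the retry count of 3 and the JSON handling mirror the original, and fetch_with_retries is a hypothetical name:

import asyncio

from tornado.escape import json_decode
from tornado.httpclient import AsyncHTTPClient, HTTPError

async def fetch_with_retries(req, max_retry=3):
    # Retry transient failures a bounded number of times, then re-raise the
    # last error instead of recursing from the except blocks.
    client = AsyncHTTPClient()
    last_exc = None
    for attempt in range(max_retry + 1):
        try:
            resp = await client.fetch(req)
            if resp.headers.get('Content-Type', '').startswith('application/json'):
                return json_decode(resp.body)
            return resp.body
        except (asyncio.TimeoutError, HTTPError) as exc:
            last_exc = exc
    raise last_exc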
Beispiel #40
0
 def test_null_headers_setter(self):
     request = HTTPRequest('http://example.com')
     request.headers = None
     self.assertEqual(request.headers, {})
    def authenticate(self, handler, data=None):
        code = handler.get_argument("code")
        # TODO: Configure the curl_httpclient for tornado
        http_client = AsyncHTTPClient()

        params = dict(redirect_uri=self.get_callback_url(handler),
                      code=code,
                      grant_type='authorization_code')

        if self.token_url:
            url = self.token_url
        else:
            raise ValueError(
                "Please set the OAUTH2_TOKEN_URL environment variable")

        b64key = base64.b64encode(
            bytes("{}:{}".format(self.client_id, self.client_secret), "utf8"))

        headers = {
            "Accept": "application/json",
            "User-Agent": "JupyterHub",
            "Authorization": "Basic {}".format(b64key.decode("utf8"))
        }
        req = HTTPRequest(
            url,
            method="POST",
            headers=headers,
            body=urllib.parse.urlencode(
                params)  # Body is required for a POST...
        )

        resp = yield http_client.fetch(req)

        resp_json = json.loads(resp.body.decode('utf8', 'replace'))

        access_token = resp_json['access_token']
        refresh_token = resp_json.get('refresh_token', None)
        token_type = resp_json['token_type']
        scope = (resp_json.get('scope', '')).split(' ')

        # Determine who the logged in user is
        headers = {
            "Accept": "application/json",
            "User-Agent": "JupyterHub",
            "Authorization": "{} {}".format(token_type, access_token)
        }
        if self.userdata_url:
            url = url_concat(self.userdata_url, self.userdata_params)
        else:
            raise ValueError(
                "Please set the OAUTH2_USERDATA_URL environment variable")

        req = HTTPRequest(
            url,
            method=self.userdata_method,
            headers=headers,
        )

        resp = yield http_client.fetch(req)

        resp_json = json.loads(resp.body.decode('utf8', 'replace'))

        username = resp_json.get(self.username_key)
        if not username:
            username = resp_json.get("username")

        if not username:
            self.log.error("OAuth user contains no key %s: %s",
                           self.username_key, resp_json)
            return

        if self.authorized_groups:
            authorized = False

            if username in self.authorized_groups:
                authorized = True

            if not authorized:
                _group_urls = self.group_urls.split(",")
                for g_url in _group_urls:
                    groups_req = HTTPRequest(g_url.strip(),
                                             method="GET",
                                             headers=headers)
                    groups_resp = None
                    try:
                        groups_resp = yield http_client.fetch(groups_req)
                    except HTTPClientError as e:
                        if e.response:
                            self.log.error(
                                "failed to fetch groups for: %s. Reason: %s",
                                g_url, e.response.reason)

                        continue

                    groups_resp_json = json.loads(
                        groups_resp.body.decode('utf8', 'replace'))

                    # Determine whether the user is member of one of the authorized groups
                    user_group_id = [g["id"] for g in groups_resp_json]
                    for group_id in self.authorized_groups.split(","):
                        if group_id in user_group_id:
                            authorized = True
                            break

                    if authorized:
                        break

            if not authorized:
                return

        return {
            'name': username,
            'auth_state': {
                'access_token': access_token,
                'refresh_token': refresh_token,
                'oauth_user': resp_json,
                'scope': scope,
            }
        }
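
The authenticator above base64-encodes client_id:client_secret to build the Basic Authorization header by hand; HTTPRequest can also encode the credentials itself via auth_username, auth_password and auth_mode. A sketch with placeholder values standing in for the settings used in the snippet:

import urllib.parse
from tornado.httpclient import HTTPRequest

token_url = "https://provider.example/oauth/token"            # placeholder
client_id = "my-client-id"                                     # placeholder
client_secret = "my-client-secret"                             # placeholder
params = {"grant_type": "authorization_code", "code": "..."}   # placeholder

# Tornado builds the "Authorization: Basic ..." header from these options.
req = HTTPRequest(
    token_url,
    method="POST",
    auth_username=client_id,
    auth_password=client_secret,
    auth_mode="basic",
    headers={"Accept": "application/json", "User-Agent": "JupyterHub"},
    body=urllib.parse.urlencode(params),
)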
Beispiel #42
0
def checkresults(db, api):
    ini = time()
    info(
        '========================================================================='
    )
    info(' Starting CV Request processing - {}'.format(
        datetime.now().isoformat()))
    info(
        '========================================================================='
    )
    try:
        time_limit = int(environ.get('CVREQ_TIMELIMIT', 7200))
    except Exception as e:
        info(e)
        time_limit = 7200
    # Check results execute GET in the CV Server URL to acquire results for cv requests
    AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient")
    http_client = AsyncHTTPClient()
    cvreqs = db.cvrequests.find()
    # retrieve data from the cursor
    lcvreqids = [x['iid'] for x in cvreqs]
    rmcv = db.cvresults.remove({'cvrequest_iid': {'$nin': lcvreqids}},
                               multi=True)
    info('    Clear cvresults without cvrequests: {}'.format(rmcv))
    # Get ids with status != finished or error
    cvreqs = db.cvrequests.find({'status': {'$nin': ['finished', 'error']}})
    # retrieve data from the cursor
    cvreqs = [x for x in cvreqs]
    info('    CV Request not finished or error - count: ' + str(len(cvreqs)))
    # Connection preset
    params = {
        'headers': HTTPHeaders({
            "content-type": "application/json",
            "ApiKey": api['CV_APIKEY']
        }),
        'url': api['CVSERVER_URL'] + '/linc/v1/classify',
        'method': 'POST',
        'body': '',
        'request_timeout': 5,
        'validate_cert': False
    }
    # Check if cvresults exists
    for cvreq in cvreqs:
        info(
            "========================================================================"
        )
        info(" ### Checking CV Request: " + str(cvreq['iid']) + " ###")
        info("  ## Image set submitted: " + str(cvreq['image_set_iid']) +
             " ##")
        cvres = db.cvresults.find_one({'cvrequest_iid': cvreq['iid']})
        # Mark the request as error after the configured time limit (default 2 hours)
        if cvres:
            info('  >> Created at: {}'.format(cvres['created_at']))
            info('  >>        now: {}'.format(datetime.now()))
            if (datetime.now() - cvres['created_at']).seconds > time_limit:
                #info("  !!! The recognition process took more than 10 minutes... restarting")
                info("!!! The CV Request took more than 2 hours to finish")
                info("!!! Marking it with error status")
                db.cvrequests.update({'iid': cvreq['iid']}, {
                    '$set': {
                        'status': 'error',
                        'updated_at': datetime.now()
                    }
                })
                cvrem_restart = db.cvresults.remove(
                    {'cvrequest_iid': cvreq['iid']})
                cvres = None
                info(
                    "========================================================================"
                )
                continue
        if not cvres:
            # Create the CVResults
            iid = db.counters.find_and_modify(query={'_id': 'cvresults'},
                                              update={'$inc': {
                                                  'next': 1
                                              }},
                                              new=True,
                                              upsert=True)
            newcvres = dict()
            newcvres['cvrequest_iid'] = cvreq['iid']
            newcvres['iid'] = iid['next']
            newcvres['match_probability'] = '{}'
            dt = datetime.now()
            newcvres['created_at'] = dt
            newcvres['updated_at'] = dt
            ncvresobjid = db.cvresults.insert(newcvres)
            info('CV results created id: ' + str(ncvresobjid))
            cvres = db.cvresults.find_one({'cvrequest_iid': cvreq['iid']})
        # Cvres exists, so try to get data
        info('  ## CV Results id.....: {}  ## '.format(cvres['iid']))
        req_body = loads(cvreq['request_body'])
        resp_cvr = loads(cvres['match_probability'])
        if len(resp_cvr) == 0:
            resp_cvr['cv'] = list()
            resp_cvr['whisker'] = list()
        # Check for cv results
        # cv_topk_classifier_accuracy
        # whisker_topk_classifier_accuracy
        if not req_body.get('classifiers', False):
            info(' >>> CV Request invalid - id: {}'.format(cvreq['iid']))
            info(' >>> No classifiers found.')
        else:
            # Check CV
            finished = {'cv': 0, 'whisker': 0}
            for clf in ['cv', 'whisker']:
                if req_body['classifiers'].get(clf, False):
                    info('    Processing calls for the classifier {}'.format(
                        clf.upper()))
                    add = len(resp_cvr[clf]) == 0
                    if add:
                        # Submit requests
                        for n, clf_call in enumerate(req_body[clf + '_calls']):
                            dparams = params.copy()
                            dparams['body'] = dumps(clf_call)
                            request = HTTPRequest(**dparams)
                            try:
                                response = yield http_client.fetch(request)
                            except HTTPError as e:
                                info(e)
                                response = e.response
                            except Exception as e:
                                info(e)
                                response = None
                            if response and response.code in [200, 201]:
                                info('          Call {} #{} - success'.format(
                                    clf.upper(), n))
                                resp_cvr[clf].append(
                                    loads(response.body.decode('utf-8')))
                            else:
                                info('          Call {} #{} - fail'.format(
                                    clf.upper(), n))
                                resp_cvr[clf].append('FAILURE')
                    else:
                        # Check results
                        for n, clf_call in enumerate(req_body[clf + '_calls']):
                            info(resp_cvr[clf][n])
                            # {'id': '432f7612-8b7d-4132-baae-f93f094abb7f', 'status': 'PENDING', 'errors': []}
                            if isinstance(resp_cvr[clf][n],
                                          dict) and resp_cvr[clf][n].get(
                                              'status', None) == 'finished':
                                info(
                                    '          Request CV #{} finished'.format(
                                        n))
                                finished[clf] += 1
                            else:
                                info('       Check results for CV #{}'.format(
                                    n))
                                dparams = params.copy()
                                del dparams['body']
                                dparams['method'] = 'GET'
                                url = api[
                                    'CVSERVER_URL'] + '/linc/v1/results/' + resp_cvr[
                                        clf][n]['id']
                                info('       {}'.format(url))
                                dparams['url'] = url
                                request = HTTPRequest(**dparams)
                                try:
                                    response = yield http_client.fetch(request)
                                except HTTPError as e:
                                    info(e)
                                    response = e.response
                                except Exception as e:
                                    info(e)
                                    response = None
                                if response and response.code in [200, 201]:
                                    info('          Call #{} - success'.format(
                                        n))
                                    resp_data = loads(
                                        response.body.decode('utf-8'))
                                    info('          Status: {}'.format(
                                        resp_data['status']))
                                    if resp_data['status'] == 'finished':
                                        info('       Resp data: {}'.format(
                                            resp_data))
                                        resp_cvr[clf][n] = resp_data.copy()
                                    elif resp_data['status'] == 'error':
                                        info(
                                            '      Forcing it to be FINISHED (Workaround)'
                                        )
                                        finished[clf] += 1
                                else:
                                    info('          Call #{} - fail'.format(n))
            dt = datetime.now()
            if (finished['cv'] == len(req_body['cv_calls'])
                    and finished['whisker'] == len(req_body['whisker_calls'])):
                info(' Loading capabilities...')
                dparams = params.copy()
                del dparams['body']
                dparams['method'] = 'GET'
                dparams['url'] = api['CVSERVER_URL'] + '/linc/v1/capabilities'
                request = HTTPRequest(**dparams)
                try:
                    response = yield http_client.fetch(request)
                except HTTPError as e:
                    info(e)
                    response = e.response
                except Exception as e:
                    info(e)
                    response = None
                if response and response.code in [200, 201]:
                    info(' ### CV Request finished ###')
                    db.cvrequests.update(
                        {'iid': cvreq['iid']},
                        {'$set': {
                            'status': 'finished',
                            'updated_at': dt
                        }})
                    resp_cvr['capabilities'] = loads(
                        response.body.decode('utf-8'))
                    resp_cvr['execution'] = dt.timestamp() - cvres['created_at'].timestamp()
                else:
                    info(' Fail to retrieve capabilities info...')
            db.cvresults.update({'cvrequest_iid': cvreq['iid']}, {
                '$set': {
                    'match_probability': dumps(resp_cvr),
                    'updated_at': dt
                }
            })
            api['cache'].delete('imgset-' + str(cvreq['image_set_iid']))
            info('   Cache delete for image set id: {}'.format(
                cvreq['image_set_iid']))
    info(
        '========================================================================='
    )
    info(' CV Request processing finished - Execution time: {0:.2f} s'.format(
        time() - ini))
    info(
        '========================================================================='
    )
Beispiel #43
0
    def post(self):

        pay_uid = self.session.get('uid')
        charge_uid = self.get_argument('charge_uid')
        goodsId = self.get_argument('goods_id')
        invite_code = self.get_argument('invite_code', '')
        res = self.check_invite_code(invite_code, charge_uid)
        if not res[0]:
            # self.write("<script>alert('不符合条件,无法充值');window.history.back();</script>")
            u_error(self, res[1])
            raise tornado.gen.Return()

        out_trade_no, goodsName, goodsPrice, cards, present_card = self.get_trade_and_goods(
            goodsId, charge_uid)
        appid = WECHAT_APP_ID
        mch_id = WECHAT_MCHID
        nonce_str = "".join(
            random.sample('zyxwvutsrqponmlkjihgfedcba123456789', 32))
        body = "房卡X" + str(
            int(cards) + int(present_card if present_card else 0))
        # Amount (in cents)
        print '----------totalfee---------------'
        print goodsPrice
        print '----------totalfee---------------'
        total_fee = int(float(goodsPrice) * 100)
        spbill_create_ip = "120.79.229.113"
        trade_type = "JSAPI"
        openid = self.session.get("openid")
        notify_url = "agent.qkgame.com.cn/notify"
        key = WECHAT_KEY

        send_list = [
            'appid', 'mch_id', 'nonce_str', 'body', 'out_trade_no',
            'total_fee', 'spbill_create_ip', 'trade_type', 'openid',
            'notify_url'
        ]
        send_list.sort()
        stringA = ""
        for val in send_list:
            stringA += val + "=" + str(eval(val)) + "&"

        stringA += "key=" + key
        m = hashlib.md5()
        m.update(stringA)
        sign = m.hexdigest().upper()
        send_list.append('sign')

        dic = {}
        for s in send_list:
            dic[s] = str(eval(s))
        dic = {"xml": dic}

        data = xmltodict.unparse(dic).encode('utf-8')
        url = "https://api.mch.weixin.qq.com/pay/unifiedorder"
        client = AsyncHTTPClient()
        req = HTTPRequest(
            url,
            method="POST",
            body=data,
        )

        # Send the request to obtain the prepay_id
        resp = yield client.fetch(req)
        print '--------------'
        print resp.body
        print '--------------'
        res_dic = xmltodict.parse(resp.body)
        prepay_id = res_dic["xml"]["prepay_id"]
        timeStamp = int(time.time())
        package = "prepay_id=" + str(prepay_id)
        signType = "MD5"
        appId = WECHAT_APP_ID
        nonceStr = nonce_str

        jssdk_list = ['appId', 'nonceStr', 'timeStamp', 'package', 'signType']
        jssdk_list.sort()
        stringA = ""
        for val in jssdk_list:
            stringA += val + "=" + str(eval(val)) + "&"

        stringA += "key=" + key
        m = hashlib.md5()
        m.update(stringA)
        paySign = m.hexdigest().upper()
        self.add_order_to_pending(nonceStr, prepay_id, goodsName, total_fee,
                                  goodsId, cards, present_card, out_trade_no,
                                  paySign, charge_uid)

        u_success(self,
                  '请求成功',
                  data={
                      'signType': signType,
                      'appId': appId,
                      'timeStamp': str(timeStamp),
                      'nonceStr': nonceStr,
                      'package': package,
                      'paySign': paySign
                  })
Beispiel #44
0
 def test_default_set(self):
     proxy = _RequestProxy(HTTPRequest('http://example.com/'),
                           dict(network_interface='foo'))
     self.assertEqual(proxy.network_interface, 'foo')
Beispiel #45
0
 def test_headers(self):
     request = HTTPRequest('http://example.com', headers={'foo': 'bar'})
     self.assertEqual(request.headers, {'foo': 'bar'})
Beispiel #46
0
    def post(self):

        uid = self.session.get("uid")
        money = self.get_argument('money', '')
        res = self.check_money_valid(money)
        if not res[0]:
            u_error(self, res[1])
            raise tornado.gen.Return()

        res = self.check_withdraw_can(uid, money)
        if not res[0]:
            u_error(self, res[1])
            raise tornado.gen.Return()

        if self.redis.get('short:forbid:user:%s' % uid):
            u_error(self, '您的提现过于频繁,请稍等一会!')
            raise tornado.gen.Return()

        self.redis.setex('short:forbid:user:%s' % uid, 10, 10)

        if not money:
            # self.write("请填写正确的金额!")
            u_error(self, '请填写正确的金额!')
            raise tornado.gen.Return()
        else:
            try:
                money_cent = int(float(money) * 100)
            except Exception as e:
                # self.write("格式不正确")
                u_error(self, '格式不正确')
                raise tornado.gen.Return()

        if not self.check_have_money(uid, money_cent):
            # self.write("你的余额不够!")
            u_error(self, '你的余额不够')
            raise tornado.gen.Return()

        openid = self.session.get("openid")
        mch_appid = WECHAT_APP_ID
        mchid = WECHAT_MCHID
        nonce_str = "".join(
            random.sample('zyxwvutsrqponmlkjihgfedcba123456789', 32))
        partner_trade_no = uuid.uuid4().hex
        check_name = "NO_CHECK"
        amount = money_cent
        desc = "代理提现"
        spbill_create_ip = "120.79.229.113"
        key = WECHAT_KEY

        send_list = [
            'openid', 'mch_appid', 'mchid', 'nonce_str', 'partner_trade_no',
            'check_name', 'amount', 'desc', 'spbill_create_ip'
        ]
        send_list.sort()
        stringA = ""
        for val in send_list:
            stringA += val + "=" + str(eval(val)) + "&"

        stringA += "key=" + key
        m = hashlib.md5()
        m.update(stringA)
        sign = m.hexdigest().upper()
        send_list.append('sign')

        dic = {}
        for s in send_list:
            dic[s] = str(eval(s))
        dic = {"xml": dic}
        data = xmltodict.unparse(dic)
        url = "https://api.mch.weixin.qq.com/mmpaymkttransfers/promotion/transfers"

        client = AsyncHTTPClient()
        req = HTTPRequest(url,
                          method="POST",
                          body=data,
                          client_key=self._key_pem,
                          client_cert=self._cert_pem)
        resp = yield client.fetch(req)

        res_dic = xmltodict.parse(resp.body)
        res = self.check_success(res_dic, amount)
        if res:
            # self.redirect('/cash/take?withdraw_res=1')
            u_jump(self, '/cash/take?withdraw_res=1')
        else:
            # self.write('提现失败!')
            try:
                # res = str(res_dic['xml']['return_msg'])
                if res_dic['xml']['err_code'] == "NOTENOUGH":
                    res = "商户可用提现余额不足,请不要着急,建议联系客服处理。"
                elif res_dic['xml']['err_code'] == "SENDNUM_LIMIT":
                    res = "提现次数超出3次,提示:已达到单日最大提现次数,请改天再来。"
            except Exception as e:
                print '----except-----------'
                print str(e)
                print '---------------'
                res = "提现失败!"
            u_error(self, res)
Beispiel #47
0
    async def authenticate(self, handler, data=None, google_groups=None):
        code = handler.get_argument("code")
        body = urllib.parse.urlencode(
            dict(
                code=code,
                redirect_uri=self.get_callback_url(handler),
                client_id=self.client_id,
                client_secret=self.client_secret,
                grant_type="authorization_code",
            ))

        req = HTTPRequest(
            self.token_url,
            method="POST",
            headers={"Content-Type": "application/x-www-form-urlencoded"},
            body=body,
        )
        user = await self.fetch(req, "completing oauth")
        access_token = str(user['access_token'])
        refresh_token = user.get('refresh_token', None)

        req = HTTPRequest(
            url_concat(
                self.user_info_url,
                {'access_token': access_token},
            ))
        bodyjs = await self.fetch(req, "fetching user info")
        user_email = bodyjs['email']
        user_email_domain = user_email.split('@')[1]
        username = bodyjs[self.username_claim]

        if not bodyjs['verified_email']:
            self.log.warning("Google OAuth unverified email attempt: %s",
                             user_email)
            raise HTTPError(403,
                            "Google email {} not verified".format(user_email))

        if self.hosted_domain:
            if user_email_domain not in self.hosted_domain:
                self.log.warning(
                    "Google OAuth unauthorized domain attempt: %s", user_email)
                raise HTTPError(
                    403,
                    "Google account domain @{} not authorized.".format(
                        user_email_domain),
                )
            if len(self.hosted_domain) == 1 and user_email == username:
                # unambiguous domain, use only base name
                username = user_email.split('@')[0]

        if refresh_token is None:
            self.log.debug(
                "Refresh token was empty, will try to pull refresh_token from previous auth_state"
            )
            user = handler.find_user(username)

            if user and user.encrypted_auth_state:
                self.log.debug(
                    "encrypted_auth_state was found, will try to decrypt and pull refresh_token from it"
                )
                try:
                    encrypted = user.encrypted_auth_state
                    auth_state = await decrypt(encrypted)
                    refresh_token = auth_state.get('refresh_token')
                except (ValueError, InvalidToken, EncryptionUnavailable) as e:
                    self.log.warning(
                        "Failed to retrieve encrypted auth_state for %s because %s",
                        username,
                        e,
                    )

        user_info = {
            'name': username,
            'auth_state': {
                'access_token': access_token,
                'refresh_token': refresh_token,
                'google_user': bodyjs,
            },
        }

        if self.admin_google_groups or self.allowed_google_groups:
            user_info = await self._add_google_groups_info(
                user_info, google_groups)

        return user_info
Beispiel #48
0
 def test_if_modified_since(self):
     http_date = datetime.datetime.utcnow()
     request = HTTPRequest('http://example.com',
                           if_modified_since=http_date)
     self.assertEqual(request.headers,
                      {'If-Modified-Since': format_timestamp(http_date)})
Beispiel #49
0
    def handle_server(user, server_name, server):
        """Handle (maybe) culling a single server

        Returns True if server is now stopped (user removable),
        False otherwise.
        """
        log_name = user['name']
        if server_name:
            log_name = '%s/%s' % (user['name'], server_name)
        if server.get('pending'):
            app_log.warning("Not culling server %s with pending %s", log_name,
                            server['pending'])
            return False

        if server.get('started'):
            age = now - parse_date(server['started'])
        else:
            # started may be undefined on jupyterhub < 0.9
            age = None

        # check last activity
        # last_activity can be None in 0.9
        if server['last_activity']:
            inactive = now - parse_date(server['last_activity'])
        else:
            # no activity yet, use start date
            # last_activity may be None with jupyterhub 0.9,
            # which introduces the 'started' field which is never None
            # for running servers
            inactive = age

        should_cull = (inactive is not None
                       and inactive.total_seconds() >= inactive_limit)
        if should_cull:
            app_log.info("Culling server %s (inactive for %s)", log_name,
                         format_td(inactive))

        if max_age and not should_cull:
            # only check started if max_age is specified
            # so that we can still be compatible with jupyterhub 0.8
            # which doesn't define the 'started' field
            if age is not None and age.total_seconds() >= max_age:
                app_log.info("Culling server %s (age: %s, inactive for %s)",
                             log_name, format_td(age), format_td(inactive))
                should_cull = True

        if not should_cull:
            app_log.debug("Not culling server %s (age: %s, inactive for %s)",
                          log_name, format_td(age), format_td(inactive))
            return False

        body = None
        if server_name:
            # culling a named server
            # A named server can be stopped and kept available to the user
            # for starting again or stopped and removed. To remove the named
            # server we have to pass an additional option in the body of our
            # DELETE request.
            delete_url = url + "/users/%s/servers/%s" % (
                quote(user['name']),
                quote(server['name']),
            )
            if remove_named_servers:
                body = json.dumps({"remove": True})
        else:
            delete_url = url + '/users/%s/server' % quote(user['name'])

        req = HTTPRequest(
            url=delete_url,
            method='DELETE',
            headers=auth_header,
            body=body,
            allow_nonstandard_methods=True,
        )
        resp = yield fetch(req)
        if resp.code == 202:
            app_log.warning(
                "Server %s is slow to stop",
                log_name,
            )
            # return False to prevent culling user with pending shutdowns
            return False
        return True
Beispiel #50
0
 def test_request_set(self):
     proxy = _RequestProxy(
         HTTPRequest('http://example.com/', user_agent='foo'), dict())
     self.assertEqual(proxy.user_agent, 'foo')
Beispiel #51
0
async def test_client_fetch_timeout():
    async with slow_server() as server:
        gateway = Gateway(server.address, auth=BasicAuth("alice"))
        with pytest.raises(TimeoutError):
            await gateway._fetch(
                HTTPRequest(url=server.address, request_timeout=1))
Beispiel #52
0
    def authenticate(self, handler, data=None):
        """We set up auth_state based on additional CILogon info if we
        receive it.
        """
        code = handler.get_argument("code")
        # TODO: Configure the curl_httpclient for tornado
        http_client = AsyncHTTPClient()

        # Exchange the OAuth code for a CILogon Access Token
        # See: http://www.cilogon.org/oidc
        headers = {
            "Accept": "application/json",
            "User-Agent": "JupyterHub",
        }

        params = dict(
            client_id=self.client_id,
            client_secret=self.client_secret,
            redirect_uri=self.oauth_callback_url,
            code=code,
            grant_type='authorization_code',
        )

        url = url_concat("https://%s/oauth2/token" % CILOGON_HOST, params)

        req = HTTPRequest(url, headers=headers, method="POST", body='')

        resp = yield http_client.fetch(req)
        token_response = json.loads(resp.body.decode('utf8', 'replace'))
        access_token = token_response['access_token']
        self.log.info("Access token acquired.")
        # Determine who the logged in user is
        params = dict(access_token=access_token)
        req = HTTPRequest(url_concat(
            "https://%s/oauth2/userinfo" % CILOGON_HOST, params),
                          headers=headers)
        resp = yield http_client.fetch(req)
        resp_json = json.loads(resp.body.decode('utf8', 'replace'))

        claimlist = [self.username_claim]
        if self.additional_username_claims:
            claimlist.extend(self.additional_username_claims)

        for claim in claimlist:
            username = resp_json.get(claim)
            if username:
                break
        if not username:
            if len(claimlist) < 2:
                self.log.error("Username claim %s not found in response: %s",
                               self.username_claim, sorted(resp_json.keys()))
            else:
                self.log.error("No username claim from %r in response: %s",
                               claimlist, sorted(resp_json.keys()))
            raise web.HTTPError(500, "Failed to get username from CILogon")

        if self.idp_whitelist:
            gotten_name, gotten_idp = username.split('@')
            if gotten_idp not in self.idp_whitelist:
                self.log.error(
                    "Trying to login from not whitelisted domain %s",
                    gotten_idp)
                raise web.HTTPError(
                    500, "Trying to login from not whitelisted domain")
            if len(self.idp_whitelist) == 1 and self.strip_idp_domain:
                username = gotten_name
        userdict = {"name": username}
        # Now we set up auth_state
        userdict["auth_state"] = auth_state = {}
        # Save the token response and full CILogon reply in auth state
        # These can be used for user provisioning
        #  in the Lab/Notebook environment.
        auth_state['token_response'] = token_response
        # store the whole user model in auth_state.cilogon_user
        # keep access_token as well, in case anyone was relying on it
        auth_state['access_token'] = access_token
        auth_state['cilogon_user'] = resp_json
        return userdict
Beispiel #53
0
 def request(self,
             url,
             data=None,
             method="GET",
             tried=False,
             full_response=False):
     #print("Token: {0}".format(self.token_manager.token))
     if self.token_manager.token in [None, ""]:
         yield self.get_token()
         tried = True
     headers = self.get_headers()
     #print("Headers:{0}".format(headers))
     if data != None:
         if method in [None, "GET"]:
             method = "POST"
         print("building request: {0}, {1}, {2}".format(method, url, data))
         request = HTTPRequest(url,
                               method=method,
                               headers=headers,
                               body=data)
     else:
         request = HTTPRequest(url, method=method, headers=headers)
     http_client = AsyncHTTPClient()
     try:
         response = yield http_client.fetch(request)
         if not full_response:
             message = json.loads(response.body.decode("utf-8"))
             response = {
                 "success": True,
                 "code": response.code,
                 "data": message,
                 "headers": response.headers._dict
             }
     except HTTPError as he:
         print("Requester.request HTTPError Code: {0}, {1}".format(
             he.code, he.message))
         try:
             print(he.response.body)
         except Exception as e:
             pass
         if not full_response:
             try:
                 response = {
                     "success": False,
                     "code": he.code,
                     "message": he.message,
                     "headers": he.response.headers._dict
                 }
             except Exception as ex:
                 response = {
                     "success": False,
                     "code": he.code,
                     "message": he.message
                 }
         else:
             response = he.response
         if not tried:
             if he.code == 401:
                 print("Token may have expired, regenerating")
                 yield self.get_token()
                 response = yield self.request(url, data, method, True,
                                               full_response)
             elif he.code == 400:
                 try:
                     print("request error he.response.body:{0}".format(
                         he.response.body))
                     jval = json.loads(he.response.body.decode('utf-8'))
                     if "Invalid access token" in jval['error']['message'][
                             'error_description']:
                         yield self.get_token()
                         response = yield self.request(
                             url, data, method, True, full_response)
                 except Exception as ex:
                     print(ex)
             elif he.code == 429:
                 print("SHOULD DO SOMETHING HERE FOR 429")
             else:
                 print("SOME OTHER CODE: {0}".format(he.code))
                 #response = yield self.request(url, data, method, True, full_response)
         else:
             print("Failed on second attempt.")
     except Exception as e:
         traceback.print_exc()
         message = "{0}".format(e)
         if not full_response:
             response = {
                 "success": False,
                 "code": 500,
                 "message": message,
                 "headers": {}
             }
         else:
             response = None
     raise tornado.gen.Return(response)
    def github_api_request(self, api_url, etag=None):
        client = AsyncHTTPClient()
        if self.auth:
            # Add auth params. After logging!
            api_url = url_concat(api_url, self.auth)

        headers = {}
        if etag:
            headers['If-None-Match'] = etag
        req = HTTPRequest(api_url, headers=headers, user_agent="BinderHub")

        try:
            resp = yield client.fetch(req)
        except HTTPError as e:
            if e.code == 304:
                resp = e.response
            elif (
                e.code == 403
                and e.response
                and e.response.headers.get('x-ratelimit-remaining') == '0'
            ):
                rate_limit = e.response.headers['x-ratelimit-limit']
                reset_timestamp = int(e.response.headers['x-ratelimit-reset'])
                reset_seconds = int(reset_timestamp - time.time())
                self.log.error(
                    "GitHub Rate limit ({limit}) exceeded. Reset in {delta}.".format(
                        limit=rate_limit,
                        delta=timedelta(seconds=reset_seconds),
                    )
                )
                # round expiry up to nearest 5 minutes
                minutes_until_reset = 5 * (1 + (reset_seconds // 60 // 5))

                raise ValueError("GitHub rate limit exceeded. Try again in %i minutes."
                    % minutes_until_reset
                )
            # Status 422 is returned by the API when we try and resolve a non
            # existent reference
            elif e.code in (404, 422):
                return None
            else:
                raise

        # record and log github rate limit
        remaining = int(resp.headers['x-ratelimit-remaining'])
        rate_limit = int(resp.headers['x-ratelimit-limit'])
        reset_timestamp = int(resp.headers['x-ratelimit-reset'])

        # record with prometheus
        GITHUB_RATE_LIMIT.set(remaining)

        # log at different levels, depending on remaining fraction
        fraction = remaining / rate_limit
        if fraction < 0.2:
            log = self.log.warning
        elif fraction < 0.5:
            log = self.log.info
        else:
            log = self.log.debug

        # str(timedelta) looks like '00:32'
        delta = timedelta(seconds=int(reset_timestamp - time.time()))
        log("GitHub rate limit remaining {remaining}/{limit}. Reset in {delta}.".format(
            remaining=remaining, limit=rate_limit, delta=delta,
        ))
        return resp
Beispiel #55
0
 def test_bad_attribute(self):
     proxy = _RequestProxy(HTTPRequest('http://example.com/'), dict())
     with self.assertRaises(AttributeError):
         proxy.foo
Beispiel #56
0
 def test_headers_setter(self):
     request = HTTPRequest('http://example.com')
     request.headers = {'bar': 'baz'}
     self.assertEqual(request.headers, {'bar': 'baz'})
Beispiel #57
0
 def test_both_set(self):
     proxy = _RequestProxy(
         HTTPRequest('http://example.com/', proxy_host='foo'),
         dict(proxy_host='bar'))
     self.assertEqual(proxy.proxy_host, 'foo')
Beispiel #58
0
 def test_body_setter(self):
     request = HTTPRequest('http://example.com')
     request.body = 'foo'
     self.assertEqual(request.body, utf8('foo'))
Beispiel #59
0
async def ws_client(http_server, http_server_port):
    """ return a websocket client """
    request = HTTPRequest(f'ws://localhost:{http_server_port[1]}/ws')
    result = await websocket_connect(request)
    return result
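
One possible way to use the ws_client fixture above in a test; the message contents and the assumption that the server replies to each message are illustrative only:

async def test_ws_roundtrip(ws_client):
    # Send a message and wait for the reply; read_message() returns None
    # once the connection has been closed by the server.
    await ws_client.write_message("ping")
    reply = await ws_client.read_message()
    assert reply is not None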
Beispiel #60
0
    async def authenticate(self, handler, data=None):
        code = handler.get_argument("code")

        # Exchange the OAuth code for a GitLab Access Token
        #
        # See: https://github.com/gitlabhq/gitlabhq/blob/HEAD/doc/api/oauth2.md

        # GitLab specifies a POST request yet requires URL parameters
        params = dict(
            client_id=self.client_id,
            client_secret=self.client_secret,
            code=code,
            grant_type="authorization_code",
            redirect_uri=self.get_callback_url(handler),
        )

        validate_server_cert = self.validate_server_cert

        url = url_concat("%s/oauth/token" % self.gitlab_url, params)

        req = HTTPRequest(
            url,
            method="POST",
            headers={"Accept": "application/json"},
            validate_cert=validate_server_cert,
            body='',  # Body is required for a POST...
        )

        resp_json = await self.fetch(req, label="getting access token")
        access_token = resp_json['access_token']

        # memoize gitlab version for class lifetime
        if self.gitlab_version is None:
            self.gitlab_version = await self._get_gitlab_version(access_token)
            self.member_api_variant = 'all/' if self.gitlab_version >= [12, 4] else ''

        # Determine who the logged in user is
        req = HTTPRequest(
            "%s/user" % self.gitlab_api,
            method="GET",
            validate_cert=validate_server_cert,
            headers=_api_headers(access_token),
        )
        resp_json = await self.fetch(req, label="getting gitlab user")

        username = resp_json["username"]
        user_id = resp_json["id"]
        is_admin = resp_json.get("is_admin", False)

        # Check if user is a member of any allowed groups or projects.
        # These checks are performed here, as it requires `access_token`.
        user_in_group = user_in_project = False
        is_group_specified = is_project_id_specified = False

        if self.allowed_gitlab_groups:
            is_group_specified = True
            user_in_group = await self._check_membership_allowed_groups(
                user_id, access_token
            )

        # We skip project_id check if user is in allowed group.
        if self.allowed_project_ids and not user_in_group:
            is_project_id_specified = True
            user_in_project = await self._check_membership_allowed_project_ids(
                user_id, access_token
            )

        no_config_specified = not (is_group_specified or is_project_id_specified)

        if (
            (is_group_specified and user_in_group)
            or (is_project_id_specified and user_in_project)
            or no_config_specified
        ):
            return {
                'name': username,
                'auth_state': {'access_token': access_token, 'gitlab_user': resp_json},
            }
        else:
            self.log.warning("%s not in group or project allowed list", username)
            return None
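
The GitLab example above calls an _api_headers helper that is defined outside the snippet. A plausible sketch of such a helper, offered as an assumption rather than the project's verbatim code:

def _api_headers(access_token):
    # Headers for authenticated calls against the GitLab REST API.
    return {
        "Accept": "application/json",
        "User-Agent": "JupyterHub",
        "Authorization": "Bearer {}".format(access_token),
    }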