Code Example #1
File: database.py  Project: Benozo/catawampus
 def _ensure_connected(self):
     # Mysql by default closes client connections that are idle for
     # 8 hours, but the client library does not report this fact until
     # you try to perform a query and it fails.  Protect against this
     # case by preemptively closing and reopening the connection
     # if it has been idle for too long (7 hours by default).
     if (self._db is None or
         (monotime() - self._last_use_time > self.max_idle_time)):
         self.reconnect()
     self._last_use_time = monotime()
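The comment above explains the reconnect-on-idle policy; in a connection wrapper like this one, every query method would normally call _ensure_connected() before touching self._db. A hypothetical caller is sketched below (execute() and its cursor handling are illustrative, not the projects' actual code):

 def execute(self, query, *parameters):
     """Hypothetical query helper: reconnect if the connection has been
     idle for too long, then run the query and return the last row id."""
     self._ensure_connected()
     cursor = self._db.cursor()
     try:
         cursor.execute(query, parameters)
         return cursor.lastrowid
     finally:
         cursor.close()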
Code Example #2
File: database.py  Project: prahlad574/my
 def _ensure_connected(self):
     # Mysql by default closes client connections that are idle for
     # 8 hours, but the client library does not report this fact until
     # you try to perform a query and it fails.  Protect against this
     # case by preemptively closing and reopening the connection
     # if it has been idle for too long (7 hours by default).
     if (self._db is None
             or (monotime() - self._last_use_time > self.max_idle_time)):
         self.reconnect()
     self._last_use_time = monotime()
Code Example #3
File: ioloop.py  Project: Benozo/catawampus
 def __init__(self, deadline, callback, monotonic):
     if isinstance(deadline, (int, long, float)):
         if monotonic:
             self.deadline = deadline
         else:
             if hasattr(time, 'monotonic'):
                 import inspect
                 logging.warning('non-monotonic time _Timeout() created at %s:%d',
                                 inspect.stack()[2][1], inspect.stack()[2][2])
             self.deadline = deadline - time.time() + monotime()
     elif isinstance(deadline, datetime.timedelta):
         self.deadline = monotime() + _Timeout.timedelta_to_seconds(deadline)
     else:
         raise TypeError("Unsupported deadline %r" % deadline)
     self.callback = callback
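Every excerpt in this listing calls a monotime() helper whose implementation is not part of the search results. Judging from how it is used (a source of seconds that is preferred over time.time() whenever a monotonic clock exists, as in the _Timeout constructor above), a minimal sketch might look like this; the module-level _monotonic name is an illustrative assumption, not the projects' actual code:

import time

# Hypothetical sketch of the monotime() helper used throughout these
# excerpts; the real implementation is not shown here.  It prefers the
# interpreter's monotonic clock when one is available (Python 3.3+ or a
# backport) and falls back to wall-clock time otherwise.
_monotonic = getattr(time, 'monotonic', None)


def monotime():
    """Return the current time in seconds as a float, using a monotonic
    clock when the platform provides one."""
    if _monotonic is not None:
        return _monotonic()
    return time.time()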
Code Example #4
File: ioloop.py  Project: athul04/honeything
 def _schedule_next(self):
     if self._running:
         current_time = monotime()
         while self._next_timeout <= current_time:
             self._next_timeout += self.callback_time / 1000.0
         self._timeout = self.io_loop.add_timeout(self._next_timeout,
                                 self._run, monotonic=True)
Code Example #5
File: curl_httpclient.py  Project: prahlad574/my
    def _process_queue(self):
        with stack_context.NullContext():
            while True:
                started = 0
                while self._free_list and self._requests:
                    started += 1
                    curl = self._free_list.pop()
                    (request, callback) = self._requests.popleft()
                    curl.info = {
                        "headers": httputil.HTTPHeaders(),
                        "buffer": cStringIO.StringIO(),
                        "request": request,
                        "callback": callback,
                        "curl_start_time": monotime(),
                    }
                    # Disable IPv6 to mitigate the effects of this bug
                    # on curl versions <= 7.21.0
                    # http://sourceforge.net/tracker/?func=detail&aid=3017819&group_id=976&atid=100976
                    if pycurl.version_info()[2] <= 0x71500:  # 7.21.0
                        curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_V4)
                    _curl_setup_request(curl, request, curl.info["buffer"],
                                        curl.info["headers"])
                    self._multi.add_handle(curl)

                if not started:
                    break
Code Example #6
File: database.py  Project: Benozo/catawampus
    def __init__(self, host, database, user=None, password=None,
                 max_idle_time=7 * 3600):
        self.host = host
        self.database = database
        self.max_idle_time = max_idle_time

        args = dict(conv=CONVERSIONS, use_unicode=True, charset="utf8",
                    db=database, init_command='SET time_zone = "+0:00"',
                    sql_mode="TRADITIONAL")
        if user is not None:
            args["user"] = user
        if password is not None:
            args["passwd"] = password

        # We accept a path to a MySQL socket file or a host(:port) string
        if "/" in host:
            args["unix_socket"] = host
        else:
            self.socket = None
            pair = host.split(":")
            if len(pair) == 2:
                args["host"] = pair[0]
                args["port"] = int(pair[1])
            else:
                args["host"] = host
                args["port"] = 3306

        self._db = None
        self._db_args = args
        self._last_use_time = monotime()
        try:
            self.reconnect()
        except Exception:
            logging.error("Cannot connect to MySQL on %s", self.host,
                          exc_info=True)
Code Example #7
File: curl_httpclient.py  Project: Benozo/catawampus
    def _process_queue(self):
        with stack_context.NullContext():
            while True:
                started = 0
                while self._free_list and self._requests:
                    started += 1
                    curl = self._free_list.pop()
                    (request, callback) = self._requests.popleft()
                    curl.info = {
                        "headers": httputil.HTTPHeaders(),
                        "buffer": cStringIO.StringIO(),
                        "request": request,
                        "callback": callback,
                        "curl_start_time": monotime(),
                    }
                    # Disable IPv6 to mitigate the effects of this bug
                    # on curl versions <= 7.21.0
                    # http://sourceforge.net/tracker/?func=detail&aid=3017819&group_id=976&atid=100976
                    if pycurl.version_info()[2] <= 0x71500:  # 7.21.0
                        curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_V4)
                    _curl_setup_request(curl, request, curl.info["buffer"],
                                        curl.info["headers"])
                    self._multi.add_handle(curl)

                if not started:
                    break
Code Example #8
 def cleanup(self):
     try:
         yield
     except Exception, e:
         logging.warning("uncaught exception", exc_info=True)
         self._run_callback(HTTPResponse(self.request, 599, error=e, request_time=monotime() - self.start_time))
         if hasattr(self, "stream"):
             self.stream.close()
Code Example #9
 def _schedule_next(self):
     if self._running:
         current_time = monotime()
         while self._next_timeout <= current_time:
             self._next_timeout += self.callback_time / 1000.0
         self._timeout = self.io_loop.add_timeout(self._next_timeout,
                                                  self._run,
                                                  monotonic=True)
Code Example #10
File: simple_httpclient.py  Project: prahlad574/my
 def _on_timeout(self):
     self._timeout = None
     self._run_callback(
         HTTPResponse(self.request,
                      599,
                      request_time=monotime() - self.start_time,
                      error=HTTPError(599, "Timeout")))
     self.stream.close()
Code Example #11
File: ioloop.py  Project: ultilix/catawampus
 def __init__(self, deadline, callback, monotonic):
     if isinstance(deadline, (int, long, float)):
         if monotonic:
             self.deadline = deadline
         else:
             if hasattr(time, 'monotonic'):
                 import inspect
                 logging.warning(
                     'non-monotonic time _Timeout() created at %s:%d',
                     inspect.stack()[2][1],
                     inspect.stack()[2][2])
             self.deadline = deadline - time.time() + monotime()
     elif isinstance(deadline, datetime.timedelta):
         self.deadline = monotime() + _Timeout.timedelta_to_seconds(
             deadline)
     else:
         raise TypeError("Unsupported deadline %r" % deadline)
     self.callback = callback
Code Example #12
    def __init__(self, environ):
        """Parses the given WSGI environ to construct the request."""
        self.method = environ["REQUEST_METHOD"]
        self.path = urllib.quote(environ.get("SCRIPT_NAME", ""))
        self.path += urllib.quote(environ.get("PATH_INFO", ""))
        self.uri = self.path
        self.arguments = {}
        self.query = environ.get("QUERY_STRING", "")
        if self.query:
            self.uri += "?" + self.query
            arguments = parse_qs_bytes(native_str(self.query))
            for name, values in arguments.iteritems():
                values = [v for v in values if v]
                if values:
                    self.arguments[name] = values
        self.version = "HTTP/1.1"
        self.headers = httputil.HTTPHeaders()
        if environ.get("CONTENT_TYPE"):
            self.headers["Content-Type"] = environ["CONTENT_TYPE"]
        if environ.get("CONTENT_LENGTH"):
            self.headers["Content-Length"] = environ["CONTENT_LENGTH"]
        for key in environ:
            if key.startswith("HTTP_"):
                self.headers[key[5:].replace("_", "-")] = environ[key]
        if self.headers.get("Content-Length"):
            self.body = environ["wsgi.input"].read(
                int(self.headers["Content-Length"]))
        else:
            self.body = ""
        self.protocol = environ["wsgi.url_scheme"]
        self.remote_ip = environ.get("REMOTE_ADDR", "")
        if environ.get("HTTP_HOST"):
            self.host = environ["HTTP_HOST"]
        else:
            self.host = environ["SERVER_NAME"]

        # Parse request body
        self.files = {}
        content_type = self.headers.get("Content-Type", "")
        if content_type.startswith("application/x-www-form-urlencoded"):
            for name, values in parse_qs_bytes(native_str(
                    self.body)).iteritems():
                self.arguments.setdefault(name, []).extend(values)
        elif content_type.startswith("multipart/form-data"):
            if 'boundary=' in content_type:
                boundary = content_type.split('boundary=', 1)[1]
                if boundary:
                    httputil.parse_multipart_form_data(utf8(boundary),
                                                       self.body,
                                                       self.arguments,
                                                       self.files)
            else:
                logging.warning("Invalid multipart/form-data")

        self._start_time = monotime()
        self._finish_time = None
Code Example #13
File: wsgi.py  Project: athul04/honeything
    def __init__(self, environ):
        """Parses the given WSGI environ to construct the request."""
        self.method = environ["REQUEST_METHOD"]
        self.path = urllib.quote(environ.get("SCRIPT_NAME", ""))
        self.path += urllib.quote(environ.get("PATH_INFO", ""))
        self.uri = self.path
        self.arguments = {}
        self.query = environ.get("QUERY_STRING", "")
        if self.query:
            self.uri += "?" + self.query
            arguments = parse_qs_bytes(native_str(self.query))
            for name, values in arguments.iteritems():
                values = [v for v in values if v]
                if values:
                    self.arguments[name] = values
        self.version = "HTTP/1.1"
        self.headers = httputil.HTTPHeaders()
        if environ.get("CONTENT_TYPE"):
            self.headers["Content-Type"] = environ["CONTENT_TYPE"]
        if environ.get("CONTENT_LENGTH"):
            self.headers["Content-Length"] = environ["CONTENT_LENGTH"]
        for key in environ:
            if key.startswith("HTTP_"):
                self.headers[key[5:].replace("_", "-")] = environ[key]
        if self.headers.get("Content-Length"):
            self.body = environ["wsgi.input"].read(
                int(self.headers["Content-Length"]))
        else:
            self.body = ""
        self.protocol = environ["wsgi.url_scheme"]
        self.remote_ip = environ.get("REMOTE_ADDR", "")
        if environ.get("HTTP_HOST"):
            self.host = environ["HTTP_HOST"]
        else:
            self.host = environ["SERVER_NAME"]

        # Parse request body
        self.files = {}
        content_type = self.headers.get("Content-Type", "")
        if content_type.startswith("application/x-www-form-urlencoded"):
            for name, values in parse_qs_bytes(native_str(self.body)).iteritems():
                self.arguments.setdefault(name, []).extend(values)
        elif content_type.startswith("multipart/form-data"):
            if 'boundary=' in content_type:
                boundary = content_type.split('boundary=', 1)[1]
                if boundary:
                    httputil.parse_multipart_form_data(
                        utf8(boundary), self.body, self.arguments, self.files)
            else:
                logging.warning("Invalid multipart/form-data")

        self._start_time = monotime()
        self._finish_time = None
Code Example #14
File: simple_httpclient.py  Project: prahlad574/my
 def _on_body(self, data):
     if self._timeout is not None:
         self.io_loop.remove_timeout(self._timeout)
         self._timeout = None
     original_request = getattr(self.request, "original_request",
                                self.request)
     if (self.request.follow_redirects and self.request.max_redirects > 0
             and self.code in (301, 302, 303, 307)):
         new_request = copy.copy(self.request)
         new_request.url = urlparse.urljoin(self.request.url,
                                            self.headers["Location"])
         new_request.max_redirects -= 1
         del new_request.headers["Host"]
         # http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.4
         # client SHOULD make a GET request
         if self.code == 303:
             new_request.method = "GET"
             new_request.body = None
             for h in [
                     "Content-Length", "Content-Type", "Content-Encoding",
                     "Transfer-Encoding"
             ]:
                 try:
                     del self.request.headers[h]
                 except KeyError:
                     pass
         new_request.original_request = original_request
         final_callback = self.final_callback
         self.final_callback = None
         self._release()
         self.client.fetch(new_request, final_callback)
         self.stream.close()
         return
     if self._decompressor:
         data = self._decompressor.decompress(data)
     if self.request.streaming_callback:
         if self.chunks is None:
             # if chunks is not None, we already called streaming_callback
             # in _on_chunk_data
             self.request.streaming_callback(data)
         buffer = BytesIO()
     else:
         buffer = BytesIO(data)  # TODO: don't require one big string?
     response = HTTPResponse(original_request,
                             self.code,
                             headers=self.headers,
                             request_time=monotime() - self.start_time,
                             buffer=buffer,
                             effective_url=self.request.url)
     self._run_callback(response)
     self.stream.close()
Code Example #15
File: simple_httpclient.py  Project: prahlad574/my
 def cleanup(self):
     try:
         yield
     except Exception, e:
         logging.warning("uncaught exception", exc_info=True)
         self._run_callback(
             HTTPResponse(
                 self.request,
                 599,
                 error=e,
                 request_time=monotime() - self.start_time,
             ))
         if hasattr(self, "stream"):
             self.stream.close()
Code Example #16
    def __init__(self,
                 method,
                 uri,
                 version="HTTP/1.0",
                 headers=None,
                 body=None,
                 remote_ip=None,
                 protocol=None,
                 host=None,
                 files=None,
                 connection=None):
        self.method = method
        self.uri = uri
        self.version = version
        self.headers = headers or httputil.HTTPHeaders()
        self.body = body or ""
        if connection and connection.xheaders:
            # Squid uses X-Forwarded-For, others use X-Real-Ip
            self.remote_ip = self.headers.get(
                "X-Real-Ip", self.headers.get("X-Forwarded-For", remote_ip))
            if not self._valid_ip(self.remote_ip):
                self.remote_ip = remote_ip
            # AWS uses X-Forwarded-Proto
            self.protocol = self.headers.get(
                "X-Scheme", self.headers.get("X-Forwarded-Proto", protocol))
            if self.protocol not in ("http", "https"):
                self.protocol = "http"
        else:
            self.remote_ip = remote_ip
            if protocol:
                self.protocol = protocol
            elif connection and isinstance(connection.stream,
                                           iostream.SSLIOStream):
                self.protocol = "https"
            else:
                self.protocol = "http"
        self.host = host or self.headers.get("Host") or "127.0.0.1"
        self.files = files or {}
        self.connection = connection
        self._start_time = monotime()
        self._finish_time = None

        self.path, sep, self.query = uri.partition('?')
        arguments = parse_qs_bytes(self.query)
        self.arguments = {}
        for name, values in arguments.iteritems():
            values = [v for v in values if v]
            if values:
                self.arguments[name] = values
Code Example #17
 def _on_body(self, data):
     if self._timeout is not None:
         self.io_loop.remove_timeout(self._timeout)
         self._timeout = None
     original_request = getattr(self.request, "original_request", self.request)
     if self.request.follow_redirects and self.request.max_redirects > 0 and self.code in (301, 302, 303, 307):
         new_request = copy.copy(self.request)
         new_request.url = urlparse.urljoin(self.request.url, self.headers["Location"])
         new_request.max_redirects -= 1
         del new_request.headers["Host"]
         # http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.4
         # client SHOULD make a GET request
         if self.code == 303:
             new_request.method = "GET"
             new_request.body = None
             for h in ["Content-Length", "Content-Type", "Content-Encoding", "Transfer-Encoding"]:
                 try:
                     del self.request.headers[h]
                 except KeyError:
                     pass
         new_request.original_request = original_request
         final_callback = self.final_callback
         self.final_callback = None
         self._release()
         self.client.fetch(new_request, final_callback)
         self.stream.close()
         return
     if self._decompressor:
         data = self._decompressor.decompress(data)
     if self.request.streaming_callback:
         if self.chunks is None:
             # if chunks is not None, we already called streaming_callback
             # in _on_chunk_data
             self.request.streaming_callback(data)
         buffer = BytesIO()
     else:
         buffer = BytesIO(data)  # TODO: don't require one big string?
     response = HTTPResponse(
         original_request,
         self.code,
         headers=self.headers,
         request_time=monotime() - self.start_time,
         buffer=buffer,
         effective_url=self.request.url,
     )
     self._run_callback(response)
     self.stream.close()
Code Example #18
File: database.py  Project: prahlad574/my
    def __init__(self,
                 host,
                 database,
                 user=None,
                 password=None,
                 max_idle_time=7 * 3600):
        self.host = host
        self.database = database
        self.max_idle_time = max_idle_time

        args = dict(conv=CONVERSIONS,
                    use_unicode=True,
                    charset="utf8",
                    db=database,
                    init_command='SET time_zone = "+0:00"',
                    sql_mode="TRADITIONAL")
        if user is not None:
            args["user"] = user
        if password is not None:
            args["passwd"] = password

        # We accept a path to a MySQL socket file or a host(:port) string
        if "/" in host:
            args["unix_socket"] = host
        else:
            self.socket = None
            pair = host.split(":")
            if len(pair) == 2:
                args["host"] = pair[0]
                args["port"] = int(pair[1])
            else:
                args["host"] = host
                args["port"] = 3306

        self._db = None
        self._db_args = args
        self._last_use_time = monotime()
        try:
            self.reconnect()
        except Exception:
            logging.error("Cannot connect to MySQL on %s",
                          self.host,
                          exc_info=True)
Code Example #19
File: httpserver.py  Project: Benozo/catawampus
    def __init__(self, method, uri, version="HTTP/1.0", headers=None,
                 body=None, remote_ip=None, protocol=None, host=None,
                 files=None, connection=None):
        self.method = method
        self.uri = uri
        self.version = version
        self.headers = headers or httputil.HTTPHeaders()
        self.body = body or ""
        if connection and connection.xheaders:
            # Squid uses X-Forwarded-For, others use X-Real-Ip
            self.remote_ip = self.headers.get(
                "X-Real-Ip", self.headers.get("X-Forwarded-For", remote_ip))
            if not self._valid_ip(self.remote_ip):
                self.remote_ip = remote_ip
            # AWS uses X-Forwarded-Proto
            self.protocol = self.headers.get(
                "X-Scheme", self.headers.get("X-Forwarded-Proto", protocol))
            if self.protocol not in ("http", "https"):
                self.protocol = "http"
        else:
            self.remote_ip = remote_ip
            if protocol:
                self.protocol = protocol
            elif connection and isinstance(connection.stream,
                                           iostream.SSLIOStream):
                self.protocol = "https"
            else:
                self.protocol = "http"
        self.host = host or self.headers.get("Host") or "127.0.0.1"
        self.files = files or {}
        self.connection = connection
        self._start_time = monotime()
        self._finish_time = None

        self.path, sep, self.query = uri.partition('?')
        arguments = parse_qs_bytes(self.query)
        self.arguments = {}
        for name, values in arguments.iteritems():
            values = [v for v in values if v]
            if values:
                self.arguments[name] = values
Code Example #20
File: curl_httpclient.py  Project: prahlad574/my
 def _finish(self, curl, curl_error=None, curl_message=None):
     info = curl.info
     curl.info = None
     self._multi.remove_handle(curl)
     self._free_list.append(curl)
     buffer = info["buffer"]
     if curl_error:
         error = CurlError(curl_error, curl_message)
         code = error.code
         effective_url = None
         buffer.close()
         buffer = None
     else:
         error = None
         code = curl.getinfo(pycurl.HTTP_CODE)
         effective_url = curl.getinfo(pycurl.EFFECTIVE_URL)
         buffer.seek(0)
     # the various curl timings are documented at
     # http://curl.haxx.se/libcurl/c/curl_easy_getinfo.html
     time_info = dict(
         queue=info["curl_start_time"] - info["request"].start_time,
         namelookup=curl.getinfo(pycurl.NAMELOOKUP_TIME),
         connect=curl.getinfo(pycurl.CONNECT_TIME),
         pretransfer=curl.getinfo(pycurl.PRETRANSFER_TIME),
         starttransfer=curl.getinfo(pycurl.STARTTRANSFER_TIME),
         total=curl.getinfo(pycurl.TOTAL_TIME),
         redirect=curl.getinfo(pycurl.REDIRECT_TIME),
     )
     try:
         info["callback"](HTTPResponse(request=info["request"],
                                       code=code,
                                       headers=info["headers"],
                                       buffer=buffer,
                                       effective_url=effective_url,
                                       error=error,
                                       request_time=monotime() -
                                       info["curl_start_time"],
                                       time_info=time_info))
     except Exception:
         self.handle_callback_exception(info["callback"])
Code Example #21
File: curl_httpclient.py  Project: Benozo/catawampus
 def _finish(self, curl, curl_error=None, curl_message=None):
     info = curl.info
     curl.info = None
     self._multi.remove_handle(curl)
     self._free_list.append(curl)
     buffer = info["buffer"]
     if curl_error:
         error = CurlError(curl_error, curl_message)
         code = error.code
         effective_url = None
         buffer.close()
         buffer = None
     else:
         error = None
         code = curl.getinfo(pycurl.HTTP_CODE)
         effective_url = curl.getinfo(pycurl.EFFECTIVE_URL)
         buffer.seek(0)
     # the various curl timings are documented at
     # http://curl.haxx.se/libcurl/c/curl_easy_getinfo.html
     time_info = dict(
         queue=info["curl_start_time"] - info["request"].start_time,
         namelookup=curl.getinfo(pycurl.NAMELOOKUP_TIME),
         connect=curl.getinfo(pycurl.CONNECT_TIME),
         pretransfer=curl.getinfo(pycurl.PRETRANSFER_TIME),
         starttransfer=curl.getinfo(pycurl.STARTTRANSFER_TIME),
         total=curl.getinfo(pycurl.TOTAL_TIME),
         redirect=curl.getinfo(pycurl.REDIRECT_TIME),
         )
     try:
         info["callback"](HTTPResponse(
             request=info["request"], code=code, headers=info["headers"],
             buffer=buffer, effective_url=effective_url, error=error,
             request_time=monotime() - info["curl_start_time"],
             time_info=time_info))
     except Exception:
         self.handle_callback_exception(info["callback"])
Code Example #22
 def _on_timeout(self):
     self._timeout = None
     self._run_callback(
         HTTPResponse(self.request, 599, request_time=monotime() - self.start_time, error=HTTPError(599, "Timeout"))
     )
     self.stream.close()
Code Example #23
File: httpserver.py  Project: Benozo/catawampus
 def finish(self):
     """Finishes this HTTP request on the open connection."""
     self.connection.finish()
     self._finish_time = monotime()
Code Example #24
File: httpserver.py  Project: Benozo/catawampus
 def request_time(self):
     """Returns the amount of time it took for this request to execute."""
     if self._finish_time is None:
         return monotime() - self._start_time
     else:
         return self._finish_time - self._start_time
Code Example #25
 def request_time(self):
     """Returns the amount of time it took for this request to execute."""
     if self._finish_time is None:
         return monotime() - self._start_time
     else:
         return self._finish_time - self._start_time
Code Example #26
File: simple_httpclient.py  Project: prahlad574/my
 def _on_close(self):
     self._run_callback(
         HTTPResponse(self.request,
                      599,
                      request_time=monotime() - self.start_time,
                      error=HTTPError(599, "Connection closed")))
Code Example #27
File: ioloop.py  Project: athul04/honeything
    def start(self):
        """Starts the I/O loop.

        The loop will run until one of the I/O handlers calls stop(), which
        will make the loop stop after the current event iteration completes.
        """
        if self._stopped:
            self._stopped = False
            return
        self._thread_ident = thread.get_ident()
        self._running = True
        while True:
            poll_timeout = 3600.0

            # Prevent IO event starvation by delaying new callbacks
            # to the next iteration of the event loop.
            with self._callback_lock:
                callbacks = self._callbacks
                self._callbacks = []
            for callback in callbacks:
                self._run_callback(callback)

            if self._timeouts:
                now = monotime()
                while self._timeouts:
                    if self._timeouts[0].callback is None:
                        # the timeout was cancelled
                        heapq.heappop(self._timeouts)
                    elif self._timeouts[0].deadline <= now:
                        timeout = heapq.heappop(self._timeouts)
                        self._run_callback(timeout.callback)
                    else:
                        seconds = self._timeouts[0].deadline - now
                        poll_timeout = min(seconds, poll_timeout)
                        break

            if self._callbacks:
                # If any callbacks or timeouts called add_callback,
                # we don't want to wait in poll() before we run them.
                poll_timeout = 0.0

            if not self._running:
                break

            if self._blocking_signal_threshold is not None:
                # clear alarm so it doesn't fire while poll is waiting for
                # events.
                signal.setitimer(signal.ITIMER_REAL, 0, 0)

            try:
                event_pairs = self._impl.poll(poll_timeout)
            except Exception, e:
                # Depending on python version and IOLoop implementation,
                # different exception types may be thrown and there are
                # two ways EINTR might be signaled:
                # * e.errno == errno.EINTR
                # * e.args is like (errno.EINTR, 'Interrupted system call')
                if (getattr(e, 'errno', None) == errno.EINTR or
                    (isinstance(getattr(e, 'args', None), tuple) and
                     len(e.args) == 2 and e.args[0] == errno.EINTR)):
                    continue
                else:
                    raise

            if self._blocking_signal_threshold is not None:
                signal.setitimer(signal.ITIMER_REAL,
                                 self._blocking_signal_threshold, 0)

            # Pop one fd at a time from the set of pending fds and run
            # its handler. Since that handler may perform actions on
            # other file descriptors, there may be reentrant calls to
            # this IOLoop that update self._events
            self._events.update(event_pairs)
            while self._events:
                fd, events = self._events.popitem()
                try:
                    self._handlers[fd](fd, events)
                except (OSError, IOError), e:
                    if e.args[0] == errno.EPIPE:
                        # Happens when the client closes the connection
                        pass
                    else:
                        #logging.error("Exception in I/O handler for fd %s",
                        #              fd, exc_info=True)
                        ht.logger.error("Exception in I/O handler for fd %s",
                                      fd, exc_info=True)
                except Exception:
                    #logging.error("Exception in I/O handler for fd %s",
                    #              fd, exc_info=True)
                    ht.logger.error("Exception in I/O handler for fd %s",
                                  fd, exc_info=True)
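The start() docstring above says the loop runs until one of the handlers calls stop(). A minimal, hypothetical driver for this loop is sketched below; the import path and the IOLoop.instance() accessor are assumptions (the excerpt comes from an ioloop.py module), and monotime() is the helper discussed after Code Example #3.

from tornado.ioloop import IOLoop  # import path assumed

loop = IOLoop.instance()
# Schedule a timeout that stops the loop after roughly five seconds,
# using the monotonic add_timeout keyword seen in the other excerpts.
loop.add_timeout(monotime() + 5.0, loop.stop, monotonic=True)
loop.start()  # blocks until the timeout fires and calls stop()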
Code Example #28
 def finish(self):
     """Finishes this HTTP request on the open connection."""
     self.connection.finish()
     self._finish_time = monotime()
Code Example #29
File: twisted.py  Project: prahlad574/my
 def seconds(self):
     return monotime()
Code Example #30
    def start(self):
        """Starts the I/O loop.

        The loop will run until one of the I/O handlers calls stop(), which
        will make the loop stop after the current event iteration completes.
        """
        if self._stopped:
            self._stopped = False
            return
        self._thread_ident = thread.get_ident()
        self._running = True
        while True:
            poll_timeout = 3600.0

            # Prevent IO event starvation by delaying new callbacks
            # to the next iteration of the event loop.
            with self._callback_lock:
                callbacks = self._callbacks
                self._callbacks = []
            for callback in callbacks:
                self._run_callback(callback)

            if self._timeouts:
                now = monotime()
                while self._timeouts:
                    if self._timeouts[0].callback is None:
                        # the timeout was cancelled
                        heapq.heappop(self._timeouts)
                    elif self._timeouts[0].deadline <= now:
                        timeout = heapq.heappop(self._timeouts)
                        self._run_callback(timeout.callback)
                    else:
                        seconds = self._timeouts[0].deadline - now
                        poll_timeout = min(seconds, poll_timeout)
                        break

            if self._callbacks:
                # If any callbacks or timeouts called add_callback,
                # we don't want to wait in poll() before we run them.
                poll_timeout = 0.0

            if not self._running:
                break

            if self._blocking_signal_threshold is not None:
                # clear alarm so it doesn't fire while poll is waiting for
                # events.
                signal.setitimer(signal.ITIMER_REAL, 0, 0)

            try:
                event_pairs = self._impl.poll(poll_timeout)
            except Exception, e:
                # Depending on python version and IOLoop implementation,
                # different exception types may be thrown and there are
                # two ways EINTR might be signaled:
                # * e.errno == errno.EINTR
                # * e.args is like (errno.EINTR, 'Interrupted system call')
                if (getattr(e, 'errno', None) == errno.EINTR or
                    (isinstance(getattr(e, 'args', None), tuple)
                     and len(e.args) == 2 and e.args[0] == errno.EINTR)):
                    continue
                else:
                    raise

            if self._blocking_signal_threshold is not None:
                signal.setitimer(signal.ITIMER_REAL,
                                 self._blocking_signal_threshold, 0)

            # Pop one fd at a time from the set of pending fds and run
            # its handler. Since that handler may perform actions on
            # other file descriptors, there may be reentrant calls to
            # this IOLoop that update self._events
            self._events.update(event_pairs)
            while self._events:
                fd, events = self._events.popitem()
                try:
                    self._handlers[fd](fd, events)
                except (OSError, IOError), e:
                    if e.args[0] == errno.EPIPE:
                        # Happens when the client closes the connection
                        pass
                    else:
                        #logging.error("Exception in I/O handler for fd %s",
                        #              fd, exc_info=True)
                        ht.logger.error("Exception in I/O handler for fd %s",
                                        fd,
                                        exc_info=True)
                except Exception:
                    #logging.error("Exception in I/O handler for fd %s",
                    #              fd, exc_info=True)
                    ht.logger.error("Exception in I/O handler for fd %s",
                                    fd,
                                    exc_info=True)
Code Example #31
 def start(self):
     """Starts the timer."""
     self._running = True
     self._next_timeout = monotime()
     self._schedule_next()
Code Example #32
    def __init__(self, io_loop, client, request, release_callback, final_callback, max_buffer_size):
        self.start_time = monotime()
        self.io_loop = io_loop
        self.client = client
        self.request = request
        self.release_callback = release_callback
        self.final_callback = final_callback
        self.code = None
        self.headers = None
        self.chunks = None
        self._decompressor = None
        # Timeout handle returned by IOLoop.add_timeout
        self._timeout = None
        with stack_context.StackContext(self.cleanup):
            parsed = urlparse.urlsplit(_unicode(self.request.url))
            if ssl is None and parsed.scheme == "https":
                raise ValueError("HTTPS requires either python2.6+ or " "curl_httpclient")
            if parsed.scheme not in ("http", "https"):
                raise ValueError("Unsupported url scheme: %s" % self.request.url)
            # urlsplit results have hostname and port results, but they
            # didn't support ipv6 literals until python 2.7.
            netloc = parsed.netloc
            if "@" in netloc:
                userpass, _, netloc = netloc.rpartition("@")
            match = re.match(r"^(.+):(\d+)$", netloc)
            if match:
                host = match.group(1)
                port = int(match.group(2))
            else:
                host = netloc
                port = 443 if parsed.scheme == "https" else 80
            if re.match(r"^\[.*\]$", host):
                # raw ipv6 addresses in urls are enclosed in brackets
                host = host[1:-1]
            parsed_hostname = host  # save final parsed host for _on_connect
            if self.client.hostname_mapping is not None:
                host = self.client.hostname_mapping.get(host, host)

            if request.allow_ipv6:
                af = socket.AF_UNSPEC
            else:
                # We only try the first IP we get from getaddrinfo,
                # so restrict to ipv4 by default.
                af = socket.AF_INET

            addrinfo = socket.getaddrinfo(host, port, af, socket.SOCK_STREAM, 0, 0)
            af, socktype, proto, canonname, sockaddr = addrinfo[0]

            if parsed.scheme == "https":
                ssl_options = {}
                if request.validate_cert:
                    ssl_options["cert_reqs"] = ssl.CERT_REQUIRED
                if request.ca_certs is not None:
                    ssl_options["ca_certs"] = request.ca_certs
                else:
                    ssl_options["ca_certs"] = _DEFAULT_CA_CERTS
                if request.client_key is not None:
                    ssl_options["keyfile"] = request.client_key
                if request.client_cert is not None:
                    ssl_options["certfile"] = request.client_cert

                # SSL interoperability is tricky.  We want to disable
                # SSLv2 for security reasons; it wasn't disabled by default
                # until openssl 1.0.  The best way to do this is to use
                # the SSL_OP_NO_SSLv2, but that wasn't exposed to python
                # until 3.2.  Python 2.7 adds the ciphers argument, which
                # can also be used to disable SSLv2.  As a last resort
                # on python 2.6, we set ssl_version to SSLv3.  This is
                # more narrow than we'd like since it also breaks
                # compatibility with servers configured for TLSv1 only,
                # but nearly all servers support SSLv3:
                # http://blog.ivanristic.com/2011/09/ssl-survey-protocol-support.html
                if sys.version_info >= (2, 7):
                    ssl_options["ciphers"] = "DEFAULT:!SSLv2"
                else:
                    # This is really only necessary for pre-1.0 versions
                    # of openssl, but python 2.6 doesn't expose version
                    # information.
                    ssl_options["ssl_version"] = ssl.PROTOCOL_SSLv3

                self.stream = SSLIOStream(
                    socket.socket(af, socktype, proto),
                    io_loop=self.io_loop,
                    ssl_options=ssl_options,
                    max_buffer_size=max_buffer_size,
                )
            else:
                self.stream = IOStream(
                    socket.socket(af, socktype, proto), io_loop=self.io_loop, max_buffer_size=max_buffer_size
                )
            timeout = min(request.connect_timeout, request.request_timeout)
            if timeout:
                self._timeout = self.io_loop.add_timeout(self.start_time + timeout, self._on_timeout, monotonic=True)
            self.stream.set_close_callback(self._on_close)
            self.stream.connect(sockaddr, functools.partial(self._on_connect, parsed, parsed_hostname))
Code Example #33
File: ioloop.py  Project: athul04/honeything
 def start(self):
     """Starts the timer."""
     self._running = True
     self._next_timeout = monotime()
     self._schedule_next()
Code Example #34
    def __init__(self, url, method="GET", headers=None, body=None,
                 auth_username=None, auth_password=None,
                 connect_timeout=20.0, request_timeout=20.0,
                 if_modified_since=None, follow_redirects=True,
                 max_redirects=5, user_agent=None, use_gzip=True,
                 network_interface=None, streaming_callback=None,
                 header_callback=None, prepare_curl_callback=None,
                 proxy_host=None, proxy_port=None, proxy_username=None,
                 proxy_password='', allow_nonstandard_methods=False,
                 validate_cert=True, ca_certs=None,
                 allow_ipv6=None,
                 client_key=None, client_cert=None):
        """Creates an `HTTPRequest`.

        All parameters except `url` are optional.

        :arg string url: URL to fetch
        :arg string method: HTTP method, e.g. "GET" or "POST"
        :arg headers: Additional HTTP headers to pass on the request
        :type headers: `~tornado.httputil.HTTPHeaders` or `dict`
        :arg string auth_username: Username for HTTP "Basic" authentication
        :arg string auth_password: Password for HTTP "Basic" authentication
        :arg float connect_timeout: Timeout for initial connection in seconds
        :arg float request_timeout: Timeout for entire request in seconds
        :arg datetime if_modified_since: Timestamp for ``If-Modified-Since``
           header
        :arg bool follow_redirects: Should redirects be followed automatically
           or return the 3xx response?
        :arg int max_redirects: Limit for `follow_redirects`
        :arg string user_agent: String to send as ``User-Agent`` header
        :arg bool use_gzip: Request gzip encoding from the server
        :arg string network_interface: Network interface to use for request
        :arg callable streaming_callback: If set, `streaming_callback` will
           be run with each chunk of data as it is received, and
           `~HTTPResponse.body` and `~HTTPResponse.buffer` will be empty in
           the final response.
        :arg callable header_callback: If set, `header_callback` will
           be run with each header line as it is received, and
           `~HTTPResponse.headers` will be empty in the final response.
        :arg callable prepare_curl_callback: If set, will be called with
           a `pycurl.Curl` object to allow the application to make additional
           `setopt` calls.
        :arg string proxy_host: HTTP proxy hostname.  To use proxies,
           `proxy_host` and `proxy_port` must be set; `proxy_username` and
           `proxy_password` are optional.  Proxies are currently only
           supported with `curl_httpclient`.
        :arg int proxy_port: HTTP proxy port
        :arg string proxy_username: HTTP proxy username
        :arg string proxy_password: HTTP proxy password
        :arg bool allow_nonstandard_methods: Allow unknown values for `method`
           argument?
        :arg bool validate_cert: For HTTPS requests, validate the server's
           certificate?
        :arg string ca_certs: filename of CA certificates in PEM format,
           or None to use defaults.  Note that in `curl_httpclient`, if
           any request uses a custom `ca_certs` file, they all must (they
           don't have to all use the same `ca_certs`, but it's not possible
           to mix requests with `ca_certs` and requests that use the defaults).
        :arg bool allow_ipv6: Use IPv6 when available?  Default is false in
           `simple_httpclient` and true in `curl_httpclient`
        :arg string client_key: Filename for client SSL key, if any
        :arg string client_cert: Filename for client SSL certificate, if any
        """
        if headers is None:
            headers = httputil.HTTPHeaders()
        if if_modified_since:
            timestamp = calendar.timegm(if_modified_since.utctimetuple())
            headers["If-Modified-Since"] = email.utils.formatdate(
                timestamp, localtime=False, usegmt=True)
        self.proxy_host = proxy_host
        self.proxy_port = proxy_port
        self.proxy_username = proxy_username
        self.proxy_password = proxy_password
        self.url = url
        self.method = method
        self.headers = headers
        self.body = utf8(body)
        self.auth_username = auth_username
        self.auth_password = auth_password
        self.connect_timeout = connect_timeout
        self.request_timeout = request_timeout
        self.follow_redirects = follow_redirects
        self.max_redirects = max_redirects
        self.user_agent = user_agent
        self.use_gzip = use_gzip
        self.network_interface = network_interface
        self.streaming_callback = streaming_callback
        self.header_callback = header_callback
        self.prepare_curl_callback = prepare_curl_callback
        self.allow_nonstandard_methods = allow_nonstandard_methods
        self.validate_cert = validate_cert
        self.ca_certs = ca_certs
        self.allow_ipv6 = allow_ipv6
        self.client_key = client_key
        self.client_cert = client_cert
        self.start_time = monotime()
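The docstring above doubles as the option reference for this HTTPRequest class. A hypothetical construction exercising a few of the documented options follows; the URL, timeouts, and user agent are made up for illustration, and the import path is assumed from standard Tornado:

from tornado.httpclient import HTTPRequest  # import path assumed

# Illustrative values only; any keyword argument documented above
# could be supplied here in the same way.
request = HTTPRequest("http://example.com/status",
                      method="GET",
                      connect_timeout=5.0,
                      request_timeout=10.0,
                      follow_redirects=True,
                      user_agent="example-client/1.0")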
Code Example #35
File: twisted.py  Project: Benozo/catawampus
 def seconds(self):
     return monotime()
Code Example #36
 def _on_close(self):
     self._run_callback(
         HTTPResponse(
             self.request, 599, request_time=monotime() - self.start_time, error=HTTPError(599, "Connection closed")
         )
     )
Code Example #37
File: simple_httpclient.py  Project: prahlad574/my
    def __init__(self, io_loop, client, request, release_callback,
                 final_callback, max_buffer_size):
        self.start_time = monotime()
        self.io_loop = io_loop
        self.client = client
        self.request = request
        self.release_callback = release_callback
        self.final_callback = final_callback
        self.code = None
        self.headers = None
        self.chunks = None
        self._decompressor = None
        # Timeout handle returned by IOLoop.add_timeout
        self._timeout = None
        with stack_context.StackContext(self.cleanup):
            parsed = urlparse.urlsplit(_unicode(self.request.url))
            if ssl is None and parsed.scheme == "https":
                raise ValueError("HTTPS requires either python2.6+ or "
                                 "curl_httpclient")
            if parsed.scheme not in ("http", "https"):
                raise ValueError("Unsupported url scheme: %s" %
                                 self.request.url)
            # urlsplit results have hostname and port results, but they
            # didn't support ipv6 literals until python 2.7.
            netloc = parsed.netloc
            if "@" in netloc:
                userpass, _, netloc = netloc.rpartition("@")
            match = re.match(r'^(.+):(\d+)$', netloc)
            if match:
                host = match.group(1)
                port = int(match.group(2))
            else:
                host = netloc
                port = 443 if parsed.scheme == "https" else 80
            if re.match(r'^\[.*\]$', host):
                # raw ipv6 addresses in urls are enclosed in brackets
                host = host[1:-1]
            parsed_hostname = host  # save final parsed host for _on_connect
            if self.client.hostname_mapping is not None:
                host = self.client.hostname_mapping.get(host, host)

            if request.allow_ipv6:
                af = socket.AF_UNSPEC
            else:
                # We only try the first IP we get from getaddrinfo,
                # so restrict to ipv4 by default.
                af = socket.AF_INET

            addrinfo = socket.getaddrinfo(host, port, af, socket.SOCK_STREAM,
                                          0, 0)
            af, socktype, proto, canonname, sockaddr = addrinfo[0]

            if parsed.scheme == "https":
                ssl_options = {}
                if request.validate_cert:
                    ssl_options["cert_reqs"] = ssl.CERT_REQUIRED
                if request.ca_certs is not None:
                    ssl_options["ca_certs"] = request.ca_certs
                else:
                    ssl_options["ca_certs"] = _DEFAULT_CA_CERTS
                if request.client_key is not None:
                    ssl_options["keyfile"] = request.client_key
                if request.client_cert is not None:
                    ssl_options["certfile"] = request.client_cert

                # SSL interoperability is tricky.  We want to disable
                # SSLv2 for security reasons; it wasn't disabled by default
                # until openssl 1.0.  The best way to do this is to use
                # the SSL_OP_NO_SSLv2, but that wasn't exposed to python
                # until 3.2.  Python 2.7 adds the ciphers argument, which
                # can also be used to disable SSLv2.  As a last resort
                # on python 2.6, we set ssl_version to SSLv3.  This is
                # more narrow than we'd like since it also breaks
                # compatibility with servers configured for TLSv1 only,
                # but nearly all servers support SSLv3:
                # http://blog.ivanristic.com/2011/09/ssl-survey-protocol-support.html
                if sys.version_info >= (2, 7):
                    ssl_options["ciphers"] = "DEFAULT:!SSLv2"
                else:
                    # This is really only necessary for pre-1.0 versions
                    # of openssl, but python 2.6 doesn't expose version
                    # information.
                    ssl_options["ssl_version"] = ssl.PROTOCOL_SSLv3

                self.stream = SSLIOStream(socket.socket(af, socktype, proto),
                                          io_loop=self.io_loop,
                                          ssl_options=ssl_options,
                                          max_buffer_size=max_buffer_size)
            else:
                self.stream = IOStream(socket.socket(af, socktype, proto),
                                       io_loop=self.io_loop,
                                       max_buffer_size=max_buffer_size)
            timeout = min(request.connect_timeout, request.request_timeout)
            if timeout:
                self._timeout = self.io_loop.add_timeout(self.start_time +
                                                         timeout,
                                                         self._on_timeout,
                                                         monotonic=True)
            self.stream.set_close_callback(self._on_close)
            self.stream.connect(
                sockaddr,
                functools.partial(self._on_connect, parsed, parsed_hostname))