Example #1
File: opener.py  Project: Riamse/urllib3
    def urlopen(self, method, url, body=None, headers=None, retries=3, redirect=True, assert_same_host=True,
                timeout=None, pool_timeout=None, release_conn=None, **response_kw):
        """
        Same as :meth:`urllib3.connectionpool.HTTPConnectionPool.urlopen`
        with custom cross-host redirect logic and only sends the request-uri
        portion of the ``url``.

        The given ``url`` parameter must be absolute, such that an appropriate
        :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
        """
        if headers is None:
            headers = {k.lower(): v for (k, v) in self.headers.items()}
        headers.setdefault("cookie", "")
        for key, val in self.headers.items():
            headers.setdefault(key, val)
        # Build the updated Cookie string that will be sent with the HTTP request.
        # The cookie header may contain duplicate entries (e.g. k=a; k=b;)
        headers["cookie"] = self.headers.get("cookie", "") + headers["cookie"]
        # This will be resolved by putting the header in the SimpleCookie
        self.cookie_session.feed(self.headers)
        self.cookie_session.feed(headers)
        headers["cookie"] = self.cookie_session.extract()
        response = HTTPConnectionPool.urlopen(self, method, url, body, headers, retries, False, assert_same_host, timeout, pool_timeout,
            release_conn, **response_kw)
        self.cookie_session.feed(self.headers)
        self.cookie_session.feed(response.headers)
        self.cookie_session.feed(headers)
        headers["cookie"] = self.cookie_session.extract()
        redirect_location = redirect and response.get_redirect_location()
        if not redirect_location:
            return response
        if response.status == 303:
            method = "GET"
        return self.urlopen(method, redirect_location, body, headers, retries - 1, redirect, assert_same_host, timeout, pool_timeout,
            release_conn, **response_kw)
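This excerpt relies on a cookie_session helper with feed() and extract() methods that is not shown. Below is a minimal sketch of such a helper, assuming (as the comment above suggests) that it wraps http.cookies.SimpleCookie to de-duplicate cookie keys; the class name and exact behaviour are assumptions, not the project's actual code.

from http.cookies import SimpleCookie

class CookieSession(object):
    """Hypothetical helper: merges cookie headers and resolves duplicate keys."""

    def __init__(self):
        self._jar = SimpleCookie()

    def feed(self, headers):
        # Load the cookie value from a headers mapping; later values for the
        # same key overwrite earlier ones, which resolves duplicate entries.
        value = headers.get("cookie") or headers.get("set-cookie") or ""
        if value:
            self._jar.load(value)

    def extract(self):
        # Render the merged jar back into a single "k=v; k2=v2" header string.
        return "; ".join("%s=%s" % (key, morsel.value)
                         for key, morsel in self._jar.items())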
Example #2
        def _test(exception):
            pool = HTTPConnectionPool(host='localhost', maxsize=1, block=True)

            # Verify that the request succeeds after two attempts, and that the
            # connection is left on the response object, instead of being
            # released back into the pool.
            pool._make_request = _raise_once_make_request_function(exception)
            response = pool.urlopen('GET', '/', retries=1,
                                    release_conn=False, preload_content=False,
                                    chunked=True)
            self.assertEqual(pool.pool.qsize(), 0)
            self.assertEqual(pool.num_connections, 2)
            self.assertTrue(response.connection is not None)

            response.release_conn()
            self.assertEqual(pool.pool.qsize(), 1)
            self.assertTrue(response.connection is None)
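The _raise_once_make_request_function helper is defined elsewhere in the test suite and not shown here. The behaviour the test pins down matters for streaming callers: with release_conn=False and preload_content=False the caller owns the connection until it explicitly releases it. A minimal usage sketch against a hypothetical local server (host, port, and path are placeholders):

from urllib3 import HTTPConnectionPool

pool = HTTPConnectionPool("localhost", 8080, maxsize=1, block=True)
# Stream the body instead of preloading it, and keep the connection attached
# to the response so the pool cannot hand it out to another request yet.
response = pool.urlopen("GET", "/", preload_content=False, release_conn=False)
for chunk in response.stream(1024):
    pass  # consume the body
response.release_conn()  # the connection now goes back into the pool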
Example #3
class ConnectionPoolManager(LoggerMixin):
    def __init__(self,
                 host,
                 port,
                 certfile=None,
                 keyfile=None,
                 cacertfile=None,
                 force_ssl=False,
                 *args,
                 **kw):
        super(ConnectionPoolManager, self).__init__(*args, **kw)

        self.logger.debug("Creating ConnectionPoolManager for %s:%s", host,
                          port)

        if certfile or keyfile or force_ssl:
            # https://docs.python.org/2/library/ssl.html#ssl.SSLContext
            from ssl import SSLContext, PROTOCOL_SSLv23
            ssl_context = SSLContext(PROTOCOL_SSLv23)
            # Only load files that were actually supplied; load_cert_chain and
            # load_verify_locations raise if called with None paths.
            if certfile:
                ssl_context.load_cert_chain(certfile=certfile, keyfile=keyfile)
            if cacertfile:
                ssl_context.load_verify_locations(cafile=cacertfile)
            # https://docs.python.org/2/library/httplib.html
            self.__pool = HTTPSConnectionPool(host,
                                              port,
                                              maxsize=16,
                                              ssl_context=ssl_context)
        else:
            self.__pool = HTTPConnectionPool(host, port, maxsize=16)

    def request(self, method, path, body, headers, timeout):
        return Urllib3ResponseWrapper(
            self.__pool.urlopen(method,
                                path,
                                body,
                                headers,
                                timeout=timeout,
                                pool_timeout=30,
                                preload_content=False,
                                assert_same_host=False))
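A minimal usage sketch for this manager, assuming a plain-HTTP endpoint; LoggerMixin and Urllib3ResponseWrapper come from elsewhere in the project and are not shown in this excerpt, and the host and path below are placeholders.

manager = ConnectionPoolManager("api.example.com", 80)
# request() forwards to HTTPConnectionPool.urlopen with preload_content=False,
# so the caller is expected to read (stream) the wrapped response body itself.
wrapped = manager.request("GET", "/health", body=None,
                          headers={"Accept": "application/json"}, timeout=10)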
Example #4
File: http.py  Project: ruezetle/snuba
class HTTPBatchWriter(BatchWriter):
    def __init__(
        self,
        schema: TableSchema,
        host,
        port,
        encoder: Callable[[WriterTableRow], bytes],
        options=None,
        table_name=None,
        chunk_size: int = 1,
    ):
        """
        Builds a writer to send a batch to Clickhouse.

        :param schema: The dataset schema to take the table name from
        :param host: Clickhouse host
        :param port: Clickhouse port
        :param encoder: A function that will be applied to each row to turn it into bytes
        :param options: options passed to Clickhouse
        :param table_name: Overrides the table coming from the schema (generally used for uploading
            to temporary tables)
        :param chunk_size: The chunk size (in rows).
            We send data to the server with Transfer-Encoding: chunked. If 0 we send the entire
            content in one chunk.
        """
        self.__pool = HTTPConnectionPool(host, port)
        self.__options = options if options is not None else {}
        self.__table_name = table_name or schema.get_table_name()
        self.__chunk_size = chunk_size
        self.__encoder = encoder

    def _prepare_chunks(self,
                        rows: Iterable[WriterTableRow]) -> Iterable[bytes]:
        chunk = []
        for row in rows:
            chunk.append(self.__encoder(row))
            if self.__chunk_size and len(chunk) == self.__chunk_size:
                yield b"".join(chunk)
                chunk = []

        if chunk:
            yield b"".join(chunk)

    def write(self, rows: Iterable[WriterTableRow]):
        response = self.__pool.urlopen(
            "POST",
            "/?" + urlencode({
                **self.__options,
                "query":
                f"INSERT INTO {self.__table_name} FORMAT JSONEachRow",
            }),
            headers={
                "Connection": "keep-alive",
                "Accept-Encoding": "gzip,deflate"
            },
            body=self._prepare_chunks(rows),
            chunked=True,
        )

        if response.status != 200:
            # XXX: This should be switched to just parse the JSON body after
            # https://github.com/yandex/ClickHouse/issues/6272 is available.
            content = response.data.decode("utf8")
            details = CLICKHOUSE_ERROR_RE.match(content)
            if details is not None:
                code, type, message = details.groups()
                raise ClickhouseError(int(code), message)
            else:
                raise HTTPError(
                    f"Received unexpected {response.status} response: {content}"
                )
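The encoder parameter is any callable that turns a row into bytes. Below is a minimal sketch of an encoder matching the JSONEachRow insert used above, assuming rows are plain dicts (the shape of WriterTableRow is an assumption here).

import json

def json_each_row_encoder(row) -> bytes:
    # One JSON object per line, newline-terminated, as FORMAT JSONEachRow expects.
    return json.dumps(row).encode("utf-8") + b"\n"

# Hypothetical usage (8123 is ClickHouse's default HTTP port):
# writer = HTTPBatchWriter(schema, "localhost", 8123, json_each_row_encoder, chunk_size=100)
# writer.write(rows)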
Example #5
class DataBoxConnector(object):
    def __init__(self, host, port, user="", passwd=""):
        self.c = HTTPConnectionPool(host, port)
        url = url_prefix + "/metadata/authenticate"
        headers = {
            "content-type": "application/x-www-form-urlencoded",
        }

        params = {"user": user, "passwd": passwd}
        resp = self.c.urlopen("POST",
                              url,
                              body=urlencode(params),
                              headers=headers)

        jdata = json.loads(resp.data.decode("utf-8"))
        # jdata = json.loads(resp.data)
        self.auth_code = jdata["auth_code"]

    def _load_resp(self, resp):
        ctype = resp.headers["Content-Type"]

        #         print( resp.headers )

        code = resp.headers.get("Error-Code", None)
        reason = resp.headers.get("Error-Message", None)

        if ctype.startswith("application/json"):
            jdata = json.loads(resp.data.decode("utf-8"))
        elif ctype.startswith("application/pickle-bytes"):
            jdata = pickle.loads(resp.data)
        else:
            jdata = None

        return jdata, resp.status, code, reason

    def _info_query(self, product, ctype, params):
        url = url_prefix + "/info_query/{product}/{ctype}".format(
            product=product, ctype=ctype)

        # params["crs"] = "" if crs is None else crs
        params["format"] = "bytes"

        headers = {
            "content-type": "application/x-www-form-urlencoded",
            "auth_code": self.auth_code
        }

        #         print(params)

        resp = self.c.urlopen("POST",
                              url,
                              body=urlencode(params),
                              headers=headers)
        return self._load_resp(resp)

    def query_by_point(self, product, ctype, x, y, crs=None,
                       start_time=None, end_time=None):
        '''
        Return info on the cube containing point (x, y): bands, crs, bbox, res, size, xy, nctimes
        ctype: data product name
        x, y: point coordinates
        crs: projection of the coordinates; if None, defaults to EPSG:4326
        '''
        return self._info_query(
            product, ctype, {
                "x": x,
                "y": y,
                "start_time": "19600101" if start_time is None else start_time,
                "end_time": "20501231" if end_time is None else end_time
            })

    # info_by_bbox(self, minx, miny, maxx, maxy, start_time, end_time, fmt="json")
    def query_by_bbox(self,
                      product,
                      ctype,
                      bbox,
                      start_time=None,
                      end_time=None):
        return self._info_query(
            product,
            ctype,
            {
                "bbox": ",".join(map(lambda a: str(a), bbox)),
                # "times": "" if timeslice is None else timeslice
                "start_time": "19600101" if start_time is None else start_time,
                "end_time": "20501231" if end_time is None else end_time
            })

    def query_by_geom(self, product, ctype, geom, start_time, end_time):
        return self._info_query(
            product, ctype, {
                "geom": geom,
                "start_time": "19600101" if start_time is None else start_time,
                "end_time": "20501231" if end_time is None else end_time
            })

    def read_by_point(self, product, ctype, x, y, tif_file):
        url = url_prefix + "/read_point/{product}/{ctype}".format(
            product=product, ctype=ctype)

        params = {"x": x, "y": y}
        params["tif_file"] = tif_file

        headers = {
            "content-type": "application/x-www-form-urlencoded",
            "auth_code": self.auth_code
        }

        #         print(params)

        resp = self.c.urlopen("POST",
                              url,
                              body=urlencode(params),
                              headers=headers)
        return self._load_resp(resp)

    def read_by_geom(self, product, ctype, geom, tif_file):
        url = url_prefix + "/read_geom/{product}/{ctype}".format(
            product=product, ctype=ctype)

        params = {
            "geometry": geom,
        }
        #         params[ "geom_info" ] = json.dumps(geom_info, ensure_ascii=False)
        params["tif_file"] = tif_file

        headers = {
            "content-type": "application/x-www-form-urlencoded",
            "auth_code": self.auth_code
        }

        #         print(params)

        resp = self.c.urlopen("POST",
                              url,
                              body=urlencode(params),
                              headers=headers)
        return self._load_resp(resp)
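A usage sketch for this connector; the host, credentials, product, and ctype values below are placeholders, and url_prefix is assumed to be defined elsewhere in the module.

conn = DataBoxConnector("databox.example.com", 8080, user="demo", passwd="demo")
# Query cube metadata inside a bounding box; _load_resp returns
# (jdata, status, code, reason).
jdata, status, code, reason = conn.query_by_bbox(
    "MODIS", "NDVI", bbox=[110.0, 30.0, 111.0, 31.0],
    start_time="20200101", end_time="20201231")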
Example #6
class HtClient(object):
    def __init__(self, hostname, port=8060):
        self.c = HTTPConnectionPool(hostname, port)

    def erase_data(self, namespace, key):
        try:
            url = "/ht/del?ns=%s&key=%s" % (
                encode_key(namespace),
                encode_key(key),
            )
            resp = self.c.urlopen("DELETE", url)
            jdata = json.loads(resp.data.decode("utf-8"))
            return jdata.get("code", None) == 1
        except Exception as e:
            print(e)
        return False

    def erase_nss(self, namespace):
        try:
            url = "/ht/del?ns=%s" % (encode_key(namespace), )
            resp = self.c.urlopen("DELETE", url)
            jdata = json.loads(resp.data.decode("utf-8"))
            return jdata.get("code", None) == 1
        except Exception as e:
            print(e)
        return False

    def put_data(self,
                 namespace,
                 key,
                 content,
                 ctype=None,
                 ttl=DEFAULT_TTL,
                 overwrite="yes",
                 **kwargs):
        try:
            ctype = "" if ctype is None else ctype
            url = "/ht/put?ns=%s&key=%s&ctype=%s&ttl=%s&overwrite=%s" % (
                encode_key(namespace), encode_key(key), ctype, ttl, overwrite)

            cdata = kwargs
            headers = {"cdata": json.dumps(cdata, ensure_ascii=False)}

            resp = self.c.urlopen("PUT",
                                  str(url),
                                  body=content,
                                  headers=headers)
            jdata = json.loads(resp.data.decode("utf-8"))
            return jdata["code"] == 1
        except Exception as e:
            print_error(e)
        return False

    put = put_data

    def has_data(self, namespace, key):
        try:
            url = "/ht/get?ns=%s&key=%s&action=has" % (encode_key(namespace),
                                                       encode_key(key))
            resp = self.c.urlopen("GET", url)
            jdata = json.loads(resp.data.decode("utf-8"))
            return jdata.get("code", None) == 1
        except Exception as e:
            print(e)
        return False

    has = has_data

    def get_stat(self, node=None):
        try:
            url = "/ht/stat?node=%s" % (encode_key(node), )
            resp = self.c.urlopen("GET", url)
            jdata = json.loads(resp.data.decode("utf-8"))
            return jdata
        except Exception as e:
            print(e)
        return None

    stat = get_stat

    def get_nss(self, node=None):
        try:
            url = "/ht/stat/nss?node=%s" % (encode_key(node), )
            resp = self.c.urlopen("GET", url)
            jdata = json.loads(resp.data.decode("utf-8"))
            return jdata
        except Exception as e:
            print(e)
        return None

    nss = get_nss

    def get_nodes(self):
        try:
            url = "/ht/stat/nodes"
            resp = self.c.urlopen("GET", url)
            jdata = json.loads(resp.data.decode("utf-8"))
            return jdata
        except Exception as e:
            print(e)
        return None

    nodes = get_nodes

    def get_keys(self, namespace, node=None):
        return KeysIter(self.c, encode_key(namespace), node)

    keys = get_keys

    def get_data(self, namespace, key):
        try:
            url = "/ht/get?ns=%s&key=%s" % (encode_key(namespace),
                                            encode_key(key))
            resp = self.c.urlopen("GET", url)
            try:
                cdata = json.loads(resp.headers.get("cdata", "{}"))
            except Exception:
                cdata = {}
            options = {"cdata": cdata, "ctype": resp.headers.get("ctype", "")}
            return resp.data, cdata, options, resp.status
        except Exception as e:
            print(e)
        return None, {}, {}, 500

    get = get_data
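A usage sketch for this client; the host name below is a placeholder, and encode_key, DEFAULT_TTL, print_error, and KeysIter are assumed to be provided elsewhere in the module.

client = HtClient("ht.example.com", port=8060)
# Store a value under a namespace/key, then read it back.
if client.put("demo", "greeting", b"hello", ctype="text/plain"):
    data, cdata, options, status = client.get("demo", "greeting")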