def handle_error_response(self, rbody, rcode, resp, rheaders, stream_error=False):
    """Translate an API error payload into the matching exception instance.

    Returns the exception for the caller to raise; only a malformed
    response body (no usable "error" key) raises directly.
    """
    try:
        error_data = resp["error"]
    except (KeyError, TypeError):
        raise error.APIError(
            "Invalid response object from API: %r (HTTP response code "
            "was %d)" % (rbody, rcode),
            rbody,
            rcode,
            resp,
        )

    # Surface any server-side internal message alongside the public one.
    if "internal_message" in error_data:
        error_data["message"] += "\n\n" + error_data["internal_message"]

    util.log_info(
        "OpenAI API error received",
        error_code=error_data.get("code"),
        error_type=error_data.get("type"),
        error_message=error_data.get("message"),
        error_param=error_data.get("param"),
        stream_error=stream_error,
    )

    message = error_data.get("message")

    # Rate limits were previously coded as 400's with code 'rate_limit'
    if rcode == 429:
        return error.RateLimitError(message, rbody, rcode, resp, rheaders)
    if rcode in (400, 404, 415):
        return error.InvalidRequestError(
            message,
            error_data.get("param"),
            error_data.get("code"),
            rbody,
            rcode,
            resp,
            rheaders,
        )
    if rcode == 401:
        return error.AuthenticationError(message, rbody, rcode, resp, rheaders)
    if rcode == 403:
        return error.PermissionError(message, rbody, rcode, resp, rheaders)
    if rcode == 409:
        return error.TryAgain(message, rbody, rcode, resp, rheaders)
    if stream_error:
        # TODO: we will soon attach status codes to stream errors
        pieces = [message, "(Error occurred while streaming.)"]
        combined = " ".join(piece for piece in pieces if piece is not None)
        return error.APIError(combined, rbody, rcode, resp, rheaders)
    return error.APIError(message, rbody, rcode, resp, rheaders)
def create(cls, *args, timeout=None, **kwargs):
    """Create the resource, retrying while the backend reports TryAgain.

    Retries indefinitely when `timeout` is None; otherwise re-raises the
    TryAgain once `timeout` seconds have elapsed since the first attempt.
    """
    deadline = None if timeout is None else time.time() + timeout
    while True:
        try:
            return super().create(*args, **kwargs)
        except TryAgain as e:
            if deadline is not None and time.time() > deadline:
                raise
            util.log_info("Waiting for snapshot to warm up", error=e)
def generate(self, timeout=None, **params):
    """POST to this instance's /generate endpoint, retrying on TryAgain.

    Retries indefinitely when `timeout` is None; otherwise re-raises the
    TryAgain once `timeout` seconds have elapsed since the first attempt.
    """
    deadline = None if timeout is None else time.time() + timeout
    while True:
        try:
            return self.request(
                "post",
                self.instance_url() + "/generate",
                params,
                stream=params.get("stream"),
                plain_old_data=True,
            )
        except TryAgain as e:
            if deadline is not None and time.time() > deadline:
                raise
            util.log_info("Waiting for model to warm up", error=e)
def create(cls, *args, **kwargs):
    """
    Creates a new embedding for the provided input and parameters.

    See https://beta.openai.com/docs/api-reference/embeddings for a list
    of valid parameters.

    Keyword Args:
        timeout: optional number of seconds to keep retrying on TryAgain
            before re-raising; popped from kwargs, not sent to the API.
        model / engine: exactly one must be supplied.
        encoding_format: if omitted, base64 is requested for performance
            and transparently decoded back to a list of floats.

    Raises:
        InvalidRequestError: if neither 'engine' nor 'model' is given.
        TryAgain: if the model does not warm up within `timeout` seconds.
    """
    start = time.time()
    timeout = kwargs.pop("timeout", None)

    if kwargs.get("model") is None and kwargs.get("engine") is None:
        raise InvalidRequestError(
            "Must provide an 'engine' or 'model' parameter to create an Embedding.",
            param="engine",
        )

    user_provided_encoding_format = kwargs.get("encoding_format")

    # If encoding format was not explicitly specified, we opaquely use base64 for performance
    if not user_provided_encoding_format:
        kwargs["encoding_format"] = "base64"

    while True:
        try:
            response = super().create(*args, **kwargs)

            # If a user specifies base64, we'll just return the encoded string.
            # This is only for the default case.
            if not user_provided_encoding_format:
                for data in response.data:
                    # If an engine isn't using this optimization, don't do anything
                    # (isinstance, not type()==, so str subclasses also decode).
                    if isinstance(data["embedding"], str):
                        data["embedding"] = np.frombuffer(
                            base64.b64decode(data["embedding"]), dtype="float32"
                        ).tolist()

            return response
        except TryAgain as e:
            if timeout is not None and time.time() > start + timeout:
                raise
            util.log_info("Waiting for model to warm up", error=e)
def request_with_retries(self, method, url, headers, post_data=None, stream=False):
    """Issue the request, retrying with backoff while `_should_retry` allows.

    On final failure, re-raises the last connection error; on success,
    records request metrics and returns the response.
    """
    self._add_telemetry_header(headers)

    attempt = 0
    while True:
        request_start = _now_ms()

        try:
            response = self.request(method, url, headers, post_data, stream=stream)
            connection_error = None
        except error.APIConnectionError as e:
            connection_error = e
            response = None

        # Terminal case: hand back the response or surface the error.
        if not self._should_retry(response, connection_error, attempt):
            if response is None:
                raise connection_error
            self._record_request_metrics(response, request_start)
            return response

        if connection_error:
            util.log_warn(
                "Encountered a retryable error %s" % connection_error.user_message
            )

        attempt += 1
        sleep_time = self._sleep_time_seconds(attempt, response)
        util.log_info(
            (
                "Initiating retry %i for request %s %s after "
                "sleeping %.2f seconds." % (attempt, method, url, sleep_time)
            )
        )
        time.sleep(sleep_time)
def request_raw(
    self,
    method,
    url,
    params=None,
    supplied_headers=None,
    files=None,
    stream=False,
    request_id: Optional[str] = None,
) -> requests.Response:
    """Perform one HTTP call against the API and return the raw response.

    GET/DELETE params are query-encoded (None values dropped); POST/PUT
    params are JSON-encoded into the body. Any other method is rejected.
    """
    abs_url = "%s%s" % (self.api_base, url)
    data = None
    headers = {}

    if method in ("get", "delete"):
        if params:
            filtered = [(k, v) for k, v in params.items() if v is not None]
            abs_url = _build_api_url(abs_url, urlencode(filtered))
    elif method in {"post", "put"}:
        if params and files:
            raise ValueError(
                "At most one of params and files may be specified.")
        if params:
            data = json.dumps(params).encode()
            headers["Content-Type"] = "application/json"
    else:
        raise error.APIConnectionError(
            "Unrecognized HTTP method %r. This may indicate a bug in the "
            "OpenAI bindings. Please contact [email protected] for "
            "assistance." % (method,)
        )

    headers = self.request_headers(method, headers, request_id)
    if supplied_headers is not None:
        headers.update(supplied_headers)

    util.log_info("Request to OpenAI API", method=method, path=abs_url)
    util.log_debug("Post details", data=data, api_version=self.api_version)

    # Lazily create one requests session per thread and reuse it.
    if not hasattr(_thread_context, "session"):
        _thread_context.session = _make_session()

    try:
        result = _thread_context.session.request(
            method,
            abs_url,
            headers=headers,
            data=data,
            files=files,
            stream=stream,
            timeout=TIMEOUT_SECS,
        )
    except requests.exceptions.RequestException as e:
        raise error.APIConnectionError(
            "Error communicating with OpenAI") from e

    util.log_info(
        "OpenAI API response",
        path=abs_url,
        response_code=result.status_code,
        processing_ms=result.headers.get("OpenAI-Processing-Ms"),
    )
    # Don't read the whole stream for debug logging unless necessary.
    if openai.log == "debug":
        util.log_debug(
            "API response body", body=result.content, headers=result.headers
        )
    return result
def request_raw(
    self, method, url, params=None, supplied_headers=None, stream=False
):
    """
    Mechanism for issuing an API call.

    Resolves the API key (instance attribute first, then the module-level
    `openai.api_key`), encodes `params` per the HTTP method, and delegates
    the actual transport to `self._client.request_with_retries`.

    Returns a 5-tuple: (rbody, rcode, rheaders, stream, my_api_key).

    Raises:
        error.AuthenticationError: if no API key can be resolved.
        error.APIConnectionError: for an unrecognized HTTP method.
    """
    # Prefer a per-instance key; fall back to the module-level default.
    if self.api_key:
        my_api_key = self.api_key
    else:
        from openai import api_key

        my_api_key = api_key

    if my_api_key is None:
        raise error.AuthenticationError(
            "No API key provided. (HINT: set your API key using in code using "
            '"openai.api_key = <API-KEY>", or you can set the environment variable OPENAI_API_KEY=<API-KEY>). You can generate API keys '
            "in the OpenAI web interface. See https://onboard.openai.com "
            "for details, or email [email protected] if you have any "
            "questions."
        )

    abs_url = "%s%s" % (self.api_base, url)
    headers = {}
    compress = None
    progress_meter = False

    if method == "get" or method == "delete":
        # GET/DELETE: parameters go in the query string, no request body.
        if params:
            encoded_params = url_encode_params(params)
            abs_url = _build_api_url(abs_url, encoded_params)
        else:
            encoded_params = None
        post_data = None
    elif method in {"post", "put"}:
        if (
            supplied_headers is not None
            and supplied_headers.get("Content-Type") == "multipart/form-data"
        ):
            # Multipart upload: build the body (and boundary) ourselves.
            generator = MultipartDataGenerator()
            generator.add_params(params or {})
            post_data = generator.get_post_data()
            content_type = "multipart/form-data; boundary=%s" % (
                generator.boundary,
            )
            # We will overwrite Content-Type with our boundary-bearing value,
            # so drop the caller-supplied header before the merge below.
            supplied_headers.pop("Content-Type")
            progress_meter = True
            # compress = "gzip"  # gzip upload path kept but disabled
            compress = None
        else:
            # Default: JSON-encode the parameters as the request body.
            post_data = json.dumps(params).encode()
            content_type = "application/json"

        headers["Content-Type"] = content_type

        # Keep the un-wrapped body around for debug logging only.
        encoded_params = post_data

        if progress_meter:
            # Wrap the body so reads drive an upload progress display.
            post_data = BufferReader(post_data, desc="Upload progress")

        if compress == "gzip":
            # NOTE(review): dead branch while `compress` is forced to None
            # above — preserved as-is.
            if not hasattr(post_data, "read"):
                post_data = BytesIO(post_data)
            headers["Content-Encoding"] = "gzip"

            from openai.gzip_stream import GZIPCompressedStream

            post_data = GZIPCompressedStream(post_data, compression_level=9)
    else:
        raise error.APIConnectionError(
            "Unrecognized HTTP method %r. This may indicate a bug in the "
            "OpenAI bindings. Please contact [email protected] for "
            "assistance."
            % (method,)
        )

    headers = self.request_headers(my_api_key, method, headers)
    # Caller-supplied headers win over the computed defaults.
    if supplied_headers is not None:
        for key, value in six.iteritems(supplied_headers):
            headers[key] = value

    util.log_info("Request to OpenAI API", method=method, path=abs_url)
    util.log_debug(
        "Post details", post_data=encoded_params, api_version=self.api_version
    )

    # The HTTP client handles retries/backoff internally.
    rbody, rcode, rheaders, stream = self._client.request_with_retries(
        method, abs_url, headers, post_data, stream=stream
    )

    util.log_info(
        "OpenAI API response",
        path=abs_url,
        response_code=rcode,
        processing_ms=rheaders.get("OpenAI-Processing-Ms"),
    )
    util.log_debug("API response body", body=rbody, headers=rheaders)

    if "Request-Id" in rheaders:
        request_id = rheaders["Request-Id"]
        util.log_debug(
            "Dashboard link for request", link=util.dashboard_link(request_id)
        )

    return rbody, rcode, rheaders, stream, my_api_key