def prepare_response(self, grab):
    """Assemble a Response object from the finished curl transfer.

    Closes the streaming body file (if any), joins the buffered
    header/body chunks, copies transfer statistics from the curl
    handle, parses the document and collects cookies.
    """
    if self.body_file:
        self.body_file.close()

    response = Response()
    response.head = b''.join(self.response_header_chunks)

    if self.body_path:
        # Body was streamed to disk; expose only the path
        response.body_path = self.body_path
    else:
        response.body = b''.join(self.response_body_chunks)

    # Clear memory
    self.response_header_chunks = []
    self.response_body_chunks = []

    # Copy transfer statistics reported by libcurl, attribute by attribute
    curl_info = (
        ('code', pycurl.HTTP_CODE),
        ('total_time', pycurl.TOTAL_TIME),
        ('connect_time', pycurl.CONNECT_TIME),
        ('name_lookup_time', pycurl.NAMELOOKUP_TIME),
        ('download_size', pycurl.SIZE_DOWNLOAD),
        ('upload_size', pycurl.SIZE_UPLOAD),
        ('download_speed', pycurl.SPEED_DOWNLOAD),
        ('remote_ip', pycurl.PRIMARY_IP),
        ('url', pycurl.EFFECTIVE_URL),
    )
    for attr, key in curl_info:
        setattr(response, attr, self.curl.getinfo(key))

    response.parse(charset=grab.config['document_charset'])
    response.cookies = CookieManager(self.extract_cookiejar())

    # We do not need anymore cookies stored in the
    # curl instance so drop them
    self.curl.setopt(pycurl.COOKIELIST, 'ALL')
    return response
def prepare_response(self, grab):
    """Build and return a Response object from the completed curl transfer.

    Closes the temporary body file (when the body was streamed to disk),
    joins the buffered header/body chunks, copies transfer statistics
    from the curl handle, parses the document and collects cookies.

    :param grab: Grab instance whose config supplies the
        ``document_charset`` option used when parsing.
    :returns: a populated ``Response`` instance.
    """
    if self.body_file:
        self.body_file.close()
    response = Response()
    # Raw response headers as a single bytes blob
    response.head = b"".join(self.response_header_chunks)
    if self.body_path:
        # Body was streamed to a file; expose only its path
        response.body_path = self.body_path
    else:
        response.body = b"".join(self.response_body_chunks)

    # Clear memory
    self.response_header_chunks = []
    self.response_body_chunks = []

    # Transfer statistics reported by libcurl for this request
    response.code = self.curl.getinfo(pycurl.HTTP_CODE)
    response.total_time = self.curl.getinfo(pycurl.TOTAL_TIME)
    response.connect_time = self.curl.getinfo(pycurl.CONNECT_TIME)
    response.name_lookup_time = self.curl.getinfo(pycurl.NAMELOOKUP_TIME)
    response.download_size = self.curl.getinfo(pycurl.SIZE_DOWNLOAD)
    response.upload_size = self.curl.getinfo(pycurl.SIZE_UPLOAD)
    response.download_speed = self.curl.getinfo(pycurl.SPEED_DOWNLOAD)
    response.remote_ip = self.curl.getinfo(pycurl.PRIMARY_IP)
    # EFFECTIVE_URL reflects the final URL after any redirects
    response.url = self.curl.getinfo(pycurl.EFFECTIVE_URL)

    response.parse(charset=grab.config["document_charset"])
    response.cookies = CookieManager(self.extract_cookiejar())

    # We do not need anymore cookies stored in the
    # curl instance so drop them
    self.curl.setopt(pycurl.COOKIELIST, "ALL")
    return response
def prepare_response(self, grab):
    """Build a Response object from the urllib3 response held by the transport.

    Reconstructs a raw header block, reads the body (optionally limited
    by ``body_maxsize`` or streamed to ``_response_path``), copies the
    status code and final URL, parses the document and collects cookies.

    :param grab: Grab instance whose config supplies ``document_charset``.
    :returns: a populated ``Response`` instance.
    """
    # Hoisted from mid-function: imports belong at the top of the scope
    import email.message

    response = Response()

    # Rebuild a raw "Key: Value\r\n...\r\n" header block from urllib3's
    # header mapping; join once instead of quadratic string +=
    head = ''.join('%s: %s\r\n' % (key, val)
                   for key, val in self._response.getheaders().items())
    head += '\r\n'
    response.head = make_str(head, encoding='latin', errors='ignore')

    if self._request._response_path:
        response.body_path = self._request._response_path
        # FIXME: quick dirty hack — actually the response is fully
        # read into memory before being written to disk
        self._request._response_file.write(self._response.read())
        self._request._response_file.close()
    elif self._request.body_maxsize is not None:
        # Read no more than the configured body size limit
        response.body = self._response.read(self._request.body_maxsize)
    else:
        response.body = self._response.read()

    response.code = self._response.status
    # Prefer the redirect target when one is present, else the request URL
    response.url = (self._response.get_redirect_location()
                    or self._request.url)

    # Re-expose the headers as an email.message.Message, the structure
    # passed to Response.parse for header access
    hdr = email.message.Message()
    for key, val in self._response.getheaders().items():
        hdr[key] = val
    response.parse(charset=grab.config['document_charset'], headers=hdr)

    jar = self.extract_cookiejar(self._response, self._request)
    response.cookies = CookieManager(jar)
    return response
def prepare_response(self, grab):
    """Create a Response from the urllib3 response stored on this transport.

    Builds the raw header blob, reads the body (honoring
    ``body_maxsize`` and the optional on-disk ``_response_path``),
    records status and final URL, parses the document and extracts
    cookies.

    :param grab: Grab instance; ``grab.config['document_charset']`` is
        forwarded to the parser.
    :returns: a populated ``Response`` instance.
    """
    # Import hoisted from the middle of the function body
    import email.message

    response = Response()

    # Assemble the raw header block with a single join rather than
    # repeated string concatenation
    header_items = self._response.getheaders().items()
    head = ''.join('%s: %s\r\n' % (key, val) for key, val in header_items)
    head += '\r\n'
    response.head = make_str(head, encoding='latin', errors='ignore')

    if self._request._response_path:
        response.body_path = self._request._response_path
        # FIXME: quick dirty hack — actually the response is fully
        # read into memory before being written to disk
        self._request._response_file.write(self._response.read())
        self._request._response_file.close()
    elif self._request.body_maxsize is not None:
        # Cap the amount of body data read from the connection
        response.body = self._response.read(self._request.body_maxsize)
    else:
        response.body = self._response.read()

    response.code = self._response.status
    # Use the redirect location when set, otherwise the original URL
    response.url = (self._response.get_redirect_location()
                    or self._request.url)

    # Headers are handed to Response.parse wrapped in an
    # email.message.Message object
    hdr = email.message.Message()
    for key, val in self._response.getheaders().items():
        hdr[key] = val
    response.parse(charset=grab.config['document_charset'], headers=hdr)

    jar = self.extract_cookiejar(self._response, self._request)
    response.cookies = CookieManager(jar)
    return response
def prepare_response(self, grab):
    """Assemble a Response from the data accumulated during a curl transfer.

    Decodes head chunks on Python 3, joins buffered chunks into
    head/body, copies curl transfer statistics, parses the document
    (with an explicit charset only when configured) and collects
    cookies.
    """
    # py3 hack
    if PY3K:
        self.response_head_chunks = decode_list(self.response_head_chunks)
    if self.body_file:
        self.body_file.close()

    response = Response()
    response.head = ''.join(self.response_head_chunks)
    if self.body_path:
        # Body lives on disk; expose only its location
        response.body_path = self.body_path
    else:
        response.body = b''.join(self.response_body_chunks)

    # Drop buffered chunks to free memory
    self.response_head_chunks = []
    self.response_body_chunks = []

    # Bind the lookup once; every statistic comes from the same handle
    getinfo = self.curl.getinfo
    response.code = getinfo(pycurl.HTTP_CODE)
    response.total_time = getinfo(pycurl.TOTAL_TIME)
    response.connect_time = getinfo(pycurl.CONNECT_TIME)
    response.name_lookup_time = getinfo(pycurl.NAMELOOKUP_TIME)
    response.download_size = getinfo(pycurl.SIZE_DOWNLOAD)
    response.upload_size = getinfo(pycurl.SIZE_UPLOAD)
    response.download_speed = getinfo(pycurl.SPEED_DOWNLOAD)
    response.url = getinfo(pycurl.EFFECTIVE_URL)

    # Pass an explicit charset only when one was configured
    parse_kwargs = {}
    if grab.config['document_charset'] is not None:
        parse_kwargs['charset'] = grab.config['document_charset']
    response.parse(**parse_kwargs)

    response.cookies = CookieManager(self.extract_cookiejar())

    # We do not need anymore cookies stored in the
    # curl instance so drop them
    self.curl.setopt(pycurl.COOKIELIST, 'ALL')
    return response
def prepare_response(self, grab):
    """Build a Response from the urllib3 response, enforcing size/time limits.

    Streams the body in chunks while honoring ``config_nobody``,
    ``config_body_maxsize`` and the request ``timeout``; builds the raw
    header blob, records status and final URL, parses the document and
    extracts cookies. The underlying connection is always released back
    via ``release_conn()``.

    :param grab: Grab instance whose config supplies ``document_charset``.
    :returns: a populated ``Response`` instance.
    :raises GrabTimeoutError: when reading the body exceeds the
        request's timeout budget.
    """
    # Hoisted from mid-function (the original import statement was also
    # broken across a line break)
    import email.message

    try:
        response = Response()

        # Rebuild a raw header block with one join, not quadratic +=
        head = ''.join('%s: %s\r\n' % (key, val)
                       for key, val in self._response.getheaders().items())
        head += '\r\n'
        response.head = make_str(head, encoding='latin', errors='ignore')

        def read_with_timeout():
            # Read the body in chunks, stopping at EOF, at the size
            # limit, or raising once the request timeout is exceeded.
            if self._request.config_nobody:
                return b''
            maxsize = self._request.config_body_maxsize
            chunks = []
            default_chunk_size = 10000
            if maxsize:
                # One byte over the limit is enough to detect truncation
                chunk_size = min(default_chunk_size, maxsize + 1)
            else:
                chunk_size = default_chunk_size
            total_size = 0
            while True:
                chunk = self._response.read(chunk_size)
                if not chunk:
                    break
                total_size += len(chunk)
                chunks.append(chunk)
                if maxsize and total_size > maxsize:
                    logger.debug(
                        'Response body max size limit reached: %s'
                        % maxsize)
                    # BUG FIX: stop reading once the limit is reached;
                    # previously the loop logged but kept downloading
                    # the body until EOF
                    break
                if self._request.timeout:
                    if (time.time() - self._request.op_started
                            > self._request.timeout):
                        raise GrabTimeoutError
            data = b''.join(chunks)
            if maxsize:
                data = data[:maxsize]
            return data

        if self._request._response_path:
            response.body_path = self._request._response_path
            # FIXME: quick dirty hack — actually the response is fully
            # read into memory before being written to disk
            self._request._response_file.write(read_with_timeout())
            self._request._response_file.close()
        else:
            response.body = read_with_timeout()

        response.code = self._response.status
        # Prefer the redirect target when present, else the request URL
        response.url = (self._response.get_redirect_location()
                        or self._request.url)

        # Headers are handed to Response.parse wrapped in an
        # email.message.Message object
        hdr = email.message.Message()
        for key, val in self._response.getheaders().items():
            hdr[key] = val
        response.parse(charset=grab.config['document_charset'],
                       headers=hdr)

        jar = self.extract_cookiejar()
        response.cookies = CookieManager(jar)
        return response
    finally:
        # Always hand the connection back, even on error
        self._response.release_conn()