def get(self, http, locator):
    # http is an httplib2.Http object.
    # locator is a KeepLocator object.
    url = self.root + str(locator)
    _logger.debug("Request: GET %s", url)
    try:
        with timer.Timer() as t:
            result = http.request(url.encode('utf-8'), 'GET',
                                  headers=self.get_headers)
    except self.HTTP_ERRORS as e:
        _logger.debug("Request fail: GET %s => %s: %s",
                      url, type(e), str(e))
        self.last_result = e
    else:
        self.last_result = result
        self.success_flag = retry.check_http_response_success(result)
        content = result[1]
        _logger.info("%s response: %s bytes in %s msec (%.3f MiB/sec)",
                     self.last_status(), len(content), t.msecs,
                     (len(content) / (1024.0 * 1024)) / t.secs)
        if self.success_flag:
            resp_md5 = hashlib.md5(content).hexdigest()
            if resp_md5 == locator.md5sum:
                return content
            _logger.warning("Checksum fail: md5(%s) = %s", url, resp_md5)
    return None
def get(self, locator, timeout=None):
    # locator is a KeepLocator object.
    url = self.root + str(locator)
    _logger.debug("Request: GET %s", url)
    try:
        with timer.Timer() as t:
            result = self.session.get(url.encode('utf-8'),
                                      headers=self.get_headers,
                                      timeout=timeout)
    except self.HTTP_ERRORS as e:
        _logger.debug("Request fail: GET %s => %s: %s",
                      url, type(e), str(e))
        self.last_result = e
    else:
        self.last_result = result
        self.success_flag = retry.check_http_response_success(result)
        content = result.content
        _logger.info("%s response: %s bytes in %s msec (%.3f MiB/sec)",
                     self.last_status(), len(content), t.msecs,
                     (len(content)/(1024.0*1024))/t.secs if t.secs > 0 else 0)
        if self.success_flag:
            resp_md5 = hashlib.md5(content).hexdigest()
            if resp_md5 == locator.md5sum:
                return content
            _logger.warning("Checksum fail: md5(%s) = %s", url, resp_md5)
    return None
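# The get()/put() variants in this section time each transfer with a
# `timer.Timer` context manager exposing `.secs` and `.msecs`. Its real
# definition is not shown here; the following is only a minimal sketch,
# assuming nothing beyond the two attributes used above.
import time

class Timer(object):
    """Hypothetical context manager recording wall-clock elapsed time."""
    def __enter__(self):
        self.start = time.time()
        return self

    def __exit__(self, *exc_info):
        self.secs = time.time() - self.start   # elapsed seconds (float)
        self.msecs = self.secs * 1000.0        # elapsed milliseconds
        return False  # never suppress exceptions raised in the timed block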
def put(self, hash_s, body, timeout=None):
    url = self.root + hash_s
    _logger.debug("Request: PUT %s", url)
    curl = self._get_user_agent()
    try:
        self._headers = {}
        body_reader = cStringIO.StringIO(body)
        response_body = cStringIO.StringIO()
        curl.setopt(pycurl.NOSIGNAL, 1)
        curl.setopt(pycurl.OPENSOCKETFUNCTION, self._socket_open)
        curl.setopt(pycurl.URL, url.encode('utf-8'))
        # Using UPLOAD tells cURL to wait for a "go ahead" from the
        # Keep server (in the form of a HTTP/1.1 "100 Continue"
        # response) instead of sending the request body immediately.
        # This allows the server to reject the request if the request
        # is invalid or the server is read-only, without waiting for
        # the client to send the entire block.
        curl.setopt(pycurl.UPLOAD, True)
        curl.setopt(pycurl.INFILESIZE, len(body))
        curl.setopt(pycurl.READFUNCTION, body_reader.read)
        curl.setopt(pycurl.HTTPHEADER, [
            '{}: {}'.format(k, v) for k, v in self.put_headers.iteritems()])
        curl.setopt(pycurl.WRITEFUNCTION, response_body.write)
        curl.setopt(pycurl.HEADERFUNCTION, self._headerfunction)
        self._setcurltimeouts(curl, timeout)
        try:
            curl.perform()
        except Exception as e:
            raise arvados.errors.HttpError(0, str(e))
        self._result = {
            'status_code': curl.getinfo(pycurl.RESPONSE_CODE),
            'body': response_body.getvalue(),
            'headers': self._headers,
            'error': False,
        }
        ok = retry.check_http_response_success(self._result['status_code'])
        if not ok:
            self._result['error'] = arvados.errors.HttpError(
                self._result['status_code'],
                self._headers.get('x-status-line', 'Error'))
    except self.HTTP_ERRORS as e:
        self._result = {
            'error': e,
        }
        ok = False
    self._usable = ok != False  # still usable if ok is True or None
    if self._result.get('status_code', None):
        # Client is functional. See comment in get().
        self._put_user_agent(curl)
    else:
        curl.close()
    if not ok:
        _logger.debug("Request fail: PUT %s => %s: %s",
                      url, type(self._result['error']),
                      str(self._result['error']))
        return False
    return True
def put(self, http, hash_s, body):
    url = self.root + hash_s
    _logger.debug("Request: PUT %s", url)
    try:
        result = http.request(url.encode('utf-8'), 'PUT',
                              headers=self.put_headers, body=body)
    except self.HTTP_ERRORS as e:
        _logger.debug("Request fail: PUT %s => %s: %s",
                      url, type(e), str(e))
        self.last_result = e
    else:
        self.last_result = result
        self.success_flag = retry.check_http_response_success(result)
    return self.success_flag
def put(self, hash_s, body, timeout=None):
    url = self.root + hash_s
    _logger.debug("Request: PUT %s", url)
    try:
        result = self.session.put(url.encode('utf-8'), data=body,
                                  headers=self.put_headers,
                                  timeout=timeout)
    except self.HTTP_ERRORS as e:
        _logger.debug("Request fail: PUT %s => %s: %s",
                      url, type(e), str(e))
        self.last_result = e
    else:
        self.last_result = result
        self.success_flag = retry.check_http_response_success(result)
    return self.success_flag
def results_map(self, *codes):
    for code in codes:
        yield code, arv_retry.check_http_response_success(code)
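# The callers above treat retry.check_http_response_success() as tri-state:
# True means the request succeeded, None means a temporary failure that may
# be retried, and False means a permanent failure (see the
# "still usable if ok is True or None" comments). The real classification
# lives in the retry module; the sketch below only illustrates that
# convention for the status-code form of the call, with an assumed mapping.
def check_http_response_success_sketch(status_code):
    """Illustrative only: classify an HTTP status into True/None/False."""
    if 200 <= status_code < 300:
        return True      # success
    elif 500 <= status_code < 600:
        return None      # server-side/temporary failure: worth retrying
    else:
        return False     # client error or anything else: permanent failure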
def get(self, locator, method="GET", timeout=None): # locator is a KeepLocator object. url = self.root + str(locator) _logger.debug("Request: %s %s", method, url) curl = self._get_user_agent() ok = None try: with timer.Timer() as t: self._headers = {} response_body = cStringIO.StringIO() curl.setopt(pycurl.NOSIGNAL, 1) curl.setopt(pycurl.OPENSOCKETFUNCTION, self._socket_open) curl.setopt(pycurl.URL, url.encode('utf-8')) curl.setopt(pycurl.HTTPHEADER, [ '{}: {}'.format(k,v) for k,v in self.get_headers.iteritems()]) curl.setopt(pycurl.WRITEFUNCTION, response_body.write) curl.setopt(pycurl.HEADERFUNCTION, self._headerfunction) if method == "HEAD": curl.setopt(pycurl.NOBODY, True) self._setcurltimeouts(curl, timeout) try: curl.perform() except Exception as e: raise arvados.errors.HttpError(0, str(e)) self._result = { 'status_code': curl.getinfo(pycurl.RESPONSE_CODE), 'body': response_body.getvalue(), 'headers': self._headers, 'error': False, } ok = retry.check_http_response_success(self._result['status_code']) if not ok: self._result['error'] = arvados.errors.HttpError( self._result['status_code'], self._headers.get('x-status-line', 'Error')) except self.HTTP_ERRORS as e: self._result = { 'error': e, } self._usable = ok != False if self._result.get('status_code', None): # The client worked well enough to get an HTTP status # code, so presumably any problems are just on the # server side and it's OK to reuse the client. self._put_user_agent(curl) else: # Don't return this client to the pool, in case it's # broken. curl.close() if not ok: _logger.debug("Request fail: GET %s => %s: %s", url, type(self._result['error']), str(self._result['error'])) return None if method == "HEAD": _logger.info("HEAD %s: %s bytes", self._result['status_code'], self._result.get('content-length')) return True _logger.info("GET %s: %s bytes in %s msec (%.3f MiB/sec)", self._result['status_code'], len(self._result['body']), t.msecs, (len(self._result['body'])/(1024.0*1024))/t.secs if t.secs > 0 else 0) if self.download_counter: self.download_counter.add(len(self._result['body'])) resp_md5 = hashlib.md5(self._result['body']).hexdigest() if resp_md5 != locator.md5sum: _logger.warning("Checksum fail: md5(%s) = %s", url, resp_md5) self._result['error'] = arvados.errors.HttpError( 0, 'Checksum fail') return None return self._result['body']
def results_map(self, *codes):
    for code in codes:
        response = fake_requests_response(code, None)
        yield code, arv_retry.check_http_response_success(response)
def get(self, locator, method="GET", timeout=None): # locator is a KeepLocator object. url = self.root + str(locator) _logger.debug("Request: %s %s", method, url) curl = self._get_user_agent() ok = None try: with timer.Timer() as t: self._headers = {} response_body = BytesIO() curl.setopt(pycurl.NOSIGNAL, 1) curl.setopt( pycurl.OPENSOCKETFUNCTION, lambda *args, **kwargs: self._socket_open( *args, **kwargs)) curl.setopt(pycurl.URL, url.encode('utf-8')) curl.setopt(pycurl.HTTPHEADER, [ '{}: {}'.format(k, v) for k, v in self.get_headers.items() ]) curl.setopt(pycurl.WRITEFUNCTION, response_body.write) curl.setopt(pycurl.HEADERFUNCTION, self._headerfunction) if method == "HEAD": curl.setopt(pycurl.NOBODY, True) self._setcurltimeouts(curl, timeout) try: curl.perform() except Exception as e: raise arvados.errors.HttpError(0, str(e)) finally: if self._socket: self._socket.close() self._socket = None self._result = { 'status_code': curl.getinfo(pycurl.RESPONSE_CODE), 'body': response_body.getvalue(), 'headers': self._headers, 'error': False, } ok = retry.check_http_response_success( self._result['status_code']) if not ok: self._result['error'] = arvados.errors.HttpError( self._result['status_code'], self._headers.get('x-status-line', 'Error')) except self.HTTP_ERRORS as e: self._result = { 'error': e, } self._usable = ok != False if self._result.get('status_code', None): # The client worked well enough to get an HTTP status # code, so presumably any problems are just on the # server side and it's OK to reuse the client. self._put_user_agent(curl) else: # Don't return this client to the pool, in case it's # broken. curl.close() if not ok: _logger.debug("Request fail: GET %s => %s: %s", url, type(self._result['error']), str(self._result['error'])) return None if method == "HEAD": _logger.info("HEAD %s: %s bytes", self._result['status_code'], self._result.get('content-length')) return True _logger.info( "GET %s: %s bytes in %s msec (%.3f MiB/sec)", self._result['status_code'], len(self._result['body']), t.msecs, 1.0 * len(self._result['body']) / 2**20 / t.secs if t.secs > 0 else 0) if self.download_counter: self.download_counter.add(len(self._result['body'])) resp_md5 = hashlib.md5(self._result['body']).hexdigest() if resp_md5 != locator.md5sum: _logger.warning("Checksum fail: md5(%s) = %s", url, resp_md5) self._result['error'] = arvados.errors.HttpError( 0, 'Checksum fail') return None return self._result['body']
def results_map(self, *codes):
    for code in codes:
        response = (fake_httplib2_response(code), None)
        yield code, arv_retry.check_http_response_success(response)
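# Hypothetical illustration of how a test might consume results_map(). The
# method names and expected outcomes assume the tri-state convention sketched
# earlier (2xx -> True, 5xx -> None); they are not taken from the real test
# suite.
def test_success_codes(self):
    for code, outcome in self.results_map(200, 201, 206):
        self.assertIs(outcome, True,
                      "status {} should count as success".format(code))

def test_temporary_failure_codes(self):
    for code, outcome in self.results_map(500, 502, 503):
        self.assertIsNone(outcome,
                          "status {} should be retryable".format(code))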
def put(self, hash_s, body, timeout=None):
    url = self.root + hash_s
    _logger.debug("Request: PUT %s", url)
    curl = self._get_user_agent()
    ok = None
    try:
        with timer.Timer() as t:
            self._headers = {}
            body_reader = cStringIO.StringIO(body)
            response_body = cStringIO.StringIO()
            curl.setopt(pycurl.NOSIGNAL, 1)
            curl.setopt(pycurl.OPENSOCKETFUNCTION, self._socket_open)
            curl.setopt(pycurl.URL, url.encode("utf-8"))
            # Using UPLOAD tells cURL to wait for a "go ahead" from the
            # Keep server (in the form of a HTTP/1.1 "100 Continue"
            # response) instead of sending the request body immediately.
            # This allows the server to reject the request if the request
            # is invalid or the server is read-only, without waiting for
            # the client to send the entire block.
            curl.setopt(pycurl.UPLOAD, True)
            curl.setopt(pycurl.INFILESIZE, len(body))
            curl.setopt(pycurl.READFUNCTION, body_reader.read)
            curl.setopt(pycurl.HTTPHEADER, [
                "{}: {}".format(k, v) for k, v in self.put_headers.iteritems()])
            curl.setopt(pycurl.WRITEFUNCTION, response_body.write)
            curl.setopt(pycurl.HEADERFUNCTION, self._headerfunction)
            self._setcurltimeouts(curl, timeout)
            try:
                curl.perform()
            except Exception as e:
                raise arvados.errors.HttpError(0, str(e))
            self._result = {
                "status_code": curl.getinfo(pycurl.RESPONSE_CODE),
                "body": response_body.getvalue(),
                "headers": self._headers,
                "error": False,
            }
            ok = retry.check_http_response_success(self._result["status_code"])
            if not ok:
                self._result["error"] = arvados.errors.HttpError(
                    self._result["status_code"],
                    self._headers.get("x-status-line", "Error"))
    except self.HTTP_ERRORS as e:
        self._result = {"error": e}
        ok = False
    self._usable = ok != False  # still usable if ok is True or None
    if self._result.get("status_code", None):
        # Client is functional. See comment in get().
        self._put_user_agent(curl)
    else:
        curl.close()
    if not ok:
        _logger.debug("Request fail: PUT %s => %s: %s",
                      url, type(self._result["error"]),
                      str(self._result["error"]))
        return False
    _logger.info("PUT %s: %s bytes in %s msec (%.3f MiB/sec)",
                 self._result["status_code"],
                 len(body),
                 t.msecs,
                 (len(body) / (1024.0 * 1024)) / t.secs if t.secs > 0 else 0)
    if self.upload_counter:
        self.upload_counter.add(len(body))
    return True
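# A hypothetical caller-side sketch showing how the boolean result of the
# pycurl-based put() above might be consumed with a bounded retry loop.
# `service`, `num_retries`, and `backoff` are illustrative names, not the
# actual KeepClient retry machinery (which lives in the arvados.retry module).
import time

def put_with_retries(service, hash_s, body, num_retries=3, backoff=1.0):
    for attempt in range(num_retries + 1):
        if service.put(hash_s, body):
            return True                        # success: stop retrying
        if not getattr(service, '_usable', True):
            break                              # permanent failure: give up early
        time.sleep(backoff * (2 ** attempt))   # simple exponential backoff
    return False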