def _get_spare_chunk(self, chunks_notin, chunks_broken, position,
                     max_attempts=3, check_quality=False,
                     fake_excluded_chunks=None, **kwargs):
    """
    Call the container service to get spare chunk locations,
    excluding the chunks already in place (notin) and the broken ones.
    """
    notin = ChunksHelper(chunks_notin, False).raw()
    broken = ChunksHelper(chunks_broken, False).raw()
    if fake_excluded_chunks:
        for fake_excluded_chunk in fake_excluded_chunks:
            chunk = fake_excluded_chunk.copy()
            chunk['hash'] = broken[0]['hash']
            chunk['pos'] = broken[0]['pos']
            chunk['size'] = broken[0]['size']
            broken.append(chunk)
    spare_data = {"notin": notin, "broken": broken}
    last_exc = None
    bal = 0
    for attempt in range(max_attempts):
        try:
            spare_resp = self.container_client.content_spare(
                cid=self.container_id, path=self.path,
                version=self.version, data=spare_data,
                stgpol=self.policy, position=position, **kwargs)
            quals = extract_chunk_qualities(
                spare_resp.get('properties', {}), raw=True)
            if check_quality:
                bal = ensure_better_chunk_qualities(chunks_broken, quals)
            break
        except (exc.ClientException, exc.SpareChunkException) as err:
            self.logger.info(
                "Failed to find spare chunk (attempt %d/%d): %s",
                attempt + 1, max_attempts, err)
            last_exc = err
            # TODO(FVE): exponential backoff?
    else:
        # All attempts failed: re-raise the last exception.
        if isinstance(last_exc, exc.SpareChunkException):
            exc.reraise(exc.SpareChunkException, last_exc)
        raise exc.SpareChunkException("No spare chunk: %s" % str(last_exc))

    url_list = []
    for chunk in spare_resp["chunks"]:
        url_list.append(chunk["id"])

    if check_quality:
        self.logger.info(
            "Found %d spare chunks that will improve "
            "metachunk quality by %d", len(url_list), bal)

    return url_list, quals
def _wrapped(self, account, container, obj, *args, **kwargs):
    try:
        return fnc(self, account, container, obj, *args, **kwargs)
    except NotFound as err:
        if err.status == 406:
            err.message = "Container '%s' does not exist." % container
            reraise(NoSuchContainer, err)
        else:
            err.message = "Object '%s' does not exist." % obj
            reraise(NoSuchObject, err)
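# A minimal sketch (an assumption, not taken from the source) of the
# decorator factory that would produce the _wrapped closure above:
# `fnc` is the decorated client method, and the factory name
# `handle_object_not_found` is hypothetical. NotFound, NoSuchContainer,
# NoSuchObject and reraise are assumed to come from the project's
# exceptions module.
from functools import wraps

def handle_object_not_found(fnc):
    @wraps(fnc)
    def _wrapped(self, account, container, obj, *args, **kwargs):
        try:
            return fnc(self, account, container, obj, *args, **kwargs)
        except NotFound as err:
            # Status 406 means the container itself is missing,
            # anything else means the object is missing.
            if err.status == 406:
                err.message = "Container '%s' does not exist." % container
                reraise(NoSuchContainer, err)
            else:
                err.message = "Object '%s' does not exist." % obj
                reraise(NoSuchObject, err)
    return _wrapped

# Typical (hypothetical) usage on an object-storage client method:
# @handle_object_not_found
# def object_get_properties(self, account, container, obj, **kwargs):
#     ...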
def fetch_job(self, on_job, timeout=None, **kwargs):
    """
    Reserve a job from beanstalkd and hand it to the on_job callback.
    Delete the job once it has been handled, bury it when handling fails.
    """
    job_id = None
    try:
        if not self.connected:
            self.logger.debug('Connecting to %s using tube %s',
                              self.addr, self.tube)
            self._connect(**kwargs)
        job_id, data = self.beanstalkd.reserve(timeout=timeout)
        try:
            for job_info in on_job(job_id, data, **kwargs):
                yield job_info
        except GeneratorExit:
            # The reader has finished handling the job but does not want
            # any new one, so it breaks the generator. This does not mean
            # the current job has failed, thus we must delete it.
            self.beanstalkd.delete(job_id)
            raise
        except Exception as err:
            try:
                self.beanstalkd.bury(job_id)
            except BeanstalkError as exc:
                self.logger.error("Could not bury job %s: %s", job_id, exc)
            exceptions.reraise(err.__class__, err)
        else:
            self.beanstalkd.delete(job_id)
        return
    except ConnectionError as exc:
        self.connected = False
        self.logger.warn('Disconnected from %s using tube %s (job=%s): %s',
                         self.addr, self.tube, job_id, exc)
        if 'Invalid URL' in str(exc):
            raise
        time.sleep(1.0)
    except exceptions.ExplicitBury as exc:
        self.logger.warn("Job bury on %s using tube %s (job=%s): %s",
                         self.addr, self.tube, job_id, exc)
    except BeanstalkError as exc:
        if isinstance(exc, ResponseError) and 'TIMED_OUT' in str(exc):
            raise exceptions.OioTimeout()
        self.logger.exception("ERROR on %s using tube %s (job=%s)",
                              self.addr, self.tube, job_id)
    except Exception:
        self.logger.exception("ERROR on %s using tube %s (job=%s)",
                              self.addr, self.tube, job_id)
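# A hedged sketch (not from the source) of an `on_job` callback as expected
# by fetch_job() above: it receives the reserved beanstalkd job id and raw
# payload, and yields zero or more results. Raising inside it buries the
# job; returning (or the consumer breaking out of the loop) deletes it.
# The JSON payload format and the names below are assumptions.
import json

def handle_job(job_id, data, **kwargs):
    event = json.loads(data)  # assuming the job payload is JSON
    # ... process the event here ...
    yield event

# Hypothetical driver loop, where `listener` is an instance of the class
# defining fetch_job(); each call handles at most one reserved job:
# for result in listener.fetch_job(handle_job, timeout=5):
#     print(result)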
def xcute_request(self, method, action, params=None, **kwargs):
    """Make a request to the xcute service."""
    self._maybe_refresh_endpoint(**kwargs)
    if not params:
        params = dict()
    try:
        resp, body = self._request(method, action, params=params, **kwargs)
    except OioNetworkException as exc:
        exc_info = sys.exc_info()
        if self._refresh_delay >= 0.0:
            self.logger.info(
                "Refreshing xcute endpoint after error %s", exc)
            try:
                self._refresh_endpoint(**kwargs)
            except Exception as exc:
                self.logger.warn("%s", exc)
        reraise(exc_info[0], exc_info[1], exc_info[2])
    return resp, body
def _reraise(exc_type, exc_value):
    reqid = out_headers.get('X-oio-req-id')
    exceptions.reraise(exc_type, exc_value, "reqid=%s" % reqid)
def oio_exception_from_httperror(exc, reqid=None, url=None):
    """
    Convert an HTTPError from urllib3 to an OioException,
    and re-raise it.
    """
    extra_dict = dict()
    if reqid:
        extra_dict['reqid'] = reqid
    if url:
        extra_dict['host'] = urlparse(url).netloc
    extra = ', '.join('%s=%s' % x for x in extra_dict.items())
    if isinstance(exc, urllibexc.MaxRetryError):
        if isinstance(exc.reason, urllibexc.NewConnectionError):
            reraise(OioNetworkException, exc.reason, extra)
        if isinstance(exc.reason, urllibexc.TimeoutError):
            reraise(OioTimeout, exc.reason, extra)
        reraise(OioNetworkException, exc, extra)
    elif isinstance(exc, (urllibexc.ProtocolError, urllibexc.ProxyError,
                          urllibexc.ClosedPoolError)):
        reraise(OioNetworkException, exc, extra)
    elif isinstance(exc, urllibexc.TimeoutError):
        reraise(OioTimeout, exc, extra)
    else:
        reraise(OioException, exc, extra)
def _wrapped(self, account, container, *args, **kwargs):
    try:
        return fnc(self, account, container, *args, **kwargs)
    except NotFound as err:
        err.message = "Container '%s' does not exist." % container
        reraise(NoSuchContainer, err)
def _wrapped(self, account=None, *args, **kwargs):
    try:
        return fnc(self, account, *args, **kwargs)
    except NotFound as err:
        err.message = "Account '%s' does not exist." % account
        reraise(NoSuchAccount, err)
def oio_exception_from_httperror(exc, reqid=None):
    """
    Convert an HTTPError from urllib3 to an OioException,
    and re-raise it.
    """
    extra = ("reqid=%s" % reqid) if reqid else None
    if isinstance(exc, MaxRetryError):
        if isinstance(exc.reason, NewConnectionError):
            reraise(OioNetworkException, exc.reason, extra)
        if isinstance(exc.reason, TimeoutError):
            reraise(OioTimeout, exc.reason, extra)
        reraise(OioNetworkException, exc, extra)
    elif isinstance(exc, (ProtocolError, ProxyError, ClosedPoolError)):
        reraise(OioNetworkException, exc, extra)
    elif isinstance(exc, TimeoutError):
        reraise(OioTimeout, exc, extra)
    else:
        reraise(OioException, exc, extra)
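# A hedged usage sketch (not from the source): callers typically wrap an
# urllib3 request and funnel transport errors through
# oio_exception_from_httperror() so they surface as Oio* exceptions.
# `http_pool` and `get_with_oio_errors` are hypothetical names.
from urllib3 import PoolManager
from urllib3.exceptions import HTTPError

http_pool = PoolManager()

def get_with_oio_errors(url, reqid=None):
    try:
        return http_pool.request('GET', url)
    except HTTPError as err:
        # Re-raises as OioNetworkException, OioTimeout or OioException,
        # tagging the message with the request id when available.
        oio_exception_from_httperror(err, reqid=reqid)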