def get(self, remote_path, local_path): self._logger.info('Downloading %s from CloudFiles to %s' % (remote_path, local_path)) container, obj = self._parse_path(remote_path) dest_path = os.path.join(local_path, os.path.basename(remote_path)) try: connection = self._get_connection() if not self._container_check_cache(container): try: ct = connection.get_container(container) except cloudfiles.errors.NoSuchContainer: raise TransferError("Container '%s' not found" % container) # Cache container object self._container = ct try: o = self._container.get_object(obj) except cloudfiles.errors.NoSuchObject: raise TransferError("Object '%s' not found in container '%s'" % (obj, container)) o.save_to_filename(dest_path) return dest_path except (cloudfiles.errors.ResponseError, OSError, Exception): exc = sys.exc_info() raise TransferError, exc[1], exc[2]
def get(self, remote_path, local_path): self._logger.info('Downloading %s from S3 to %s' % (remote_path, local_path)) bucket_name, key_name = self._parse_path(remote_path) dest_path = os.path.join(local_path, os.path.basename(remote_path)) try: connection = self._get_connection() try: if not self._bucket_check_cache(bucket_name): self._bucket = connection.get_bucket(bucket_name, validate=False) key = self._bucket.get_key(key_name) except S3ResponseError, e: if e.code in ('NoSuchBucket', 'NoSuchKey'): raise TransferError("S3 path '%s' not found" % remote_path) raise key.get_contents_to_filename(dest_path) return dest_path
def _parse_path(self, path):
    """Split a '<schema>://<host>/<path>' URL into its two components.

    Returns a (hostname, path-without-leading-slash) tuple -- the
    container/bucket name and the object/key name respectively.
    Raises TransferError when the URL scheme differs from self.schema.
    """
    parsed = urlparse.urlparse(path)
    if parsed.scheme == self.schema:
        return parsed.hostname, parsed.path[1:]
    raise TransferError('Wrong schema')