def download():
    # Stream one remote blob through the decompress/decrypt pipeline
    # into `path`, discarding any partial output on failure.
    with files.DeleteOnError(path) as decomp_out:
        blob = _uri_to_blob(creds, url)

        with get_download_pipeline(PIPE, decomp_out.f, decrypt) as pl:
            writer = gevent.spawn(write_and_return_error, blob, pl.stdin)

            try:
                # write_and_return_error hands back any exception it
                # trapped as a value; re-raise it here so the except
                # clause below can classify it.
                failure = writer.get()
                if failure is not None:
                    raise failure
            except NotFound:
                # A missing blob is expected during restores -- Postgres
                # probes for timeline files that may not exist -- so do
                # not let the caller retry this URI.
                pl.abort()
                logger.warning(
                    msg=('could no longer locate object while '
                         'performing wal restore'),
                    detail=('The absolute URI that could not be '
                            'located is {url}.'.format(url=url)),
                    hint=('This can be normal when Postgres is trying '
                          'to detect what timelines are available '
                          'during restoration.'))
                decomp_out.remove_regardless = True
                return False

        logger.info(
            msg='completed download and decompression',
            detail='Downloaded and decompressed "{url}" to "{path}"'
            .format(url=url, path=path))
        return True
def download():
    # Pull the Azure blob at `url` through the decompression pipeline,
    # deleting the partially-written output file if anything fails.
    with files.DeleteOnError(path) as decomp_out:
        with get_download_pipeline(PIPE, decomp_out.f, decrypt) as pl:
            writer = gevent.spawn(write_and_return_error, url, conn, pl.stdin)

            try:
                # Surface any exception that write_and_return_error
                # captured while feeding the pipeline.
                failure = writer.get()
                if failure is not None:
                    raise failure
            except AzureMissingResourceHttpError:
                # Absent blobs happen in the normal course of a restore;
                # abort the pipeline and tell the caller not to retry.
                pl.abort()
                logger.warning(
                    msg=('could no longer locate object while '
                         'performing wal restore'),
                    detail=('The absolute URI that could not be '
                            'located is {url}.'.format(url=url)),
                    hint=('This can be normal when Postgres is trying '
                          'to detect what timelines are available '
                          'during restoration.'))
                decomp_out.remove_regardless = True
                return False

        logger.info(
            msg='completed download and decompression',
            detail='Downloaded and decompressed "{url}" to "{path}"'
            .format(url=url, path=path))
        return True
def test_no_error(tmpdir):
    # When the with-block exits cleanly, the file must be kept intact.
    target = str(tmpdir.join('somefile'))

    with files.DeleteOnError(target) as doe:
        doe.f.write(b'hello')

    with open(target, 'rb') as f:
        assert f.read() == b'hello'
def test_no_error(tmpdir):
    # A clean exit from DeleteOnError must leave the file on disk.
    target = unicode(tmpdir.join('somefile'))

    with files.DeleteOnError(target) as doe:
        doe.f.write('hello')

    with open(target) as f:
        assert f.read() == 'hello'
def test_explicit_deletion_without_exception(tmpdir):
    # Setting remove_regardless forces deletion even though the
    # with-block exits without raising.
    target = unicode(tmpdir.join('somefile'))

    with files.DeleteOnError(target) as doe:
        doe.f.write('hello')
        doe.remove_regardless = True

    with pytest.raises(IOError) as excinfo:
        open(target)

    assert excinfo.value.errno == errno.ENOENT
def test_clear_on_error(tmpdir):
    # An exception raised inside the block must propagate unchanged
    # and cause the partially-written file to be removed.
    target = unicode(tmpdir.join('somefile'))
    failure = StandardError('Boom')

    with pytest.raises(StandardError) as excinfo:
        with files.DeleteOnError(target) as doe:
            doe.f.write('hello')
            raise failure

    assert excinfo.value == failure

    with pytest.raises(IOError) as excinfo:
        open(target)

    assert excinfo.value.errno == errno.ENOENT
def test_clear_on_error(tmpdir):
    # An exception raised inside the block must propagate unchanged
    # and cause the partially-written file to be removed.
    target = str(tmpdir.join('somefile'))
    failure = Exception('Boom')

    with pytest.raises(Exception) as excinfo:
        with files.DeleteOnError(target) as doe:
            doe.f.write(b'hello')
            raise failure

    assert excinfo.value == failure

    with pytest.raises(IOError) as excinfo:
        open(target)

    assert excinfo.value.errno == errno.ENOENT
def download():
    """Download one S3 key through the decompress/decrypt pipeline.

    Returns True on success and False on expected, unretryable
    failures (missing key, expired STS token); any other S3 error is
    logged and re-raised so the caller's retry machinery can act.
    """
    with files.DeleteOnError(path) as decomp_out:
        key = _uri_to_key(creds, url)
        with get_download_pipeline(PIPE, decomp_out.f, decrypt) as pl:
            g = gevent.spawn(write_and_return_error, key, pl.stdin)

            try:
                # Raise any exceptions from write_and_return_error
                exc = g.get()
                if exc is not None:
                    raise exc
            # Fixed: "except E, e" is Python 2-only syntax; the "as"
            # form is accepted by both Python 2.6+ and Python 3.
            except boto.exception.S3ResponseError as e:
                if e.status == 404:
                    # Do not retry if the key not present, this
                    # can happen under normal situations.
                    pl.abort()
                    logger.warning(
                        msg=('could no longer locate object while '
                             'performing wal restore'),
                        detail=('The absolute URI that could not be '
                                'located is {url}.'.format(url=url)),
                        hint=('This can be normal when Postgres is trying '
                              'to detect what timelines are available '
                              'during restoration.'))
                    decomp_out.remove_regardless = True
                    return False
                # Fixed: was "e.value.error_code" -- "e" is already the
                # boto exception and exposes error_code directly (the
                # else-branch below reads it the same way); ".value" is
                # the pytest ExceptionInfo idiom and does not exist here.
                elif e.error_code == 'ExpiredToken':
                    # Do not retry if STS token has expired. It can never
                    # succeed in the future anyway.
                    pl.abort()
                    logger.info(
                        msg=('could no longer authenticate while '
                             'performing wal restore'),
                        detail=('The absolute URI that could not be '
                                'accessed is {url}.'.format(url=url)),
                        hint=('This can be normal when using STS '
                              'credentials.'))
                    decomp_out.remove_regardless = True
                    return False
                else:
                    logger.warning(msg='S3 response error',
                                   detail='The error is: {0}, {1}'
                                   .format(e.error_code, e.error_message))
                    raise

        logger.info(
            msg='completed download and decompression',
            detail='Downloaded and decompressed "{url}" to "{path}"'
            .format(url=url, path=path))
        # Added for consistency with the sibling download()
        # implementations, which all report success with True.
        return True
def cat_extract(tar, member, targetpath):
    """Extract a regular file member using cat for async-like I/O

    Mostly adapted from tarfile.py.

    :param tar: an open tarfile.TarFile the member belongs to.
    :param member: the TarInfo to extract; must be a regular file.
    :param targetpath: destination path for the extracted contents.
    """
    # Only regular files are supported; directories, links, and
    # devices are outside this helper's scope.
    assert member.isreg()

    # Fetch the TarInfo object for the given name and build the
    # destination pathname, replacing forward slashes to platform
    # specific separators.
    targetpath = targetpath.rstrip("/")
    targetpath = targetpath.replace("/", os.sep)

    # Create all upper directories.
    upperdirs = os.path.dirname(targetpath)
    if upperdirs and not os.path.exists(upperdirs):
        try:
            # Create directories that are not part of the archive with
            # default permissions.
            os.makedirs(upperdirs)
        except EnvironmentError as e:
            if e.errno == errno.EEXIST:
                # Ignore an error caused by the race of
                # the directory being created between the
                # check for the path and the creation.
                pass
            else:
                raise

    # Pipe the member's bytes through an external "cat" pipeline into
    # the destination; DeleteOnError removes a partial file if the
    # copy fails partway through.
    with files.DeleteOnError(targetpath) as dest:
        with pipeline.get_cat_pipeline(pipeline.PIPE, dest.f) as pl:
            fp = tar.extractfile(member)
            copyfileobj.copyfileobj(fp, pl.stdin)

    # Replicate ownership, mode, and timestamps the way
    # TarFile.extract would.  tarfile.TarFile.chown grew a third
    # positional argument (numeric_owner) in Python 3.5, hence the
    # version gate.
    if sys.version_info < (3, 5):
        tar.chown(member, targetpath)
    else:
        tar.chown(member, targetpath, False)
    tar.chmod(member, targetpath)
    tar.utime(member, targetpath)
def download():
    # Fetch the Swift object at `uri` through the decompression
    # pipeline, removing partial output if the transfer fails.
    with files.DeleteOnError(path) as decomp_out:
        with get_download_pipeline(PIPE, decomp_out.f, decrypt) as pl:
            conn = calling_format.connect(creds)
            writer = gevent.spawn(write_and_return_error, uri, conn, pl.stdin)

            try:
                # Re-raise any exception write_and_return_error trapped.
                failure = writer.get()
                if failure is not None:
                    raise failure
            except ClientException as e:
                if e.http_status != 404:
                    # Anything other than "not found" is unexpected;
                    # let the caller's retry logic see it.
                    raise
                # Do not retry if the key not present, this
                # can happen under normal situations.
                pl.abort()
                logger.warning(
                    msg=('could no longer locate object while '
                         'performing wal restore'),
                    detail=('The absolute URI that could not be '
                            'located is {uri}.'.format(uri=uri)),
                    hint=('This can be normal when Postgres is trying '
                          'to detect what timelines are available '
                          'during restoration.'))
                decomp_out.remove_regardless = True
                return False

        logger.info(
            msg='completed download and decompression',
            detail='Downloaded and decompressed "{uri}" to "{path}"'.
            format(uri=uri, path=path))
        return True
def do_lzop_get(creds, url, path, decrypt, do_retry):
    """
    Get and decompress a URL

    This streams the content directly to lzop; the compressed version
    is never stored on disk.

    """
    assert url.endswith('.lzo'), 'Expect an lzop-compressed file'

    with files.DeleteOnError(path) as decomp_out:
        key = _uri_to_key(creds, url)
        with get_download_pipeline(PIPE, decomp_out.f, decrypt) as pl:
            writer = gevent.spawn(write_and_return_error, key, pl.stdin)
            # The writer greenlet returns (rather than raises) any
            # exception it hit; propagate it from here.
            failure = writer.get()
            if failure is not None:
                raise failure

        logger.info(
            msg='completed download and decompression',
            detail='Downloaded and decompressed "{url}" to "{path}"'.format(
                url=url, path=path))
    return True
def test_no_error_if_already_deleted(tmpdir):
    # DeleteOnError.__exit__ must tolerate the file having been
    # removed out from under it inside the block.
    target = unicode(tmpdir.join('somefile'))

    with files.DeleteOnError(target) as doe:
        doe.f.write('hello')
        os.unlink(target)