Example #1
    def download():
        with open(path, 'wb') as decomp_out:
            key = _uri_to_key(access_key, secret_key, url)
            pipeline = get_download_pipeline(PIPE, decomp_out, decrypt)
            g = gevent.spawn(write_and_return_error, key, pipeline.stdin)

            try:
                # Raise any exceptions from write_and_return_error
                exc = g.get()
                if exc is not None:
                    raise exc
            except boto.exception.S3ResponseError as e:
                if e.status == 404:
                    # Do not retry if the key not present, this can happen
                    # under normal situations.
                    logger.warning(
                        msg=('could no longer locate object while performing '
                             'wal restore'),
                        detail=('The absolute URI that could not be located '
                                'is {url}.'.format(url=url)),
                        hint=('This can be normal when Postgres is trying to '
                              'detect what timelines are available during '
                              'restoration.'))
                    return False
                else:
                    raise

            pipeline.finish()

            logger.info(
                msg='completed download and decompression',
                detail='Downloaded and decompressed "{url}" to "{path}"'
                .format(url=url, path=path))
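
Most examples on this page hand the actual object transfer to a gevent greenlet: they spawn a helper such as write_and_return_error with the storage object and the pipeline's stdin, then collect the result with g.get(). With write_and_return_error the helper returns any exception instead of raising it, so the caller re-raises explicitly; with the older write_and_close_thread variant the exception propagates straight out of g.get(). The helper itself is not shown on this page; a minimal sketch of the write_and_return_error pattern, assuming a boto-style key with get_contents_to_file, might look like this:

def write_and_return_error(key, stream):
    # Hypothetical sketch: stream the object into the pipeline's stdin
    # and return (rather than raise) any exception so the spawning code
    # decides how to handle it.
    try:
        key.get_contents_to_file(stream)
        stream.flush()
    except Exception as e:
        return e
    finally:
        stream.close()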
Example #2
def test_upload_download_pipeline(tmpdir, rate_limit):
    payload, payload_file = create_bogus_payload(tmpdir)

    # Upload section
    test_upload = tmpdir.join('upload')
    with open(str(test_upload), 'wb') as upload:
        with open(str(payload_file), 'rb') as inp:
            with pipeline.get_upload_pipeline(
                    inp, upload, rate_limit=rate_limit):
                pass

    with open(str(test_upload), 'rb') as completed:
        round_trip = completed.read()

    # Download section
    test_download = tmpdir.join('download')
    with open(str(test_upload), 'rb') as upload:
        with open(str(test_download), 'wb') as download:
            with pipeline.get_download_pipeline(upload, download):
                pass

    with open(str(test_download), 'rb') as completed:
        round_trip = completed.read()

    assert round_trip == payload
Example #3
    def download():
        with open(path, 'wb') as decomp_out:
            suri = s3_uri_wrap(s3_url)
            bucket = suri.get_bucket()
            key = bucket.get_key(suri.object_name)

            if key is None:
                logger.info(
                    msg='could not locate object while performing wal restore',
                    detail=('The absolute URI that could not be located '
                            'is {url}.'.format(url=s3_url)),
                    hint=('This can be normal when Postgres is trying to '
                          'detect what timelines are available during '
                          'restoration.'))
                return False

            pipeline = get_download_pipeline(PIPE, decomp_out, decrypt)
            g = gevent.spawn(write_and_close_thread, key, pipeline.stdin)

            # Raise any exceptions from _write_and_close
            g.get()

            pipeline.finish()

            logger.info(
                msg='completed download and decompression',
                detail='Downloaded and decompressed "{s3_url}" to "{path}"'
                .format(s3_url=s3_url, path=path))
        return True
Example #4
    def download():
        with open(path, 'wb') as decomp_out:
            with get_download_pipeline(PIPE, decomp_out, decrypt) as pl:

                conn = calling_format.connect(creds)

                g = gevent.spawn(write_and_return_error, uri, conn, pl.stdin)

                # Raise any exceptions from write_and_return_error
                try:
                    exc = g.get()
                    if exc is not None:
                        raise exc
                except ClientException as e:
                    if e.http_status == 404:
                        # Do not retry if the key not present, this
                        # can happen under normal situations.
                        pl.abort()
                        logger.warning(
                            msg=('could no longer locate object while '
                                 'performing wal restore'),
                            detail=('The absolute URI that could not be '
                                    'located is {uri}.'.format(uri=uri)),
                            hint=('This can be normal when Postgres is trying '
                                  'to detect what timelines are available '
                                  'during restoration.'))
                        return False
                    else:
                        raise

            logger.info(
                msg='completed download and decompression',
                detail='Downloaded and decompressed "{uri}" to "{path}"'
                .format(uri=uri, path=path))
        return True
Example #5
    def fetch_partition(self, partition_name):
        part_abs_name = self.layout.basebackup_tar_partition(
            self.backup_info, partition_name)

        logger.info(
            msg='beginning partition download',
            detail='The partition being downloaded is {0}.'
            .format(partition_name),
            hint='The absolute S3 key is {0}.'.format(part_abs_name))

        key = self.bucket.get_key(part_abs_name)
        pipeline = get_download_pipeline(PIPE, PIPE, self.decrypt)
        g = gevent.spawn(write_and_close_thread, key, pipeline.stdin)
        tar = tarfile.open(mode='r|', fileobj=pipeline.stdout)

        # TODO: replace with per-member file handling,
        # extractall very much warned against in the docs, and
        # seems to have changed between Python 2.6 and Python
        # 2.7.
        tar.extractall(self.local_root)
        tar.close()

        # Raise any exceptions from self._write_and_close
        g.get()

        pipeline.finish()
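
The TODO above warns against tarfile.extractall for streamed archives and suggests per-member handling. A rough sketch of that alternative (an illustration, not the project's actual fix) extracts one member at a time from the streaming pipeline:

import tarfile

def extract_members(fileobj, local_root):
    # Iterate the streaming archive and extract each member in order
    # instead of calling extractall().
    tar = tarfile.open(mode='r|', fileobj=fileobj)
    try:
        for member in tar:
            tar.extract(member, path=local_root)
    finally:
        tar.close()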
Example #6
    def download():
        with open(path, 'wb') as decomp_out:
            key = uri_to_key(aws_access_key_id, aws_secret_access_key, s3_url)

            pipeline = get_download_pipeline(PIPE, decomp_out, decrypt)
            g = gevent.spawn(write_and_close_thread, key, pipeline.stdin)

            try:
                # Raise any exceptions from _write_and_close
                g.get()
            except boto.exception.S3ResponseError as e:
                if e.status == 404:
                    # Do not retry if the key not present, this can happen
                    # under normal situations.
                    logger.info(
                        msg=('could not locate object while performing wal '
                             'restore'),
                        detail=('The absolute URI that could not be located '
                                'is {url}.'.format(url=s3_url)),
                        hint=('This can be normal when Postgres is trying to '
                              'detect what timelines are available during '
                              'restoration.'))

                    return False
                else:
                    raise

            pipeline.finish()

            logger.info(
                msg='completed download and decompression',
                detail='Downloaded and decompressed "{s3_url}" to "{path}"'
                .format(s3_url=s3_url, path=path))
Example #7
    def download():
        with files.DeleteOnError(path) as decomp_out:
            key = _uri_to_key(creds, url)
            with get_download_pipeline(PIPE, decomp_out.f, decrypt) as pl:
                g = gevent.spawn(write_and_return_error, key, pl.stdin)

                try:
                    # Raise any exceptions from write_and_return_error
                    exc = g.get()
                    if exc is not None:
                        raise exc
                except boto.exception.S3ResponseError as e:
                    if e.status == 404:
                        # Do not retry if the key not present, this
                        # can happen under normal situations.
                        pl.abort()
                        logger.info(
                            msg=('could no longer locate object while '
                                 'performing wal restore'),
                            detail=('The absolute URI that could not be '
                                    'located is {url}.'.format(url=url)),
                            hint=('This can be normal when Postgres is trying '
                                  'to detect what timelines are available '
                                  'during restoration.'))
                        decomp_out.remove_regardless = True
                        return False
                    else:
                        raise

            logger.info(
                msg='completed download and decompression',
                detail='Downloaded and decompressed "{url}" to "{path}"'
                .format(url=url, path=path))
        return True
Example #8
    def download():
        with open(path, 'wb') as decomp_out:
            pipeline = get_download_pipeline(PIPE, decomp_out, decrypt)
            g = gevent.spawn(write_and_return_error, url, conn, pipeline.stdin)

            try:
                # Raise any exceptions from _write_and_close
                g.get()
            except WindowsAzureMissingResourceError:
                # Short circuit any re-try attempts under certain race
                # conditions.
                logger.warning(
                    msg=('could no longer locate object while performing '
                         'wal restore'),
                    detail=('The URI at {url} no longer exists.'
                            .format(url=url)),
                    hint=('This can be normal when Postgres is trying to '
                          'detect what timelines are available during '
                          'restoration.'))
                return False

            pipeline.finish()

            logger.info(
                msg='completed download and decompression',
                detail='Downloaded and decompressed "{url}" to "{path}"'
                .format(url=url, path=path))
        return True
Example #9
    def download():
        with open(path, 'wb') as decomp_out:
            pipeline = get_download_pipeline(PIPE, decomp_out, decrypt)
            g = gevent.spawn(write_and_return_error, url, conn, pipeline.stdin)

            try:
                # Raise any exceptions guarded by
                # write_and_return_error.
                g.get()
            except WindowsAzureMissingResourceError:
                # Short circuit any re-try attempts under certain race
                # conditions.
                logger.warning(
                    msg=('could no longer locate object while performing '
                         'wal restore'),
                    detail=('The URI at {url} no longer exists.'.format(
                        url=url)),
                    hint=('This can be normal when Postgres is trying to '
                          'detect what timelines are available during '
                          'restoration.'))
                return False

            pipeline.finish()

            logger.info(
                msg='completed download and decompression',
                detail='Downloaded and decompressed "{url}" to "{path}"'.
                format(url=url, path=path))
        return True
Example #10
    def download():
        with open(path, 'wb') as decomp_out:
            key = _uri_to_key(creds, url)
            with get_download_pipeline(PIPE, decomp_out, decrypt) as pl:
                g = gevent.spawn(write_and_return_error, key, pl.stdin)

                try:
                    # Raise any exceptions from write_and_return_error
                    exc = g.get()
                    if exc is not None:
                        raise exc
                except boto.exception.S3ResponseError as e:
                    if e.status == 404:
                        # Do not retry if the key not present, this
                        # can happen under normal situations.
                        pl.abort()
                        logger.warning(
                            msg=('could no longer locate object while '
                                 'performing wal restore'),
                            detail=('The absolute URI that could not be '
                                    'located is {url}.'.format(url=url)),
                            hint=('This can be normal when Postgres is trying '
                                  'to detect what timelines are available '
                                  'during restoration.'))
                        return False
                    else:
                        raise

            logger.info(
                msg='completed download and decompression',
                detail='Downloaded and decompressed "{url}" to "{path}"'
                .format(url=url, path=path))
Example #11
def test_upload_download_pipeline(tmpdir, rate_limit):
    payload, payload_file = create_bogus_payload(tmpdir)

    # Upload section
    test_upload = tmpdir.join('upload')
    with open(unicode(test_upload), 'w') as upload:
        with open(unicode(payload_file)) as inp:
            pl = pipeline.get_upload_pipeline(
                inp, upload, rate_limit=rate_limit)
            pl.finish()

    with open(unicode(test_upload)) as completed:
        round_trip = completed.read()

    # Download section
    test_download = tmpdir.join('download')
    with open(unicode(test_upload)) as upload:
        with open(unicode(test_download), 'w') as download:
            pl = pipeline.get_download_pipeline(upload, download)
            pl.finish()

    with open(unicode(test_download)) as completed:
        round_trip = completed.read()

    assert round_trip == payload
Example #12
def test_upload_download_pipeline(tmpdir, rate_limit):
    payload, payload_file = create_bogus_payload(tmpdir)

    # Upload section
    test_upload = tmpdir.join('upload')
    with open(unicode(test_upload), 'w') as upload:
        with open(unicode(payload_file)) as inp:
            pl = pipeline.get_upload_pipeline(inp,
                                              upload,
                                              rate_limit=rate_limit)
            pl.finish()

    with open(unicode(test_upload)) as completed:
        round_trip = completed.read()

    # Download section
    test_download = tmpdir.join('download')
    with open(unicode(test_upload)) as upload:
        with open(unicode(test_download), 'w') as download:
            pl = pipeline.get_download_pipeline(upload, download)
            pl.finish()

    with open(unicode(test_download)) as completed:
        round_trip = completed.read()

    assert round_trip == payload
Example #13
    def download():
        with files.DeleteOnError(path) as decomp_out:
            blob = _uri_to_blob(creds, url)
            with get_download_pipeline(PIPE, decomp_out.f, decrypt) as pl:
                g = gevent.spawn(write_and_return_error, blob, pl.stdin)

                try:
                    # Raise any exceptions from write_and_return_error
                    exc = g.get()
                    if exc is not None:
                        raise exc
                except NotFound as e:
                    # Do not retry if the blob not present, this
                    # can happen under normal situations.
                    pl.abort()
                    logger.warning(
                        msg=('could no longer locate object while '
                             'performing wal restore'),
                        detail=('The absolute URI that could not be '
                                'located is {url}.'.format(url=url)),
                        hint=('This can be normal when Postgres is trying '
                              'to detect what timelines are available '
                              'during restoration.'))
                    decomp_out.remove_regardless = True
                    return False

            logger.info(
                msg='completed download and decompression',
                detail='Downloaded and decompressed "{url}" to "{path}"'
                .format(url=url, path=path))
        return True
Example #14
    def download():
        with files.DeleteOnError(path) as decomp_out:
            with get_download_pipeline(PIPE, decomp_out.f, decrypt) as pl:
                g = gevent.spawn(write_and_return_error, url, conn, pl.stdin)

                try:
                    # Raise any exceptions guarded by
                    # write_and_return_error.
                    exc = g.get()
                    if exc is not None:
                        raise exc
                except WindowsAzureMissingResourceError:
                    # Short circuit any re-try attempts under certain race
                    # conditions.
                    pl.abort()
                    logger.warning(
                        msg=('could no longer locate object while '
                             'performing wal restore'),
                        detail=('The absolute URI that could not be '
                                'located is {url}.'.format(url=url)),
                        hint=('This can be normal when Postgres is trying '
                              'to detect what timelines are available '
                              'during restoration.'))
                    decomp_out.remove_regardless = True
                    return False

            logger.info(
                msg='completed download and decompression',
                detail='Downloaded and decompressed "{url}" to "{path}"'
                .format(url=url, path=path))
        return True
Example #15
def test_upload_download_pipeline(tmpdir, rate_limit):
    payload, payload_file = create_bogus_payload(tmpdir)

    # Upload section
    test_upload = tmpdir.join('upload')
    with open(str(test_upload), 'wb') as upload:
        with open(str(payload_file), 'rb') as inp:
            with pipeline.get_upload_pipeline(inp,
                                              upload,
                                              rate_limit=rate_limit):
                pass

    with open(str(test_upload), 'rb') as completed:
        round_trip = completed.read()

    # Download section
    test_download = tmpdir.join('download')
    with open(str(test_upload), 'rb') as upload:
        with open(str(test_download), 'wb') as download:
            with pipeline.get_download_pipeline(upload, download):
                pass

    with open(str(test_download), 'rb') as completed:
        round_trip = completed.read()

    assert round_trip == payload
Example #16
    def download():
        with files.DeleteOnError(path) as decomp_out:
            with get_download_pipeline(PIPE, decomp_out.f, decrypt) as pl:
                g = gevent.spawn(write_and_return_error, url, conn, pl.stdin)

                try:
                    # Raise any exceptions guarded by
                    # write_and_return_error.
                    exc = g.get()
                    if exc is not None:
                        raise exc
                except AzureMissingResourceHttpError:
                    # Short circuit any re-try attempts under certain race
                    # conditions.
                    pl.abort()
                    logger.warning(
                        msg=('could no longer locate object while '
                             'performing wal restore'),
                        detail=('The absolute URI that could not be '
                                'located is {url}.'.format(url=url)),
                        hint=('This can be normal when Postgres is trying '
                              'to detect what timelines are available '
                              'during restoration.'))
                    decomp_out.remove_regardless = True
                    return False

            logger.info(
                msg='completed download and decompression',
                detail='Downloaded and decompressed "{url}" to "{path}"'
                .format(url=url, path=path))
        return True
Example #17
    def download():
        with files.DeleteOnError(path) as decomp_out:
            key = _uri_to_key(creds, url)
            with get_download_pipeline(PIPE, decomp_out.f, decrypt) as pl:
                g = gevent.spawn(write_and_return_error, key, pl.stdin)

                try:
                    # Raise any exceptions from write_and_return_error
                    exc = g.get()
                    if exc is not None:
                        raise exc
                except boto.exception.S3ResponseError as e:
                    if e.status == 404:
                        # Do not retry if the key not present, this
                        # can happen under normal situations.
                        pl.abort()
                        logger.warning(
                            msg=('could no longer locate object while '
                                 'performing wal restore'),
                            detail=('The absolute URI that could not be '
                                    'located is {url}.'.format(url=url)),
                            hint=('This can be normal when Postgres is trying '
                                  'to detect what timelines are available '
                                  'during restoration.'))
                        decomp_out.remove_regardless = True
                        return False
                    elif e.error_code == 'ExpiredToken':
                        # Do not retry if STS token has expired.  It can never
                        # succeed in the future anyway.
                        pl.abort()
                        logger.info(
                            msg=('could no longer authenticate while '
                                 'performing wal restore'),
                            detail=('The absolute URI that could not be '
                                    'accessed is {url}.'.format(url=url)),
                            hint=('This can be normal when using STS '
                                  'credentials.'))
                        decomp_out.remove_regardless = True
                        return False
                    else:
                        logger.warning(msg='S3 response error',
                                       detail='The error is: {0}, {1}'
                                       .format(e.error_code, e.error_message))
                        raise

            logger.info(
                msg='completed download and decompression',
                detail='Downloaded and decompressed "{url}" to "{path}"'
                .format(url=url, path=path))
Example #18
    def fetch_partition(self, partition_name):
        part_abs_name = self.layout.basebackup_tar_partition(
            self.backup_info, partition_name)

        logger.info(msg='beginning partition download',
                    detail='The partition being downloaded is {0}.'.format(
                        partition_name),
                    hint='The absolute S3 key is {0}.'.format(part_abs_name))

        key = self.bucket.get_key(part_abs_name)
        with get_download_pipeline(PIPE, PIPE, self.decrypt) as pl:
            g = gevent.spawn(s3.write_and_return_error, key, pl.stdin)
            TarPartition.tarfile_extract(pl.stdout, self.local_root)

            # Raise any exceptions guarded by write_and_return_error.
            exc = g.get()
            if exc is not None:
                raise exc
Example #19
    def fetch_partition(self, partition_name):
        part_abs_name = self.layout.basebackup_tar_partition(
            self.backup_info, partition_name)

        logger.info(
            msg='beginning partition download',
            detail='The partition being downloaded is {0}.'
            .format(partition_name),
            hint='The absolute file key is {0}.'.format(part_abs_name))

        key = self.bucket.get_key(part_abs_name)
        with get_download_pipeline(PIPE, PIPE, self.decrypt) as pl:
            g = gevent.spawn(file.write_and_return_error, key, pl.stdin)
            TarPartition.tarfile_extract(pl.stdout, self.local_root)

            # Raise any exceptions guarded by write_and_return_error.
            exc = g.get()
            if exc is not None:
                raise exc
Example #20
    def fetch_partition(self, partition_name):
        part_abs_name = self.layout.basebackup_tar_partition(
            self.backup_info, partition_name)

        logger.info(msg='beginning partition download',
                    detail=('The partition being downloaded is {0}.'.format(
                        partition_name)),
                    hint='The absolute S3 key is {0}.'.format(part_abs_name))

        url = 'wabs://{ctr}/{path}'.format(ctr=self.layout.store_name(),
                                           path=part_abs_name)
        with get_download_pipeline(PIPE, PIPE, self.decrypt) as pl:
            g = gevent.spawn(wabs.write_and_return_error, url, self.wabs_conn,
                             pl.stdin)
            TarPartition.tarfile_extract(pl.stdout, self.local_root)

            # Raise any exceptions from self._write_and_close
            exc = g.get()
            if exc is not None:
                raise exc
Example #21
    def download():
        with open(path, 'wb') as decomp_out:
            pipeline = get_download_pipeline(PIPE, decomp_out, decrypt)

            conn = calling_format.connect(creds)

            g = gevent.spawn(write_and_return_error, uri, conn, pipeline.stdin)

            # Raise any exceptions from write_and_return_error
            exc = g.get()
            if exc is not None:
                raise exc

            pipeline.finish()

            logger.info(
                msg='completed download and decompression',
                detail='Downloaded and decompressed "{uri}" to "{path}"'.
                format(uri=uri, path=path))
        return True
Example #22
    def download():
        with open(path, 'wb') as decomp_out:
            pipeline = get_download_pipeline(PIPE, decomp_out, decrypt)

            conn = calling_format.connect(creds)

            g = gevent.spawn(write_and_return_error, uri, conn, pipeline.stdin)

            # Raise any exceptions from write_and_return_error
            exc = g.get()
            if exc is not None:
                raise exc

            pipeline.finish()

            logger.info(
                msg='completed download and decompression',
                detail='Downloaded and decompressed "{uri}" to "{path}"'
                .format(uri=uri, path=path))
        return True
Example #23
    def fetch_partition(self, partition_name):
        part_abs_name = self.layout.basebackup_tar_partition(
            self.backup_info, partition_name)

        logger.info(
            msg='beginning partition download',
            detail='The partition being downloaded is {0}.'.format(
                partition_name),
            hint='The absolute GCS object is {0}.'.format(part_abs_name))

        blob = self.bucket.get_blob('/' + part_abs_name)
        signed = blob.generate_signed_url(datetime.datetime.utcnow() +
                                          datetime.timedelta(minutes=10))
        with get_download_pipeline(PIPE, PIPE, self.decrypt) as pl:
            g = gevent.spawn(gs.write_and_return_error, signed, pl.stdin)
            TarPartition.tarfile_extract(pl.stdout, self.local_root)

            # Raise any exceptions guarded by write_and_return_error.
            exc = g.get()
            if exc is not None:
                raise exc
Example #24
    def fetch_partition(self, partition_name):
        part_abs_name = self.layout.basebackup_tar_partition(
            self.backup_info, partition_name)

        logger.info(
            msg='beginning partition download',
            detail=('The partition being downloaded is {0}.'
                    .format(partition_name)),
            hint='The absolute S3 key is {0}.'.format(part_abs_name))

        url = 'wabs://{ctr}/{path}'.format(ctr=self.layout.store_name(),
                                           path=part_abs_name)
        with get_download_pipeline(PIPE, PIPE, self.decrypt) as pl:
            g = gevent.spawn(wabs.write_and_return_error,
                             url, self.wabs_conn, pl.stdin)
            TarPartition.tarfile_extract(pl.stdout, self.local_root)

            # Raise any exceptions from self._write_and_close
            exc = g.get()
            if exc is not None:
                raise exc
Example #25
    def download():
        with open(path, "wb") as decomp_out:
            suri = boto.storage_uri(s3_url, validate=False)
            try:
                key = suri.get_key()
            except boto.exception.InvalidUriError:
                logger.warning(msg="file not found: %s" % suri)
                return False

            pipeline = get_download_pipeline(PIPE, decomp_out, decrypt)
            g = gevent.spawn(write_and_close_thread, key, pipeline.stdin)

            # Raise any exceptions from _write_and_close
            g.get()

            pipeline.finish()

            logger.info(
                msg="completed download and decompression",
                detail='Downloaded and decompressed "{s3_url}" to "{path}"'.format(s3_url=s3_url, path=path),
            )
        return True
Example #26
    def fetch_partition(self, partition_name):
        part_abs_name = self.layout.basebackup_tar_partition(
            self.backup_info, partition_name)

        logger.info(msg='beginning partition download',
                    detail=('The partition being downloaded is {0}.'.format(
                        partition_name)),
                    hint='The absolute Swift object name is {0}.'.format(
                        part_abs_name))

        url = 'swift://{ctr}/{path}'.format(ctr=self.layout.store_name(),
                                            path=part_abs_name)
        pipeline = get_download_pipeline(PIPE, PIPE, self.decrypt)
        g = gevent.spawn(swift.write_and_return_error, url, self.swift_conn,
                         pipeline.stdin)
        TarPartition.tarfile_extract(pipeline.stdout, self.local_root)

        # Raise any exceptions guarded by write_and_return_error.
        exc = g.get()
        if exc is not None:
            raise exc
        pipeline.finish()
Example #27
    def fetch_partition(self, partition_name):
        part_abs_name = self.layout.basebackup_tar_partition(
            self.backup_info, partition_name)

        logger.info(
            msg='beginning partition download',
            detail=('The partition being downloaded is {0}.'
                    .format(partition_name)),
            hint='The absolute Swift object name is {0}.'
            .format(part_abs_name))

        url = 'swift://{ctr}/{path}'.format(ctr=self.layout.store_name(),
                                            path=part_abs_name)
        pipeline = get_download_pipeline(PIPE, PIPE, self.decrypt)
        g = gevent.spawn(swift.write_and_return_error,
                         url, self.swift_conn, pipeline.stdin)
        TarPartition.tarfile_extract(pipeline.stdout, self.local_root)

        # Raise any exceptions guarded by write_and_return_error.
        exc = g.get()
        if exc is not None:
            raise exc
        pipeline.finish()
Example #28
    def download():
        with files.DeleteOnError(path) as decomp_out:
            blob = _uri_to_blob(creds, url)
            with get_download_pipeline(PIPE, decomp_out.f, decrypt) as pl:
                signed = blob.generate_signed_url(
                    datetime.utcnow() + timedelta(minutes=10))
                g = gevent.spawn(write_and_return_error, signed, pl.stdin)

                try:
                    # Raise any exceptions from write_and_return_error
                    exc = g.get()
                    if exc is not None:
                        raise exc
                except urllib2.HTTPError as e:
                    if e.code == 404:
                        # Do not retry if the blob not present, this
                        # can happen under normal situations.
                        pl.abort()
                        logger.warning(
                            msg=('could no longer locate object while '
                                 'performing wal restore'),
                            detail=('The absolute URI that could not be '
                                    'located is {url}.'.format(url=url)),
                            hint=('This can be normal when Postgres is trying '
                                  'to detect what timelines are available '
                                  'during restoration.'))
                        decomp_out.remove_regardless = True
                        return False

                    raise

            logger.info(
                msg='completed download and decompression',
                detail='Downloaded and decompressed "{url}" to "{path}"'
                .format(url=url, path=path))
        return True
Example #29
    def download():
        with files.DeleteOnError(path) as decomp_out:
            with get_download_pipeline(PIPE, decomp_out.f, decrypt) as pl:

                conn = calling_format.connect(creds)

                g = gevent.spawn(write_and_return_error, uri, conn, pl.stdin)

                # Raise any exceptions from write_and_return_error
                try:
                    exc = g.get()
                    if exc is not None:
                        raise exc
                except ClientException as e:
                    if e.http_status == 404:
                        # Do not retry if the key not present, this
                        # can happen under normal situations.
                        pl.abort()
                        logger.warning(
                            msg=('could no longer locate object while '
                                 'performing wal restore'),
                            detail=('The absolute URI that could not be '
                                    'located is {uri}.'.format(uri=uri)),
                            hint=('This can be normal when Postgres is trying '
                                  'to detect what timelines are available '
                                  'during restoration.'))
                        decomp_out.remove_regardless = True
                        return False
                    else:
                        raise

            logger.info(
                msg='completed download and decompression',
                detail='Downloaded and decompressed "{uri}" to "{path}"'.
                format(uri=uri, path=path))
        return True
Example #30
    def fetch_manifest(self, partition_name, wale_info_dir):
        part_abs_name = self.layout.basebackup_manifest(
            self.backup_info, partition_name)

        logger.info(
            msg='beginning manifest download',
            detail='The manifest being downloaded is {0}.'
            .format(partition_name),
            hint='The absolute S3 key is {0}.'.format(part_abs_name))

        key = self.bucket.get_key(part_abs_name)
        with get_download_pipeline(PIPE, PIPE,
                                   gpg=self.decrypt,
                                   lzop=False) as pl:
            g = gevent.spawn(s3.write_and_return_error, key, pl.stdin)
            TarPartition.manifest_extract(pl.stdout,
                                          self.local_root,
                                          partition_name,
                                          wale_info_dir)

            # Raise any exceptions guarded by write_and_return_error.
            exc = g.get()
            if exc is not None:
                raise exc
Example #31
def do_lzop_get(creds, url, path, decrypt, do_retry):
    """
    Get and decompress a URL

    This streams the content directly to lzop; the compressed version
    is never stored on disk.

    """
    assert url.endswith('.lzo'), 'Expect an lzop-compressed file'

    with files.DeleteOnError(path) as decomp_out:
        key = _uri_to_key(creds, url)
        with get_download_pipeline(PIPE, decomp_out.f, decrypt) as pl:
            g = gevent.spawn(write_and_return_error, key, pl.stdin)
            exc = g.get()
            if exc is not None:
                raise exc

        logger.info(
            msg='completed download and decompression',
            detail='Downloaded and decompressed "{url}" to "{path}"'.format(
                url=url, path=path))

    return True
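
As the docstring notes, the compressed object is streamed straight through lzop and never written to disk. A hedged usage sketch (the bucket, WAL segment, local path, and creds object below are placeholders, not values taken from this codebase):

# Hypothetical call; 'creds' stands in for whatever credentials object
# _uri_to_key expects in this module.
ok = do_lzop_get(
    creds,
    's3://example-bucket/wal_005/0000000100000000000000AB.lzo',
    '/tmp/0000000100000000000000AB',
    decrypt=False,
    do_retry=True)
assert ok is True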
Example #32
    def download():
        with open(path, 'wb') as decomp_out:
            pipeline = get_download_pipeline(PIPE, decomp_out, decrypt)

            conn = calling_format.connect(creds)

            g = gevent.spawn(write_and_return_error, uri, conn, pipeline.stdin)

            # Raise any exceptions from write_and_return_error
            try:
                exc = g.get()
                if exc is not None:
                    raise exc
            except ClientException as e:
                if e.http_status == 404:
                    # Do not retry if the key not present, this can happen
                    # under normal situations.
                    logger.warning(
                        msg=('could no longer locate object while performing '
                             'wal restore'),
                        detail=('The absolute URI that could not be located '
                                'is {url}.'.format(url=uri)),
                        hint=('This can be normal when Postgres is trying to '
                              'detect what timelines are available during '
                              'restoration.'))
                    return False
                else:
                    raise

            pipeline.finish()

            logger.info(
                msg='completed download and decompression',
                detail='Downloaded and decompressed "{uri}" to "{path}"'
                .format(uri=uri, path=path))
        return True
Example #33
def do_lzop_get(creds, url, path, decrypt, do_retry):
    """
    Get and decompress a URL

    This streams the content directly to lzop; the compressed version
    is never stored on disk.

    """
    assert url.endswith('.lzo'), 'Expect an lzop-compressed file'

    with files.DeleteOnError(path) as decomp_out:
        key = _uri_to_key(creds, url)
        with get_download_pipeline(PIPE, decomp_out.f, decrypt) as pl:
            g = gevent.spawn(write_and_return_error, key, pl.stdin)
            exc = g.get()
            if exc is not None:
                raise exc

        logger.info(
            msg='completed download and decompression',
            detail='Downloaded and decompressed "{url}" to "{path}"'
            .format(url=url, path=path))

    return True