Example #1
0
File: fs.py  Project: EnterpriseDB/barman
class PathDeletionCommand(with_metaclass(ABCMeta, object)):
    """
    Stand-alone object that will execute delete operation on a self contained path
    """
    @abstractmethod
    def delete(self):
        """
Example #2
0
class KeepManager(with_metaclass(ABCMeta, object)):
    """Abstract base class which defines the KeepManager interface"""

    # Annotation key under which the keep target is stored
    ANNOTATION_KEY = "keep"

    # Recognized keep targets (see supported_targets below)
    TARGET_FULL = "full"
    TARGET_STANDALONE = "standalone"

    # Every keep target accepted by keep_backup()
    supported_targets = (TARGET_FULL, TARGET_STANDALONE)

    @abstractmethod
    def should_keep_backup(self, backup_id):
        """
        Report whether the given backup is flagged to be kept.

        :param backup_id: ID of the backup to check
        """
        pass

    @abstractmethod
    def keep_backup(self, backup_id, target):
        """
        Flag the given backup to be kept.

        :param backup_id: ID of the backup to keep
        :param target: keep target, one of ``supported_targets``
        """
        pass

    @abstractmethod
    def get_keep_target(self, backup_id):
        """
        Return the keep target of the given backup.

        :param backup_id: ID of the backup to inspect
        """
        pass

    @abstractmethod
    def release_keep(self, backup_id):
        """
        Remove the keep flag from the given backup.

        :param backup_id: ID of the backup to release
        """
        pass
Example #3
0
class AnnotationManager(with_metaclass(ABCMeta)):
    """
    This abstract base class defines the AnnotationManager interface which provides
    methods for read, write and delete of annotations for a given backup.
    """
    @abstractmethod
    def put_annotation(self, backup_id, key, value):
        """
        Add an annotation.

        :param backup_id: ID of the backup the annotation belongs to
        :param key: name of the annotation
        :param value: value to store for the annotation
        """

    @abstractmethod
    def get_annotation(self, backup_id, key):
        """
        Get the value of an annotation.

        :param backup_id: ID of the backup the annotation belongs to
        :param key: name of the annotation to read
        """

    @abstractmethod
    def delete_annotation(self, backup_id, key):
        """
        Delete an annotation.

        :param backup_id: ID of the backup the annotation belongs to
        :param key: name of the annotation to remove
        """
Example #4
0
class ChunkedCompressor(with_metaclass(ABCMeta, object)):
    """
    Base class for all ChunkedCompressors
    """
    @abstractmethod
    def add_chunk(self, data):
        """
        Compresses the supplied data and returns all the compressed bytes.

        :param bytes data: The chunk of data to be compressed
        :return: The compressed data
        :rtype: bytes
        """

    @abstractmethod
    def decompress(self, data):
        """
Example #5
0
class RemoteStatusMixin(with_metaclass(ABCMeta, object)):
    """
    Abstract base class that implements remote status capabilities
    following the Mixin pattern.
    """

    def __init__(self, *args, **kwargs):
        """
        Base constructor (Mixin pattern)
        """
        # Cached result of fetch_remote_status();
        # None means "not fetched yet"
        self._remote_status = None
        super(RemoteStatusMixin, self).__init__(*args, **kwargs)

    @abstractmethod
    def fetch_remote_status(self):
        """
        Retrieve status information from the remote component

        The implementation of this method must not raise any exception in case
        of errors, but should set the missing values to None in the resulting
        dictionary.

        :rtype: dict[str, None|str]
        """

    def get_remote_status(self):
        """
        Return the status of the remote component, fetching it on first use
        and serving the cached copy afterwards.

        This method does not raise any exception in case of errors,
        but sets the missing values to None in the resulting dictionary.

        :rtype: dict[str, None|str]
        """
        status = self._remote_status
        if status is None:
            status = self.fetch_remote_status()
            self._remote_status = status
        return status

    def reset_remote_status(self):
        """
        Discard the cached status so the next call fetches it again
        """
        self._remote_status = None
Example #6
0
class FileManager(with_metaclass(ABCMeta)):
    """Abstract interface for basic file operations on a storage backend."""

    @abstractmethod
    def file_exist(self, file_path):
        """
        Tests if file exists
        :param file_path: File path
        :type file_path: string

        :return: True if file exists False otherwise
        :rtype: bool
        """

    @abstractmethod
    def get_file_stats(self, file_path):
        """
        Return the stats of the given file
        :param file_path: File path
        :type file_path: string

        :return: Stats of the file
        :rtype: FileStats
        """

    @abstractmethod
    def get_file_list(self, path):
        """
        List all files within a path, including subdirectories
        :param path: Path to analyze
        :type path: string
        :return: List of file path
        :rtype: list
        """

    @abstractmethod
    def get_file_content(self, file_path, file_mode="rb"):
        """
        Return the content of the given file.

        :param file_path: File path
        :type file_path: string
        :param file_mode: mode used to open the file (defaults to binary read)
        :type file_mode: string
        """

    @abstractmethod
    def save_content_to_file(self, file_path, content, file_mode="wb"):
        """
        Write the given content to the given file.

        :param file_path: File path
        :type file_path: string
        :param content: content to write
        :param file_mode: mode used to open the file (defaults to binary write)
        :type file_mode: string
        """
Example #7
0
class Compressor(with_metaclass(ABCMeta, object)):
    """
    Base class for all the compressors
    """

    MAGIC = None

    def __init__(self, config, compression, path=None):
        """
        :param config: the server configuration
        :param compression: name of the compression algorithm in use
        :param path: optional path setting; presumably a PATH override used
            to locate external compression commands — confirm with callers
        """
        self.config = config
        self.compression = compression
        self.path = path

    @classmethod
    def validate(cls, file_start):
        """
        Guess if the first bytes of a file are compatible with the compression
        implemented by this class

        :param file_start: a binary string representing the first few
            bytes of a file
        :rtype: bool
        """
        # Wrap in bool() so the method honours its documented return type:
        # the bare expression returned cls.MAGIC itself (e.g. None) whenever
        # no magic number was defined for the class.
        return bool(cls.MAGIC and file_start.startswith(cls.MAGIC))

    @abstractmethod
    def compress(self, src, dst):
        """
        Abstract Method for compression method

        Implementations read the file at ``src`` and write its compressed
        form to ``dst``.

        :param str src: source file path
        :param str dst: destination file path
        """

    @abstractmethod
    def decompress(self, src, dst):
        """
Example #8
0
class PgBaseBackupCompression(with_metaclass(ABCMeta, object)):
    """
    Represents the pg_basebackup compression options and provides functionality
    required by the backup process which depends on those options.
    """
    def __init__(self, config):
        """
        Constructor for the PgBaseBackupCompression abstract base class.

        :param barman.config.ServerConfig config: the server configuration
        """
        # Mirror the pg_basebackup compression settings of the configuration
        self.type = config.backup_compression
        self.format = config.backup_compression_format
        self.level = config.backup_compression_level
        self.location = config.backup_compression_location

    @abstractproperty
    def suffix(self):
        """The filename suffix expected for this compression"""

    def with_suffix(self, basename):
        """
        Append the suffix to the supplied basename.

        :param str basename: The basename (without compression suffix) of the
          file to be opened.
        """
        return basename + "." + self.suffix

    @abstractmethod
    @contextmanager
    def open(self, basename):
        """
        Open file at path/basename for reading.

        :param str basename: The basename (without compression suffix) of the
          file to be opened.
        """

    def validate(self, server, remote_status):
        """
        Validate pg_basebackup compression options.

        Any violation disables the server and appends an explanatory
        message to its message list.

        :param barman.server.Server server: the server for which the
          compression options should be validated.
        :param dict remote_status: the status of the pg_basebackup command
        """
        config = server.config
        if self.location == "server":
            # Server-side compression needs pg_basebackup >= 15 ...
            if remote_status["pg_basebackup_version"] < Version("15"):
                config.disabled = True
                config.msg_list.append(
                    "backup_compression_location = server requires "
                    "pg_basebackup 15 or greater")
            # ... and a PostgreSQL server >= 15 as well
            if server.postgres.server_version < 150000:
                config.disabled = True
                config.msg_list.append(
                    "backup_compression_location = server requires "
                    "PostgreSQL 15 or greater")

        # The plain backup format can only be produced when the server
        # itself performs the compression
        if self.format == "plain" and self.location != "server":
            config.disabled = True
            config.msg_list.append(
                "backup_compression_format plain is not compatible with "
                "backup_compression_location %s" % self.location)
Example #9
0
class PostgreSQL(with_metaclass(ABCMeta, RemoteStatusMixin)):
    """
    This abstract class represents a generic interface to a PostgreSQL server.
    """

    # Minimal query used by _check_connection() to probe connection health
    CHECK_QUERY = 'SELECT 1'

    def __init__(self, config, conninfo):
        """
        Abstract base class constructor for PostgreSQL interface.

        :param barman.config.ServerConfig config: the server configuration
        :param str conninfo: Connection information (aka DSN)
        :raises ConninfoException: if conninfo cannot be parsed as a DSN
        """
        super(PostgreSQL, self).__init__()
        assert conninfo
        self.config = config
        self.conninfo = conninfo
        # Lazily opened psycopg2 connection (see connect())
        self._conn = None
        # When False, _check_connection() raises PostgresConnectionError on
        # a broken connection instead of allowing a transparent reconnect
        self.allow_reconnect = True
        # Build a dictionary with connection info parameters
        # This is mainly used to speed up search in conninfo
        try:
            self.conn_parameters = self.parse_dsn(conninfo)
        except (ValueError, TypeError) as e:
            _logger.debug(e)
            raise ConninfoException('Cannot connect to postgres: "%s" '
                                    'is not a valid connection string' %
                                    conninfo)

    @staticmethod
    def parse_dsn(dsn):
        """
        Parse connection parameters from 'conninfo'

        :param str dsn: Connection information (aka DSN)
        :rtype: dict[str,str]
        """
        # TODO: this might be made more robust in the future
        return dict(x.split('=', 1) for x in dsn.split())

    @staticmethod
    def encode_dsn(parameters):
        """
        Build a connection string from a dictionary of connection
        parameters

        :param dict[str,str] parameters: Connection parameters
        :rtype: str
        """
        # TODO: this might be made more robust in the future
        # Keys are sorted so the output is deterministic
        return ' '.join(
            ["%s=%s" % (k, v) for k, v in sorted(parameters.items())])

    def get_connection_string(self, application_name=None):
        """
        Return the connection string, adding the application_name parameter
        if requested, unless already defined by user in the connection string

        :param str application_name: the application_name to add
        :return str: the connection string
        """
        conn_string = self.conninfo
        # check if the application name is already defined by user
        if application_name and 'application_name' not in self.conn_parameters:
            # Then add the it to the connection string
            conn_string += ' application_name=%s' % application_name
        return conn_string

    def connect(self):
        """
        Generic function for Postgres connection (using psycopg2)

        Reuses the current connection when it is still healthy; otherwise
        opens a new one and registers it among the live connections.

        :raises PostgresConnectionError: if the connection cannot be opened
        """

        if not self._check_connection():
            try:
                self._conn = psycopg2.connect(self.conninfo)
            # If psycopg2 fails to connect to the host,
            # raise the appropriate exception
            except psycopg2.DatabaseError as e:
                raise PostgresConnectionError(str(e).strip())
            # Register the connection to the list of live connections
            _live_connections.append(self)
        return self._conn

    def _check_connection(self):
        """
        Return false if the connection is broken

        :rtype: bool
        :raises PostgresConnectionError: if the connection is broken and
            allow_reconnect is False
        """
        # If the connection is not present return False
        if not self._conn:
            return False

        # Check if the connection works by running 'SELECT 1'
        cursor = None
        try:
            cursor = self._conn.cursor()
            cursor.execute(self.CHECK_QUERY)
        except psycopg2.DatabaseError:
            # Connection is broken, so we need to reconnect
            self.close()
            # Raise an error if reconnect is not allowed
            if not self.allow_reconnect:
                raise PostgresConnectionError(
                    "Connection lost, reconnection not allowed")
            return False
        finally:
            # Always release the probe cursor, even when the check fails
            if cursor:
                cursor.close()

        return True

    def close(self):
        """
        Close the connection to PostgreSQL

        Rolls back any open transaction before closing, then unregisters
        the connection from the live connections list.
        """
        if self._conn:
            # If the connection is still alive, rollback and close it
            if not self._conn.closed:
                if self._conn.status == STATUS_IN_TRANSACTION:
                    self._conn.rollback()
                self._conn.close()
            # Remove the connection from the live connections list
            self._conn = None
            _live_connections.remove(self)

    def _cursor(self, *args, **kwargs):
        """
        Return a cursor

        Arguments are forwarded verbatim to psycopg2's connection.cursor().
        """
        conn = self.connect()
        return conn.cursor(*args, **kwargs)

    @property
    def server_version(self):
        """
        Version of PostgreSQL (returned by psycopg2)
        """
        conn = self.connect()
        return conn.server_version

    @property
    def server_txt_version(self):
        """
        Human readable version of PostgreSQL (calculated from server_version)

        :rtype: str|None
        """
        try:
            conn = self.connect()
            # server_version is an integer encoded as major*10000 +
            # minor*100 + patch
            major = int(conn.server_version / 10000)
            minor = int(conn.server_version / 100 % 100)
            patch = int(conn.server_version % 100)
            if major < 10:
                # Versions before 10 use a three-part version number
                return "%d.%d.%d" % (major, minor, patch)
            if minor != 0:
                _logger.warning("Unexpected non zero minor version %s in %s",
                                minor, conn.server_version)
            # From version 10 onwards only major and patch are meaningful
            return "%d.%d" % (major, patch)
        except PostgresConnectionError as e:
            _logger.debug("Error retrieving PostgreSQL version: %s",
                          str(e).strip())
            return None

    @property
    def server_major_version(self):
        """
        PostgreSQL major version (calculated from server_txt_version)

        :rtype: str|None
        """
        result = self.server_txt_version
        if result is not None:
            return simplify_version(result)
        return None
Example #10
0
class WalArchiver(with_metaclass(ABCMeta, RemoteStatusMixin)):
    """
    Base class for WAL archiver objects
    """
    def __init__(self, backup_manager, name):
        """
        Base class init method.

        :param backup_manager: The backup manager
        :param name: The name of this archiver
        :return:
        """
        self.backup_manager = backup_manager
        # Convenience shortcuts to the owning server and its configuration
        self.server = backup_manager.server
        self.config = backup_manager.config
        self.name = name
        super(WalArchiver, self).__init__()

    def receive_wal(self, reset=False):
        """
        Manage reception of WAL files. Does nothing by default.
        Some archiver classes, like the StreamingWalArchiver, have a full
        implementation.

        :param bool reset: When set, resets the status of receive-wal
        :raise ArchiverFailure: when something goes wrong
        """

    def archive(self, fxlogdb, verbose=True):
        """
        Archive WAL files, discarding duplicates or those that are not valid.

        :param file fxlogdb: File object for xlogdb interactions
        :param boolean verbose: Flag for verbose output
        """
        # Compressor configured for the server (used by archive_wal)
        compressor = self.backup_manager.compression_manager.get_compressor()
        # Timestamp used to build unique names for files moved to the
        # errors directory
        stamp = datetime.datetime.utcnow().strftime('%Y%m%dT%H%M%SZ')
        processed = 0
        header = "Processing xlog segments from %s for %s" % (self.name,
                                                              self.config.name)

        # Get the next batch of WAL files to be processed
        batch = self.get_next_batch()

        # Analyse the batch and properly log the information
        if batch.size:
            if batch.size > batch.run_size:
                # Batch mode enabled: only run_size segments are archived
                # in this run
                _logger.info(
                    "Found %s xlog segments from %s for %s."
                    " Archive a batch of %s segments in this run.", batch.size,
                    self.name, self.config.name, batch.run_size)
                header += " (batch size: %s)" % batch.run_size
            else:
                # Single run mode (traditional)
                _logger.info(
                    "Found %s xlog segments from %s for %s."
                    " Archive all segments in one run.", batch.size, self.name,
                    self.config.name)
        else:
            _logger.info("No xlog segments found from %s for %s.", self.name,
                         self.config.name)

        # Print the header (verbose mode)
        if verbose:
            output.info(header, log=False)

        # Loop through all available WAL files
        for wal_info in batch:
            # Print the header (non verbose mode)
            if not processed and not verbose:
                output.info(header, log=False)

            # Exit when archive batch size is reached
            # NOTE(review): the log message passes batch.batch_size while the
            # exit condition checks batch.run_size — confirm which attribute
            # was intended here
            if processed >= batch.run_size:
                _logger.debug(
                    "Batch size reached (%s) - "
                    "Exit %s process for %s", batch.batch_size, self.name,
                    self.config.name)
                break

            processed += 1

            # Report to the user the WAL file we are archiving
            output.info("\t%s", wal_info.name, log=False)
            _logger.info("Archiving segment %s of %s from %s: %s/%s",
                         processed, batch.run_size, self.name,
                         self.config.name, wal_info.name)
            # Archive the WAL file
            try:
                self.archive_wal(compressor, wal_info)
            except MatchingDuplicateWalFile:
                # We already have this file. Simply unlink the file.
                os.unlink(wal_info.orig_filename)
                continue
            except DuplicateWalFile:
                # Same name but different content: quarantine the incoming
                # file in the errors directory for manual inspection
                output.info(
                    "\tError: %s is already present in server %s. "
                    "File moved to errors directory.", wal_info.name,
                    self.config.name)
                error_dst = os.path.join(
                    self.config.errors_directory,
                    "%s.%s.duplicate" % (wal_info.name, stamp))
                # TODO: cover corner case of duplication (unlikely,
                # but theoretically possible)
                shutil.move(wal_info.orig_filename, error_dst)
                continue
            except AbortedRetryHookScript as e:
                # A pre-archive retry hook requested a stop: abort the whole
                # archiving run, not just this segment
                _logger.warning("Archiving of %s/%s aborted by "
                                "pre_archive_retry_script."
                                "Reason: %s" %
                                (self.config.name, wal_info.name, e))
                return
            # Updates the information of the WAL archive with
            # the latest segments
            fxlogdb.write(wal_info.to_xlogdb_line())
            # flush and fsync for every line
            fxlogdb.flush()
            os.fsync(fxlogdb.fileno())

        if processed:
            _logger.debug("Archived %s out of %s xlog segments from %s for %s",
                          processed, batch.size, self.name, self.config.name)
        elif verbose:
            output.info("\tno file found", log=False)

        if batch.errors:
            output.info(
                "Some unknown objects have been found while "
                "processing xlog segments for %s. "
                "Objects moved to errors directory:",
                self.config.name,
                log=False)
            # Log unexpected files
            _logger.warning(
                "Archiver is about to move %s unexpected file(s) "
                "to errors directory for %s from %s", len(batch.errors),
                self.config.name, self.name)
            for error in batch.errors:
                basename = os.path.basename(error)
                output.info("\t%s", basename, log=False)
                # Print informative log line.
                _logger.warning("Moving unexpected file for %s from %s: %s",
                                self.config.name, self.name, basename)
                error_dst = os.path.join(self.config.errors_directory,
                                         "%s.%s.unknown" % (basename, stamp))
                try:
                    shutil.move(error, error_dst)
                except IOError as e:
                    # Tolerate files that disappeared between listing and move
                    if e.errno == errno.ENOENT:
                        _logger.warning('%s not found' % error)

    def archive_wal(self, compressor, wal_info):
        """
        Archive a WAL segment and update the wal_info object

        Runs the pre-archive hook scripts, detects duplicates, moves the
        segment into the archive (compressing it if required), fsyncs the
        result, and always runs the post-archive hook scripts.

        :param compressor: the compressor for the file (if any)
        :param WalFileInfo wal_info: the WAL file is being processed
        :raises MatchingDuplicateWalFile: if the destination already holds an
            identical file
        :raises DuplicateWalFile: if the destination holds a different file
            with the same name
        """

        src_file = wal_info.orig_filename
        src_dir = os.path.dirname(src_file)
        dst_file = wal_info.fullpath(self.server)
        # Temporary name used so the final rename into place is atomic
        tmp_file = dst_file + '.tmp'
        dst_dir = os.path.dirname(dst_file)

        # Exception (if any) to hand over to the post-archive hook scripts
        error = None
        try:
            # Run the pre_archive_script if present.
            script = HookScriptRunner(self.backup_manager, 'archive_script',
                                      'pre')
            script.env_from_wal_info(wal_info, src_file)
            script.run()

            # Run the pre_archive_retry_script if present.
            retry_script = RetryHookScriptRunner(self.backup_manager,
                                                 'archive_retry_script', 'pre')
            retry_script.env_from_wal_info(wal_info, src_file)
            retry_script.run()

            # Check if destination already exists
            if os.path.exists(dst_file):
                # Compare the two files content-wise, decompressing either
                # side first when needed
                src_uncompressed = src_file
                dst_uncompressed = dst_file
                dst_info = WalFileInfo.from_file(dst_file)
                try:
                    comp_manager = self.backup_manager.compression_manager
                    if dst_info.compression is not None:
                        dst_uncompressed = dst_file + '.uncompressed'
                        comp_manager.get_compressor(
                            compression=dst_info.compression).decompress(
                                dst_file, dst_uncompressed)
                    if wal_info.compression:
                        src_uncompressed = src_file + '.uncompressed'
                        comp_manager.get_compressor(
                            compression=wal_info.compression).decompress(
                                src_file, src_uncompressed)
                    # Directly compare files.
                    # When the files are identical
                    # raise a MatchingDuplicateWalFile exception,
                    # otherwise raise a DuplicateWalFile exception.
                    if filecmp.cmp(dst_uncompressed, src_uncompressed):
                        raise MatchingDuplicateWalFile(wal_info)
                    else:
                        raise DuplicateWalFile(wal_info)
                finally:
                    # Always remove the temporary uncompressed copies
                    if src_uncompressed != src_file:
                        os.unlink(src_uncompressed)
                    if dst_uncompressed != dst_file:
                        os.unlink(dst_uncompressed)

            mkpath(dst_dir)
            # Compress the file only if not already compressed
            if compressor and not wal_info.compression:
                compressor.compress(src_file, tmp_file)
                # Preserve the original file metadata on the compressed copy
                shutil.copystat(src_file, tmp_file)
                os.rename(tmp_file, dst_file)
                os.unlink(src_file)
                # Update wal_info
                stat = os.stat(dst_file)
                wal_info.size = stat.st_size
                wal_info.compression = compressor.compression
            else:
                # Try to atomically rename the file. If successful,
                # the renaming will be an atomic operation
                # (this is a POSIX requirement).
                try:
                    os.rename(src_file, dst_file)
                except OSError:
                    # Source and destination are probably on different
                    # filesystems
                    shutil.copy2(src_file, tmp_file)
                    os.rename(tmp_file, dst_file)
                    os.unlink(src_file)
            # At this point the original file has been removed
            wal_info.orig_filename = None

            # Execute fsync() on the archived WAL file
            file_fd = os.open(dst_file, os.O_RDONLY)
            os.fsync(file_fd)
            os.close(file_fd)
            # Execute fsync() on the archived WAL containing directory
            fsync_dir(dst_dir)
            # Execute fsync() also on the incoming directory
            fsync_dir(src_dir)
        except Exception as e:
            # In case of failure save the exception for the post scripts
            error = e
            raise

        # Ensure the execution of the post_archive_retry_script and
        # the post_archive_script
        finally:
            # Run the post_archive_retry_script if present.
            # NOTE(review): the post hook runners below are constructed with
            # self while the pre hook runners above receive
            # self.backup_manager — confirm this asymmetry is intended
            try:
                retry_script = RetryHookScriptRunner(self,
                                                     'archive_retry_script',
                                                     'post')
                retry_script.env_from_wal_info(wal_info, dst_file, error)
                retry_script.run()
            except AbortedRetryHookScript as e:
                # Ignore the ABORT_STOP as it is a post-hook operation
                _logger.warning(
                    "Ignoring stop request after receiving "
                    "abort (exit code %d) from post-archive "
                    "retry hook script: %s", e.hook.exit_status, e.hook.script)

            # Run the post_archive_script if present.
            script = HookScriptRunner(self, 'archive_script', 'post', error)
            script.env_from_wal_info(wal_info, dst_file)
            script.run()

    @abstractmethod
    def get_next_batch(self):
        """
        Return a WalArchiverQueue containing the WAL files to be archived.

        :rtype: WalArchiverQueue
        """

    @abstractmethod
    def check(self, check_strategy):
        """
        Perform specific checks for the archiver - invoked
        by server.check_postgres

        :param CheckStrategy check_strategy: the strategy for the management
             of the results of the various checks
        """

    @abstractmethod
    def status(self):
        """
        Set additional status info - invoked by Server.status()
        """

    @staticmethod
    def summarise_error_files(error_files):
        """
        Summarise a error files list

        :param list[str] error_files: Error files list to summarise
        :return str: A summary, None if there are no error files
        """

        if not error_files:
            return None

        # The default value for this dictionary will be 0
        counters = collections.defaultdict(int)

        # Count the file types, classified by filename suffix
        for name in error_files:
            if name.endswith(".error"):
                counters['not relevant'] += 1
            elif name.endswith(".duplicate"):
                counters['duplicates'] += 1
            elif name.endswith(".unknown"):
                counters['unknown'] += 1
            else:
                counters['unknown failure'] += 1

        # Return a summary list of the form: "item a: 2, item b: 5"
        return ', '.join("%s: %s" % entry for entry in counters.items())
Example #11
0
class RetentionPolicy(with_metaclass(ABCMeta, object)):
    """Abstract base class for retention policies"""
    def __init__(self, mode, unit, value, context, server):
        """
        Constructor of the retention policy base class

        :param mode: retention policy mode
        :param unit: unit of measure of the policy value
        :param value: policy value (converted to int)
        :param context: scope of the policy, either 'BASE' or 'WAL'
        :param server: the server the policy applies to
        """
        self.mode = mode
        self.unit = unit
        self.value = int(value)
        self.context = context
        self.server = server
        # Lazily populated as a side effect of report() through the
        # _backup_report/_wal_report implementations
        self._first_backup = None
        self._first_wal = None

    def report(self, source=None, context=None):
        """
        Report obsolete/valid objects according to the retention policy

        :param source: optional list of backups, overriding the server's
            available backups
        :param context: optional context ('BASE' or 'WAL') overriding the
            policy's own context
        :raises ValueError: if the context is neither 'BASE' nor 'WAL'
        """
        if context is None:
            context = self.context
        # Overrides the list of available backups
        if source is None:
            source = self.server.get_available_backups(
                BackupInfo.STATUS_NOT_EMPTY)
        if context == 'BASE':
            return self._backup_report(source)
        elif context == 'WAL':
            return self._wal_report()
        else:
            # Use %-formatting here: the original passed context as a second
            # exception argument, leaving the placeholder unexpanded
            raise ValueError('Invalid context %s' % context)

    def backup_status(self, backup_id):
        """
        Report the status of a backup according to the retention policy

        :param backup_id: ID of the backup to classify
        """
        source = self.server.get_available_backups(BackupInfo.STATUS_NOT_EMPTY)
        if self.context == 'BASE':
            return self._backup_report(source)[backup_id]
        else:
            # Retention policies on a WAL context cannot classify backups
            return BackupInfo.NONE

    def first_backup(self):
        """Returns the first valid backup according to retention policies"""
        if not self._first_backup:
            # _backup_report() populates _first_backup as a side effect
            self.report(context='BASE')
        return self._first_backup

    def first_wal(self):
        """Returns the first valid WAL according to retention policies"""
        if not self._first_wal:
            # _wal_report() populates _first_wal as a side effect
            self.report(context='WAL')
        return self._first_wal

    @abstractmethod
    def __str__(self):
        """String representation"""
        pass

    @abstractmethod
    def debug(self):
        """Debug information"""
        pass

    @abstractmethod
    def _backup_report(self, source):
        """Report obsolete/valid backups according to the retention policy"""
        pass

    @abstractmethod
    def _wal_report(self):
        """Report obsolete/valid WALs according to the retention policy"""
        pass

    @classmethod
    def create(cls, server, option, value):
        """
        If given option and value from the configuration file match,
        creates the retention policy object for the given server

        :raises NotImplementedError: always, unless overridden by a subclass
        """
        # using @abstractclassmethod from python3 would be better here
        # Use %-formatting: passing cls.__name__ as a second exception
        # argument would leave the message placeholder unexpanded
        raise NotImplementedError(
            'The class %s must override the create() class method' %
            cls.__name__)

    def to_json(self):
        """
        Output representation of the obj for JSON serialization
        """
        return "%s %s %s" % (self.mode, self.value, self.unit)
Example #12
0
class PostgreSQL(with_metaclass(ABCMeta, RemoteStatusMixin)):
    """
    This abstract class represents a generic interface to a PostgreSQL server.
    """

    def __init__(self, config, conninfo):
        """
        Abstract base class constructor for PostgreSQL interface.

        :param barman.config.ServerConfig config: the server configuration
        :param str conninfo: Connection information (aka DSN)
        :raises ConninfoException: if conninfo cannot be parsed as a DSN
        """
        super(PostgreSQL, self).__init__()
        assert conninfo
        self.config = config
        self.conninfo = conninfo
        # Lazily opened psycopg2 connection (see connect())
        self._conn = None
        # Build a dictionary with connection info parameters
        # This is mainly used to speed up search in conninfo
        try:
            self.conn_parameters = self.parse_dsn(conninfo)
        except (ValueError, TypeError) as e:
            _logger.debug(e)
            raise ConninfoException('Cannot connect to postgres: "%s" '
                                    'is not a valid connection string' %
                                    conninfo)

    @staticmethod
    def parse_dsn(dsn):
        """
        Parse connection parameters from 'conninfo'

        :param str dsn: Connection information (aka DSN)
        :rtype: dict[str,str]
        """
        # TODO: this might be made more robust in the future
        # Split on any whitespace run: split(' ') produced empty tokens on
        # repeated or leading spaces, which broke the dict construction
        return dict(x.split('=', 1) for x in dsn.split())

    def connect(self):
        """
        Generic function for Postgres connection (using psycopg2)

        Reuses the existing connection if present; otherwise opens a new
        one and registers it among the live connections.

        :raises PostgresConnectionError: if the connection cannot be opened
        """
        if not self._conn:
            try:
                self._conn = psycopg2.connect(self.conninfo)
            # If psycopg2 fails to connect to the host,
            # raise the appropriate exception
            except psycopg2.DatabaseError as e:
                raise PostgresConnectionError(
                    "Cannot connect to postgres: %s" % str(e).strip())
            # Register the connection to the live connections list
            _live_connections.append(self)
        return self._conn

    def close(self):
        """
        Close the connection to PostgreSQL
        """
        if self._conn:
            # Rollback and close only while the connection is still open:
            # touching status/rollback on an already-closed connection
            # raises an InterfaceError
            if not self._conn.closed:
                if self._conn.status == STATUS_IN_TRANSACTION:
                    self._conn.rollback()
                self._conn.close()
            self._conn = None
            # Remove the connection from the live connections list
            _live_connections.remove(self)

    def _cursor(self, *args, **kwargs):
        """
        Return a cursor

        Arguments are forwarded verbatim to psycopg2's connection.cursor().
        """
        conn = self.connect()
        return conn.cursor(*args, **kwargs)

    @property
    def server_version(self):
        """
        Version of PostgreSQL (returned by psycopg2)
        """
        conn = self.connect()
        return conn.server_version

    @property
    def server_txt_version(self):
        """
        Human readable version of PostgreSQL (calculated from server_version)

        :rtype: str|None
        """
        try:
            conn = self.connect()
            # server_version is an integer encoded as major*10000 +
            # minor*100 + patch
            major = int(conn.server_version / 10000)
            minor = int(conn.server_version / 100 % 100)
            patch = int(conn.server_version % 100)
            # NOTE(review): this always emits a three-part version, which
            # predates the PostgreSQL 10+ two-part numbering — confirm
            # whether callers rely on this exact format
            return "%d.%d.%d" % (major, minor, patch)
        except PostgresConnectionError as e:
            _logger.debug("Error retrieving PostgreSQL version: %s",
                          str(e).strip())
            return None

    @property
    def server_major_version(self):
        """
        PostgreSQL major version (calculated from server_txt_version)

        :rtype: str|None
        """
        result = self.server_txt_version
        if result is not None:
            return simplify_version(result)
        return None