def show_pg_warnings(pg_dbs=None, bkped_dbs=None, logger=None):
    '''
    Target:
        - compare two lists with databases. This function will be used to
          show which PostgreSQL databases do not have a backup in a specified
          directory and which databases have a backup but are not stored in
          PostgreSQL.
    Parameters:
        - pg_dbs: list of PostgreSQL databases.
        - bkped_dbs: list of databases which have a backup.
        - logger: a logger to show and log some messages.
    '''
    # Bug fix: the defaults used to be mutable "[]" literals shared across
    # every call; use None sentinels instead.
    pg_dbs = pg_dbs if pg_dbs is not None else []
    bkped_dbs = bkped_dbs if bkped_dbs is not None else []

    if not logger:
        logger = Logger()

    # Sets give O(1) membership tests instead of O(n) scans per database
    pg_set = set(pg_dbs)
    bkped_set = set(bkped_dbs)

    for dbname in pg_dbs:
        if dbname not in bkped_set:  # PostgreSQL database without backup
            message = Messenger.NO_BACKUP_FOR_POSTGRESQL_DB.format(
                dbname=dbname)
            logger.highlight('warning', message, 'purple', effect='bold')

    for dbname in bkped_dbs:
        # Backup of a nonexistent PostgreSQL database
        if dbname not in pg_set:
            message = Messenger.NO_POSTGRESQL_DB_FOR_BACKUP.format(
                dbname=dbname)
            logger.highlight('warning', message, 'purple', effect='bold')
# ---- Beispiel #2 (score: 0) — snippet separator ----
    def show_pg_warnings(pg_dbs=None, bkped_dbs=None, logger=None):
        '''
        Target:
            - compare two lists with databases. This function will be used to
              show which PostgreSQL databases do not have a backup in a specified
              directory and which databases have a backup but are not stored in
              PostgreSQL.
        Parameters:
            - pg_dbs: list of PostgreSQL databases.
            - bkped_dbs: list of databases which have a backup.
            - logger: a logger to show and log some messages.
        '''
        # Bug fix: the defaults used to be mutable "[]" literals shared across
        # every call; use None sentinels instead.
        pg_dbs = pg_dbs if pg_dbs is not None else []
        bkped_dbs = bkped_dbs if bkped_dbs is not None else []

        if not logger:
            logger = Logger()

        # Sets give O(1) membership tests instead of O(n) scans per database
        pg_set = set(pg_dbs)
        bkped_set = set(bkped_dbs)

        for dbname in pg_dbs:
            if dbname not in bkped_set:  # PostgreSQL database without backup
                message = Messenger.NO_BACKUP_FOR_POSTGRESQL_DB.format(
                    dbname=dbname)
                logger.highlight('warning', message, 'purple', effect='bold')

        for dbname in bkped_dbs:
            # Backup of a nonexistent PostgreSQL database
            if dbname not in pg_set:
                message = Messenger.NO_POSTGRESQL_DB_FOR_BACKUP.format(
                    dbname=dbname)
                logger.highlight('warning', message, 'purple', effect='bold')
    def get_filtered_dbnames(dbs_all, in_dbs=None, ex_dbs=None, in_regex='',
                             ex_regex='', in_priority=False, logger=None):
        '''
        Target:
            - filter a list of databases' names taking into account inclusion
              and exclusion parameters and their priority.
        Parameters:
            - dbs_all: list to filter.
            - in_dbs: list with the databases' names to include.
            - ex_dbs: list with the databases' names to exclude.
            - in_regex: regular expression which indicates the databases' names
              to include.
            - ex_regex: regular expression which indicates the databases' names
              to exclude.
            - in_priority: a flag which determinates if the inclusion
              parameters must predominate over the exclusion ones.
            - logger: a logger to show and log some messages.
        Return:
            - a filtered list (subset of "dbs_all").
        '''
        # Bug fix: the defaults used to be mutable "[]" literals shared
        # across every call; use None sentinels instead.
        in_dbs = in_dbs if in_dbs is not None else []
        ex_dbs = ex_dbs if ex_dbs is not None else []

        if not logger:
            logger = Logger()

        if in_priority:  # If inclusion is over exclusion
            # Apply exclusion first so that inclusion has the last word
            bkp_list = DbSelector.dbname_filter_exclude(dbs_all, ex_dbs,
                                                        ex_regex, logger)
            bkp_list = DbSelector.dbname_filter_include(bkp_list, in_dbs,
                                                        in_regex, logger)
        else:
            # Apply inclusion first and then exclusion
            bkp_list = DbSelector.dbname_filter_include(dbs_all, in_dbs,
                                                        in_regex, logger)
            bkp_list = DbSelector.dbname_filter_exclude(bkp_list, ex_dbs,
                                                        ex_regex, logger)

        logger.highlight('info', Messenger.SEARCHING_SELECTED_DBS, 'white')

        if not bkp_list:  # Pythonic emptiness test instead of "== []"
            logger.highlight('warning', Messenger.EMPTY_DBNAME_LIST, 'yellow',
                             effect='bold')
        else:
            for dbname in bkp_list:
                logger.info(Messenger.SELECTED_DB.format(dbname=dbname))
        return bkp_list
# ---- Beispiel #4 (score: 0) — snippet separator ----
class RestorerCluster:
    '''
    Restores a complete PostgreSQL cluster from a backup file previously
    generated by this tool (plain dump or compressed as gz, bz2 or zip).
    '''

    # An object with connection parameters to connect to PostgreSQL
    connecter = None
    logger = None  # Logger to show and log some messages
    cluster_backup = ''  # Absolute path of the backup file (of a cluster)

    def __init__(self, connecter=None, cluster_backup='', logger=None):
        '''
        Parameters:
            - connecter: object with PostgreSQL connection parameters
              (mandatory, the execution stops without it).
            - cluster_backup: absolute path of the backup file to restore
              (mandatory, the file must exist on disk).
            - logger: a logger to show and log some messages.
        '''
        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if connecter:
            self.connecter = connecter
        else:
            self.logger.stop_exe(Messenger.NO_CONNECTION_PARAMS)

        # The backup file must exist, otherwise there is nothing to restore
        if cluster_backup and os.path.isfile(cluster_backup):
            self.cluster_backup = cluster_backup
        else:
            self.logger.stop_exe(Messenger.NO_BKP_TO_RESTORE)

        # Log the resulting configuration for debugging purposes
        message = Messenger.CL_RESTORER_VARS.format(
            server=self.connecter.server, user=self.connecter.user,
            port=self.connecter.port, cluster_backup=self.cluster_backup)
        self.logger.debug(Messenger.CL_RESTORER_VARS_INTRO)
        self.logger.debug(message)

    def restore_cluster_backup(self):
        '''
        Target:
            - restore a cluster's backup in PostgreSQL. The cluster must have
              been created before this process.
        '''
        # Regular expression which must match the backup's name
        regex = re.compile(
            r'.*ht_(.+_cluster)_(\d{8}_\d{6}_.+)\.(dump|bz2|gz|zip)$')

        # Match once and reuse the match object (the original matched twice)
        match = regex.match(self.cluster_backup)
        if match:
            # Store the parts of the backup's name (servername, date, ext)
            parts = match.groups()
            # Store only the extension to know the type of file
            ext = parts[2]
        else:
            # BUG FIX: the original evaluated Messenger.NO_BACKUP_FORMAT as a
            # bare expression, discarding it and leaving "ext" unbound below.
            # Abort the execution with the proper error message instead.
            self.logger.stop_exe(Messenger.NO_BACKUP_FORMAT)

        message = Messenger.BEGINNING_CL_RESTORER.format(
            cluster_backup=self.cluster_backup)
        self.logger.highlight('info', message, 'white')
        self.logger.info(Messenger.WAIT_PLEASE)

        # Build the console command depending on the compression type.
        # TODO: make disappear every line about the operation shown in console
        if ext == 'gz':
            command = 'gunzip -c {} -k | psql postgres -U {} -h {} ' \
                      '-p {}'.format(
                          self.cluster_backup, self.connecter.user,
                          self.connecter.server, self.connecter.port)
        elif ext == 'bz2':
            command = 'bunzip2 -c {} -k | psql postgres -U {} -h {} ' \
                      '-p {}'.format(
                          self.cluster_backup, self.connecter.user,
                          self.connecter.server, self.connecter.port)
        elif ext == 'zip':
            command = 'unzip -p {} | psql postgres -U {} -h {} -p {}'.format(
                self.cluster_backup, self.connecter.user,
                self.connecter.server, self.connecter.port)
        else:  # Plain "dump" file, no decompression needed
            command = 'psql postgres -U {} -h {} -p {} < {}'.format(
                self.connecter.user, self.connecter.server,
                self.connecter.port, self.cluster_backup)

        try:
            start_time = DateTools.get_current_datetime()
            # Make the restoration of the cluster.
            # NOTE(review): shell=True with an interpolated file path is
            # injection-prone if the path is untrusted — confirm its source.
            result = subprocess.call(command, shell=True)
            end_time = DateTools.get_current_datetime()
            # Get and show the process' duration
            diff = DateTools.get_diff_datetimes(start_time, end_time)

            # A non-zero exit status means psql (or the unzipper) failed
            if result != 0:
                raise Exception()

            message = Messenger.RESTORE_CL_DONE.format(
                cluster_backup=self.cluster_backup, diff=diff)
            self.logger.highlight('info', message, 'green')

            self.logger.highlight('info', Messenger.RESTORER_DONE, 'green',
                                  effect='bold')

        except Exception as e:
            self.logger.debug('Error en la función "restore_cluster_backup": '
                              '{}.'.format(str(e)))
            message = Messenger.RESTORE_CL_FAIL.format(
                cluster_backup=self.cluster_backup)
            self.logger.stop_exe(message)
# ---- Beispiel #5 (score: 0) — snippet separator ----
class BackerCluster:
    '''
    Makes a backup of a whole PostgreSQL cluster with "pg_dumpall",
    optionally vacuuming every database beforehand and compressing the
    resulting file (gz, bz2 or zip).
    '''

    bkp_path = ''  # The path where the backups are stored
    group = ''  # The name of the subdirectory where the backups are stored
    bkp_type = ''  # The type of the backups' files
    prefix = ''  # The prefix of the backups' names
    # Flag which determinates whether the databases must be vacuumed before the
    # backup process
    vacuum = True
    # An object with connection parameters to connect to PostgreSQL
    connecter = None
    logger = None  # Logger to show and log some messages

    def __init__(self, connecter=None, bkp_path='', group='',
                 bkp_type='dump', prefix='', vacuum=True, logger=None):
        '''
        Target:
            - validate and store the parameters of the backup process.
        Parameters:
            - connecter: an object with connection parameters to connect to
              PostgreSQL (mandatory, the execution stops without it).
            - bkp_path: root directory where the backups are stored (a
              default one is created when not given).
            - group: the subdirectory's name where the backups are stored.
            - bkp_type: compression type of the backup file; must pass
              Checker.check_compress_type.
            - prefix: prefix prepended to the backup file's name.
            - vacuum: whether to vacuum the databases before the backup (a
              boolean or a string convertible to one).
            - logger: a logger to show and log some messages.
        '''
        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if connecter:
            self.connecter = connecter
        else:
            self.logger.stop_exe(Msg.NO_CONNECTION_PARAMS)

        # If backup directory is not specified, create a default one to store
        # the backups
        if bkp_path:
            self.bkp_path = bkp_path
        else:
            self.bkp_path = Default.BKP_PATH
            Dir.create_dir(self.bkp_path, self.logger)

        if group:
            self.group = group
        else:
            self.group = Default.GROUP

        # Fall back to the default type when None; any other value must be an
        # accepted compression type or the execution stops
        if bkp_type is None:
            self.bkp_type = Default.BKP_TYPE
        elif Checker.check_compress_type(bkp_type):
            self.bkp_type = bkp_type
        else:
            self.logger.stop_exe(Msg.INVALID_BKP_TYPE)

        self.prefix = prefix

        # Accept a real boolean or a string representation of one
        if isinstance(vacuum, bool):
            self.vacuum = vacuum
        elif Checker.str_is_bool(vacuum):
            self.vacuum = Casting.str_to_bool(vacuum)
        else:
            self.logger.stop_exe(Msg.INVALID_VACUUM)

        # Log the resulting configuration for debugging purposes
        msg = Msg.CL_BACKER_VARS.format(
            server=self.connecter.server, user=self.connecter.user,
            port=self.connecter.port, bkp_path=self.bkp_path, group=self.group,
            bkp_type=self.bkp_type, prefix=self.prefix, vacuum=self.vacuum)
        self.logger.debug(Msg.CL_BACKER_VARS_INTRO)
        self.logger.debug(msg)

    def backup_all(self, bkps_dir):
        '''
        Target:
            - make a backup of a cluster.
        Parameters:
            - bkps_dir: directory where the backup is going to be stored.
        Return:
            - a boolean which indicates the success of the process.
        '''
        success = True
        # Get date and time of the zone
        init_ts = DateTools.get_date()
        # Get current year
        year = str(DateTools.get_year(init_ts))
        # Get current month
        month = str(DateTools.get_month(init_ts))
        # Create new directories with the year and the month of the backup
        bkp_dir = bkps_dir + year + '/' + month + '/'
        Dir.create_dir(bkp_dir, self.logger)

        # Set backup's name
        file_name = self.prefix + 'ht_' + self.connecter.server + \
            str(self.connecter.port) + '_cluster_' + init_ts + '.' + \
            self.bkp_type

        # Store the command to do depending on the backup type.
        # NOTE(review): the commands run with shell=True and interpolate
        # paths/credentials directly — injection-prone if any of these values
        # can come from an untrusted source; confirm.
        if self.bkp_type == 'gz':  # Zip with gzip
            command = 'pg_dumpall -U {} -h {} -p {} | gzip > {}'.format(
                self.connecter.user, self.connecter.server,
                self.connecter.port, bkp_dir + file_name)
        elif self.bkp_type == 'bz2':  # Zip with bzip2
            command = 'pg_dumpall -U {} -h {} -p {} | bzip2 > {}'.format(
                self.connecter.user, self.connecter.server,
                self.connecter.port, bkp_dir + file_name)
        elif self.bkp_type == 'zip':  # Zip with zip
            command = 'pg_dumpall -U {} -h {} -p {} | zip > {}'.format(
                self.connecter.user, self.connecter.server,
                self.connecter.port, bkp_dir + file_name)
        else:  # Do not zip
            command = 'pg_dumpall -U {} -h {} -p {} > {}'.format(
                self.connecter.user, self.connecter.server,
                self.connecter.port, bkp_dir + file_name)
        try:
            # Execute the command in console
            result = subprocess.call(command, shell=True)
            # A non-zero exit status means pg_dumpall (or the zipper) failed
            if result != 0:
                raise Exception()

        except Exception as e:
            self.logger.debug('Error en la función "backup_all": {}.'.format(
                str(e)))
            success = False

        return success

    def backup_cl(self):
        '''
        Target:
            - vacuum if necessary and make a backup of a cluster.
        '''
        self.logger.highlight('info', Msg.CHECKING_BACKUP_DIR, 'white')

        # Create a new directory with the name of the group
        bkps_dir = self.bkp_path + self.group + Default.CL_BKPS_DIR
        Dir.create_dir(bkps_dir, self.logger)

        self.logger.info(Msg.DESTINY_DIR.format(path=bkps_dir))

        # Vaccum the databases before the backup process if necessary
        if self.vacuum:
            vacuumer = Vacuumer(connecter=self.connecter, logger=self.logger)
            dbs_all = vacuumer.connecter.get_pg_dbs_data(vacuumer.ex_templates,
                                                         vacuumer.db_owner)
            vacuumer.vacuum_dbs(dbs_all)

        self.logger.highlight('info', Msg.BEGINNING_CL_BACKER, 'white')

        start_time = DateTools.get_current_datetime()
        # Make the backup of the cluster
        success = self.backup_all(bkps_dir)
        end_time = DateTools.get_current_datetime()
        # Get and show the process' duration
        diff = DateTools.get_diff_datetimes(start_time, end_time)

        # Report the outcome: success in green, failure only as a warning
        # (the program keeps running)
        if success:
            msg = Msg.CL_BACKER_DONE.format(diff=diff)
            self.logger.highlight('info', msg, 'green', effect='bold')
        else:
            self.logger.highlight('warning', Msg.CL_BACKER_FAIL,
                                  'yellow', effect='bold')

        self.logger.highlight('info', Msg.BACKER_DONE, 'green',
                              effect='bold')
# ---- Beispiel #6 (score: 0) — snippet separator ----
class Mailer:
    '''
    Sends a summary email (HTML with a plain-text alternative) reporting the
    result of an executed operation through Gmail's SMTP server.
    '''

    level = 1  # Verbosity level of the email
    # NOTE: the containers below are only documented class-level defaults;
    # every instance gets its own fresh objects in __init__ (the previous
    # version mutated the shared class-level "from_info" dict in place,
    # leaking sender credentials between instances).
    from_info = {}  # Information about the sender's email account
    to_infos = []  # List with the destiny emails
    cc_infos = []  # List with the destiny emails (carbon copy)
    bcc_infos = []  # List with the destiny emails (blind carbon copy)
    server_tag = ''  # Alias of the sender's machine
    external_ip = ''  # External IP of the sender's machine
    op_type = ''  # Executed action
    group = None  # Affected group
    bkp_path = None  # Affected path of backups
    logger = None  # Logger to show and log some messages

    # Definition of constants

    # Maps the one-letter operation code to its human-readable name
    OP_TYPES = {
        'u': 'Undefined method',
        'a': 'Alterer',
        'B': 'Backer',
        'd': 'Dropper',
        'r': 'Replicator',
        'R': 'Restorer',
        'T': 'Trimmer',
        't': 'Terminator',
        'v': 'Vacuumer',
    }

    # HTML body templates, keyed by severity level (0=OK .. 3=CRITICAL)
    OP_RESULTS = {
        0: ('<h2>{op_type}: <span style="color: green;">OK</span> at '
            '"{server_tag}"</h2>Date: <span style="font-weight: bold">{date}'
            '</span><br/>Time: <span style="font-weight: bold">{time}</span>'
            '<br/>Time zone: <span style="font-weight: bold">{zone}</span>'
            '<br/>Host name: <span style="font-weight: bold">{server}</span>'
            '<br/>Netifaces IPs: <span style="font-weight: bold">'
            '{internal_ips}</span><br/>External IP: <span style="font-weight: '
            'bold">{external_ip}</span><br/>Group: <span style="font-weight: '
            'bold">{group}</span><br/>Path: <span style="font-weight: bold">'
            '{bkp_path}</span><br/><br/><br/>The process has been executed '
            'succesfully.<br/><br/>You can see its log file at the following '
            'path:<br/><br/>{log_file}.'),
        1: ('<h2>{op_type}: <span style="color: orange;">WARNING</span> at '
            '"{server_tag}"</h2>Date: <span style="font-weight: bold">{date}'
            '</span><br/>Time: <span style="font-weight: bold">{time}</span>'
            '<br/>Time zone: <span style="font-weight: bold">{zone}</span>'
            '<br/>Host name: <span style="font-weight: bold">{server}</span>'
            '<br/>Netifaces IPs: <span style="font-weight: bold">'
            '{internal_ips}</span><br/>External IP: <span style="font-weight: '
            'bold">{external_ip}</span><br/>Group: <span style="font-weight: '
            'bold">{group}</span><br/>Path: <span style="font-weight: bold">'
            '{bkp_path}</span><br/><br/><br/>There were some warnings during '
            'the process, but not critical errors. Anyway, please check it, '
            'because its behaviour is not bound to have been the expected '
            'one.<br/><br/>You can see its log file at the following path:'
            '<br/><br/>{log_file}.'),
        2: ('<h2>{op_type}: <span style="color: red;">ERROR</span> at '
            '"{server_tag}"</h2>Date: <span style="font-weight: bold">{date}'
            '</span><br/>Time: <span style="font-weight: bold">{time}</span>'
            '<br/>Time zone: <span style="font-weight: bold">{zone}</span>'
            '<br/>Host name: <span style="font-weight: bold">{server}</span>'
            '<br/>Netifaces IPs: <span style="font-weight: bold">'
            '{internal_ips}</span><br/>External IP: <span style="font-weight: '
            'bold">{external_ip}</span><br/>Group: <span style="font-weight: '
            'bold">{group}</span><br/>Path: <span style="font-weight: bold">'
            '{bkp_path}</span><br/><br/><br/>There were some errors during '
            'the process, and they prevented some operations, because the '
            'execution was truncated. Please check immediately.<br/><br/>You '
            'can see its log file at the following path:<br/><br/>'
            '{log_file}.'),
        3: ('<h2>{op_type}: <span style="color: purple;">CRITICAL</span> at '
            '"{server_tag}"</h2>Date: <span style="font-weight: bold">{date}'
            '</span><br/>Time: <span style="font-weight: bold">{time}</span>'
            '<br/>Time zone: <span style="font-weight: bold">{zone}</span>'
            '<br/>Host name: <span style="font-weight: bold">{server}</span>'
            '<br/>Netifaces IPs: <span style="font-weight: bold">'
            '{internal_ips}</span><br/>External IP: <span style="font-weight: '
            'bold">{external_ip}</span><br/>Group: <span style="font-weight: '
            'bold">{group}</span><br/>Path: <span style="font-weight: bold">'
            '{bkp_path}</span><br/><br/><br/>There were some critical errors '
            'during the process. The execution could not be carried out. '
            'Please check immediately.<br/><br/>You can see its log file at '
            'the following path:<br/><br/>{log_file}.'),
    }

    # Plain-text fallback templates, keyed by severity level.
    # BUG FIX: the headers of levels 1-3 used "{server}" while level 0 and
    # every HTML template use the machine alias '"{server_tag}"'; unified.
    # BUG FIX: the level-3 body was a copy-paste of the level-0 success text;
    # it now mirrors the CRITICAL wording of the HTML template.
    OP_RESULTS_NO_HTML = {
        0: ('{op_type}: OK at "{server_tag}"\n'
            'Date: {date}\n'
            'Time: {time}\n'
            'Time zone: {zone}\n'
            'Host name: {server}\n'
            'Netifaces IPs: {internal_ips}\n'
            'External IP: {external_ip}\n'
            'Group: {group}\n'
            'Path: {bkp_path}\n'
            'The process has been executed succesfully.\n'
            'You can see its log file at the following path:\n'
            '{log_file}.\n'),
        1: ('{op_type}: WARNING at "{server_tag}"\n'
            'Date: {date}\n'
            'Time: {time}\n'
            'Time zone: {zone}\n'
            'Host name: {server}\n'
            'Netifaces IPs: {internal_ips}\n'
            'External IP: {external_ip}\n'
            'Group: {group}\n'
            'Path: {bkp_path}\n'
            'There were some warnings during the process, but not critical\n'
            'errors. Anyway, please check it, because its behaviour is not\n'
            'bound to have been the expected one. You can see its\n'
            'log file at the following path: {log_file}.\n'),
        2: ('{op_type}: ERROR at "{server_tag}"\n'
            'Date: {date}\n'
            'Time: {time}\n'
            'Time zone: {zone}\n'
            'Host name: {server}\n'
            'Netifaces IPs: {internal_ips}\n'
            'External IP: {external_ip}\n'
            'Group: {group}\n'
            'Path: {bkp_path}\n'
            'There were some errors during the process, and they prevented\n'
            'some operations, because the execution was truncated. Please\n'
            'check immediately. You can see its log file at the\n'
            'following path: {log_file}.\n'),
        3: ('{op_type}: CRITICAL at "{server_tag}"\n'
            'Date: {date}\n'
            'Time: {time}\n'
            'Time zone: {zone}\n'
            'Host name: {server}\n'
            'Netifaces IPs: {internal_ips}\n'
            'External IP: {external_ip}\n'
            'Group: {group}\n'
            'Path: {bkp_path}\n'
            'There were some critical errors during the process. The\n'
            'execution could not be carried out. Please check immediately.\n'
            'You can see its log file at the following path:\n'
            '{log_file}.\n'),
    }

    def __init__(self, level=1, username='', email='', password='',
                 to_infos=None, cc_infos=None, bcc_infos=None, server_tag='',
                 external_ip='', op_type='', logger=None):
        '''
        Parameters:
            - level: verbosity level of the email (must belong to
              Default.MAIL_LEVELS).
            - username / email / password: the sender's account data.
            - to_infos / cc_infos / bcc_infos: recipients, each given as a
              list (or string) of "username <email>" entries.
            - server_tag: alias of the sender's machine.
            - external_ip: external IP of the sender's machine.
            - op_type: one-letter code of the executed action (see OP_TYPES).
            - logger: a logger to show and log some messages.
        '''
        # Bug fix: the recipient defaults used to be mutable "[]" literals
        # shared across every call; use None sentinels instead.
        to_infos = to_infos if to_infos is not None else []
        cc_infos = cc_infos if cc_infos is not None else []
        bcc_infos = bcc_infos if bcc_infos is not None else []

        if logger:
            self.logger = logger
        else:
            from logger.logger import Logger
            self.logger = Logger()

        if isinstance(level, int) and level in Default.MAIL_LEVELS:
            self.level = level
        elif Checker.str_is_int(level):
            # NOTE(review): a level given as a string is converted but never
            # validated against Default.MAIL_LEVELS — confirm this is wanted.
            self.level = Casting.str_to_int(level)
        else:
            self.level = Default.MAIL_LEVEL

        # Bug fix: build a fresh per-instance dict; the previous code wrote
        # into the class-level "from_info" dict shared by all instances.
        self.from_info = {
            'email': email,
            'name': username,
            'pwd': password,
        }

        if not Checker.str_is_valid_mail(email):
            message = Messenger.INVALID_FROM_MAIL.format(
                email=email)
            self.logger.highlight('warning', message, 'yellow')

        # Bug fix: "username is ''" compared object identity with a literal
        # (undefined behaviour, SyntaxWarning on modern CPython); use "==".
        if username == '':
            self.logger.highlight('warning', Messenger.INVALID_FROM_USERNAME,
                                  'yellow')

        if password == '':
            self.logger.highlight('warning', Messenger.INVALID_FROM_PASSWORD,
                                  'yellow')

        # Normalize each recipient group to a list and parse its entries
        self.to_infos = self.get_mail_infos(Casting.str_to_list(to_infos))
        self.cc_infos = self.get_mail_infos(Casting.str_to_list(cc_infos))
        self.bcc_infos = self.get_mail_infos(Casting.str_to_list(bcc_infos))

        # Unknown operation codes fall back to 'u' (Undefined method)
        if op_type in self.OP_TYPES:
            self.op_type = op_type
        else:
            self.op_type = 'u'

        self.server_tag = server_tag
        self.external_ip = external_ip

    def add_group(self, group):
        '''
        Target:
            - add a group to the information sent by the email. It will be used
              in case of "Backer" being executed.
        Parameters:
            - group: the group's name.
        '''
        self.group = group

    def add_bkp_path(self, bkp_path):
        '''
        Target:
            - add a path to the information sent by the email. It will be used
              in case of "Trimmer" being executed.
        Parameters:
            - bkp_path: the path where the involved backups are stored.
        '''
        self.bkp_path = bkp_path

    def get_mail_infos(self, mail_infos):
        '''
        Target:
            - takes a list of strings with mail data and a "username <email>"
              format, splits it into parts and gives the same data stored and
              classified in a dictionary. Invalid entries are skipped with a
              logged warning.
        Parameters:
            - mail_infos: the list of strings to be converted.
        Return:
            - a list of dictionaries with the username and the address of some
              mail accounts.
        '''
        temp_list = []

        for record in mail_infos:

            if Checker.str_is_valid_mail_info(record):

                mail_info = Casting.str_to_mail_info(record)

                if Checker.str_is_valid_mail(mail_info['email']):
                    temp_list.append(mail_info)
                else:
                    message = Messenger.INVALID_TO_MAIL.format(
                        email=mail_info['email'])
                    self.logger.highlight('warning', message, 'yellow')

            else:
                message = Messenger.INVALID_TO_MAIL_INFO.format(
                    mail_info=record)
                self.logger.highlight('warning', message, 'yellow')

        return temp_list

    def send_mail(self, detected_level):
        '''
        Target:
            - send an email to the specified email addresses.
        Parameters:
            - detected_level: severity index (0-3) selecting the OP_RESULTS /
              OP_RESULTS_NO_HTML template of the message body.
        '''
        self.logger.highlight('info', Messenger.BEGINNING_MAILER, 'white')

        # Get current date, time and time zone
        date = DateTools.get_date(fmt='%d-%m-%Y')
        time = DateTools.get_date(fmt='%H:%M:%S')
        zone = DateTools.get_date(fmt='%Z')

        # Get server name and IP addresses data
        server = IpAddress.get_hostname(self.logger)

        # Robustness fix: guard against a None return so the loop below does
        # not crash, and build the comma-separated list with a single join
        netifaces = IpAddress.get_netifaces_ips(self.logger) or []
        internal_ips = ', '.join(
            '{} > {}'.format(netiface['netiface'], netiface['ip'])
            for netiface in netifaces)

        # Email full info template, for: John Doe <*****@*****.**>
        ADDR_TMPLT = '{} <{}>'

        # Sender and recipients email addresses (needed for sending the
        # email). "info" replaces the previous loop variable "dict", which
        # shadowed the builtin.
        from_email_str = self.from_info['email']
        to_emails_list = [info['email'] for info in self.to_infos]
        cc_emails_list = [info['email'] for info in self.cc_infos]
        bcc_emails_list = [info['email'] for info in self.bcc_infos]
        all_emails_list = to_emails_list + cc_emails_list + bcc_emails_list

        # Sender and recipients full info (used in email message header)
        from_info_str = ADDR_TMPLT.format(self.from_info['name'],
                                          self.from_info['email'])
        to_infos_str = ', '.join(ADDR_TMPLT.format(
            info['name'], info['email']) for info in self.to_infos)
        cc_infos_str = ', '.join(ADDR_TMPLT.format(
            info['name'], info['email']) for info in self.cc_infos)

        # Specifying an alternative mail in case the receiver does not have a
        # mail server with HTML

        html = self.OP_RESULTS[detected_level].format(
            op_type=self.OP_TYPES[self.op_type], server_tag=self.server_tag,
            date=date, time=time, zone=zone, server=server,
            internal_ips=internal_ips, external_ip=self.external_ip,
            group=self.group, bkp_path=self.bkp_path,
            log_file=str(self.logger.log_file))

        text = self.OP_RESULTS_NO_HTML[detected_level].format(
            op_type=self.OP_TYPES[self.op_type], server_tag=self.server_tag,
            date=date, time=time, zone=zone, server=server,
            internal_ips=internal_ips, external_ip=self.external_ip,
            group=self.group, bkp_path=self.bkp_path,
            log_file=str(self.logger.log_file))

        # Specifying other email data (used in email message header)
        mail = MIMEMultipart('alternative')
        mail['From'] = from_info_str
        mail['To'] = to_infos_str
        mail['Cc'] = cc_infos_str
        mail['Subject'] = '[INFO] {op_type} results'.format(
            op_type=self.OP_TYPES[self.op_type].upper())

        # Record the MIME types of both parts - text/plain and text/html.
        part1 = MIMEText(text, 'plain')
        part2 = MIMEText(html, 'html')

        # Attach parts into message container. According to RFC 2046, the last
        # part of a multipart message, in this case the HTML message, is best
        # and preferred.
        mail.attach(part1)
        mail.attach(part2)

        msg_full = mail.as_string().encode()

        if all_emails_list:

            for email in all_emails_list:
                self.logger.info(Messenger.MAIL_DESTINY.format(email=email))

            # Sending the mail. "smtp" avoids clobbering the "server"
            # hostname computed above (the previous code reused the name).
            try:
                smtp = smtplib.SMTP('smtp.gmail.com:587')
                smtp.starttls()
                smtp.login(self.from_info['email'], self.from_info['pwd'])
                smtp.sendmail(from_email_str, all_emails_list, msg_full)
                smtp.quit()

            except smtplib.SMTPException as e:
                message = Messenger.SEND_MAIL_FAIL
                self.logger.highlight('info', message, 'yellow')
                self.logger.debug('Error en la función "send_mail": '
                                  '{}'.format(str(e)))

        else:
            message = Messenger.MAILER_HAS_NOTHING_TO_DO
            self.logger.highlight('info', message, 'yellow')

        # NOTE(review): the "done" message is shown even when sending failed
        # or there was nothing to do — confirm this is intended.
        message = Messenger.SEND_MAIL_DONE
        self.logger.highlight('info', message, 'green')
class Dropper:
    '''This class manages the removal of PostgreSQL databases through an
    already established connection.
    '''

    # An object with connection parameters to connect to PostgreSQL
    connecter = None
    logger = None  # Logger to show and log some messages
    dbnames = []  # List of databases to be removed

    def __init__(self, connecter=None, dbnames=None, logger=None):
        '''
        Parameters:
            - connecter: an object with an open connection to PostgreSQL.
              Mandatory: execution is stopped if it is missing.
            - dbnames: list (or comma-separated string) with the names of
              the databases to be removed. Defaults to an empty list.
            - logger: a logger to show and log some messages. A new one is
              created if not given.
        '''
        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if connecter:
            self.connecter = connecter
        else:
            self.logger.stop_exe(Msg.NO_CONNECTION_PARAMS)

        # "None" instead of a mutable default argument ([]) avoids sharing
        # a single list object between all the instances created without
        # specifying "dbnames"
        if dbnames is None:
            self.dbnames = []
        elif isinstance(dbnames, list):
            self.dbnames = dbnames
        else:
            # Turn a comma-separated string into a list
            self.dbnames = Casting.str_to_list(dbnames)

        msg = Msg.DROPPER_VARS.format(server=self.connecter.server,
                                      user=self.connecter.user,
                                      port=self.connecter.port,
                                      dbnames=self.dbnames)
        self.logger.debug(Msg.DROPPER_VARS_INTRO)
        self.logger.debug(msg)

    def drop_pg_db(self, dbname, pg_superuser):
        '''
        Target:
            - remove a database in PostgreSQL.
        Parameters:
            - dbname: the PostgreSQL database's name which is going to be
              removed.
            - pg_superuser: a flag which indicates whether the current user is
              PostgreSQL superuser or not.
        '''
        delete = False

        try:
            # First make sure the target database actually exists
            self.cursor_execute_exists = None  # NOTE(review): placeholder removed
            self.connecter.cursor.execute(Queries.PG_DB_EXISTS, (dbname, ))
            result = self.connecter.cursor.fetchone()

            if result:

                # The PID column is called "procpid" or "pid" depending on
                # the PostgreSQL version
                pg_pid = self.connecter.get_pid_str()
                formatted_sql = Queries.BACKEND_PG_DB_EXISTS.format(
                    pg_pid=pg_pid, target_db=dbname)

                self.connecter.cursor.execute(formatted_sql)
                result = self.connecter.cursor.fetchone()

                # If there are not any connections to the target database...
                if not result:

                    # Users who are not superusers will only be able to drop
                    # the databases they own
                    if not pg_superuser:

                        self.connecter.cursor.execute(Queries.GET_PG_DB_OWNER,
                                                      (dbname, ))
                        db = self.connecter.cursor.fetchone()

                        if db['owner'] != self.connecter.user:

                            msg = Msg.DROP_DB_NOT_ALLOWED.format(
                                user=self.connecter.user, dbname=dbname)
                            self.logger.highlight('warning', msg, 'yellow')

                        else:
                            delete = True

                    else:
                        delete = True

                    if delete:

                        # Get the database's "datallowconn" value
                        datallowconn = self.connecter.get_datallowconn(dbname)

                        # If datallowconn is allowed, change it temporarily
                        if datallowconn:
                            # Disallow connections to the database during the
                            # process
                            result = self.connecter.disallow_db_conn(dbname)
                            if not result:
                                msg = Msg.DISALLOW_CONN_TO_PG_DB_FAIL.format(
                                    dbname=dbname)
                                self.logger.highlight('warning', msg, 'yellow')

                        # DROP DATABASE cannot take bind parameters, hence the
                        # str.format() on the query template
                        fmt_query_drop_db = Queries.DROP_PG_DB.format(
                            dbname=dbname)

                        start_time = DateTools.get_current_datetime()
                        # Drop the database
                        self.connecter.cursor.execute(fmt_query_drop_db)
                        end_time = DateTools.get_current_datetime()
                        # Get and show the process' duration
                        diff = DateTools.get_diff_datetimes(
                            start_time, end_time)
                        msg = Msg.DROP_DB_DONE.format(dbname=dbname, diff=diff)
                        self.logger.highlight('info', msg, 'green')

                        # If datallowconn was allowed, leave it as it was
                        if datallowconn:
                            # Allow connections to the database at the end of
                            # the process
                            result = self.connecter.allow_db_conn(dbname)
                            if not result:
                                msg = Msg.ALLOW_CONN_TO_PG_DB_FAIL.format(
                                    dbname=dbname)
                                self.logger.highlight('warning', msg, 'yellow')

                else:
                    msg = Msg.ACTIVE_CONNS_ERROR.format(dbname=dbname)
                    self.logger.highlight('warning', msg, 'yellow')

            else:
                msg = Msg.DB_DOES_NOT_EXIST.format(dbname=dbname)
                self.logger.highlight('warning', msg, 'yellow')

        except Exception as e:
            self.logger.debug('Error en la función "drop_pg_db": '
                              '{}.'.format(str(e)))
            self.logger.highlight('warning',
                                  Msg.DROP_DB_FAIL.format(dbname=dbname),
                                  'yellow')

    def drop_pg_dbs(self, dbnames):
        '''
        Target:
            - remove a list of databases in PostgreSQL.
        Parameters:
            - dbnames: list with the names of the databases to be removed.
        '''
        self.logger.highlight('info', Msg.BEGINNING_DROPPER, 'white')
        # Check if the role of user connected to PostgreSQL is superuser
        pg_superuser = self.connecter.is_pg_superuser()

        if dbnames:

            # Iterate the received parameter, not "self.dbnames": the guard
            # above checks "dbnames", so both must refer to the same list
            for dbname in dbnames:

                msg = Msg.PROCESSING_DB.format(dbname=dbname)
                self.logger.highlight('info', msg, 'cyan')

                self.drop_pg_db(dbname, pg_superuser)

        else:
            self.logger.highlight('warning',
                                  Msg.DROPPER_HAS_NOTHING_TO_DO,
                                  'yellow',
                                  effect='bold')

        self.logger.highlight('info',
                              Msg.DROP_DBS_DONE,
                              'green',
                              effect='bold')
class Connecter:
    '''This class manages connections with database engines and operations
    involving them.
    So far, only PostgreSQL is supported.
    '''
    conn = None  # The PostgreSQL connection object
    cursor = None  # The cursor of the PostgreSQL connection
    server = None  # The target host of the connection
    user = None  # The PostgreSQL user who makes the connection
    port = None  # The target port of the connection
    database = None  # The target database of the connection
    logger = None  # A logger to show and log some messages

    # PostgreSQL version (from this one on some variables change their names)
    PG_PID_VERSION_THRESHOLD = 90200
    pg_pid_91 = 'procpid'  # Name for PostgreSQL PID variable till version 9.1
    pg_pid_92 = 'pid'  # Name for PostgreSQL PID variable since version 9.2

    def __init__(self, server, user, port, database=None, logger=None):
        '''
        Target:
            - connect to PostgreSQL with the given parameters, storing the
              connection and a DictCursor in the instance. Execution is
              stopped if the connection cannot be established.
        Parameters:
            - server: the target host of the connection.
            - user: the PostgreSQL user who makes the connection.
            - port: the target port of the connection (int or numeric str).
            - database: the target database of the connection. If None, a
              default one is taken from "Default.CONNECTION_DATABASE".
            - logger: a logger to show and log some messages. A new one is
              created if not given.
        '''
        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        self.server = server

        self.user = user

        # Accept the port as an int or as a numeric string
        if isinstance(port, int):
            self.port = port
        elif Checker.str_is_int(port):
            self.port = Casting.str_to_int(port)
        else:
            self.logger.stop_exe(Msg.INVALID_PORT)

        if database is None:
            self.database = Default.CONNECTION_DATABASE
        elif database:
            self.database = database
        else:
            # Empty string or other falsy value: invalid database name
            self.logger.stop_exe(Msg.NO_CONNECTION_DATABASE)

        try:
            self.conn = psycopg2.connect(host=self.server,
                                         user=self.user,
                                         port=self.port,
                                         database=self.database)
            # Autocommit so that statements like DROP DATABASE are not run
            # inside a transaction block
            self.conn.autocommit = True
            # TODO: ask for a password here if possible
            # DictCursor allows accessing rows by column name
            self.cursor = self.conn.cursor(
                cursor_factory=psycopg2.extras.DictCursor)
        except Exception as e:
            self.logger.debug('Error en la función "pg_connect": {}.'.format(
                str(e)))
            self.logger.stop_exe(Msg.CONNECT_FAIL)

    def pg_disconnect(self):
        '''
        Target:
            - disconnect from PostgreSQL, closing both the cursor and the
              connection. Execution is stopped if the operation fails.
        '''
        try:
            self.cursor.close()
            self.conn.close()
        except Exception as e:
            self.logger.debug('Error en la función "pg_disconnect": '
                              '{}.'.format(str(e)))
            self.logger.stop_exe(Msg.DISCONNECT_FAIL)

    def get_pg_version(self):
        '''
        Target:
            - get the PostgreSQL version.
        Return:
            - an integer which gives the PostgreSQL version (psycopg2's
              numeric form, e.g. 90200 for 9.2.0).
        '''
        return self.conn.server_version

    def get_pretty_pg_version(self):
        '''
        Target:
            - get the pretty PostgreSQL version.
        Return:
            - a string which gives the PostgreSQL version and more details,
              or None if the query fails.
        '''
        try:
            self.cursor.execute(Queries.GET_PG_PRETTY_VERSION)
            pretty_pg_version = self.cursor.fetchone()

            return pretty_pg_version[0]

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error en la función "get_pretty_pg_version": '
                              '{}.'.format(str(e)))
            self.logger.highlight('warning', Msg.GET_PG_VERSION_FAIL, 'yellow')
            return None

    def get_pid_str(self):
        '''
        Target:
            - get the name of the process id depending on the PostgreSQL
              version which is being used. Before the version 9.2 this variable
              was called "procpid", afterwards became "pid".
        Return:
            - a string which gives the name of the variable process id.
        '''
        pg_version = self.get_pg_version()  # Get PostgreSQL version

        if pg_version < self.PG_PID_VERSION_THRESHOLD:
            return self.pg_pid_91
        else:
            return self.pg_pid_92

    def is_pg_superuser(self):
        '''
        Target:
            - check if a user connected to PostgreSQL has a superuser role.
        Return:
            - a boolean which indicates whether a user is a PostgreSQL
              superuser or not.
        '''
        self.cursor.execute(Queries.IS_PG_SUPERUSER)
        row = self.cursor.fetchone()

        return row['usesuper']

    def get_pg_time_start(self):
        '''
        Target:
            - get the time when PostgreSQL was started.
        Return:
            - a date which indicates the time when PostgreSQL was started,
              or None if the query fails.
        '''
        try:
            self.cursor.execute(Queries.GET_PG_TIME_START)
            row = self.cursor.fetchone()

            return row[0]

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error en la función "get_pg_time_start": '
                              '{}.'.format(str(e)))
            self.logger.highlight('warning', Msg.GET_PG_TIME_START_FAIL,
                                  'yellow')
            return None

    def get_pg_time_up(self):
        '''
        Target:
            - get how long PostgreSQL has been working.
        Return:
            - a date which indicates how long PostgreSQL has been working,
              or None if the query fails.
        '''
        try:
            self.cursor.execute(Queries.GET_PG_TIME_UP)
            row = self.cursor.fetchone()

            return row[0]

        except Exception as e:
            # Rollback to avoid errors in next queries because of
            # waiting this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error en la función "get_pg_time_up": '
                              '{}.'.format(str(e)))
            self.logger.highlight('warning', Msg.GET_PG_TIME_UP_FAIL, 'yellow')
            return None

    def get_pg_dbs_data(self, ex_templates=True, db_owner=''):
        '''
        Target:
            - do different queries to PostgreSQL depending on the parameters
              received, and store the results in the connection cursor.
        Parameters:
            - ex_templates: flag which determines whether template databases
              are excluded (True) or included (False) in the result.
            - db_owner: the name of the user whose databases are going to be
              obtained.
        Return:
            - a list with the PostgreSQL databases and their names,
              datallowconn and owners, or None if the query fails.
        '''
        try:
            # Get all databases (no templates) of a specific owner
            if db_owner and ex_templates:
                self.cursor.execute(Queries.GET_PG_NO_TEMPLATE_DBS_BY_OWNER,
                                    (db_owner, ))
            # Get all databases (templates too) of a specific owner
            elif db_owner and ex_templates is False:
                self.cursor.execute(Queries.GET_PG_DBS_BY_OWNER, (db_owner, ))
            # Get all databases (templates too)
            elif not db_owner and ex_templates is False:
                self.cursor.execute(Queries.GET_PG_DBS)
            else:  # Get all databases (no templates)
                self.cursor.execute(Queries.GET_PG_NO_TEMPLATE_DBS)

            dbs = self.cursor.fetchall()

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error en la función "get_pg_dbs_data": '
                              '{}.'.format(str(e)))
            msg = Msg.GET_PG_DBS_DATA
            self.logger.highlight('warning', msg, 'yellow')
            dbs = None

        return dbs

    def get_pg_db_data(self, dbname):
        '''
        Target:
            - show some info about a specified database.
        Parameters:
            - dbname: name of the database whose information is going to be
              gathered.
        Return:
            - a row with the database's data, or None if the query fails.
        '''

        try:
            self.cursor.execute(Queries.GET_PG_DB_DATA, (dbname, ))
            db = self.cursor.fetchone()

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error en la función "get_pg_db_data": '
                              '{}.'.format(str(e)))
            msg = Msg.GET_PG_DB_DATA.format(dbname=dbname)
            self.logger.highlight('warning', msg, 'yellow')
            db = None

        return db

    def get_pg_user_data(self, username):
        '''
        Target:
            - get some info about a specified user.
        Parameters:
            - username: name of the user whose information is going to be
              gathered.
        Return:
            - a row with the user's data, or None if the query fails.
        '''
        try:
            pg_version = self.get_pg_version()  # Get PostgreSQL version

            # The user catalog's columns differ between versions, so a
            # different query is used on each side of the threshold
            if pg_version < self.PG_PID_VERSION_THRESHOLD:
                self.cursor.execute(Queries.GET_PG91_USER_DATA, (username, ))
            else:
                self.cursor.execute(Queries.GET_PG92_USER_DATA, (username, ))
            user = self.cursor.fetchone()

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error en la función "get_pg_user_data": '
                              '{}.'.format(str(e)))
            msg = Msg.GET_PG_USER_DATA.format(username=username)
            self.logger.highlight('warning', msg, 'yellow')
            user = None

        return user

    def get_pg_conn_data(self, connpid):
        '''
        Target:
            - show some info about backends.
        Parameters:
            - connpid: PID of the backend whose information is going to be
              shown.
        Return:
            - a row with the backend's data, or None if the query fails.
        '''
        try:
            pg_version = self.get_pg_version()  # Get PostgreSQL version

            # The PID column is named differently depending on the version
            if pg_version < self.PG_PID_VERSION_THRESHOLD:
                self.cursor.execute(Queries.GET_PG91_CONN_DATA, (connpid, ))
            else:
                self.cursor.execute(Queries.GET_PG92_CONN_DATA, (connpid, ))
            conn = self.cursor.fetchone()

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error en la función "get_pg_conn_data": '
                              '{}.'.format(str(e)))
            msg = Msg.GET_PG_CONN_DATA.format(connpid=connpid)
            self.logger.highlight('warning', msg, 'yellow')
            conn = None

        return conn

    def get_pg_dbnames(self, ex_templates=False):
        '''
        Target:
            - get PostgreSQL databases' names depending on the parameters
              received, and store the results in the connection cursor.
        Parameters:
            - ex_templates: flag which determines whether template databases
              are excluded (True) or included (False) in the result.
        Return:
            - a list of database names, or None if the query fails.
        '''
        try:
            if ex_templates:
                self.cursor.execute(Queries.GET_PG_NO_TEMPLATE_DBNAMES)
            else:
                self.cursor.execute(Queries.GET_PG_DBNAMES)
            result = self.cursor.fetchall()

            dbnames = []
            for record in result:
                dbnames.append(record['datname'])

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error en la función "get_pg_dbnames": '
                              '{}.'.format(str(e)))
            self.logger.highlight('warning', Msg.GET_PG_DBNAMES_DATA, 'yellow')
            dbnames = None

        return dbnames

    def get_pg_usernames(self):
        '''
        Target:
            - get PostgreSQL users' names.
        Return:
            - a list of user names, or None if the query fails.
        '''
        try:
            self.cursor.execute(Queries.GET_PG_USERNAMES)
            result = self.cursor.fetchall()

            usernames = []
            for record in result:
                usernames.append(record['usename'])

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error en la función "get_pg_usernames": '
                              '{}.'.format(str(e)))
            self.logger.highlight('warning', Msg.GET_PG_USERNAMES_DATA,
                                  'yellow')
            usernames = None

        return usernames

    def get_pg_connpids(self):
        '''
        Target:
            - get PostgreSQL backends' PIDs.
        Return:
            - a list of backend PIDs, or None if the query fails.
        '''
        pid = self.get_pid_str()  # Get PID variable's name
        # The query template takes the version-dependent PID column name
        formatted_query_get_pg_connpids = Queries.GET_PG_CONNPIDS.format(
            pid=pid)

        try:
            self.cursor.execute(formatted_query_get_pg_connpids)
            result = self.cursor.fetchall()

            pids = []
            for record in result:
                pids.append(record['pid'])

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error en la función "get_pg_connpids": '
                              '{}.'.format(str(e)))
            self.logger.highlight('warning', Msg.GET_PG_CONNPIDS_DATA,
                                  'yellow')
            pids = None

        return pids

    def allow_db_conn(self, dbname):
        '''
        Target:
            - enable connections to a specified PostgreSQL database.
        Parameters:
            - dbname: name of the database whose property "datallowconn" is
              going to be changed to allow connections to itself.
        Return:
            - a boolean which indicates if the process succeeded.
        '''
        try:
            self.cursor.execute(Queries.ALLOW_CONN_TO_PG_DB, (dbname, ))
            return True

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error en la función "allow_db_conn": '
                              '{}.'.format(str(e)))
            return False

    def disallow_db_conn(self, dbname):
        '''
        Target:
            - disable connections to a specified PostgreSQL database.
        Parameters:
            - dbname: name of the database whose property "datallowconn" is
              going to be changed to disallow connections to itself.
        Return:
            - a boolean which indicates if the process succeeded.
        '''
        try:
            self.cursor.execute(Queries.DISALLOW_CONN_TO_PG_DB, (dbname, ))
            return True

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error en la función "disallow_db_conn": '
                              '{}.'.format(str(e)))
            return False

    def get_datallowconn(self, dbname):
        '''
        Target:
            - get "datallowconn" from a specified PostgreSQL database.
        Parameters:
            - dbname: name of the database whose property "datallowconn" is
              going to be read.
        Return:
            - a boolean which indicates the value of "datallowconn", or None
              if the query fails.
        '''
        try:
            self.cursor.execute(Queries.GET_PG_DB_DATALLOWCONN, (dbname, ))
            result = self.cursor.fetchone()
            return result[0]

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error en la función "get_datallowconn": '
                              '{}.'.format(str(e)))
            return None
Beispiel #9
0
class TrimmerCluster:
    '''This class removes (if necessary) obsolete or surplus backups of a
    PostgreSQL cluster stored in a given directory.
    '''

    bkp_path = ''  # The path where the backups are stored
    prefix = ''  # The prefix of the backups' names
    min_n_bkps = None  # Minimum number of a database's backups to keep
    exp_days = None  # Number of days which make a backup obsolete
    max_size = None  # Maximum size of a group of database's backups
    # Maximum size in Bytes of a group of database's backups
    max_size_bytes = None
    # Related to max_size, equivalence to turn the specified unit of measure in
    # the max_size variable into Bytes
    equivalence = 10 ** 6
    logger = None  # Logger to show and log some messages

    def __init__(self, bkp_path='', prefix='', min_n_bkps=1, exp_days=365,
                 max_size=5000, logger=None):
        '''
        Parameters:
            - bkp_path: the directory where the backups are stored.
              Mandatory: execution is stopped if it does not exist.
            - prefix: the prefix of the backups' names. If None, a default
              one is taken from "Default.PREFIX".
            - min_n_bkps: minimum number of backups to keep (int or numeric
              str). If None, a default is taken from "Default.MIN_N_BKPS".
            - exp_days: number of days which make a backup obsolete; -1
              means backups never expire. If None, a default is taken from
              "Default.EXP_DAYS".
            - max_size: maximum size of the group of backups, as a string
              with a size and a unit of measure. If None, a default is
              taken from "Default.MAX_SIZE".
            - logger: a logger to show and log some messages. A new one is
              created if not given.
        '''
        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if bkp_path and os.path.isdir(bkp_path):
            self.bkp_path = bkp_path
        else:
            self.logger.stop_exe(Messenger.DIR_DOES_NOT_EXIST)

        if prefix is None:
            self.prefix = Default.PREFIX
        else:
            self.prefix = prefix

        if min_n_bkps is None:
            self.min_n_bkps = Default.MIN_N_BKPS
        elif isinstance(min_n_bkps, int):
            self.min_n_bkps = min_n_bkps
        elif Checker.str_is_int(min_n_bkps):
            self.min_n_bkps = Casting.str_to_int(min_n_bkps)
        else:
            self.logger.stop_exe(Messenger.INVALID_MIN_BKPS)

        if exp_days is None:
            self.exp_days = Default.EXP_DAYS
        elif isinstance(exp_days, int) and exp_days >= -1:
            self.exp_days = exp_days
        elif Checker.str_is_valid_exp_days(exp_days):
            self.exp_days = Casting.str_to_int(exp_days)
        else:
            self.logger.stop_exe(Messenger.INVALID_OBS_DAYS)

        if max_size is None:
            self.max_size = Default.MAX_SIZE
        elif Checker.str_is_valid_max_size(max_size):
            self.max_size = max_size
        else:
            self.logger.stop_exe(Messenger.INVALID_MAX_TSIZE)

        # Split a string with size and unit of measure into a dictionary
        self.max_size = Casting.str_to_max_size(self.max_size)
        # Get the equivalence in Bytes of the specified unit of measure
        self.equivalence = Casting.get_equivalence(self.max_size['unit'])
        # Get the specified size in Bytes
        self.max_size_bytes = self.max_size['size'] * self.equivalence

        message = Messenger.CL_TRIMMER_VARS.format(
            bkp_path=self.bkp_path, prefix=self.prefix,
            min_n_bkps=self.min_n_bkps, exp_days=self.exp_days,
            max_size=self.max_size)
        self.logger.debug(Messenger.CL_TRIMMER_VARS_INTRO)
        self.logger.debug(message)

    def trim_cluster(self, ht_bkps_list):
        '''
        Target:
            - remove (if necessary) some cluster's backups, taking into
              account some parameters in the following order: minimum number of
              backups to keep > obsolete backups.
        Parameters:
            - ht_bkps_list: list of backups of a cluster to analyse and trim.
        '''
        if self.exp_days == -1:  # No expiration date
            x_days_ago = None
        else:
            # Timestamp before which a backup is considered obsolete
            x_days_ago = time.time() - (60 * 60 * 24 * self.exp_days)

        # Store the total number of backups of the cluster
        num_bkps = len(ht_bkps_list)
        # Clone the list to avoid conflict errors when removing
        ht_bkps_lt = ht_bkps_list[:]

        unlinked = False

        self.logger.highlight('info', Messenger.BEGINNING_CL_TRIMMER, 'white')

        start_time = DateTools.get_current_datetime()

        for f in ht_bkps_list:

            # Break if number of backups do not exceed the minimum
            if num_bkps <= self.min_n_bkps:
                break

            file_info = os.stat(f)

            # Obsolete backup
            if x_days_ago and file_info.st_ctime < x_days_ago:

                self.logger.info(Messenger.DELETING_OBSOLETE_BACKUP % f)
                os.unlink(f)  # Remove backup's file
                unlinked = True
                # Update the number of backups of the database
                num_bkps -= 1
                ht_bkps_lt.remove(f)  # Update the list of cluster's backups

        end_time = DateTools.get_current_datetime()

        # Get total size of the backups in Bytes
        tsize = Dir.get_files_tsize(ht_bkps_lt)
        # Get total size of the backups in the selected unit of measure
        tsize_unit = ceil(tsize / self.equivalence)

        ## UNCOMMENT NEXT SECTION TO PROCEDURE WITH THE BACKUP'S DELETION IF
        ## THEIR TOTAL SIZE EXCEEDS THE SPECIFIED MAXIMUM SIZE

        #ht_bkps_list = ht_bkps_lt[:]

        #for f in ht_bkps_list:
            ## If there are less backups than the minimum required...
            #if num_bkps <= self.min_n_bkps:
                #break
            #if tsize <= self.max_size_bytes:
                #break
            #else:
                #file_info = os.stat(f)
                #self.logger.info('Tamaño de copias de seguridad en disco '
                                 #'mayor que {} {}: eliminando el archivo '
                                 #'{}...' % (self.max_size['size'],
                                            #self.max_size['unit'], f))
                #os.unlink(f)  # Remove backup's file
                #unlinked = True
                ## Update the number of backups of the cluster
                #num_bkps -= 1
                ## ht_bkps_lt.remove(f)  # Update the list of cluster's backups
                #tsize -= file_info.st_size  # Update total size after deletion

        if not unlinked:

            message = Messenger.NO_CL_BACKUP_DELETED
            self.logger.highlight('warning', message, 'yellow')

        if tsize > self.max_size_bytes:  # Total size exceeds the maximum

            message = Messenger.CL_BKPS_SIZE_EXCEEDED.format(
                tsize_unit=tsize_unit, size=self.max_size['size'],
                unit=self.max_size['unit'])
            self.logger.highlight('warning', message, 'yellow', effect='bold')

        # Get and show the process' duration
        diff = DateTools.get_diff_datetimes(start_time, end_time)
        self.logger.highlight('info', Messenger.CL_TRIMMER_DONE.format(
            diff=diff), 'green')

    def trim_clusters(self, bkps_list):
        '''
        Target:
            - remove (if necessary) some backups of a cluster, taking into
              account some parameters in the following order: minimum number of
              backups to keep > obsolete backups.
        Parameters:
            - bkps_list: list of backups found in the specified directory.
        '''
        # If not prefix specified, trim all the backups (not only the ones
        # without prefix). Raw strings throughout: "\d" and "\." in a normal
        # string literal are invalid escape sequences in modern Python
        if self.prefix:
            regex = r'(' + self.prefix + r')ht_(.+_cluster)_' \
                    r'(\d{8}_\d{6}_.+)\.(?:dump|bz2|gz|zip)$'
        else:
            regex = r'(.+)?ht_(.+_cluster)_(\d{8}_\d{6}_.+)\.' \
                    r'(?:dump|bz2|gz|zip)$'
        regex = re.compile(regex)

        ht_bkps_list = []

        for bkp_file in bkps_list:

            # Extract file's name from the absolute path
            filename = os.path.basename(bkp_file)

            # If file matches regex (it means that file is a backup), append
            # it to the group of cluster's backups
            if re.match(regex, filename):
                ht_bkps_list.append(bkp_file)

        if ht_bkps_list:

            # Remove (if necessary) some backups of the cluster
            self.trim_cluster(ht_bkps_list)
            # Remove directories which could be empty after the trim
            Dir.remove_empty_dirs(self.bkp_path)

        else:
            self.logger.highlight('warning', Messenger.NO_BACKUP_IN_DIR,
                                  'yellow', effect='bold')

        self.logger.highlight('info', Messenger.TRIMMER_DONE, 'green',
                              effect='bold')
class Vacuumer:
    '''
    Vacuum a group of PostgreSQL databases, selected through
    inclusion/exclusion lists and regular expressions.
    '''

    in_dbs = []  # List of databases to be included in the process
    in_regex = ''  # Regular expression which must match the included databases
    # Flag which determinates whether inclusion conditions predominate over the
    # exclusion ones
    in_priority = False
    ex_dbs = []  # List of databases to be excluded in the process
    ex_regex = ''  # Regular expression which must match the excluded databases
    # Flag which determinates whether the templates must be included
    ex_templates = True
    # Use other PostgreSQL user during the backup process (only for superusers)
    db_owner = ''
    # An object with connection parameters to connect to PostgreSQL
    connecter = None
    logger = None  # Logger to show and log some messages

    def __init__(self, connecter=None, in_dbs=None, in_regex='',
                 in_priority=False, ex_dbs=None, ex_regex='',
                 ex_templates=True, db_owner='', logger=None):
        '''
        Parameters:
            - connecter: object with the PostgreSQL connection parameters.
            - in_dbs: list (or comma-separated string) of included databases.
            - in_regex: regex the included databases must match.
            - in_priority: whether inclusion beats exclusion on conflict.
            - ex_dbs: list (or comma-separated string) of excluded databases.
            - ex_regex: regex the excluded databases must match.
            - ex_templates: whether template databases are excluded.
            - db_owner: restrict the process to databases of this owner.
            - logger: a logger to show and log some messages.
        '''
        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if connecter:
            self.connecter = connecter
        else:
            self.logger.stop_exe(Messenger.NO_CONNECTION_PARAMS)

        # None sentinels instead of mutable default arguments, which would be
        # shared between every call of the constructor
        if in_dbs is None:
            in_dbs = []
        if ex_dbs is None:
            ex_dbs = ['postgres']

        if isinstance(in_dbs, list):
            self.in_dbs = in_dbs
        else:
            self.in_dbs = Casting.str_to_list(in_dbs)

        if Checker.check_regex(in_regex):
            self.in_regex = in_regex
        else:
            self.logger.stop_exe(Messenger.INVALID_IN_REGEX)

        if isinstance(in_priority, bool):
            self.in_priority = in_priority
        elif Checker.str_is_bool(in_priority):
            self.in_priority = Casting.str_to_bool(in_priority)
        else:
            self.logger.stop_exe(Messenger.INVALID_IN_PRIORITY)

        if isinstance(ex_dbs, list):
            self.ex_dbs = ex_dbs
        else:
            self.ex_dbs = Casting.str_to_list(ex_dbs)

        if Checker.check_regex(ex_regex):
            self.ex_regex = ex_regex
        else:
            self.logger.stop_exe(Messenger.INVALID_EX_REGEX)

        if isinstance(ex_templates, bool):
            self.ex_templates = ex_templates
        elif Checker.str_is_bool(ex_templates):
            self.ex_templates = Casting.str_to_bool(ex_templates)
        else:
            self.logger.stop_exe(Messenger.INVALID_EX_TEMPLATES)

        if db_owner is None:
            self.db_owner = Default.DB_OWNER
        else:
            self.db_owner = db_owner

        message = Messenger.VACUUMER_VARS.format(
            server=self.connecter.server, user=self.connecter.user,
            port=self.connecter.port, in_dbs=self.in_dbs,
            in_regex=self.in_regex, in_priority=self.in_priority,
            ex_dbs=self.ex_dbs, ex_regex=self.ex_regex,
            ex_templates=self.ex_templates, db_owner=self.db_owner)
        self.logger.debug(Messenger.VACUUMER_VARS_INTRO)
        self.logger.debug(message)

    def vacuum_db(self, dbname):
        '''
        Target:
            - vacuum a PostgreSQL database.
        Parameters:
            - dbname: name of the database which is going to be vacuumed.
        Return:
            - a boolean which indicates the success of the process.
        '''
        success = True

        # Store the command to do.
        # SECURITY NOTE: the command is run through a shell, so "dbname" and
        # the connection parameters must come from a trusted source.
        command = 'vacuumdb {} -U {} -h {} -p {}'.format(
            dbname, self.connecter.user, self.connecter.server,
            self.connecter.port)

        try:
            # Execute the command in console; a non-zero exit status means
            # vacuumdb failed
            result = subprocess.call(command, shell=True)
            if result != 0:
                raise Exception()
        except Exception as e:
            self.logger.debug('Error en la función "vacuum_db": {}.'.format(
                str(e)))
            success = False
        return success

    def vacuum_dbs(self, vacuum_list):
        '''
        Target:
            - vacuum a group of PostgreSQL databases.
        Parameters:
            - vacuum_list: names of the databases which are going to be
              vacuumed.
        '''
        if vacuum_list:
            self.logger.highlight('info', Messenger.BEGINNING_VACUUMER,
                                  'white')

        for db in vacuum_list:

            dbname = db['datname']

            message = Messenger.PROCESSING_DB.format(dbname=dbname)
            self.logger.highlight('info', message, 'cyan')

            # Let the user know whether the database connection is allowed
            if not db['datallowconn']:
                message = Messenger.FORBIDDEN_DB_CONNECTION.format(
                    dbname=dbname)
                self.logger.highlight('warning', message, 'yellow',
                                      effect='bold')
                success = False
            else:
                start_time = DateTools.get_current_datetime()
                # Vacuum the database
                success = self.vacuum_db(dbname)
                end_time = DateTools.get_current_datetime()
                # Get and show the process' duration
                diff = DateTools.get_diff_datetimes(start_time, end_time)

            if success:
                message = Messenger.DB_VACUUMER_DONE.format(dbname=dbname,
                                                            diff=diff)
                self.logger.highlight('info', message, 'green')

            else:
                message = Messenger.DB_VACUUMER_FAIL.format(dbname=dbname)
                self.logger.highlight('warning', message, 'yellow',
                                      effect='bold')

        self.logger.highlight('info', Messenger.VACUUMER_DONE, 'green',
                              effect='bold')
class Replicator:
    '''
    Clone (replicate) a PostgreSQL database within the same server.
    '''

    # An object with connection parameters to connect to PostgreSQL
    connecter = None
    logger = None  # Logger to show and log some messages
    new_dbname = ''  # Name of the copy
    original_dbname = ''  # Name of the original database

    def __init__(self,
                 connecter=None,
                 new_dbname='',
                 original_dbname='',
                 logger=None):
        '''
        Parameters:
            - connecter: object with the PostgreSQL connection parameters.
            - new_dbname: name for the copy.
            - original_dbname: name of the database to clone.
            - logger: a logger to show and log some messages.
        '''
        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if connecter:
            self.connecter = connecter
        else:
            self.logger.stop_exe(Msg.NO_CONNECTION_PARAMS)

        # Validate both names before querying PostgreSQL with them
        if not new_dbname:
            self.logger.stop_exe(Msg.NO_NEW_DBNAME)
        if not original_dbname:
            self.logger.stop_exe(Msg.NO_ORIGINAL_DBNAME)

        # Check whether the name of the copy already exists in PostgreSQL;
        # do not replicate if the name already exists
        self.connecter.cursor.execute(Queries.PG_DB_EXISTS, (new_dbname, ))
        result = self.connecter.cursor.fetchone()
        if result:
            msg = Msg.DB_ALREADY_EXISTS.format(dbname=new_dbname)
            self.logger.stop_exe(msg)
        self.new_dbname = new_dbname

        # Check whether the name of the source exists in PostgreSQL
        self.connecter.cursor.execute(Queries.PG_DB_EXISTS,
                                      (original_dbname, ))
        result = self.connecter.cursor.fetchone()
        if not result:
            msg = Msg.DB_DOES_NOT_EXIST.format(dbname=original_dbname)
            self.logger.stop_exe(msg)
        self.original_dbname = original_dbname

        msg = Msg.REPLICATOR_VARS.format(server=self.connecter.server,
                                         user=self.connecter.user,
                                         port=self.connecter.port,
                                         original_dbname=self.original_dbname,
                                         new_dbname=self.new_dbname)
        self.logger.debug(Msg.REPLICATOR_VARS_INTRO)
        self.logger.debug(msg)

    def replicate_pg_db(self):
        '''
        Target:
            - clone a specified database in PostgreSQL.
        '''
        try:
            # Abort if there is any other connection to the source database:
            # CREATE DATABASE ... TEMPLATE would fail otherwise
            pg_pid = self.connecter.get_pid_str()
            formatted_sql = Queries.BACKEND_PG_DB_EXISTS.format(
                pg_pid=pg_pid, target_db=self.original_dbname)
            self.connecter.cursor.execute(formatted_sql)
            result = self.connecter.cursor.fetchone()

            if result:
                msg = Msg.ACTIVE_CONNS_ERROR.format(
                    dbname=self.original_dbname)
                self.logger.stop_exe(msg)

            formatted_query_clone_pg_db = Queries.CLONE_PG_DB.format(
                dbname=self.new_dbname,
                original_dbname=self.original_dbname,
                user=self.connecter.user)

            msg = Msg.BEGINNING_REPLICATOR.format(
                original_dbname=self.original_dbname)
            self.logger.highlight('info', msg, 'white')

            # Get the database's "datallowconn" value
            datallowconn = self.connecter.get_datallowconn(
                self.original_dbname)

            # If datallowconn is allowed, change it temporarily
            if datallowconn:
                # Disallow connections to the database during the
                # process
                result = self.connecter.disallow_db_conn(self.original_dbname)
                if not result:
                    msg = Msg.DISALLOW_CONN_TO_PG_DB_FAIL.format(
                        dbname=self.original_dbname)
                    self.logger.highlight('warning', msg, 'yellow')

            start_time = DateTools.get_current_datetime()
            # Replicate the database
            self.connecter.cursor.execute(formatted_query_clone_pg_db)
            end_time = DateTools.get_current_datetime()
            # Get and show the process' duration
            diff = DateTools.get_diff_datetimes(start_time, end_time)

            # If datallowconn was allowed, leave it as it was
            if datallowconn:
                # Allow connections to the database at the end of
                # the process
                result = self.connecter.allow_db_conn(self.original_dbname)
                if not result:
                    msg = Msg.ALLOW_CONN_TO_PG_DB_FAIL.format(
                        dbname=self.original_dbname)
                    self.logger.highlight('warning', msg, 'yellow')

            msg = Msg.REPLICATE_DB_DONE.format(
                new_dbname=self.new_dbname,
                original_dbname=self.original_dbname,
                diff=diff)
            self.logger.highlight('info', msg, 'green')
            self.logger.highlight('info',
                                  Msg.REPLICATOR_DONE,
                                  'green',
                                  effect='bold')

        except Exception as e:
            # The debug message now names the real function instead of the
            # nonexistent "clone_pg_db"
            self.logger.debug('Error en la función "replicate_pg_db": '
                              '{}.'.format(str(e)))
            self.logger.stop_exe(Msg.REPLICATE_DB_FAIL)
# Beispiel #12
# 0
class Scheduler:
    '''
    Manage the program's jobs in the user's CRON file: show, add and remove
    lines.
    '''

    time = ''  # Time when the command is going to be executed in Cron
    command = ''  # Command which is going to be executed in Cron.
    logger = None  # Logger to show and log some messages

    # Cron nicknames mapped to their equivalent five-field schedules
    # ("@reboot" has no five-field form and is handled separately)
    NICKNAMES = {
        '@yearly': '0 0 1 1 *',
        '@annually': '0 0 1 1 *',
        '@monthly': '0 0 1 * *',
        '@weekly': '0 0 * * 0',
        '@daily': '0 0 * * *',
        '@midnight': '0 0 * * *',
        '@hourly': '0 * * * *',
    }

    def __init__(self, time='', command='', logger=None):
        '''
        Parameters:
            - time: time specification (cron nickname or five-field string).
            - command: command which is going to be scheduled in Cron.
            - logger: a logger to show and log some messages.
        '''
        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        self.time = time.strip()
        self.command = command.strip()

    def show_lines(self):
        '''
        Target:
            - show the lines of the program's CRON file.
        '''
        self.logger.highlight('info', Messenger.SHOWING_CRONTAB_FILE, 'white')
        print()

        cron = CronTab(user=True)

        if cron:
            for line in cron.lines:
                print(str(line))
        else:
            print('\033[1;40;93m' + Messenger.NO_CRONTAB_FILE + '\033[0m')

    def add_line(self):
        '''
        Target:
            - add a line to the program's CRON file.
        '''
        cron = CronTab(user=True)

        job = cron.new(command=self.command)

        if self.time == '@reboot':
            job.every_reboot()
        else:
            # Translate a cron nickname if one was given; otherwise assume
            # the time already is a five-field cron expression
            job.setall(self.NICKNAMES.get(self.time, self.time))

        self.logger.highlight('info', Messenger.SCHEDULER_ADDING, 'white')

        if not cron:
            self.logger.info(Messenger.CREATING_CRONTAB)

        try:
            cron.write()
            self.logger.highlight('info', Messenger.SCHEDULER_ADD_DONE,
                                  'green')

        except Exception as e:
            self.logger.debug('Error en la función "add_line": {}.'.format(
                str(e)))
            self.logger.stop_exe(Messenger.SCHEDULER_ADD_FAIL)

    def remove_line(self):
        '''
        Target:
            - remove a line from the program's CRON file.
        '''
        self.logger.highlight('info', Messenger.SCHEDULER_REMOVING, 'white')

        cron = CronTab(user=True)

        if not cron:
            self.logger.stop_exe(Messenger.NO_CRONTAB_FILE)

        deletion = False

        line = self.time + ' ' + self.command

        # Snapshot the jobs first: removing entries from the crontab while
        # iterating over it can skip jobs
        for job in list(cron):

            if str(job).strip() == line:

                try:
                    cron.remove(job)
                    message = Messenger.SCHEDULER_REMOVE_DONE.format(job=job)
                    self.logger.highlight('info', message, 'green')
                    deletion = True

                except Exception as e:
                    self.logger.debug('Error en la función "remove_line": '
                                      '{}.'.format(str(e)))
                    message = Messenger.SCHEDULER_REMOVE_FAIL.format(job=job)
                    self.logger.highlight('warning', message, 'yellow')

        if not deletion:
            self.logger.stop_exe(Messenger.NO_CRONTAB_JOB_TO_DEL)

        cron.write()
# Beispiel #13
# 0
class Alterer:
    '''
    Change the owner of a group of PostgreSQL databases and of their tables.
    '''

    in_dbs = []  # List of databases to be included in the process
    old_role = ''  # Current owner of the database's tables
    new_role = ''  # New owner for the database and its tables
    # An object with connection parameters to connect to PostgreSQL
    connecter = None
    logger = None  # Logger to show and log some messages

    def __init__(self, connecter=None, in_dbs=None, old_role='', new_role='',
                 logger=None):
        '''
        Parameters:
            - connecter: object with the PostgreSQL connection parameters.
            - in_dbs: list (or comma-separated string) of databases to alter.
            - old_role: current owner of the databases' tables.
            - new_role: new owner for the databases and their tables.
            - logger: a logger to show and log some messages.
        '''
        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if connecter:
            self.connecter = connecter
        else:
            self.logger.stop_exe(Msg.NO_CONNECTION_PARAMS)

        # None sentinel instead of a mutable default argument, which would be
        # shared between every call of the constructor
        if in_dbs is None:
            in_dbs = []
        if isinstance(in_dbs, list):
            self.in_dbs = in_dbs
        else:
            self.in_dbs = Casting.str_to_list(in_dbs)

        if old_role:
            self.old_role = old_role
        else:
            self.logger.stop_exe(Msg.NO_OLD_ROLE)

        if not new_role:
            self.logger.stop_exe(Msg.NO_NEW_ROLE)
        # First check whether the user exists in PostgreSQL or not
        self.connecter.cursor.execute(Queries.PG_USER_EXISTS, (new_role, ))
        # Do not alter database if the user does not exist
        result = self.connecter.cursor.fetchone()
        if result:
            self.new_role = new_role
        else:
            msg = Msg.USER_DOES_NOT_EXIST.format(user=new_role)
            self.logger.stop_exe(msg)

        msg = Msg.ALTERER_VARS.format(
            server=self.connecter.server, user=self.connecter.user,
            port=self.connecter.port, in_dbs=self.in_dbs,
            old_role=self.old_role, new_role=self.new_role)
        self.logger.debug(Msg.ALTERER_VARS_INTRO)
        self.logger.debug(msg)

    def alter_db_owner(self, db):
        '''
        Target:
            - change the owner of a databases and its tables.
        Parameters:
            - db: database which is going to be altered.
        Return:
            - a boolean which indicates the success of the process.
        '''
        msg = Msg.ALTERER_FEEDBACK.format(old_role=self.old_role,
                                          new_role=self.new_role)
        self.logger.info(msg)

        success = True
        dbname = db['datname']

        if db['owner'] != 'postgres':  # Do not allow switch an owner postgres

            if db['datallowconn'] == 1:  # Check if the db allows connections

                try:
                    # Change the owner of the database
                    self.connecter.cursor.execute(
                        Queries.CHANGE_PG_DB_OWNER.format(
                            dbname=dbname, new_role=self.new_role))

                except Exception as e:
                    success = False
                    self.logger.debug('Error en la función "alter_db_owner": '
                                      '{}'.format(str(e)))
                    msg = Msg.CHANGE_PG_DB_OWNER_FAIL
                    self.logger.highlight('warning', msg, 'yellow')

                # Start another connection to the target database to be able to
                # apply the next query (REASSIGN must run inside the target db)
                own_connecter = Connecter(server=self.connecter.server,
                                          user=self.connecter.user,
                                          port=self.connecter.port,
                                          database=dbname, logger=self.logger)

                # Disallow connections to the database during the process
                result = self.connecter.disallow_db_conn(dbname)
                if not result:
                    msg = Msg.DISALLOW_CONN_TO_PG_DB_FAIL.format(dbname=dbname)
                    self.logger.highlight('warning', msg, 'yellow')

                try:
                    # Change the owner of the database's tables
                    own_connecter.cursor.execute(
                        Queries.REASSIGN_PG_DB_TBLS_OWNER.format(
                            old_role=self.old_role, new_role=self.new_role))

                except Exception as e:
                    success = False
                    self.logger.debug('Error en la función "alter_db_owner": '
                                      '{}'.format(str(e)))
                    msg = Msg.REASSIGN_PG_DB_TBLS_OWNER_FAIL
                    self.logger.highlight('warning', msg, 'yellow')

                # Allow connections to the database at the end of the process
                result = self.connecter.allow_db_conn(dbname)
                if not result:
                    msg = Msg.ALLOW_CONN_TO_PG_DB_FAIL.format(dbname=dbname)
                    self.logger.highlight('warning', msg, 'yellow')

                # Close cursor and connection to the target database
                own_connecter.pg_disconnect()

            else:
                success = False
                msg = Msg.DB_DOES_NOT_ALLOW_CONN.format(dbname=dbname)
                self.logger.highlight('warning', msg, 'yellow')

        else:
            success = False
            msg = Msg.DB_OWNED_BY_POSTGRES_NOT_ALLOWED
            self.logger.highlight('warning', msg, 'yellow')

        return success

    def alter_dbs_owner(self, alt_list):
        '''
        Target:
            - change the owner of a group of databases and their tables.
        Parameters:
            - alt_list: names of the databases which are going to be altered.
        '''
        self.logger.highlight('info', Msg.PROCESSING_ALTERER, 'white')

        if alt_list:

            for db in alt_list:

                dbname = db['datname']

                msg = Msg.PROCESSING_DB.format(dbname=dbname)
                self.logger.highlight('info', msg, 'cyan')

                start_time = DateTools.get_current_datetime()
                # Change the owner of the database
                success = self.alter_db_owner(db)
                end_time = DateTools.get_current_datetime()
                # Get and show the process' duration
                diff = DateTools.get_diff_datetimes(start_time, end_time)

                if success:
                    msg = Msg.DB_ALTERER_DONE.format(dbname=dbname, diff=diff)
                    self.logger.highlight('info', msg, 'green')
                else:
                    msg = Msg.DB_ALTERER_FAIL.format(dbname=dbname)
                    self.logger.highlight('warning', msg, 'yellow',
                                          effect='bold')
        else:
            self.logger.highlight('warning', Msg.ALTERER_HAS_NOTHING_TO_DO,
                                  'yellow', effect='bold')

        self.logger.highlight('info', Msg.ALTERER_DONE, 'green', effect='bold')
# Beispiel #14
# 0
class Alterer:
    '''
    Reassign a group of PostgreSQL databases (and the tables inside them)
    from one role to another.
    '''

    in_dbs = []  # List of databases to be included in the process
    old_role = ''  # Current owner of the database's tables
    new_role = ''  # New owner for the database and its tables
    # An object with connection parameters to connect to PostgreSQL
    connecter = None
    logger = None  # Logger to show and log some messages

    def __init__(self,
                 connecter=None,
                 in_dbs=[],
                 old_role='',
                 new_role='',
                 logger=None):

        self.logger = logger if logger else Logger()

        if not connecter:
            self.logger.stop_exe(Msg.NO_CONNECTION_PARAMS)
        self.connecter = connecter

        # Accept either a ready-made list or a comma-separated string
        self.in_dbs = in_dbs if isinstance(in_dbs, list) \
            else Casting.str_to_list(in_dbs)

        if not old_role:
            self.logger.stop_exe(Msg.NO_OLD_ROLE)
        self.old_role = old_role

        if not new_role:
            self.logger.stop_exe(Msg.NO_NEW_ROLE)
        # The target role must already exist in PostgreSQL, otherwise the
        # process is aborted
        self.connecter.cursor.execute(Queries.PG_USER_EXISTS, (new_role, ))
        row = self.connecter.cursor.fetchone()
        if not row:
            self.logger.stop_exe(
                Msg.USER_DOES_NOT_EXIST.format(user=new_role))
        self.new_role = new_role

        self.logger.debug(Msg.ALTERER_VARS_INTRO)
        self.logger.debug(Msg.ALTERER_VARS.format(
            server=self.connecter.server, user=self.connecter.user,
            port=self.connecter.port, in_dbs=self.in_dbs,
            old_role=self.old_role, new_role=self.new_role))

    def alter_db_owner(self, db):
        '''
        Target:
            - change the owner of a databases and its tables.
        Parameters:
            - db: database which is going to be altered.
        Return:
            - a boolean which indicates the success of the process.
        '''
        self.logger.info(Msg.ALTERER_FEEDBACK.format(old_role=self.old_role,
                                                     new_role=self.new_role))

        dbname = db['datname']

        # Databases owned by "postgres" are never reassigned
        if db['owner'] == 'postgres':
            self.logger.highlight('warning',
                                  Msg.DB_OWNED_BY_POSTGRES_NOT_ALLOWED,
                                  'yellow')
            return False

        # The database must accept connections for the process to work
        if db['datallowconn'] != 1:
            self.logger.highlight(
                'warning', Msg.DB_DOES_NOT_ALLOW_CONN.format(dbname=dbname),
                'yellow')
            return False

        altered = True

        try:
            # Reassign the database itself
            self.connecter.cursor.execute(
                Queries.CHANGE_PG_DB_OWNER.format(dbname=dbname,
                                                  new_role=self.new_role))
        except Exception as e:
            altered = False
            self.logger.debug('Error en la función "alter_db_owner": '
                              '{}'.format(str(e)))
            self.logger.highlight('warning', Msg.CHANGE_PG_DB_OWNER_FAIL,
                                  'yellow')

        # A second connection, bound to the target database, is required to
        # run the REASSIGN query inside it
        target_conn = Connecter(server=self.connecter.server,
                                user=self.connecter.user,
                                port=self.connecter.port,
                                database=dbname,
                                logger=self.logger)

        # Keep other clients out of the database while it is being altered
        if not self.connecter.disallow_db_conn(dbname):
            self.logger.highlight(
                'warning',
                Msg.DISALLOW_CONN_TO_PG_DB_FAIL.format(dbname=dbname),
                'yellow')

        try:
            # Reassign every table owned by the old role
            target_conn.cursor.execute(
                Queries.REASSIGN_PG_DB_TBLS_OWNER.format(
                    old_role=self.old_role, new_role=self.new_role))
        except Exception as e:
            altered = False
            self.logger.debug('Error en la función "alter_db_owner": '
                              '{}'.format(str(e)))
            self.logger.highlight('warning',
                                  Msg.REASSIGN_PG_DB_TBLS_OWNER_FAIL,
                                  'yellow')

        # Let clients back in once the work is done
        if not self.connecter.allow_db_conn(dbname):
            self.logger.highlight(
                'warning', Msg.ALLOW_CONN_TO_PG_DB_FAIL.format(dbname=dbname),
                'yellow')

        # Release the extra cursor and connection
        target_conn.pg_disconnect()

        return altered

    def alter_dbs_owner(self, alt_list):
        '''
        Target:
            - change the owner of a group of databases and their tables.
        Parameters:
            - alt_list: names of the databases which are going to be altered.
        '''
        self.logger.highlight('info', Msg.PROCESSING_ALTERER, 'white')

        if not alt_list:
            self.logger.highlight('warning',
                                  Msg.ALTERER_HAS_NOTHING_TO_DO,
                                  'yellow',
                                  effect='bold')
        else:
            for db in alt_list:

                dbname = db['datname']

                self.logger.highlight(
                    'info', Msg.PROCESSING_DB.format(dbname=dbname), 'cyan')

                started = DateTools.get_current_datetime()
                # Reassign this database and its tables
                ok = self.alter_db_owner(db)
                finished = DateTools.get_current_datetime()
                # Report how long the operation took
                elapsed = DateTools.get_diff_datetimes(started, finished)

                if ok:
                    self.logger.highlight(
                        'info',
                        Msg.DB_ALTERER_DONE.format(dbname=dbname,
                                                   diff=elapsed),
                        'green')
                else:
                    self.logger.highlight(
                        'warning', Msg.DB_ALTERER_FAIL.format(dbname=dbname),
                        'yellow', effect='bold')

        self.logger.highlight('info', Msg.ALTERER_DONE, 'green', effect='bold')
# Beispiel #15
# 0
class Restorer:
    '''
    Restore a single database backup file into PostgreSQL.
    '''

    # An object with connection parameters to connect to PostgreSQL
    connecter = None
    logger = None  # Logger to show and log some messages
    db_backup = ''  # Absolute path of the backup file (of a database)
    new_dbname = ''  # New name for the database restored in PostgreSQL

    def __init__(self, connecter=None, db_backup='', new_dbname='',
                 logger=None):
        '''
        Parameters:
            - connecter: object with the PostgreSQL connection parameters.
            - db_backup: absolute path of the backup file to restore.
            - new_dbname: name for the restored database.
            - logger: a logger to show and log some messages.
        '''
        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if connecter:
            self.connecter = connecter
        else:
            self.logger.stop_exe(Messenger.NO_CONNECTION_PARAMS)

        # The backup file must exist on disk before trying to restore it
        if db_backup and os.path.isfile(db_backup):
            self.db_backup = db_backup
        else:
            self.logger.stop_exe(Messenger.NO_BKP_TO_RESTORE)

        if new_dbname:
            self.new_dbname = new_dbname
        else:
            self.logger.stop_exe(Messenger.NO_DBNAME_TO_RESTORE)

        message = Messenger.DB_RESTORER_VARS.format(
            server=self.connecter.server, user=self.connecter.user,
            port=self.connecter.port, db_backup=self.db_backup,
            new_dbname=self.new_dbname)
        self.logger.debug(Messenger.DB_RESTORER_VARS_INTRO)
        self.logger.debug(message)

    def restore_db_backup(self):
        '''
        Target:
            - restore a database's backup in PostgreSQL.
        '''
        # Regular expression which must match the backup's name; the third
        # group captures the extension, which decides how to decompress
        regex = re.compile(r'.*db_(.+)_(\d{8}_\d{6}_.+)\.(dump|bz2|gz|zip)$')

        match = regex.match(self.db_backup)
        if match:
            ext = match.groups()[2]
        else:
            # stop_exe aborts the execution, so "ext" is always bound below
            self.logger.stop_exe(Messenger.NO_BACKUP_FORMAT)

        message = Messenger.BEGINNING_DB_RESTORER.format(
            db_backup=self.db_backup, new_dbname=self.new_dbname)
        self.logger.highlight('info', message, 'white')
        self.logger.info(Messenger.WAIT_PLEASE)

        # Each compressed extension maps to the command which decompresses
        # the backup to stdout; a plain dump is handed to pg_restore directly.
        # SECURITY NOTE: the command is run through a shell, so the path and
        # names interpolated here must come from a trusted source.
        decompress = {
            'gz': 'gunzip -c {} -k',
            'bz2': 'bunzip2 -c {} -k',
            'zip': 'unzip -p {}',
        }
        if ext in decompress:
            command = (decompress[ext] + ' | pg_restore -U {} -h {} -p {} '
                       '-d {}').format(self.db_backup, self.connecter.user,
                                       self.connecter.server,
                                       self.connecter.port, self.new_dbname)
        else:
            command = 'pg_restore -U {} -h {} -p {} -d {} {}'.format(
                self.connecter.user, self.connecter.server,
                self.connecter.port, self.new_dbname, self.db_backup)

        try:
            start_time = DateTools.get_current_datetime()
            # Make the restauration of the database
            result = subprocess.call(command, shell=True)
            end_time = DateTools.get_current_datetime()
            # Get and show the process' duration
            diff = DateTools.get_diff_datetimes(start_time, end_time)

            if result != 0:
                raise Exception()

            message = Messenger.RESTORE_DB_DONE.format(
                db_backup=self.db_backup, new_dbname=self.new_dbname,
                diff=diff)
            self.logger.highlight('info', message, 'green')

            self.logger.highlight('info', Messenger.RESTORER_DONE, 'green',
                                  effect='bold')

        except Exception as e:
            self.logger.debug('Error en la función "restore_db_backup": '
                              '{}.'.format(str(e)))
            message = Messenger.RESTORE_DB_FAIL.format(
                db_backup=self.db_backup, new_dbname=self.new_dbname)
            self.logger.stop_exe(message)
class Replicator:
    '''Clones an existing PostgreSQL database under a new name.'''

    # An object with connection parameters to connect to PostgreSQL
    connecter = None
    logger = None  # Logger to show and log some messages
    new_dbname = ''  # Name of the copy
    original_dbname = ''  # Name of the original database

    def __init__(self, connecter=None, new_dbname='', original_dbname='',
                 logger=None):
        '''
        Target:
            - store the connection parameters and check that the source
              database exists and that the copy's name is still free.
        Parameters:
            - connecter: an object with connection parameters to PostgreSQL.
            - new_dbname: the name the clone will receive.
            - original_dbname: the name of the database to be cloned.
            - logger: a logger to show and log some messages.
        '''
        self.logger = logger if logger else Logger()

        if not connecter:
            self.logger.stop_exe(Msg.NO_CONNECTION_PARAMS)
        else:
            self.connecter = connecter

        # Refuse to clone over an existing database: check the target name
        self.connecter.cursor.execute(Queries.PG_DB_EXISTS, (new_dbname, ))
        if self.connecter.cursor.fetchone():
            self.logger.stop_exe(Msg.DB_ALREADY_EXISTS.format(
                dbname=new_dbname))

        if not new_dbname:
            self.logger.stop_exe(Msg.NO_NEW_DBNAME)
        else:
            self.new_dbname = new_dbname

        # The source database must exist in PostgreSQL
        self.connecter.cursor.execute(Queries.PG_DB_EXISTS,
                                      (original_dbname, ))
        if not self.connecter.cursor.fetchone():
            self.logger.stop_exe(Msg.DB_DOES_NOT_EXIST.format(
                dbname=original_dbname))

        if not original_dbname:
            self.logger.stop_exe(Msg.NO_ORIGINAL_DBNAME)
        else:
            self.original_dbname = original_dbname

        self.logger.debug(Msg.REPLICATOR_VARS_INTRO)
        self.logger.debug(Msg.REPLICATOR_VARS.format(
            server=self.connecter.server, user=self.connecter.user,
            port=self.connecter.port, original_dbname=self.original_dbname,
            new_dbname=self.new_dbname))

    def replicate_pg_db(self):
        '''
        Target:
            - clone a specified database in PostgreSQL.
        '''
        try:
            # Abort if there is any other connection to the source database
            pid_var = self.connecter.get_pid_str()
            self.connecter.cursor.execute(
                Queries.BACKEND_PG_DB_EXISTS.format(
                    pg_pid=pid_var, target_db=self.original_dbname))
            if self.connecter.cursor.fetchone():
                self.logger.stop_exe(Msg.ACTIVE_CONNS_ERROR.format(
                    dbname=self.original_dbname))

            clone_query = Queries.CLONE_PG_DB.format(
                dbname=self.new_dbname,
                original_dbname=self.original_dbname,
                user=self.connecter.user)

            self.logger.highlight('info', Msg.BEGINNING_REPLICATOR.format(
                original_dbname=self.original_dbname), 'white')

            # Remember the source's "datallowconn" flag to restore it later
            datallowconn = self.connecter.get_datallowconn(
                self.original_dbname)

            if datallowconn:
                # Temporarily forbid connections to the source database
                # while it is being cloned
                if not self.connecter.disallow_db_conn(self.original_dbname):
                    self.logger.highlight(
                        'warning', Msg.DISALLOW_CONN_TO_PG_DB_FAIL.format(
                            dbname=self.original_dbname), 'yellow')

            start_time = DateTools.get_current_datetime()
            # Replicate the database
            self.connecter.cursor.execute(clone_query)
            end_time = DateTools.get_current_datetime()
            # Get and show the process' duration
            diff = DateTools.get_diff_datetimes(start_time, end_time)

            if datallowconn:
                # Restore the connection permission of the source database
                if not self.connecter.allow_db_conn(self.original_dbname):
                    self.logger.highlight(
                        'warning', Msg.ALLOW_CONN_TO_PG_DB_FAIL.format(
                            dbname=self.original_dbname), 'yellow')

            self.logger.highlight('info', Msg.REPLICATE_DB_DONE.format(
                new_dbname=self.new_dbname,
                original_dbname=self.original_dbname, diff=diff), 'green')
            self.logger.highlight('info', Msg.REPLICATOR_DONE, 'green',
                                  effect='bold')

        except Exception as e:
            self.logger.debug('Error en la función "clone_pg_db": '
                              '{}.'.format(str(e)))
            self.logger.stop_exe(Msg.REPLICATE_DB_FAIL)
Beispiel #17
0
class Backer:
    '''This class manages the backup process of individual PostgreSQL
    databases, optionally vacuuming each one beforehand.
    '''

    bkp_path = ''  # The path where the backups are stored
    group = ''  # The name of the subdirectory where the backups are stored
    bkp_type = ''  # The type of the backups' files
    prefix = ''  # The prefix of the backups' names
    in_dbs = []  # List of databases to be included in the process
    in_regex = ''  # Regular expression which must match the included databases
    # Flag which determinates whether inclusion conditions predominate over the
    # exclusion ones
    in_priority = False
    ex_dbs = []  # List of databases to be excluded in the process
    ex_regex = ''  # Regular expression which must match the excluded databases
    # Flag which determinates whether the templates must be included
    ex_templates = True
    # Flag which determinates whether the included databases must be vacuumed
    # before the backup process
    vacuum = True
    # Use other PostgreSQL user during the backup process (only for superusers)
    db_owner = ''
    # An object with connection parameters to connect to PostgreSQL
    connecter = None
    logger = None  # Logger to show and log some messages

    def __init__(self,
                 connecter=None,
                 bkp_path='',
                 group='',
                 bkp_type='dump',
                 prefix='',
                 in_dbs=None,
                 in_regex='',
                 in_priority=False,
                 ex_dbs=None,
                 ex_regex='',
                 ex_templates=True,
                 vacuum=True,
                 db_owner='',
                 logger=None):
        '''
        Target:
            - validate and store every parameter which rules the backup
              process of a group of databases.
        Parameters:
            - connecter: an object with connection parameters to PostgreSQL.
            - bkp_path: the directory where the backups are stored.
            - group: subdirectory's name where the backups are stored.
            - bkp_type: the type (compression) of the backups' files.
            - prefix: the prefix of the backups' names.
            - in_dbs / in_regex / in_priority: inclusion conditions.
            - ex_dbs / ex_regex / ex_templates: exclusion conditions.
            - vacuum: whether each database is vacuumed before its backup.
            - db_owner: a PostgreSQL user whose databases are the only ones
              processed (only for superusers).
            - logger: a logger to show and log some messages.
        '''
        # Avoid the mutable-default-argument pitfall: the list defaults are
        # created here instead of in the signature (behaviour is unchanged)
        if in_dbs is None:
            in_dbs = []
        if ex_dbs is None:
            ex_dbs = ['postgres']

        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if connecter:
            self.connecter = connecter
        else:
            self.logger.stop_exe(Msg.NO_CONNECTION_PARAMS)

        # If backup directory is not specified, create a default one to store
        # the backups
        if bkp_path:
            self.bkp_path = bkp_path
        else:
            self.bkp_path = Default.BKP_PATH
            Dir.create_dir(self.bkp_path, self.logger)

        if group:
            self.group = group
        else:
            self.group = Default.GROUP

        if bkp_type is None:
            self.bkp_type = Default.BKP_TYPE
        elif Checker.check_compress_type(bkp_type):
            self.bkp_type = bkp_type
        else:
            self.logger.stop_exe(Msg.INVALID_BKP_TYPE)

        self.prefix = prefix

        if isinstance(in_dbs, list):
            self.in_dbs = in_dbs
        else:
            self.in_dbs = Casting.str_to_list(in_dbs)

        if Checker.check_regex(in_regex):
            self.in_regex = in_regex
        else:
            self.logger.stop_exe(Msg.INVALID_IN_REGEX)

        if isinstance(in_priority, bool):
            self.in_priority = in_priority
        elif Checker.str_is_bool(in_priority):
            self.in_priority = Casting.str_to_bool(in_priority)
        else:
            self.logger.stop_exe(Msg.INVALID_IN_PRIORITY)

        if isinstance(ex_dbs, list):
            self.ex_dbs = ex_dbs
        else:
            self.ex_dbs = Casting.str_to_list(ex_dbs)

        if Checker.check_regex(ex_regex):
            self.ex_regex = ex_regex
        else:
            self.logger.stop_exe(Msg.INVALID_EX_REGEX)

        if isinstance(ex_templates, bool):
            self.ex_templates = ex_templates
        elif Checker.str_is_bool(ex_templates):
            self.ex_templates = Casting.str_to_bool(ex_templates)
        else:
            self.logger.stop_exe(Msg.INVALID_EX_TEMPLATES)

        if isinstance(vacuum, bool):
            self.vacuum = vacuum
        elif Checker.str_is_bool(vacuum):
            self.vacuum = Casting.str_to_bool(vacuum)
        else:
            self.logger.stop_exe(Msg.INVALID_VACUUM)

        # BUGFIX: the original condition was inverted, so any explicitly
        # given owner was discarded and the attribute could be set to None.
        # Keep a given owner; otherwise fall back to the default one.
        if db_owner:
            self.db_owner = db_owner
        else:
            self.db_owner = Default.DB_OWNER

        msg = Msg.DB_BACKER_VARS.format(server=self.connecter.server,
                                        user=self.connecter.user,
                                        port=self.connecter.port,
                                        bkp_path=self.bkp_path,
                                        group=self.group,
                                        bkp_type=self.bkp_type,
                                        prefix=self.prefix,
                                        in_dbs=self.in_dbs,
                                        in_regex=self.in_regex,
                                        in_priority=self.in_priority,
                                        ex_dbs=self.ex_dbs,
                                        ex_regex=self.ex_regex,
                                        ex_templates=self.ex_templates,
                                        vacuum=self.vacuum,
                                        db_owner=self.db_owner)
        self.logger.debug(Msg.DB_BACKER_VARS_INTRO)
        self.logger.debug(msg)

    def backup_db(self, dbname, bkps_dir):
        '''
        Target:
            - make a backup of a specified database.
        Parameters:
            - dbname: name of the database which is going to be backuped.
            - bkps_dir: directory where the backup is going to be stored.
        Return:
            - a boolean which indicates the success of the process.
        '''
        success = True
        # Get date and time of the zone
        init_ts = DateTools.get_date()
        # Get current year
        year = str(DateTools.get_year(init_ts))
        # Get current month
        month = str(DateTools.get_month(init_ts))
        # Create new directories with the year and the month of the backup
        bkp_dir = bkps_dir + year + '/' + month + '/'
        Dir.create_dir(bkp_dir, self.logger)
        # Set backup's name
        file_name = self.prefix + 'db_' + dbname + '_' + init_ts + '.' + \
            self.bkp_type

        # Store the command to do depending on the backup type.
        # NOTE(review): "dbname" is interpolated into a shell command without
        # quoting; a database name containing shell metacharacters would be a
        # command-injection vector. PostgreSQL names normally do not, but
        # consider shlex.quote() if names are ever user-controlled.
        if self.bkp_type == 'gz':  # Zip with gzip
            command = 'pg_dump {} -Fc -U {} -h {} -p {} | gzip > {}'.format(
                dbname, self.connecter.user, self.connecter.server,
                self.connecter.port, bkp_dir + file_name)
        elif self.bkp_type == 'bz2':  # Zip with bzip2
            command = 'pg_dump {} -Fc -U {} -h {} -p {} | bzip2 > {}'.format(
                dbname, self.connecter.user, self.connecter.server,
                self.connecter.port, bkp_dir + file_name)
        elif self.bkp_type == 'zip':  # Zip with zip
            command = 'pg_dump {} -Fc -U {} -h {} -p {} | zip > {}'.format(
                dbname, self.connecter.user, self.connecter.server,
                self.connecter.port, bkp_dir + file_name)
        else:  # Do not zip
            command = 'pg_dump {} -Fc -U {} -h {} -p {} > {}'.format(
                dbname, self.connecter.user, self.connecter.server,
                self.connecter.port, bkp_dir + file_name)

        try:
            # Execute the command in console
            result = subprocess.call(command, shell=True)
            if result != 0:
                raise Exception()

        except Exception as e:
            self.logger.debug('Error en la función "backup_db": {}.'.format(
                str(e)))
            success = False

        return success

    def backup_dbs(self, dbs_all):
        '''
        Target:
            - make a backup of some specified databases, vacuuming them first
              if required, and report the outcome of each one.
        Parameters:
            - dbs_all: names of the databases which are going to be backuped
              (a list of dictionaries with at least the keys "datname" and
              "datallowconn").
        '''
        self.logger.highlight('info', Msg.CHECKING_BACKUP_DIR, 'white')

        # Create a new directory with the name of the group
        bkps_dir = self.bkp_path + self.group + Default.DB_BKPS_DIR
        Dir.create_dir(bkps_dir, self.logger)

        self.logger.info(Msg.DESTINY_DIR.format(path=bkps_dir))

        self.logger.highlight('info', Msg.PROCESSING_DB_BACKER, 'white')

        if dbs_all:
            for db in dbs_all:

                dbname = db['datname']
                msg = Msg.PROCESSING_DB.format(dbname=dbname)
                self.logger.highlight('info', msg, 'cyan')

                # Let the user know whether the database connection is allowed
                if not db['datallowconn']:
                    msg = Msg.FORBIDDEN_DB_CONNECTION.format(dbname=dbname)
                    self.logger.highlight('warning',
                                          msg,
                                          'yellow',
                                          effect='bold')
                    success = False

                else:
                    # Vaccum the database before the backup process if
                    # necessary
                    if self.vacuum:
                        self.logger.info(
                            Msg.PRE_VACUUMING_DB.format(dbname=dbname))
                        vacuumer = Vacuumer(self.connecter, self.in_dbs,
                                            self.in_regex, self.in_priority,
                                            self.ex_dbs, self.ex_regex,
                                            self.ex_templates, self.db_owner,
                                            self.logger)

                        # Vacuum the database; a failure here is only a
                        # warning, the backup is attempted anyway
                        success = vacuumer.vacuum_db(dbname)
                        if success:
                            msg = Msg.PRE_VACUUMING_DB_DONE.format(
                                dbname=dbname)
                            self.logger.info(msg)
                        else:
                            msg = Msg.PRE_VACUUMING_DB_FAIL.format(
                                dbname=dbname)
                            self.logger.highlight('warning', msg, 'yellow')

                    self.logger.info(
                        Msg.BEGINNING_DB_BACKER.format(dbname=dbname))

                    start_time = DateTools.get_current_datetime()
                    # Make the backup of the database
                    success = self.backup_db(dbname, bkps_dir)
                    end_time = DateTools.get_current_datetime()
                    # Get and show the process' duration
                    diff = DateTools.get_diff_datetimes(start_time, end_time)

                if success:
                    msg = Msg.DB_BACKER_DONE.format(dbname=dbname, diff=diff)
                    self.logger.highlight('info', msg, 'green')
                else:
                    msg = Msg.DB_BACKER_FAIL.format(dbname=dbname)
                    self.logger.highlight('warning',
                                          msg,
                                          'yellow',
                                          effect='bold')
        else:
            self.logger.highlight('warning',
                                  Msg.BACKER_HAS_NOTHING_TO_DO,
                                  'yellow',
                                  effect='bold')

        self.logger.highlight('info', Msg.BACKER_DONE, 'green', effect='bold')
Beispiel #18
0
class Trimmer:
    '''Deletes obsolete database backups while always keeping a minimum
    number of copies per database.'''

    bkp_path = ''  # The path where the backups are stored
    prefix = ''  # The prefix of the backups' names
    in_dbs = []  # List of databases to be included in the process
    in_regex = ''  # Regular expression which must match the included databases
    # Flag which determinates whether inclusion conditions predominate over the
    # exclusion ones
    in_priority = False
    ex_dbs = []  # List of databases to be excluded in the process
    ex_regex = ''  # Regular expression which must match the excluded databases
    min_n_bkps = None  # Minimum number of a database's backups to keep
    exp_days = None  # Number of days which make a backup obsolete
    max_size = None  # Maximum size of a group of database's backups
    # Maximum size in Bytes of a group of database's backups
    max_size_bytes = None
    # Equivalence to turn the unit of measure given in max_size into Bytes
    equivalence = 10 ** 6
    # Flag which determinates whether show alerts about PostgreSQL
    pg_warnings = True
    # An object with connection parameters to connect to PostgreSQL
    connecter = None
    logger = None  # Logger to show and log some messages

    def __init__(self, bkp_path='', prefix='', in_dbs=[], in_regex='',
                 in_priority=False, ex_dbs=[], ex_regex='', min_n_bkps=1,
                 exp_days=365, max_size='10000MB', pg_warnings=True,
                 connecter=None, logger=None):
        '''
        Target:
            - validate and store every parameter which rules the trim
              process of a group of databases' backups.
        '''
        self.logger = logger or Logger()

        # The backups' directory must already exist
        if bkp_path and os.path.isdir(bkp_path):
            self.bkp_path = bkp_path
        else:
            self.logger.stop_exe(Messenger.DIR_DOES_NOT_EXIST)

        self.prefix = Default.PREFIX if prefix is None else prefix

        self.in_dbs = in_dbs if isinstance(in_dbs, list) \
            else Casting.str_to_list(in_dbs)

        if not Checker.check_regex(in_regex):
            self.logger.stop_exe(Messenger.INVALID_IN_REGEX)
        else:
            self.in_regex = in_regex

        if isinstance(in_priority, bool):
            self.in_priority = in_priority
        elif Checker.str_is_bool(in_priority):
            self.in_priority = Casting.str_to_bool(in_priority)
        else:
            self.logger.stop_exe(Messenger.INVALID_IN_PRIORITY)

        self.ex_dbs = ex_dbs if isinstance(ex_dbs, list) \
            else Casting.str_to_list(ex_dbs)

        if not Checker.check_regex(ex_regex):
            self.logger.stop_exe(Messenger.INVALID_EX_REGEX)
        else:
            self.ex_regex = ex_regex

        if min_n_bkps is None:
            self.min_n_bkps = Default.MIN_N_BKPS
        elif isinstance(min_n_bkps, int):
            self.min_n_bkps = min_n_bkps
        elif Checker.str_is_int(min_n_bkps):
            self.min_n_bkps = Casting.str_to_int(min_n_bkps)
        else:
            self.logger.stop_exe(Messenger.INVALID_MIN_BKPS)

        if exp_days is None:
            self.exp_days = Default.EXP_DAYS
        elif isinstance(exp_days, int) and exp_days >= -1:
            self.exp_days = exp_days
        elif Checker.str_is_valid_exp_days(exp_days):
            self.exp_days = Casting.str_to_int(exp_days)
        else:
            self.logger.stop_exe(Messenger.INVALID_OBS_DAYS)

        if max_size is None:
            self.max_size = Default.MAX_SIZE
        elif Checker.str_is_valid_max_size(max_size):
            self.max_size = max_size
        else:
            self.logger.stop_exe(Messenger.INVALID_MAX_TSIZE)

        # Turn the "size + unit" string into a dictionary and precompute the
        # size limit in Bytes
        self.max_size = Casting.str_to_max_size(self.max_size)
        self.equivalence = Casting.get_equivalence(self.max_size['unit'])
        self.max_size_bytes = self.max_size['size'] * self.equivalence

        if isinstance(pg_warnings, bool):
            self.pg_warnings = pg_warnings
        elif Checker.str_is_bool(pg_warnings):
            self.pg_warnings = Casting.str_to_bool(pg_warnings)
        else:
            self.logger.stop_exe(Messenger.INVALID_PG_WARNINGS)

        # Showing PostgreSQL warnings requires a working connection
        if self.pg_warnings:
            if connecter:
                self.connecter = connecter
            else:
                self.logger.stop_exe(Messenger.NO_CONNECTION_PARAMS)

        self.logger.debug(Messenger.DB_TRIMMER_VARS_INTRO)
        self.logger.debug(Messenger.DB_TRIMMER_VARS.format(
            bkp_path=self.bkp_path, prefix=self.prefix, in_dbs=self.in_dbs,
            in_regex=self.in_regex, in_priority=self.in_priority,
            ex_dbs=self.ex_dbs, ex_regex=self.ex_regex,
            min_n_bkps=self.min_n_bkps, exp_days=self.exp_days,
            max_size=self.max_size, pg_warnings=self.pg_warnings))

    def trim_db(self, dbname, db_bkps_list):
        '''
        Target:
            - remove (if necessary) some database's backups, taking into
              account some parameters in the following order: minimum number
              of backups to keep > obsolete backups.
        Parameters:
            - dbname: name of the database whose backups are going to be
              trimmed.
            - db_bkps_list: list of backups of a database to analyse and trim.
        '''
        # Timestamp below which a backup is considered obsolete
        if self.exp_days == -1:  # No expiration date
            deadline = None
        else:
            deadline = time.time() - (60 * 60 * 24 * self.exp_days)

        # Total number of backups this database currently has
        num_bkps = len(db_bkps_list)
        # Iterate over the original list while removals happen on this copy
        kept_bkps = db_bkps_list[:]

        unlinked = False

        self.logger.highlight('info', Messenger.BEGINNING_DB_TRIMMER.format(
            dbname=dbname), 'white')

        start_time = DateTools.get_current_datetime()

        for bkp_file in db_bkps_list:

            # Never go below the minimum number of backups to keep
            if num_bkps <= self.min_n_bkps:
                break

            ctime = os.stat(bkp_file).st_ctime

            # Delete the backup when it is older than the deadline
            if deadline and ctime < deadline:

                self.logger.info(Messenger.DELETING_OBSOLETE_BACKUP
                                 % bkp_file)
                os.unlink(bkp_file)  # Remove backup's file
                unlinked = True
                num_bkps -= 1
                kept_bkps.remove(bkp_file)

        end_time = DateTools.get_current_datetime()

        # Total size in Bytes of the backups which survived the trim
        tsize = Dir.get_files_tsize(kept_bkps)
        # The same size, expressed in the configured unit of measure
        tsize_unit = ceil(tsize / self.equivalence)

        # NOTE: a further deletion stage driven by the maximum total size
        # used to live here; it is intentionally disabled and only the
        # warning below is emitted when the limit is exceeded.

        if not unlinked:
            self.logger.highlight('warning',
                                  Messenger.NO_DB_BACKUP_DELETED.format(
                                      dbname=dbname), 'yellow')

        if tsize > self.max_size_bytes:  # Total size exceeds the maximum
            message = Messenger.DB_BKPS_SIZE_EXCEEDED.format(
                dbname=dbname, tsize_unit=tsize_unit,
                size=self.max_size['size'], unit=self.max_size['unit'])
            self.logger.highlight('warning', message, 'yellow', effect='bold')

        # Get and show the process' duration
        diff = DateTools.get_diff_datetimes(start_time, end_time)
        self.logger.highlight('info', Messenger.DB_TRIMMER_DONE.format(
            dbname=dbname, diff=diff), 'green')

    def trim_dbs(self, bkps_list, dbs_to_clean):
        '''
        Target:
            - remove (if necessary) some backups of a group of databases,
              taking into account some parameters in the following order:
              minimum number of backups to keep > obsolete backups.
        Parameters:
            - bkps_list: list of backups found in the specified directory.
            - dbs_to_clean: names of the databases whose backups are going to
              be trimmed.
        '''
        # Without a prefix every backup is targeted, not only the unprefixed
        # ones
        if self.prefix:
            pattern = (r'(' + self.prefix +
                       r')db_(.+)_(\d{8}_\d{6}_.+)\.(?:dump|bz2|gz|zip)$')
        else:
            pattern = r'(.+)?db_(.+)_(\d{8}_\d{6}_.+)\.(?:dump|bz2|gz|zip)$'
        regex = re.compile(pattern)

        for dbname in dbs_to_clean:

            db_bkps_list = []

            for path in bkps_list:

                # Work on the file's name, not its absolute path
                match = regex.match(os.path.basename(path))

                # Group 1 of the pattern is the name of the database this
                # backup belongs to; keep only this database's backups
                if match and match.groups()[1] == dbname:
                    db_bkps_list.append(path)

            # Remove (if necessary) some backups of the specified database
            self.trim_db(dbname, db_bkps_list)

        # Remove directories which could be empty after the trim
        Dir.remove_empty_dirs(self.bkp_path)

        self.logger.highlight('info', Messenger.TRIMMER_DONE, 'green',
                              effect='bold')
Beispiel #19
0
class BackerCluster:
    '''Makes a backup of a whole PostgreSQL cluster via pg_dumpall.'''

    bkp_path = ''  # The path where the backups are stored
    group = ''  # The name of the subdirectory where the backups are stored
    bkp_type = ''  # The type of the backups' files
    prefix = ''  # The prefix of the backups' names
    # Flag which determinates whether the databases must be vacuumed before
    # the backup process
    vacuum = True
    # An object with connection parameters to connect to PostgreSQL
    connecter = None
    logger = None  # Logger to show and log some messages

    def __init__(self,
                 connecter=None,
                 bkp_path='',
                 group='',
                 bkp_type='dump',
                 prefix='',
                 vacuum=True,
                 logger=None):
        '''
        Target:
            - validate and store every parameter which rules the backup
              process of a cluster.
        Parameters:
            - connecter: an object with connection parameters to PostgreSQL.
            - bkp_path: the directory where the backups are stored.
            - group: subdirectory's name where the backups are stored.
            - bkp_type: the type (compression) of the backups' files.
            - prefix: the prefix of the backups' names.
            - vacuum: whether the databases are vacuumed before the backup.
            - logger: a logger to show and log some messages.
        '''
        self.logger = logger if logger else Logger()

        if not connecter:
            self.logger.stop_exe(Msg.NO_CONNECTION_PARAMS)
        else:
            self.connecter = connecter

        if not bkp_path:
            # Fall back to the default backups' directory and make sure it
            # exists
            self.bkp_path = Default.BKP_PATH
            Dir.create_dir(self.bkp_path, self.logger)
        else:
            self.bkp_path = bkp_path

        self.group = group if group else Default.GROUP

        if bkp_type is None:
            self.bkp_type = Default.BKP_TYPE
        elif Checker.check_compress_type(bkp_type):
            self.bkp_type = bkp_type
        else:
            self.logger.stop_exe(Msg.INVALID_BKP_TYPE)

        self.prefix = prefix

        if isinstance(vacuum, bool):
            self.vacuum = vacuum
        elif Checker.str_is_bool(vacuum):
            self.vacuum = Casting.str_to_bool(vacuum)
        else:
            self.logger.stop_exe(Msg.INVALID_VACUUM)

        self.logger.debug(Msg.CL_BACKER_VARS_INTRO)
        self.logger.debug(Msg.CL_BACKER_VARS.format(
            server=self.connecter.server, user=self.connecter.user,
            port=self.connecter.port, bkp_path=self.bkp_path,
            group=self.group, bkp_type=self.bkp_type, prefix=self.prefix,
            vacuum=self.vacuum))

    def backup_all(self, bkps_dir):
        '''
        Target:
            - make a backup of a cluster.
        Parameters:
            - bkps_dir: directory where the backup is going to be stored.
        Return:
            - a boolean which indicates the success of the process.
        '''
        # Get date and time of the zone
        init_ts = DateTools.get_date()
        year = str(DateTools.get_year(init_ts))
        month = str(DateTools.get_month(init_ts))
        # Backups are grouped into subdirectories by year and month
        bkp_dir = bkps_dir + year + '/' + month + '/'
        Dir.create_dir(bkp_dir, self.logger)

        # Set backup's name
        file_name = self.prefix + 'ht_' + self.connecter.server + \
            str(self.connecter.port) + '_cluster_' + init_ts + '.' + \
            self.bkp_type

        # Pick the compressor matching the selected backup type; any other
        # type means the dump is stored as-is
        compressers = {'gz': 'gzip', 'bz2': 'bzip2', 'zip': 'zip'}
        if self.bkp_type in compressers:
            command = ('pg_dumpall -U {} -h {} -p {} | ' +
                       compressers[self.bkp_type] + ' > {}').format(
                self.connecter.user, self.connecter.server,
                self.connecter.port, bkp_dir + file_name)
        else:  # Do not zip
            command = 'pg_dumpall -U {} -h {} -p {} > {}'.format(
                self.connecter.user, self.connecter.server,
                self.connecter.port, bkp_dir + file_name)
        try:
            # Execute the command in console
            if subprocess.call(command, shell=True) != 0:
                raise Exception()
            return True

        except Exception as e:
            self.logger.debug('Error en la función "backup_all": {}.'.format(
                str(e)))
            return False

    def backup_cl(self):
        '''
        Target:
            - vacuum if necessary and make a backup of a cluster.
        '''
        self.logger.highlight('info', Msg.CHECKING_BACKUP_DIR, 'white')

        # Each group gets its own subdirectory for cluster backups
        bkps_dir = self.bkp_path + self.group + Default.CL_BKPS_DIR
        Dir.create_dir(bkps_dir, self.logger)

        self.logger.info(Msg.DESTINY_DIR.format(path=bkps_dir))

        if self.vacuum:
            # Vacuum every database before dumping the whole cluster
            vacuumer = Vacuumer(connecter=self.connecter, logger=self.logger)
            dbs_all = vacuumer.connecter.get_pg_dbs_data(
                vacuumer.ex_templates, vacuumer.db_owner)
            vacuumer.vacuum_dbs(dbs_all)

        self.logger.highlight('info', Msg.BEGINNING_CL_BACKER, 'white')

        start_time = DateTools.get_current_datetime()
        # Make the backup of the cluster
        success = self.backup_all(bkps_dir)
        end_time = DateTools.get_current_datetime()
        # Get and show the process' duration
        diff = DateTools.get_diff_datetimes(start_time, end_time)

        if success:
            self.logger.highlight('info', Msg.CL_BACKER_DONE.format(
                diff=diff), 'green', effect='bold')
        else:
            self.logger.highlight('warning',
                                  Msg.CL_BACKER_FAIL,
                                  'yellow',
                                  effect='bold')

        self.logger.highlight('info', Msg.BACKER_DONE, 'green', effect='bold')
class Connecter:
    '''This class manages connections with database engines and operations
    involving them.
    So far, only PostgreSQL is supported.
    '''
    conn = None  # The PostgreSQL connection object (psycopg2 connection)
    cursor = None  # The cursor of the PostgreSQL connection (DictCursor)
    server = None  # The target host of the connection
    user = None  # The PostgreSQL user who makes the connection
    port = None  # The target port of the connection
    database = None  # The target database of the connection
    logger = None  # A logger to show and log some messages

    # PostgreSQL version (from this one on some variables change their names)
    PG_PID_VERSION_THRESHOLD = 90200
    pg_pid_91 = 'procpid'  # Name for PostgreSQL PID variable till version 9.1
    pg_pid_92 = 'pid'  # Name for PostgreSQL PID variable since version 9.2

    def __init__(self, server, user, port, database=None, logger=None):
        '''
        Target:
            - store the connection parameters and open an autocommit
              connection to PostgreSQL with a DictCursor (rows indexable by
              column name). Execution is stopped if the port is invalid or
              the connection attempt fails.
        Parameters:
            - server: the target host of the connection.
            - user: the PostgreSQL user who makes the connection.
            - port: the target port (an int or a string castable to int).
            - database: the target database; None selects the default
              connection database, any other falsy value stops execution.
            - logger: a logger to show and log some messages.
        '''
        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        self.server = server

        self.user = user

        if isinstance(port, int):
            self.port = port
        elif Checker.str_is_int(port):
            self.port = Casting.str_to_int(port)
        else:
            self.logger.stop_exe(Msg.INVALID_PORT)

        if database is None:
            self.database = Default.CONNECTION_DATABASE
        elif database:
            self.database = database
        else:
            # A falsy non-None value (e.g. '') is considered invalid
            self.logger.stop_exe(Msg.NO_CONNECTION_DATABASE)

        try:
            self.conn = psycopg2.connect(host=self.server, user=self.user,
                                         port=self.port,
                                         database=self.database)
            self.conn.autocommit = True
            # TODO: ask for a password here if possible
            self.cursor = self.conn.cursor(
                cursor_factory=psycopg2.extras.DictCursor)
        except Exception as e:
            self.logger.debug('Error en la función "pg_connect": {}.'.format(
                str(e)))
            self.logger.stop_exe(Msg.CONNECT_FAIL)

    def pg_disconnect(self):
        '''
        Target:
            - disconnect from PostgreSQL (close both the cursor and the
              connection); execution is stopped on failure.
        '''
        try:
            self.cursor.close()
            self.conn.close()
        except Exception as e:
            self.logger.debug('Error en la función "pg_disconnect": '
                              '{}.'.format(str(e)))
            self.logger.stop_exe(Msg.DISCONNECT_FAIL)

    def get_pg_version(self):
        '''
        Target:
            - get the PostgreSQL version.
        Return:
            - an integer which gives the PostgreSQL version (e.g. 90200
              for 9.2.0).
        '''
        return self.conn.server_version

    def get_pretty_pg_version(self):
        '''
        Target:
            - get the pretty PostgreSQL version.
        Return:
            - a string which gives the PostgreSQL version and more details,
              or None if the query fails.
        '''
        try:
            self.cursor.execute(Queries.GET_PG_PRETTY_VERSION)
            pretty_pg_version = self.cursor.fetchone()

            return pretty_pg_version[0]

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error en la función "get_pretty_pg_version": '
                              '{}.'.format(str(e)))
            self.logger.highlight('warning', Msg.GET_PG_VERSION_FAIL, 'yellow')
            return None

    def get_pid_str(self):
        '''
        Target:
            - get the name of the process id depending on the PostgreSQL
              version which is being used. Before the version 9.2 this variable
              was called "procpid", afterwards became "pid".
        Return:
            - a string which gives the name of the variable process id.
        '''
        pg_version = self.get_pg_version()  # Get PostgreSQL version

        if pg_version < self.PG_PID_VERSION_THRESHOLD:
            return self.pg_pid_91
        else:
            return self.pg_pid_92

    def is_pg_superuser(self):
        '''
        Target:
            - check if a user connected to PostgreSQL has a superuser role.
        Return:
            - a boolean which indicates whether a user is a PostgreSQL
              superuser or not.
        '''
        self.cursor.execute(Queries.IS_PG_SUPERUSER)
        row = self.cursor.fetchone()

        # DictCursor rows are indexable by column name
        return row['usesuper']

    def get_pg_time_start(self):
        '''
        Target:
            - get the time when PostgreSQL was started.
        Return:
            - a date which indicates the time when PostgreSQL was started,
              or None if the query fails.
        '''
        try:
            self.cursor.execute(Queries.GET_PG_TIME_START)
            row = self.cursor.fetchone()

            return row[0]

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error en la función "get_pg_time_start": '
                              '{}.'.format(str(e)))
            self.logger.highlight('warning', Msg.GET_PG_TIME_START_FAIL,
                                  'yellow')
            return None

    def get_pg_time_up(self):
        '''
        Target:
            - get how long PostgreSQL has been working.
        Return:
            - a date which indicates how long PostgreSQL has been working,
              or None if the query fails.
        '''
        try:
            self.cursor.execute(Queries.GET_PG_TIME_UP)
            row = self.cursor.fetchone()

            return row[0]

        except Exception as e:
            # Rollback to avoid errors in next queries because of
            # waiting this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error en la función "get_pg_time_up": '
                              '{}.'.format(str(e)))
            self.logger.highlight('warning', Msg.GET_PG_TIME_UP_FAIL, 'yellow')
            return None

    def get_pg_dbs_data(self, ex_templates=True, db_owner=''):
        '''
        Target:
            - do different queries to PostgreSQL depending on the parameters
              received, and store the results in the connection cursor.
        Parameters:
            - ex_templates: flag which determinates whether or not get those
              databases which are templates (True excludes templates).
            - db_owner: the name of the user whose databases are going to be
              obtained ('' means databases of every owner).
        Return:
            - a list with the PostgreSQL databases and their names,
              datallowconn and owners, or None if the query fails.
        '''
        try:
            # Get all databases (no templates) of a specific owner
            if db_owner and ex_templates:
                self.cursor.execute(Queries.GET_PG_NO_TEMPLATE_DBS_BY_OWNER,
                                    (db_owner, ))
            # Get all databases (templates too) of a specific owner
            elif db_owner and ex_templates is False:
                self.cursor.execute(Queries.GET_PG_DBS_BY_OWNER, (db_owner, ))
            # Get all databases (templates too)
            elif not db_owner and ex_templates is False:
                self.cursor.execute(Queries.GET_PG_DBS)
            else:  # Get all databases (no templates)
                self.cursor.execute(Queries.GET_PG_NO_TEMPLATE_DBS)

            dbs = self.cursor.fetchall()

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error en la función "get_pg_dbs_data": '
                              '{}.'.format(str(e)))
            msg = Msg.GET_PG_DBS_DATA
            self.logger.highlight('warning', msg, 'yellow')
            dbs = None

        return dbs

    def get_pg_db_data(self, dbname):
        '''
        Target:
            - show some info about a specified database.
        Parameters:
            - dbname: name of the database whose information is going to be
              gathered.
        Return:
            - a row with the database's data, or None if the query fails.
        '''

        try:
            self.cursor.execute(Queries.GET_PG_DB_DATA, (dbname, ))
            db = self.cursor.fetchone()

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error en la función "get_pg_db_data": '
                              '{}.'.format(str(e)))
            msg = Msg.GET_PG_DB_DATA.format(dbname=dbname)
            self.logger.highlight('warning', msg, 'yellow')
            db = None

        return db

    def get_pg_user_data(self, username):
        '''
        Target:
            - get some info about a specified user.
        Parameters:
            - username: name of the user whose information is going to be
              gathered.
        Return:
            - a row with the user's data, or None if the query fails.
        '''
        try:
            pg_version = self.get_pg_version()  # Get PostgreSQL version

            # The user catalog's columns differ before/after version 9.2
            if pg_version < self.PG_PID_VERSION_THRESHOLD:
                self.cursor.execute(Queries.GET_PG91_USER_DATA, (username, ))
            else:
                self.cursor.execute(Queries.GET_PG92_USER_DATA, (username, ))
            user = self.cursor.fetchone()

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error en la función "get_pg_user_data": '
                              '{}.'.format(str(e)))
            msg = Msg.GET_PG_USER_DATA.format(username=username)
            self.logger.highlight('warning', msg, 'yellow')
            user = None

        return user

    def get_pg_conn_data(self, connpid):
        '''
        Target:
            - show some info about backends.
        Parameters:
            - connpid: PID of the backend whose information is going to be
              shown.
        Return:
            - a row with the backend's data, or None if the query fails.
        '''
        try:
            pg_version = self.get_pg_version()  # Get PostgreSQL version

            # The backend catalog's columns differ before/after version 9.2
            if pg_version < self.PG_PID_VERSION_THRESHOLD:
                self.cursor.execute(Queries.GET_PG91_CONN_DATA, (connpid, ))
            else:
                self.cursor.execute(Queries.GET_PG92_CONN_DATA, (connpid, ))
            conn = self.cursor.fetchone()

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error en la función "get_pg_conn_data": '
                              '{}.'.format(str(e)))
            msg = Msg.GET_PG_CONN_DATA.format(connpid=connpid)
            self.logger.highlight('warning', msg, 'yellow')
            conn = None

        return conn

    def get_pg_dbnames(self, ex_templates=False):
        '''
        Target:
            - get PostgreSQL databases' names depending on the parameters
              received, and store the results in the connection cursor.
        Parameters:
            - ex_templates: flag which determinates whether or not get those
              databases which are templates (True excludes templates).
        Return:
            - a list with the databases' names, or None if the query fails.
        '''
        try:
            if ex_templates:
                self.cursor.execute(Queries.GET_PG_NO_TEMPLATE_DBNAMES)
            else:
                self.cursor.execute(Queries.GET_PG_DBNAMES)
            result = self.cursor.fetchall()

            dbnames = []
            for record in result:
                dbnames.append(record['datname'])

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error en la función "get_pg_dbnames": '
                              '{}.'.format(str(e)))
            self.logger.highlight('warning', Msg.GET_PG_DBNAMES_DATA, 'yellow')
            dbnames = None

        return dbnames

    def get_pg_usernames(self):
        '''
        Target:
            - get PostgreSQL users' names.
        Return:
            - a list with the users' names, or None if the query fails.
        '''
        try:
            self.cursor.execute(Queries.GET_PG_USERNAMES)
            result = self.cursor.fetchall()

            usernames = []
            for record in result:
                usernames.append(record['usename'])

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error en la función "get_pg_usernames": '
                              '{}.'.format(str(e)))
            self.logger.highlight('warning', Msg.GET_PG_USERNAMES_DATA,
                                  'yellow')
            usernames = None

        return usernames

    def get_pg_connpids(self):
        '''
        Target:
            - get PostgreSQL backends' PIDs.
        Return:
            - a list with the backends' PIDs, or None if the query fails.
        '''
        pid = self.get_pid_str()  # Get PID variable's name
        # Inject the version-dependent PID column name into the query
        formatted_query_get_pg_connpids = Queries.GET_PG_CONNPIDS.format(
            pid=pid)

        try:
            self.cursor.execute(formatted_query_get_pg_connpids)
            result = self.cursor.fetchall()

            pids = []
            for record in result:
                pids.append(record['pid'])

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error en la función "get_pg_connpids": '
                              '{}.'.format(str(e)))
            self.logger.highlight('warning', Msg.GET_PG_CONNPIDS_DATA,
                                  'yellow')
            pids = None

        return pids

    def allow_db_conn(self, dbname):
        '''
        Target:
            - enable connections to a specified PostgreSQL database.
        Parameters:
            - dbname: name of the database whose property "datallowconn" is
              going to be changed to allow connections to itself.
        Return:
            - a boolean which indicates if the process succeded.
        '''
        try:
            self.cursor.execute(Queries.ALLOW_CONN_TO_PG_DB, (dbname, ))
            return True

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error en la función "allow_db_conn": '
                              '{}.'.format(str(e)))
            return False

    def disallow_db_conn(self, dbname):
        '''
        Target:
            - disable connections to a specified PostgreSQL database.
        Parameters:
            - dbname: name of the database whose property "datallowconn" is
              going to be changed to disallow connections to itself.
        Return:
            - a boolean which indicates if the process succeded.
        '''
        try:
            self.cursor.execute(Queries.DISALLOW_CONN_TO_PG_DB, (dbname, ))
            return True

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error en la función "disallow_db_conn": '
                              '{}.'.format(str(e)))
            return False

    def get_datallowconn(self, dbname):
        '''
        Target:
            - get "datallowconn" from a specified PostgreSQL database.
        Parameters:
            - dbname: name of the database whose property "datallowconn" is
              going to be read.
        Return:
            - a boolean which indicates the value of "datallowconn", or None
              if the query fails.
        '''
        try:
            self.cursor.execute(Queries.GET_PG_DB_DATALLOWCONN, (dbname, ))
            result = self.cursor.fetchone()
            return result[0]

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error en la función "get_datallowconn": '
                              '{}.'.format(str(e)))
            return None
Beispiel #21
0
class Restorer:
    '''Restores a single database's backup file into PostgreSQL by piping
    the (possibly compressed) dump through pg_restore.
    '''

    # An object with connection parameters to connect to PostgreSQL
    connecter = None
    logger = None  # Logger to show and log some messages
    db_backup = ''  # Absolute path of the backup file (of a database)
    new_dbname = ''  # New name for the database restored in PostgreSQL

    def __init__(self,
                 connecter=None,
                 db_backup='',
                 new_dbname='',
                 logger=None):
        '''
        Target:
            - validate and store the restoration parameters. Execution is
              stopped if the connecter is missing, the backup file does not
              exist or no target database name was given.
        Parameters:
            - connecter: an object with connection parameters to connect to
              PostgreSQL.
            - db_backup: absolute path of the backup file to restore.
            - new_dbname: name for the restored database.
            - logger: a logger to show and log some messages.
        '''
        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if connecter:
            self.connecter = connecter
        else:
            self.logger.stop_exe(Messenger.NO_CONNECTION_PARAMS)

        # The backup file must exist on disk before attempting a restore
        if db_backup and os.path.isfile(db_backup):
            self.db_backup = db_backup
        else:
            self.logger.stop_exe(Messenger.NO_BKP_TO_RESTORE)

        if new_dbname:
            self.new_dbname = new_dbname
        else:
            self.logger.stop_exe(Messenger.NO_DBNAME_TO_RESTORE)

        message = Messenger.DB_RESTORER_VARS.format(
            server=self.connecter.server,
            user=self.connecter.user,
            port=self.connecter.port,
            db_backup=self.db_backup,
            new_dbname=self.new_dbname)
        self.logger.debug(Messenger.DB_RESTORER_VARS_INTRO)
        self.logger.debug(message)

    def restore_db_backup(self):
        '''
        Target:
            - restore a database's backup in PostgreSQL. The target database
              is first created by cloning a restoring template, then the
              backup file is piped into pg_restore (decompressing it first
              when the extension requires it). Execution is stopped on
              failure.
        '''
        # Create the target database as a clone of the restoring template
        replicator = Replicator(self.connecter, self.new_dbname,
                                Default.RESTORING_TEMPLATE, self.logger)
        result = self.connecter.allow_db_conn(Default.RESTORING_TEMPLATE)
        if result:
            replicator.replicate_pg_db()
            self.connecter.disallow_db_conn(Default.RESTORING_TEMPLATE)
        else:
            self.logger.stop_exe(
                Messenger.ALLOW_DB_CONN_FAIL.format(
                    dbname=Default.RESTORING_TEMPLATE))

        # Regular expression which must match the backup's name
        regex = r'.*db_(.+)_(\d{8}_\d{6}_.+)\.(dump|bz2|gz|zip)$'
        regex = re.compile(regex)

        if re.match(regex, self.db_backup):
            # Store the parts of the backup's name (name, date, ext)
            parts = regex.search(self.db_backup).groups()
            # Store only the extension to know the type of file
            ext = parts[2]
        else:
            # stop_exe is expected to abort execution, so "ext" is always
            # bound below — TODO confirm stop_exe never returns
            self.logger.stop_exe(Messenger.NO_BACKUP_FORMAT)

        message = Messenger.BEGINNING_DB_RESTORER.format(
            db_backup=self.db_backup, new_dbname=self.new_dbname)
        self.logger.highlight('info', message, 'white')
        self.logger.info(Messenger.WAIT_PLEASE)

        # NOTE(review): these commands run with shell=True and interpolate
        # paths/names into the shell string — verify inputs are trusted
        if ext == 'gz':
            command = 'gunzip -c {} -k | pg_restore -U {} -h {} -p {} ' \
                      '-d {}'.format(self.db_backup, self.connecter.user,
                                     self.connecter.server,
                                     self.connecter.port, self.new_dbname)
        elif ext == 'bz2':
            command = 'bunzip2 -c {} -k | pg_restore -U {} -h {} -p {} ' \
                      '-d {}'.format(self.db_backup, self.connecter.user,
                                     self.connecter.server,
                                     self.connecter.port, self.new_dbname)
        elif ext == 'zip':
            command = 'unzip -p {} | pg_restore -U {} -h {} -p {} ' \
                      '-d {}'.format(self.db_backup, self.connecter.user,
                                     self.connecter.server,
                                     self.connecter.port, self.new_dbname)
        else:
            command = 'pg_restore -U {} -h {} -p {} -d {} {}'.format(
                self.connecter.user, self.connecter.server,
                self.connecter.port, self.new_dbname, self.db_backup)

        try:
            start_time = DateTools.get_current_datetime()
            # Make the restauration of the database
            result = subprocess.call(command, shell=True)
            end_time = DateTools.get_current_datetime()
            # Get and show the process' duration
            diff = DateTools.get_diff_datetimes(start_time, end_time)

            # A non-zero exit status means pg_restore (or the pipe) failed
            if result != 0:
                raise Exception()

            message = Messenger.RESTORE_DB_DONE.format(
                db_backup=self.db_backup,
                new_dbname=self.new_dbname,
                diff=diff)
            self.logger.highlight('info', message, 'green')

            self.logger.highlight('info',
                                  Messenger.RESTORER_DONE,
                                  'green',
                                  effect='bold')

        except Exception as e:
            self.logger.debug('Error en la función "restore_db_backup": '
                              '{}.'.format(str(e)))
            message = Messenger.RESTORE_DB_FAIL.format(
                db_backup=self.db_backup, new_dbname=self.new_dbname)
            self.logger.stop_exe(message)
class Terminator:
    '''Terminates PostgreSQL backends: every connection, every connection of
    a given user, or every connection to a set of databases.
    '''

    target_all = None  # Flag which determinates if terminate any connection
    target_user = None  # Terminate any connection of an specific user
    target_dbs = []  # Terminate any connection to a list of databases
    # An object with connection parameters to connect to PostgreSQL
    connecter = None
    logger = None  # Logger to show and log some messages

    def __init__(self,
                 connecter,
                 target_all=False,
                 target_user='',
                 target_dbs=[],
                 logger=None):
        '''
        Target:
            - validate and store the termination targets. Execution is
              stopped if the connecter is missing or "target_all" cannot be
              interpreted as a boolean.
        Parameters:
            - connecter: an object with connection parameters to connect to
              PostgreSQL.
            - target_all: flag (bool or boolean-like string) to terminate
              every connection.
            - target_user: terminate any connection of this specific user.
            - target_dbs: list (or comma-separated string) of databases
              whose connections are going to be terminated.
            - logger: a logger to show and log some messages.
        '''
        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if connecter:
            self.connecter = connecter
        else:
            self.logger.stop_exe(Messenger.NO_CONNECTION_PARAMS)

        if target_all is None:
            self.target_all = target_all
        elif isinstance(target_all, bool):
            self.target_all = target_all
        elif Checker.str_is_bool(target_all):
            self.target_all = Casting.str_to_bool(target_all)
        else:
            self.logger.stop_exe(Messenger.INVALID_TARGET_ALL)

        self.target_user = target_user

        # Accept either a ready-made list or a string to be split
        if target_dbs is None:
            self.target_dbs = []
        elif isinstance(target_dbs, list):
            self.target_dbs = target_dbs
        else:
            self.target_dbs = Casting.str_to_list(target_dbs)

        message = Messenger.TERMINATOR_VARS.format(
            server=self.connecter.server,
            user=self.connecter.user,
            port=self.connecter.port,
            target_all=self.target_all,
            target_user=target_user,
            target_dbs=self.target_dbs)
        self.logger.debug(Messenger.TERMINATOR_VARS_INTRO)
        self.logger.debug(message)

    def terminate_backend_user(self):
        '''
        Target:
            - terminate every connection of a specific user to PostgreSQL (as
              long as the target user is not the one who is running the
              program).
        '''
        message = Messenger.BEGINNING_TERMINATE_USER_CONN.format(
            target_user=self.target_user)
        self.logger.highlight('info', message, 'white')

        try:
            pg_pid = self.connecter.get_pid_str()  # Get PID variable's name

            sql = Queries.GET_CURRENT_PG_USER
            self.connecter.cursor.execute(sql)
            current_pg_user = self.connecter.cursor.fetchone()[0]

            # Refuse to kill the connections of the user running the program
            if self.target_user == current_pg_user:
                message = Messenger.TARGET_USER_IS_CURRENT_USER.format(
                    target_user=self.target_user)
                self.logger.highlight('warning', message, 'yellow')

            else:
                # Only try to terminate if the user has live backends
                formatted_sql = Queries.BACKEND_PG_USER_EXISTS.format(
                    pg_pid=pg_pid, target_user=self.target_user)
                self.connecter.cursor.execute(formatted_sql)
                result = self.connecter.cursor.fetchone()

                if result:
                    formatted_sql = Queries.TERMINATE_BACKEND_PG_USER.format(
                        pg_pid=pg_pid, target_user=self.target_user)
                    self.connecter.cursor.execute(formatted_sql)
                else:
                    message = Messenger.NO_USER_CONNS.format(
                        target_user=self.target_user)
                    self.logger.info(message)

            message = Messenger.TERMINATE_USER_CONN_DONE.format(
                target_user=self.target_user)
            self.logger.highlight('info', message, 'green')

        except Exception as e:
            # FIX: this debug call was corrupted in the source ('...'******'...'
            # is a syntax error); restored the implicit string concatenation
            # used by every sibling method
            self.logger.debug('Error en la función "terminate_backend_user": '
                              '{}.'.format(str(e)))
            message = Messenger.TERMINATE_USER_CONN_FAIL.format(
                target_user=self.target_user)
            self.logger.highlight('warning', message, 'yellow', effect='bold')

        self.logger.highlight('info', Messenger.TERMINATOR_DONE, 'green')

    def terminate_backend_db(self, target_db):
        '''
        Target:
            - terminate every connection to a PostgreSQL database (except the
              current one, if it is connected to the target database).
        Parameters:
            - target_db: name of the target database, or a row containing a
              "datname" field.
        '''
        try:
            # The variable "target_db" sometimes could be a string or a list
            # of list, so it is necessary to check it first
            if not isinstance(target_db, str):
                target_db = target_db['datname']

            pg_pid = self.connecter.get_pid_str()  # Get PID variable's name

            # Only try to terminate if the database has live backends
            formatted_sql = Queries.BACKEND_PG_DB_EXISTS.format(
                pg_pid=pg_pid, target_db=target_db)

            self.connecter.cursor.execute(formatted_sql)
            result = self.connecter.cursor.fetchone()

            if result:

                formatted_sql = Queries.TERMINATE_BACKEND_PG_DB.format(
                    pg_pid=pg_pid, target_db=target_db)

                self.connecter.cursor.execute(formatted_sql)

                message = Messenger.TERMINATE_DB_CONN_DONE.format(
                    target_dbname=target_db)
                self.logger.info(message)

            else:
                message = Messenger.NO_DB_CONNS.format(target_db=target_db)
                self.logger.info(message)

        except Exception as e:
            self.logger.debug('Error en la función "terminate_backend_db": '
                              '{}.'.format(str(e)))
            message = Messenger.TERMINATE_DB_CONN_FAIL.format(
                target_dbname=target_db)
            self.logger.highlight('warning', message, 'yellow')

    def terminate_backend_dbs(self, ter_list):
        '''
        Target:
            - terminate every connection to some PostgreSQL databases (except
              the current one, if it is connected to one of the target
              databases).
        Parameters:
            - ter_list: the list of databases whose connections are going to be
              terminated.
        '''
        message = Messenger.BEGINNING_TERMINATE_DBS_CONN
        self.logger.highlight('info', message, 'white')

        if ter_list:
            for target_db in ter_list:
                self.terminate_backend_db(target_db)
        else:
            self.logger.highlight('warning',
                                  Messenger.TERMINATOR_HAS_NOTHING_TO_DO,
                                  'yellow')

        self.logger.highlight('info', Messenger.TERMINATOR_DONE, 'green')

    def terminate_backend_all(self):
        '''
        Target:
            - remove every connection to PostgreSQL (except the current one).
        '''
        try:
            message = Messenger.BEGINNING_TERMINATE_ALL_CONN
            self.logger.highlight('info', message, 'white')

            pg_pid = self.connecter.get_pid_str()  # Get PID variable's name

            # Only try to terminate if there are other live backends
            formatted_sql = Queries.BACKEND_PG_ALL_EXISTS.format(pg_pid=pg_pid)

            self.connecter.cursor.execute(formatted_sql)
            result = self.connecter.cursor.fetchone()

            if result:
                formatted_sql = Queries.TERMINATE_BACKEND_PG_ALL.format(
                    pg_pid=pg_pid)
                self.connecter.cursor.execute(formatted_sql)
            else:
                self.logger.info(Messenger.NO_CONNS)

            self.logger.highlight('info', Messenger.TERMINATE_ALL_CONN_DONE,
                                  'green')

        except Exception as e:
            self.logger.debug('Error en la función "terminate_backend_all": '
                              '{}.'.format(str(e)))
            message = Messenger.TERMINATE_ALL_CONN_FAIL
            self.logger.highlight('warning', message, 'yellow', effect='bold')

        self.logger.highlight('info', Messenger.TERMINATOR_DONE, 'green')
Beispiel #23
0
class RestorerCluster:
    '''Restores a whole cluster's backup file into PostgreSQL by piping the
    (possibly compressed) dump through psql.
    '''

    # An object with connection parameters to connect to PostgreSQL
    connecter = None
    logger = None  # Logger to show and log some messages
    cluster_backup = ''  # Absolute path of the backup file (of a cluster)

    def __init__(self, connecter=None, cluster_backup='', logger=None):
        '''
        Target:
            - validate and store the restoration parameters. Execution is
              stopped if the connecter is missing or the backup file does
              not exist.
        Parameters:
            - connecter: an object with connection parameters to connect to
              PostgreSQL.
            - cluster_backup: absolute path of the cluster's backup file.
            - logger: a logger to show and log some messages.
        '''
        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if connecter:
            self.connecter = connecter
        else:
            self.logger.stop_exe(Messenger.NO_CONNECTION_PARAMS)

        # The backup file must exist on disk before attempting a restore
        if cluster_backup and os.path.isfile(cluster_backup):
            self.cluster_backup = cluster_backup
        else:
            self.logger.stop_exe(Messenger.NO_BKP_TO_RESTORE)

        message = Messenger.CL_RESTORER_VARS.format(
            server=self.connecter.server,
            user=self.connecter.user,
            port=self.connecter.port,
            cluster_backup=self.cluster_backup)
        self.logger.debug(Messenger.CL_RESTORER_VARS_INTRO)
        self.logger.debug(message)

    def restore_cluster_backup(self):
        '''
        Target:
            - restore a cluster's backup in PostgreSQL. The cluster must have
              been created before this process. The backup file is piped
              into psql, decompressing it first when the extension requires
              it. Execution is stopped on failure.
        '''
        # Regular expression which must match the backup's name
        regex = r'.*ht_(.+_cluster)_(\d{8}_\d{6}_.+)\.(dump|bz2|gz|zip)$'
        regex = re.compile(regex)

        if re.match(regex, self.cluster_backup):
            # Store the parts of the backup's name (servername, date, ext)
            parts = regex.search(self.cluster_backup).groups()
            # Store only the extension to know the type of file
            ext = parts[2]
        else:
            # FIX: the original line was a bare no-op expression
            # ("Messenger.NO_BACKUP_FORMAT"), which left "ext" unbound and
            # raised NameError below; stop execution like the sibling
            # Restorer class does
            self.logger.stop_exe(Messenger.NO_BACKUP_FORMAT)

        message = Messenger.BEGINNING_CL_RESTORER.format(
            cluster_backup=self.cluster_backup)
        self.logger.highlight('info', message, 'white')
        self.logger.info(Messenger.WAIT_PLEASE)

        # TODO: make dissappear every line about the operation shown in console
        if ext == 'gz':
            command = 'gunzip -c {} -k | psql postgres -U {} -h {} ' \
                      '-p {}'.format(
                          self.cluster_backup, self.connecter.user,
                          self.connecter.server, self.connecter.port)
        elif ext == 'bz2':
            command = 'bunzip2 -c {} -k | psql postgres -U {} -h {} ' \
                      '-p {}'.format(
                          self.cluster_backup, self.connecter.user,
                          self.connecter.server, self.connecter.port)
        elif ext == 'zip':
            command = 'unzip -p {} | psql postgres -U {} -h {} -p {}'.format(
                self.cluster_backup, self.connecter.user,
                self.connecter.server, self.connecter.port)
        else:
            command = 'psql postgres -U {} -h {} -p {} < {}'.format(
                self.connecter.user, self.connecter.server,
                self.connecter.port, self.cluster_backup)

        try:
            start_time = DateTools.get_current_datetime()
            # Make the restauration of the cluster
            result = subprocess.call(command, shell=True)
            end_time = DateTools.get_current_datetime()
            # Get and show the process' duration
            diff = DateTools.get_diff_datetimes(start_time, end_time)

            # A non-zero exit status means psql (or the pipe) failed
            if result != 0:
                raise Exception()

            message = Messenger.RESTORE_CL_DONE.format(
                cluster_backup=self.cluster_backup, diff=diff)
            self.logger.highlight('info', message, 'green')

            self.logger.highlight('info',
                                  Messenger.RESTORER_DONE,
                                  'green',
                                  effect='bold')

        except Exception as e:
            self.logger.debug('Error en la función "restore_cluster_backup": '
                              '{}.'.format(str(e)))
            message = Messenger.RESTORE_CL_FAIL.format(
                cluster_backup=self.cluster_backup)
            self.logger.stop_exe(message)
class Terminator:
    '''
    Terminates PostgreSQL backend connections: every connection, the
    connections of a specific user, or the connections to a list of
    databases (always excluding the backend running this program).
    '''

    target_all = None  # Flag which determinates if terminate any connection
    target_user = None  # Terminate any connection of an specific user
    target_dbs = []  # Terminate any connection to a list of databases
    # An object with connection parameters to connect to PostgreSQL
    connecter = None
    logger = None  # Logger to show and log some messages

    def __init__(self, connecter, target_all=False, target_user='',
                 target_dbs=None, logger=None):
        '''
        Parameters:
            - connecter: an object with connection parameters to PostgreSQL
              (required; execution stops without it).
            - target_all: bool (or bool-like string) indicating whether every
              connection must be terminated.
            - target_user: name of the user whose connections are targeted.
            - target_dbs: list (or comma-separated string) of databases whose
              connections are targeted. Default is None instead of a mutable
              list to avoid the shared-default-argument pitfall; None is
              treated as an empty list.
            - logger: a logger to show and log some messages.
        '''
        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if connecter:
            self.connecter = connecter
        else:
            self.logger.stop_exe(Messenger.NO_CONNECTION_PARAMS)

        # Accept None, a real bool, or a string convertible to bool
        if target_all is None or isinstance(target_all, bool):
            self.target_all = target_all
        elif Checker.str_is_bool(target_all):
            self.target_all = Casting.str_to_bool(target_all)
        else:
            self.logger.stop_exe(Messenger.INVALID_TARGET_ALL)

        self.target_user = target_user

        if target_dbs is None:
            self.target_dbs = []
        elif isinstance(target_dbs, list):
            self.target_dbs = target_dbs
        else:
            self.target_dbs = Casting.str_to_list(target_dbs)

        message = Messenger.TERMINATOR_VARS.format(
            server=self.connecter.server, user=self.connecter.user,
            port=self.connecter.port, target_all=self.target_all,
            target_user=target_user, target_dbs=self.target_dbs)
        self.logger.debug(Messenger.TERMINATOR_VARS_INTRO)
        self.logger.debug(message)

    def terminate_backend_user(self):
        '''
        Target:
            - terminate every connection of a specific user to PostgreSQL (as
              long as the target user is not the one who is running the
              program).
        '''
        message = Messenger.BEGINNING_TERMINATE_USER_CONN.format(
            target_user=self.target_user)
        self.logger.highlight('info', message, 'white')

        try:
            pg_pid = self.connecter.get_pid_str()  # Get PID variable's name

            # Find out which PostgreSQL user this program is connected as
            sql = Queries.GET_CURRENT_PG_USER
            self.connecter.cursor.execute(sql)
            current_pg_user = self.connecter.cursor.fetchone()[0]

            if self.target_user == current_pg_user:
                # Refuse to cut our own connection
                message = Messenger.TARGET_USER_IS_CURRENT_USER.format(
                    target_user=self.target_user)
                self.logger.highlight('warning', message, 'yellow')

            else:
                # Check whether the user has any live backend before killing
                formatted_sql = Queries.BACKEND_PG_USER_EXISTS.format(
                    pg_pid=pg_pid, target_user=self.target_user)
                self.connecter.cursor.execute(formatted_sql)
                result = self.connecter.cursor.fetchone()

                if result:
                    formatted_sql = Queries.TERMINATE_BACKEND_PG_USER.format(
                        pg_pid=pg_pid, target_user=self.target_user)
                    self.connecter.cursor.execute(formatted_sql)
                else:
                    message = Messenger.NO_USER_CONNS.format(
                        target_user=self.target_user)
                    self.logger.info(message)

            message = Messenger.TERMINATE_USER_CONN_DONE.format(
                target_user=self.target_user)
            self.logger.highlight('info', message, 'green')

        except Exception as e:
            # NOTE(review): this line was corrupted in the original source
            # ('******' scraping artifact); restored to match the debug
            # pattern used by the sibling methods.
            self.logger.debug('Error en la función "terminate_backend_user": '
                              '{}.'.format(str(e)))
            message = Messenger.TERMINATE_USER_CONN_FAIL.format(
                target_user=self.target_user)
            self.logger.highlight('warning', message, 'yellow', effect='bold')

        self.logger.highlight('info', Messenger.TERMINATOR_DONE, 'green')

    def terminate_backend_db(self, target_db):
        '''
        Target:
            - terminate every connection to a PostgreSQL database (except the
              current one, if it is connected to the target database).
        Parameters:
            - target_db: the database's name, either as a string or as a
              row-like mapping with a 'datname' key.
        '''
        try:
            # The variable "target_db" sometimes could be a string or a list
            # of list, so it is necessary to check it first
            if not isinstance(target_db, str):
                target_db = target_db['datname']

            pg_pid = self.connecter.get_pid_str()  # Get PID variable's name

            # Check whether the database has any live backend first
            formatted_sql = Queries.BACKEND_PG_DB_EXISTS.format(
                pg_pid=pg_pid, target_db=target_db)

            self.connecter.cursor.execute(formatted_sql)
            result = self.connecter.cursor.fetchone()

            if result:

                formatted_sql = Queries.TERMINATE_BACKEND_PG_DB.format(
                    pg_pid=pg_pid, target_db=target_db)

                self.connecter.cursor.execute(formatted_sql)

                message = Messenger.TERMINATE_DB_CONN_DONE.format(
                    target_dbname=target_db)
                self.logger.info(message)

            else:
                message = Messenger.NO_DB_CONNS.format(target_db=target_db)
                self.logger.info(message)

        except Exception as e:
            self.logger.debug('Error en la función "terminate_backend_db": '
                              '{}.'.format(str(e)))
            message = Messenger.TERMINATE_DB_CONN_FAIL.format(
                target_dbname=target_db)
            self.logger.highlight('warning', message, 'yellow')

    def terminate_backend_dbs(self, ter_list):
        '''
        Target:
            - terminate every connection to some PostgreSQL databases (except
              the current one, if it is connected to one of the target
              databases).
        Parameters:
            - ter_list: the list of databases whose connections are going to be
              terminated.
        '''
        message = Messenger.BEGINNING_TERMINATE_DBS_CONN
        self.logger.highlight('info', message, 'white')

        if ter_list:
            for target_db in ter_list:
                self.terminate_backend_db(target_db)
        else:
            self.logger.highlight('warning',
                                  Messenger.TERMINATOR_HAS_NOTHING_TO_DO,
                                  'yellow')

        self.logger.highlight('info', Messenger.TERMINATOR_DONE, 'green')

    def terminate_backend_all(self):
        '''
        Target:
            - remove every connection to PostgreSQL (except the current one).
        '''
        try:
            message = Messenger.BEGINNING_TERMINATE_ALL_CONN
            self.logger.highlight('info', message, 'white')

            pg_pid = self.connecter.get_pid_str()  # Get PID variable's name

            # Check whether any other backend exists before killing
            formatted_sql = Queries.BACKEND_PG_ALL_EXISTS.format(pg_pid=pg_pid)

            self.connecter.cursor.execute(formatted_sql)
            result = self.connecter.cursor.fetchone()

            if result:
                formatted_sql = Queries.TERMINATE_BACKEND_PG_ALL.format(
                    pg_pid=pg_pid)
                self.connecter.cursor.execute(formatted_sql)
            else:
                self.logger.info(Messenger.NO_CONNS)

            self.logger.highlight('info', Messenger.TERMINATE_ALL_CONN_DONE,
                                  'green')

        except Exception as e:
            self.logger.debug('Error en la función "terminate_backend_all": '
                              '{}.'.format(str(e)))
            message = Messenger.TERMINATE_ALL_CONN_FAIL
            self.logger.highlight('warning', message, 'yellow', effect='bold')

        self.logger.highlight('info', Messenger.TERMINATOR_DONE, 'green')
# Beispiel #25
# 0
class Informer:
    '''
    Shows information about a PostgreSQL server: database, user and
    connection data, server version and uptime.
    '''

    # An object with connection parameters to connect to PostgreSQL
    connecter = None
    logger = None  # Logger to show and log some messages
    connpids = []  # List of backend PIDs to get some info about
    dbnames = []  # List of databases to get some info about
    usernames = []  # List of users to get some info about

    def __init__(self,
                 connecter=None,
                 connpids=None,
                 dbnames=None,
                 usernames=None,
                 logger=None):
        '''
        Parameters:
            - connecter: an object with connection parameters to PostgreSQL
              (required; execution stops without it).
            - connpids / dbnames / usernames: optional lists restricting
              which backends / databases / users to report on. Defaults are
              None instead of mutable lists to avoid the shared-default
              pitfall; None is treated as "all" (empty list).
            - logger: a logger to show and log some messages.
        '''
        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if connecter:
            self.connecter = connecter
        else:
            self.logger.stop_exe(Messenger.NO_CONNECTION_PARAMS)

        self.connpids = connpids if connpids is not None else []
        self.dbnames = dbnames if dbnames is not None else []
        self.usernames = usernames if usernames is not None else []

    def _show_header(self, title):
        '''
        Print a title framed between two lines of asterisks of its length.
        '''
        border = '*' * len(title)
        self.logger.highlight('info', border, 'white')
        self.logger.highlight('info', title, 'white')
        self.logger.highlight('info', border, 'white')

    def show_pg_dbnames(self):
        '''
        Target:
            - show the names of every PostgreSQL database.
        '''
        self._show_header(Messenger.SHOWING_DBS_NAME)

        # Get all the names of PostgreSQL databases and show them
        dbnames = self.connecter.get_pg_dbnames()

        if dbnames:
            for dbname in dbnames:
                self.logger.info(dbname)
        else:
            self.logger.highlight('warning', Messenger.NO_DB_DATA_TO_SHOW,
                                  'yellow', effect='bold')

    def show_pg_usernames(self):
        '''
        Target:
            - show the names of every PostgreSQL user.
        '''
        self._show_header(Messenger.SHOWING_USERS_NAME)

        # Get all the names of PostgreSQL users and show them
        usernames = self.connecter.get_pg_usernames()

        if usernames:
            for username in usernames:
                self.logger.info(username)
        else:
            self.logger.highlight('warning', Messenger.NO_USER_DATA_TO_SHOW,
                                  'yellow', effect='bold')

    def show_pg_connpids(self):
        '''
        Target:
            - show the PIDs of every PostgreSQL backend.
        '''
        self._show_header(Messenger.SHOWING_CONNS_PID)

        # Get all the PIDs of PostgreSQL backends and show them
        connpids = self.connecter.get_pg_connpids()

        if connpids:
            for connpid in connpids:
                self.logger.info(str(connpid))
        else:
            self.logger.highlight('warning', Messenger.NO_CONN_DATA_TO_SHOW,
                                  'yellow', effect='bold')

    def show_pg_dbs_data(self):
        '''
        Target:
            - show some info about every PostgreSQL database (or only the ones
              in "self.dbnames" if a list was given).
        '''
        self._show_header(Messenger.SHOWING_DBS_DATA)

        # Get every PostgreSQL database if no list specified, otherwise, keep
        # the specified list (given by console arguments)
        if not self.dbnames:
            self.dbnames = self.connecter.get_pg_dbnames()

        dbs_data = []
        for dbname in self.dbnames:  # Get data of each selected database
            result = self.connecter.get_pg_db_data(dbname)
            if result:
                dbs_data.append(result)

        if dbs_data:
            # Labels and row keys for the per-database report, in the order
            # they must be printed
            fields = [
                (Messenger.OWNER, 'owner'),
                (Messenger.ENCODING, 'encoding'),
                (Messenger.DATSIZE, 'size'),
                (Messenger.DATCOLLATE, 'datcollate'),
                (Messenger.DATCTYPE, 'datctype'),
                (Messenger.DATISTEMPLATE, 'datistemplate'),
                (Messenger.DATALLOWCONN, 'datallowconn'),
                (Messenger.DATCONNLIMIT, 'datconnlimit'),
                (Messenger.DATLASTSYSOID, 'datlastsysoid'),
                (Messenger.DATFROZENXID, 'datfrozenxid'),
                (Messenger.DATTABLESPACE, 'dattablespace'),
                (Messenger.DATACL, 'datacl'),
            ]
            for db in dbs_data:
                message = Messenger.DATNAME + str(db['datname'])
                self.logger.highlight('info', message, 'cyan')
                for label, key in fields:
                    self.logger.info(label + str(db[key]))
        else:
            self.logger.highlight('warning', Messenger.NO_DB_DATA_TO_SHOW,
                                  'yellow', effect='bold')

    def show_pg_users_data(self):
        '''
        Target:
            - show some info about every PostgreSQL user (or only the ones in
              "self.usernames" if a list was given).
        '''
        self._show_header(Messenger.SHOWING_USERS_DATA)

        # Get every PostgreSQL user if no list specified, otherwise, keep
        # the specified list (given by console arguments)
        if not self.usernames:
            self.usernames = self.connecter.get_pg_usernames()

        users_data = []
        for username in self.usernames:  # Get data of each selected user
            result = self.connecter.get_pg_user_data(username)
            if result:
                users_data.append(result)

        pg_version = self.connecter.get_pg_version()  # Get PostgreSQL version

        if users_data:
            for user in users_data:
                message = Messenger.USENAME + str(user['usename'])
                self.logger.highlight('info', message, 'cyan')
                self.logger.info(Messenger.USESYSID + str(user['usesysid']))
                self.logger.info(Messenger.USECREATEDB +
                                 str(user['usecreatedb']))
                self.logger.info(Messenger.USESUPER + str(user['usesuper']))
                self.logger.info(Messenger.USECATUPD + str(user['usecatupd']))
                # "userepl" only exists from the PID-threshold version onwards
                if pg_version >= self.connecter.PG_PID_VERSION_THRESHOLD:
                    self.logger.info(Messenger.USEREPL + str(user['userepl']))
                self.logger.info(Messenger.PASSWD + str(user['passwd']))
                self.logger.info(Messenger.VALUNTIL + str(user['valuntil']))
                self.logger.info(Messenger.USECONFIG + str(user['useconfig']))
        else:
            self.logger.highlight('warning', Messenger.NO_USER_DATA_TO_SHOW,
                                  'yellow', effect='bold')

    def show_pg_conns_data(self):
        '''
        Target:
            - show some info about every PostgreSQL backend (or only the ones
              in "self.connpids" if a list was given).
        '''
        self._show_header(Messenger.SHOWING_CONNS_DATA)

        # Get every PostgreSQL connection if no list specified, otherwise, keep
        # the specified list (given by console arguments)
        if not self.connpids:
            self.connpids = self.connecter.get_pg_connpids()

        conns_data = []
        for connpid in self.connpids:  # Get data of each selected backend
            result = self.connecter.get_pg_conn_data(connpid)
            if result:
                conns_data.append(result)

        pg_version = self.connecter.get_pg_version()  # Get PostgreSQL version

        # Some columns changed name or appeared at the PID-threshold version
        modern = pg_version >= self.connecter.PG_PID_VERSION_THRESHOLD

        if conns_data:
            for conn in conns_data:
                if modern:
                    message = Messenger.PID + str(conn['pid'])
                else:
                    message = Messenger.PROCPID + str(conn['procpid'])
                self.logger.highlight('info', message, 'cyan')
                self.logger.info(Messenger.DATID + str(conn['datid']))
                self.logger.info(Messenger.DATNAME + str(conn['datname']))
                self.logger.info(Messenger.USESYSID + str(conn['usesysid']))
                self.logger.info(Messenger.USENAME + str(conn['usename']))
                self.logger.info(Messenger.APPLICATION_NAME +
                                 str(conn['application_name']))
                self.logger.info(Messenger.CLIENT_ADDR +
                                 str(conn['client_addr']))
                self.logger.info(Messenger.CLIENT_HOSTNAME +
                                 str(conn['client_hostname']))
                self.logger.info(Messenger.CLIENT_PORT +
                                 str(conn['client_port']))
                self.logger.info(Messenger.BACKEND_START +
                                 str(conn['backend_start']))
                self.logger.info(Messenger.XACT_START +
                                 str(conn['xact_start']))
                self.logger.info(Messenger.QUERY_START +
                                 str(conn['query_start']))
                if modern:
                    self.logger.info(Messenger.STATE_CHANGE +
                                     str(conn['state_change']))
                self.logger.info(Messenger.WAITING + str(conn['waiting']))
                if modern:
                    self.logger.info(Messenger.STATE + str(conn['state']))
                    self.logger.info(Messenger.QUERY + str(conn['query']))
        else:
            self.logger.highlight('warning', Messenger.NO_CONN_DATA_TO_SHOW,
                                  'yellow', effect='bold')

    def show_pg_version(self):
        '''
        Target:
            - show PostgreSQL version.
        '''
        self._show_header(Messenger.SHOWING_PG_VERSION)

        pretty_pg_version = self.connecter.get_pretty_pg_version()
        if pretty_pg_version:
            self.logger.info(pretty_pg_version)
        else:
            self.logger.highlight('warning', Messenger.NO_PG_VERSION_TO_SHOW,
                                  'yellow', effect='bold')

    def show_pg_nversion(self):
        '''
        Target:
            - show PostgreSQL version in numeric format.
        '''
        pg_version = self.connecter.get_pg_version()
        print(pg_version)

    def show_pg_time_start(self):
        '''
        Target:
            - show when PostgreSQL was started.
        '''
        self._show_header(Messenger.SHOWING_PG_TIME_START)

        pg_time_start = self.connecter.get_pg_time_start()
        if pg_time_start:
            self.logger.info(str(pg_time_start))
        else:
            self.logger.highlight('warning',
                                  Messenger.NO_PG_TIME_START_TO_SHOW,
                                  'yellow', effect='bold')

    def show_pg_time_up(self):
        '''
        Target:
            - show how long PostgreSQL has been working.
        '''
        self._show_header(Messenger.SHOWING_PG_TIME_UP)

        pg_time_up = self.connecter.get_pg_time_up()
        if pg_time_up:
            self.logger.info(str(pg_time_up))
        else:
            self.logger.highlight('warning', Messenger.NO_PG_TIME_UP_TO_SHOW,
                                  'yellow', effect='bold')
# Beispiel #26
# 0
class Dropper:
    '''
    Removes (drops) a list of databases in PostgreSQL, temporarily
    disallowing connections to each one while it is being dropped.
    '''

    # An object with connection parameters to connect to PostgreSQL
    connecter = None
    logger = None  # Logger to show and log some messages
    dbnames = []  # List of databases to be removed

    def __init__(self, connecter=None, dbnames=None, logger=None):
        '''
        Parameters:
            - connecter: an object with connection parameters to PostgreSQL
              (required; execution stops without it).
            - dbnames: list (or comma-separated string) of databases to be
              removed. Default is None instead of a mutable list to avoid the
              shared-default-argument pitfall; None is treated as an empty
              list.
            - logger: a logger to show and log some messages.
        '''
        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if connecter:
            self.connecter = connecter
        else:
            self.logger.stop_exe(Msg.NO_CONNECTION_PARAMS)

        if dbnames is None:
            self.dbnames = []
        elif isinstance(dbnames, list):
            self.dbnames = dbnames
        else:
            self.dbnames = Casting.str_to_list(dbnames)

        msg = Msg.DROPPER_VARS.format(server=self.connecter.server,
                                      user=self.connecter.user,
                                      port=self.connecter.port,
                                      dbnames=self.dbnames)
        self.logger.debug(Msg.DROPPER_VARS_INTRO)
        self.logger.debug(msg)

    def drop_pg_db(self, dbname, pg_superuser):
        '''
        Target:
            - remove a database in PostgreSQL.
        Parameters:
            - dbname: the PostgreSQL database's name which is going to be
              removed.
            - pg_superuser: a flag which indicates whether the current user is
              PostgreSQL superuser or not.
        '''
        delete = False

        try:
            self.connecter.cursor.execute(Queries.PG_DB_EXISTS, (dbname, ))
            result = self.connecter.cursor.fetchone()

            if result:

                # Refuse to drop while other backends are connected to it
                pg_pid = self.connecter.get_pid_str()
                formatted_sql = Queries.BACKEND_PG_DB_EXISTS.format(
                    pg_pid=pg_pid, target_db=dbname)

                self.connecter.cursor.execute(formatted_sql)
                result = self.connecter.cursor.fetchone()

                # If there are not any connections to the target database...
                if not result:

                    # Users who are not superusers will only be able to drop
                    # the databases they own
                    if not pg_superuser:

                        self.connecter.cursor.execute(Queries.GET_PG_DB_OWNER,
                                                      (dbname, ))
                        db = self.connecter.cursor.fetchone()

                        if db['owner'] != self.connecter.user:

                            msg = Msg.DROP_DB_NOT_ALLOWED.format(
                                user=self.connecter.user, dbname=dbname)
                            self.logger.highlight('warning', msg, 'yellow')

                        else:
                            delete = True

                    else:
                        delete = True

                    if delete:

                        # Get the database's "datallowconn" value
                        datallowconn = self.connecter.get_datallowconn(dbname)

                        # If datallowconn is allowed, change it temporarily
                        if datallowconn:
                            # Disallow connections to the database during the
                            # process
                            result = self.connecter.disallow_db_conn(dbname)
                            if not result:
                                msg = Msg.DISALLOW_CONN_TO_PG_DB_FAIL.format(
                                    dbname=dbname)
                                self.logger.highlight('warning', msg, 'yellow')

                        fmt_query_drop_db = Queries.DROP_PG_DB.format(
                            dbname=dbname)

                        start_time = DateTools.get_current_datetime()
                        # Drop the database
                        self.connecter.cursor.execute(fmt_query_drop_db)
                        end_time = DateTools.get_current_datetime()
                        # Get and show the process' duration
                        diff = DateTools.get_diff_datetimes(start_time,
                                                            end_time)
                        msg = Msg.DROP_DB_DONE.format(dbname=dbname, diff=diff)
                        self.logger.highlight('info', msg, 'green')

                        # If datallowconn was allowed, leave it as it was
                        if datallowconn:
                            # Allow connections to the database at the end of
                            # the process
                            result = self.connecter.allow_db_conn(dbname)
                            if not result:
                                msg = Msg.ALLOW_CONN_TO_PG_DB_FAIL.format(
                                    dbname=dbname)
                                self.logger.highlight('warning', msg, 'yellow')

                else:
                    msg = Msg.ACTIVE_CONNS_ERROR.format(dbname=dbname)
                    self.logger.highlight('warning', msg, 'yellow')

            else:
                msg = Msg.DB_DOES_NOT_EXIST.format(dbname=dbname)
                self.logger.highlight('warning', msg, 'yellow')

        except Exception as e:
            self.logger.debug('Error en la función "drop_pg_db": '
                              '{}.'.format(str(e)))
            self.logger.highlight('warning', Msg.DROP_DB_FAIL.format(
                dbname=dbname), 'yellow')

    def drop_pg_dbs(self, dbnames):
        '''
        Target:
            - remove a list of databases in PostgreSQL.
        Parameters:
            - dbnames: the list of databases' names to be removed.
        '''
        self.logger.highlight('info', Msg.BEGINNING_DROPPER, 'white')
        # Check if the role of user connected to PostgreSQL is superuser
        pg_superuser = self.connecter.is_pg_superuser()

        if dbnames:

            # Iterate the parameter that was just checked (the original code
            # tested "dbnames" but looped over "self.dbnames", which could
            # drop a different set than the caller requested)
            for dbname in dbnames:

                msg = Msg.PROCESSING_DB.format(dbname=dbname)
                self.logger.highlight('info', msg, 'cyan')

                self.drop_pg_db(dbname, pg_superuser)

        else:
            self.logger.highlight('warning', Msg.DROPPER_HAS_NOTHING_TO_DO,
                                  'yellow', effect='bold')

        self.logger.highlight('info', Msg.DROP_DBS_DONE, 'green',
                              effect='bold')
# Beispiel #27
# 0
class Backer:
    '''
    Backs up individual PostgreSQL databases with "pg_dump" (see
    "backup_db"/"backup_dbs"), optionally vacuuming each database first.
    '''

    # NOTE(review): these are class-level defaults shared by every instance;
    # __init__ always rebinds them as instance attributes, but the mutable
    # ones ([]) would be shared if an instance skipped that rebinding.
    bkp_path = ''  # The path where the backups are stored
    group = ''  # The name of the subdirectory where the backups are stored
    bkp_type = ''  # The type of the backups' files
    prefix = ''  # The prefix of the backups' names
    in_dbs = []  # List of databases to be included in the process
    in_regex = ''  # Regular expression which must match the included databases
    # Flag which determinates whether inclusion conditions predominate over the
    # exclusion ones
    in_priority = False
    ex_dbs = []  # List of databases to be excluded in the process
    ex_regex = ''  # Regular expression which must match the excluded databases
    # Flag which determinates whether the templates must be included
    ex_templates = True
    # Flag which determinates whether the included databases must be vacuumed
    # before the backup process
    vacuum = True
    # Use other PostgreSQL user during the backup process (only for superusers)
    db_owner = ''
    # An object with connection parameters to connect to PostgreSQL
    connecter = None
    logger = None  # Logger to show and log some messages

    def __init__(self, connecter=None, bkp_path='', group='',
                 bkp_type='dump', prefix='', in_dbs=None, in_regex='',
                 in_priority=False, ex_dbs=None, ex_regex='',
                 ex_templates=True, vacuum=True, db_owner='', logger=None):
        '''
        Target:
            - store and validate the backup settings, falling back to the
              values in "Default" when an argument is missing or empty.
        Parameters:
            - connecter: object with connection parameters to PostgreSQL
              (mandatory: execution is stopped without it).
            - bkp_path: directory where the backups are stored.
            - group: subdirectory grouping this run's backups.
            - bkp_type: compression type of the backups' files.
            - prefix: prefix of the backups' names.
            - in_dbs: list (or comma-separated string) of databases to be
              included in the process.
            - in_regex: regex which the included databases must match.
            - in_priority: whether inclusion conditions predominate over the
              exclusion ones (bool or boolean-like string).
            - ex_dbs: list (or comma-separated string) of databases to be
              excluded in the process (defaults to ['postgres']).
            - ex_regex: regex which the excluded databases must match.
            - ex_templates: whether the templates must be excluded (bool or
              boolean-like string).
            - vacuum: whether each database must be vacuumed before its
              backup (bool or boolean-like string).
            - db_owner: use this PostgreSQL user during the backup process
              (only for superusers).
            - logger: a logger to show and log some messages.
        '''
        # BUGFIX: avoid mutable default arguments — the old "[]" and
        # "['postgres']" defaults were single objects shared by every call
        if in_dbs is None:
            in_dbs = []
        if ex_dbs is None:
            ex_dbs = ['postgres']

        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if connecter:
            self.connecter = connecter
        else:
            self.logger.stop_exe(Msg.NO_CONNECTION_PARAMS)

        # If backup directory is not specified, create a default one to store
        # the backups
        if bkp_path:
            self.bkp_path = bkp_path
        else:
            self.bkp_path = Default.BKP_PATH
            Dir.create_dir(self.bkp_path, self.logger)

        if group:
            self.group = group
        else:
            self.group = Default.GROUP

        if bkp_type is None:
            self.bkp_type = Default.BKP_TYPE
        elif Checker.check_compress_type(bkp_type):
            self.bkp_type = bkp_type
        else:
            self.logger.stop_exe(Msg.INVALID_BKP_TYPE)

        self.prefix = prefix

        # Accept both a ready-made list and a comma-separated string
        if isinstance(in_dbs, list):
            self.in_dbs = in_dbs
        else:
            self.in_dbs = Casting.str_to_list(in_dbs)

        if Checker.check_regex(in_regex):
            self.in_regex = in_regex
        else:
            self.logger.stop_exe(Msg.INVALID_IN_REGEX)

        # Flags may arrive as booleans or as boolean-like strings
        if isinstance(in_priority, bool):
            self.in_priority = in_priority
        elif Checker.str_is_bool(in_priority):
            self.in_priority = Casting.str_to_bool(in_priority)
        else:
            self.logger.stop_exe(Msg.INVALID_IN_PRIORITY)

        if isinstance(ex_dbs, list):
            self.ex_dbs = ex_dbs
        else:
            self.ex_dbs = Casting.str_to_list(ex_dbs)

        if Checker.check_regex(ex_regex):
            self.ex_regex = ex_regex
        else:
            self.logger.stop_exe(Msg.INVALID_EX_REGEX)

        if isinstance(ex_templates, bool):
            self.ex_templates = ex_templates
        elif Checker.str_is_bool(ex_templates):
            self.ex_templates = Casting.str_to_bool(ex_templates)
        else:
            self.logger.stop_exe(Msg.INVALID_EX_TEMPLATES)

        if isinstance(vacuum, bool):
            self.vacuum = vacuum
        elif Checker.str_is_bool(vacuum):
            self.vacuum = Casting.str_to_bool(vacuum)
        else:
            self.logger.stop_exe(Msg.INVALID_VACUUM)

        # BUGFIX: the old condition was inverted ("if db_owner is None" kept
        # None, and every real value was discarded in favour of the default),
        # so a caller could never actually set a database owner. Empty/None
        # still falls back to the default, matching the surrounding style.
        if db_owner:
            self.db_owner = db_owner
        else:
            self.db_owner = Default.DB_OWNER

        # Log every setting this run is going to use
        msg = Msg.DB_BACKER_VARS.format(
            server=self.connecter.server, user=self.connecter.user,
            port=self.connecter.port, bkp_path=self.bkp_path, group=self.group,
            bkp_type=self.bkp_type, prefix=self.prefix, in_dbs=self.in_dbs,
            in_regex=self.in_regex, in_priority=self.in_priority,
            ex_dbs=self.ex_dbs, ex_regex=self.ex_regex,
            ex_templates=self.ex_templates, vacuum=self.vacuum,
            db_owner=self.db_owner)
        self.logger.debug(Msg.DB_BACKER_VARS_INTRO)
        self.logger.debug(msg)

    def backup_db(self, dbname, bkps_dir):
        '''
        Target:
            - make a backup of a specified database.
        Parameters:
            - dbname: name of the database which is going to be backuped.
            - bkps_dir: directory where the backup is going to be stored.
        Return:
            - a boolean which indicates the success of the process.
        '''
        # Local import: only this method builds a shell command line
        import shlex

        success = True
        # Get date and time of the zone
        init_ts = DateTools.get_date()
        # Get current year
        year = str(DateTools.get_year(init_ts))
        # Get current month
        month = str(DateTools.get_month(init_ts))
        # Create new directories with the year and the month of the backup
        bkp_dir = bkps_dir + year + '/' + month + '/'
        Dir.create_dir(bkp_dir, self.logger)
        # Set backup's name
        file_name = self.prefix + 'db_' + dbname + '_' + init_ts + '.' + \
            self.bkp_type

        # BUGFIX/SECURITY: the command is executed with shell=True, so every
        # interpolated value is quoted with shlex.quote(). The old unescaped
        # interpolation broke (or allowed injection) on names and paths
        # containing spaces or shell metacharacters; plain values are
        # unchanged by quoting.
        q_dbname = shlex.quote(dbname)
        q_user = shlex.quote(str(self.connecter.user))
        q_server = shlex.quote(str(self.connecter.server))
        q_port = shlex.quote(str(self.connecter.port))
        q_file = shlex.quote(bkp_dir + file_name)

        # Store the command to do depending on the backup type
        if self.bkp_type == 'gz':  # Zip with gzip
            command = 'pg_dump {} -Fc -U {} -h {} -p {} | gzip > {}'.format(
                q_dbname, q_user, q_server, q_port, q_file)
        elif self.bkp_type == 'bz2':  # Zip with bzip2
            command = 'pg_dump {} -Fc -U {} -h {} -p {} | bzip2 > {}'.format(
                q_dbname, q_user, q_server, q_port, q_file)
        elif self.bkp_type == 'zip':  # Zip with zip
            command = 'pg_dump {} -Fc -U {} -h {} -p {} | zip > {}'.format(
                q_dbname, q_user, q_server, q_port, q_file)
        else:  # Do not zip
            command = 'pg_dump {} -Fc -U {} -h {} -p {} > {}'.format(
                q_dbname, q_user, q_server, q_port, q_file)

        try:
            # Execute the command in console
            result = subprocess.call(command, shell=True)
            if result != 0:
                # BUGFIX: carry the exit status in the exception so the debug
                # log below is informative (the old bare Exception() logged
                # an empty message)
                raise Exception('exit status {}'.format(result))

        except Exception as e:
            self.logger.debug('Error en la función "backup_db": {}.'.format(
                str(e)))
            success = False

        return success

    def backup_dbs(self, dbs_all):
        '''
        Target:
            - make a backup of some specified databases.
        Parameters:
            - dbs_all: names of the databases which are going to be backuped.
        '''
        self.logger.highlight('info', Msg.CHECKING_BACKUP_DIR, 'white')

        # Every group of backups lives in its own subdirectory
        bkps_dir = self.bkp_path + self.group + Default.DB_BKPS_DIR
        Dir.create_dir(bkps_dir, self.logger)

        self.logger.info(Msg.DESTINY_DIR.format(path=bkps_dir))
        self.logger.highlight('info', Msg.PROCESSING_DB_BACKER, 'white')

        if not dbs_all:
            self.logger.highlight('warning', Msg.BACKER_HAS_NOTHING_TO_DO,
                                  'yellow', effect='bold')
        else:
            for record in dbs_all:

                dbname = record['datname']
                self.logger.highlight(
                    'info', Msg.PROCESSING_DB.format(dbname=dbname), 'cyan')

                # A database which forbids connections cannot be backuped
                if not record['datallowconn']:
                    self.logger.highlight(
                        'warning',
                        Msg.FORBIDDEN_DB_CONNECTION.format(dbname=dbname),
                        'yellow', effect='bold')
                    self.logger.highlight(
                        'warning', Msg.DB_BACKER_FAIL.format(dbname=dbname),
                        'yellow', effect='bold')
                    continue

                # Vacuum the database before the backup process if necessary
                if self.vacuum:
                    self.logger.info(
                        Msg.PRE_VACUUMING_DB.format(dbname=dbname))
                    vacuumer = Vacuumer(self.connecter, self.in_dbs,
                                        self.in_regex, self.in_priority,
                                        self.ex_dbs, self.ex_regex,
                                        self.ex_templates, self.db_owner,
                                        self.logger)
                    if vacuumer.vacuum_db(dbname):
                        self.logger.info(
                            Msg.PRE_VACUUMING_DB_DONE.format(dbname=dbname))
                    else:
                        self.logger.highlight(
                            'warning',
                            Msg.PRE_VACUUMING_DB_FAIL.format(dbname=dbname),
                            'yellow')

                self.logger.info(
                    Msg.BEGINNING_DB_BACKER.format(dbname=dbname))

                # Make the backup of the database and time the process
                start_time = DateTools.get_current_datetime()
                backed_up = self.backup_db(dbname, bkps_dir)
                end_time = DateTools.get_current_datetime()
                duration = DateTools.get_diff_datetimes(start_time, end_time)

                if backed_up:
                    self.logger.highlight(
                        'info',
                        Msg.DB_BACKER_DONE.format(dbname=dbname,
                                                  diff=duration),
                        'green')
                else:
                    self.logger.highlight(
                        'warning', Msg.DB_BACKER_FAIL.format(dbname=dbname),
                        'yellow', effect='bold')

        self.logger.highlight('info', Msg.BACKER_DONE, 'green', effect='bold')