Example No. 1
    def on_line_received(self, json_payload):
        Logger.debug('line received: %s', json_payload, 'server')
        payload = json.loads(json_payload)

        if payload and 'type' in payload and payload['type'] == 'user_input':
            user_input = UserInputDataUnit(payload['content']).get_object()
            self.game.apply_user_input_to_player(self.user_id, user_input)
            self.update_game()
            self.send_broadcast_payload_except_self({
                'type': 'user_input',
                'content': payload['content'],
                'user': PlayerDataUnit(self.game.get_player(self.user_id)).get_pdu()
            })

            self.interval_executor.execute_if_interval_elapsed(
                self, lambda self: self.send_broadcast_payload(
                    {
                        'type': 'game_state',
                        'content': GameDataUnit(self.game).get_pdu()
                    }))

        else:
            Logger.error('Unknown message type: (%s)', json_payload,
                         'server_protocol')
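
The handler above only accepts JSON objects carrying a 'type' and a 'content' field. A minimal sketch of a line it would accept; the 'content' shape is hypothetical, since it depends on what UserInputDataUnit decodes:

    import json

    # Hypothetical wire payload; 'content' must be whatever
    # UserInputDataUnit(...).get_object() knows how to decode.
    line = json.dumps({'type': 'user_input', 'content': {'keys': ['UP', 'FIRE']}})
    # protocol.on_line_received(line) would apply the input and rebroadcast it.
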
Example No. 2
    def __init__(self, pdu_or_object):
        self.pdu = {}

        Logger.debug("PDU init (%s:%s)", (type(pdu_or_object), pdu_or_object), 'network_protocol')
        if isinstance(pdu_or_object, dict):
            Logger.trace("PDU init from dict", category='network_protocol')
            self.from_dictionary(pdu_or_object)
        else:
            Logger.trace("PDU init from object", category='network_protocol')
            self.from_object(pdu_or_object)
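
The constructor dispatches on the argument's type: a dict is treated as a PDU received off the wire, anything else as a domain object to serialize. A minimal sketch of a subclass, with hypothetical names ('PduBase' stands in for the class above; the fields are made up):

    class VectorDataUnit(PduBase):
        def from_dictionary(self, pdu):
            # wire format -> internal PDU dict
            self.pdu = {'x': pdu['x'], 'y': pdu['y']}

        def from_object(self, vector):
            # domain object -> internal PDU dict
            self.pdu = {'x': vector.x, 'y': vector.y}

    VectorDataUnit({'x': 1, 'y': 2})  # takes the from_dictionary branch
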
Example No. 3
def send_to_server_recursive_threaded_loop(connection_handler):
    Logger.trace('loop from network process sender thread', category='client')

    while not connection_handler.input_queue.empty():
        payload = connection_handler.input_queue.get()
        Logger.debug('fetch input_queue from network process sender thread', payload, category='client')
        connection_handler.sendLine(json.dumps(payload))

    time.sleep(0.01)  # 10 ms
    # re-schedule this loop on Twisted's thread pool via a new Deferred
    threads.deferToThread(send_to_server_recursive_threaded_loop, connection_handler)
Example No. 4
 def forbid_root(logger=None):
     '''
     Target:
         - stop the execution of the program if this is being run by "root".
     '''
     if not logger:
         logger = Logger()
     try:
         if getuser() == 'root':  # Get system username
             raise Exception()
     except Exception as e:
         logger.debug('Error en la función "forbid_root": {}.'.format(
             str(e)))
         logger.stop_exe(Messenger.ROOT_NOT_ALLOWED)
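
getuser() here is the standard-library helper from the getpass module. A minimal sketch of the intended call site (the surrounding script is hypothetical):

    def main():
        forbid_root()  # aborts via logger.stop_exe(...) when run as root
        # ... the rest of the program runs as a regular user ...

    if __name__ == '__main__':
        main()
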
Example No. 5
class LogCfgParser:

    logger = None  # Logger to show and log some messages
    cfg = None  # Parser which stores the variables of the config file
    log_vars = {}  # Dictionary to store the loaded logger variables

    def __init__(self):
        pass

    def load_cfg(self, cfg_file):
        '''
        Target:
            - create a parser and read a config file.
        Parameters:
            - cfg_file: the config file to be read.
        '''
        try:
            self.cfg = configparser.ConfigParser()
            # If config file exists, read it
            if os.path.exists(cfg_file):
                self.cfg.read(cfg_file)
            else:
                raise Exception()
        except Exception as e:
            # Create logger in the exception to avoid redundancy errors
            if not self.logger:
                self.logger = Logger()
            self.logger.debug('Error en la función "load_cfg": {}.'.format(
                str(e)))
            self.logger.stop_exe(Messenger.INVALID_CFG_PATH)

    def parse_logger(self):
        '''
        Target:
            - get the logger variables from a configuration file and store them
              in a dictionary.
        '''
        try:
            self.log_vars = {
                'log_dir': self.cfg.get('settings', 'log_dir').strip(),
                'level': self.cfg.get('settings', 'level').strip(),
                'mute': self.cfg.get('settings', 'mute').strip(),
            }
        except Exception as e:
            # Create logger in the exception to avoid redundancy errors
            if not self.logger:
                self.logger = Logger()
            self.logger.debug('Error en la función "parse_logger": '
                              '{}.'.format(str(e)))
            self.logger.stop_exe(Messenger.LOGGER_CFG_DAMAGED)
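
parse_logger reads exactly three keys from a [settings] section, so a config file accepted by load_cfg and parse_logger looks like this (the values are illustrative, not from the original project):

    [settings]
    log_dir = /var/log/myapp
    level = debug
    mute = false
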
Example No. 6
def main():
    log = Logger(__name__)
    osqonnector = Bottle()

    for app in INSTALLED_APPS:
        log.debug("loading {}".format(app.__name__))
        osqonnector.merge(app.app)

    log.debug("[{}]: ready to serve ({}:{})".format(getpid(), config.HOST,
                                                    config.PORT))
    try:
        bjoern.run(osqonnector,
                   config.HOST,
                   config.PORT,
                   reuse_port=config.REUSE_PORT)
    except KeyboardInterrupt:
        log.info("bye.")
Example No. 7
    def create_dir(path, logger=None):
        '''
        Target:
            - create a directory if it does not exist.
        Parameters:
            - path: directory to create.
            - logger: a logger to show and log some messages.
        '''
        if not logger:
            logger = Logger()

        try:
            if not os.path.exists(path):  # If path does not exist...
                os.makedirs(path)  # Create it
        except Exception as e:
            logger.debug('Error en la función "create_dir": {}.'.format(
                str(e)))
            logger.stop_exe(Messenger.USER_NOT_ALLOWED_TO_CHDIR)
Example No. 8
    def db_filter_exclude(dbs_list, ex_dbs=[], ex_regex='', logger=None):
        '''
        Target:
            - filter a list of databases to remove only the specified ones,
              taking into account the received parameters.
        Parameters:
            - dbs_list: list to filter.
            - ex_dbs: list with the databases' names to exclude.
            - ex_regex: regular expression which indicates the databases' names
              to exclude.
            - logger: a logger to show and log some messages.
        Return:
            - a filtered list (subset of "dbs_list") resulting from applying
              the exclusion conditions "ex_dbs" and "ex_regex".
        '''
        if not logger:
            logger = Logger()

        # Copy the list so entries can be removed without conflict errors
        dbs_filtered = dbs_list[:]

        if '*' in ex_dbs:  # If exclude all...
            return []  # Return an empty list

        if ex_regex:
            for db in dbs_list:
                dbname = db['datname']
                # If database's name is in the exclusion list or matches the
                # regular expression...
                if dbname in ex_dbs or re.match(ex_regex, dbname):
                    # Remove database from the list
                    dbs_filtered.remove(db)
                    logger.debug('Base de datos excluida: {}'.format(dbname))
        else:
            for db in dbs_list:
                dbname = db['datname']
                # If database's name is in the exclusion list...
                if dbname in ex_dbs:
                    # Remove database from the list
                    dbs_filtered.remove(db)
                    logger.debug('Base de datos excluida: {}'.format(dbname))

        return dbs_filtered
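
Each row of dbs_list is dict-like with a 'datname' key (presumably a row from pg_database). A minimal sketch with hypothetical names; the include counterpart in the next example is the mirror image, building the result list instead of pruning it:

    dbs = [{'datname': 'sales'}, {'datname': 'test_a'}, {'datname': 'test_b'}]

    kept = db_filter_exclude(dbs, ex_dbs=['sales'], ex_regex=r'^test_a$')
    # kept == [{'datname': 'test_b'}]: 'sales' excluded by name,
    # 'test_a' excluded by the regular expression

    kept = db_filter_exclude(dbs, ex_dbs=['*'])
    # kept == []: '*' excludes everything
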
Example No. 9
    def db_filter_include(dbs_list, in_dbs=[], in_regex='', logger=None):
        '''
        Target:
            - filter a list of databases to get only the specified ones, taking
              into account the received parameters.
        Parameters:
            - dbs_list: list to filter.
            - in_dbs: list with the databases' names to include.
            - in_regex: regular expression which indicates the databases' names
              to include.
            - logger: a logger to show and log some messages.
        Return:
            - a filtered list (subset of "dbs_list") resulting from applying
              the inclusion conditions "in_dbs" and "in_regex".
        '''
        if not logger:
            logger = Logger()

        dbs_filtered = []

        if '*' in in_dbs:  # If include all...
            return dbs_list  # Return the whole list

        if in_regex:
            for db in dbs_list:
                dbname = db['datname']
                # If database's name is in the inclusion list or matches the
                # regular expression...
                if dbname in in_dbs or re.match(in_regex, dbname):
                    dbs_filtered.append(db)  # Add database to the list
                    logger.debug('Base de datos incluida: {}'.format(dbname))
        else:
            for db in dbs_list:
                dbname = db['datname']
                # If database's name is in the inclusion list...
                if dbname in in_dbs:
                    dbs_filtered.append(db)  # Add database to the list
                    logger.debug('Base de datos incluida: {}'.format(dbname))

        return dbs_filtered
Example No. 10
    def run(self):
        Logger.debug('process start from network process', category='client')

        reactor.connectTCP('127.0.0.1', 5000, ClientFactory(self.input_queue, self.output_queue))
        reactor.run()
Example No. 11
 def connectionMade(self):
     Logger.debug('connection made from network process', category='client')
     send_to_server_recursive_threaded_loop(self)
Example No. 12
class KivaRunner:
	"""
	This class can run kiva with a certain set of parameters
	and different temperature inputs.
	"""

	def __init__(self, working_dir, log_dir, compare_file, parameter_format, logger=None):
		"""
		parameter_format specifies what parameters from which files are needed.
		[file, name, min, max, step]
		"""
		if logger is None:
			self.log = Logger()
		else:
			self.log = logger
		self.error = False
		self.working_dir = working_dir
		self.log_dir = log_dir
		self.kiva_path = '../ext'
		self.kiva_name = 'kiva_0D'
		self.kiva_parameter_files = ['itape17', 'itape5', 'itapeERC', 'itapeRs']
		self.parameter_format = parameter_format
		self.log.debug("Parameter Format: %s" % self.parameter_format)
		# this will later hold all kiva_parameter_files that
		# need to be loaded as ITapeFile objects
		self.itapes = {}
		self.compare_file = compare_file
		# Check if working directory exists and that it is empty
		self.working_dir_indicator = os.path.join(self.working_dir, 'kivagen.working.dir')
		if not os.path.isdir(self.working_dir):
			self.log.error("'%s' does not exist." % self.working_dir)
			self.error = True
			self.working_dir = None
		elif os.listdir(self.working_dir):	# not empty
			if not os.path.isfile(self.working_dir_indicator):
				self.log.error("'%s' does not seem to be a working directory." % self.working_dir)
				self.error = True
				self.working_dir = None
				return
		else:
			open(self.working_dir_indicator, 'a').close()
		# Check that log dir exists
		if not os.path.isdir(self.log_dir):
			self.log.error("'%s' does not exist." % self.log_dir)
			self.error = True
			self.log_dir = None
		# Check if input files exist
		input_files = [self.compare_file]
		input_files.append(os.path.join(self.kiva_path, self.kiva_name))
		for parameter_file in self.kiva_parameter_files:
			input_files.append(os.path.join(self.kiva_path, parameter_file))
		for input_file in input_files:
			if not os.path.isfile(input_file):
				self.log.error("'%s' not found." % input_file)
				self.error = True
		# Parse compare file
		self.compare_values = self._readCompareFile(self.compare_file)
		# self.log.debug("self.compare_values = %s" % self.compare_values)
		# Load Parameters
		self._loadParameters()
		# self.log.debug("self.param_list = %s" % self.param_list)

# --------------- Parameters --------------------------------------------------
	def _loadParameters(self):
		"""
		Loads the start parameters from the itape files.
		Also creates a lookup table to match parameter list index to
		itape file and value id
		"""
		self.param_list = []
		self.param_lookup = {}
		# reload parameters from file
		for itape in self.itapes.values():
			itape._open(itape.name)
		param_index = 0
		for param in self.parameter_format:
			# if file has not been loaded before
			if param[0] not in self.itapes:
				f = os.path.join(self.kiva_path, param[0])
				if param[0] in self.kiva_parameter_files:
					if param[0] in ['itapeERC', 'itapeRs']:
						self.itapes[param[0]] = ITapeFile(f, self.log)
					else:
						self.log.error("Failed to read %s. Can only read itapeERC and itapeRs." % f)
				else:
					self.log.error("Failed to read %s. Not in self.kiva_parameter_files (%s)" % self.kiva_parameter_files)
			# try to get parameter id
			ii = self.itapes[param[0]].getId(param[1])
			if ii < 0:
				self.log.error("Could not find '%s' in '%s'." % (param[1], param[0]))
				self.param_list.append(None) # append None to get index right
			else:
				value = self.itapes[param[0]].getValue(ii)
				self.param_list.append(self._checkParameter(value, param))
				# remember itape file and value id
				self.param_lookup[param_index] = [self.itapes[param[0]], ii]
			param_index = param_index + 1
		# now we should have collected all parameters
		if len(self.param_list) != len(self.parameter_format):
			sel.log.error("Only found %s elements. Expected %s!"
							% (self.param_list, len(self.parameter_format)))

	def getParameters(self):
		"""
		Returns parameter list.
		"""
		return self.param_list

	def setParameters(self, parameters):
		"""
		Hand a new set of parameters to this class.
		They will be used for the next kiva runs.
		"""
		# 1.) check if parameter list length is correct
		if len(parameters) != len(self.parameter_format):
			sel.log.error("Parameter list needs to contain exactly %s elements. Not %s!"
							% (len(self.parameter_format), len(parameters)))
			return
		# 2.) Check if parameters are correct and put them into the itape files
		for ii in range(0, len(parameters)):
			if self._checkParameter(parameters[ii], self.parameter_format[ii]) is None:
				return
			self.param_lookup[ii][0].setValue(self.param_lookup[ii][1], parameters[ii])
		# 3.) Replace parameter list
		self.param_list = parameters


	def _checkParameter(self, value, format_line):
		"""
		Returns None if parameter is out of range.
		Otherwise returns the value.
		"""
		if value < format_line[2]:
			self.log.error("%s < %s (min)." % (value, format_line[2]))
			return None
		if value > format_line[3]:
			self.log.error("%s > %s (max)." % (value, format_line[3]))
			return None
		return value

# --------------- CompareFile -------------------------------------------------
	def _readCompareFile(self, compare_file):
		"""
		Read the compare file and returns temperature and delay as lists.
		"""
		compare_values = []
		with open(compare_file, 'rb') as csvfile:
			csvreader = csv.reader(csvfile, delimiter=',')
			for row in csvreader:
				try:
					temperature = 1000.0 / float(row[0])
					temperature = round(temperature, 0) # we do not need decimal places
					time = float(row[1])
					compare_values.append([temperature, time])
				except ValueError:
					pass
		return compare_values

# --------------- Setup and run -----------------------------------------------
	def _setupWorkingDir(self):
		"""
		Prepare working directory for a kiva run.
		"""
		if self.working_dir is None:
			self.log.error("No valid working dir set.")
			return False
		# Delete contents
		for node in os.listdir(self.working_dir):
			if os.path.isdir(os.path.join(self.working_dir, node)):
				shutil.rmtree(os.path.join(self.working_dir, node))
			else:
				os.unlink(os.path.join(self.working_dir, node))
		# Recreate working dir indicator
		open(self.working_dir_indicator, 'a').close()
		# Create Directory containing all input files
		path = os.path.join(self.working_dir, 'run0')
		os.makedirs(path)
		for f in self.kiva_parameter_files:
			if f in self.itapes:
				dst = os.path.join(path, os.path.basename(self.itapes[f].name))
				self.itapes[f].save(dst)
			else:
				src = os.path.join(self.kiva_path, f)
				dst = os.path.join(path, f)
				shutil.copy(src, dst)
		# Create as many copies of it as there are compare_values
		for ii in range(1, len(self.compare_values)):
			shutil.copytree(path, os.path.join(self.working_dir, 'run' + str(ii)))
		# patch temperature in every directory's itape5 file line 230
		for ii in range(0, len(self.compare_values)):
			f = os.path.join(self.working_dir, 'run' + str(ii), 'itape5')
			if not os.path.isfile(f):
				self.log.error("'%s' does not exist." % f)
				return False
			# load file
			with open(f, 'r') as file:
				data = file.readlines()
			# change line
			line = "'tempi',     "
			line += '{0:.1f}'.format(self.compare_values[ii][0]) + '\n'
			data[230 - 1] = line
			# write data back
			with open(f, 'w') as file:
				file.writelines(data)
		return True

	def _collectIgnitionDelay(self):
		"""
		This needs to be called after kiva is run.
		Data from the runXX/T_ign.dat files is collected.
		"""
		self.results = []
		for ii in range(0, len(self.compare_values)):
			f = os.path.join(self.working_dir, 'run' + str(ii), 'T_ign.dat')
			if not os.path.isfile(f):
				self.log.error("'%s' does not exist." % f)
				delay = 0	# probably kiva was killed
				# save log
				log = os.path.join(self.log_dir, str(int(time.time())))
				if not os.path.isdir(log):
					os.makedirs(log)
				shutil.move(os.path.join(self.working_dir, 'run' + str(ii)), log)
				src = os.path.join(self.working_dir, 'run' + str(ii) + '.log')
				des = os.path.join(log, 'run' + str(ii) + '.log')
				os.rename(src, des)
				src = os.path.join(self.working_dir, 'run' + str(ii) + '.error')
				des = os.path.join(log, 'run' + str(ii) + '.error')
				os.rename(src, des)
			else:
				with open(f, 'r') as file:
					delay = float(file.readlines()[0].strip())
			compare = self.compare_values[ii]
			delta = delay - compare[1]
			delta_abs = abs(delta)
			delta_rel = round((delta_abs * 100 / compare[1]), 2)
			self.log.debug("Delay for Temperatur %s: \texpected: %s \tcomputed: %s" % (compare[0], compare[1], delay))
			self.log.debug("Delta: %s \tabs: %s \trelative: %s%%" % (delta, delta_abs, delta_rel))
			self.results.append([delay, compare[1]])

	def _restart(self, p, max_tries):
		"""
		Restarts process if number of max_tries has not been passed.
		"""
		if p[5] >= max_tries:
			self.log.error("kiva in '%s' failed on the %s. try." % (p[1], p[5]))
			p[2] = False
			p[3].close() # close log file
			p[4].close() # close error file
			return False
		else:
			# clean up runXX dir
			for node in os.listdir(p[1]):
				if node not in self.kiva_parameter_files:
					os.unlink(os.path.join(p[1], node))
			# increment tries
			p[5] = p[5] + 1
			# print message to log
			msg = "\n%s. Try -------------------------------------------\n" % p[5]
			p[3].write(msg)
			p[4].write(msg)
			# restart p
			kiva = os.path.abspath(os.path.join(self.kiva_path, self.kiva_name))
			p[0] = subprocess.Popen(kiva, cwd=p[1], stdout=p[3], stderr=p[4])
			self.log.warn("kiva in '%s' restarted for it's %s. try." % (p[1], p[5]))
			return True

	def run(self, time_out=120, max_tries=5):
		"""
		Executes kiva with current parameter set.
		Once for every compare_value.
		"""
		if not self._setupWorkingDir():
			return False
		# Start one kiva process for every folder
		processes = []
		# devnull = open('/dev/null', 'w')
		kiva = os.path.abspath(os.path.join(self.kiva_path, self.kiva_name))
		for ii in range(0, len(self.compare_values)):
			log = os.path.join(self.working_dir, 'run' + str(ii) + '.log')
			log_file = open(log, 'w')
			error = os.path.join(self.working_dir, 'run' + str(ii) + '.error')
			error_file = open(error, 'w')
			d = os.path.join(self.working_dir, 'run' + str(ii))
			p = subprocess.Popen(kiva, cwd=d, stdout=log_file, stderr=error_file)
			# process, path, running, log_file, error_file, tries
			processes.append([p, d, True, log_file, error_file, 1])
			self.log.debug("kiva in '%s' spawned." % d)
		# Check for all kiva processes to terminate
		all_finished = False
		start_time = time.time()
		while not all_finished:
			all_finished = True
			for p in processes:
				if p[2]:
					if p[0].poll() == 0:
						self.log.debug("kiva in '%s' terminated." % p[1])
						# Check if output file was generated
						if os.path.isfile(os.path.join(p[1], 'T_ign.dat')):
							p[2] = False
							p[3].close() # close log file
							p[4].close() # close error file
						else:	# if there is no T_ign.dat
							self._restart(p, max_tries)	# restart
							start_time = time.time()	# reset timeout
					else:
						all_finished = False
			# is time up?
			if time.time() - start_time > time_out:
				self.log.warn("Timed out! (%s s)" % time_out)
				for p in processes:
					if p[2]:
						p[0].kill()
						p[3].close() # close log file
						p[4].close() # close error file
						self.log.warn("kiva in '%s' killed!" % p[1])
				all_finished = True
			# do not use up all CPU time
			time.sleep(0.5)
		# Collect Results
		self._collectIgnitionDelay()
		return True

	def getFitness(self):
		"""
		Call this after calling run.
		This will return a fitness value based on the temperatures computed.
		"""
		score = 0.0
		for value in self.results:
			score += abs(value[0] - value[1])
		return score
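
Per the constructor's docstring, each parameter_format row is [file, name, min, max, step]. A hypothetical instantiation (file entries must be itapeERC or itapeRs, the only files _loadParameters can parse; the parameter names here are made up):

    parameter_format = [
        # [file, name, min, max, step]
        ['itapeERC', 'cf1', 0.1, 10.0, 0.1],
        ['itapeRs', 'rs2', 1.0, 99.0, 1.0],
    ]
    runner = KivaRunner('work', 'logs', 'compare.csv', parameter_format)
    if not runner.error and runner.run(time_out=120, max_tries=5):
        print(runner.getFitness())  # sum of |computed - expected| delays; lower is better
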
Example No. 13
class Dropper:

    # An object with connection parameters to connect to PostgreSQL
    connecter = None
    logger = None  # Logger to show and log some messages
    dbnames = []  # List of databases to be removed

    def __init__(self, connecter=None, dbnames=[], logger=None):

        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if connecter:
            self.connecter = connecter
        else:
            self.logger.stop_exe(Msg.NO_CONNECTION_PARAMS)

        if isinstance(dbnames, list):
            self.dbnames = dbnames
        else:
            self.dbnames = Casting.str_to_list(dbnames)

        msg = Msg.DROPPER_VARS.format(server=self.connecter.server,
                                      user=self.connecter.user,
                                      port=self.connecter.port,
                                      dbnames=self.dbnames)
        self.logger.debug(Msg.DROPPER_VARS_INTRO)
        self.logger.debug(msg)

    def drop_pg_db(self, dbname, pg_superuser):
        '''
        Target:
            - remove a database in PostgreSQL.
        Parameters:
            - dbname: the PostgreSQL database's name which is going to be
              removed.
            - pg_superuser: a flag which indicates whether the current user is
              PostgreSQL superuser or not.
        '''
        delete = False

        try:
            self.connecter.cursor.execute(Queries.PG_DB_EXISTS, (dbname, ))
            result = self.connecter.cursor.fetchone()

            if result:

                pg_pid = self.connecter.get_pid_str()
                formatted_sql = Queries.BACKEND_PG_DB_EXISTS.format(
                    pg_pid=pg_pid, target_db=dbname)

                self.connecter.cursor.execute(formatted_sql)
                result = self.connecter.cursor.fetchone()

                # If there are not any connections to the target database...
                if not result:

                    # Users who are not superusers will only be able to drop
                    # the databases they own
                    if not pg_superuser:

                        self.connecter.cursor.execute(Queries.GET_PG_DB_OWNER,
                                                      (dbname, ))
                        db = self.connecter.cursor.fetchone()

                        if db['owner'] != self.connecter.user:

                            msg = Msg.DROP_DB_NOT_ALLOWED.format(
                                user=self.connecter.user, dbname=dbname)
                            self.logger.highlight('warning', msg, 'yellow')

                        else:
                            delete = True

                    else:
                        delete = True

                    if delete:

                        # Get the database's "datallowconn" value
                        datallowconn = self.connecter.get_datallowconn(dbname)

                        # If datallowconn is allowed, change it temporarily
                        if datallowconn:
                            # Disallow connections to the database during the
                            # process
                            result = self.connecter.disallow_db_conn(dbname)
                            if not result:
                                msg = Msg.DISALLOW_CONN_TO_PG_DB_FAIL.format(
                                    dbname=dbname)
                                self.logger.highlight('warning', msg, 'yellow')

                        fmt_query_drop_db = Queries.DROP_PG_DB.format(
                            dbname=dbname)

                        start_time = DateTools.get_current_datetime()
                        # Drop the database
                        self.connecter.cursor.execute(fmt_query_drop_db)
                        end_time = DateTools.get_current_datetime()
                        # Get and show the process' duration
                        diff = DateTools.get_diff_datetimes(
                            start_time, end_time)
                        msg = Msg.DROP_DB_DONE.format(dbname=dbname, diff=diff)
                        self.logger.highlight('info', msg, 'green')

                        # If datallowconn was allowed, leave it as it was
                        if datallowconn:
                            # Allow connections to the database at the end of
                            # the process
                            result = self.connecter.allow_db_conn(dbname)
                            if not result:
                                msg = Msg.ALLOW_CONN_TO_PG_DB_FAIL.format(
                                    dbname=dbname)
                                self.logger.highlight('warning', msg, 'yellow')

                else:
                    msg = Msg.ACTIVE_CONNS_ERROR.format(dbname=dbname)
                    self.logger.highlight('warning', msg, 'yellow')

            else:
                msg = Msg.DB_DOES_NOT_EXIST.format(dbname=dbname)
                self.logger.highlight('warning', msg, 'yellow')

        except Exception as e:
            self.logger.debug('Error en la función "drop_pg_db": '
                              '{}.'.format(str(e)))
            self.logger.highlight('warning',
                                  Msg.DROP_DB_FAIL.format(dbname=dbname),
                                  'yellow')

    def drop_pg_dbs(self, dbnames):
        '''
        Target:
            - remove a list of databases in PostgreSQL.
        '''
        self.logger.highlight('info', Msg.BEGINNING_DROPPER, 'white')
        # Check if the role of user connected to PostgreSQL is superuser
        pg_superuser = self.connecter.is_pg_superuser()

        if dbnames:

            for dbname in self.dbnames:

                msg = Msg.PROCESSING_DB.format(dbname=dbname)
                self.logger.highlight('info', msg, 'cyan')

                self.drop_pg_db(dbname, pg_superuser)

        else:
            self.logger.highlight('warning',
                                  Msg.DROPPER_HAS_NOTHING_TO_DO,
                                  'yellow',
                                  effect='bold')

        self.logger.highlight('info',
                              Msg.DROP_DBS_DONE,
                              'green',
                              effect='bold')
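
A minimal sketch of driving Dropper; the Connecter constructor's signature is an assumption based on the attributes used above (server, user, port, cursor):

    connecter = Connecter(server='localhost', user='postgres', port=5432)  # hypothetical signature
    dropper = Dropper(connecter, dbnames=['old_db', 'tmp_db'])
    dropper.drop_pg_dbs(dropper.dbnames)  # ownership/superuser is checked per database
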
Example No. 14
 def send_broadcast_payload(self, payload):
     payload = json.dumps(payload)
     Logger.debug('broadcasting to all: %s', payload, 'server')
     for user_id, connection in self.connections.iteritems():
         connection.sendLine(payload)
Example No. 15
class Terminator:

    target_all = None  # Flag which determines whether to terminate every connection
    target_user = None  # Terminate every connection of a specific user
    target_dbs = []  # Terminate any connection to a list of databases
    # An object with connection parameters to connect to PostgreSQL
    connecter = None
    logger = None  # Logger to show and log some messages

    def __init__(self,
                 connecter,
                 target_all=False,
                 target_user='',
                 target_dbs=[],
                 logger=None):

        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if connecter:
            self.connecter = connecter
        else:
            self.logger.stop_exe(Messenger.NO_CONNECTION_PARAMS)

        if target_all is None:
            self.target_all = target_all
        elif isinstance(target_all, bool):
            self.target_all = target_all
        elif Checker.str_is_bool(target_all):
            self.target_all = Casting.str_to_bool(target_all)
        else:
            self.logger.stop_exe(Messenger.INVALID_TARGET_ALL)

        self.target_user = target_user

        if target_dbs is None:
            self.target_dbs = []
        elif isinstance(target_dbs, list):
            self.target_dbs = target_dbs
        else:
            self.target_dbs = Casting.str_to_list(target_dbs)

        message = Messenger.TERMINATOR_VARS.format(
            server=self.connecter.server,
            user=self.connecter.user,
            port=self.connecter.port,
            target_all=self.target_all,
            target_user=target_user,
            target_dbs=self.target_dbs)
        self.logger.debug(Messenger.TERMINATOR_VARS_INTRO)
        self.logger.debug(message)

    def terminate_backend_user(self):
        '''
        Target:
            - terminate every connection of a specific user to PostgreSQL (as
              long as the target user is the one who is running the program).
        '''
        message = Messenger.BEGINNING_TERMINATE_USER_CONN.format(
            target_user=self.target_user)
        self.logger.highlight('info', message, 'white')

        try:
            pg_pid = self.connecter.get_pid_str()  # Get PID variable's name

            sql = Queries.GET_CURRENT_PG_USER
            self.connecter.cursor.execute(sql)
            current_pg_user = self.connecter.cursor.fetchone()[0]

            if self.target_user == current_pg_user:
                message = Messenger.TARGET_USER_IS_CURRENT_USER.format(
                    target_user=self.target_user)
                self.logger.highlight('warning', message, 'yellow')

            else:
                formatted_sql = Queries.BACKEND_PG_USER_EXISTS.format(
                    pg_pid=pg_pid, target_user=self.target_user)
                self.connecter.cursor.execute(formatted_sql)
                result = self.connecter.cursor.fetchone()

                if result:
                    formatted_sql = Queries.TERMINATE_BACKEND_PG_USER.format(
                        pg_pid=pg_pid, target_user=self.target_user)
                    self.connecter.cursor.execute(formatted_sql)
                else:
                    message = Messenger.NO_USER_CONNS.format(
                        target_user=self.target_user)
                    self.logger.info(message)

            message = Messenger.TERMINATE_USER_CONN_DONE.format(
                target_user=self.target_user)
            self.logger.highlight('info', message, 'green')

        except Exception as e:
            self.logger.debug('Error en la función "terminate_backend_user": '
                              '{}.'.format(str(e)))
            message = Messenger.TERMINATE_USER_CONN_FAIL.format(
                target_user=self.target_user)
            self.logger.highlight('warning', message, 'yellow', effect='bold')

        self.logger.highlight('info', Messenger.TERMINATOR_DONE, 'green')

    def terminate_backend_db(self, target_db):
        '''
        Target:
            - terminate every connection to a PostgreSQL database (except the
              current one, if it is connected to the target database).
        '''
        try:
            # The variable "target_db" sometimes could be a string or a list
            # of list, so it is necessary to check it first
            if not isinstance(target_db, str):
                target_db = target_db['datname']

            pg_pid = self.connecter.get_pid_str()  # Get PID variable's name

            formatted_sql = Queries.BACKEND_PG_DB_EXISTS.format(
                pg_pid=pg_pid, target_db=target_db)

            self.connecter.cursor.execute(formatted_sql)
            result = self.connecter.cursor.fetchone()

            if result:

                formatted_sql = Queries.TERMINATE_BACKEND_PG_DB.format(
                    pg_pid=pg_pid, target_db=target_db)

                self.connecter.cursor.execute(formatted_sql)

                message = Messenger.TERMINATE_DB_CONN_DONE.format(
                    target_dbname=target_db)
                self.logger.info(message)

            else:
                message = Messenger.NO_DB_CONNS.format(target_db=target_db)
                self.logger.info(message)

        except Exception as e:
            self.logger.debug('Error en la función "terminate_backend_db": '
                              '{}.'.format(str(e)))
            message = Messenger.TERMINATE_DB_CONN_FAIL.format(
                target_dbname=target_db)
            self.logger.highlight('warning', message, 'yellow')

    def terminate_backend_dbs(self, ter_list):
        '''
        Target:
            - terminate every connection to some PostgreSQL databases (except
              the current one, if it is connected to one of the target
              databases).
        Parameters:
            - ter_list: the list of databases whose connections are going to be
              terminated.
        '''
        message = Messenger.BEGINNING_TERMINATE_DBS_CONN
        self.logger.highlight('info', message, 'white')

        if ter_list:
            for target_db in ter_list:
                self.terminate_backend_db(target_db)
        else:
            self.logger.highlight('warning',
                                  Messenger.TERMINATOR_HAS_NOTHING_TO_DO,
                                  'yellow')

        self.logger.highlight('info', Messenger.TERMINATOR_DONE, 'green')

    def terminate_backend_all(self):
        '''
        Target:
            - remove every connection to PostgreSQL (except the current one).
        '''
        try:
            message = Messenger.BEGINNING_TERMINATE_ALL_CONN
            self.logger.highlight('info', message, 'white')

            pg_pid = self.connecter.get_pid_str()  # Get PID variable's name

            formatted_sql = Queries.BACKEND_PG_ALL_EXISTS.format(pg_pid=pg_pid)

            self.connecter.cursor.execute(formatted_sql)
            result = self.connecter.cursor.fetchone()

            if result:
                formatted_sql = Queries.TERMINATE_BACKEND_PG_ALL.format(
                    pg_pid=pg_pid)
                self.connecter.cursor.execute(formatted_sql)
            else:
                self.logger.info(Messenger.NO_CONNS)

            self.logger.highlight('info', Messenger.TERMINATE_ALL_CONN_DONE,
                                  'green')

        except Exception as e:
            self.logger.debug('Error en la función "terminate_backend_all": '
                              '{}.'.format(str(e)))
            message = Messenger.TERMINATE_ALL_CONN_FAIL
            self.logger.highlight('warning', message, 'yellow', effect='bold')

        self.logger.highlight('info', Messenger.TERMINATOR_DONE, 'green')
Example No. 16
class Restorer:

    # An object with connection parameters to connect to PostgreSQL
    connecter = None
    logger = None  # Logger to show and log some messages
    db_backup = ''  # Absolute path of the backup file (of a database)
    new_dbname = ''  # New name for the database restored in PostgreSQL

    def __init__(self, connecter=None, db_backup='', new_dbname='',
                 logger=None):

        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if connecter:
            self.connecter = connecter
        else:
            self.logger.stop_exe(Messenger.NO_CONNECTION_PARAMS)

        if db_backup and os.path.isfile(db_backup):
            self.db_backup = db_backup
        else:
            self.logger.stop_exe(Messenger.NO_BKP_TO_RESTORE)

        if new_dbname:
            self.new_dbname = new_dbname
        else:
            self.logger.stop_exe(Messenger.NO_DBNAME_TO_RESTORE)

        message = Messenger.DB_RESTORER_VARS.format(
            server=self.connecter.server, user=self.connecter.user,
            port=self.connecter.port, db_backup=self.db_backup,
            new_dbname=self.new_dbname)
        self.logger.debug(Messenger.DB_RESTORER_VARS_INTRO)
        self.logger.debug(message)

    def restore_db_backup(self):
        '''
        Target:
            - restore a database's backup in PostgreSQL.
        '''
        #replicator = Replicator(self.connecter, self.new_dbname,
                                #Default.RESTORING_TEMPLATE, self.logger)
        #result = self.connecter.allow_db_conn(Default.RESTORING_TEMPLATE)
        #if result:
            #replicator.replicate_pg_db()
            #self.connecter.disallow_db_conn(Default.RESTORING_TEMPLATE)
        #else:
            #self.logger.stop_exe(Messenger.ALLOW_DB_CONN_FAIL.format(
                #dbname=Default.RESTORING_TEMPLATE))

        # Regular expression which must match the backup's name
        regex = r'.*db_(.+)_(\d{8}_\d{6}_.+)\.(dump|bz2|gz|zip)$'
        regex = re.compile(regex)

        if re.match(regex, self.db_backup):
            # Store the parts of the backup's name (name, date, ext)
            parts = regex.search(self.db_backup).groups()
            # Store only the extension to know the type of file
            ext = parts[2]
        else:
            self.logger.stop_exe(Messenger.NO_BACKUP_FORMAT)

        message = Messenger.BEGINNING_DB_RESTORER.format(
            db_backup=self.db_backup, new_dbname=self.new_dbname)
        self.logger.highlight('info', message, 'white')
        self.logger.info(Messenger.WAIT_PLEASE)

        if ext == 'gz':
            command = 'gunzip -c {} -k | pg_restore -U {} -h {} -p {} ' \
                      '-d {}'.format(self.db_backup, self.connecter.user,
                                     self.connecter.server,
                                     self.connecter.port, self.new_dbname)
        elif ext == 'bz2':
            command = 'bunzip2 -c {} -k | pg_restore -U {} -h {} -p {} ' \
                      '-d {}'.format(self.db_backup, self.connecter.user,
                                     self.connecter.server,
                                     self.connecter.port, self.new_dbname)
        elif ext == 'zip':
            command = 'unzip -p {} | pg_restore -U {} -h {} -p {} ' \
                      '-d {}'.format(self.db_backup, self.connecter.user,
                                     self.connecter.server,
                                     self.connecter.port, self.new_dbname)
        else:
            command = 'pg_restore -U {} -h {} -p {} -d {} {}'.format(
                self.connecter.user, self.connecter.server,
                self.connecter.port, self.new_dbname, self.db_backup)

        try:
            start_time = DateTools.get_current_datetime()
            # Perform the restoration of the database
            result = subprocess.call(command, shell=True)
            end_time = DateTools.get_current_datetime()
            # Get and show the process' duration
            diff = DateTools.get_diff_datetimes(start_time, end_time)

            if result != 0:
                raise Exception()

            message = Messenger.RESTORE_DB_DONE.format(
                db_backup=self.db_backup, new_dbname=self.new_dbname,
                diff=diff)
            self.logger.highlight('info', message, 'green')

            self.logger.highlight('info', Messenger.RESTORER_DONE, 'green',
                                  effect='bold')

        except Exception as e:
            self.logger.debug('Error en la función "restore_db_backup": '
                              '{}.'.format(str(e)))
            message = Messenger.RESTORE_DB_FAIL.format(
                db_backup=self.db_backup, new_dbname=self.new_dbname)
            self.logger.stop_exe(message)
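
The backup's file name has to match the regular expression above: a 'db_' prefix, the database name, a YYYYMMDD_HHMMSS timestamp plus a free-form tag, and one of the four extensions. A quick check with a hypothetical name:

    import re

    regex = re.compile(r'.*db_(.+)_(\d{8}_\d{6}_.+)\.(dump|bz2|gz|zip)$')
    match = regex.match('/backups/db_sales_20160217_093056_daily.dump')
    print(match.groups())  # ('sales', '20160217_093056_daily', 'dump')
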
Example No. 17
class RestorerCluster:

    # An object with connection parameters to connect to PostgreSQL
    connecter = None
    logger = None  # Logger to show and log some messages
    cluster_backup = ''  # Absolute path of the backup file (of a cluster)

    def __init__(self, connecter=None, cluster_backup='', logger=None):

        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if connecter:
            self.connecter = connecter
        else:
            self.logger.stop_exe(Messenger.NO_CONNECTION_PARAMS)

        if cluster_backup and os.path.isfile(cluster_backup):
            self.cluster_backup = cluster_backup
        else:
            self.logger.stop_exe(Messenger.NO_BKP_TO_RESTORE)

        message = Messenger.CL_RESTORER_VARS.format(
            server=self.connecter.server, user=self.connecter.user,
            port=self.connecter.port, cluster_backup=self.cluster_backup)
        self.logger.debug(Messenger.CL_RESTORER_VARS_INTRO)
        self.logger.debug(message)

    def restore_cluster_backup(self):
        '''
        Target:
            - restore a cluster's backup in PostgreSQL. The cluster must have
              been created before this process.
        '''
        # Regular expression which must match the backup's name
        regex = r'.*ht_(.+_cluster)_(\d{8}_\d{6}_.+)\.(dump|bz2|gz|zip)$'
        regex = re.compile(regex)

        if re.match(regex, self.cluster_backup):
            # Store the parts of the backup's name (servername, date, ext)
            parts = regex.search(self.cluster_backup).groups()
            # Store only the extension to know the type of file
            ext = parts[2]
        else:
            self.logger.stop_exe(Messenger.NO_BACKUP_FORMAT)

        message = Messenger.BEGINNING_CL_RESTORER.format(
            cluster_backup=self.cluster_backup)
        self.logger.highlight('info', message, 'white')
        self.logger.info(Messenger.WAIT_PLEASE)

        # TODO: suppress every line about the operation that is shown in the console
        if ext == 'gz':
            command = 'gunzip -c {} -k | psql postgres -U {} -h {} ' \
                      '-p {}'.format(
                          self.cluster_backup, self.connecter.user,
                          self.connecter.server, self.connecter.port)
        elif ext == 'bz2':
            command = 'bunzip2 -c {} -k | psql postgres -U {} -h {} ' \
                      '-p {}'.format(
                          self.cluster_backup, self.connecter.user,
                          self.connecter.server, self.connecter.port)
        elif ext == 'zip':
            command = 'unzip -p {} | psql postgres -U {} -h {} -p {}'.format(
                self.cluster_backup, self.connecter.user,
                self.connecter.server, self.connecter.port)
        else:
            command = 'psql postgres -U {} -h {} -p {} < {}'.format(
                self.connecter.user, self.connecter.server,
                self.connecter.port, self.cluster_backup)

        try:
            start_time = DateTools.get_current_datetime()
            # Perform the restoration of the cluster
            result = subprocess.call(command, shell=True)
            end_time = DateTools.get_current_datetime()
            # Get and show the process' duration
            diff = DateTools.get_diff_datetimes(start_time, end_time)

            if result != 0:
                raise Exception()

            message = Messenger.RESTORE_CL_DONE.format(
                cluster_backup=self.cluster_backup, diff=diff)
            self.logger.highlight('info', message, 'green')

            self.logger.highlight('info', Messenger.RESTORER_DONE, 'green',
                                  effect='bold')

        except Exception as e:
            self.logger.debug('Error en la función "restore_cluster_backup": '
                              '{}.'.format(str(e)))
            message = Messenger.RESTORE_CL_FAIL.format(
                cluster_backup=self.cluster_backup)
            self.logger.stop_exe(message)
Example No. 18
    networkProcess.start()

    # init local game
    localInputFeed = UserInputFeedLocal()

    # init client logic
    client_game_handler = ClientNetworkGameHandler(game)
    tick_simulator = TickSimulator(Game.fps)

    quit_the_game = False
    while not quit_the_game:
        # fetch network inputs and update game state
        if not networkProcess.output_queue.empty():
            payload = networkProcess.output_queue.get()
            Logger.debug(
                "fetch input_queue from main, from network process to screen",
                category='start_client')
            client_game_handler.on_line_received(payload)

        # fetch local input and update game state
        user_input = localInputFeed.fetch_user_input()
        client_game_handler.on_local_user_input(user_input)

        # redraw game
        tick_simulator.simulate(game, lambda self: self.tick())
        #game.tick()
        hmi.draw()

        if user_input.has_pressed_something():
            # send local user_inputs
            Logger.debug(
Example No. 19
from logger.logger import Logger
import time

if __name__ == '__main__':
	tt = Logger().get_log
	tt.error("sdaaada3")
	tt.debug("sdetsdebugtest1")
	for i in range(1,20):
		tt.debug("asda %u" %(i))
		time.sleep(5.0)
Ejemplo n.º 24
0
class RestorerCluster:

    # An object with connection parameters to connect to PostgreSQL
    connecter = None
    logger = None  # Logger to show and log some messages
    cluster_backup = ''  # Absolute path of the backup file (of a cluster)

    def __init__(self, connecter=None, cluster_backup='', logger=None):

        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if connecter:
            self.connecter = connecter
        else:
            self.logger.stop_exe(Messenger.NO_CONNECTION_PARAMS)

        if cluster_backup and os.path.isfile(cluster_backup):
            self.cluster_backup = cluster_backup
        else:
            self.logger.stop_exe(Messenger.NO_BKP_TO_RESTORE)

        message = Messenger.CL_RESTORER_VARS.format(
            server=self.connecter.server,
            user=self.connecter.user,
            port=self.connecter.port,
            cluster_backup=self.cluster_backup)
        self.logger.debug(Messenger.CL_RESTORER_VARS_INTRO)
        self.logger.debug(message)

    def restore_cluster_backup(self):
        '''
        Target:
            - restore a cluster's backup in PostgreSQL. The cluster must have
              been created before this process.
        '''
        # Regular expression which must match the backup's name
        regex = r'.*ht_(.+_cluster)_(\d{8}_\d{6}_.+)\.(dump|bz2|gz|zip)$'
        regex = re.compile(regex)

        if re.match(regex, self.cluster_backup):
            # Store the parts of the backup's name (servername, date, ext)
            parts = regex.search(self.cluster_backup).groups()
            # Store only the extension to know the type of file
            ext = parts[2]
        else:
            Messenger.NO_BACKUP_FORMAT

        message = Messenger.BEGINNING_CL_RESTORER.format(
            cluster_backup=self.cluster_backup)
        self.logger.highlight('info', message, 'white')
        self.logger.info(Messenger.WAIT_PLEASE)

        # TODO: make dissappear every line about the operation shown in console
        if ext == 'gz':
            command = 'gunzip -c {} -k | psql postgres -U {} -h {} ' \
                      '-p {}'.format(
                          self.cluster_backup, self.connecter.user,
                          self.connecter.server, self.connecter.port)
        elif ext == 'bz2':
            command = 'bunzip2 -c {} -k | psql postgres -U {} -h {} ' \
                      '-p {}'.format(
                          self.cluster_backup, self.connecter.user,
                          self.connecter.server, self.connecter.port)
        elif ext == 'zip':
            command = 'unzip -p {} | psql postgres -U {} -h {} -p {}'.format(
                self.cluster_backup, self.connecter.user,
                self.connecter.server, self.connecter.port)
        else:
            command = 'psql postgres -U {} -h {} -p {} < {}'.format(
                self.connecter.user, self.connecter.server,
                self.connecter.port, self.cluster_backup)

        try:
            start_time = DateTools.get_current_datetime()
            # Perform the restoration of the cluster
            result = subprocess.call(command, shell=True)
            end_time = DateTools.get_current_datetime()
            # Get and show the process' duration
            diff = DateTools.get_diff_datetimes(start_time, end_time)

            if result != 0:
                raise Exception()

            message = Messenger.RESTORE_CL_DONE.format(
                cluster_backup=self.cluster_backup, diff=diff)
            self.logger.highlight('info', message, 'green')

            self.logger.highlight('info',
                                  Messenger.RESTORER_DONE,
                                  'green',
                                  effect='bold')

        except Exception as e:
            self.logger.debug('Error en la función "restore_cluster_backup": '
                              '{}.'.format(str(e)))
            message = Messenger.RESTORE_CL_FAIL.format(
                cluster_backup=self.cluster_backup)
            self.logger.stop_exe(message)
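
A quick sketch of the file names the cluster-backup regex above accepts (the sample path is illustrative):

import re

regex = re.compile(r'.*ht_(.+_cluster)_(\d{8}_\d{6}_.+)\.(dump|bz2|gz|zip)$')

# 'ht_' prefix, server name, timestamp plus tag, and a known extension
name = '/backups/ht_myserver_cluster_20160101_120000_mygroup.gz'
match = regex.match(name)
if match:
    servername, date, ext = match.groups()
    print(ext)  # -> 'gz': selects the gunzip | psql pipeline above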
Ejemplo n.º 25
0
    def stop(self):
        Logger.debug('stop command received from network process', category='client')
        self.input_queue.put('quit')
        multiprocessing.Process.terminate(self)
Ejemplo n.º 26
0
class Terminator:

    target_all = None  # Flag which determines whether to terminate every connection
    target_user = None  # Terminate every connection of a specific user
    target_dbs = []  # Terminate every connection to a list of databases
    # An object with connection parameters to connect to PostgreSQL
    connecter = None
    logger = None  # Logger to show and log some messages

    def __init__(self, connecter, target_all=False, target_user='',
                 target_dbs=[], logger=None):

        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if connecter:
            self.connecter = connecter
        else:
            self.logger.stop_exe(Messenger.NO_CONNECTION_PARAMS)

        if target_all is None:
            self.target_all = target_all
        elif isinstance(target_all, bool):
            self.target_all = target_all
        elif Checker.str_is_bool(target_all):
            self.target_all = Casting.str_to_bool(target_all)
        else:
            self.logger.stop_exe(Messenger.INVALID_TARGET_ALL)

        self.target_user = target_user

        if target_dbs is None:
            self.target_dbs = []
        elif isinstance(target_dbs, list):
            self.target_dbs = target_dbs
        else:
            self.target_dbs = Casting.str_to_list(target_dbs)

        message = Messenger.TERMINATOR_VARS.format(
            server=self.connecter.server, user=self.connecter.user,
            port=self.connecter.port, target_all=self.target_all,
            target_user=target_user, target_dbs=self.target_dbs)
        self.logger.debug(Messenger.TERMINATOR_VARS_INTRO)
        self.logger.debug(message)

    def terminate_backend_user(self):
        '''
        Target:
            - terminate every connection of a specific user to PostgreSQL (as
              long as the target user is not the one who is running the
              program).
        '''
        message = Messenger.BEGINNING_TERMINATE_USER_CONN.format(
            target_user=self.target_user)
        self.logger.highlight('info', message, 'white')

        try:
            pg_pid = self.connecter.get_pid_str()  # Get PID variable's name

            sql = Queries.GET_CURRENT_PG_USER
            self.connecter.cursor.execute(sql)
            current_pg_user = self.connecter.cursor.fetchone()[0]

            if self.target_user == current_pg_user:
                message = Messenger.TARGET_USER_IS_CURRENT_USER.format(
                    target_user=self.target_user)
                self.logger.highlight('warning', message, 'yellow')

            else:
                formatted_sql = Queries.BACKEND_PG_USER_EXISTS.format(
                    pg_pid=pg_pid, target_user=self.target_user)
                self.connecter.cursor.execute(formatted_sql)
                result = self.connecter.cursor.fetchone()

                if result:
                    formatted_sql = Queries.TERMINATE_BACKEND_PG_USER.format(
                        pg_pid=pg_pid, target_user=self.target_user)
                    self.connecter.cursor.execute(formatted_sql)
                else:
                    message = Messenger.NO_USER_CONNS.format(
                        target_user=self.target_user)
                    self.logger.info(message)

            message = Messenger.TERMINATE_USER_CONN_DONE.format(
                target_user=self.target_user)
            self.logger.highlight('info', message, 'green')

        except Exception as e:
            self.logger.debug('Error en la función "terminate_backend_user": '
                              '{}.'.format(str(e)))
            message = Messenger.TERMINATE_USER_CONN_FAIL.format(
                target_user=self.target_user)
            self.logger.highlight('warning', message, 'yellow', effect='bold')

        self.logger.highlight('info', Messenger.TERMINATOR_DONE, 'green')

    def terminate_backend_db(self, target_db):
        '''
        Target:
            - terminate every connection to a PostgreSQL database (except the
              current one, if it is connected to the target database).
        '''
        try:
            # The variable "target_db" sometimes could be a string or a list
            # of list, so it is necessary to check it first
            if not isinstance(target_db, str):
                target_db = target_db['datname']

            pg_pid = self.connecter.get_pid_str()  # Get PID variable's name

            formatted_sql = Queries.BACKEND_PG_DB_EXISTS.format(
                pg_pid=pg_pid, target_db=target_db)

            self.connecter.cursor.execute(formatted_sql)
            result = self.connecter.cursor.fetchone()

            if result:

                formatted_sql = Queries.TERMINATE_BACKEND_PG_DB.format(
                    pg_pid=pg_pid, target_db=target_db)

                self.connecter.cursor.execute(formatted_sql)

                message = Messenger.TERMINATE_DB_CONN_DONE.format(
                    target_dbname=target_db)
                self.logger.info(message)

            else:
                message = Messenger.NO_DB_CONNS.format(target_db=target_db)
                self.logger.info(message)

        except Exception as e:
            self.logger.debug('Error en la función "terminate_backend_db": '
                              '{}.'.format(str(e)))
            message = Messenger.TERMINATE_DB_CONN_FAIL.format(
                target_dbname=target_db)
            self.logger.highlight('warning', message, 'yellow')

    def terminate_backend_dbs(self, ter_list):
        '''
        Target:
            - terminate every connection to some PostgreSQL databases (except
              the current one, if it is connected to one of the target
              databases).
        Parameters:
            - ter_list: the list of databases whose connections are going to be
              terminated.
        '''
        message = Messenger.BEGINNING_TERMINATE_DBS_CONN
        self.logger.highlight('info', message, 'white')

        if ter_list:
            for target_db in ter_list:
                self.terminate_backend_db(target_db)
        else:
            self.logger.highlight('warning',
                                  Messenger.TERMINATOR_HAS_NOTHING_TO_DO,
                                  'yellow')

        self.logger.highlight('info', Messenger.TERMINATOR_DONE, 'green')

    def terminate_backend_all(self):
        '''
        Target:
            - remove every connection to PostgreSQL (except the current one).
        '''
        try:
            message = Messenger.BEGINNING_TERMINATE_ALL_CONN
            self.logger.highlight('info', message, 'white')

            pg_pid = self.connecter.get_pid_str()  # Get PID variable's name

            formatted_sql = Queries.BACKEND_PG_ALL_EXISTS.format(pg_pid=pg_pid)

            self.connecter.cursor.execute(formatted_sql)
            result = self.connecter.cursor.fetchone()

            if result:
                formatted_sql = Queries.TERMINATE_BACKEND_PG_ALL.format(
                    pg_pid=pg_pid)
                self.connecter.cursor.execute(formatted_sql)
            else:
                self.logger.info(Messenger.NO_CONNS)

            self.logger.highlight('info', Messenger.TERMINATE_ALL_CONN_DONE,
                                  'green')

        except Exception as e:
            self.logger.debug('Error en la función "terminate_backend_all": '
                              '{}.'.format(str(e)))
            message = Messenger.TERMINATE_ALL_CONN_FAIL
            self.logger.highlight('warning', message, 'yellow', effect='bold')

        self.logger.highlight('info', Messenger.TERMINATOR_DONE, 'green')
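
A minimal usage sketch, assuming a Connecter built with the same keyword arguments used elsewhere in these examples (all values are placeholders):

connecter = Connecter(server='localhost', user='postgres', port=5432)
# Terminate every connection to the databases listed in target_dbs
terminator = Terminator(connecter, target_dbs=['mydb', 'otherdb'])
terminator.terminate_backend_dbs(terminator.target_dbs)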
Ejemplo n.º 27
0
class Replicator:

    # An object with connection parameters to connect to PostgreSQL
    connecter = None
    logger = None  # Logger to show and log some messages
    new_dbname = ''  # Name of the copy
    original_dbname = ''  # Name of the original database

    def __init__(self, connecter=None, new_dbname='', original_dbname='',
                 logger=None):

        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if connecter:
            self.connecter = connecter
        else:
            self.logger.stop_exe(Msg.NO_CONNECTION_PARAMS)

        # First check whether the name of the copy already exists in PostgreSQL
        self.connecter.cursor.execute(Queries.PG_DB_EXISTS, (new_dbname, ))
        # Do not replicate if the name already exists
        result = self.connecter.cursor.fetchone()
        if result:
            msg = Msg.DB_ALREADY_EXISTS.format(dbname=new_dbname)
            self.logger.stop_exe(msg)

        if new_dbname:
            self.new_dbname = new_dbname
        else:
            self.logger.stop_exe(Msg.NO_NEW_DBNAME)

        # First check whether the name of the source exists in PostgreSQL
        self.connecter.cursor.execute(Queries.PG_DB_EXISTS,
                                      (original_dbname, ))
        result = self.connecter.cursor.fetchone()
        if not result:
            msg = Msg.DB_DOES_NOT_EXIST.format(dbname=original_dbname)
            self.logger.stop_exe(msg)

        if original_dbname:
            self.original_dbname = original_dbname
        else:
            self.logger.stop_exe(Msg.NO_ORIGINAL_DBNAME)

        msg = Msg.REPLICATOR_VARS.format(server=self.connecter.server,
                                         user=self.connecter.user,
                                         port=self.connecter.port,
                                         original_dbname=self.original_dbname,
                                         new_dbname=self.new_dbname)
        self.logger.debug(Msg.REPLICATOR_VARS_INTRO)
        self.logger.debug(msg)

    def replicate_pg_db(self):
        '''
        Target:
            - clone a specified database in PostgreSQL.
        '''
        try:
            pg_pid = self.connecter.get_pid_str()
            formatted_sql = Queries.BACKEND_PG_DB_EXISTS.format(
                pg_pid=pg_pid, target_db=self.original_dbname)
            self.connecter.cursor.execute(formatted_sql)
            result = self.connecter.cursor.fetchone()

            if result:
                msg = Msg.ACTIVE_CONNS_ERROR.format(
                    dbname=self.original_dbname)
                self.logger.stop_exe(msg)

            formatted_query_clone_pg_db = Queries.CLONE_PG_DB.format(
                dbname=self.new_dbname, original_dbname=self.original_dbname,
                user=self.connecter.user)

            msg = Msg.BEGINNING_REPLICATOR.format(
                original_dbname=self.original_dbname)
            self.logger.highlight('info', msg, 'white')

            # Get the database's "datallowconn" value
            datallowconn = self.connecter.get_datallowconn(
                self.original_dbname)

            # If datallowconn is allowed, change it temporarily
            if datallowconn:
                # Disallow connections to the database during the
                # process
                result = self.connecter.disallow_db_conn(self.original_dbname)
                if not result:
                    msg = Msg.DISALLOW_CONN_TO_PG_DB_FAIL.format(
                        dbname=self.original_dbname)
                    self.logger.highlight('warning', msg, 'yellow')

            # self.connecter.cursor.execute('commit')
            start_time = DateTools.get_current_datetime()
            # Replicate the database
            self.connecter.cursor.execute(formatted_query_clone_pg_db)
            end_time = DateTools.get_current_datetime()
            # Get and show the process' duration
            diff = DateTools.get_diff_datetimes(start_time, end_time)

            # If datallowconn was allowed, leave it as it was
            if datallowconn:
                # Allow connections to the database at the end of
                # the process
                result = self.connecter.allow_db_conn(self.original_dbname)
                if not result:
                    msg = Msg.ALLOW_CONN_TO_PG_DB_FAIL.format(
                        dbname=self.original_dbname)
                    self.logger.highlight('warning', msg, 'yellow')

            msg = Msg.REPLICATE_DB_DONE.format(
                new_dbname=self.new_dbname,
                original_dbname=self.original_dbname, diff=diff)
            self.logger.highlight('info', msg, 'green')
            self.logger.highlight('info', Msg.REPLICATOR_DONE, 'green',
                                  effect='bold')

        except Exception as e:
            self.logger.debug('Error en la función "clone_pg_db": '
                              '{}.'.format(str(e)))
            self.logger.stop_exe(Msg.REPLICATE_DB_FAIL)
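
A minimal usage sketch under the same assumptions (placeholder names; the source database must exist and the copy must not):

connecter = Connecter(server='localhost', user='postgres', port=5432)
replicator = Replicator(connecter, new_dbname='mydb_copy',
                        original_dbname='mydb')
replicator.replicate_pg_db()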
Ejemplo n.º 28
0
class Restorer:

    # An object with connection parameters to connect to PostgreSQL
    connecter = None
    logger = None  # Logger to show and log some messages
    db_backup = ''  # Absolute path of the backup file (of a database)
    new_dbname = ''  # New name for the database restored in PostgreSQL

    def __init__(self,
                 connecter=None,
                 db_backup='',
                 new_dbname='',
                 logger=None):

        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if connecter:
            self.connecter = connecter
        else:
            self.logger.stop_exe(Messenger.NO_CONNECTION_PARAMS)

        if db_backup and os.path.isfile(db_backup):
            self.db_backup = db_backup
        else:
            self.logger.stop_exe(Messenger.NO_BKP_TO_RESTORE)

        if new_dbname:
            self.new_dbname = new_dbname
        else:
            self.logger.stop_exe(Messenger.NO_DBNAME_TO_RESTORE)

        message = Messenger.DB_RESTORER_VARS.format(
            server=self.connecter.server,
            user=self.connecter.user,
            port=self.connecter.port,
            db_backup=self.db_backup,
            new_dbname=self.new_dbname)
        self.logger.debug(Messenger.DB_RESTORER_VARS_INTRO)
        self.logger.debug(message)

    def restore_db_backup(self):
        '''
        Target:
            - restore a database's backup in PostgreSQL.
        '''
        replicator = Replicator(self.connecter, self.new_dbname,
                                Default.RESTORING_TEMPLATE, self.logger)
        result = self.connecter.allow_db_conn(Default.RESTORING_TEMPLATE)
        if result:
            replicator.replicate_pg_db()
            self.connecter.disallow_db_conn(Default.RESTORING_TEMPLATE)
        else:
            self.logger.stop_exe(
                Messenger.ALLOW_DB_CONN_FAIL.format(
                    dbname=Default.RESTORING_TEMPLATE))

        # Regular expression which must match the backup's name
        regex = r'.*db_(.+)_(\d{8}_\d{6}_.+)\.(dump|bz2|gz|zip)$'
        regex = re.compile(regex)

        if re.match(regex, self.db_backup):
            # Store the parts of the backup's name (name, date, ext)
            parts = regex.search(self.db_backup).groups()
            # Store only the extension to know the type of file
            ext = parts[2]
        else:
            self.logger.stop_exe(Messenger.NO_BACKUP_FORMAT)

        message = Messenger.BEGINNING_DB_RESTORER.format(
            db_backup=self.db_backup, new_dbname=self.new_dbname)
        self.logger.highlight('info', message, 'white')
        self.logger.info(Messenger.WAIT_PLEASE)

        if ext == 'gz':
            command = 'gunzip -c {} -k | pg_restore -U {} -h {} -p {} ' \
                      '-d {}'.format(self.db_backup, self.connecter.user,
                                     self.connecter.server,
                                     self.connecter.port, self.new_dbname)
        elif ext == 'bz2':
            command = 'bunzip2 -c {} -k | pg_restore -U {} -h {} -p {} ' \
                      '-d {}'.format(self.db_backup, self.connecter.user,
                                     self.connecter.server,
                                     self.connecter.port, self.new_dbname)
        elif ext == 'zip':
            command = 'unzip -p {} | pg_restore -U {} -h {} -p {} ' \
                      '-d {}'.format(self.db_backup, self.connecter.user,
                                     self.connecter.server,
                                     self.connecter.port, self.new_dbname)
        else:
            command = 'pg_restore -U {} -h {} -p {} -d {} {}'.format(
                self.connecter.user, self.connecter.server,
                self.connecter.port, self.new_dbname, self.db_backup)

        try:
            start_time = DateTools.get_current_datetime()
            # Perform the restoration of the database
            result = subprocess.call(command, shell=True)
            end_time = DateTools.get_current_datetime()
            # Get and show the process' duration
            diff = DateTools.get_diff_datetimes(start_time, end_time)

            if result != 0:
                raise Exception()

            message = Messenger.RESTORE_DB_DONE.format(
                db_backup=self.db_backup,
                new_dbname=self.new_dbname,
                diff=diff)
            self.logger.highlight('info', message, 'green')

            self.logger.highlight('info',
                                  Messenger.RESTORER_DONE,
                                  'green',
                                  effect='bold')

        except Exception as e:
            self.logger.debug('Error en la función "restore_db_backup": '
                              '{}.'.format(str(e)))
            message = Messenger.RESTORE_DB_FAIL.format(
                db_backup=self.db_backup, new_dbname=self.new_dbname)
            self.logger.stop_exe(message)
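
A minimal usage sketch (placeholder values; the backup file name must match the db_(name)_(timestamp).(ext) pattern checked above):

connecter = Connecter(server='localhost', user='postgres', port=5432)
restorer = Restorer(connecter,
                    db_backup='/backups/db_mydb_20160101_120000_mygroup.dump',
                    new_dbname='mydb_restored')
restorer.restore_db_backup()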
Ejemplo n.º 29
0
class Backer:

    bkp_path = ''  # The path where the backups are stored
    group = ''  # The name of the subdirectory where the backups are stored
    bkp_type = ''  # The type of the backups' files
    prefix = ''  # The prefix of the backups' names
    in_dbs = []  # List of databases to be included in the process
    in_regex = ''  # Regular expression which must match the included databases
    # Flag which determines whether inclusion conditions take precedence over
    # the exclusion ones
    in_priority = False
    ex_dbs = []  # List of databases to be excluded in the process
    ex_regex = ''  # Regular expression which must match the excluded databases
    # Flag which determines whether the templates must be included
    ex_templates = True
    # Flag which determines whether the included databases must be vacuumed
    # before the backup process
    vacuum = True
    # Use another PostgreSQL user during the backup process (only for superusers)
    db_owner = ''
    # An object with connection parameters to connect to PostgreSQL
    connecter = None
    logger = None  # Logger to show and log some messages

    def __init__(self,
                 connecter=None,
                 bkp_path='',
                 group='',
                 bkp_type='dump',
                 prefix='',
                 in_dbs=[],
                 in_regex='',
                 in_priority=False,
                 ex_dbs=['postgres'],
                 ex_regex='',
                 ex_templates=True,
                 vacuum=True,
                 db_owner='',
                 logger=None):

        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if connecter:
            self.connecter = connecter
        else:
            self.logger.stop_exe(Msg.NO_CONNECTION_PARAMS)

        # If backup directory is not specified, create a default one to store
        # the backups
        if bkp_path:
            self.bkp_path = bkp_path
        else:
            self.bkp_path = Default.BKP_PATH
            Dir.create_dir(self.bkp_path, self.logger)

        if group:
            self.group = group
        else:
            self.group = Default.GROUP

        if bkp_type is None:
            self.bkp_type = Default.BKP_TYPE
        elif Checker.check_compress_type(bkp_type):
            self.bkp_type = bkp_type
        else:
            self.logger.stop_exe(Msg.INVALID_BKP_TYPE)

        self.prefix = prefix

        if isinstance(in_dbs, list):
            self.in_dbs = in_dbs
        else:
            self.in_dbs = Casting.str_to_list(in_dbs)

        if Checker.check_regex(in_regex):
            self.in_regex = in_regex
        else:
            self.logger.stop_exe(Msg.INVALID_IN_REGEX)

        if isinstance(in_priority, bool):
            self.in_priority = in_priority
        elif Checker.str_is_bool(in_priority):
            self.in_priority = Casting.str_to_bool(in_priority)
        else:
            self.logger.stop_exe(Msg.INVALID_IN_PRIORITY)

        if isinstance(ex_dbs, list):
            self.ex_dbs = ex_dbs
        else:
            self.ex_dbs = Casting.str_to_list(ex_dbs)

        if Checker.check_regex(ex_regex):
            self.ex_regex = ex_regex
        else:
            self.logger.stop_exe(Msg.INVALID_EX_REGEX)

        if isinstance(ex_templates, bool):
            self.ex_templates = ex_templates
        elif Checker.str_is_bool(ex_templates):
            self.ex_templates = Casting.str_to_bool(ex_templates)
        else:
            self.logger.stop_exe(Msg.INVALID_EX_TEMPLATES)

        if isinstance(vacuum, bool):
            self.vacuum = vacuum
        elif Checker.str_is_bool(vacuum):
            self.vacuum = Casting.str_to_bool(vacuum)
        else:
            self.logger.stop_exe(Msg.INVALID_VACUUM)

        if db_owner is None:
            self.db_owner = Default.DB_OWNER
        else:
            self.db_owner = db_owner

        msg = Msg.DB_BACKER_VARS.format(server=self.connecter.server,
                                        user=self.connecter.user,
                                        port=self.connecter.port,
                                        bkp_path=self.bkp_path,
                                        group=self.group,
                                        bkp_type=self.bkp_type,
                                        prefix=self.prefix,
                                        in_dbs=self.in_dbs,
                                        in_regex=self.in_regex,
                                        in_priority=self.in_priority,
                                        ex_dbs=self.ex_dbs,
                                        ex_regex=self.ex_regex,
                                        ex_templates=self.ex_templates,
                                        vacuum=self.vacuum,
                                        db_owner=self.db_owner)
        self.logger.debug(Msg.DB_BACKER_VARS_INTRO)
        self.logger.debug(msg)

    def backup_db(self, dbname, bkps_dir):
        '''
        Target:
            - make a backup of a specified database.
        Parameters:
            - dbname: name of the database which is going to be backed up.
            - bkps_dir: directory where the backup is going to be stored.
        Return:
            - a boolean which indicates the success of the process.
        '''
        success = True
        # Get date and time of the zone
        init_ts = DateTools.get_date()
        # Get current year
        year = str(DateTools.get_year(init_ts))
        # Get current month
        month = str(DateTools.get_month(init_ts))
        # Create new directories with the year and the month of the backup
        bkp_dir = bkps_dir + year + '/' + month + '/'
        Dir.create_dir(bkp_dir, self.logger)
        # Set backup's name
        file_name = self.prefix + 'db_' + dbname + '_' + init_ts + '.' + \
            self.bkp_type

        # Store the command to do depending on the backup type
        if self.bkp_type == 'gz':  # Zip with gzip
            command = 'pg_dump {} -Fc -U {} -h {} -p {} | gzip > {}'.format(
                dbname, self.connecter.user, self.connecter.server,
                self.connecter.port, bkp_dir + file_name)
        elif self.bkp_type == 'bz2':  # Zip with bzip2
            command = 'pg_dump {} -Fc -U {} -h {} -p {} | bzip2 > {}'.format(
                dbname, self.connecter.user, self.connecter.server,
                self.connecter.port, bkp_dir + file_name)
        elif self.bkp_type == 'zip':  # Zip with zip
            command = 'pg_dump {} -Fc -U {} -h {} -p {} | zip > {}'.format(
                dbname, self.connecter.user, self.connecter.server,
                self.connecter.port, bkp_dir + file_name)
        else:  # Do not zip
            command = 'pg_dump {} -Fc -U {} -h {} -p {} > {}'.format(
                dbname, self.connecter.user, self.connecter.server,
                self.connecter.port, bkp_dir + file_name)

        try:
            # Execute the command in console
            result = subprocess.call(command, shell=True)
            if result != 0:
                raise Exception()

        except Exception as e:
            self.logger.debug('Error en la función "backup_db": {}.'.format(
                str(e)))
            success = False

        return success

    def backup_dbs(self, dbs_all):
        '''
        Target:
            - make a backup of some specified databases.
        Parameters:
            - dbs_all: names of the databases which are going to be backed up.
        '''
        self.logger.highlight('info', Msg.CHECKING_BACKUP_DIR, 'white')

        # Create a new directory with the name of the group
        bkps_dir = self.bkp_path + self.group + Default.DB_BKPS_DIR
        Dir.create_dir(bkps_dir, self.logger)

        self.logger.info(Msg.DESTINY_DIR.format(path=bkps_dir))

        self.logger.highlight('info', Msg.PROCESSING_DB_BACKER, 'white')

        if dbs_all:
            for db in dbs_all:

                dbname = db['datname']
                msg = Msg.PROCESSING_DB.format(dbname=dbname)
                self.logger.highlight('info', msg, 'cyan')

                # Let the user know whether the database connection is allowed
                if not db['datallowconn']:
                    msg = Msg.FORBIDDEN_DB_CONNECTION.format(dbname=dbname)
                    self.logger.highlight('warning',
                                          msg,
                                          'yellow',
                                          effect='bold')
                    success = False

                else:
                    # Vacuum the database before the backup process if
                    # necessary
                    if self.vacuum:
                        self.logger.info(
                            Msg.PRE_VACUUMING_DB.format(dbname=dbname))
                        vacuumer = Vacuumer(self.connecter, self.in_dbs,
                                            self.in_regex, self.in_priority,
                                            self.ex_dbs, self.ex_regex,
                                            self.ex_templates, self.db_owner,
                                            self.logger)

                        # Vacuum the database
                        success = vacuumer.vacuum_db(dbname)
                        if success:
                            msg = Msg.PRE_VACUUMING_DB_DONE.format(
                                dbname=dbname)
                            self.logger.info(msg)
                        else:
                            msg = Msg.PRE_VACUUMING_DB_FAIL.format(
                                dbname=dbname)
                            self.logger.highlight('warning', msg, 'yellow')

                    self.logger.info(
                        Msg.BEGINNING_DB_BACKER.format(dbname=dbname))

                    start_time = DateTools.get_current_datetime()
                    # Make the backup of the database
                    success = self.backup_db(dbname, bkps_dir)
                    end_time = DateTools.get_current_datetime()
                    # Get and show the process' duration
                    diff = DateTools.get_diff_datetimes(start_time, end_time)

                if success:
                    msg = Msg.DB_BACKER_DONE.format(dbname=dbname, diff=diff)
                    self.logger.highlight('info', msg, 'green')
                else:
                    msg = Msg.DB_BACKER_FAIL.format(dbname=dbname)
                    self.logger.highlight('warning',
                                          msg,
                                          'yellow',
                                          effect='bold')
        else:
            self.logger.highlight('warning',
                                  Msg.BACKER_HAS_NOTHING_TO_DO,
                                  'yellow',
                                  effect='bold')

        self.logger.highlight('info', Msg.BACKER_DONE, 'green', effect='bold')
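
A short illustration of how backup_db composes the output path above (the value of Default.DB_BKPS_DIR and the timestamp format are assumptions):

bkp_path = '/var/backups/pg/'             # self.bkp_path
group = 'mygroup'                         # self.group
bkps_dir = bkp_path + group + '/db/'      # Default.DB_BKPS_DIR assumed to be '/db/'
init_ts = '20160101_120000_CET'           # DateTools.get_date(), format assumed
bkp_dir = bkps_dir + '2016' + '/' + '01' + '/'
file_name = 'prefix_' + 'db_' + 'mydb' + '_' + init_ts + '.' + 'dump'
# -> /var/backups/pg/mygroup/db/2016/01/prefix_db_mydb_20160101_120000_CET.dump
print(bkp_dir + file_name)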
Ejemplo n.º 30
0
    def send_broadcast_payload_except_self(self, payload):
        payload = json.dumps(payload)
        Logger.debug('broadcasting except self: %s', payload, 'server')
        for user_id, connection in self.connections.iteritems():
            if connection is not self:
                connection.sendLine(payload)
Ejemplo n.º 31
0
class CfgParser:

    logger = None  # Logger to show and log some messages
    cfg = None  # Parser which stores the variables of the config file
    conn_vars = {}  # Dictionary to store the loaded connection variables
    bkp_vars = {}  # Dictionary to store the loaded backup variables
    kill_vars = {}  # Dictionary to store the loaded terminator variables
    mail_vars = {}  # Dictionary to store the loaded mailer variables

    def __init__(self, logger):
        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

    def load_cfg(self, cfg_file):
        '''
        Target:
            - create a parser and read a config file.
        Parameters:
            - cfg_file: the config file to be read.
        '''
        try:  # Check whether exceptions arise in...
            self.cfg = configparser.ConfigParser()  # Create a parser
            if os.path.exists(cfg_file):
                self.cfg.read(cfg_file)  # Parse the .cfg file
            else:
                raise Exception()
        except Exception as e:  # If an exception is raised...
            self.logger.debug('Error en la función "load_cfg": {}.'.format(
                str(e)))
            self.logger.stop_exe(Messenger.INVALID_CFG_PATH)

    def parse_connecter(self):
        '''
        Target:
            - get the connecter variables from a configuration file and store
              them in a dictionary.
        '''
        try:
            self.conn_vars = {
                'server': self.cfg.get('postgres', 'server').strip(),
                'user': self.cfg.get('postgres', 'username').strip(),
                'port': self.cfg.get('postgres', 'port'),
            }
        except Exception as e:
            self.logger.debug('Error en la función "parse_connecter": '
                              '{}.'.format(str(e)))
            self.logger.stop_exe(Messenger.CONNECTER_CFG_DAMAGED)

    def parse_mailer(self):
        '''
        Target:
            - get the mailer variables from a configuration file and store
              them in a dictionary.
        '''
        try:
            self.mail_vars = {
                'name': self.cfg.get('from', 'name').strip(),
                'address': self.cfg.get('from', 'address').strip(),
                'password': self.cfg.get('from', 'password').strip(),
                'to': self.cfg.get('to', 'to').strip(),
                'cc': self.cfg.get('to', 'cc').strip(),
                'bcc': self.cfg.get('to', 'bcc').strip(),
                'level': self.cfg.get('settings', 'level').strip(),
                'server_tag': self.cfg.get('settings', 'server_tag').strip(),
                'external_ip': self.cfg.get('settings', 'external_ip').strip(),
            }

        except Exception as e:
            self.logger.debug('Error en la función "parse_mailer": '
                              '{}.'.format(str(e)))
            self.logger.stop_exe(Messenger.MAILER_CFG_DAMAGED)

    def parse_alterer(self):
        '''
        Target:
            - get the alterer variables from a configuration file and store
              them in a dictionary.
        '''
        try:
            self.bkp_vars = {
                'in_dbs': self.cfg.get('settings', 'in_dbs'),
                'old_role': self.cfg.get('settings', 'old_role'),
                'new_role': self.cfg.get('settings', 'new_role'),
            }

        except Exception as e:
            self.logger.debug('Error en la función "parse_alterer": '
                              '{}.'.format(str(e)))
            self.logger.stop_exe(Messenger.ALTERER_CFG_DAMAGED)

    def parse_backer(self):
        '''
        Target:
            - get the backer variables from a configuration file and store them
              in a dictionary (database case).
        '''
        try:
            self.bkp_vars = {
                'bkp_path': self.cfg.get('dir', 'bkp_path').strip(),
                'group': self.cfg.get('dir', 'group').strip(),
                'bkp_type': self.cfg.get('file', 'bkp_type').strip(),
                'prefix': self.cfg.get('file', 'prefix').strip(),
                'in_dbs': self.cfg.get('includes', 'in_dbs'),
                'in_regex': self.cfg.get('includes', 'in_regex').strip(),
                'in_priority': self.cfg.get('includes', 'in_priority').strip(),
                'ex_dbs': self.cfg.get('excludes', 'ex_dbs'),
                'ex_regex': self.cfg.get('excludes', 'ex_regex').strip(),
                'ex_templates': self.cfg.get('excludes',
                                             'ex_templates').strip(),
                'vacuum': self.cfg.get('other', 'vacuum').strip(),
                'db_owner': self.cfg.get('other', 'db_owner').strip(),
            }

        except Exception as e:
            self.logger.debug('Error en la función "parse_backer": {}.'.format(
                str(e)))
            self.logger.stop_exe(Messenger.DB_BACKER_CFG_DAMAGED)

    def parse_backer_cluster(self):
        '''
        Target:
            - get the backer variables from a configuration file and store them
              in a dictionary (cluster case).
        '''
        try:
            self.bkp_vars = {
                'bkp_path': self.cfg.get('dir', 'bkp_path').strip(),
                'group': self.cfg.get('dir', 'group').strip(),
                'bkp_type': self.cfg.get('file', 'bkp_type').strip(),
                'prefix': self.cfg.get('file', 'prefix').strip(),
                'vacuum': self.cfg.get('other', 'vacuum').strip(),
            }

        except Exception as e:
            self.logger.debug('Error en la función "parse_backer_cluster": '
                              '{}.'.format(str(e)))
            self.logger.stop_exe(Messenger.CL_BACKER_CFG_DAMAGED)

    def parse_dropper(self):
        '''
        Target:
            - get the dropper variables from a configuration file and store
              them in a dictionary.
        '''
        try:
            self.bkp_vars = {
                'in_dbs': self.cfg.get('includes', 'in_dbs'),
            }

        except Exception as e:
            self.logger.debug('Error en la función "parse_dropper": '
                              '{}.'.format(str(e)))
            self.logger.stop_exe(Messenger.DROPPER_CFG_DAMAGED)

    def parse_replicator(self):
        '''
        Target:
            - get the replicator variables from a configuration file and store
              them in a dictionary.
        '''
        try:
            self.bkp_vars = {
                'new_dbname': self.cfg.get('settings', 'new_dbname'),
                'original_dbname': self.cfg.get('settings', 'original_dbname'),
            }

        except Exception as e:
            self.logger.debug('Error en la función "parse_replicator": '
                              '{}.'.format(str(e)))
            self.logger.stop_exe(Messenger.REPLICATOR_CFG_DAMAGED)

    def parse_restorer(self):
        '''
        Target:
            - get the restorer variables from a configuration file and store
              them in a dictionary (database case).
        '''
        try:
            self.bkp_vars = {
                'bkp_path': self.cfg.get('settings', 'bkp_path'),
                'new_dbname': self.cfg.get('settings', 'new_dbname'),
            }

        except Exception as e:
            self.logger.debug('Error en la función "parse_restorer": '
                              '{}.'.format(str(e)))
            self.logger.stop_exe(Messenger.DB_RESTORER_CFG_DAMAGED)

    def parse_restorer_cluster(self):
        '''
        Target:
            - get the restorer variables from a configuration file and store
              them in a dictionary (cluster case).
        '''
        try:
            self.bkp_vars = {
                'bkp_path': self.cfg.get('settings', 'bkp_path'),
            }

        except Exception as e:
            self.logger.debug('Error en la función "parse_restorer_cluster": '
                              '{}.'.format(str(e)))
            self.logger.stop_exe(Messenger.CL_RESTORER_CFG_DAMAGED)

    def parse_scheduler(self):
        '''
        Target:
            - get the scheduler variables from a configuration file and store
              them in a dictionary.
        '''
        try:
            self.bkp_vars = {
                'time': self.cfg.get('settings', 'time').strip(),
                'command': self.cfg.get('settings', 'command').strip(),
            }

        except Exception as e:
            self.logger.debug('Error en la función "parse_scheduler": '
                              '{}.'.format(str(e)))
            self.logger.stop_exe(Messenger.SCHEDULER_CFG_DAMAGED)

    def parse_terminator(self):
        '''
        Target:
            - get the terminator variables from a configuration file and store
              them in a dictionary.
        '''
        try:
            self.kill_vars = {
                'kill_all': self.cfg.get('settings', 'kill_all').strip(),
                'kill_user': self.cfg.get('settings', 'kill_user').strip(),
                'kill_dbs': self.cfg.get('settings', 'kill_dbs').strip(),
            }

        except Exception as e:
            self.logger.debug('Error en la función "parse_terminator": '
                              '{}.'.format(str(e)))
            self.logger.stop_exe(Messenger.TERMINATOR_CFG_DAMAGED)

    def parse_trimmer(self):
        '''
        Target:
            - get the trimmer variables from a configuration file and store
              them in a dictionary (database case).
        '''
        try:
            self.bkp_vars = {
                'bkp_path': self.cfg.get('dir', 'bkp_path').strip(),
                'prefix': self.cfg.get('file', 'prefix').strip(),
                'in_dbs': self.cfg.get('includes', 'in_dbs'),
                'in_regex': self.cfg.get('includes', 'in_regex').strip(),
                'in_priority': self.cfg.get('includes', 'in_priority').strip(),
                'ex_dbs': self.cfg.get('excludes', 'ex_dbs'),
                'ex_regex': self.cfg.get('excludes', 'ex_regex').strip(),
                'min_n_bkps': self.cfg.get('conditions', 'min_n_bkps').strip(),
                'exp_days': self.cfg.get('conditions', 'exp_days').strip(),
                'max_size': self.cfg.get('conditions', 'max_size').strip(),
                'pg_warnings': self.cfg.get('other', 'pg_warnings').strip(),
            }

        except Exception as e:
            self.logger.debug('Error en la función "parse_trimmer": '
                              '{}.'.format(str(e)))
            self.logger.stop_exe(Messenger.DB_TRIMMER_CFG_DAMAGED)

    def parse_trimmer_cluster(self):
        '''
        Target:
            - get the trimmer variables from a configuration file and store
              them in a dictionary (cluster case).
        '''
        try:
            self.bkp_vars = {
                'bkp_path': self.cfg.get('dir', 'bkp_path').strip(),
                'prefix': self.cfg.get('file', 'prefix').strip(),
                'min_n_bkps': self.cfg.get('conditions', 'min_n_bkps').strip(),
                'exp_days': self.cfg.get('conditions', 'exp_days').strip(),
                'max_size': self.cfg.get('conditions', 'max_size').strip(),
            }

        except Exception as e:
            self.logger.debug('Error en la función "parse_trimmer_cluster": '
                              '{}.'.format(str(e)))
            self.logger.stop_exe(Messenger.CL_TRIMMER_CFG_DAMAGED)

    def parse_vacuumer(self):
        '''
        Target:
            - get the vacuumer variables from a configuration file and store
              them in a dictionary.
        '''
        try:
            self.bkp_vars = {
                'in_dbs': self.cfg.get('includes', 'in_dbs'),
                'in_regex': self.cfg.get('includes', 'in_regex').strip(),
                'in_priority': self.cfg.get('includes', 'in_priority').strip(),
                'ex_dbs': self.cfg.get('excludes', 'ex_dbs'),
                'ex_regex': self.cfg.get('excludes', 'ex_regex').strip(),
                'ex_templates': self.cfg.get('excludes',
                                             'ex_templates').strip(),
                'db_owner': self.cfg.get('other', 'db_owner').strip(),
            }

        except Exception as e:
            self.logger.debug('Error en la función "parse_vacuumer": '
                              '{}.'.format(str(e)))
            self.logger.stop_exe(Messenger.VACUUMER_CFG_DAMAGED)
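
A minimal usage sketch (the file path is a placeholder; the file must contain the sections and options the chosen parse_* method reads):

parser = CfgParser(Logger())
parser.load_cfg('/etc/myapp/connecter.cfg')
parser.parse_connecter()
print(parser.conn_vars['server'], parser.conn_vars['port'])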
Ejemplo n.º 32
0
    def send_payload(self, payload):
        payload = json.dumps(payload)
        Logger.debug('send payload to self only: %s', payload, 'server')
        self.sendLine(payload)
Ejemplo n.º 33
0
class Vacuumer:

    in_dbs = []  # List of databases to be included in the process
    in_regex = ''  # Regular expression which must match the included databases
    # Flag which determines whether inclusion conditions take precedence over
    # the exclusion ones
    in_priority = False
    ex_dbs = []  # List of databases to be excluded in the process
    ex_regex = ''  # Regular expression which must match the excluded databases
    # Flag which determines whether the templates must be included
    ex_templates = True
    # Use another PostgreSQL user during the vacuum process (only for superusers)
    db_owner = ''
    # An object with connection parameters to connect to PostgreSQL
    connecter = None
    logger = None  # Logger to show and log some messages

    def __init__(self, connecter=None, in_dbs=[], in_regex='',
                 in_priority=False, ex_dbs=['postgres'], ex_regex='',
                 ex_templates=True, db_owner='', logger=None):

        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if connecter:
            self.connecter = connecter
        else:
            self.logger.stop_exe(Messenger.NO_CONNECTION_PARAMS)

        if isinstance(in_dbs, list):
            self.in_dbs = in_dbs
        else:
            self.in_dbs = Casting.str_to_list(in_dbs)

        if Checker.check_regex(in_regex):
            self.in_regex = in_regex
        else:
            self.logger.stop_exe(Messenger.INVALID_IN_REGEX)

        if isinstance(in_priority, bool):
            self.in_priority = in_priority
        elif Checker.str_is_bool(in_priority):
            self.in_priority = Casting.str_to_bool(in_priority)
        else:
            self.logger.stop_exe(Messenger.INVALID_IN_PRIORITY)

        if isinstance(ex_dbs, list):
            self.ex_dbs = ex_dbs
        else:
            self.ex_dbs = Casting.str_to_list(ex_dbs)

        if Checker.check_regex(ex_regex):
            self.ex_regex = ex_regex
        else:
            self.logger.stop_exe(Messenger.INVALID_EX_REGEX)

        if isinstance(ex_templates, bool):
            self.ex_templates = ex_templates
        elif Checker.str_is_bool(ex_templates):
            self.ex_templates = Casting.str_to_bool(ex_templates)
        else:
            self.logger.stop_exe(Messenger.INVALID_EX_TEMPLATES)

        if db_owner is None:
            self.db_owner = Default.DB_OWNER
        else:
            self.db_owner = db_owner

        message = Messenger.VACUUMER_VARS.format(
            server=self.connecter.server, user=self.connecter.user,
            port=self.connecter.port, in_dbs=self.in_dbs,
            in_regex=self.in_regex, in_priority=self.in_priority,
            ex_dbs=self.ex_dbs, ex_regex=self.ex_regex,
            ex_templates=self.ex_templates, db_owner=self.db_owner)
        self.logger.debug(Messenger.VACUUMER_VARS_INTRO)
        self.logger.debug(message)

    def vacuum_db(self, dbname):
        '''
        Target:
            - vacuum a PostgreSQL database.
        Parameters:
            - dbname: name of the database which is going to be vacuumed.
        Return:
            - a boolean which indicates the success of the process.
        '''
        success = True

        # Store the command to do
        command = 'vacuumdb {} -U {} -h {} -p {}'.format(
            dbname, self.connecter.user, self.connecter.server,
            self.connecter.port)

        try:
            # Execute the command in console
            result = subprocess.call(command, shell=True)
            if result != 0:
                raise Exception()
        except Exception as e:
            self.logger.debug('Error en la función "vacuum_db": {}.'.format(
                str(e)))
            success = False
        return success

    def vacuum_dbs(self, vacuum_list):
        '''
        Target:
            - vacuum a group of PostgreSQL databases.
        Parameters:
            - vacuum_list: names of the databases which are going to be
              vacuumed.
        '''
        if vacuum_list:
            self.logger.highlight('info', Messenger.BEGINNING_VACUUMER,
                                  'white')

        for db in vacuum_list:

            dbname = db['datname']

            message = Messenger.PROCESSING_DB.format(dbname=dbname)
            self.logger.highlight('info', message, 'cyan')

            # Let the user know whether the database connection is allowed
            if not db['datallowconn']:
                message = Messenger.FORBIDDEN_DB_CONNECTION.format(
                    dbname=dbname)
                self.logger.highlight('warning', message, 'yellow',
                                      effect='bold')
                success = False
            else:
                start_time = DateTools.get_current_datetime()
                # Vacuum the database
                success = self.vacuum_db(dbname)
                end_time = DateTools.get_current_datetime()
                # Get and show the process' duration
                diff = DateTools.get_diff_datetimes(start_time, end_time)

            if success:
                message = Messenger.DB_VACUUMER_DONE.format(dbname=dbname,
                                                            diff=diff)
                self.logger.highlight('info', message, 'green')

            else:
                message = Messenger.DB_VACUUMER_FAIL.format(dbname=dbname)
                self.logger.highlight('warning', message, 'yellow',
                                      effect='bold')

        self.logger.highlight('info', Messenger.VACUUMER_DONE, 'green',
                              effect='bold')
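
A minimal usage sketch (placeholder values; vacuum_dbs expects rows carrying 'datname' and 'datallowconn', as fetched from PostgreSQL):

connecter = Connecter(server='localhost', user='postgres', port=5432)
vacuumer = Vacuumer(connecter, ex_dbs=['postgres'])
vacuumer.vacuum_dbs([{'datname': 'mydb', 'datallowconn': True}])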
Ejemplo n.º 34
0
			time.sleep(0.5)
		# Collect Results
		self._collectIgnitionDelay()
		return True

	def getFitness(self):
		"""
		Call this after calling run.
		This will return a fitness value based on the temperatures computed.
		"""
		score = 0.0
		for value in self.results:
			score += abs(value[0] - value[1])
		return score

if __name__ == "__main__":
	"""
	Some test code
	"""
	import parameters
	l = Logger()
	l.setLogLevel('debug')
	runner = KivaRunner('../work', '../log', '../ext/Detalierte Mechanismus.csv', parameters.parameter_format, l)
	l.debug("Default Parameter Set: %s" % runner.getParameters())
	runner.run(10)
	l.debug("Fitness: %s" % runner.getFitness())
	runner.setParameters([0.01])
	l.debug("New Parameter Set: %s" % runner.getParameters())
	runner.run(10)
	l.debug("Fitness: %s" % runner.getFitness())
Ejemplo n.º 35
0
class Replicator:

    # An object with connection parameters to connect to PostgreSQL
    connecter = None
    logger = None  # Logger to show and log some messages
    new_dbname = ''  # Name of the copy
    original_dbname = ''  # Name of the original database

    def __init__(self,
                 connecter=None,
                 new_dbname='',
                 original_dbname='',
                 logger=None):

        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if connecter:
            self.connecter = connecter
        else:
            self.logger.stop_exe(Msg.NO_CONNECTION_PARAMS)

        # First check whether the name of the copy already exists in PostgreSQL
        self.connecter.cursor.execute(Queries.PG_DB_EXISTS, (new_dbname, ))
        # Do not replicate if the name already exists
        result = self.connecter.cursor.fetchone()
        if result:
            msg = Msg.DB_ALREADY_EXISTS.format(dbname=new_dbname)
            self.logger.stop_exe(msg)

        if new_dbname:
            self.new_dbname = new_dbname
        else:
            self.logger.stop_exe(Msg.NO_NEW_DBNAME)

        # First check whether the name of the source exists in PostgreSQL
        self.connecter.cursor.execute(Queries.PG_DB_EXISTS,
                                      (original_dbname, ))
        result = self.connecter.cursor.fetchone()
        if not result:
            msg = Msg.DB_DOES_NOT_EXIST.format(dbname=original_dbname)
            self.logger.stop_exe(msg)

        if original_dbname:
            self.original_dbname = original_dbname
        else:
            self.logger.stop_exe(Msg.NO_ORIGINAL_DBNAME)

        msg = Msg.REPLICATOR_VARS.format(server=self.connecter.server,
                                         user=self.connecter.user,
                                         port=self.connecter.port,
                                         original_dbname=self.original_dbname,
                                         new_dbname=self.new_dbname)
        self.logger.debug(Msg.REPLICATOR_VARS_INTRO)
        self.logger.debug(msg)

    def replicate_pg_db(self):
        '''
        Target:
            - clone a specified database in PostgreSQL.
        '''
        try:
            pg_pid = self.connecter.get_pid_str()
            formatted_sql = Queries.BACKEND_PG_DB_EXISTS.format(
                pg_pid=pg_pid, target_db=self.original_dbname)
            self.connecter.cursor.execute(formatted_sql)
            result = self.connecter.cursor.fetchone()

            if result:
                msg = Msg.ACTIVE_CONNS_ERROR.format(
                    dbname=self.original_dbname)
                self.logger.stop_exe(msg)

            formatted_query_clone_pg_db = Queries.CLONE_PG_DB.format(
                dbname=self.new_dbname,
                original_dbname=self.original_dbname,
                user=self.connecter.user)

            msg = Msg.BEGINNING_REPLICATOR.format(
                original_dbname=self.original_dbname)
            self.logger.highlight('info', msg, 'white')

            # Get the database's "datallowconn" value
            datallowconn = self.connecter.get_datallowconn(
                self.original_dbname)

            # If datallowconn is allowed, change it temporarily
            if datallowconn:
                # Disallow connections to the database during the
                # process
                result = self.connecter.disallow_db_conn(self.original_dbname)
                if not result:
                    msg = Msg.DISALLOW_CONN_TO_PG_DB_FAIL.format(
                        dbname=self.original_dbname)
                    self.logger.highlight('warning', msg, 'yellow')

            # self.connecter.cursor.execute('commit')
            start_time = DateTools.get_current_datetime()
            # Replicate the database
            self.connecter.cursor.execute(formatted_query_clone_pg_db)
            end_time = DateTools.get_current_datetime()
            # Get and show the process' duration
            diff = DateTools.get_diff_datetimes(start_time, end_time)

            # If datallowconn was allowed, leave it as it was
            if datallowconn:
                # Allow connections to the database at the end of
                # the process
                result = self.connecter.allow_db_conn(self.original_dbname)
                if not result:
                    msg = Msg.ALLOW_CONN_TO_PG_DB_FAIL.format(
                        dbname=self.original_dbname)
                    self.logger.highlight('warning', msg, 'yellow')

            msg = Msg.REPLICATE_DB_DONE.format(
                new_dbname=self.new_dbname,
                original_dbname=self.original_dbname,
                diff=diff)
            self.logger.highlight('info', msg, 'green')
            self.logger.highlight('info',
                                  Msg.REPLICATOR_DONE,
                                  'green',
                                  effect='bold')

        except Exception as e:
            self.logger.debug('Error en la función "clone_pg_db": '
                              '{}.'.format(str(e)))
            self.logger.stop_exe(Msg.REPLICATE_DB_FAIL)
Ejemplo n.º 36
0
class Alterer:

    in_dbs = []  # List of databases to be included in the process
    old_role = ''  # Current owner of the database's tables
    new_role = ''  # New owner for the database and its tables
    # An object with connection parameters to connect to PostgreSQL
    connecter = None
    logger = None  # Logger to show and log some messages

    def __init__(self, connecter=None, in_dbs=[], old_role='', new_role='',
                 logger=None):

        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if connecter:
            self.connecter = connecter
        else:
            self.logger.stop_exe(Msg.NO_CONNECTION_PARAMS)

        if isinstance(in_dbs, list):
            self.in_dbs = in_dbs
        else:
            self.in_dbs = Casting.str_to_list(in_dbs)

        if old_role:
            self.old_role = old_role
        else:
            self.logger.stop_exe(Msg.NO_OLD_ROLE)

        if not new_role:
            self.logger.stop_exe(Msg.NO_NEW_ROLE)
        # First check whether the user exists in PostgreSQL or not
        self.connecter.cursor.execute(Queries.PG_USER_EXISTS, (new_role, ))
        # Do not alter database if the user does not exist
        result = self.connecter.cursor.fetchone()
        if result:
            self.new_role = new_role
        else:
            msg = Msg.USER_DOES_NOT_EXIST.format(user=new_role)
            self.logger.stop_exe(msg)

        msg = Msg.ALTERER_VARS.format(
            server=self.connecter.server, user=self.connecter.user,
            port=self.connecter.port, in_dbs=self.in_dbs,
            old_role=self.old_role, new_role=self.new_role)
        self.logger.debug(Msg.ALTERER_VARS_INTRO)
        self.logger.debug(msg)

    def alter_db_owner(self, db):
        '''
        Target:
            - change the owner of a database and its tables.
        Parameters:
            - db: database which is going to be altered.
        Return:
            - a boolean which indicates the success of the process.
        '''
        msg = Msg.ALTERER_FEEDBACK.format(old_role=self.old_role,
                                          new_role=self.new_role)
        self.logger.info(msg)

        success = True
        dbname = db['datname']

        if db['owner'] != 'postgres':  # Do not touch dbs owned by postgres

            if db['datallowconn'] == 1:  # Check if the db allows connections

                try:
                    # Change the owner of the database
                    self.connecter.cursor.execute(
                        Queries.CHANGE_PG_DB_OWNER.format(
                            dbname=dbname, new_role=self.new_role))

                except Exception as e:
                    success = False
                    self.logger.debug('Error en la función "alter_db_owner": '
                                      '{}'.format(str(e)))
                    msg = Msg.CHANGE_PG_DB_OWNER_FAIL
                    self.logger.highlight('warning', msg, 'yellow')

                # Start another connection to the target database to be able to
                # apply the next query
                own_connecter = Connecter(server=self.connecter.server,
                                          user=self.connecter.user,
                                          port=self.connecter.port,
                                          database=dbname, logger=self.logger)

                # Disallow connections to the database during the process
                result = self.connecter.disallow_db_conn(dbname)
                if not result:
                    msg = Msg.DISALLOW_CONN_TO_PG_DB_FAIL.format(dbname=dbname)
                    self.logger.highlight('warning', msg, 'yellow')

                try:
                    # Change the owner of the database's tables
                    own_connecter.cursor.execute(
                        Queries.REASSIGN_PG_DB_TBLS_OWNER.format(
                            old_role=self.old_role, new_role=self.new_role))

                except Exception as e:
                    success = False
                    self.logger.debug('Error en la función "alter_db_owner": '
                                      '{}'.format(str(e)))
                    msg = Msg.REASSIGN_PG_DB_TBLS_OWNER_FAIL
                    self.logger.highlight('warning', msg, 'yellow')

                # Allow connections to the database at the end of the process
                result = self.connecter.allow_db_conn(dbname)
                if not result:
                    msg = Msg.ALLOW_CONN_TO_PG_DB_FAIL.format(dbname=dbname)
                    self.logger.highlight('warning', msg, 'yellow')

                # Close cursor and connection to the target database
                own_connecter.pg_disconnect()

            else:
                success = False
                msg = Msg.DB_DOES_NOT_ALLOW_CONN.format(dbname=dbname)
                self.logger.highlight('warning', msg, 'yellow')

        else:
            success = False
            msg = Msg.DB_OWNED_BY_POSTGRES_NOT_ALLOWED
            self.logger.highlight('warning', msg, 'yellow')

        return success

    def alter_dbs_owner(self, alt_list):
        '''
        Target:
            - change the owner of a group of databases and their tables.
        Parameters:
            - alt_list: names of the databases which are going to be altered.
        '''
        self.logger.highlight('info', Msg.PROCESSING_ALTERER, 'white')

        if alt_list:

            for db in alt_list:

                dbname = db['datname']

                msg = Msg.PROCESSING_DB.format(dbname=dbname)
                self.logger.highlight('info', msg, 'cyan')

                start_time = DateTools.get_current_datetime()
                # Change the owner of the database
                success = self.alter_db_owner(db)
                end_time = DateTools.get_current_datetime()
                # Get and show the process' duration
                diff = DateTools.get_diff_datetimes(start_time, end_time)

                if success:
                    msg = Msg.DB_ALTERER_DONE.format(dbname=dbname, diff=diff)
                    self.logger.highlight('info', msg, 'green')
                else:
                    msg = Msg.DB_ALTERER_FAIL.format(dbname=dbname)
                    self.logger.highlight('warning', msg, 'yellow',
                                          effect='bold')
        else:
            self.logger.highlight('warning', Msg.ALTERER_HAS_NOTHING_TO_DO,
                                  'yellow', effect='bold')

        self.logger.highlight('info', Msg.ALTERER_DONE, 'green', effect='bold')
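
A hedged usage sketch for this class; the connection values, role names and
the database dictionary are illustrative placeholders, not taken from the
original project:

    connecter = Connecter(server='localhost', user='postgres', port=5432)
    alterer = Alterer(connecter=connecter, in_dbs=['sales'],
                      old_role='old_owner', new_role='new_owner')
    # Each entry mirrors the keys read by alter_db_owner()
    alt_list = [{'datname': 'sales', 'owner': 'old_owner',
                 'datallowconn': 1}]
    alterer.alter_dbs_owner(alt_list)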
Ejemplo n.º 38
0
 def connectionLost(self, reason):
     Logger.debug('connection lost from network process', category='client')
Ejemplo n.º 39
0
class Trimmer:

    bkp_path = ''  # The path where the backups are stored
    prefix = ''  # The prefix of the backups' names
    in_dbs = []  # List of databases to be included in the process
    in_regex = ''  # Regular expression which must match the included databases
    # Flag which determines whether inclusion conditions take priority over
    # the exclusion ones
    in_priority = False
    ex_dbs = []  # List of databases to be excluded in the process
    ex_regex = ''  # Regular expression which must match the excluded databases
    min_n_bkps = None  # Minimum number of a database's backups to keep
    exp_days = None  # Number of days which make a backup obsolete
    max_size = None  # Maximum size of a group of database's backups
    # Maximum size in Bytes of a group of database's backups
    max_size_bytes = None
    # Related to max_size, equivalence to turn the specified unit of measure in
    # the max_size variable into Bytes
    equivalence = 10 ** 6
    # Flag which determines whether to show alerts about PostgreSQL
    pg_warnings = True
    # An object with connection parameters to connect to PostgreSQL
    connecter = None
    logger = None  # Logger to show and log some messages

    def __init__(self, bkp_path='', prefix='', in_dbs=[], in_regex='',
                 in_priority=False, ex_dbs=[], ex_regex='', min_n_bkps=1,
                 exp_days=365, max_size='10000MB', pg_warnings=True,
                 connecter=None, logger=None):

        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if bkp_path and os.path.isdir(bkp_path):
            self.bkp_path = bkp_path
        else:
            self.logger.stop_exe(Messenger.DIR_DOES_NOT_EXIST)

        if prefix is None:
            self.prefix = Default.PREFIX
        else:
            self.prefix = prefix

        if isinstance(in_dbs, list):
            self.in_dbs = in_dbs
        else:
            self.in_dbs = Casting.str_to_list(in_dbs)

        if Checker.check_regex(in_regex):
            self.in_regex = in_regex
        else:
            self.logger.stop_exe(Messenger.INVALID_IN_REGEX)

        if isinstance(in_priority, bool):
            self.in_priority = in_priority
        elif Checker.str_is_bool(in_priority):
            self.in_priority = Casting.str_to_bool(in_priority)
        else:
            self.logger.stop_exe(Messenger.INVALID_IN_PRIORITY)

        if isinstance(ex_dbs, list):
            self.ex_dbs = ex_dbs
        else:
            self.ex_dbs = Casting.str_to_list(ex_dbs)

        if Checker.check_regex(ex_regex):
            self.ex_regex = ex_regex
        else:
            self.logger.stop_exe(Messenger.INVALID_EX_REGEX)

        if min_n_bkps is None:
            self.min_n_bkps = Default.MIN_N_BKPS
        elif isinstance(min_n_bkps, int):
            self.min_n_bkps = min_n_bkps
        elif Checker.str_is_int(min_n_bkps):
            self.min_n_bkps = Casting.str_to_int(min_n_bkps)
        else:
            self.logger.stop_exe(Messenger.INVALID_MIN_BKPS)

        if exp_days is None:
            self.exp_days = Default.EXP_DAYS
        elif isinstance(exp_days, int) and exp_days >= -1:
            self.exp_days = exp_days
        elif Checker.str_is_valid_exp_days(exp_days):
            self.exp_days = Casting.str_to_int(exp_days)
        else:
            self.logger.stop_exe(Messenger.INVALID_OBS_DAYS)

        if max_size is None:
            self.max_size = Default.MAX_SIZE
        elif Checker.str_is_valid_max_size(max_size):
            self.max_size = max_size
        else:
            self.logger.stop_exe(Messenger.INVALID_MAX_TSIZE)

        # Split a string with size and unit of measure into a dictionary
        self.max_size = Casting.str_to_max_size(self.max_size)
        # Get the equivalence in Bytes of the specified unit of measure
        self.equivalence = Casting.get_equivalence(self.max_size['unit'])
        # Get the specified size in Bytes
        self.max_size_bytes = self.max_size['size'] * self.equivalence

        if isinstance(pg_warnings, bool):
            self.pg_warnings = pg_warnings
        elif Checker.str_is_bool(pg_warnings):
            self.pg_warnings = Casting.str_to_bool(pg_warnings)
        else:
            self.logger.stop_exe(Messenger.INVALID_PG_WARNINGS)

        if self.pg_warnings:
            if connecter:
                self.connecter = connecter
            else:
                self.logger.stop_exe(Messenger.NO_CONNECTION_PARAMS)

        message = Messenger.DB_TRIMMER_VARS.format(
            bkp_path=self.bkp_path, prefix=self.prefix, in_dbs=self.in_dbs,
            in_regex=self.in_regex, in_priority=self.in_priority,
            ex_dbs=self.ex_dbs, ex_regex=self.ex_regex,
            min_n_bkps=self.min_n_bkps, exp_days=self.exp_days,
            max_size=self.max_size, pg_warnings=self.pg_warnings)
        self.logger.debug(Messenger.DB_TRIMMER_VARS_INTRO)
        self.logger.debug(message)

    def trim_db(self, dbname, db_bkps_list):
        '''
        Target:
            - remove (if necessary) some of a database's backups, taking into
              account some parameters in the following order: minimum number of
              backups to keep > obsolete backups.
        Parameters:
            - dbname: name of the database whose backups are going to be
              trimmed.
            - db_bkps_list: list of backups of a database to analyse and trim.
        '''
        if self.exp_days == -1:  # No expiration date
            x_days_ago = None
        else:
            x_days_ago = time.time() - (60 * 60 * 24 * self.exp_days)

        # Store the total number of backups of the database
        num_bkps = len(db_bkps_list)
        # Clone the list to avoid conflict errors when removing
        db_bkps_lt = db_bkps_list[:]

        unlinked = False

        message = Messenger.BEGINNING_DB_TRIMMER.format(dbname=dbname)
        self.logger.highlight('info', message, 'white')

        start_time = DateTools.get_current_datetime()

        for f in db_bkps_list:

            # Break if the number of backups does not exceed the minimum
            if num_bkps <= self.min_n_bkps:
                break

            file_info = os.stat(f)

            # Obsolete backup
            if x_days_ago and file_info.st_ctime < x_days_ago:

                self.logger.info(Messenger.DELETING_OBSOLETE_BACKUP % f)
                os.unlink(f)  # Remove backup's file
                unlinked = True
                # Update the number of backups of the database
                num_bkps -= 1
                db_bkps_lt.remove(f)  # Update the list of database's backups

        end_time = DateTools.get_current_datetime()

        # Get total size of the backups in Bytes
        tsize = Dir.get_files_tsize(db_bkps_lt)
        # Get total size of the backups in the selected unit of measure
        tsize_unit = ceil(tsize / self.equivalence)

        ## UNCOMMENT THE NEXT SECTION TO PROCEED WITH THE DELETION OF BACKUPS
        ## WHEN THEIR TOTAL SIZE EXCEEDS THE SPECIFIED MAXIMUM SIZE

        #db_bkps_list = db_bkps_lt[:]

        #for f in db_bkps_list:
            ## If there are fewer backups than the minimum required...
            #if num_bkps <= self.min_n_bkps:
                #break
            #if tsize <= self.max_size_bytes:
                #break
            #else:
                #file_info = os.stat(f)
                #self.logger.info('Tamaño de copias de seguridad en disco '
                                 #'mayor que {} {}: eliminando el archivo '
                                 #'{}...'.format(self.max_size['size'],
                                                #self.max_size['unit'], f))
                #os.unlink(f)  # Remove backup's file
                #unlinked = True
                ## Update the number of backups of the database
                #num_bkps -= 1
                ## Update the list of database's backups
                ## db_bkps_lt.remove(f)
                #tsize -= file_info.st_size  # Update total size after deletion

        if not unlinked:

            message = Messenger.NO_DB_BACKUP_DELETED.format(dbname=dbname)
            self.logger.highlight('warning', message, 'yellow')

        if tsize > self.max_size_bytes:  # Total size exceeds the maximum

            message = Messenger.DB_BKPS_SIZE_EXCEEDED.format(
                dbname=dbname, tsize_unit=tsize_unit,
                size=self.max_size['size'], unit=self.max_size['unit'])
            self.logger.highlight('warning', message, 'yellow', effect='bold')

        # Get and show the process' duration
        diff = DateTools.get_diff_datetimes(start_time, end_time)
        self.logger.highlight('info', Messenger.DB_TRIMMER_DONE.format(
            dbname=dbname, diff=diff), 'green')

    def trim_dbs(self, bkps_list, dbs_to_clean):
        '''
        Target:
            - remove (if necessary) some backups of a group of databases,
              taking into account some parameters in the following order:
              minimum number of backups to keep > obsolete backups.
        Parameters:
            - bkps_list: list of backups found in the specified directory.
            - dbs_to_clean: names of the databases whose backups are going
              to be trimmed.
        '''
        # If no prefix is specified, trim all the backups (not only the ones
        # without a prefix)
        if self.prefix:
            regex = r'(' + self.prefix + ')db_(.+)_(\d{8}_\d{6}_.+)\.' \
                    '(?:dump|bz2|gz|zip)$'
        else:
            regex = r'(.+)?db_(.+)_(\d{8}_\d{6}_.+)\.(?:dump|bz2|gz|zip)$'
        regex = re.compile(regex)
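        # A hypothetical matching file name, with the prefix 'bkp_', would
        # be 'bkp_db_sales_20160101_120000_daily.dump'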

        for dbname in dbs_to_clean:

            db_bkps_list = []

            for file in bkps_list:

                # Extract file's name from the absolute path
                filename = os.path.basename(file)

                # If file matches regex (it means that file is a backup)
                if re.match(regex, filename):

                    # Extract parts of the name ([prefix], dbname, date)
                    parts = regex.search(filename).groups()
                    # Store the database's name whose this backup belongs to
                    fdbname = parts[1]

                    # If that backup belongs to a database which has to be
                    # trimmed
                    if dbname == fdbname:
                        # Append backup to the group of database's backups
                        db_bkps_list.append(file)
                    else:
                        continue
                else:
                    continue

            # Remove (if necessary) some backups of the specified database
            self.trim_db(dbname, db_bkps_list)

        # Remove directories which could be empty after the trim
        Dir.remove_empty_dirs(self.bkp_path)

        self.logger.highlight('info', Messenger.TRIMMER_DONE, 'green',
                              effect='bold')
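
A hedged usage sketch for this class; paths and names are illustrative
placeholders, and pg_warnings is disabled so that no PostgreSQL connection
is required:

    trimmer = Trimmer(bkp_path='/var/backups/pg', prefix='bkp_',
                      min_n_bkps=3, exp_days=90, max_size='5000MB',
                      pg_warnings=False)
    bkps_list = ['/var/backups/pg/bkp_db_sales_20160101_120000_daily.dump']
    trimmer.trim_dbs(bkps_list, ['sales'])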
Ejemplo n.º 40
0
 def start(self):
     Logger.debug('start command received from network process', category='client')
     multiprocessing.Process.start(self)
Ejemplo n.º 41
0
class TrimmerCluster:

    bkp_path = ''  # The path where the backups are stored
    prefix = ''  # The prefix of the backups' names
    min_n_bkps = None  # Minimum number of a database's backups to keep
    exp_days = None  # Number of days which make a backup obsolete
    max_size = None  # Maximum size of a group of database's backups
    # Maximum size in Bytes of a group of database's backups
    max_size_bytes = None
    # Related to max_size, equivalence to turn the specified unit of measure in
    # the max_size variable into Bytes
    equivalence = 10 ** 6
    logger = None  # Logger to show and log some messages

    def __init__(self, bkp_path='', prefix='', min_n_bkps=1, exp_days=365,
                 max_size='5000MB', logger=None):

        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if bkp_path and os.path.isdir(bkp_path):
            self.bkp_path = bkp_path
        else:
            self.logger.stop_exe(Messenger.DIR_DOES_NOT_EXIST)

        if prefix is None:
            self.prefix = Default.PREFIX
        else:
            self.prefix = prefix

        if min_n_bkps is None:
            self.min_n_bkps = Default.MIN_N_BKPS
        elif isinstance(min_n_bkps, int):
            self.min_n_bkps = min_n_bkps
        elif Checker.str_is_int(min_n_bkps):
            self.min_n_bkps = Casting.str_to_int(min_n_bkps)
        else:
            self.logger.stop_exe(Messenger.INVALID_MIN_BKPS)

        if exp_days is None:
            self.exp_days = Default.EXP_DAYS
        elif isinstance(exp_days, int) and exp_days >= -1:
            self.exp_days = exp_days
        elif Checker.str_is_valid_exp_days(exp_days):
            self.exp_days = Casting.str_to_int(exp_days)
        else:
            self.logger.stop_exe(Messenger.INVALID_OBS_DAYS)

        if max_size is None:
            self.max_size = Default.MAX_SIZE
        elif Checker.str_is_valid_max_size(max_size):
            self.max_size = max_size
        else:
            self.logger.stop_exe(Messenger.INVALID_MAX_TSIZE)

        # Split a string with size and unit of measure into a dictionary
        self.max_size = Casting.str_to_max_size(self.max_size)
        # Get the equivalence in Bytes of the specified unit of measure
        self.equivalence = Casting.get_equivalence(self.max_size['unit'])
        # Get the specified size in Bytes
        self.max_size_bytes = self.max_size['size'] * self.equivalence

        message = Messenger.CL_TRIMMER_VARS.format(
            bkp_path=self.bkp_path, prefix=self.prefix,
            min_n_bkps=self.min_n_bkps, exp_days=self.exp_days,
            max_size=self.max_size)
        self.logger.debug(Messenger.CL_TRIMMER_VARS_INTRO)
        self.logger.debug(message)

    def trim_cluster(self, ht_bkps_list):
        '''
        Target:
            - remove (if necessary) some of a cluster's backups, taking into
              account some parameters in the following order: minimum number of
              backups to keep > obsolete backups.
        Parameters:
            - ht_bkps_list: list of backups of a cluster to analyse and trim.
        '''
        if self.exp_days == -1:  # No expiration date
            x_days_ago = None
        else:
            x_days_ago = time.time() - (60 * 60 * 24 * self.exp_days)

        # Store the total number of backups of the cluster
        num_bkps = len(ht_bkps_list)
        # Clone the list to avoid conflict errors when removing
        ht_bkps_lt = ht_bkps_list[:]

        unlinked = False

        self.logger.highlight('info', Messenger.BEGINNING_CL_TRIMMER, 'white')

        start_time = DateTools.get_current_datetime()

        for f in ht_bkps_list:

            # Break if the number of backups does not exceed the minimum
            if num_bkps <= self.min_n_bkps:
                break

            file_info = os.stat(f)

            # Obsolete backup
            if x_days_ago and file_info.st_ctime < x_days_ago:

                self.logger.info(Messenger.DELETING_OBSOLETE_BACKUP % f)
                os.unlink(f)  # Remove backup's file
                unlinked = True
                # Update the number of backups of the cluster
                num_bkps -= 1
                ht_bkps_lt.remove(f)  # Update the list of cluster's backups

        end_time = DateTools.get_current_datetime()

        # Get total size of the backups in Bytes
        tsize = Dir.get_files_tsize(ht_bkps_lt)
        # Get total size of the backups in the selected unit of measure
        tsize_unit = ceil(tsize / self.equivalence)

        ## UNCOMMENT THE NEXT SECTION TO PROCEED WITH THE DELETION OF BACKUPS
        ## WHEN THEIR TOTAL SIZE EXCEEDS THE SPECIFIED MAXIMUM SIZE

        #ht_bkps_list = ht_bkps_lt[:]

        #for f in ht_bkps_list:
            ## If there are fewer backups than the minimum required...
            #if num_bkps <= self.min_n_bkps:
                #break
            #if tsize <= self.max_size_bytes:
                #break
            #else:
                #file_info = os.stat(f)
                #self.logger.info('Tamaño de copias de seguridad en disco '
                                 #'mayor que {} {}: eliminando el archivo '
                                 #'{}...'.format(self.max_size['size'],
                                                #self.max_size['unit'], f))
                #os.unlink(f)  # Remove backup's file
                #unlinked = True
                ## Update the number of backups of the cluster
                #num_bkps -= 1
                ## ht_bkps_lt.remove(f)  # Update the list of cluster's backups
                #tsize -= file_info.st_size  # Update total size after deletion

        if not unlinked:

            message = Messenger.NO_CL_BACKUP_DELETED
            self.logger.highlight('warning', message, 'yellow')

        if tsize > self.max_size_bytes:  # Total size exceeds the maximum

            message = Messenger.CL_BKPS_SIZE_EXCEEDED.format(
                tsize_unit=tsize_unit, size=self.max_size['size'],
                unit=self.max_size['unit'])
            self.logger.highlight('warning', message, 'yellow', effect='bold')

        # Get and show the process' duration
        diff = DateTools.get_diff_datetimes(start_time, end_time)
        self.logger.highlight('info', Messenger.CL_TRIMMER_DONE.format(
            diff=diff), 'green')

    def trim_clusters(self, bkps_list):
        '''
        Target:
            - remove (if necessary) some backups of a cluster, taking into
              account some parameters in the following order: minimum number of
              backups to keep > obsolete backups.
        Parameters:
            - bkps_list: list of backups found in the specified directory.
        '''
        # If no prefix is specified, trim all the backups (not only the ones
        # without a prefix)
        if self.prefix:
            regex = r'(' + self.prefix + ')ht_(.+_cluster)_' \
                    r'(\d{8}_\d{6}_.+)\.(?:dump|bz2|gz|zip)$'
        else:
            regex = r'(.+)?ht_(.+_cluster)_(\d{8}_\d{6}_.+)\.' \
                    '(?:dump|bz2|gz|zip)$'
        regex = re.compile(regex)
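        # A hypothetical matching file name (no prefix) would be
        # 'ht_main_cluster_20160101_120000_daily.dump'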

        ht_bkps_list = []

        for file in bkps_list:

            # Extract file's name from the absolute path
            filename = os.path.basename(file)

            # If file matches regex (it means that file is a backup)
            if re.match(regex, filename):

                # Append backup to the group of cluster's backups
                ht_bkps_list.append(file)

            else:
                continue

        if ht_bkps_list:

            # Remove (if necessary) some backups of the cluster
            self.trim_cluster(ht_bkps_list)
            # Remove directories which could be empty after the trim
            Dir.remove_empty_dirs(self.bkp_path)

        else:
            self.logger.highlight('warning', Messenger.NO_BACKUP_IN_DIR,
                                  'yellow', effect='bold')

        self.logger.highlight('info', Messenger.TRIMMER_DONE, 'green',
                              effect='bold')
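
A hedged usage sketch for this class; the path and file name are
illustrative placeholders:

    cl_trimmer = TrimmerCluster(bkp_path='/var/backups/pg_cluster',
                                prefix='', min_n_bkps=2, exp_days=30,
                                max_size='2000MB')
    cl_trimmer.trim_clusters(
        ['/var/backups/pg_cluster/'
         'ht_main_cluster_20160101_120000_daily.dump'])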
Ejemplo n.º 42
0
    networkProcess = ClientProcess()
    networkProcess.start()

    # init local game
    localInputFeed = UserInputFeedLocal()

    # init client logic
    client_game_handler = ClientNetworkGameHandler(game)
    tick_simulator = TickSimulator(Game.fps)

    quit_the_game = False
    while not quit_the_game:
        # fetch network inputs and update game state
        if not networkProcess.output_queue.empty():
            payload = networkProcess.output_queue.get()
            Logger.debug("fetch input_queue from main, from network process to screen", category='start_client')
            client_game_handler.on_line_received(payload)

        # fetch local input and update game state
        user_input = localInputFeed.fetch_user_input()
        client_game_handler.on_local_user_input(user_input)

        # redraw game
        tick_simulator.simulate(game, lambda self: self.tick())
        #game.tick()
        hmi.draw()

        if user_input.has_pressed_something():
            # send local user_inputs
            Logger.debug("user_input to payload, from screen to network process", category='start_client')
            payload = client_game_handler.user_input_to_payload(user_input)
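            # (the example is truncated here in the source; presumably the
            # payload would then be handed to the network process, e.g. via
            # an input queue)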
Ejemplo n.º 43
0
class Connecter:
    '''This class manages connections with database engines and operations
    involving them.
    So far, only PostgreSQL is supported.
    '''
    conn = None  # The PostgreSQL connection object
    cursor = None  # The cursor of the PostgreSQL connection
    server = None  # The target host of the connection
    user = None  # The PostgreSQL user who makes the connection
    port = None  # The target port of the connection
    database = None  # The target database of the connection
    logger = None  # A logger to show and log some messages

    # PostgreSQL version (from this one on some variables change their names)
    PG_PID_VERSION_THRESHOLD = 90200
    pg_pid_91 = 'procpid'  # Name for PostgreSQL PID variable till version 9.1
    pg_pid_92 = 'pid'  # Name for PostgreSQL PID variable since version 9.2

    def __init__(self, server, user, port, database=None, logger=None):

        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        self.server = server

        self.user = user

        if isinstance(port, int):
            self.port = port
        elif Checker.str_is_int(port):
            self.port = Casting.str_to_int(port)
        else:
            self.logger.stop_exe(Msg.INVALID_PORT)

        if database is None:
            self.database = Default.CONNECTION_DATABASE
        elif database:
            self.database = database
        else:
            self.logger.stop_exe(Msg.NO_CONNECTION_DATABASE)

        try:
            self.conn = psycopg2.connect(host=self.server, user=self.user,
                                         port=self.port,
                                         database=self.database)
            self.conn.autocommit = True
            # TODO: ask for a password here if possible
            self.cursor = self.conn.cursor(
                cursor_factory=psycopg2.extras.DictCursor)
        except Exception as e:
            self.logger.debug('Error en la función "pg_connect": {}.'.format(
                str(e)))
            self.logger.stop_exe(Msg.CONNECT_FAIL)

    def pg_disconnect(self):
        '''
        Target:
            - disconnect from PostgreSQL.
        '''
        try:
            self.cursor.close()
            self.conn.close()
        except Exception as e:
            self.logger.debug('Error en la función "pg_disconnect": '
                              '{}.'.format(str(e)))
            self.logger.stop_exe(Msg.DISCONNECT_FAIL)

    def get_pg_version(self):
        '''
        Target:
            - get the PostgreSQL version.
        Return:
            - an integer which gives the PostgreSQL version.
        '''
        return self.conn.server_version

    def get_pretty_pg_version(self):
        '''
        Target:
            - get the pretty PostgreSQL version.
        Return:
            - a string which gives the PostgreSQL version and more details.
        '''
        try:
            self.cursor.execute(Queries.GET_PG_PRETTY_VERSION)
            pretty_pg_version = self.cursor.fetchone()

            return pretty_pg_version[0]

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error en la función "get_pretty_pg_version": '
                              '{}.'.format(str(e)))
            self.logger.highlight('warning', Msg.GET_PG_VERSION_FAIL, 'yellow')
            return None

    def get_pid_str(self):
        '''
        Target:
            - get the name of the process id variable depending on the
              PostgreSQL version which is being used. Before version 9.2 this
              variable was called "procpid"; afterwards it became "pid".
        Return:
            - a string which gives the name of the process id variable.
        '''
        pg_version = self.get_pg_version()  # Get PostgreSQL version

        if pg_version < self.PG_PID_VERSION_THRESHOLD:
            return self.pg_pid_91
        else:
            return self.pg_pid_92

    def is_pg_superuser(self):
        '''
        Target:
            - check if a user connected to PostgreSQL has a superuser role.
        Return:
            - a boolean which indicates whether a user is a PostgreSQL
              superuser or not.
        '''
        self.cursor.execute(Queries.IS_PG_SUPERUSER)
        row = self.cursor.fetchone()

        return row['usesuper']

    def get_pg_time_start(self):
        '''
        Target:
            - get the time when PostgreSQL was started.
        Return:
            - a date which indicates the time when PostgreSQL was started.
        '''
        try:
            self.cursor.execute(Queries.GET_PG_TIME_START)
            row = self.cursor.fetchone()

            return row[0]

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error en la función "get_pg_time_start": '
                              '{}.'.format(str(e)))
            self.logger.highlight('warning', Msg.GET_PG_TIME_START_FAIL,
                                  'yellow')
            return None

    def get_pg_time_up(self):
        '''
        Target:
            - get how long PostgreSQL has been working.
        Return:
            - a date which indicates how long PostgreSQL has been working.
        '''
        try:
            self.cursor.execute(Queries.GET_PG_TIME_UP)
            row = self.cursor.fetchone()

            return row[0]

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error en la función "get_pg_time_up": '
                              '{}.'.format(str(e)))
            self.logger.highlight('warning', Msg.GET_PG_TIME_UP_FAIL, 'yellow')
            return None

    def get_pg_dbs_data(self, ex_templates=True, db_owner=''):
        '''
        Target:
            - do different queries to PostgreSQL depending on the parameters
              received, and store the results in the connection cursor.
        Parameters:
            - ex_templates: flag which determines whether or not to get those
              databases which are templates.
            - db_owner: the name of the user whose databases are going to be
              obtained.
        Return:
            - a list with the PostgreSQL databases and their names,
              datallowconn and owners.
        '''
        try:
            # Get all databases (no templates) of a specific owner
            if db_owner and ex_templates:
                self.cursor.execute(Queries.GET_PG_NO_TEMPLATE_DBS_BY_OWNER,
                                    (db_owner, ))
            # Get all databases (templates too) of a specific owner
            elif db_owner and ex_templates is False:
                self.cursor.execute(Queries.GET_PG_DBS_BY_OWNER, (db_owner, ))
            # Get all databases (templates too)
            elif not db_owner and ex_templates is False:
                self.cursor.execute(Queries.GET_PG_DBS)
            else:  # Get all databases (no templates)
                self.cursor.execute(Queries.GET_PG_NO_TEMPLATE_DBS)

            dbs = self.cursor.fetchall()

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error en la función "get_pg_dbs_data": '
                              '{}.'.format(str(e)))
            msg = Msg.GET_PG_DBS_DATA
            self.logger.highlight('warning', msg, 'yellow')
            dbs = None

        return dbs

    def get_pg_db_data(self, dbname):
        '''
        Target:
            - show some info about a specified database.
        Parameters:
            - dbname: name of the database whose information is going to be
              gathered.
        '''

        try:
            self.cursor.execute(Queries.GET_PG_DB_DATA, (dbname, ))
            db = self.cursor.fetchone()

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error en la función "get_pg_db_data": '
                              '{}.'.format(str(e)))
            msg = Msg.GET_PG_DB_DATA.format(dbname=dbname)
            self.logger.highlight('warning', msg, 'yellow')
            db = None

        return db

    def get_pg_user_data(self, username):
        '''
        Target:
            - get some info about a specified user.
        Parameters:
            - username: name of the user whose information is going to be
              gathered.
        '''
        try:
            pg_version = self.get_pg_version()  # Get PostgreSQL version

            if pg_version < self.PG_PID_VERSION_THRESHOLD:
                self.cursor.execute(Queries.GET_PG91_USER_DATA, (username, ))
            else:
                self.cursor.execute(Queries.GET_PG92_USER_DATA, (username, ))
            user = self.cursor.fetchone()

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error en la función "get_pg_user_data": '
                              '{}.'.format(str(e)))
            msg = Msg.GET_PG_USER_DATA.format(username=username)
            self.logger.highlight('warning', msg, 'yellow')
            user = None

        return user

    def get_pg_conn_data(self, connpid):
        '''
        Target:
            - show some info about backends.
        Parameters:
            - connpid: PID of the backend whose information is going to be
              shown.
        '''
        try:
            pg_version = self.get_pg_version()  # Get PostgreSQL version

            if pg_version < self.PG_PID_VERSION_THRESHOLD:
                self.cursor.execute(Queries.GET_PG91_CONN_DATA, (connpid, ))
            else:
                self.cursor.execute(Queries.GET_PG92_CONN_DATA, (connpid, ))
            conn = self.cursor.fetchone()

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error en la función "get_pg_conn_data": '
                              '{}.'.format(str(e)))
            msg = Msg.GET_PG_CONN_DATA.format(connpid=connpid)
            self.logger.highlight('warning', msg, 'yellow')
            conn = None

        return conn

    def get_pg_dbnames(self, ex_templates=False):
        '''
        Target:
            - get PostgreSQL databases' names depending on the parameters
              received, and store the results in the connection cursor.
        Parameters:
            - ex_templates: flag which determines whether or not to get those
              databases which are templates.
        '''
        try:
            if ex_templates:
                self.cursor.execute(Queries.GET_PG_NO_TEMPLATE_DBNAMES)
            else:
                self.cursor.execute(Queries.GET_PG_DBNAMES)
            result = self.cursor.fetchall()

            dbnames = []
            for record in result:
                dbnames.append(record['datname'])

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error en la función "get_pg_dbnames": '
                              '{}.'.format(str(e)))
            self.logger.highlight('warning', Msg.GET_PG_DBNAMES_DATA, 'yellow')
            dbnames = None

        return dbnames

    def get_pg_usernames(self):
        '''
        Target:
            - get PostgreSQL users' names.
        '''
        try:
            self.cursor.execute(Queries.GET_PG_USERNAMES)
            result = self.cursor.fetchall()

            usernames = []
            for record in result:
                usernames.append(record['usename'])

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error en la función "get_pg_usernames": '
                              '{}.'.format(str(e)))
            self.logger.highlight('warning', Msg.GET_PG_USERNAMES_DATA,
                                  'yellow')
            usernames = None

        return usernames

    def get_pg_connpids(self):
        '''
        Target:
            - get PostgreSQL backends' PIDs.
        '''
        pid = self.get_pid_str()  # Get PID variable's name
        formatted_query_get_pg_connpids = Queries.GET_PG_CONNPIDS.format(
            pid=pid)

        try:
            self.cursor.execute(formatted_query_get_pg_connpids)
            result = self.cursor.fetchall()

            pids = []
            for record in result:
                pids.append(record['pid'])

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error en la función "get_pg_connpids": '
                              '{}.'.format(str(e)))
            self.logger.highlight('warning', Msg.GET_PG_CONNPIDS_DATA,
                                  'yellow')
            pids = None

        return pids

    def allow_db_conn(self, dbname):
        '''
        Target:
            - enable connections to a specified PostgreSQL database.
        Parameters:
            - dbname: name of the database whose property "datallowconn" is
              going to be changed to allow connections to itself.
        Return:
            - a boolean which indicates if the process succeeded.
        '''
        try:
            self.cursor.execute(Queries.ALLOW_CONN_TO_PG_DB, (dbname, ))
            return True

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error en la función "allow_db_conn": '
                              '{}.'.format(str(e)))
            return False

    def disallow_db_conn(self, dbname):
        '''
        Target:
            - disable connections to a specified PostgreSQL database.
        Parameters:
            - dbname: name of the database whose property "datallowconn" is
              going to be changed to disallow connections to itself.
        Return:
            - a boolean which indicates if the process succeeded.
        '''
        try:
            self.cursor.execute(Queries.DISALLOW_CONN_TO_PG_DB, (dbname, ))
            return True

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error en la función "disallow_db_conn": '
                              '{}.'.format(str(e)))
            return False

    def get_datallowconn(self, dbname):
        '''
        Target:
            - get "datallowconn" from a specified PostgreSQL database.
        Parameters:
            - dbname: name of the database whose property "datallowconn" is
              going to be read.
        Return:
            - a boolean which indicates the value of "datallowconn".
        '''
        try:
            self.cursor.execute(Queries.GET_PG_DB_DATALLOWCONN, (dbname, ))
            result = self.cursor.fetchone()
            return result[0]

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error en la función "get_datallowconn": '
                              '{}.'.format(str(e)))
            return None
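
A hedged usage sketch for this class; connection values are illustrative
placeholders:

    connecter = Connecter(server='localhost', user='postgres', port=5432)
    print(connecter.get_pretty_pg_version())
    if connecter.is_pg_superuser():
        dbs = connecter.get_pg_dbs_data(ex_templates=True)
    connecter.pg_disconnect()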
Ejemplo n.º 44
0
class Mailer:

    level = 1  # Verbosity level of the email
    from_info = {}  # Information about the sender's email account
    to_infos = []  # List with the destination emails
    cc_infos = []  # List with the destination emails (carbon copy)
    bcc_infos = []  # List with the destination emails (blind carbon copy)
    server_tag = ''  # Alias of the sender's machine
    external_ip = ''  # External IP of the sender's machine
    op_type = ''  # Executed action
    group = None  # Affected group
    bkp_path = None  # Affected path of backups
    logger = None  # Logger to show and log some messages

    # Definition of constants

    OP_TYPES = {
        'u': 'Undefined method',
        'a': 'Alterer',
        'B': 'Backer',
        'd': 'Dropper',
        'r': 'Replicator',
        'R': 'Restorer',
        'T': 'Trimmer',
        't': 'Terminator',
        'v': 'Vacuumer',
    }

    OP_RESULTS = {
        0: ('<h2>{op_type}: <span style="color: green;">OK</span> at '
            '"{server_tag}"</h2>Date: <span style="font-weight: bold">{date}'
            '</span><br/>Time: <span style="font-weight: bold">{time}</span>'
            '<br/>Time zone: <span style="font-weight: bold">{zone}</span>'
            '<br/>Host name: <span style="font-weight: bold">{server}</span>'
            '<br/>Netifaces IPs: <span style="font-weight: bold">'
            '{internal_ips}</span><br/>External IP: <span style="font-weight: '
            'bold">{external_ip}</span><br/>Group: <span style="font-weight: '
            'bold">{group}</span><br/>Path: <span style="font-weight: bold">'
            '{bkp_path}</span><br/><br/><br/>The process has been executed '
            'successfully.<br/><br/>You can see its log file at the '
            'path:<br/><br/>{log_file}.'),
        1: ('<h2>{op_type}: <span style="color: orange;">WARNING</span> at '
            '"{server_tag}"</h2>Date: <span style="font-weight: bold">{date}'
            '</span><br/>Time: <span style="font-weight: bold">{time}</span>'
            '<br/>Time zone: <span style="font-weight: bold">{zone}</span>'
            '<br/>Host name: <span style="font-weight: bold">{server}</span>'
            '<br/>Netifaces IPs: <span style="font-weight: bold">'
            '{internal_ips}</span><br/>External IP: <span style="font-weight: '
            'bold">{external_ip}</span><br/>Group: <span style="font-weight: '
            'bold">{group}</span><br/>Path: <span style="font-weight: bold">'
            '{bkp_path}</span><br/><br/><br/>There were some warnings during '
            'the process, but not critical errors. Anyway, please check it, '
            'because its behaviour is not bound to have been the expected '
            'one.<br/><br/>You can see its log file at the following path:'
            '<br/><br/>{log_file}.'),
        2: ('<h2>{op_type}: <span style="color: red;">ERROR</span> at '
            '"{server_tag}"</h2>Date: <span style="font-weight: bold">{date}'
            '</span><br/>Time: <span style="font-weight: bold">{time}</span>'
            '<br/>Time zone: <span style="font-weight: bold">{zone}</span>'
            '<br/>Host name: <span style="font-weight: bold">{server}</span>'
            '<br/>Netifaces IPs: <span style="font-weight: bold">'
            '{internal_ips}</span><br/>External IP: <span style="font-weight: '
            'bold">{external_ip}</span><br/>Group: <span style="font-weight: '
            'bold">{group}</span><br/>Path: <span style="font-weight: bold">'
            '{bkp_path}</span><br/><br/><br/>There were some errors during '
            'the process, and they prevented some operations, because the '
            'execution was truncated. Please check immediately.<br/><br/>You '
            'can see its log file at the following path:<br/><br/>'
            '{log_file}.'),
        3: ('<h2>{op_type}: <span style="color: purple;">CRITICAL</span> at '
            '"{server_tag}"</h2>Date: <span style="font-weight: bold">{date}'
            '</span><br/>Time: <span style="font-weight: bold">{time}</span>'
            '<br/>Time zone: <span style="font-weight: bold">{zone}</span>'
            '<br/>Host name: <span style="font-weight: bold">{server}</span>'
            '<br/>Netifaces IPs: <span style="font-weight: bold">'
            '{internal_ips}</span><br/>External IP: <span style="font-weight: '
            'bold">{external_ip}</span><br/>Group: <span style="font-weight: '
            'bold">{group}</span><br/>Path: <span style="font-weight: bold">'
            '{bkp_path}</span><br/><br/><br/>There were some critical errors '
            'during the process. The execution could not be carried out. '
            'Please check immediately.<br/><br/>You can see its log file at '
            'the following path:<br/><br/>{log_file}.'),
    }

    OP_RESULTS_NO_HTML = {
        0: ('{op_type}: OK at "{server_tag}"\n'
            'Date: {date}\n'
            'Time: {time}\n'
            'Time zone: {zone}\n'
            'Host name: {server}\n'
            'Netifaces IPs: {internal_ips}\n'
            'External IP: {external_ip}\n'
            'Group: {group}\n'
            'Path: {bkp_path}\n'
            'The process has been executed successfully.\n'
            'You can see its log file at the following path:\n'
            '{log_file}.\n'),
        1: ('{op_type}: WARNING at "{server_tag}"\n'
            'Date: {date}\n'
            'Time: {time}\n'
            'Time zone: {zone}\n'
            'Host name: {server}\n'
            'Netifaces IPs: {internal_ips}\n'
            'External IP: {external_ip}\n'
            'Group: {group}\n'
            'Path: {bkp_path}\n'
            'There were some warnings during the process, but not critical\n'
            'errors. Anyway, please check it, because its behaviour is not\n'
            'bound to have been the expected one. You can see its\n'
            'log file at the following path: {log_file}.\n'),
        2: ('{op_type}: ERROR at "{server_tag}"\n'
            'Date: {date}\n'
            'Time: {time}\n'
            'Time zone: {zone}\n'
            'Host name: {server}\n'
            'Netifaces IPs: {internal_ips}\n'
            'External IP: {external_ip}\n'
            'Group: {group}\n'
            'Path: {bkp_path}\n'
            'There were some errors during the process, and they prevented\n'
            'some operations, because the execution was truncated. Please\n'
            'check immediately. You can see its log file at the\n'
            'following path: {log_file}.\n'),
        3: ('{op_type}: CRITICAL at "{server_tag}"\n'
            'Date: {date}\n'
            'Time: {time}\n'
            'Time zone: {zone}\n'
            'Host name: {server}\n'
            'Netifaces IPs: {internal_ips}\n'
            'External IP: {external_ip}\n'
            'Group: {group}\n'
            'Path: {bkp_path}\n'
            'There were some critical errors during the process. The\n'
            'execution could not be carried out. Please check immediately.\n'
            'You can see its log file at the following path:\n'
            '{log_file}.\n'),
    }

    def __init__(self, level=1, username='', email='', password='',
                 to_infos=[], cc_infos=[], bcc_infos=[], server_tag='',
                 external_ip='', op_type='', logger=None):

        if logger:
            self.logger = logger
        else:
            from logger.logger import Logger
            self.logger = Logger()

        if isinstance(level, int) and level in Default.MAIL_LEVELS:
            self.level = level
        elif Checker.str_is_int(level):
            self.level = Casting.str_to_int(level)
        else:
            self.level = Default.MAIL_LEVEL

        self.from_info['email'] = email
        if not Checker.str_is_valid_mail(email):
            message = Messenger.INVALID_FROM_MAIL.format(
                email=email)
            self.logger.highlight('warning', message, 'yellow')

        self.from_info['name'] = username
        if not username:
            message = Messenger.INVALID_FROM_USERNAME
            self.logger.highlight('warning', message, 'yellow')

        self.from_info['pwd'] = password
        if not password:
            message = Messenger.INVALID_FROM_PASSWORD
            self.logger.highlight('warning', message, 'yellow')

        to_infos = Casting.str_to_list(to_infos)
        self.to_infos = self.get_mail_infos(to_infos)

        cc_infos = Casting.str_to_list(cc_infos)
        self.cc_infos = self.get_mail_infos(cc_infos)

        bcc_infos = Casting.str_to_list(bcc_infos)
        self.bcc_infos = self.get_mail_infos(bcc_infos)

        if op_type in self.OP_TYPES:
            self.op_type = op_type
        else:
            self.op_type = 'u'

        self.server_tag = server_tag
        self.external_ip = external_ip

    def add_group(self, group):
        '''
        Target:
            - add a group to the information sent in the email. It will be
              used when "Backer" is executed.
        Parameters:
            - group: the group's name.
        '''
        self.group = group

    def add_bkp_path(self, bkp_path):
        '''
        Target:
            - add a path to the information sent in the email. It will be
              used when "Trimmer" is executed.
        Parameters:
            - bkp_path: the path where the involved backups are stored.
        '''
        self.bkp_path = bkp_path

    def get_mail_infos(self, mail_infos):
        '''
        Target:
            - take a list of strings with mail data in "username <email>"
              format, split each one into its parts and return the same
              data classified in dictionaries.
        Parameters:
            - mail_infos: the list of strings to be converted.
        Return:
            - a list of dictionaries with the username and the address of some
              mail accounts.
        '''
        temp_list = []

        for record in mail_infos:

            if Checker.str_is_valid_mail_info(record):

                mail_info = Casting.str_to_mail_info(record)

                if Checker.str_is_valid_mail(mail_info['email']):
                    temp_list.append(mail_info)
                else:
                    message = Messenger.INVALID_TO_MAIL.format(
                        email=mail_info['email'])
                    self.logger.highlight('warning', message, 'yellow')

            else:
                message = Messenger.INVALID_TO_MAIL_INFO.format(
                    mail_info=record)
                self.logger.highlight('warning', message, 'yellow')

        return temp_list

    def send_mail(self, detected_level):
        '''
        Target:
            - send an email to the specified email addresses.
        '''
        message = Messenger.BEGINNING_MAILER
        self.logger.highlight('info', message, 'white')

        # Get current date
        date = DateTools.get_date(fmt='%d-%m-%Y')
        time = DateTools.get_date(fmt='%H:%M:%S')
        zone = DateTools.get_date(fmt='%Z')

        # Get server name and IP addresses data
        server = IpAddress.get_hostname(self.logger)

        internal_ips = ''
        netifaces = IpAddress.get_netifaces_ips(self.logger)
        if netifaces:
            last_index = len(netifaces) - 1
            for index, netiface in enumerate(netifaces):
                internal_ips += '{} > {}'.format(netiface['netiface'],
                                                 netiface['ip'])
                if index != last_index:
                    internal_ips += ', '

        # Email full info template, for: John Doe <*****@*****.**>
        ADDR_TMPLT = '{} <{}>'

        # Sender and recipients email addresses (needed for sending the email)
        from_email_str = self.from_info['email']
        to_emails_list = [info['email'] for info in self.to_infos]
        cc_emails_list = [info['email'] for info in self.cc_infos]
        bcc_emails_list = [info['email'] for info in self.bcc_infos]
        all_emails_list = to_emails_list + cc_emails_list + bcc_emails_list

        # Sender and recipients full info (used in email message header)
        from_info_str = ADDR_TMPLT.format(self.from_info['name'],
                                          self.from_info['email'])
        to_infos_str = ', '.join(ADDR_TMPLT.format(
            info['name'], info['email']) for info in self.to_infos)
        cc_infos_str = ', '.join(ADDR_TMPLT.format(
            info['name'], info['email']) for info in self.cc_infos)

        # Build both an HTML version and a plain-text alternative, in case
        # the receiver's mail client cannot render HTML

        html = self.OP_RESULTS[detected_level].format(
            op_type=self.OP_TYPES[self.op_type], server_tag=self.server_tag,
            date=date, time=time, zone=zone, server=server,
            internal_ips=internal_ips, external_ip=self.external_ip,
            group=self.group, bkp_path=self.bkp_path,
            log_file=str(self.logger.log_file))

        text = self.OP_RESULTS_NO_HTML[detected_level].format(
            op_type=self.OP_TYPES[self.op_type], server_tag=self.server_tag,
            date=date, time=time, zone=zone, server=server,
            internal_ips=internal_ips, external_ip=self.external_ip,
            group=self.group, bkp_path=self.bkp_path,
            log_file=str(self.logger.log_file))

        # Specifying other email data (used in email message header)
        mail = MIMEMultipart('alternative')
        mail['From'] = from_info_str
        mail['To'] = to_infos_str
        mail['Cc'] = cc_infos_str
        mail['Subject'] = '[INFO] {op_type} results'.format(
            op_type=self.OP_TYPES[self.op_type].upper())

        # Record the MIME types of both parts - text/plain and text/html.
        part1 = MIMEText(text, 'plain')
        part2 = MIMEText(html, 'html')

        # Attach parts into message container. According to RFC 2046, the last
        # part of a multipart message, in this case the HTML message, is best
        # and preferred.
        mail.attach(part1)
        mail.attach(part2)

        msg_full = mail.as_string().encode()

        if all_emails_list:

            for email in all_emails_list:
                self.logger.info(Messenger.MAIL_DESTINY.format(email=email))

            # Sending the mail
            try:
                server = smtplib.SMTP('smtp.gmail.com:587')
                server.starttls()
                server.login(self.from_info['email'], self.from_info['pwd'])
                server.sendmail(from_email_str, all_emails_list, msg_full)
                server.quit()

            except smtplib.SMTPException as e:
                message = Messenger.SEND_MAIL_FAIL
                self.logger.highlight('warning', message, 'yellow')
                self.logger.debug('Error in function "send_mail": '
                                  '{}'.format(str(e)))
                return

        else:
            message = Messenger.MAILER_HAS_NOTHING_TO_DO
            self.logger.highlight('info', message, 'yellow')

        message = Messenger.SEND_MAIL_DONE
        self.logger.highlight('info', message, 'green')
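
A minimal usage sketch of the mailer above. Hedged: the snippet never shows the class declaration, so the name Mailer and every argument value below are assumptions for illustration only.

# Hedged usage sketch -- 'Mailer' and all values are placeholders, since
# the snippet above does not show the class declaration.
mailer = Mailer(level=1,
                username='Backup bot',
                email='bot@example.com',
                password='an-app-password',
                to_infos=['Admin <admin@example.com>'],
                server_tag='db01',
                op_type='u')  # 'u' is the fallback op type in __init__
mailer.add_group('production')        # used by the message templates
mailer.add_bkp_path('/var/backups/pgsql/')
mailer.send_mail(detected_level=0)    # 0 = OK, per the OP_RESULTS keys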
Ejemplo n.º 45
0
class Connecter:
    '''This class manages connections with database engines and operations
    involving them.
    So far, only PostgreSQL is supported.
    '''
    conn = None  # The PostgreSQL connection object
    cursor = None  # The cursor of the PostgreSQL connection
    server = None  # The target host of the connection
    user = None  # The PostgreSQL user who makes the connection
    port = None  # The target port of the connection
    database = None  # The target database of the connection
    logger = None  # A logger to show and log some messages

    # PostgreSQL version (from this one on some variables change their names)
    PG_PID_VERSION_THRESHOLD = 90200
    pg_pid_91 = 'procpid'  # Name for PostgreSQL PID variable till version 9.1
    pg_pid_92 = 'pid'  # Name for PostgreSQL PID variable since version 9.2

    def __init__(self, server, user, port, database=None, logger=None):

        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        self.server = server

        self.user = user

        if isinstance(port, int):
            self.port = port
        elif Checker.str_is_int(port):
            self.port = Casting.str_to_int(port)
        else:
            self.logger.stop_exe(Msg.INVALID_PORT)

        if database is None:
            self.database = Default.CONNECTION_DATABASE
        elif database:
            self.database = database
        else:
            self.logger.stop_exe(Msg.NO_CONNECTION_DATABASE)

        try:
            self.conn = psycopg2.connect(host=self.server,
                                         user=self.user,
                                         port=self.port,
                                         database=self.database)
            self.conn.autocommit = True
            # TODO: ask for a password here if possible
            self.cursor = self.conn.cursor(
                cursor_factory=psycopg2.extras.DictCursor)
        except Exception as e:
            self.logger.debug('Error in function "pg_connect": {}.'.format(
                str(e)))
            self.logger.stop_exe(Msg.CONNECT_FAIL)

    def pg_disconnect(self):
        '''
        Target:
            - disconnect from PostgreSQL.
        '''
        try:
            self.cursor.close()
            self.conn.close()
        except Exception as e:
            self.logger.debug('Error in function "pg_disconnect": '
                              '{}.'.format(str(e)))
            self.logger.stop_exe(Msg.DISCONNECT_FAIL)

    def get_pg_version(self):
        '''
        Target:
            - get the PostgreSQL version.
        Return:
            - an integer which gives the PostgreSQL version.
        '''
        return self.conn.server_version

    def get_pretty_pg_version(self):
        '''
        Target:
            - get the pretty PostgreSQL version.
        Return:
            - a string which gives the PostgreSQL version and more details.
        '''
        try:
            self.cursor.execute(Queries.GET_PG_PRETTY_VERSION)
            pretty_pg_version = self.cursor.fetchone()

            return pretty_pg_version[0]

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error in function "get_pretty_pg_version": '
                              '{}.'.format(str(e)))
            self.logger.highlight('warning', Msg.GET_PG_VERSION_FAIL, 'yellow')
            return None

    def get_pid_str(self):
        '''
        Target:
            - get the name of the process ID variable depending on the
              PostgreSQL version in use. Before version 9.2 this variable
              was called "procpid"; afterwards it became "pid".
        Return:
            - a string which gives the name of the process ID variable.
        '''
        pg_version = self.get_pg_version()  # Get PostgreSQL version

        if pg_version < self.PG_PID_VERSION_THRESHOLD:
            return self.pg_pid_91
        else:
            return self.pg_pid_92

    def is_pg_superuser(self):
        '''
        Target:
            - check if a user connected to PostgreSQL has a superuser role.
        Return:
            - a boolean which indicates whether a user is a PostgreSQL
              superuser or not.
        '''
        self.cursor.execute(Queries.IS_PG_SUPERUSER)
        row = self.cursor.fetchone()

        return row['usesuper']

    def get_pg_time_start(self):
        '''
        Target:
            - get the time when PostgreSQL was started.
        Return:
            - a date which indicates the time when PostgreSQL was started.
        '''
        try:
            self.cursor.execute(Queries.GET_PG_TIME_START)
            row = self.cursor.fetchone()

            return row[0]

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error in function "get_pg_time_start": '
                              '{}.'.format(str(e)))
            self.logger.highlight('warning', Msg.GET_PG_TIME_START_FAIL,
                                  'yellow')
            return None

    def get_pg_time_up(self):
        '''
        Target:
            - get how long PostgreSQL has been running.
        Return:
            - a value which indicates how long PostgreSQL has been running.
        '''
        try:
            self.cursor.execute(Queries.GET_PG_TIME_UP)
            row = self.cursor.fetchone()

            return row[0]

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error in function "get_pg_time_up": '
                              '{}.'.format(str(e)))
            self.logger.highlight('warning', Msg.GET_PG_TIME_UP_FAIL, 'yellow')
            return None

    def get_pg_dbs_data(self, ex_templates=True, db_owner=''):
        '''
        Target:
            - do different queries to PostgreSQL depending on the parameters
              received, and store the results in the connection cursor.
        Parameters:
            - ex_templates: flag which determines whether or not to exclude
              template databases.
            - db_owner: the name of the user whose databases are going to be
              obtained.
        Return:
            - a list with the PostgreSQL databases and their names,
              datallowconn and owners.
        '''
        try:
            # Get all databases (no templates) of a specific owner
            if db_owner and ex_templates:
                self.cursor.execute(Queries.GET_PG_NO_TEMPLATE_DBS_BY_OWNER,
                                    (db_owner, ))
            # Get all databases (templates too) of a specific owner
            elif db_owner and ex_templates is False:
                self.cursor.execute(Queries.GET_PG_DBS_BY_OWNER, (db_owner, ))
            # Get all databases (templates too)
            elif not db_owner and ex_templates is False:
                self.cursor.execute(Queries.GET_PG_DBS)
            else:  # Get all databases (no templates)
                self.cursor.execute(Queries.GET_PG_NO_TEMPLATE_DBS)

            dbs = self.cursor.fetchall()

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error in function "get_pg_dbs_data": '
                              '{}.'.format(str(e)))
            msg = Msg.GET_PG_DBS_DATA
            self.logger.highlight('warning', msg, 'yellow')
            dbs = None

        return dbs

    def get_pg_db_data(self, dbname):
        '''
        Target:
            - get some info about a specified database.
        Parameters:
            - dbname: name of the database whose information is going to be
              gathered.
        '''

        try:
            self.cursor.execute(Queries.GET_PG_DB_DATA, (dbname, ))
            db = self.cursor.fetchone()

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error in function "get_pg_db_data": '
                              '{}.'.format(str(e)))
            msg = Msg.GET_PG_DB_DATA.format(dbname=dbname)
            self.logger.highlight('warning', msg, 'yellow')
            db = None

        return db

    def get_pg_user_data(self, username):
        '''
        Target:
            - get some info about a specified user.
        Parameters:
            - username: name of the user whose information is going to be
              gathered.
        '''
        try:
            pg_version = self.get_pg_version()  # Get PostgreSQL version

            if pg_version < self.PG_PID_VERSION_THRESHOLD:
                self.cursor.execute(Queries.GET_PG91_USER_DATA, (username, ))
            else:
                self.cursor.execute(Queries.GET_PG92_USER_DATA, (username, ))
            user = self.cursor.fetchone()

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error in function "get_pg_user_data": '
                              '{}.'.format(str(e)))
            msg = Msg.GET_PG_USER_DATA.format(username=username)
            self.logger.highlight('warning', msg, 'yellow')
            user = None

        return user

    def get_pg_conn_data(self, connpid):
        '''
        Target:
            - get some info about a specified backend.
        Parameters:
            - connpid: PID of the backend whose information is going to be
              gathered.
        '''
        try:
            pg_version = self.get_pg_version()  # Get PostgreSQL version

            if pg_version < self.PG_PID_VERSION_THRESHOLD:
                self.cursor.execute(Queries.GET_PG91_CONN_DATA, (connpid, ))
            else:
                self.cursor.execute(Queries.GET_PG92_CONN_DATA, (connpid, ))
            conn = self.cursor.fetchone()

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error in function "get_pg_conn_data": '
                              '{}.'.format(str(e)))
            msg = Msg.GET_PG_CONN_DATA.format(connpid=connpid)
            self.logger.highlight('warning', msg, 'yellow')
            conn = None

        return conn

    def get_pg_dbnames(self, ex_templates=False):
        '''
        Target:
            - get PostgreSQL databases' names depending on the parameters
              received, and store the results in the connection cursor.
        Parameters:
            - ex_templates: flag which determines whether or not to exclude
              template databases.
        '''
        try:
            if ex_templates:
                self.cursor.execute(Queries.GET_PG_NO_TEMPLATE_DBNAMES)
            else:
                self.cursor.execute(Queries.GET_PG_DBNAMES)
            result = self.cursor.fetchall()

            dbnames = []
            for record in result:
                dbnames.append(record['datname'])

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error in function "get_pg_dbnames": '
                              '{}.'.format(str(e)))
            self.logger.highlight('warning', Msg.GET_PG_DBNAMES_DATA, 'yellow')
            dbnames = None

        return dbnames

    def get_pg_usernames(self):
        '''
        Target:
            - get PostgreSQL users' names.
        '''
        try:
            self.cursor.execute(Queries.GET_PG_USERNAMES)
            result = self.cursor.fetchall()

            usernames = []
            for record in result:
                usernames.append(record['usename'])

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error in function "get_pg_usernames": '
                              '{}.'.format(str(e)))
            self.logger.highlight('warning', Msg.GET_PG_USERNAMES_DATA,
                                  'yellow')
            usernames = None

        return usernames

    def get_pg_connpids(self):
        '''
        Target:
            - get PostgreSQL backends' PIDs.
        '''
        pid = self.get_pid_str()  # Get PID variable's name
        formatted_query_get_pg_connpids = Queries.GET_PG_CONNPIDS.format(
            pid=pid)

        try:
            self.cursor.execute(formatted_query_get_pg_connpids)
            result = self.cursor.fetchall()

            pids = []
            for record in result:
                pids.append(record['pid'])

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error in function "get_pg_connpids": '
                              '{}.'.format(str(e)))
            self.logger.highlight('warning', Msg.GET_PG_CONNPIDS_DATA,
                                  'yellow')
            pids = None

        return pids

    def allow_db_conn(self, dbname):
        '''
        Target:
            - enable connections to a specified PostgreSQL database.
        Parameters:
            - dbname: name of the database whose property "datallowconn" is
              going to be changed to allow connections to itself.
        Return:
            - a boolean which indicates if the process succeeded.
        '''
        try:
            self.cursor.execute(Queries.ALLOW_CONN_TO_PG_DB, (dbname, ))
            return True

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error in function "allow_db_conn": '
                              '{}.'.format(str(e)))
            return False

    def disallow_db_conn(self, dbname):
        '''
        Target:
            - disable connections to a specified PostgreSQL database.
        Parameters:
            - dbname: name of the database whose property "datallowconn" is
              going to be changed to disallow connections to itself.
        Return:
            - a boolean which indicates if the process succeeded.
        '''
        try:
            self.cursor.execute(Queries.DISALLOW_CONN_TO_PG_DB, (dbname, ))
            return True

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error in function "disallow_db_conn": '
                              '{}.'.format(str(e)))
            return False

    def get_datallowconn(self, dbname):
        '''
        Target:
            - get "datallowconn" from a specified PostgreSQL database.
        Parameters:
            - dbname: name of the database whose property "datallowconn" is
              going to be read.
        Return:
            - a boolean which indicates the value of "datallowconn".
        '''
        try:
            self.cursor.execute(Queries.GET_PG_DB_DATALLOWCONN, (dbname, ))
            result = self.cursor.fetchone()
            return result[0]

        except Exception as e:
            # Rollback to avoid errors in next queries because of waiting
            # this transaction to finish
            self.conn.rollback()
            self.logger.debug('Error in function "get_datallowconn": '
                              '{}.'.format(str(e)))
            return None
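
A short usage sketch of Connecter under the API shown above; the connection values are placeholders and, as the TODO in __init__ notes, it assumes PostgreSQL accepts the connection without a password.

# Hedged usage sketch; host, user and port are placeholders.
connecter = Connecter(server='localhost', user='postgres', port=5432)
print(connecter.get_pretty_pg_version())   # e.g. 'PostgreSQL 9.4.5 on ...'
pid_column = connecter.get_pid_str()       # 'procpid' (< 9.2) or 'pid'
dbnames = connecter.get_pg_dbnames(ex_templates=True)  # exclude templates
connecter.pg_disconnect()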
Ejemplo n.º 46
0
class Scheduler:

    time = ''  # Time when the command is going to be executed in Cron
    command = ''  # Command which is going to be executed in Cron.
    logger = None  # Logger to show and log some messages

    def __init__(self, time='', command='', logger=None):

        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        self.time = time.strip()
        self.command = command.strip()

    def show_lines(self):
        '''
        Target:
            - show the lines of the program's CRON file.
        '''
        self.logger.highlight('info', Messenger.SHOWING_CRONTAB_FILE, 'white')
        print()

        cron = CronTab(user=True)

        if cron:
            for line in cron.lines:
                print(str(line))
        else:
            print('\033[1;40;93m' + Messenger.NO_CRONTAB_FILE + '\033[0m')

    def add_line(self):
        '''
        Target:
            - add a line to the program's CRON file.
        '''
        cron = CronTab(user=True)

        job = cron.new(command=self.command)

        if self.time in ['@yearly', '@annually']:
            job.setall('0 0 1 1 *')
        elif self.time == '@monthly':
            job.setall('0 0 1 * *')
        elif self.time == '@weekly':
            job.setall('0 0 * * 0')
        elif self.time in ['@daily', '@midnight']:
            job.setall('0 0 * * *')
        elif self.time == '@hourly':
            job.setall('0 * * * *')
        elif self.time == '@reboot':
            job.every_reboot()
        else:
            job.setall(self.time)

        self.logger.highlight('info', Messenger.SCHEDULER_ADDING, 'white')

        if not cron:
            self.logger.info(Messenger.CREATING_CRONTAB)

        try:
            cron.write()
            self.logger.highlight('info', Messenger.SCHEDULER_ADD_DONE,
                                  'green')
            #print(cron.render())

        except Exception as e:
            self.logger.debug('Error in function "add_line": {}.'.format(
                str(e)))
            self.logger.stop_exe(Messenger.SCHEDULER_ADD_FAIL)

    def remove_line(self):
        '''
        Target:
            - remove a line from the program's CRON file.
        '''
        self.logger.highlight('info', Messenger.SCHEDULER_REMOVING, 'white')

        cron = CronTab(user=True)

        if not cron:
            self.logger.stop_exe(Messenger.NO_CRONTAB_FILE)

        deletion = False

        line = self.time + ' ' + self.command

        for job in cron:

            if str(job).strip() == line:

                try:
                    cron.remove(job)
                    message = Messenger.SCHEDULER_REMOVE_DONE.format(job=job)
                    self.logger.highlight('info', message, 'green')
                    deletion = True

                except Exception as e:
                    self.logger.debug('Error in function "remove_line": '
                                      '{}.'.format(str(e)))
                    message = Messenger.SCHEDULER_REMOVE_FAIL.format(job=job)
                    self.logger.highlight('warning', message, 'yellow')

        if not deletion:
            self.logger.stop_exe(Messenger.NO_CRONTAB_JOB_TO_DEL)

        cron.write()
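
A short usage sketch of Scheduler; the command path is a placeholder. Note that remove_line matches the literal 'time command' string against the rendered crontab lines, so an explicit five-field time expression is the safest way to add a job that can later be removed.

# Hedged usage sketch; the command path is a placeholder.
scheduler = Scheduler(time='0 3 * * *',
                      command='/usr/local/bin/backer.py --cluster')
scheduler.add_line()     # appends '0 3 * * * /usr/local/bin/...' to crontab
scheduler.show_lines()   # prints the lines of the current crontab
scheduler.remove_line()  # deletes the exact 'time command' line again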
Ejemplo n.º 47
0
class BackerCluster:

    bkp_path = ''  # The path where the backups are stored
    group = ''  # The name of the subdirectory where the backups are stored
    bkp_type = ''  # The type of the backups' files
    prefix = ''  # The prefix of the backups' names
    # Flag which determines whether the databases must be vacuumed before
    # the backup process
    vacuum = True
    # An object with connection parameters to connect to PostgreSQL
    connecter = None
    logger = None  # Logger to show and log some messages

    def __init__(self,
                 connecter=None,
                 bkp_path='',
                 group='',
                 bkp_type='dump',
                 prefix='',
                 vacuum=True,
                 logger=None):

        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if connecter:
            self.connecter = connecter
        else:
            self.logger.stop_exe(Msg.NO_CONNECTION_PARAMS)

        # If backup directory is not specified, create a default one to store
        # the backups
        if bkp_path:
            self.bkp_path = bkp_path
        else:
            self.bkp_path = Default.BKP_PATH
            Dir.create_dir(self.bkp_path, self.logger)

        if group:
            self.group = group
        else:
            self.group = Default.GROUP

        if bkp_type is None:
            self.bkp_type = Default.BKP_TYPE
        elif Checker.check_compress_type(bkp_type):
            self.bkp_type = bkp_type
        else:
            self.logger.stop_exe(Msg.INVALID_BKP_TYPE)

        self.prefix = prefix

        if isinstance(vacuum, bool):
            self.vacuum = vacuum
        elif Checker.str_is_bool(vacuum):
            self.vacuum = Casting.str_to_bool(vacuum)
        else:
            self.logger.stop_exe(Msg.INVALID_VACUUM)

        msg = Msg.CL_BACKER_VARS.format(server=self.connecter.server,
                                        user=self.connecter.user,
                                        port=self.connecter.port,
                                        bkp_path=self.bkp_path,
                                        group=self.group,
                                        bkp_type=self.bkp_type,
                                        prefix=self.prefix,
                                        vacuum=self.vacuum)
        self.logger.debug(Msg.CL_BACKER_VARS_INTRO)
        self.logger.debug(msg)

    def backup_all(self, bkps_dir):
        '''
        Target:
            - make a backup of a cluster.
        Parameters:
            - bkps_dir: directory where the backup is going to be stored.
        Return:
            - a boolean which indicates the success of the process.
        '''
        success = True
        # Get date and time of the zone
        init_ts = DateTools.get_date()
        # Get current year
        year = str(DateTools.get_year(init_ts))
        # Get current month
        month = str(DateTools.get_month(init_ts))
        # Create new directories with the year and the month of the backup
        bkp_dir = bkps_dir + year + '/' + month + '/'
        Dir.create_dir(bkp_dir, self.logger)

        # Set backup's name
        file_name = self.prefix + 'ht_' + self.connecter.server + \
            str(self.connecter.port) + '_cluster_' + init_ts + '.' + \
            self.bkp_type

        # Store the command to do depending on the backup type
        if self.bkp_type == 'gz':  # Zip with gzip
            command = 'pg_dumpall -U {} -h {} -p {} | gzip > {}'.format(
                self.connecter.user, self.connecter.server,
                self.connecter.port, bkp_dir + file_name)
        elif self.bkp_type == 'bz2':  # Zip with bzip2
            command = 'pg_dumpall -U {} -h {} -p {} | bzip2 > {}'.format(
                self.connecter.user, self.connecter.server,
                self.connecter.port, bkp_dir + file_name)
        elif self.bkp_type == 'zip':  # Zip with zip
            command = 'pg_dumpall -U {} -h {} -p {} | zip > {}'.format(
                self.connecter.user, self.connecter.server,
                self.connecter.port, bkp_dir + file_name)
        else:  # Do not zip
            command = 'pg_dumpall -U {} -h {} -p {} > {}'.format(
                self.connecter.user, self.connecter.server,
                self.connecter.port, bkp_dir + file_name)
        try:
            # Execute the command in console
            result = subprocess.call(command, shell=True)
            if result != 0:
                raise Exception()

        except Exception as e:
            self.logger.debug('Error in function "backup_all": {}.'.format(
                str(e)))
            success = False

        return success

    def backup_cl(self):
        '''
        Target:
            - vacuum if necessary and make a backup of a cluster.
        '''
        self.logger.highlight('info', Msg.CHECKING_BACKUP_DIR, 'white')

        # Create a new directory with the name of the group
        bkps_dir = self.bkp_path + self.group + Default.CL_BKPS_DIR
        Dir.create_dir(bkps_dir, self.logger)

        self.logger.info(Msg.DESTINY_DIR.format(path=bkps_dir))

        # Vacuum the databases before the backup process if necessary
        if self.vacuum:
            vacuumer = Vacuumer(connecter=self.connecter, logger=self.logger)
            dbs_all = vacuumer.connecter.get_pg_dbs_data(
                vacuumer.ex_templates, vacuumer.db_owner)
            vacuumer.vacuum_dbs(dbs_all)

        self.logger.highlight('info', Msg.BEGINNING_CL_BACKER, 'white')

        start_time = DateTools.get_current_datetime()
        # Make the backup of the cluster
        success = self.backup_all(bkps_dir)
        end_time = DateTools.get_current_datetime()
        # Get and show the process' duration
        diff = DateTools.get_diff_datetimes(start_time, end_time)

        if success:
            msg = Msg.CL_BACKER_DONE.format(diff=diff)
            self.logger.highlight('info', msg, 'green', effect='bold')
        else:
            self.logger.highlight('warning',
                                  Msg.CL_BACKER_FAIL,
                                  'yellow',
                                  effect='bold')

        self.logger.highlight('info', Msg.BACKER_DONE, 'green', effect='bold')
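
Finally, a hedged usage sketch of BackerCluster that reuses a Connecter like the one sketched earlier; all paths and values are placeholders.

# Hedged usage sketch; paths and values are placeholders.
connecter = Connecter(server='localhost', user='postgres', port=5432)
backer = BackerCluster(connecter=connecter,
                       bkp_path='/var/backups/pgsql/',
                       group='production',
                       bkp_type='gz',
                       vacuum=False)  # skip the pre-backup vacuum
backer.backup_cl()
# For bkp_type='gz', backup_all builds and runs a shell pipeline, roughly:
#   pg_dumpall -U postgres -h localhost -p 5432 | gzip > <bkp_dir>/<file>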